+
+
+
+
+ """,
+ """
+
+
+
+
+
+
""",
+ ),
+ (""" """, """ """),
+ ],
+)
+def test_nh3_clean_configuration(test_input, expected):
+ assert nh3_clean_with_whitelist(test_input).replace(" ", "") == expected.replace(" ", "")
diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py
index 052604d151276..297d23e54372c 100644
--- a/posthog/api/test/test_team.py
+++ b/posthog/api/test/test_team.py
@@ -54,7 +54,8 @@ def test_retrieve_project(self):
get_instance_setting("PERSON_ON_EVENTS_ENABLED") or get_instance_setting("PERSON_ON_EVENTS_V2_ENABLED"),
)
self.assertEqual(
- response_data["groups_on_events_querying_enabled"], get_instance_setting("GROUPS_ON_EVENTS_ENABLED")
+ response_data["groups_on_events_querying_enabled"],
+ get_instance_setting("GROUPS_ON_EVENTS_ENABLED"),
)
# TODO: These assertions will no longer make sense when we fully remove these attributes from the model
@@ -188,13 +189,17 @@ def test_cant_update_project_from_another_org(self):
def test_filter_permission(self):
response = self.client.patch(
- f"/api/projects/{self.team.id}/", {"test_account_filters": [{"key": "$current_url", "value": "test"}]}
+ f"/api/projects/{self.team.id}/",
+ {"test_account_filters": [{"key": "$current_url", "value": "test"}]},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.assertEqual(response_data["name"], self.team.name)
- self.assertEqual(response_data["test_account_filters"], [{"key": "$current_url", "value": "test"}])
+ self.assertEqual(
+ response_data["test_account_filters"],
+ [{"key": "$current_url", "value": "test"}],
+ )
@patch("posthog.api.team.delete_bulky_postgres_data")
@patch("posthoganalytics.capture")
@@ -211,13 +216,18 @@ def test_delete_team_own_second(self, mock_capture: MagicMock, mock_delete_bulky
self.assertEqual(response.status_code, 204)
self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1)
self.assertEqual(
- AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id)).count(), 1
+ AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id)).count(),
+ 1,
)
mock_capture.assert_called_once_with(
self.user.distinct_id,
"team deleted",
properties={},
- groups={"instance": ANY, "organization": str(self.organization.id), "project": str(self.team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.organization.id),
+ "project": str(self.team.uuid),
+ },
)
mock_delete_bulky_postgres_data.assert_called_once_with(team_ids=[team.pk])
@@ -240,14 +250,23 @@ def test_delete_bulky_postgres_data(self):
cohort = Cohort.objects.create(team=team, created_by=self.user, name="test")
person = Person.objects.create(
- team=team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com", "team": "posthog"}
+ team=team,
+ distinct_ids=["example_id"],
+ properties={"email": "tim@posthog.com", "team": "posthog"},
)
person.add_distinct_id("test")
flag = FeatureFlag.objects.create(
- team=team, name="test", key="test", rollout_percentage=50, created_by=self.user
+ team=team,
+ name="test",
+ key="test",
+ rollout_percentage=50,
+ created_by=self.user,
)
FeatureFlagHashKeyOverride.objects.create(
- team_id=team.pk, person_id=person.id, feature_flag_key=flag.key, hash_key="test"
+ team_id=team.pk,
+ person_id=person.id,
+ feature_flag_key=flag.key,
+ hash_key="test",
)
CohortPeople.objects.create(cohort_id=cohort.pk, person_id=person.pk)
EarlyAccessFeature.objects.create(
@@ -359,13 +378,16 @@ def test_update_timezone_remove_cache(self):
data={"filters": {"events": json.dumps([{"id": "user signed up"}])}},
)
response = self.client.post(
- f"/api/projects/{self.team.id}/insights/", data={"filters": {"events": json.dumps([{"id": "$pageview"}])}}
+ f"/api/projects/{self.team.id}/insights/",
+ data={"filters": {"events": json.dumps([{"id": "$pageview"}])}},
).json()
self.client.get(
- f"/api/projects/{self.team.id}/insights/trend/", data={"events": json.dumps([{"id": "$pageview"}])}
+ f"/api/projects/{self.team.id}/insights/trend/",
+ data={"events": json.dumps([{"id": "$pageview"}])},
)
self.client.get(
- f"/api/projects/{self.team.id}/insights/trend/", data={"events": json.dumps([{"id": "user signed up"}])}
+ f"/api/projects/{self.team.id}/insights/trend/",
+ data={"events": json.dumps([{"id": "user signed up"}])},
)
self.assertEqual(cache.get(response["filters_hash"])["result"][0]["count"], 0)
@@ -412,7 +434,8 @@ def test_team_is_cached_on_create_and_update(self):
self.assertEqual(cached_team.id, response.json()["id"])
response = self.client.patch(
- f"/api/projects/{team_id}/", {"timezone": "Europe/Istanbul", "session_recording_opt_in": True}
+ f"/api/projects/{team_id}/",
+ {"timezone": "Europe/Istanbul", "session_recording_opt_in": True},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -443,7 +466,10 @@ def test_turn_on_exception_autocapture(self):
response = self.client.get("/api/projects/@current/")
assert response.json()["autocapture_exceptions_opt_in"] is None
- response = self.client.patch("/api/projects/@current/", {"autocapture_exceptions_opt_in": "Welwyn Garden City"})
+ response = self.client.patch(
+ "/api/projects/@current/",
+ {"autocapture_exceptions_opt_in": "Welwyn Garden City"},
+ )
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["detail"] == "Must be a valid boolean."
@@ -457,12 +483,16 @@ def test_configure_exception_autocapture_event_dropping(self):
assert response.json()["autocapture_exceptions_errors_to_ignore"] is None
response = self.client.patch(
- "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}}
+ "/api/projects/@current/",
+ {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["detail"] == "Must provide a list for field: autocapture_exceptions_errors_to_ignore."
- response = self.client.patch("/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": [1, False]})
+ response = self.client.patch(
+ "/api/projects/@current/",
+ {"autocapture_exceptions_errors_to_ignore": [1, False]},
+ )
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert (
response.json()["detail"]
@@ -470,7 +500,8 @@ def test_configure_exception_autocapture_event_dropping(self):
)
response = self.client.patch(
- "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": ["wat am i"]}
+ "/api/projects/@current/",
+ {"autocapture_exceptions_errors_to_ignore": ["wat am i"]},
)
assert response.status_code == status.HTTP_200_OK
response = self.client.get("/api/projects/@current/")
@@ -478,7 +509,8 @@ def test_configure_exception_autocapture_event_dropping(self):
def test_configure_exception_autocapture_event_dropping_only_allows_simple_config(self):
response = self.client.patch(
- "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]}
+ "/api/projects/@current/",
+ {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert (
@@ -488,10 +520,30 @@ def test_configure_exception_autocapture_event_dropping_only_allows_simple_confi
@parameterized.expand(
[
- ["non numeric string", "Welwyn Garden City", "invalid_input", "A valid number is required."],
- ["negative number", "-1", "min_value", "Ensure this value is greater than or equal to 0."],
- ["greater than one", "1.5", "max_value", "Ensure this value is less than or equal to 1."],
- ["too many digits", "0.534", "max_decimal_places", "Ensure that there are no more than 2 decimal places."],
+ [
+ "non numeric string",
+ "Welwyn Garden City",
+ "invalid_input",
+ "A valid number is required.",
+ ],
+ [
+ "negative number",
+ "-1",
+ "min_value",
+ "Ensure this value is greater than or equal to 0.",
+ ],
+ [
+ "greater than one",
+ "1.5",
+ "max_value",
+ "Ensure this value is less than or equal to 1.",
+ ],
+ [
+ "too many digits",
+ "0.534",
+ "max_decimal_places",
+ "Ensure that there are no more than 2 decimal places.",
+ ],
]
)
def test_invalid_session_recording_sample_rates(
@@ -508,9 +560,24 @@ def test_invalid_session_recording_sample_rates(
@parameterized.expand(
[
- ["non numeric string", "Trentham monkey forest", "invalid_input", "A valid integer is required."],
- ["negative number", "-1", "min_value", "Ensure this value is greater than or equal to 0."],
- ["greater than 15000", "15001", "max_value", "Ensure this value is less than or equal to 15000."],
+ [
+ "non numeric string",
+ "Trentham monkey forest",
+ "invalid_input",
+ "A valid integer is required.",
+ ],
+ [
+ "negative number",
+ "-1",
+ "min_value",
+ "Ensure this value is greater than or equal to 0.",
+ ],
+ [
+ "greater than 15000",
+ "15001",
+ "max_value",
+ "Ensure this value is less than or equal to 15000.",
+ ],
["too many digits", "0.5", "invalid_input", "A valid integer is required."],
]
)
@@ -518,7 +585,8 @@ def test_invalid_session_recording_minimum_duration(
self, _name: str, provided_value: str, expected_code: str, expected_error: str
) -> None:
response = self.client.patch(
- "/api/projects/@current/", {"session_recording_minimum_duration_milliseconds": provided_value}
+ "/api/projects/@current/",
+ {"session_recording_minimum_duration_milliseconds": provided_value},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json() == {
@@ -530,7 +598,12 @@ def test_invalid_session_recording_minimum_duration(
@parameterized.expand(
[
- ["string", "Marple bridge", "invalid_input", "Must provide a dictionary or None."],
+ [
+ "string",
+ "Marple bridge",
+ "invalid_input",
+ "Must provide a dictionary or None.",
+ ],
["numeric", "-1", "invalid_input", "Must provide a dictionary or None."],
[
"unexpected json - no id",
@@ -566,11 +639,15 @@ def test_invalid_session_recording_linked_flag(
def test_can_set_and_unset_session_recording_linked_flag(self) -> None:
first_patch_response = self.client.patch(
- "/api/projects/@current/", {"session_recording_linked_flag": {"id": 1, "key": "provided_value"}}
+ "/api/projects/@current/",
+ {"session_recording_linked_flag": {"id": 1, "key": "provided_value"}},
)
assert first_patch_response.status_code == status.HTTP_200_OK
get_response = self.client.get("/api/projects/@current/")
- assert get_response.json()["session_recording_linked_flag"] == {"id": 1, "key": "provided_value"}
+ assert get_response.json()["session_recording_linked_flag"] == {
+ "id": 1,
+ "key": "provided_value",
+ }
response = self.client.patch("/api/projects/@current/", {"session_recording_linked_flag": None})
assert response.status_code == status.HTTP_200_OK
@@ -585,7 +662,11 @@ def create_team(organization: Organization, name: str = "Test team") -> Team:
with real world scenarios.
"""
return Team.objects.create(
- organization=organization, name=name, ingested_event=True, completed_snippet_onboarding=True, is_demo=True
+ organization=organization,
+ name=name,
+ ingested_event=True,
+ completed_snippet_onboarding=True,
+ is_demo=True,
)
diff --git a/posthog/api/test/test_uploaded_media.py b/posthog/api/test/test_uploaded_media.py
index c611643cb2610..2a7a23407fef6 100644
--- a/posthog/api/test/test_uploaded_media.py
+++ b/posthog/api/test/test_uploaded_media.py
@@ -52,7 +52,9 @@ def test_can_upload_and_retrieve_a_file(self) -> None:
with self.settings(OBJECT_STORAGE_ENABLED=True, OBJECT_STORAGE_MEDIA_UPLOADS_FOLDER=TEST_BUCKET):
with open(get_path_to("a-small-but-valid.gif"), "rb") as image:
response = self.client.post(
- f"/api/projects/{self.team.id}/uploaded_media", {"image": image}, format="multipart"
+ f"/api/projects/{self.team.id}/uploaded_media",
+ {"image": image},
+ format="multipart",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json())
assert response.json()["name"] == "a-small-but-valid.gif"
@@ -68,14 +70,22 @@ def test_can_upload_and_retrieve_a_file(self) -> None:
def test_rejects_non_image_file_type(self) -> None:
fake_file = SimpleUploadedFile(name="test_image.jpg", content=b"a fake image", content_type="text/csv")
response = self.client.post(
- f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_file}, format="multipart"
+ f"/api/projects/{self.team.id}/uploaded_media",
+ {"image": fake_file},
+ format="multipart",
+ )
+ self.assertEqual(
+ response.status_code,
+ status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
+ response.json(),
)
- self.assertEqual(response.status_code, status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, response.json())
def test_rejects_file_manually_crafted_to_start_with_image_magic_bytes(self) -> None:
with open(get_path_to("file-masquerading-as-a.gif"), "rb") as image:
response = self.client.post(
- f"/api/projects/{self.team.id}/uploaded_media", {"image": image}, format="multipart"
+ f"/api/projects/{self.team.id}/uploaded_media",
+ {"image": image},
+ format="multipart",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json())
@@ -88,10 +98,14 @@ def test_made_up_id_is_404(self) -> None:
def test_rejects_too_large_file_type(self) -> None:
four_megabytes_plus_a_little = b"1" * (4 * 1024 * 1024 + 1)
fake_big_file = SimpleUploadedFile(
- name="test_image.jpg", content=four_megabytes_plus_a_little, content_type="image/jpeg"
+ name="test_image.jpg",
+ content=four_megabytes_plus_a_little,
+ content_type="image/jpeg",
)
response = self.client.post(
- f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_big_file}, format="multipart"
+ f"/api/projects/{self.team.id}/uploaded_media",
+ {"image": fake_big_file},
+ format="multipart",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json())
self.assertEqual(response.json()["detail"], "Uploaded media must be less than 4MB")
@@ -100,7 +114,12 @@ def test_rejects_upload_when_object_storage_is_unavailable(self) -> None:
with override_settings(OBJECT_STORAGE_ENABLED=False):
fake_big_file = SimpleUploadedFile(name="test_image.jpg", content=b"", content_type="image/jpeg")
response = self.client.post(
- f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_big_file}, format="multipart"
+ f"/api/projects/{self.team.id}/uploaded_media",
+ {"image": fake_big_file},
+ format="multipart",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json())
- self.assertEqual(response.json()["detail"], "Object storage must be available to allow media uploads.")
+ self.assertEqual(
+ response.json()["detail"],
+ "Object storage must be available to allow media uploads.",
+ )
diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py
index 62dca8dad6277..c5d93bbef211e 100644
--- a/posthog/api/test/test_user.py
+++ b/posthog/api/test/test_user.py
@@ -206,7 +206,11 @@ def test_update_current_user(self, mock_capture, mock_identify_task):
"partial_notification_settings",
]
},
- groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.team.organization_id),
+ "project": str(self.team.uuid),
+ },
)
@patch("posthog.tasks.user_identify.identify_task")
@@ -221,7 +225,12 @@ def test_set_scene_personalisation_for_user_dashboard_must_be_in_current_team(
response = self.client.post(
"/api/users/@me/scene_personalisation",
# even if someone tries to send a different user or team they are ignored
- {"user": 12345, "team": 12345, "dashboard": str(dashboard_one.id), "scene": "Person"},
+ {
+ "user": 12345,
+ "team": 12345,
+ "dashboard": str(dashboard_one.id),
+ "scene": "Person",
+ },
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
@@ -320,7 +329,12 @@ def _assert_set_scene_choice(
response = self.client.post(
"/api/users/@me/scene_personalisation",
# even if someone tries to send a different user or team they are ignored
- {"user": 12345, "team": 12345, "dashboard": str(dashboard.id), "scene": scene},
+ {
+ "user": 12345,
+ "team": 12345,
+ "dashboard": str(dashboard.id),
+ "scene": scene,
+ },
)
assert response.status_code == status.HTTP_200_OK
response_data = response.json()
@@ -354,7 +368,10 @@ def test_no_notifications_when_user_email_is_changed_and_email_not_available(
@patch("posthog.tasks.email.send_email_change_emails.delay")
@patch("posthog.tasks.email.send_email_verification.delay")
def test_notifications_sent_when_user_email_is_changed_and_email_available(
- self, mock_send_email_verification, mock_send_email_change_emails, mock_is_email_available
+ self,
+ mock_send_email_verification,
+ mock_send_email_change_emails,
+ mock_is_email_available,
):
"""Test that when a user updates their email, they receive a verification email before the switch actually happens."""
self.user.email = "alpha@example.com"
@@ -381,7 +398,10 @@ def test_notifications_sent_when_user_email_is_changed_and_email_available(
token = email_verification_token_generator.make_token(self.user)
with freeze_time("2020-01-01T21:37:00+00:00"):
- response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token})
+ response = self.client.post(
+ f"/api/users/@me/verify_email/",
+ {"uuid": self.user.uuid, "token": token},
+ )
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.user.refresh_from_db()
@@ -389,7 +409,10 @@ def test_notifications_sent_when_user_email_is_changed_and_email_available(
self.assertIsNone(self.user.pending_email)
mock_is_email_available.assert_called_once()
mock_send_email_change_emails.assert_called_once_with(
- "2020-01-01T21:37:00+00:00", self.user.first_name, "alpha@example.com", "beta@example.com"
+ "2020-01-01T21:37:00+00:00",
+ self.user.first_name,
+ "alpha@example.com",
+ "beta@example.com",
)
@patch("posthog.api.user.is_email_available", return_value=True)
@@ -420,7 +443,8 @@ def test_cannot_upgrade_yourself_to_staff_user(self):
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(
- response.json(), self.permission_denied_response("You are not a staff user, contact your instance admin.")
+ response.json(),
+ self.permission_denied_response("You are not a staff user, contact your instance admin."),
)
self.user.refresh_from_db()
@@ -447,7 +471,11 @@ def test_can_update_current_organization(self, mock_capture, mock_identify):
self.user.distinct_id,
"user updated",
properties={"updated_attrs": ["current_organization", "current_team"]},
- groups={"instance": ANY, "organization": str(self.new_org.id), "project": str(self.new_project.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.new_org.id),
+ "project": str(self.new_project.uuid),
+ },
)
@patch("posthog.tasks.user_identify.identify_task")
@@ -471,7 +499,11 @@ def test_can_update_current_project(self, mock_capture, mock_identify):
self.user.distinct_id,
"user updated",
properties={"updated_attrs": ["current_organization", "current_team"]},
- groups={"instance": ANY, "organization": str(self.new_org.id), "project": str(team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.new_org.id),
+ "project": str(team.uuid),
+ },
)
def test_cannot_set_mismatching_org_and_team(self):
@@ -481,7 +513,11 @@ def test_cannot_set_mismatching_org_and_team(self):
self.user.join(organization=org)
response = self.client.patch(
- "/api/users/@me/", {"set_current_team": team.id, "set_current_organization": self.organization.id}
+ "/api/users/@me/",
+ {
+ "set_current_team": team.id,
+ "set_current_organization": self.organization.id,
+ },
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
@@ -583,7 +619,10 @@ def test_user_can_update_password(self, mock_capture, mock_identify):
user = self._create_user("bob@posthog.com", password="A12345678")
self.client.force_login(user)
- response = self.client.patch("/api/users/@me/", {"current_password": "A12345678", "password": "a_new_password"})
+ response = self.client.patch(
+ "/api/users/@me/",
+ {"current_password": "A12345678", "password": "a_new_password"},
+ )
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
self.assertEqual(response_data["email"], "bob@posthog.com")
@@ -602,7 +641,11 @@ def test_user_can_update_password(self, mock_capture, mock_identify):
user.distinct_id,
"user updated",
properties={"updated_attrs": ["password"]},
- groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.team.organization_id),
+ "project": str(self.team.uuid),
+ },
)
# User can log in with new password
@@ -616,7 +659,8 @@ def test_user_with_no_password_set_can_set_password(self, mock_capture, mock_ide
self.client.force_login(user)
response = self.client.patch(
- "/api/users/@me/", {"password": "a_new_password"} # note we don't send current password
+ "/api/users/@me/",
+ {"password": "a_new_password"}, # note we don't send current password
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.json()
@@ -636,11 +680,18 @@ def test_user_with_no_password_set_can_set_password(self, mock_capture, mock_ide
user.distinct_id,
"user updated",
properties={"updated_attrs": ["password"]},
- groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.team.organization_id),
+ "project": str(self.team.uuid),
+ },
)
# User can log in with new password
- response = self.client.post("/api/login", {"email": "no_password@posthog.com", "password": "a_new_password"})
+ response = self.client.post(
+ "/api/login",
+ {"email": "no_password@posthog.com", "password": "a_new_password"},
+ )
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_user_with_unusable_password_set_can_set_password(self):
@@ -663,7 +714,10 @@ def test_user_with_unusable_password_set_can_set_password(self):
@patch("posthog.tasks.user_identify.identify_task")
@patch("posthoganalytics.capture")
def test_cannot_update_to_insecure_password(self, mock_capture, mock_identify):
- response = self.client.patch("/api/users/@me/", {"current_password": self.CONFIG_PASSWORD, "password": "123"})
+ response = self.client.patch(
+ "/api/users/@me/",
+ {"current_password": self.CONFIG_PASSWORD, "password": "123"},
+ )
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
@@ -740,7 +794,10 @@ def test_user_cannot_update_password_with_incorrect_current_password_and_ratelim
for _ in range(7):
response = self.client.patch("/api/users/@me/", {"current_password": "wrong", "password": "12345678"})
self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS)
- self.assertDictContainsSubset({"attr": None, "code": "throttled", "type": "throttled_error"}, response.json())
+ self.assertDictContainsSubset(
+ {"attr": None, "code": "throttled", "type": "throttled_error"},
+ response.json(),
+ )
# Password was not changed
self.user.refresh_from_db()
@@ -957,7 +1014,9 @@ def test_user_can_request_verification_email(self, mock_capture):
html_message = mail.outbox[0].alternatives[0][0] # type: ignore
self.validate_basic_html(
- html_message, "https://my.posthog.net", preheader="Please follow the link inside to verify your account."
+ html_message,
+ "https://my.posthog.net",
+ preheader="Please follow the link inside to verify your account.",
)
link_index = html_message.find("https://my.posthog.net/verify_email")
reset_link = html_message[link_index : html_message.find('"', link_index)]
@@ -975,7 +1034,11 @@ def test_user_can_request_verification_email(self, mock_capture):
self.user.distinct_id,
"user logged in",
properties={"social_provider": ""},
- groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)},
+ groups={
+ "instance": ANY,
+ "organization": str(self.team.organization_id),
+ "project": str(self.team.uuid),
+ },
)
mock_capture.assert_any_call(
self.user.distinct_id,
@@ -1003,14 +1066,18 @@ def test_cant_verify_more_than_six_times(self):
for i in range(7):
with self.settings(CELERY_TASK_ALWAYS_EAGER=True, SITE_URL="https://my.posthog.net"):
- response = self.client.post(f"/api/users/@me/request_email_verification/", {"uuid": self.user.uuid})
+ response = self.client.post(
+ f"/api/users/@me/request_email_verification/",
+ {"uuid": self.user.uuid},
+ )
if i < 6:
self.assertEqual(response.status_code, status.HTTP_200_OK)
else:
# Fourth request should fail
self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS)
self.assertDictContainsSubset(
- {"attr": None, "code": "throttled", "type": "throttled_error"}, response.json()
+ {"attr": None, "code": "throttled", "type": "throttled_error"},
+ response.json(),
)
# Three emails should be sent, fourth should not
@@ -1028,7 +1095,12 @@ def test_cant_validate_email_verification_token_without_a_token(self):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
- {"type": "validation_error", "code": "required", "detail": "This field is required.", "attr": "token"},
+ {
+ "type": "validation_error",
+ "code": "required",
+ "detail": "This field is required.",
+ "attr": "token",
+ },
)
def test_invalid_verification_token_returns_error(self):
@@ -1038,8 +1110,16 @@ def test_invalid_verification_token_returns_error(self):
# tokens expire after one day
expired_token = default_token_generator.make_token(self.user)
- for token in [valid_token[:-1], "not_even_trying", self.user.uuid, expired_token]:
- response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token})
+ for token in [
+ valid_token[:-1],
+ "not_even_trying",
+ self.user.uuid,
+ expired_token,
+ ]:
+ response = self.client.post(
+ f"/api/users/@me/verify_email/",
+ {"uuid": self.user.uuid, "token": token},
+ )
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
response.json(),
diff --git a/posthog/api/test/test_utils.py b/posthog/api/test/test_utils.py
index 84a3d7315c220..8e0f08b009606 100644
--- a/posthog/api/test/test_utils.py
+++ b/posthog/api/test/test_utils.py
@@ -46,17 +46,24 @@ def test_format_paginated_url(self):
"http://testserver/api/some_url?offset=10",
)
self.assertEqual(
- format_paginated_url(request("/api/some_url?offset=0"), offset=0, page_size=10), "api/some_url?offset=10"
+ format_paginated_url(request("/api/some_url?offset=0"), offset=0, page_size=10),
+ "api/some_url?offset=10",
)
self.assertEqual(
format_paginated_url(
- request("/api/some_url?offset=0"), offset=0, page_size=10, mode=PaginationMode.previous
+ request("/api/some_url?offset=0"),
+ offset=0,
+ page_size=10,
+ mode=PaginationMode.previous,
),
None,
)
self.assertEqual(
format_paginated_url(
- request("/api/some_url?offset=0"), offset=20, page_size=10, mode=PaginationMode.previous
+ request("/api/some_url?offset=0"),
+ offset=20,
+ page_size=10,
+ mode=PaginationMode.previous,
),
"api/some_url?offset=0",
)
@@ -64,7 +71,11 @@ def test_format_paginated_url(self):
def test_get_target_entity(self):
request = lambda url: cast(Any, RequestFactory().get(url))
filter = Filter(
- data={"entity_id": "$pageview", "entity_type": "events", "events": [{"id": "$pageview", "type": "events"}]}
+ data={
+ "entity_id": "$pageview",
+ "entity_type": "events",
+ "events": [{"id": "$pageview", "type": "events"}],
+ }
)
entity = get_target_entity(filter)
@@ -90,10 +101,20 @@ def test_get_target_entity(self):
assert entity.math == "unique_group"
def test_check_definition_ids_inclusion_field_sql(self):
- definition_ids = ["", None, '["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]']
-
- expected_ids_list = [[], [], ["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]]
+ definition_ids = [
+ "",
+ None,
+ '["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]',
+ ]
+ expected_ids_list = [
+ [],
+ [],
+ [
+ "1fcefbef-7ea1-42fd-abca-4848b53133c0",
+ "c8452399-8a10-4142-864d-6f2ca8c65154",
+ ],
+ ]
for raw_ids, expected_ids in zip(definition_ids, expected_ids_list):
ordered_expected_ids = list(set(expected_ids)) # type: ignore
@@ -155,27 +176,43 @@ def test_raise_if_user_provided_url_unsafe(self):
raise_if_user_provided_url_unsafe("https://1.1.1.1") # Safe, public IP
self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe(""))
self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("@@@"))
- self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("posthog.com"))
+ self.assertRaisesMessage(
+ ValueError,
+ "No hostname",
+ lambda: raise_if_user_provided_url_unsafe("posthog.com"),
+ )
self.assertRaisesMessage(
ValueError,
"Scheme must be either HTTP or HTTPS",
lambda: raise_if_user_provided_url_unsafe("ftp://posthog.com"),
)
self.assertRaisesMessage(
- ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://localhost")
+ ValueError,
+ "Internal hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://localhost"),
)
self.assertRaisesMessage(
- ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://192.168.0.5")
+ ValueError,
+ "Internal hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://192.168.0.5"),
)
self.assertRaisesMessage(
- ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0")
+ ValueError,
+ "Internal hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0"),
)
self.assertRaisesMessage(
- ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24")
+ ValueError,
+ "Internal hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24"),
)
self.assertRaisesMessage(
- ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21")
+ ValueError,
+ "Internal hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21"),
)
self.assertRaisesMessage(
- ValueError, "Invalid hostname", lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com")
+ ValueError,
+ "Invalid hostname",
+ lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com"),
) # Non-existent
diff --git a/posthog/api/uploaded_media.py b/posthog/api/uploaded_media.py
index 5b0e68b5ab2e7..4893994ecdb55 100644
--- a/posthog/api/uploaded_media.py
+++ b/posthog/api/uploaded_media.py
@@ -7,7 +7,11 @@
from drf_spectacular.utils import extend_schema
from PIL import Image
from rest_framework import status, viewsets
-from rest_framework.exceptions import APIException, UnsupportedMediaType, ValidationError
+from rest_framework.exceptions import (
+ APIException,
+ UnsupportedMediaType,
+ ValidationError,
+)
from rest_framework.parsers import FormParser, MultiPartParser
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.response import Response
@@ -16,7 +20,10 @@
from posthog.api.routing import StructuredViewSetMixin
from posthog.models import UploadedMedia
from posthog.models.uploaded_media import ObjectStorageUnavailable
-from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission
+from posthog.permissions import (
+ ProjectMembershipNecessaryPermissions,
+ TeamMemberAccessPermission,
+)
from posthog.storage import object_storage
FOUR_MEGABYTES = 4 * 1024 * 1024
@@ -43,7 +50,12 @@ def validate_image_file(file: Optional[bytes], user: int) -> bool:
im.close()
return True
except Exception as e:
- logger.error("uploaded_media.image_verification_error", user=user, exception=e, exc_info=True)
+ logger.error(
+ "uploaded_media.image_verification_error",
+ user=user,
+ exception=e,
+ exc_info=True,
+ )
return False
@@ -61,7 +73,10 @@ def download(request, *args, **kwargs) -> HttpResponse:
file_bytes = object_storage.read_bytes(instance.media_location)
- statsd.incr("uploaded_media.served", tags={"team_id": instance.team_id, "uuid": kwargs["image_uuid"]})
+ statsd.incr(
+ "uploaded_media.served",
+ tags={"team_id": instance.team_id, "uuid": kwargs["image_uuid"]},
+ )
return HttpResponse(
file_bytes,
@@ -109,15 +124,20 @@ def create(self, request, *args, **kwargs) -> Response:
bytes_to_verify = object_storage.read_bytes(uploaded_media.media_location)
if not validate_image_file(bytes_to_verify, user=request.user.id):
statsd.incr(
- "uploaded_media.image_failed_validation", tags={"file_name": file.name, "team": self.team_id}
+ "uploaded_media.image_failed_validation",
+ tags={"file_name": file.name, "team": self.team_id},
)
# TODO a batch process can delete media with no records in the DB or for deleted teams
uploaded_media.delete()
- raise ValidationError(code="invalid_image", detail="Uploaded media must be a valid image")
+ raise ValidationError(
+ code="invalid_image",
+ detail="Uploaded media must be a valid image",
+ )
headers = self.get_success_headers(uploaded_media.get_absolute_url())
statsd.incr(
- "uploaded_media.uploaded", tags={"team_id": self.team.pk, "content_type": file.content_type}
+ "uploaded_media.uploaded",
+ tags={"team_id": self.team.pk, "content_type": file.content_type},
)
return Response(
{
@@ -134,7 +154,8 @@ def create(self, request, *args, **kwargs) -> Response:
raise ValidationError(code="no-image-provided", detail="An image file must be provided")
except ObjectStorageUnavailable:
raise ValidationError(
- code="object_storage_required", detail="Object storage must be available to allow media uploads."
+ code="object_storage_required",
+ detail="Object storage must be available to allow media uploads.",
)
def get_success_headers(self, location: str) -> Dict:
diff --git a/posthog/api/user.py b/posthog/api/user.py
index 75276eca4f5ce..541a428074389 100644
--- a/posthog/api/user.py
+++ b/posthog/api/user.py
@@ -35,7 +35,11 @@
from posthog.auth import authenticate_secondarily
from posthog.cloud_utils import is_cloud
from posthog.email import is_email_available
-from posthog.event_usage import report_user_logged_in, report_user_updated, report_user_verified_email
+from posthog.event_usage import (
+ report_user_logged_in,
+ report_user_updated,
+ report_user_verified_email,
+)
from posthog.models import Team, User, UserScenePersonalisation, Dashboard
from posthog.models.organization import Organization
from posthog.models.user import NOTIFICATION_DEFAULTS, Notifications
@@ -110,7 +114,10 @@ class Meta:
"has_seen_product_intro_for",
"scene_personalisation",
]
- extra_kwargs = {"date_joined": {"read_only": True}, "password": {"write_only": True}}
+ extra_kwargs = {
+ "date_joined": {"read_only": True},
+ "password": {"write_only": True},
+ }
def get_has_password(self, instance: User) -> bool:
return instance.has_usable_password()
@@ -166,12 +173,14 @@ def validate_password_change(
# usable (properly hashed) and that a password actually exists.
if not current_password:
raise serializers.ValidationError(
- {"current_password": ["This field is required when updating your password."]}, code="required"
+ {"current_password": ["This field is required when updating your password."]},
+ code="required",
)
if not instance.check_password(current_password):
raise serializers.ValidationError(
- {"current_password": ["Your current password is incorrect."]}, code="incorrect_password"
+ {"current_password": ["Your current password is incorrect."]},
+ code="incorrect_password",
)
try:
validate_password(password, instance)
@@ -276,7 +285,12 @@ def save(self, **kwargs):
)
-class UserViewSet(mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
+class UserViewSet(
+ mixins.RetrieveModelMixin,
+ mixins.UpdateModelMixin,
+ mixins.ListModelMixin,
+ viewsets.GenericViewSet,
+):
throttle_classes = [UserAuthenticationThrottle]
serializer_class = UserSerializer
permission_classes = [permissions.IsAuthenticated]
@@ -305,7 +319,10 @@ def get_queryset(self):
return queryset
def get_serializer_context(self):
- return {**super().get_serializer_context(), "user_permissions": UserPermissions(cast(User, self.request.user))}
+ return {
+ **super().get_serializer_context(),
+ "user_permissions": UserPermissions(cast(User, self.request.user)),
+ }
@action(methods=["GET"], detail=True)
def start_2fa_setup(self, request, **kwargs):
@@ -319,7 +336,9 @@ def start_2fa_setup(self, request, **kwargs):
@action(methods=["POST"], detail=True)
def validate_2fa(self, request, **kwargs):
form = TOTPDeviceForm(
- request.session["django_two_factor-hex"], request.user, data={"token": request.data["token"]}
+ request.session["django_two_factor-hex"],
+ request.user,
+ data={"token": request.data["token"]},
)
if not form.is_valid():
raise serializers.ValidationError("Token is not valid", code="token_invalid")
@@ -345,7 +364,8 @@ def verify_email(self, request, **kwargs):
if not user or not EmailVerifier.check_token(user, token):
raise serializers.ValidationError(
- {"token": ["This verification token is invalid or has expired."]}, code="invalid_token"
+ {"token": ["This verification token is invalid or has expired."]},
+ code="invalid_token",
)
if user.pending_email:
@@ -364,7 +384,10 @@ def verify_email(self, request, **kwargs):
return Response({"success": True, "token": token})
@action(
- methods=["POST"], detail=True, permission_classes=[AllowAny], throttle_classes=[UserEmailVerificationThrottle]
+ methods=["POST"],
+ detail=True,
+ permission_classes=[AllowAny],
+ throttle_classes=[UserEmailVerificationThrottle],
)
def request_email_verification(self, request, **kwargs):
uuid = request.data["uuid"]
diff --git a/posthog/apps.py b/posthog/apps.py
index 6ae001ccf93fc..3e6b2aaf76fee 100644
--- a/posthog/apps.py
+++ b/posthog/apps.py
@@ -7,7 +7,12 @@
from posthoganalytics.client import Client
from posthog.settings import SELF_CAPTURE, SKIP_ASYNC_MIGRATIONS_SETUP
-from posthog.utils import get_git_branch, get_git_commit, get_machine_id, get_self_capture_api_token
+from posthog.utils import (
+ get_git_branch,
+ get_git_commit,
+ get_machine_id,
+ get_self_capture_api_token,
+)
logger = structlog.get_logger(__name__)
diff --git a/posthog/async_migrations/definition.py b/posthog/async_migrations/definition.py
index 77e7261aab55e..859b8af08819d 100644
--- a/posthog/async_migrations/definition.py
+++ b/posthog/async_migrations/definition.py
@@ -19,7 +19,11 @@
class AsyncMigrationOperation:
- def __init__(self, fn: Callable[[str], None], rollback_fn: Callable[[str], None] = lambda _: None):
+ def __init__(
+ self,
+ fn: Callable[[str], None],
+ rollback_fn: Callable[[str], None] = lambda _: None,
+ ):
self.fn = fn
# This should not be a long operation as it will be executed synchronously!
@@ -55,7 +59,10 @@ def rollback_fn(self, query_id: str):
self._execute_op(query_id, self.rollback, self.rollback_settings)
def _execute_op(self, query_id: str, sql: str, settings: Optional[Dict]):
- from posthog.async_migrations.utils import execute_op_clickhouse, execute_op_postgres
+ from posthog.async_migrations.utils import (
+ execute_op_clickhouse,
+ execute_op_postgres,
+ )
if self.database == AnalyticsDBMS.CLICKHOUSE:
execute_op_clickhouse(
diff --git a/posthog/async_migrations/disk_util.py b/posthog/async_migrations/disk_util.py
index 96ff6a383e7fc..ac7398a2c3e2f 100644
--- a/posthog/async_migrations/disk_util.py
+++ b/posthog/async_migrations/disk_util.py
@@ -27,10 +27,17 @@ def analyze_enough_disk_space_free_for_table(table_name: str, required_ratio: fl
total_disk_space - (free_disk_space - %(ratio)s * table_size) AS required,
formatReadableSize(required)
""",
- {"database": CLICKHOUSE_DATABASE, "table_name": table_name, "ratio": required_ratio},
+ {
+ "database": CLICKHOUSE_DATABASE,
+ "table_name": table_name,
+ "ratio": required_ratio,
+ },
)[0]
if current_ratio >= required_ratio:
return (True, None)
else:
- return (False, f"Upgrade your ClickHouse storage to at least {required_space_pretty}.")
+ return (
+ False,
+ f"Upgrade your ClickHouse storage to at least {required_space_pretty}.",
+ )
diff --git a/posthog/async_migrations/examples/example.py b/posthog/async_migrations/examples/example.py
index 1c0143744d796..c079c5ca6e504 100644
--- a/posthog/async_migrations/examples/example.py
+++ b/posthog/async_migrations/examples/example.py
@@ -28,7 +28,6 @@ def example_rollback_fn(uuid: str):
class Migration(AsyncMigrationDefinition):
-
description = "An example async migration."
posthog_min_version = "1.29.0"
diff --git a/posthog/async_migrations/examples/test_migration.py b/posthog/async_migrations/examples/test_migration.py
index 4b85264004865..7b3516c8ced91 100644
--- a/posthog/async_migrations/examples/test_migration.py
+++ b/posthog/async_migrations/examples/test_migration.py
@@ -26,7 +26,6 @@ def side_effect_rollback(self, _):
class Migration(AsyncMigrationDefinition):
-
# For testing only!!
fail = False
error_message = "Healthcheck failed"
diff --git a/posthog/async_migrations/examples/test_with_rollback_exception.py b/posthog/async_migrations/examples/test_with_rollback_exception.py
index 75371d4cfba37..b17f52391c9d6 100644
--- a/posthog/async_migrations/examples/test_with_rollback_exception.py
+++ b/posthog/async_migrations/examples/test_with_rollback_exception.py
@@ -1,4 +1,7 @@
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperation,
+)
# For testing purposes
@@ -8,7 +11,6 @@ def raise_exception_fn(_):
class Migration(AsyncMigrationDefinition):
-
# For testing only!!
description = "Another example async migration that's less realistic and used in tests."
diff --git a/posthog/async_migrations/migrations/0001_events_sample_by.py b/posthog/async_migrations/migrations/0001_events_sample_by.py
index 6a27833d38e0d..4098fd38f32a1 100644
--- a/posthog/async_migrations/migrations/0001_events_sample_by.py
+++ b/posthog/async_migrations/migrations/0001_events_sample_by.py
@@ -1,6 +1,9 @@
from typing import List
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperation,
+)
"""
Nooping this migration for future compatibility. Superseded by 0002_events_sample_by.
@@ -10,7 +13,6 @@
class Migration(AsyncMigrationDefinition):
-
description = "Test migration"
posthog_max_version = "1.33.9"
diff --git a/posthog/async_migrations/migrations/0002_events_sample_by.py b/posthog/async_migrations/migrations/0002_events_sample_by.py
index c4d7ca9181f67..7ad43de2934a3 100644
--- a/posthog/async_migrations/migrations/0002_events_sample_by.py
+++ b/posthog/async_migrations/migrations/0002_events_sample_by.py
@@ -59,7 +59,6 @@ def generate_insert_into_op(partition_gte: int, partition_lt=None) -> AsyncMigra
class Migration(AsyncMigrationDefinition):
-
description = (
"Schema change to the events table ensuring our SAMPLE BY clause is compatible with ClickHouse >=21.7.0."
)
@@ -138,7 +137,10 @@ def operations(self):
),
AsyncMigrationOperation(
fn=lambda query_id: run_optimize_table(
- unique_name="0002_events_sample_by", query_id=query_id, table_name=EVENTS_TABLE_NAME, final=True
+ unique_name="0002_events_sample_by",
+ query_id=query_id,
+ table_name=EVENTS_TABLE_NAME,
+ final=True,
)
),
]
diff --git a/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py b/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py
index 3cb5f123c5124..ba1d6dd917292 100644
--- a/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py
+++ b/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py
@@ -1,6 +1,9 @@
from functools import cached_property
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperationSQL,
+)
from posthog.client import sync_execute
from posthog.constants import AnalyticsDBMS
from posthog.settings import CLICKHOUSE_DATABASE
@@ -29,7 +32,6 @@
class Migration(AsyncMigrationDefinition):
-
description = "Set up person_distinct_id2 table, speeding up person-related queries."
depends_on = "0002_events_sample_by"
diff --git a/posthog/async_migrations/migrations/0004_replicated_schema.py b/posthog/async_migrations/migrations/0004_replicated_schema.py
index 18f54a315e621..9bdbdc4ebe5f3 100644
--- a/posthog/async_migrations/migrations/0004_replicated_schema.py
+++ b/posthog/async_migrations/migrations/0004_replicated_schema.py
@@ -49,7 +49,6 @@
class Migration(AsyncMigrationDefinition):
-
description = "Replace tables with replicated counterparts"
depends_on = "0003_fill_person_distinct_id2"
diff --git a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py
index 69e38de0a4bf8..276d6c54abed3 100644
--- a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py
+++ b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py
@@ -160,9 +160,14 @@ def operations(self):
ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'
""",
),
- AsyncMigrationOperationSQL(database=AnalyticsDBMS.CLICKHOUSE, sql=PERSONS_TABLE_MV_SQL, rollback=None),
+ AsyncMigrationOperationSQL(
+ database=AnalyticsDBMS.CLICKHOUSE,
+ sql=PERSONS_TABLE_MV_SQL,
+ rollback=None,
+ ),
AsyncMigrationOperation(
- fn=self.copy_persons_from_postgres, rollback_fn=lambda _: self.unset_highwatermark()
+ fn=self.copy_persons_from_postgres,
+ rollback_fn=lambda _: self.unset_highwatermark(),
),
]
@@ -195,10 +200,16 @@ def copy_persons_from_postgres(self, query_id: str):
should_continue = self._copy_batch_from_postgres(query_id)
self.unset_highwatermark()
run_optimize_table(
- unique_name="0005_person_replacing_by_version", query_id=query_id, table_name=PERSON_TABLE, final=True
+ unique_name="0005_person_replacing_by_version",
+ query_id=query_id,
+ table_name=PERSON_TABLE,
+ final=True,
)
except Exception as err:
- logger.warn("Re-copying persons from postgres failed. Marking async migration as complete.", error=err)
+ logger.warn(
+ "Re-copying persons from postgres failed. Marking async migration as complete.",
+ error=err,
+ )
capture_exception(err)
def _copy_batch_from_postgres(self, query_id: str) -> bool:
diff --git a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py
index f83b509698394..62f539f333481 100644
--- a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py
+++ b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py
@@ -1,6 +1,9 @@
from typing import List
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperation,
+)
"""
Nooping this migration for future compatibility. Superseded by 0007_persons_and_groups_on_events_backfill.
@@ -10,7 +13,6 @@
class Migration(AsyncMigrationDefinition):
-
description = "No-op migration"
posthog_max_version = "1.41.99"
diff --git a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py
index de0911a54a41a..9b140eedf8a1c 100644
--- a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py
+++ b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py
@@ -10,7 +10,11 @@
AsyncMigrationOperationSQL,
)
from posthog.async_migrations.disk_util import analyze_enough_disk_space_free_for_table
-from posthog.async_migrations.utils import execute_op_clickhouse, run_optimize_table, sleep_until_finished
+from posthog.async_migrations.utils import (
+ execute_op_clickhouse,
+ run_optimize_table,
+ sleep_until_finished,
+)
from posthog.client import sync_execute
from posthog.models.event.sql import EVENTS_DATA_TABLE
from posthog.utils import str_to_bool
@@ -74,16 +78,36 @@ class Migration(AsyncMigrationDefinition):
posthog_max_version = "1.41.99"
parameters = {
- "PERSON_DICT_CACHE_SIZE": (5000000, "ClickHouse cache size (in rows) for persons data.", int),
+ "PERSON_DICT_CACHE_SIZE": (
+ 5000000,
+ "ClickHouse cache size (in rows) for persons data.",
+ int,
+ ),
"PERSON_DISTINCT_ID_DICT_CACHE_SIZE": (
5000000,
"ClickHouse cache size (in rows) for person distinct id data.",
int,
),
- "GROUPS_DICT_CACHE_SIZE": (1000000, "ClickHouse cache size (in rows) for groups data.", int),
- "RUN_DATA_VALIDATION_POSTCHECK": ("True", "Whether to run a postcheck validating the backfilled data.", str),
- "TIMESTAMP_LOWER_BOUND": ("2020-01-01", "Timestamp lower bound for events to backfill", str),
- "TIMESTAMP_UPPER_BOUND": ("2024-01-01", "Timestamp upper bound for events to backfill", str),
+ "GROUPS_DICT_CACHE_SIZE": (
+ 1000000,
+ "ClickHouse cache size (in rows) for groups data.",
+ int,
+ ),
+ "RUN_DATA_VALIDATION_POSTCHECK": (
+ "True",
+ "Whether to run a postcheck validating the backfilled data.",
+ str,
+ ),
+ "TIMESTAMP_LOWER_BOUND": (
+ "2020-01-01",
+ "Timestamp lower bound for events to backfill",
+ str,
+ ),
+ "TIMESTAMP_UPPER_BOUND": (
+ "2024-01-01",
+ "Timestamp upper bound for events to backfill",
+ str,
+ ),
"TEAM_ID": (
None,
"The team_id of team to run backfill for. If unset the backfill will run for all teams.",
@@ -95,7 +119,6 @@ def precheck(self):
return analyze_enough_disk_space_free_for_table(EVENTS_DATA_TABLE(), required_ratio=2.0)
def is_required(self) -> bool:
-
# we don't check groupX_created_at columns as they are 0 by default
rows_to_backfill_check = sync_execute(
"""
@@ -435,8 +458,9 @@ def _run_backfill_mutation(self, query_id):
)
def _create_dictionaries(self, query_id):
- execute_op_clickhouse(
- f"""
+ (
+ execute_op_clickhouse(
+ f"""
CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.person_dict {{on_cluster_clause}}
(
team_id Int64,
@@ -449,12 +473,14 @@ def _create_dictionaries(self, query_id):
LAYOUT(complex_key_cache(size_in_cells %(cache_size)s max_threads_for_updates 6 allow_read_expired_keys 1))
Lifetime(60000)
""",
- {"cache_size": self.get_parameter("PERSON_DICT_CACHE_SIZE")},
- per_shard=True,
- query_id=query_id,
- ),
- execute_op_clickhouse(
- f"""
+ {"cache_size": self.get_parameter("PERSON_DICT_CACHE_SIZE")},
+ per_shard=True,
+ query_id=query_id,
+ ),
+ )
+ (
+ execute_op_clickhouse(
+ f"""
CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.person_distinct_id2_dict {{on_cluster_clause}}
(
team_id Int64,
@@ -466,10 +492,11 @@ def _create_dictionaries(self, query_id):
LAYOUT(complex_key_cache(size_in_cells %(cache_size)s max_threads_for_updates 6 allow_read_expired_keys 1))
Lifetime(60000)
""",
- {"cache_size": self.get_parameter("PERSON_DISTINCT_ID_DICT_CACHE_SIZE")},
- per_shard=True,
- query_id=query_id,
- ),
+ {"cache_size": self.get_parameter("PERSON_DISTINCT_ID_DICT_CACHE_SIZE")},
+ per_shard=True,
+ query_id=query_id,
+ ),
+ )
execute_op_clickhouse(
f"""
CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.groups_dict {{on_cluster_clause}}
@@ -501,7 +528,10 @@ def _count_running_mutations(self):
FROM clusterAllReplicas(%(cluster)s, system, 'mutations')
WHERE not is_done AND command LIKE %(pattern)s
""",
- {"cluster": settings.CLICKHOUSE_CLUSTER, "pattern": "%person_created_at = toDateTime(0)%"},
+ {
+ "cluster": settings.CLICKHOUSE_CLUSTER,
+ "pattern": "%person_created_at = toDateTime(0)%",
+ },
)[0][0]
def _clear_temporary_tables(self, query_id):
diff --git a/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py b/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py
index b8a3fa65bfdf5..20d81d063cd26 100644
--- a/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py
+++ b/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py
@@ -3,7 +3,10 @@
import structlog
from django.conf import settings
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperationSQL,
+)
from posthog.client import sync_execute
from posthog.constants import AnalyticsDBMS
from posthog.version_requirement import ServiceVersionRequirement
diff --git a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py
index ea21377f19b69..9b4c64c9af869 100644
--- a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py
+++ b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py
@@ -1,6 +1,9 @@
from typing import List
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperation,
+)
class Migration(AsyncMigrationDefinition):
diff --git a/posthog/async_migrations/migrations/0010_move_old_partitions.py b/posthog/async_migrations/migrations/0010_move_old_partitions.py
index 990e339bff581..8097224014f00 100644
--- a/posthog/async_migrations/migrations/0010_move_old_partitions.py
+++ b/posthog/async_migrations/migrations/0010_move_old_partitions.py
@@ -2,7 +2,10 @@
import structlog
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperationSQL,
+)
from posthog.client import sync_execute
from posthog.constants import AnalyticsDBMS
from posthog.version_requirement import ServiceVersionRequirement
@@ -20,9 +23,21 @@ class Migration(AsyncMigrationDefinition):
posthog_max_version = "1.49.99"
parameters = {
- "OLDEST_PARTITION_TO_KEEP": ("200001", "ID of the oldest partition to keep", str),
- "NEWEST_PARTITION_TO_KEEP": ("202308", "ID of the newest partition to keep", str),
- "OPTIMIZE_TABLE": (False, "Optimize sharded_events table after moving partitions?", bool),
+ "OLDEST_PARTITION_TO_KEEP": (
+ "200001",
+ "ID of the oldest partition to keep",
+ str,
+ ),
+ "NEWEST_PARTITION_TO_KEEP": (
+ "202308",
+ "ID of the newest partition to keep",
+ str,
+ ),
+ "OPTIMIZE_TABLE": (
+ False,
+ "Optimize sharded_events table after moving partitions?",
+ bool,
+ ),
}
service_version_requirements = [ServiceVersionRequirement(service="clickhouse", supported_version=">=22.3.0")]
diff --git a/posthog/async_migrations/runner.py b/posthog/async_migrations/runner.py
index 931ee5d67a232..78f2afcf21201 100644
--- a/posthog/async_migrations/runner.py
+++ b/posthog/async_migrations/runner.py
@@ -19,7 +19,11 @@
trigger_migration,
update_async_migration,
)
-from posthog.models.async_migration import AsyncMigration, MigrationStatus, get_all_running_async_migrations
+from posthog.models.async_migration import (
+ AsyncMigration,
+ MigrationStatus,
+ get_all_running_async_migrations,
+)
from posthog.models.instance_setting import get_instance_setting
from posthog.models.utils import UUIDT
from posthog.version_requirement import ServiceVersionRequirement
@@ -33,7 +37,9 @@
def start_async_migration(
- migration_name: str, ignore_posthog_version=False, migration_definition: Optional[AsyncMigrationDefinition] = None
+ migration_name: str,
+ ignore_posthog_version=False,
+ migration_definition: Optional[AsyncMigrationDefinition] = None,
) -> bool:
"""
Performs some basic checks to ensure the migration can indeed run, and then kickstarts the chain of operations
@@ -63,7 +69,10 @@ def start_async_migration(
if not (
ignore_posthog_version
- or is_posthog_version_compatible(migration_instance.posthog_min_version, migration_instance.posthog_max_version)
+ or is_posthog_version_compatible(
+ migration_instance.posthog_min_version,
+ migration_instance.posthog_max_version,
+ )
):
process_error(
migration_instance,
@@ -102,7 +111,9 @@ def start_async_migration(
ok, error = run_migration_precheck(migration_instance)
if not ok:
process_error(
- migration_instance, f"Migration precheck failed with error:{error}", status=MigrationStatus.FailedAtStartup
+ migration_instance,
+ f"Migration precheck failed with error:{error}",
+ status=MigrationStatus.FailedAtStartup,
)
return False
@@ -245,7 +256,10 @@ def attempt_migration_rollback(migration_instance: AsyncMigration):
return
update_async_migration(
- migration_instance=migration_instance, status=MigrationStatus.RolledBack, progress=0, current_operation_index=0
+ migration_instance=migration_instance,
+ status=MigrationStatus.RolledBack,
+ progress=0,
+ current_operation_index=0,
)
diff --git a/posthog/async_migrations/setup.py b/posthog/async_migrations/setup.py
index 30a74b0acf76c..fff7205a4c8e0 100644
--- a/posthog/async_migrations/setup.py
+++ b/posthog/async_migrations/setup.py
@@ -6,7 +6,10 @@
from posthog.async_migrations.definition import AsyncMigrationDefinition
from posthog.constants import FROZEN_POSTHOG_VERSION
-from posthog.models.async_migration import AsyncMigration, get_all_completed_async_migrations
+from posthog.models.async_migration import (
+ AsyncMigration,
+ get_all_completed_async_migrations,
+)
from posthog.models.instance_setting import get_instance_setting
from posthog.settings import TEST
diff --git a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py
index 27c660a8c749d..4e6588ad45920 100644
--- a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py
+++ b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py
@@ -5,14 +5,25 @@
import pytest
from posthog.async_migrations.runner import start_async_migration
-from posthog.async_migrations.setup import get_async_migration_definition, setup_async_migrations
+from posthog.async_migrations.setup import (
+ get_async_migration_definition,
+ setup_async_migrations,
+)
from posthog.async_migrations.test.util import AsyncMigrationBaseTest
from posthog.client import query_with_columns, sync_execute
from posthog.models import Person
-from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus
+from posthog.models.async_migration import (
+ AsyncMigration,
+ AsyncMigrationError,
+ MigrationStatus,
+)
from posthog.models.event.util import create_event
from posthog.models.group.util import create_group
-from posthog.models.person.util import create_person, create_person_distinct_id, delete_person
+from posthog.models.person.util import (
+ create_person,
+ create_person_distinct_id,
+ delete_person,
+)
from posthog.models.utils import UUIDT
from posthog.test.base import ClickhouseTestMixin, run_clickhouse_statement_in_parallel
@@ -269,7 +280,12 @@ def test_data_copy_groups(self):
team=self.team,
distinct_id="1",
event="$pageview",
- properties={"$group_0": "org:7", "$group_1": "77", "$group_2": "77", "$group_3": "77"},
+ properties={
+ "$group_0": "org:7",
+ "$group_1": "77",
+ "$group_2": "77",
+ "$group_3": "77",
+ },
)
# we need to also create person data so the backfill postcheck does not fail
@@ -327,7 +343,10 @@ def test_rollback(self):
migration_successful = run_migration()
self.assertFalse(migration_successful)
- self.assertEqual(AsyncMigration.objects.get(name=MIGRATION_NAME).status, MigrationStatus.RolledBack)
+ self.assertEqual(
+ AsyncMigration.objects.get(name=MIGRATION_NAME).status,
+ MigrationStatus.RolledBack,
+ )
MIGRATION_DEFINITION.operations[-1].fn = old_fn
@@ -553,7 +572,8 @@ def test_check_person_data_failure(self):
# Test that we fail the postcheck with the right message when 3 out of 101 events is incomplete (~2%)
with self.assertRaisesRegex(
- Exception, "Backfill did not work succesfully. ~2% of events did not get the correct data for persons."
+ Exception,
+ "Backfill did not work succesfully. ~2% of events did not get the correct data for persons.",
):
MIGRATION_DEFINITION._check_person_data() # type: ignore
diff --git a/posthog/async_migrations/test/test_0010_move_old_partitions.py b/posthog/async_migrations/test/test_0010_move_old_partitions.py
index 272b51c1735c8..3cc21d3b67a58 100644
--- a/posthog/async_migrations/test/test_0010_move_old_partitions.py
+++ b/posthog/async_migrations/test/test_0010_move_old_partitions.py
@@ -1,7 +1,10 @@
import pytest
from posthog.async_migrations.runner import start_async_migration
-from posthog.async_migrations.setup import get_async_migration_definition, setup_async_migrations
+from posthog.async_migrations.setup import (
+ get_async_migration_definition,
+ setup_async_migrations,
+)
from posthog.async_migrations.test.util import AsyncMigrationBaseTest
from posthog.models.event.util import create_event
from posthog.models.utils import UUIDT
@@ -24,18 +27,38 @@ def run_migration():
class Test0010MoveOldPartitions(AsyncMigrationBaseTest):
def setUp(self):
- MIGRATION_DEFINITION.parameters["OLDEST_PARTITION_TO_KEEP"] = ("202301", "", str)
- MIGRATION_DEFINITION.parameters["NEWEST_PARTITION_TO_KEEP"] = ("202302", "", str)
+ MIGRATION_DEFINITION.parameters["OLDEST_PARTITION_TO_KEEP"] = (
+ "202301",
+ "",
+ str,
+ )
+ MIGRATION_DEFINITION.parameters["NEWEST_PARTITION_TO_KEEP"] = (
+ "202302",
+ "",
+ str,
+ )
MIGRATION_DEFINITION.parameters["OPTIMIZE_TABLE"] = (False, "", bool)
create_event(
- event_uuid=uuid1, team=self.team, distinct_id="1", event="$pageview", timestamp="1900-01-02T00:00:00Z"
+ event_uuid=uuid1,
+ team=self.team,
+ distinct_id="1",
+ event="$pageview",
+ timestamp="1900-01-02T00:00:00Z",
)
create_event(
- event_uuid=uuid2, team=self.team, distinct_id="1", event="$pageview", timestamp="2022-02-02T00:00:00Z"
+ event_uuid=uuid2,
+ team=self.team,
+ distinct_id="1",
+ event="$pageview",
+ timestamp="2022-02-02T00:00:00Z",
)
create_event(
- event_uuid=uuid3, team=self.team, distinct_id="1", event="$pageview", timestamp="2045-02-02T00:00:00Z"
+ event_uuid=uuid3,
+ team=self.team,
+ distinct_id="1",
+ event="$pageview",
+ timestamp="2045-02-02T00:00:00Z",
)
super().setUp()
@@ -44,7 +67,6 @@ def tearDown(self):
super().tearDown()
def test_completes_successfully(self):
-
self.assertTrue(run_migration())
# create table + 3 move operations
diff --git a/posthog/async_migrations/test/test_definition.py b/posthog/async_migrations/test/test_definition.py
index 24c556841649a..1acdfa758499b 100644
--- a/posthog/async_migrations/test/test_definition.py
+++ b/posthog/async_migrations/test/test_definition.py
@@ -1,7 +1,10 @@
import pytest
from infi.clickhouse_orm.utils import import_submodules
-from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation
+from posthog.async_migrations.definition import (
+ AsyncMigrationDefinition,
+ AsyncMigrationOperation,
+)
from posthog.async_migrations.setup import (
ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH,
get_async_migration_definition,
@@ -16,7 +19,10 @@
class TestAsyncMigrationDefinition(BaseTest):
def test_get_async_migration_definition(self):
- from posthog.async_migrations.examples.example import example_fn, example_rollback_fn
+ from posthog.async_migrations.examples.example import (
+ example_fn,
+ example_rollback_fn,
+ )
modules = import_submodules(ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH)
example_migration = modules["example"].Migration("example")
@@ -28,7 +34,12 @@ def test_get_async_migration_definition(self):
self.assertEqual(example_migration.posthog_max_version, "1.30.0")
self.assertEqual(example_migration.operations[-1].fn, example_fn)
self.assertEqual(example_migration.operations[-1].rollback_fn, example_rollback_fn)
- self.assertTrue(isinstance(example_migration.service_version_requirements[0], ServiceVersionRequirement))
+ self.assertTrue(
+ isinstance(
+ example_migration.service_version_requirements[0],
+ ServiceVersionRequirement,
+ )
+ )
def test_get_migration_instance_and_parameters(self):
setup_async_migrations(ignore_posthog_version=True)
@@ -41,7 +52,8 @@ def test_get_migration_instance_and_parameters(self):
self.assertEqual(definition.migration_instance(), instance)
self.assertEqual(
- definition.get_parameter("PERSON_DICT_CACHE_SIZE"), definition.parameters["PERSON_DICT_CACHE_SIZE"][0]
+ definition.get_parameter("PERSON_DICT_CACHE_SIZE"),
+ definition.parameters["PERSON_DICT_CACHE_SIZE"][0],
)
instance.parameters = {"PERSON_DICT_CACHE_SIZE": 123}
diff --git a/posthog/async_migrations/test/test_migrations_not_required.py b/posthog/async_migrations/test/test_migrations_not_required.py
index 76f9de401e097..9665f534ac81f 100644
--- a/posthog/async_migrations/test/test_migrations_not_required.py
+++ b/posthog/async_migrations/test/test_migrations_not_required.py
@@ -7,6 +7,7 @@
pytestmark = pytest.mark.async_migrations
+
# Async migrations are data migrations aimed at getting users from an old schema to a new schema
# Fresh installs should have the new schema, however. So check that async migrations are being
# written correctly such that this is the case
@@ -19,4 +20,7 @@ def setUp(self):
def test_async_migrations_not_required_on_fresh_instances(self):
for name, migration in ALL_ASYNC_MIGRATIONS.items():
- self.assertFalse(migration.is_required(), f"Async migration {name} is_required returned True")
+ self.assertFalse(
+ migration.is_required(),
+ f"Async migration {name} is_required returned True",
+ )
diff --git a/posthog/async_migrations/test/test_runner.py b/posthog/async_migrations/test/test_runner.py
index f433a5e36be3e..9c4a7b1fe5ea5 100644
--- a/posthog/async_migrations/test/test_runner.py
+++ b/posthog/async_migrations/test/test_runner.py
@@ -9,9 +9,16 @@
run_async_migration_next_op,
start_async_migration,
)
-from posthog.async_migrations.test.util import AsyncMigrationBaseTest, create_async_migration
+from posthog.async_migrations.test.util import (
+ AsyncMigrationBaseTest,
+ create_async_migration,
+)
from posthog.async_migrations.utils import update_async_migration
-from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus
+from posthog.models.async_migration import (
+ AsyncMigration,
+ AsyncMigrationError,
+ MigrationStatus,
+)
from posthog.models.utils import UUIDT
pytestmark = pytest.mark.async_migrations
@@ -52,7 +59,6 @@ def test_run_migration_in_full(self):
self.assertEqual(self.migration.sec.side_effect_rollback_count, 0)
def test_rollback_migration(self):
-
self.migration.sec.reset_count()
migration_successful = start_async_migration("test_migration")
diff --git a/posthog/async_migrations/test/test_utils.py b/posthog/async_migrations/test/test_utils.py
index f2e45a24ab042..da01ec9dda54d 100644
--- a/posthog/async_migrations/test/test_utils.py
+++ b/posthog/async_migrations/test/test_utils.py
@@ -4,7 +4,10 @@
import pytest
from posthog.async_migrations.definition import AsyncMigrationOperationSQL
-from posthog.async_migrations.test.util import AsyncMigrationBaseTest, create_async_migration
+from posthog.async_migrations.test.util import (
+ AsyncMigrationBaseTest,
+ create_async_migration,
+)
from posthog.async_migrations.utils import (
complete_migration,
execute_on_each_shard,
diff --git a/posthog/async_migrations/utils.py b/posthog/async_migrations/utils.py
index efa70424e8846..20ad64cf7d75b 100644
--- a/posthog/async_migrations/utils.py
+++ b/posthog/async_migrations/utils.py
@@ -15,7 +15,11 @@
from posthog.clickhouse.client.connection import make_ch_pool
from posthog.clickhouse.query_tagging import reset_query_tags, tag_queries
from posthog.email import is_email_available
-from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus
+from posthog.models.async_migration import (
+ AsyncMigration,
+ AsyncMigrationError,
+ MigrationStatus,
+)
from posthog.models.instance_setting import get_instance_setting
from posthog.models.user import User
from posthog.settings import (
@@ -154,7 +158,13 @@ def sleep_until_finished(name, is_running: Callable[[], bool]) -> None:
def run_optimize_table(
- *, unique_name: str, query_id: str, table_name: str, deduplicate=False, final=False, per_shard=False
+ *,
+ unique_name: str,
+ query_id: str,
+ table_name: str,
+ deduplicate=False,
+ final=False,
+ per_shard=False,
):
"""
Runs the passed OPTIMIZE TABLE query.
@@ -163,7 +173,10 @@ def run_optimize_table(
we'll wait for that to complete first.
"""
if not TEST and _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0:
- sleep_until_finished(unique_name, lambda: _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0)
+ sleep_until_finished(
+ unique_name,
+ lambda: _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0,
+ )
else:
final_clause = "FINAL" if final else ""
deduplicate_clause = "DEDUPLICATE" if deduplicate else ""
@@ -175,7 +188,10 @@ def run_optimize_table(
execute_op_clickhouse(
sql,
query_id=f"optimize:{unique_name}/{query_id}",
- settings={"max_execution_time": ASYNC_MIGRATIONS_DEFAULT_TIMEOUT_SECONDS, "mutations_sync": 2},
+ settings={
+ "max_execution_time": ASYNC_MIGRATIONS_DEFAULT_TIMEOUT_SECONDS,
+ "mutations_sync": 2,
+ },
per_shard=per_shard,
)
@@ -213,7 +229,9 @@ def process_error(
from posthog.tasks.email import send_async_migration_errored_email
send_async_migration_errored_email.delay(
- migration_key=migration_instance.name, time=now().isoformat(), error=error
+ migration_key=migration_instance.name,
+ time=now().isoformat(),
+ error=error,
)
if (
@@ -237,7 +255,9 @@ def trigger_migration(migration_instance: AsyncMigration, fresh_start: bool = Tr
def force_stop_migration(
- migration_instance: AsyncMigration, error: str = "Force stopped by user", rollback: bool = True
+ migration_instance: AsyncMigration,
+ error: str = "Force stopped by user",
+ rollback: bool = True,
):
"""
In theory this is dangerous, as it can cause another task to be lost
@@ -299,7 +319,10 @@ def mark_async_migration_as_running(migration_instance: AsyncMigration) -> bool:
# update to running iff the state was Starting (ui triggered) or NotStarted (api triggered)
with transaction.atomic():
instance = AsyncMigration.objects.select_for_update().get(pk=migration_instance.pk)
- if instance.status not in [MigrationStatus.Starting, MigrationStatus.NotStarted]:
+ if instance.status not in [
+ MigrationStatus.Starting,
+ MigrationStatus.NotStarted,
+ ]:
return False
instance.status = MigrationStatus.Running
instance.current_query_id = ""
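
A side note on the run_optimize_table signature reflowed above: the leading bare "*" (present both before and after this change) makes every parameter keyword-only, so splitting them one per line is purely cosmetic for callers. A minimal illustration of that Python rule, using hypothetical names:

    def run(*, unique_name: str, deduplicate: bool = False) -> str:
        # Everything after the bare "*" must be passed by keyword.
        return f"optimize:{unique_name} deduplicate={deduplicate}"

    print(run(unique_name="person", deduplicate=True))  # ok
    # run("person") would raise TypeError: takes 0 positional arguments.
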
diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py
index 06fb9866ac0e9..aa71cc9060a13 100644
--- a/posthog/batch_exports/http.py
+++ b/posthog/batch_exports/http.py
@@ -88,7 +88,11 @@ class RunsCursorPagination(CursorPagination):
class BatchExportRunViewSet(StructuredViewSetMixin, viewsets.ReadOnlyModelViewSet):
queryset = BatchExportRun.objects.all()
- permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission]
+ permission_classes = [
+ IsAuthenticated,
+ ProjectMembershipNecessaryPermissions,
+ TeamMemberAccessPermission,
+ ]
serializer_class = BatchExportRunSerializer
pagination_class = RunsCursorPagination
@@ -98,7 +102,8 @@ def get_queryset(self, date_range: tuple[dt.datetime, dt.datetime] | None = None
if date_range:
return self.queryset.filter(
- batch_export_id=self.kwargs["parent_lookup_batch_export_id"], created_at__range=date_range
+ batch_export_id=self.kwargs["parent_lookup_batch_export_id"],
+ created_at__range=date_range,
).order_by("-created_at")
else:
return self.queryset.filter(batch_export_id=self.kwargs["parent_lookup_batch_export_id"]).order_by(
@@ -178,7 +183,10 @@ def create(self, validated_data: dict) -> BatchExport:
str(team.uuid),
groups={"organization": str(team.organization.id)},
group_properties={
- "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at}
+ "organization": {
+ "id": str(team.organization.id),
+ "created_at": team.organization.created_at,
+ }
},
send_feature_flag_events=False,
):
@@ -216,7 +224,11 @@ def update(self, batch_export: BatchExport, validated_data: dict) -> BatchExport
class BatchExportViewSet(StructuredViewSetMixin, viewsets.ModelViewSet):
queryset = BatchExport.objects.all()
- permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission]
+ permission_classes = [
+ IsAuthenticated,
+ ProjectMembershipNecessaryPermissions,
+ TeamMemberAccessPermission,
+ ]
serializer_class = BatchExportSerializer
def get_queryset(self):
@@ -319,7 +331,11 @@ class Meta:
class BatchExportLogViewSet(StructuredViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet):
- permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission]
+ permission_classes = [
+ IsAuthenticated,
+ ProjectMembershipNecessaryPermissions,
+ TeamMemberAccessPermission,
+ ]
serializer_class = BatchExportLogEntrySerializer
def get_queryset(self):
diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py
index 633163b831238..dc86c2ce7286a 100644
--- a/posthog/batch_exports/models.py
+++ b/posthog/batch_exports/models.py
@@ -38,7 +38,9 @@ class Destination(models.TextChoices):
}
type: models.CharField = models.CharField(
- choices=Destination.choices, max_length=64, help_text="A choice of supported BatchExportDestination types."
+ choices=Destination.choices,
+ max_length=64,
+ help_text="A choice of supported BatchExportDestination types.",
)
config: models.JSONField = models.JSONField(
default=dict,
@@ -46,10 +48,12 @@ class Destination(models.TextChoices):
help_text="A JSON field to store all configuration parameters required to access a BatchExportDestination.",
)
created_at: models.DateTimeField = models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportDestination was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportDestination was created.",
)
last_updated_at: models.DateTimeField = models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportDestination was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportDestination was last updated.",
)
@@ -74,7 +78,9 @@ class Status(models.TextChoices):
STARTING = "Starting"
batch_export = models.ForeignKey(
- "BatchExport", on_delete=models.CASCADE, help_text="The BatchExport this run belongs to."
+ "BatchExport",
+ on_delete=models.CASCADE,
+ help_text="The BatchExport this run belongs to.",
)
status: models.CharField = models.CharField(
choices=Status.choices, max_length=64, help_text="The status of this run."
@@ -89,17 +95,25 @@ class Status(models.TextChoices):
data_interval_end: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.")
cursor: models.TextField = models.TextField(null=True, help_text="An opaque cursor that may be used to resume.")
created_at: models.DateTimeField = models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportRun was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportRun was created.",
)
finished_at: models.DateTimeField = models.DateTimeField(
- null=True, help_text="The timestamp at which this BatchExportRun finished, successfully or not."
+ null=True,
+ help_text="The timestamp at which this BatchExportRun finished, successfully or not.",
)
last_updated_at: models.DateTimeField = models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportRun was last updated.",
)
-BATCH_EXPORT_INTERVALS = [("hour", "hour"), ("day", "day"), ("week", "week"), ("every 5 minutes", "every 5 minutes")]
+BATCH_EXPORT_INTERVALS = [
+ ("hour", "hour"),
+ ("day", "day"),
+ ("week", "week"),
+ ("every 5 minutes", "every 5 minutes"),
+]
class BatchExport(UUIDModel):
@@ -113,7 +127,9 @@ class BatchExport(UUIDModel):
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.")
name: models.TextField = models.TextField(help_text="A human-readable name for this BatchExport.")
destination: models.ForeignKey = models.ForeignKey(
- "BatchExportDestination", on_delete=models.CASCADE, help_text="The destination to export data to."
+ "BatchExportDestination",
+ on_delete=models.CASCADE,
+ help_text="The destination to export data to.",
)
interval = models.CharField(
max_length=64,
@@ -125,19 +141,27 @@ class BatchExport(UUIDModel):
paused = models.BooleanField(default=False, help_text="Whether this BatchExport is paused or not.")
deleted = models.BooleanField(default=False, help_text="Whether this BatchExport is deleted or not.")
created_at: models.DateTimeField = models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExport was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExport was created.",
)
last_updated_at: models.DateTimeField = models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExport was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExport was last updated.",
)
last_paused_at: models.DateTimeField = models.DateTimeField(
- null=True, default=None, help_text="The timestamp at which this BatchExport was last paused."
+ null=True,
+ default=None,
+ help_text="The timestamp at which this BatchExport was last paused.",
)
start_at: models.DateTimeField = models.DateTimeField(
- null=True, default=None, help_text="Time before which any Batch Export runs won't be triggered."
+ null=True,
+ default=None,
+ help_text="Time before which any Batch Export runs won't be triggered.",
)
end_at: models.DateTimeField = models.DateTimeField(
- null=True, default=None, help_text="Time after which any Batch Export runs won't be triggered."
+ null=True,
+ default=None,
+ help_text="Time after which any Batch Export runs won't be triggered.",
)
@property
@@ -244,7 +268,9 @@ class Status(models.TextChoices):
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.")
batch_export = models.ForeignKey(
- "BatchExport", on_delete=models.CASCADE, help_text="The BatchExport this backfill belongs to."
+ "BatchExport",
+ on_delete=models.CASCADE,
+ help_text="The BatchExport this backfill belongs to.",
)
start_at: models.DateTimeField = models.DateTimeField(help_text="The start of the data interval.")
end_at: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.")
@@ -252,11 +278,14 @@ class Status(models.TextChoices):
choices=Status.choices, max_length=64, help_text="The status of this backfill."
)
created_at: models.DateTimeField = models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportBackfill was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportBackfill was created.",
)
finished_at: models.DateTimeField = models.DateTimeField(
- null=True, help_text="The timestamp at which this BatchExportBackfill finished, successfully or not."
+ null=True,
+ help_text="The timestamp at which this BatchExportBackfill finished, successfully or not.",
)
last_updated_at: models.DateTimeField = models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportBackfill was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportBackfill was last updated.",
)
diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py
index ffe5ee0b692d9..008096d5f50bc 100644
--- a/posthog/batch_exports/service.py
+++ b/posthog/batch_exports/service.py
@@ -186,7 +186,10 @@ async def pause_schedule(temporal: Client, schedule_id: str, note: str | None =
def unpause_batch_export(
- temporal: Client, batch_export_id: str, note: str | None = None, backfill: bool = False
+ temporal: Client,
+ batch_export_id: str,
+ note: str | None = None,
+ backfill: bool = False,
) -> None:
"""Pause this BatchExport.
diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py
index 73fc11de7b54f..be11c4ffe48b5 100644
--- a/posthog/caching/calculate_results.py
+++ b/posthog/caching/calculate_results.py
@@ -16,12 +16,23 @@
)
from posthog.decorators import CacheType
from posthog.logging.timing import timed
-from posthog.models import Dashboard, DashboardTile, EventDefinition, Filter, Insight, RetentionFilter, Team
+from posthog.models import (
+ Dashboard,
+ DashboardTile,
+ EventDefinition,
+ Filter,
+ Insight,
+ RetentionFilter,
+ Team,
+)
from posthog.models.filters import PathFilter
from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.filters.utils import get_filter
from posthog.models.insight import generate_insight_cache_key
-from posthog.queries.funnels import ClickhouseFunnelTimeToConvert, ClickhouseFunnelTrends
+from posthog.queries.funnels import (
+ ClickhouseFunnelTimeToConvert,
+ ClickhouseFunnelTrends,
+)
from posthog.queries.funnels.utils import get_funnel_order_class
from posthog.queries.paths import Paths
from posthog.queries.retention import Retention
@@ -225,6 +236,10 @@ def _events_from_filter(filter: Union[RetentionFilter, StickinessFilter, PathFil
return []
except Exception as exc:
- logger.error("update_cache_item.could_not_list_events_from_filter", exc=exc, exc_info=True)
+ logger.error(
+ "update_cache_item.could_not_list_events_from_filter",
+ exc=exc,
+ exc_info=True,
+ )
capture_exception(exc)
return []
diff --git a/posthog/caching/fetch_from_cache.py b/posthog/caching/fetch_from_cache.py
index b507cdf4d277e..d7c0e5e03e50a 100644
--- a/posthog/caching/fetch_from_cache.py
+++ b/posthog/caching/fetch_from_cache.py
@@ -5,14 +5,19 @@
from django.utils.timezone import now
from prometheus_client import Counter
-from posthog.caching.calculate_results import calculate_cache_key, calculate_result_by_insight
+from posthog.caching.calculate_results import (
+ calculate_cache_key,
+ calculate_result_by_insight,
+)
from posthog.caching.insight_cache import update_cached_state
from posthog.models import DashboardTile, Insight
from posthog.models.dashboard import Dashboard
from posthog.utils import get_safe_cache
insight_cache_read_counter = Counter(
- "posthog_cloud_insight_cache_read", "A read from the redis insight cache", labelnames=["result"]
+ "posthog_cloud_insight_cache_read",
+ "A read from the redis insight cache",
+ labelnames=["result"],
)
@@ -72,7 +77,9 @@ def fetch_cached_insight_result(target: Union[Insight, DashboardTile], refresh_f
def synchronously_update_cache(
- insight: Insight, dashboard: Optional[Dashboard], refresh_frequency: Optional[timedelta] = None
+ insight: Insight,
+ dashboard: Optional[Dashboard],
+ refresh_frequency: Optional[timedelta] = None,
) -> InsightResult:
cache_key, cache_type, result = calculate_result_by_insight(team=insight.team, insight=insight, dashboard=dashboard)
timestamp = now()
diff --git a/posthog/caching/insight_cache.py b/posthog/caching/insight_cache.py
index b019fd774df39..d1214c3a67a98 100644
--- a/posthog/caching/insight_cache.py
+++ b/posthog/caching/insight_cache.py
@@ -42,7 +42,9 @@ def schedule_cache_updates():
if len(representative_by_cache_key) > 0:
logger.warn(
- "Scheduled caches to be updated", candidates=len(to_update), tasks_created=len(representative_by_cache_key)
+ "Scheduled caches to be updated",
+ candidates=len(to_update),
+ tasks_created=len(representative_by_cache_key),
)
else:
logger.warn("No caches were found to be updated")
@@ -120,7 +122,12 @@ def update_cache(caching_state_id: UUID):
statsd.incr("caching_state_update_success")
statsd.incr("caching_state_update_rows_updated", rows_updated)
statsd.timing("caching_state_update_success_timing", duration)
- logger.warn("Re-calculated insight cache", rows_updated=rows_updated, duration=duration, **metadata)
+ logger.warn(
+ "Re-calculated insight cache",
+ rows_updated=rows_updated,
+ duration=duration,
+ **metadata,
+ )
else:
logger.warn(
"Failed to re-calculate insight cache",
@@ -137,11 +144,18 @@ def update_cache(caching_state_id: UUID):
update_cache_task.apply_async(args=[caching_state_id], countdown=timedelta(minutes=10).total_seconds())
InsightCachingState.objects.filter(pk=caching_state.pk).update(
- refresh_attempt=caching_state.refresh_attempt + 1, last_refresh_queued_at=now()
+ refresh_attempt=caching_state.refresh_attempt + 1,
+ last_refresh_queued_at=now(),
)
-def update_cached_state(team_id: int, cache_key: str, timestamp: datetime, result: Any, ttl: Optional[int] = None):
+def update_cached_state(
+ team_id: int,
+ cache_key: str,
+ timestamp: datetime,
+ result: Any,
+ ttl: Optional[int] = None,
+):
cache.set(cache_key, result, ttl if ttl is not None else settings.CACHED_RESULTS_TTL)
insight_cache_write_counter.inc()
@@ -156,6 +170,9 @@ def _extract_insight_dashboard(caching_state: InsightCachingState) -> Tuple[Insi
if caching_state.dashboard_tile is not None:
assert caching_state.dashboard_tile.insight is not None
- return caching_state.dashboard_tile.insight, caching_state.dashboard_tile.dashboard
+ return (
+ caching_state.dashboard_tile.insight,
+ caching_state.dashboard_tile.dashboard,
+ )
else:
return caching_state.insight, None
diff --git a/posthog/caching/insight_caching_state.py b/posthog/caching/insight_caching_state.py
index fc87915c25a98..a8ae36c14f05a 100644
--- a/posthog/caching/insight_caching_state.py
+++ b/posthog/caching/insight_caching_state.py
@@ -20,6 +20,7 @@
logger = structlog.get_logger(__name__)
+
# :TODO: Make these configurable
class TargetCacheAge(Enum):
NO_CACHING = None
@@ -95,7 +96,12 @@ def sync_insight_cache_states():
tiles = (
DashboardTile.objects.all()
.filter(insight__isnull=False)
- .prefetch_related("dashboard", "dashboard__sharingconfiguration_set", "insight", "insight__team")
+ .prefetch_related(
+ "dashboard",
+ "dashboard__sharingconfiguration_set",
+ "insight",
+ "insight__team",
+ )
.order_by("pk")
)
@@ -105,7 +111,10 @@ def sync_insight_cache_states():
def upsert(
- team: Team, target: Union[DashboardTile, Insight], lazy_loader: Optional[LazyLoader] = None, execute=True
+ team: Team,
+ target: Union[DashboardTile, Insight],
+ lazy_loader: Optional[LazyLoader] = None,
+ execute=True,
) -> Optional[InsightCachingState]:
lazy_loader = lazy_loader or LazyLoader()
cache_key = calculate_cache_key(target)
@@ -129,7 +138,11 @@ def upsert(
return model
-def sync_insight_caching_state(team_id: int, insight_id: Optional[int] = None, dashboard_tile_id: Optional[int] = None):
+def sync_insight_caching_state(
+ team_id: int,
+ insight_id: Optional[int] = None,
+ dashboard_tile_id: Optional[int] = None,
+):
try:
team = Team.objects.get(pk=team_id)
item: Optional[DashboardTile | Insight] = None
diff --git a/posthog/caching/insights_api.py b/posthog/caching/insights_api.py
index 399e889cf18af..1b07f37bc7804 100644
--- a/posthog/caching/insights_api.py
+++ b/posthog/caching/insights_api.py
@@ -5,7 +5,10 @@
import zoneinfo
from rest_framework import request
-from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME, calculate_cache_key
+from posthog.caching.calculate_results import (
+ CLICKHOUSE_MAX_EXECUTION_TIME,
+ calculate_cache_key,
+)
from posthog.caching.insight_caching_state import InsightCachingState
from posthog.models import DashboardTile, Insight
from posthog.models.filters.utils import get_filter
@@ -25,7 +28,11 @@
def should_refresh_insight(
- insight: Insight, dashboard_tile: Optional[DashboardTile], *, request: request.Request, is_shared=False
+ insight: Insight,
+ dashboard_tile: Optional[DashboardTile],
+ *,
+ request: request.Request,
+ is_shared=False,
) -> Tuple[bool, timedelta]:
"""Return whether the insight should be refreshed now, and what's the minimum wait time between refreshes.
diff --git a/posthog/caching/test/test_fetch_from_cache.py b/posthog/caching/test/test_fetch_from_cache.py
index 4ffd44d24eca3..6ac03f0a0e451 100644
--- a/posthog/caching/test/test_fetch_from_cache.py
+++ b/posthog/caching/test/test_fetch_from_cache.py
@@ -11,7 +11,13 @@
)
from posthog.decorators import CacheType
from posthog.models import Insight
-from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, _create_insight, flush_persons_and_events
+from posthog.test.base import (
+ BaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_insight,
+ flush_persons_and_events,
+)
from posthog.utils import get_safe_cache
@@ -20,12 +26,24 @@ class TestFetchFromCache(ClickhouseTestMixin, BaseTest):
def setUp(self):
super().setUp()
- _create_event(team=self.team, event="$pageview", distinct_id="1", properties={"prop": "val"})
- _create_event(team=self.team, event="$pageview", distinct_id="2", properties={"prop": "another_val"})
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="1",
+ properties={"prop": "val"},
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="2",
+ properties={"prop": "another_val"},
+ )
flush_persons_and_events()
insight, dashboard, dashboard_tile = _create_insight(
- self.team, {"events": [{"id": "$pageview"}], "properties": []}, {"properties": [{}]}
+ self.team,
+ {"events": [{"id": "$pageview"}], "properties": []},
+ {"properties": [{}]},
)
self.dashboard = dashboard
self.insight = insight
diff --git a/posthog/caching/test/test_insight_cache.py b/posthog/caching/test/test_insight_cache.py
index 99ff1d8ca63d2..1dbe0b5ce2dc1 100644
--- a/posthog/caching/test/test_insight_cache.py
+++ b/posthog/caching/test/test_insight_cache.py
@@ -7,10 +7,19 @@
from freezegun import freeze_time
from posthog.caching.calculate_results import get_cache_type
-from posthog.caching.insight_cache import fetch_states_in_need_of_updating, schedule_cache_updates, update_cache
+from posthog.caching.insight_cache import (
+ fetch_states_in_need_of_updating,
+ schedule_cache_updates,
+ update_cache,
+)
from posthog.caching.insight_caching_state import upsert
from posthog.caching.test.test_insight_caching_state import create_insight, filter_dict
-from posthog.constants import INSIGHT_PATHS, INSIGHT_RETENTION, INSIGHT_STICKINESS, INSIGHT_TRENDS
+from posthog.constants import (
+ INSIGHT_PATHS,
+ INSIGHT_RETENTION,
+ INSIGHT_STICKINESS,
+ INSIGHT_TRENDS,
+)
from posthog.decorators import CacheType
from posthog.models import Filter, InsightCachingState, RetentionFilter, Team, User
from posthog.models.filters import PathFilter
@@ -64,7 +73,10 @@ def test_schedule_cache_updates(update_cache_task, team: Team, user: User):
schedule_cache_updates()
- assert update_cache_task.delay.call_args_list == [call(caching_state1.pk), call(caching_state3.pk)]
+ assert update_cache_task.delay.call_args_list == [
+ call(caching_state1.pk),
+ call(caching_state3.pk),
+ ]
last_refresh_queued_at = InsightCachingState.objects.filter(team=team).values_list(
"last_refresh_queued_at", flat=True
@@ -81,9 +93,27 @@ def test_schedule_cache_updates(update_cache_task, team: Team, user: User):
({"last_refresh": None}, 1),
({"target_cache_age": None, "last_refresh": None}, 0),
({"target_cache_age": timedelta(days=1), "last_refresh": timedelta(days=2)}, 1),
- ({"target_cache_age": timedelta(days=1), "last_refresh": timedelta(hours=23)}, 0),
- ({"target_cache_age": timedelta(days=1), "last_refresh_queued_at": timedelta(hours=23)}, 1),
- ({"target_cache_age": timedelta(days=1), "last_refresh_queued_at": timedelta(minutes=5)}, 0),
+ (
+ {
+ "target_cache_age": timedelta(days=1),
+ "last_refresh": timedelta(hours=23),
+ },
+ 0,
+ ),
+ (
+ {
+ "target_cache_age": timedelta(days=1),
+ "last_refresh_queued_at": timedelta(hours=23),
+ },
+ 1,
+ ),
+ (
+ {
+ "target_cache_age": timedelta(days=1),
+ "last_refresh_queued_at": timedelta(minutes=5),
+ },
+ 0,
+ ),
({"refresh_attempt": 2}, 1),
({"refresh_attempt": 3}, 0),
],
@@ -137,7 +167,11 @@ def test_update_cache_updates_identical_cache_keys(team: Team, user: User, cache
@patch("posthog.celery.update_cache_task")
@patch("posthog.caching.insight_cache.calculate_result_by_insight")
def test_update_cache_when_calculation_fails(
- spy_calculate_result_by_insight, spy_update_cache_task, team: Team, user: User, cache
+ spy_calculate_result_by_insight,
+ spy_update_cache_task,
+ team: Team,
+ user: User,
+ cache,
):
caching_state = create_insight_caching_state(team, user, refresh_attempt=1)
spy_calculate_result_by_insight.side_effect = Exception()
@@ -180,6 +214,11 @@ def test_update_cache_when_recently_refreshed(spy_calculate_result_by_insight, t
],
)
@pytest.mark.django_db
-def test_get_cache_type(team: Team, filter_model: Callable, insight_type: str, expected_cache_type: CacheType) -> None:
+def test_get_cache_type(
+ team: Team,
+ filter_model: Callable,
+ insight_type: str,
+ expected_cache_type: CacheType,
+) -> None:
filter = filter_model(data={"insight": insight_type}, team=team)
assert get_cache_type(filter) == expected_cache_type
diff --git a/posthog/caching/test/test_insight_caching_state.py b/posthog/caching/test/test_insight_caching_state.py
index 9b6f60aecf1c9..03a3652555202 100644
--- a/posthog/caching/test/test_insight_caching_state.py
+++ b/posthog/caching/test/test_insight_caching_state.py
@@ -52,7 +52,12 @@ def create_insight(
insight = Insight.objects.create(team=team, filters=filters, deleted=deleted, query=query)
if viewed_at_delta is not None:
- InsightViewed.objects.create(insight=insight, last_viewed_at=now() - viewed_at_delta, user=user, team=team)
+ InsightViewed.objects.create(
+ insight=insight,
+ last_viewed_at=now() - viewed_at_delta,
+ user=user,
+ team=team,
+ )
if is_shared:
SharingConfiguration.objects.create(team=team, insight=insight, enabled=True)
@@ -78,7 +83,9 @@ def create_tile(
mock_active_teams.return_value = {team.pk} if team_should_be_active else set()
dashboard = Dashboard.objects.create(
- team=team, last_accessed_at=now() - viewed_at_delta if viewed_at_delta else None, deleted=dashboard_deleted
+ team=team,
+ last_accessed_at=now() - viewed_at_delta if viewed_at_delta else None,
+ deleted=dashboard_deleted,
)
if on_home_dashboard:
@@ -109,36 +116,91 @@ def create_tile(
[
# Insight test cases
pytest.param(create_insight, {}, TargetCacheAge.MID_PRIORITY, id="shared insight (base)"),
- pytest.param(create_insight, {"is_shared": False}, TargetCacheAge.NO_CACHING, id="not shared insight"),
pytest.param(
- create_insight, {"team_should_be_active": False}, TargetCacheAge.NO_CACHING, id="insight with inactive team"
+ create_insight,
+ {"is_shared": False},
+ TargetCacheAge.NO_CACHING,
+ id="not shared insight",
+ ),
+ pytest.param(
+ create_insight,
+ {"team_should_be_active": False},
+ TargetCacheAge.NO_CACHING,
+ id="insight with inactive team",
+ ),
+ pytest.param(
+ create_insight,
+ {"viewed_at_delta": None},
+ TargetCacheAge.NO_CACHING,
+ id="insight never viewed",
),
- pytest.param(create_insight, {"viewed_at_delta": None}, TargetCacheAge.NO_CACHING, id="insight never viewed"),
pytest.param(
create_insight,
{"viewed_at_delta": timedelta(weeks=100)},
TargetCacheAge.NO_CACHING,
id="insight viewed long time ago",
),
- pytest.param(create_insight, {"filters": {}}, TargetCacheAge.NO_CACHING, id="insight with no filters"),
- pytest.param(create_insight, {"deleted": True}, TargetCacheAge.NO_CACHING, id="deleted insight"),
+ pytest.param(
+ create_insight,
+ {"filters": {}},
+ TargetCacheAge.NO_CACHING,
+ id="insight with no filters",
+ ),
+ pytest.param(
+ create_insight,
+ {"deleted": True},
+ TargetCacheAge.NO_CACHING,
+ id="deleted insight",
+ ),
# Dashboard tile test cases
pytest.param(create_tile, {}, TargetCacheAge.LOW_PRIORITY, id="shared tile (base)"),
- pytest.param(create_tile, {"is_dashboard_shared": False}, TargetCacheAge.NO_CACHING, id="not shared tile"),
pytest.param(
- create_tile, {"team_should_be_active": False}, TargetCacheAge.NO_CACHING, id="tile with inactive team"
+ create_tile,
+ {"is_dashboard_shared": False},
+ TargetCacheAge.NO_CACHING,
+ id="not shared tile",
+ ),
+ pytest.param(
+ create_tile,
+ {"team_should_be_active": False},
+ TargetCacheAge.NO_CACHING,
+ id="tile with inactive team",
+ ),
+ pytest.param(
+ create_tile,
+ {"dashboard_tile_deleted": True},
+ TargetCacheAge.NO_CACHING,
+ id="deleted tile",
+ ),
+ pytest.param(
+ create_tile,
+ {"dashboard_deleted": True},
+ TargetCacheAge.NO_CACHING,
+ id="tile with deleted dashboard",
),
- pytest.param(create_tile, {"dashboard_tile_deleted": True}, TargetCacheAge.NO_CACHING, id="deleted tile"),
pytest.param(
- create_tile, {"dashboard_deleted": True}, TargetCacheAge.NO_CACHING, id="tile with deleted dashboard"
+ create_tile,
+ {"insight_deleted": True},
+ TargetCacheAge.NO_CACHING,
+ id="tile with deleted insight",
),
- pytest.param(create_tile, {"insight_deleted": True}, TargetCacheAge.NO_CACHING, id="tile with deleted insight"),
pytest.param(
- create_tile, {"insight_filters": {}}, TargetCacheAge.NO_CACHING, id="tile with insight with no filters"
+ create_tile,
+ {"insight_filters": {}},
+ TargetCacheAge.NO_CACHING,
+ id="tile with insight with no filters",
),
- pytest.param(create_tile, {"text_tile": True}, TargetCacheAge.NO_CACHING, id="tile with text"),
pytest.param(
- create_tile, {"on_home_dashboard": True}, TargetCacheAge.HIGH_PRIORITY, id="tile on home dashboard"
+ create_tile,
+ {"text_tile": True},
+ TargetCacheAge.NO_CACHING,
+ id="tile with text",
+ ),
+ pytest.param(
+ create_tile,
+ {"on_home_dashboard": True},
+ TargetCacheAge.HIGH_PRIORITY,
+ id="tile on home dashboard",
),
pytest.param(
create_tile,
@@ -165,7 +227,10 @@ def create_tile(
id="recently viewed tile (2)",
),
pytest.param(
- create_tile, {"viewed_at_delta": timedelta(days=20)}, TargetCacheAge.LOW_PRIORITY, id="tile viewed ages ago"
+ create_tile,
+ {"viewed_at_delta": timedelta(days=20)},
+ TargetCacheAge.LOW_PRIORITY,
+ id="tile viewed ages ago",
),
# cacheable types of query
pytest.param(
@@ -182,13 +247,19 @@ def create_tile(
),
pytest.param(
create_insight,
- {"query": {"kind": "TimeToSeeDataSessionsQuery"}, "viewed_at_delta": timedelta(days=1)},
+ {
+ "query": {"kind": "TimeToSeeDataSessionsQuery"},
+ "viewed_at_delta": timedelta(days=1),
+ },
TargetCacheAge.MID_PRIORITY,
id="insight with TimeToSeeDataSessionsQuery query viewed recently",
),
pytest.param(
create_insight,
- {"query": {"kind": "TimeToSeeDataQuery"}, "viewed_at_delta": timedelta(days=1)},
+ {
+ "query": {"kind": "TimeToSeeDataQuery"},
+ "viewed_at_delta": timedelta(days=1),
+ },
TargetCacheAge.MID_PRIORITY,
id="insight with TimeToSeeDataQuery query viewed recently",
),
@@ -220,7 +291,12 @@ def create_tile(
@pytest.mark.django_db
@patch("posthog.caching.insight_caching_state.active_teams")
def test_calculate_target_age(
- mock_active_teams, team: Team, user: User, create_item, create_item_kw: Dict, expected_target_age: TargetCacheAge
+ mock_active_teams,
+ team: Team,
+ user: User,
+ create_item,
+ create_item_kw: Dict,
+ expected_target_age: TargetCacheAge,
):
item = cast(
Union[Insight, DashboardTile],
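
The long pytest.param(...) reflow above keeps the same structure throughout: each case bundles a factory callable, its keyword overrides, the expected TargetCacheAge, and a readable id=. A self-contained sketch of that parametrization style, with toy values rather than the real cases:

    import pytest

    @pytest.mark.parametrize(
        "value,expected",
        [
            pytest.param(2, 4, id="small input"),
            pytest.param(10, 100, id="larger input"),
        ],
    )
    def test_square(value: int, expected: int) -> None:
        assert value * value == expected
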
diff --git a/posthog/caching/test/test_should_refresh_insight.py b/posthog/caching/test/test_should_refresh_insight.py
index 12fb385ef2926..9c8932cd61e19 100644
--- a/posthog/caching/test/test_should_refresh_insight.py
+++ b/posthog/caching/test/test_should_refresh_insight.py
@@ -8,7 +8,10 @@
from rest_framework.request import Request
from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME
from posthog.caching.insight_caching_state import InsightCachingState
-from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, should_refresh_insight
+from posthog.caching.insights_api import (
+ BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL,
+ should_refresh_insight,
+)
from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_insight
@@ -97,7 +100,9 @@ def test_insights_with_hour_intervals_can_be_refreshed_more_often(self):
@freeze_time("2012-01-14T03:21:34.000Z")
def test_insights_with_ranges_lower_than_7_days_can_be_refreshed_more_often(self):
insight, _, _ = _create_insight(
- self.team, {"events": [{"id": "$pageview"}], "interval": "day", "date_from": "-3d"}, {}
+ self.team,
+ {"events": [{"id": "$pageview"}], "interval": "day", "date_from": "-3d"},
+ {},
)
should_refresh_now, refresh_frequency = should_refresh_insight(insight, None, request=self.refresh_request)
@@ -116,7 +121,9 @@ def test_insights_with_ranges_lower_than_7_days_can_be_refreshed_more_often(self
@freeze_time("2012-01-14T03:21:34.000Z")
def test_dashboard_filters_should_override_insight_filters_when_deciding_on_refresh_time(self):
insight, _, dashboard_tile = _create_insight(
- self.team, {"events": [{"id": "$pageview"}], "interval": "month"}, {"interval": "hour"}
+ self.team,
+ {"events": [{"id": "$pageview"}], "interval": "month"},
+ {"interval": "hour"},
)
should_refresh_now, refresh_frequency = should_refresh_insight(
diff --git a/posthog/caching/test/test_tolerant_zlib_compressor.py b/posthog/caching/test/test_tolerant_zlib_compressor.py
index 3f895f244b49e..acefa330fe228 100644
--- a/posthog/caching/test/test_tolerant_zlib_compressor.py
+++ b/posthog/caching/test/test_tolerant_zlib_compressor.py
@@ -15,8 +15,18 @@ class TestTolerantZlibCompressor(TestCase):
@parameterized.expand(
[
- ("test_when_disabled_compress_is_the_identity", False, uncompressed_bytes, uncompressed_bytes),
- ("test_when_enabled_can_compress", True, uncompressed_bytes, compressed_bytes),
+ (
+ "test_when_disabled_compress_is_the_identity",
+ False,
+ uncompressed_bytes,
+ uncompressed_bytes,
+ ),
+ (
+ "test_when_enabled_can_compress",
+ True,
+ uncompressed_bytes,
+ compressed_bytes,
+ ),
(
"test_when_enabled_does_not_compress_small_values",
True,
@@ -32,9 +42,24 @@ def test_the_zlib_compressor_compression(self, _, setting: bool, input: bytes, o
@parameterized.expand(
[
- ("test_when_disabled_decompress_is_the_identity", False, uncompressed_bytes, uncompressed_bytes),
- ("test_when_enabled_can_decompress", True, compressed_bytes, uncompressed_bytes),
- ("test_when_disabled_can_still_decompress", False, compressed_bytes, uncompressed_bytes),
+ (
+ "test_when_disabled_decompress_is_the_identity",
+ False,
+ uncompressed_bytes,
+ uncompressed_bytes,
+ ),
+ (
+ "test_when_enabled_can_decompress",
+ True,
+ compressed_bytes,
+ uncompressed_bytes,
+ ),
+ (
+ "test_when_disabled_can_still_decompress",
+ False,
+ compressed_bytes,
+ uncompressed_bytes,
+ ),
]
)
def test_the_zlib_compressor_decompression(self, _, setting: bool, input: bytes, output: bytes) -> None:
diff --git a/posthog/caching/utils.py b/posthog/caching/utils.py
index 45ff4ba640968..636fdbb19c53e 100644
--- a/posthog/caching/utils.py
+++ b/posthog/caching/utils.py
@@ -56,7 +56,10 @@ def active_teams() -> Set[int]:
)
if not teams_by_recency:
return set()
- redis.zadd(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, {team: score for team, score in teams_by_recency})
+ redis.zadd(
+ RECENTLY_ACCESSED_TEAMS_REDIS_KEY,
+ {team: score for team, score in teams_by_recency},
+ )
redis.expire(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, IN_A_DAY)
all_teams = teams_by_recency
@@ -71,7 +74,10 @@ def stale_cache_invalidation_disabled(team: Team) -> bool:
str(team.uuid),
groups={"organization": str(team.organization.id)},
group_properties={
- "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at}
+ "organization": {
+ "id": str(team.organization.id),
+ "created_at": team.organization.created_at,
+ }
},
only_evaluate_locally=True,
send_feature_flag_events=False,
@@ -81,7 +87,9 @@ def stale_cache_invalidation_disabled(team: Team) -> bool:
def is_stale_filter(
- team: Team, filter: Filter | RetentionFilter | StickinessFilter | PathFilter, cached_result: Any
+ team: Team,
+ filter: Filter | RetentionFilter | StickinessFilter | PathFilter,
+ cached_result: Any,
) -> bool:
interval = filter.period.lower() if isinstance(filter, RetentionFilter) else filter.interval
return is_stale(team, filter.date_to, interval, cached_result)
diff --git a/posthog/celery.py b/posthog/celery.py
index fb9043f56467a..1eb5bb40db888 100644
--- a/posthog/celery.py
+++ b/posthog/celery.py
@@ -104,7 +104,10 @@ def on_worker_start(**kwargs) -> None:
def add_periodic_task_with_expiry(
- sender: Celery, schedule_seconds: int, task_signature: Signature, name: str | None = None
+ sender: Celery,
+ schedule_seconds: int,
+ task_signature: Signature,
+ name: str | None = None,
):
"""
If the workers get delayed in processing tasks, then tasks that fire every X seconds get queued multiple times
@@ -125,7 +128,10 @@ def add_periodic_task_with_expiry(
def setup_periodic_tasks(sender: Celery, **kwargs):
# Monitoring tasks
add_periodic_task_with_expiry(
- sender, 60, monitoring_check_clickhouse_schema_drift.s(), "check clickhouse schema drift"
+ sender,
+ 60,
+ monitoring_check_clickhouse_schema_drift.s(),
+ "check clickhouse schema drift",
)
if not settings.DEBUG:
@@ -136,15 +142,22 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
# Update events table partitions twice a week
sender.add_periodic_task(
- crontab(day_of_week="mon,fri", hour="0", minute="0"), update_event_partitions.s() # check twice a week
+ crontab(day_of_week="mon,fri", hour="0", minute="0"),
+ update_event_partitions.s(), # check twice a week
)
# Send all instance usage to the Billing service
sender.add_periodic_task(
- crontab(hour="0", minute="5"), send_org_usage_reports.s(), name="send instance usage report"
+ crontab(hour="0", minute="5"),
+ send_org_usage_reports.s(),
+ name="send instance usage report",
)
# Update local usage info for rate limiting purposes - offset by 30 minutes to not clash with the above
- sender.add_periodic_task(crontab(hour="*", minute="30"), update_quota_limiting.s(), name="update quota limiting")
+ sender.add_periodic_task(
+ crontab(hour="*", minute="30"),
+ update_quota_limiting.s(),
+ name="update quota limiting",
+ )
# PostHog Cloud cron jobs
# NOTE: We can't use is_cloud here as some Django elements aren't loaded yet. We check in the task execution instead
@@ -152,7 +165,11 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
sender.add_periodic_task(crontab(hour="4", minute="0"), verify_persons_data_in_sync.s())
# Every 30 minutes, send decide request counts to the main posthog instance
- sender.add_periodic_task(crontab(minute="*/30"), calculate_decide_usage.s(), name="calculate decide usage")
+ sender.add_periodic_task(
+ crontab(minute="*/30"),
+ calculate_decide_usage.s(),
+ name="calculate decide usage",
+ )
# Reset master project data every Monday at Thursday at 5 AM UTC. Mon and Thu because doing this every day
# would be too hard on ClickHouse, and those days ensure most users will have data at most 3 days old.
@@ -166,7 +183,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
sync_insight_cache_states_schedule = get_crontab(settings.SYNC_INSIGHT_CACHE_STATES_SCHEDULE)
if sync_insight_cache_states_schedule:
sender.add_periodic_task(
- sync_insight_cache_states_schedule, sync_insight_cache_states_task.s(), name="sync insight cache states"
+ sync_insight_cache_states_schedule,
+ sync_insight_cache_states_task.s(),
+ name="sync insight cache states",
)
add_periodic_task_with_expiry(
@@ -226,7 +245,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
name="PG table cache hit rate",
)
sender.add_periodic_task(
- crontab(minute="0", hour="*"), pg_plugin_server_query_timing.s(), name="PG plugin server query timing"
+ crontab(minute="0", hour="*"),
+ pg_plugin_server_query_timing.s(),
+ name="PG plugin server query timing",
)
add_periodic_task_with_expiry(
sender,
@@ -244,7 +265,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
if clear_clickhouse_crontab := get_crontab(settings.CLEAR_CLICKHOUSE_REMOVED_DATA_SCHEDULE_CRON):
sender.add_periodic_task(
- clear_clickhouse_crontab, clickhouse_clear_removed_data.s(), name="clickhouse clear removed data"
+ clear_clickhouse_crontab,
+ clickhouse_clear_removed_data.s(),
+ name="clickhouse clear removed data",
)
if clear_clickhouse_deleted_person_crontab := get_crontab(settings.CLEAR_CLICKHOUSE_DELETED_PERSON_SCHEDULE_CRON):
@@ -256,17 +279,21 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
if settings.EE_AVAILABLE:
sender.add_periodic_task(
- crontab(hour="0", minute=str(randrange(0, 40))), clickhouse_send_license_usage.s()
+ crontab(hour="0", minute=str(randrange(0, 40))),
+ clickhouse_send_license_usage.s(),
) # every day at a random minute past midnight. Randomize to avoid overloading license.posthog.com
sender.add_periodic_task(
- crontab(hour="4", minute=str(randrange(0, 40))), clickhouse_send_license_usage.s()
+ crontab(hour="4", minute=str(randrange(0, 40))),
+ clickhouse_send_license_usage.s(),
) # again a few hours later just to make sure
materialize_columns_crontab = get_crontab(settings.MATERIALIZE_COLUMNS_SCHEDULE_CRON)
if materialize_columns_crontab:
sender.add_periodic_task(
- materialize_columns_crontab, clickhouse_materialize_columns.s(), name="clickhouse materialize columns"
+ materialize_columns_crontab,
+ clickhouse_materialize_columns.s(),
+ name="clickhouse materialize columns",
)
sender.add_periodic_task(
@@ -276,7 +303,10 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
)
sender.add_periodic_task(crontab(hour="*", minute="55"), schedule_all_subscriptions.s())
- sender.add_periodic_task(crontab(hour="2", minute=str(randrange(0, 40))), ee_persist_finished_recordings.s())
+ sender.add_periodic_task(
+ crontab(hour="2", minute=str(randrange(0, 40))),
+ ee_persist_finished_recordings.s(),
+ )
sender.add_periodic_task(
crontab(minute="0", hour="*"),
@@ -303,7 +333,10 @@ def setup_periodic_tasks(sender: Celery, **kwargs):
def pre_run_signal_handler(task_id, task, **kwargs):
from statshog.defaults.django import statsd
- from posthog.clickhouse.client.connection import Workload, set_default_clickhouse_workload_type
+ from posthog.clickhouse.client.connection import (
+ Workload,
+ set_default_clickhouse_workload_type,
+ )
from posthog.clickhouse.query_tagging import tag_queries
statsd.incr("celery_tasks_metrics.pre_run", tags={"name": task.name})
@@ -359,7 +392,15 @@ def enqueue_clickhouse_execute_with_progress(
"""
from posthog.client import execute_with_progress
- execute_with_progress(team_id, query_id, query, args, settings, with_column_types, task_id=self.request.id)
+ execute_with_progress(
+ team_id,
+ query_id,
+ query,
+ args,
+ settings,
+ with_column_types,
+ task_id=self.request.id,
+ )
@app.task(ignore_result=True)
@@ -425,7 +466,9 @@ def pg_plugin_server_query_timing():
if key == "query_type":
continue
statsd.gauge(
- f"pg_plugin_server_query_{key}", value, tags={"query_type": row_dictionary["query_type"]}
+ f"pg_plugin_server_query_{key}",
+ value,
+ tags={"query_type": row_dictionary["query_type"]},
)
except:
# if this doesn't work keep going
@@ -457,7 +500,13 @@ def pg_row_count():
pass
-CLICKHOUSE_TABLES = ["events", "person", "person_distinct_id2", "session_replay_events", "log_entries"]
+CLICKHOUSE_TABLES = [
+ "events",
+ "person",
+ "person_distinct_id2",
+ "session_replay_events",
+ "log_entries",
+]
if not is_cloud():
CLICKHOUSE_TABLES.append("session_recording_events")
@@ -482,7 +531,11 @@ def clickhouse_lag():
)
query = QUERY.format(table=table)
lag = sync_execute(query)[0][2]
- statsd.gauge("posthog_celery_clickhouse__table_lag_seconds", lag, tags={"table": table})
+ statsd.gauge(
+ "posthog_celery_clickhouse__table_lag_seconds",
+ lag,
+ tags={"table": table},
+ )
lag_gauge.labels(table_name=table).set(lag)
except:
pass
@@ -535,7 +588,12 @@ def ingestion_lag():
pass
-KNOWN_CELERY_TASK_IDENTIFIERS = {"pluginJob", "runEveryHour", "runEveryMinute", "runEveryDay"}
+KNOWN_CELERY_TASK_IDENTIFIERS = {
+ "pluginJob",
+ "runEveryHour",
+ "runEveryMinute",
+ "runEveryDay",
+}
@app.task(ignore_result=True)
@@ -588,7 +646,11 @@ def graphile_worker_queue_size():
seen_task_identifier.add(task_identifier)
waiting_jobs_gauge.labels(task_identifier=task_identifier).set(count)
processing_lag_gauge.labels(task_identifier=task_identifier).set(time.time() - float(oldest))
- statsd.gauge("graphile_waiting_jobs", count, tags={"task_identifier": task_identifier})
+ statsd.gauge(
+ "graphile_waiting_jobs",
+ count,
+ tags={"task_identifier": task_identifier},
+ )
# The query will not return rows for empty queues, creating missing points.
# Let's emit updates for known queues even if they are empty.
@@ -618,7 +680,11 @@ def clickhouse_row_count():
query = QUERY.format(table=table)
rows = sync_execute(query)[0][0]
row_count_gauge.labels(table_name=table).set(rows)
- statsd.gauge(f"posthog_celery_clickhouse_table_row_count", rows, tags={"table": table})
+ statsd.gauge(
+ f"posthog_celery_clickhouse_table_row_count",
+ rows,
+ tags={"table": table},
+ )
except:
pass
@@ -681,7 +747,11 @@ def clickhouse_part_count():
)
for table, parts in rows:
parts_count_gauge.labels(table=table).set(parts)
- statsd.gauge(f"posthog_celery_clickhouse_table_parts_count", parts, tags={"table": table})
+ statsd.gauge(
+ f"posthog_celery_clickhouse_table_parts_count",
+ parts,
+ tags={"table": table},
+ )
@app.task(ignore_result=True)
@@ -710,7 +780,11 @@ def clickhouse_mutation_count():
)
for table, muts in rows:
mutations_count_gauge.labels(table=table).set(muts)
- statsd.gauge(f"posthog_celery_clickhouse_table_mutations_count", muts, tags={"table": table})
+ statsd.gauge(
+ f"posthog_celery_clickhouse_table_mutations_count",
+ muts,
+ tags={"table": table},
+ )
@app.task(ignore_result=True)
@@ -739,7 +813,9 @@ def redis_celery_queue_depth():
try:
with pushed_metrics_registry("redis_celery_queue_depth_registry") as registry:
celery_task_queue_depth_gauge = Gauge(
- "posthog_celery_queue_depth", "We use this to monitor the depth of the celery queue.", registry=registry
+ "posthog_celery_queue_depth",
+ "We use this to monitor the depth of the celery queue.",
+ registry=registry,
)
llen = get_client().llen("celery")
@@ -767,7 +843,9 @@ def clean_stale_partials():
@app.task(ignore_result=True)
def monitoring_check_clickhouse_schema_drift():
- from posthog.tasks.check_clickhouse_schema_drift import check_clickhouse_schema_drift
+ from posthog.tasks.check_clickhouse_schema_drift import (
+ check_clickhouse_schema_drift,
+ )
check_clickhouse_schema_drift()
@@ -801,7 +879,11 @@ def update_cache_task(caching_state_id: UUID):
@app.task(ignore_result=True)
-def sync_insight_caching_state(team_id: int, insight_id: Optional[int] = None, dashboard_tile_id: Optional[int] = None):
+def sync_insight_caching_state(
+ team_id: int,
+ insight_id: Optional[int] = None,
+ dashboard_tile_id: Optional[int] = None,
+):
from posthog.caching.insight_caching_state import sync_insight_caching_state
sync_insight_caching_state(team_id, insight_id, dashboard_tile_id)
@@ -851,7 +933,9 @@ def calculate_decide_usage() -> None:
def find_flags_with_enriched_analytics():
from datetime import datetime, timedelta
- from posthog.models.feature_flag.flag_analytics import find_flags_with_enriched_analytics
+ from posthog.models.feature_flag.flag_analytics import (
+ find_flags_with_enriched_analytics,
+ )
end = datetime.now()
begin = end - timedelta(hours=12)
@@ -869,7 +953,9 @@ def demo_reset_master_team():
@app.task(ignore_result=True)
def sync_all_organization_available_features():
- from posthog.tasks.sync_all_organization_available_features import sync_all_organization_available_features
+ from posthog.tasks.sync_all_organization_available_features import (
+ sync_all_organization_available_features,
+ )
sync_all_organization_available_features()
@@ -883,7 +969,9 @@ def check_async_migration_health():
@app.task(ignore_result=True)
def verify_persons_data_in_sync():
- from posthog.tasks.verify_persons_data_in_sync import verify_persons_data_in_sync as verify
+ from posthog.tasks.verify_persons_data_in_sync import (
+ verify_persons_data_in_sync as verify,
+ )
if not is_cloud():
return
@@ -905,7 +993,9 @@ def recompute_materialized_columns_enabled() -> bool:
def clickhouse_materialize_columns():
if recompute_materialized_columns_enabled():
try:
- from ee.clickhouse.materialized_columns.analyze import materialize_properties_task
+ from ee.clickhouse.materialized_columns.analyze import (
+ materialize_properties_task,
+ )
except ImportError:
pass
else:
@@ -943,7 +1033,9 @@ def update_quota_limiting():
@app.task(ignore_result=True)
def schedule_all_subscriptions():
try:
- from ee.tasks.subscriptions import schedule_all_subscriptions as _schedule_all_subscriptions
+ from ee.tasks.subscriptions import (
+ schedule_all_subscriptions as _schedule_all_subscriptions,
+ )
except ImportError:
pass
else:
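
A minimal sketch of the deferred-import pattern these celery.py hunks keep reformatting: the import moves into a parenthesized block inside the task body. The Celery app below is a placeholder, not PostHog's configured instance; only the imported module path is copied from the hunk above.

from celery import Celery

app = Celery("sketch")  # placeholder app for illustration

@app.task(ignore_result=True)
def sync_available_features() -> None:
    # Importing inside the task keeps worker startup light and avoids import
    # cycles; the parenthesized form is what the reflow above produces.
    from posthog.tasks.sync_all_organization_available_features import (
        sync_all_organization_available_features,
    )

    sync_all_organization_available_features()
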
diff --git a/posthog/clickhouse/client/connection.py b/posthog/clickhouse/client/connection.py
index 8cf665d857c60..fbbfd08086822 100644
--- a/posthog/clickhouse/client/connection.py
+++ b/posthog/clickhouse/client/connection.py
@@ -30,7 +30,10 @@ def get_pool(workload: Workload, team_id=None, readonly=False):
# Note that `readonly` does nothing if the relevant vars are not set!
if readonly and settings.READONLY_CLICKHOUSE_USER is not None and settings.READONLY_CLICKHOUSE_PASSWORD:
- return make_ch_pool(user=settings.READONLY_CLICKHOUSE_USER, password=settings.READONLY_CLICKHOUSE_PASSWORD)
+ return make_ch_pool(
+ user=settings.READONLY_CLICKHOUSE_USER,
+ password=settings.READONLY_CLICKHOUSE_PASSWORD,
+ )
if (
workload == Workload.OFFLINE or workload == Workload.DEFAULT and _default_workload == Workload.OFFLINE
diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py
index 60cad345fcaa7..5f039c78c19f9 100644
--- a/posthog/clickhouse/client/execute.py
+++ b/posthog/clickhouse/client/execute.py
@@ -40,7 +40,10 @@
@lru_cache(maxsize=1)
def default_settings() -> Dict:
- return {"join_algorithm": "direct,parallel_hash", "distributed_replica_max_ignored_errors": 1000}
+ return {
+ "join_algorithm": "direct,parallel_hash",
+ "distributed_replica_max_ignored_errors": 1000,
+ }
@lru_cache(maxsize=1)
@@ -81,7 +84,7 @@ def sync_execute(
from posthog.test.base import flush_persons_and_events
flush_persons_and_events()
- except ModuleNotFoundError: # when we run plugin server tests it tries to run above, ignore
+ except (ModuleNotFoundError): # when we run plugin server tests it tries to run above, ignore
pass
with get_pool(workload, team_id, readonly).get_client() as client:
@@ -91,7 +94,10 @@ def sync_execute(
query_id = validated_client_query_id()
core_settings = {**default_settings(), **(settings or {})}
tags["query_settings"] = core_settings
- settings = {**core_settings, "log_comment": json.dumps(tags, separators=(",", ":"))}
+ settings = {
+ **core_settings,
+ "log_comment": json.dumps(tags, separators=(",", ":")),
+ }
try:
result = client.execute(
prepared_sql,
@@ -102,7 +108,10 @@ def sync_execute(
)
except Exception as err:
err = wrap_query_error(err)
- statsd.incr("clickhouse_sync_execution_failure", tags={"failed": True, "reason": type(err).__name__})
+ statsd.incr(
+ "clickhouse_sync_execution_failure",
+ tags={"failed": True, "reason": type(err).__name__},
+ )
raise err
finally:
@@ -147,7 +156,12 @@ def query_with_columns(
@patchable
-def _prepare_query(client: SyncClient, query: str, args: QueryArgs, workload: Workload = Workload.DEFAULT):
+def _prepare_query(
+ client: SyncClient,
+ query: str,
+ args: QueryArgs,
+ workload: Workload = Workload.DEFAULT,
+):
"""
Given a string query with placeholders we do one of two things:
@@ -219,7 +233,9 @@ def format_sql(rendered_sql, colorize=True):
import pygments.lexers
return pygments.highlight(
- formatted_sql, pygments.lexers.get_lexer_by_name("sql"), pygments.formatters.TerminalFormatter()
+ formatted_sql,
+ pygments.lexers.get_lexer_by_name("sql"),
+ pygments.formatters.TerminalFormatter(),
)
except:
pass
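
A sketch of the settings merge that the execute.py hunk above reflows: per-query tags are serialized compactly and attached as ClickHouse's log_comment setting so the query can later be located in system.query_log. The tag values here are invented for illustration; only the merge-and-serialize shape comes from the hunk.

import json

core_settings = {
    "join_algorithm": "direct,parallel_hash",
    "distributed_replica_max_ignored_errors": 1000,
}
tags = {"kind": "request", "id": "insight/trends", "query_settings": core_settings}  # illustrative tags

settings = {
    **core_settings,
    # compact separators keep the comment short, matching the hunk above
    "log_comment": json.dumps(tags, separators=(",", ":")),
}
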
diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py
index 89de42427f568..3bb28c3f20075 100644
--- a/posthog/clickhouse/client/execute_async.py
+++ b/posthog/clickhouse/client/execute_async.py
@@ -49,7 +49,14 @@ def generate_redis_results_key(query_id):
def execute_with_progress(
- team_id, query_id, query, args=None, settings=None, with_column_types=False, update_freq=0.2, task_id=None
+ team_id,
+ query_id,
+ query,
+ args=None,
+ settings=None,
+ with_column_types=False,
+ update_freq=0.2,
+ task_id=None,
):
"""
Kick off query with progress reporting
@@ -81,7 +88,10 @@ def execute_with_progress(
try:
progress = ch_client.execute_with_progress(
- prepared_sql, params=prepared_args, settings=settings, with_column_types=with_column_types
+ prepared_sql,
+ params=prepared_args,
+ settings=settings,
+ with_column_types=with_column_types,
)
for num_rows, total_rows in progress:
query_status = QueryStatus(
@@ -145,7 +155,14 @@ def execute_with_progress(
def enqueue_execute_with_progress(
- team_id, query, args=None, settings=None, with_column_types=False, bypass_celery=False, query_id=None, force=False
+ team_id,
+ query,
+ args=None,
+ settings=None,
+ with_column_types=False,
+ bypass_celery=False,
+ query_id=None,
+ force=False,
):
if not query_id:
query_id = _query_hash(query, team_id, args)
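
The wrapped signatures above imply a call shape like the following; this is only a usage sketch with placeholder values, reusing the import path shown earlier in the celery.py hunk.

from posthog.client import execute_with_progress

execute_with_progress(
    team_id=1,          # placeholder
    query_id="ad-hoc",  # placeholder
    query="SELECT count() FROM events WHERE team_id = %(team_id)s",
    args={"team_id": 1},
    settings=None,
    with_column_types=False,
    task_id=None,  # set by the enqueuing Celery task in the hunk above
)
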
diff --git a/posthog/clickhouse/client/test/test_connection.py b/posthog/clickhouse/client/test/test_connection.py
index d40e544bf16fc..e05a87b84e60c 100644
--- a/posthog/clickhouse/client/test/test_connection.py
+++ b/posthog/clickhouse/client/test/test_connection.py
@@ -1,6 +1,11 @@
import pytest
-from posthog.clickhouse.client.connection import Workload, get_pool, make_ch_pool, set_default_clickhouse_workload_type
+from posthog.clickhouse.client.connection import (
+ Workload,
+ get_pool,
+ make_ch_pool,
+ set_default_clickhouse_workload_type,
+)
def test_connection_pool_creation_without_offline_cluster(settings):
diff --git a/posthog/clickhouse/dead_letter_queue.py b/posthog/clickhouse/dead_letter_queue.py
index 53896bbfa6869..298d99e4ed88b 100644
--- a/posthog/clickhouse/dead_letter_queue.py
+++ b/posthog/clickhouse/dead_letter_queue.py
@@ -86,7 +86,9 @@
_offset
FROM {database}.kafka_{table_name}
""".format(
- table_name=DEAD_LETTER_QUEUE_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
+ table_name=DEAD_LETTER_QUEUE_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ database=CLICKHOUSE_DATABASE,
)
diff --git a/posthog/clickhouse/log_entries.py b/posthog/clickhouse/log_entries.py
index 017ee408aea44..471ca18eac7fb 100644
--- a/posthog/clickhouse/log_entries.py
+++ b/posthog/clickhouse/log_entries.py
@@ -69,7 +69,9 @@
_offset
FROM {database}.kafka_{table_name}
""".format(
- table_name=LOG_ENTRIES_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
+ table_name=LOG_ENTRIES_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ database=CLICKHOUSE_DATABASE,
)
diff --git a/posthog/clickhouse/migrations/0003_person.py b/posthog/clickhouse/migrations/0003_person.py
index d780fd855ec8b..ccdcf428de43b 100644
--- a/posthog/clickhouse/migrations/0003_person.py
+++ b/posthog/clickhouse/migrations/0003_person.py
@@ -1,5 +1,9 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.person.sql import COMMENT_DISTINCT_ID_COLUMN_SQL, PERSONS_DISTINCT_ID_TABLE_SQL, PERSONS_TABLE_SQL
+from posthog.models.person.sql import (
+ COMMENT_DISTINCT_ID_COLUMN_SQL,
+ PERSONS_DISTINCT_ID_TABLE_SQL,
+ PERSONS_TABLE_SQL,
+)
operations = [
run_sql_with_exceptions(PERSONS_TABLE_SQL()),
diff --git a/posthog/clickhouse/migrations/0004_kafka.py b/posthog/clickhouse/migrations/0004_kafka.py
index 5243e206bd098..857398c2a3cc7 100644
--- a/posthog/clickhouse/migrations/0004_kafka.py
+++ b/posthog/clickhouse/migrations/0004_kafka.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.event.sql import DISTRIBUTED_EVENTS_TABLE_SQL, WRITABLE_EVENTS_TABLE_SQL
+from posthog.models.event.sql import (
+ DISTRIBUTED_EVENTS_TABLE_SQL,
+ WRITABLE_EVENTS_TABLE_SQL,
+)
from posthog.models.person.sql import (
KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL,
KAFKA_PERSONS_TABLE_SQL,
diff --git a/posthog/clickhouse/migrations/0012_person_id_deleted_column.py b/posthog/clickhouse/migrations/0012_person_id_deleted_column.py
index ef324ce2417f9..40a3a0a0ef4f6 100644
--- a/posthog/clickhouse/migrations/0012_person_id_deleted_column.py
+++ b/posthog/clickhouse/migrations/0012_person_id_deleted_column.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.person.sql import KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL, PERSONS_DISTINCT_ID_TABLE_MV_SQL
+from posthog.models.person.sql import (
+ KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL,
+ PERSONS_DISTINCT_ID_TABLE_MV_SQL,
+)
from posthog.settings import CLICKHOUSE_CLUSTER
operations = [
diff --git a/posthog/clickhouse/migrations/0018_group_analytics_schema.py b/posthog/clickhouse/migrations/0018_group_analytics_schema.py
index 69e923f8b7989..05cf74d0c24ae 100644
--- a/posthog/clickhouse/migrations/0018_group_analytics_schema.py
+++ b/posthog/clickhouse/migrations/0018_group_analytics_schema.py
@@ -1,5 +1,9 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.group.sql import GROUPS_TABLE_MV_SQL, GROUPS_TABLE_SQL, KAFKA_GROUPS_TABLE_SQL
+from posthog.models.group.sql import (
+ GROUPS_TABLE_MV_SQL,
+ GROUPS_TABLE_SQL,
+ KAFKA_GROUPS_TABLE_SQL,
+)
operations = [
run_sql_with_exceptions(GROUPS_TABLE_SQL()),
diff --git a/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py b/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py
index f34752a660a28..cce6212290056 100644
--- a/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py
+++ b/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.clickhouse.dead_letter_queue import DEAD_LETTER_QUEUE_TABLE_MV_SQL, KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL
+from posthog.clickhouse.dead_letter_queue import (
+ DEAD_LETTER_QUEUE_TABLE_MV_SQL,
+ KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL,
+)
from posthog.settings import CLICKHOUSE_CLUSTER
operations = [
diff --git a/posthog/clickhouse/migrations/0025_json_events.py b/posthog/clickhouse/migrations/0025_json_events.py
index 1dd452dff732e..fd8056b227123 100644
--- a/posthog/clickhouse/migrations/0025_json_events.py
+++ b/posthog/clickhouse/migrations/0025_json_events.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL
+from posthog.models.event.sql import (
+ EVENTS_TABLE_JSON_MV_SQL,
+ KAFKA_EVENTS_TABLE_JSON_SQL,
+)
operations = [
run_sql_with_exceptions(KAFKA_EVENTS_TABLE_JSON_SQL()),
diff --git a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
index d2fc6a7d4bac9..b27c8ad29f59a 100644
--- a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
+++ b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
@@ -1,6 +1,9 @@
from infi.clickhouse_orm import migrations
-from posthog.clickhouse.materialized_columns import get_materialized_columns, materialize
+from posthog.clickhouse.materialized_columns import (
+ get_materialized_columns,
+ materialize,
+)
from posthog.client import sync_execute
from posthog.settings import CLICKHOUSE_CLUSTER
@@ -38,7 +41,6 @@ def ensure_only_new_column_exists(database, table_name, old_column_name, new_col
def materialize_session_and_window_id(database):
-
properties = ["$session_id", "$window_id"]
for property_name in properties:
materialized_columns = get_materialized_columns("events", use_cache=False)
diff --git a/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py b/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py
index 500d2e1184f4b..534a2d6dbf01c 100644
--- a/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py
+++ b/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py
@@ -2,7 +2,10 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
from posthog.client import sync_execute
-from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL
+from posthog.models.event.sql import (
+ EVENTS_TABLE_JSON_MV_SQL,
+ KAFKA_EVENTS_TABLE_JSON_SQL,
+)
from posthog.settings import CLICKHOUSE_CLUSTER
ADD_COLUMNS_BASE_SQL = """
diff --git a/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py b/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py
index fd8676e47b74e..ff7746fe8e326 100644
--- a/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py
+++ b/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.clickhouse.dead_letter_queue import DEAD_LETTER_QUEUE_TABLE_MV_SQL, KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL
+from posthog.clickhouse.dead_letter_queue import (
+ DEAD_LETTER_QUEUE_TABLE_MV_SQL,
+ KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL,
+)
from posthog.settings.data_stores import CLICKHOUSE_CLUSTER
operations = [
diff --git a/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py b/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py
index a68f39422ab65..254ff78a531ff 100644
--- a/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py
+++ b/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py
@@ -2,7 +2,10 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
from posthog.client import sync_execute
-from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL
+from posthog.models.event.sql import (
+ EVENTS_TABLE_JSON_MV_SQL,
+ KAFKA_EVENTS_TABLE_JSON_SQL,
+)
from posthog.settings import CLICKHOUSE_CLUSTER
ADD_COLUMNS_BASE_SQL = """
diff --git a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
index be819a0111a01..d6705db02eb33 100644
--- a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
+++ b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
@@ -1,12 +1,13 @@
from infi.clickhouse_orm import migrations
from posthog.client import sync_execute
-from posthog.session_recordings.sql.session_recording_event_sql import MATERIALIZED_COLUMNS
+from posthog.session_recordings.sql.session_recording_event_sql import (
+ MATERIALIZED_COLUMNS,
+)
from posthog.settings import CLICKHOUSE_CLUSTER
def create_events_summary_mat_columns(database):
-
columns_to_add = [
"events_summary",
"click_count",
diff --git a/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py b/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py
index afc8e898f1327..1a588a1092474 100644
--- a/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py
+++ b/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py
@@ -1,5 +1,8 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.kafka_client.topics import KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, KAFKA_SESSION_RECORDING_EVENTS
+from posthog.kafka_client.topics import (
+ KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW,
+ KAFKA_SESSION_RECORDING_EVENTS,
+)
from posthog.models.kafka_partition_stats.sql import (
CREATE_PARTITION_STATISTICS_KAFKA_TABLE,
CREATE_PARTITION_STATISTICS_MV,
diff --git a/posthog/clickhouse/plugin_log_entries.py b/posthog/clickhouse/plugin_log_entries.py
index cb03e34eb3471..1f4f7c70d7146 100644
--- a/posthog/clickhouse/plugin_log_entries.py
+++ b/posthog/clickhouse/plugin_log_entries.py
@@ -61,7 +61,9 @@
_offset
FROM {database}.kafka_{table_name}
""".format(
- table_name=PLUGIN_LOG_ENTRIES_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
+ table_name=PLUGIN_LOG_ENTRIES_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ database=CLICKHOUSE_DATABASE,
)
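
dead_letter_queue.py, log_entries.py and plugin_log_entries.py all reflow the same keyword-style .format() call on a Kafka materialized-view template. A minimal sketch of that substitution follows; the template body is invented for illustration, and the cluster/database values are placeholders — only the keyword style mirrors the hunks.

KAFKA_MV_TEMPLATE = """
CREATE MATERIALIZED VIEW IF NOT EXISTS {table_name}_mv ON CLUSTER '{cluster}'
TO {database}.{table_name}
AS SELECT *, _timestamp, _offset FROM {database}.kafka_{table_name}
"""

sql = KAFKA_MV_TEMPLATE.format(
    table_name="plugin_log_entries",
    cluster="posthog",   # illustrative cluster name
    database="default",  # illustrative database name
)
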
diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py
index 2317e41c39e1d..bd9bd22f427c6 100644
--- a/posthog/clickhouse/system_status.py
+++ b/posthog/clickhouse/system_status.py
@@ -6,10 +6,17 @@
from dateutil.relativedelta import relativedelta
from django.utils import timezone
-from posthog.api.dead_letter_queue import get_dead_letter_queue_events_last_24h, get_dead_letter_queue_size
+from posthog.api.dead_letter_queue import (
+ get_dead_letter_queue_events_last_24h,
+ get_dead_letter_queue_size,
+)
from posthog.cache_utils import cache_for
from posthog.client import query_with_columns, sync_execute
-from posthog.models.event.util import get_event_count, get_event_count_for_last_month, get_event_count_month_to_date
+from posthog.models.event.util import (
+ get_event_count,
+ get_event_count_for_last_month,
+ get_event_count_month_to_date,
+)
from posthog.session_recordings.models.system_status_queries import (
get_recording_status_month_to_date,
)
@@ -25,12 +32,20 @@
def system_status() -> Generator[SystemStatusRow, None, None]:
alive = is_alive()
- yield {"key": "clickhouse_alive", "metric": "Clickhouse database alive", "value": alive}
+ yield {
+ "key": "clickhouse_alive",
+ "metric": "Clickhouse database alive",
+ "value": alive,
+ }
if not alive:
return
- yield {"key": "clickhouse_event_count", "metric": "Events in ClickHouse", "value": get_event_count()}
+ yield {
+ "key": "clickhouse_event_count",
+ "metric": "Events in ClickHouse",
+ "value": get_event_count(),
+ }
yield {
"key": "clickhouse_event_count_last_month",
"metric": "Events recorded last month",
@@ -67,8 +82,16 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
for index, (total_space, free_space) in enumerate(disk_status):
metric = "Clickhouse disk" if len(disk_status) == 1 else f"Clickhouse disk {index}"
- yield {"key": f"clickhouse_disk_{index}_free_space", "metric": f"{metric} free space", "value": free_space}
- yield {"key": f"clickhouse_disk_{index}_total_space", "metric": f"{metric} total space", "value": total_space}
+ yield {
+ "key": f"clickhouse_disk_{index}_free_space",
+ "metric": f"{metric} free space",
+ "value": free_space,
+ }
+ yield {
+ "key": f"clickhouse_disk_{index}_total_space",
+ "metric": f"{metric} total space",
+ "value": total_space,
+ }
table_sizes = sync_execute(
"""
@@ -97,7 +120,10 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
"key": "clickhouse_system_metrics",
"metric": "Clickhouse system metrics",
"value": "",
- "subrows": {"columns": ["Metric", "Value", "Description"], "rows": list(sorted(system_metrics))},
+ "subrows": {
+ "columns": ["Metric", "Value", "Description"],
+ "rows": list(sorted(system_metrics)),
+ },
}
# This timestamp is a naive timestamp (does not include a timezone)
@@ -121,9 +147,16 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
dead_letter_queue_size = get_dead_letter_queue_size()
- yield {"key": "dead_letter_queue_size", "metric": "Dead letter queue size", "value": dead_letter_queue_size}
+ yield {
+ "key": "dead_letter_queue_size",
+ "metric": "Dead letter queue size",
+ "value": dead_letter_queue_size,
+ }
- dead_letter_queue_events_high, dead_letter_queue_events_last_day = dead_letter_queue_ratio()
+ (
+ dead_letter_queue_events_high,
+ dead_letter_queue_events_last_day,
+ ) = dead_letter_queue_ratio()
yield {
"key": "dead_letter_queue_events_last_day",
diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py
index f0d33c7d617f4..dc8bc2b17c503 100644
--- a/posthog/clickhouse/test/test_person_overrides.py
+++ b/posthog/clickhouse/test/test_person_overrides.py
@@ -81,7 +81,14 @@ def test_can_insert_person_overrides():
assert results != []
[result] = results
created_at, *the_rest = result
- assert the_rest == [1, old_person_id, override_person_id, oldest_event, merged_at, 2]
+ assert the_rest == [
+ 1,
+ old_person_id,
+ override_person_id,
+ oldest_event,
+ merged_at,
+ 2,
+ ]
assert created_at > datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=10)
finally:
producer.close()
@@ -124,7 +131,8 @@ def test_person_overrides_dict():
sync_execute("INSERT INTO person_overrides (*) VALUES", [values])
sync_execute("SYSTEM RELOAD DICTIONARY person_overrides_dict")
results = sync_execute(
- "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", values
+ "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))",
+ values,
)
assert len(results) == 1
@@ -136,7 +144,8 @@ def test_person_overrides_dict():
sync_execute("INSERT INTO person_overrides (*) VALUES", [values])
sync_execute("SYSTEM RELOAD DICTIONARY person_overrides_dict")
new_results = sync_execute(
- "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", values
+ "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))",
+ values,
)
assert len(new_results) == 1
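
The two-argument sync_execute(sql, params) calls wrapped above all use ClickHouse's %(name)s placeholder convention; a small sketch with an illustrative filter, reusing the import path seen elsewhere in this diff.

from posthog.client import sync_execute

rows = sync_execute(
    "SELECT count() FROM person_overrides WHERE team_id = %(team_id)s",
    {"team_id": 1},  # placeholder team id
)
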
diff --git a/posthog/conftest.py b/posthog/conftest.py
index 06e7e256aed79..2b819ff9390ad 100644
--- a/posthog/conftest.py
+++ b/posthog/conftest.py
@@ -11,7 +11,11 @@
def create_clickhouse_tables(num_tables: int):
# Create clickhouse tables to default before running test
# Mostly so that test runs locally work correctly
- from posthog.clickhouse.schema import CREATE_DISTRIBUTED_TABLE_QUERIES, CREATE_MERGETREE_TABLE_QUERIES, build_query
+ from posthog.clickhouse.schema import (
+ CREATE_DISTRIBUTED_TABLE_QUERIES,
+ CREATE_MERGETREE_TABLE_QUERIES,
+ build_query,
+ )
# REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY!
CREATE_TABLE_QUERIES: Tuple[Any, ...] = CREATE_MERGETREE_TABLE_QUERIES + CREATE_DISTRIBUTED_TABLE_QUERIES
@@ -27,8 +31,12 @@ def create_clickhouse_tables(num_tables: int):
def reset_clickhouse_tables():
# Truncate clickhouse tables to default before running test
# Mostly so that test runs locally work correctly
- from posthog.clickhouse.dead_letter_queue import TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL
- from posthog.clickhouse.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL
+ from posthog.clickhouse.dead_letter_queue import (
+ TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL,
+ )
+ from posthog.clickhouse.plugin_log_entries import (
+ TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL,
+ )
from posthog.models.app_metrics.sql import TRUNCATE_APP_METRICS_TABLE_SQL
from posthog.models.cohort.sql import TRUNCATE_COHORTPEOPLE_TABLE_SQL
from posthog.models.event.sql import TRUNCATE_EVENTS_TABLE_SQL
@@ -40,7 +48,9 @@ def reset_clickhouse_tables():
TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL,
TRUNCATE_PERSON_TABLE_SQL,
)
- from posthog.session_recordings.sql.session_recording_event_sql import TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL
+ from posthog.session_recordings.sql.session_recording_event_sql import (
+ TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL,
+ )
# REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY!
TABLES_TO_CREATE_DROP = [
@@ -80,7 +90,8 @@ def django_db_setup(django_db_setup, django_db_keepdb):
database.create_database() # Create database if it doesn't exist
table_count = sync_execute(
- "SELECT count() FROM system.tables WHERE database = %(database)s", {"database": settings.CLICKHOUSE_DATABASE}
+ "SELECT count() FROM system.tables WHERE database = %(database)s",
+ {"database": settings.CLICKHOUSE_DATABASE},
)[0][0]
create_clickhouse_tables(table_count)
diff --git a/posthog/constants.py b/posthog/constants.py
index 3beb8ca12b3b9..ecfeb03e1259f 100644
--- a/posthog/constants.py
+++ b/posthog/constants.py
@@ -55,7 +55,13 @@ class AvailableFeature(str, Enum):
TRENDS_BOLD_NUMBER = "BoldNumber"
# Sync with frontend NON_TIME_SERIES_DISPLAY_TYPES
-NON_TIME_SERIES_DISPLAY_TYPES = [TRENDS_TABLE, TRENDS_PIE, TRENDS_BAR_VALUE, TRENDS_WORLD_MAP, TRENDS_BOLD_NUMBER]
+NON_TIME_SERIES_DISPLAY_TYPES = [
+ TRENDS_TABLE,
+ TRENDS_PIE,
+ TRENDS_BAR_VALUE,
+ TRENDS_WORLD_MAP,
+ TRENDS_BOLD_NUMBER,
+]
# Sync with frontend NON_BREAKDOWN_DISPLAY_TYPES
NON_BREAKDOWN_DISPLAY_TYPES = [TRENDS_BOLD_NUMBER]
diff --git a/posthog/demo/legacy/app_data_generator.py b/posthog/demo/legacy/app_data_generator.py
index 51a12e3d486ff..56f7c3ebcecc2 100644
--- a/posthog/demo/legacy/app_data_generator.py
+++ b/posthog/demo/legacy/app_data_generator.py
@@ -50,7 +50,12 @@ def create_actions_dashboards(self):
"order": 0,
"type": TREND_FILTER_TYPE_ACTIONS,
},
- {"id": rated_app_action.id, "name": "Rated App", "order": 1, "type": TREND_FILTER_TYPE_ACTIONS},
+ {
+ "id": rated_app_action.id,
+ "name": "Rated App",
+ "order": 1,
+ "type": TREND_FILTER_TYPE_ACTIONS,
+ },
{
"id": rated_app_action.id,
"name": "Rated App",
@@ -68,8 +73,16 @@ def create_actions_dashboards(self):
def populate_person_events(self, person: Person, distinct_id: str, _index: int):
start_day = random.randint(1, self.n_days)
- self.add_event(event="$pageview", distinct_id=distinct_id, timestamp=now() - relativedelta(days=start_day))
- self.add_event(event="installed_app", distinct_id=distinct_id, timestamp=now() - relativedelta(days=start_day))
+ self.add_event(
+ event="$pageview",
+ distinct_id=distinct_id,
+ timestamp=now() - relativedelta(days=start_day),
+ )
+ self.add_event(
+ event="installed_app",
+ distinct_id=distinct_id,
+ timestamp=now() - relativedelta(days=start_day),
+ )
if random.randint(0, 10) <= 9:
self.add_event(
diff --git a/posthog/demo/legacy/data_generator.py b/posthog/demo/legacy/data_generator.py
index 65bdd350acc88..ccc9f163e6c3c 100644
--- a/posthog/demo/legacy/data_generator.py
+++ b/posthog/demo/legacy/data_generator.py
@@ -3,7 +3,9 @@
from posthog.models import Person, PersonDistinctId, Team
from posthog.models.utils import UUIDT
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
class DataGenerator:
diff --git a/posthog/demo/legacy/revenue_data_generator.py b/posthog/demo/legacy/revenue_data_generator.py
index 641cd30f5250e..2fa4901389eb0 100644
--- a/posthog/demo/legacy/revenue_data_generator.py
+++ b/posthog/demo/legacy/revenue_data_generator.py
@@ -29,7 +29,9 @@ def create_missing_events_and_properties(self):
def populate_person_events(self, person: Person, distinct_id: str, index: int):
if random.randint(0, 10) <= 4:
self.add_event(
- event="entered_free_trial", distinct_id=distinct_id, timestamp=now() - relativedelta(days=345)
+ event="entered_free_trial",
+ distinct_id=distinct_id,
+ timestamp=now() - relativedelta(days=345),
)
self.add_event(
@@ -68,7 +70,14 @@ def create_actions_dashboards(self):
team=self.team,
name="Entered Free Trial -> Purchase (Premium)",
filters={
- "events": [{"id": "$pageview", "name": "Pageview", "order": 0, "type": TREND_FILTER_TYPE_ACTIONS}],
+ "events": [
+ {
+ "id": "$pageview",
+ "name": "Pageview",
+ "order": 0,
+ "type": TREND_FILTER_TYPE_ACTIONS,
+ }
+ ],
"actions": [
{
"id": purchase_action.id,
diff --git a/posthog/demo/legacy/web_data_generator.py b/posthog/demo/legacy/web_data_generator.py
index e74ddc53bfe99..aa0836d3db732 100644
--- a/posthog/demo/legacy/web_data_generator.py
+++ b/posthog/demo/legacy/web_data_generator.py
@@ -7,7 +7,15 @@
from django.utils.timezone import now
from posthog.constants import TREND_FILTER_TYPE_ACTIONS
-from posthog.models import Action, ActionStep, Dashboard, DashboardTile, Insight, Person, PropertyDefinition
+from posthog.models import (
+ Action,
+ ActionStep,
+ Dashboard,
+ DashboardTile,
+ Insight,
+ Person,
+ PropertyDefinition,
+)
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.utils import UUIDT
from posthog.utils import get_absolute_path
@@ -27,7 +35,12 @@ def create_missing_events_and_properties(self):
def create_actions_dashboards(self):
homepage = Action.objects.create(team=self.team, name="Hogflix homepage view")
- ActionStep.objects.create(action=homepage, event="$pageview", url="http://hogflix.com", url_matching="exact")
+ ActionStep.objects.create(
+ action=homepage,
+ event="$pageview",
+ url="http://hogflix.com",
+ url_matching="exact",
+ )
user_signed_up = Action.objects.create(team=self.team, name="Hogflix signed up")
ActionStep.objects.create(
@@ -54,14 +67,24 @@ def create_actions_dashboards(self):
description="Shows a conversion funnel from sign up to watching a movie.",
filters={
"actions": [
- {"id": homepage.id, "name": "Hogflix homepage view", "order": 0, "type": TREND_FILTER_TYPE_ACTIONS},
+ {
+ "id": homepage.id,
+ "name": "Hogflix homepage view",
+ "order": 0,
+ "type": TREND_FILTER_TYPE_ACTIONS,
+ },
{
"id": user_signed_up.id,
"name": "Hogflix signed up",
"order": 1,
"type": TREND_FILTER_TYPE_ACTIONS,
},
- {"id": user_paid.id, "name": "Hogflix paid", "order": 2, "type": TREND_FILTER_TYPE_ACTIONS},
+ {
+ "id": user_paid.id,
+ "name": "Hogflix paid",
+ "order": 2,
+ "type": TREND_FILTER_TYPE_ACTIONS,
+ },
],
"insight": "FUNNELS",
},
@@ -77,7 +100,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int):
event="$pageview",
distinct_id=distinct_id,
timestamp=now() - relativedelta(days=start_day),
- properties={"$current_url": "http://hogflix.com", "$browser": browser, "$lib": "web"},
+ properties={
+ "$current_url": "http://hogflix.com",
+ "$browser": browser,
+ "$lib": "web",
+ },
)
self.add_event(
@@ -107,7 +134,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int):
self.add_event(
event="$pageview",
distinct_id=distinct_id,
- properties={"$current_url": "http://hogflix.com/2", "$browser": browser, "$lib": "web"},
+ properties={
+ "$current_url": "http://hogflix.com/2",
+ "$browser": browser,
+ "$lib": "web",
+ },
timestamp=now() - relativedelta(days=start_day) + relativedelta(seconds=30),
)
if index % 5 == 0:
@@ -131,7 +162,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int):
self.add_event(
event="$pageview",
distinct_id=distinct_id,
- properties={"$current_url": "http://hogflix.com/3", "$browser": browser, "$lib": "web"},
+ properties={
+ "$current_url": "http://hogflix.com/3",
+ "$browser": browser,
+ "$lib": "web",
+ },
timestamp=now() - relativedelta(days=start_day) + relativedelta(seconds=60),
)
diff --git a/posthog/demo/matrix/manager.py b/posthog/demo/matrix/manager.py
index 8b13bd78c2b24..c174b2c782bc9 100644
--- a/posthog/demo/matrix/manager.py
+++ b/posthog/demo/matrix/manager.py
@@ -66,7 +66,12 @@ def ensure_account_and_save(
with transaction.atomic():
organization = Organization.objects.create(**organization_kwargs)
new_user = User.objects.create_and_join(
- organization, email, password, first_name, OrganizationMembership.Level.ADMIN, is_staff=is_staff
+ organization,
+ email,
+ password,
+ first_name,
+ OrganizationMembership.Level.ADMIN,
+ is_staff=is_staff,
)
team = self.create_team(organization)
self.run_on_team(team, new_user)
@@ -99,7 +104,11 @@ def reset_master(self):
@staticmethod
def create_team(organization: Organization, **kwargs) -> Team:
team = Team.objects.create(
- organization=organization, ingested_event=True, completed_snippet_onboarding=True, is_demo=True, **kwargs
+ organization=organization,
+ ingested_event=True,
+ completed_snippet_onboarding=True,
+ is_demo=True,
+ **kwargs,
)
return team
@@ -132,11 +141,19 @@ def _save_analytics_data(self, data_team: Team):
for group_type_index, (group_type, groups) in enumerate(self.matrix.groups.items()):
group_type_index += self.matrix.group_type_index_offset # Adjust
bulk_group_type_mappings.append(
- GroupTypeMapping(team=data_team, group_type_index=group_type_index, group_type=group_type)
+ GroupTypeMapping(
+ team=data_team,
+ group_type_index=group_type_index,
+ group_type=group_type,
+ )
)
for group_key, group in groups.items():
self._save_sim_group(
- data_team, cast(Literal[0, 1, 2, 3, 4], group_type_index), group_key, group, self.matrix.now
+ data_team,
+ cast(Literal[0, 1, 2, 3, 4], group_type_index),
+ group_key,
+ group,
+ self.matrix.now,
)
try:
GroupTypeMapping.objects.bulk_create(bulk_group_type_mappings)
@@ -164,16 +181,28 @@ def _create_master_team(cls) -> Team:
@classmethod
def _erase_master_team_data(cls):
AsyncEventDeletion().process(
- [AsyncDeletion(team_id=cls.MASTER_TEAM_ID, key=cls.MASTER_TEAM_ID, deletion_type=DeletionType.Team)]
+ [
+ AsyncDeletion(
+ team_id=cls.MASTER_TEAM_ID,
+ key=cls.MASTER_TEAM_ID,
+ deletion_type=DeletionType.Team,
+ )
+ ]
)
GroupTypeMapping.objects.filter(team_id=cls.MASTER_TEAM_ID).delete()
def _copy_analytics_data_from_master_team(self, target_team: Team):
from posthog.models.event.sql import COPY_EVENTS_BETWEEN_TEAMS
from posthog.models.group.sql import COPY_GROUPS_BETWEEN_TEAMS
- from posthog.models.person.sql import COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, COPY_PERSONS_BETWEEN_TEAMS
+ from posthog.models.person.sql import (
+ COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS,
+ COPY_PERSONS_BETWEEN_TEAMS,
+ )
- copy_params = {"source_team_id": self.MASTER_TEAM_ID, "target_team_id": target_team.pk}
+ copy_params = {
+ "source_team_id": self.MASTER_TEAM_ID,
+ "target_team_id": target_team.pk,
+ }
sync_execute(COPY_PERSONS_BETWEEN_TEAMS, copy_params)
sync_execute(COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, copy_params)
sync_execute(COPY_EVENTS_BETWEEN_TEAMS, copy_params)
@@ -191,7 +220,10 @@ def _copy_analytics_data_from_master_team(self, target_team: Team):
@classmethod
def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id: int):
from posthog.models.group.sql import SELECT_GROUPS_OF_TEAM
- from posthog.models.person.sql import SELECT_PERSON_DISTINCT_ID2S_OF_TEAM, SELECT_PERSONS_OF_TEAM
+ from posthog.models.person.sql import (
+ SELECT_PERSON_DISTINCT_ID2S_OF_TEAM,
+ SELECT_PERSONS_OF_TEAM,
+ )
list_params = {"source_team_id": source_team_id}
# Persons
@@ -220,7 +252,11 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id
person_uuid = row.pop("person_uuid")
try:
bulk_person_distinct_ids.append(
- PersonDistinctId(team_id=target_team_id, person_id=bulk_persons[person_uuid].pk, **row)
+ PersonDistinctId(
+ team_id=target_team_id,
+ person_id=bulk_persons[person_uuid].pk,
+ **row,
+ )
)
except KeyError:
pre_existing_id_count -= 1
@@ -232,7 +268,14 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id
bulk_groups = []
for row in clickhouse_groups:
group_properties = json.loads(row.pop("group_properties", "{}"))
- bulk_groups.append(Group(team_id=target_team_id, version=0, group_properties=group_properties, **row))
+ bulk_groups.append(
+ Group(
+ team_id=target_team_id,
+ version=0,
+ group_properties=group_properties,
+ **row,
+ )
+ )
try:
Group.objects.bulk_create(bulk_groups)
except IntegrityError as e:
@@ -241,16 +284,24 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id
def _save_sim_person(self, team: Team, subject: SimPerson):
# We only want to save directly if there are past events
if subject.past_events:
- from posthog.models.person.util import create_person, create_person_distinct_id
+ from posthog.models.person.util import (
+ create_person,
+ create_person_distinct_id,
+ )
create_person(
- uuid=str(subject.in_posthog_id), team_id=team.pk, properties=subject.properties_at_now, version=0
+ uuid=str(subject.in_posthog_id),
+ team_id=team.pk,
+ properties=subject.properties_at_now,
+ version=0,
)
self._persons_created += 1
self._person_distinct_ids_created += len(subject.distinct_ids_at_now)
for distinct_id in subject.distinct_ids_at_now:
create_person_distinct_id(
- team_id=team.pk, distinct_id=str(distinct_id), person_id=str(subject.in_posthog_id)
+ team_id=team.pk,
+ distinct_id=str(distinct_id),
+ person_id=str(subject.in_posthog_id),
)
self._save_past_sim_events(team, subject.past_events)
# We only want to queue future events if there are any
@@ -294,14 +345,21 @@ def _save_future_sim_events(team: Team, events: List[SimEvent]):
@staticmethod
def _save_sim_group(
- team: Team, type_index: Literal[0, 1, 2, 3, 4], key: str, properties: Dict[str, Any], timestamp: dt.datetime
+ team: Team,
+ type_index: Literal[0, 1, 2, 3, 4],
+ key: str,
+ properties: Dict[str, Any],
+ timestamp: dt.datetime,
):
from posthog.models.group.util import raw_create_group_ch
raw_create_group_ch(team.pk, type_index, key, properties, timestamp)
def _sleep_until_person_data_in_clickhouse(self, team_id: int):
- from posthog.models.person.sql import GET_PERSON_COUNT_FOR_TEAM, GET_PERSON_DISTINCT_ID2_COUNT_FOR_TEAM
+ from posthog.models.person.sql import (
+ GET_PERSON_COUNT_FOR_TEAM,
+ GET_PERSON_DISTINCT_ID2_COUNT_FOR_TEAM,
+ )
while True:
person_count: int = sync_execute(GET_PERSON_COUNT_FOR_TEAM, {"team_id": team_id})[0][0]
diff --git a/posthog/demo/matrix/matrix.py b/posthog/demo/matrix/matrix.py
index 1a080057a0ceb..d94988bc4210d 100644
--- a/posthog/demo/matrix/matrix.py
+++ b/posthog/demo/matrix/matrix.py
@@ -72,7 +72,12 @@ def __init__(self, *, index: int, matrix: "Matrix") -> None:
self.radius = int(self.MIN_RADIUS + self.radius_distribution() * (self.MAX_RADIUS - self.MIN_RADIUS))
self.people_matrix = [
[
- matrix.PERSON_CLASS(kernel=(x == self.radius and y == self.radius), x=x, y=y, cluster=self)
+ matrix.PERSON_CLASS(
+ kernel=(x == self.radius and y == self.radius),
+ x=x,
+ y=y,
+ cluster=self,
+ )
for x in range(1 + self.radius * 2)
]
for y in range(1 + self.radius * 2)
diff --git a/posthog/demo/matrix/models.py b/posthog/demo/matrix/models.py
index fbb1dff7f98ff..a2e7796518914 100644
--- a/posthog/demo/matrix/models.py
+++ b/posthog/demo/matrix/models.py
@@ -181,7 +181,11 @@ class SimBrowserClient(SimClient):
def __init__(self, person: "SimPerson"):
self.person = person
self.matrix = person.cluster.matrix
- self.device_type, self.os, self.browser = self.person.cluster.properties_provider.device_type_os_browser()
+ (
+ self.device_type,
+ self.os,
+ self.browser,
+ ) = self.person.cluster.properties_provider.device_type_os_browser()
self.device_id = str(UUID(int=self.person.cluster.random.getrandbits(128)))
self.active_distinct_id = self.device_id # Pre-`$identify`, the device ID is used as the distinct ID
self.active_session_id = None
@@ -223,7 +227,10 @@ def capture(self, event: str, properties: Optional[Properties] = None):
if properties:
if referrer := properties.get("$referrer"):
referring_domain = urlparse(referrer).netloc if referrer != "$direct" else referrer
- referrer_properties = {"$referrer": referrer, "$referring_domain": referring_domain}
+ referrer_properties = {
+ "$referrer": referrer,
+ "$referring_domain": referring_domain,
+ }
self.register(referrer_properties)
combined_properties["$set"].update(referrer_properties)
combined_properties["$referring_domain"] = referring_domain
@@ -235,7 +242,11 @@ def capture(self, event: str, properties: Optional[Properties] = None):
super()._capture_raw(event, combined_properties, distinct_id=self.active_distinct_id)
def capture_pageview(
- self, current_url: str, properties: Optional[Properties] = None, *, referrer: Optional[str] = None
+ self,
+ current_url: str,
+ properties: Optional[Properties] = None,
+ *,
+ referrer: Optional[str] = None,
):
"""Capture a $pageview event. $pageleave is handled implicitly."""
if self.current_url is not None:
@@ -259,14 +270,24 @@ def identify(self, distinct_id: Optional[str], set_properties: Optional[Properti
self.active_distinct_id = distinct_id
self.capture(EVENT_IDENTIFY, identify_properties)
- def group(self, group_type: str, group_key: str, set_properties: Optional[Properties] = None):
+ def group(
+ self,
+ group_type: str,
+ group_key: str,
+ set_properties: Optional[Properties] = None,
+ ):
"""Link the person to the specified group. Similar to JS `posthog.group()`."""
if set_properties is None:
set_properties = {}
self.person._groups[group_type] = group_key
self.person.cluster.matrix._update_group(group_type, group_key, set_properties)
self.capture(
- EVENT_GROUP_IDENTIFY, {"$group_type": group_type, "$group_key": group_key, "$group_set": set_properties}
+ EVENT_GROUP_IDENTIFY,
+ {
+ "$group_type": group_type,
+ "$group_key": group_key,
+ "$group_set": set_properties,
+ },
)
def reset(self):
@@ -404,7 +425,13 @@ def schedule_effect(
An effect is a function that runs on the person, so it can change the person's state."""
self.cluster.raw_schedule_effect(
- Effect(timestamp=timestamp, callback=callback, source=self, target=target, condition=condition)
+ Effect(
+ timestamp=timestamp,
+ callback=callback,
+ source=self,
+ target=target,
+ condition=condition,
+ )
)
# Person state
@@ -423,7 +450,14 @@ def move_attribute(self, attr: str, delta: float) -> Literal[True]:
setattr(self, attr, getattr(self, attr) + delta)
return True
- def _append_event(self, event: str, properties: Properties, *, distinct_id: str, timestamp: dt.datetime):
+ def _append_event(
+ self,
+ event: str,
+ properties: Properties,
+ *,
+ distinct_id: str,
+ timestamp: dt.datetime,
+ ):
"""Append event to `past_events` or `future_events`, whichever is appropriate."""
if self.in_posthog_id is None:
self.in_posthog_id = self.cluster.roll_uuidt()
diff --git a/posthog/demo/matrix/randomization.py b/posthog/demo/matrix/randomization.py
index c0d6a8edb6bb3..ca6bcfd588640 100644
--- a/posthog/demo/matrix/randomization.py
+++ b/posthog/demo/matrix/randomization.py
@@ -23,19 +23,28 @@ class Industry(str, Enum):
class PropertiesProvider(mimesis.BaseProvider):
# Somewhat realistically segmented and weighted pools for random properties: device type/OS/browser
- DEVICE_TYPE_WEIGHTED_POOL: WeightedPool = (["Desktop", "Mobile", "Tablet"], [8, 1, 1])
+ DEVICE_TYPE_WEIGHTED_POOL: WeightedPool = (
+ ["Desktop", "Mobile", "Tablet"],
+ [8, 1, 1],
+ )
OS_WEIGHTED_POOLS: Dict[str, WeightedPool] = {
"Desktop": (["Windows", "Mac OS X", "Linux", "Chrome OS"], [18, 16, 7, 1]),
"Mobile": (["iOS", "Android"], [1, 1]),
"Tablet": (["iOS", "Android"], [1, 1]),
}
BROWSER_WEIGHTED_POOLS: Dict[str, WeightedPool] = {
- "Windows": (["Chrome", "Firefox", "Opera", "Microsoft Edge", "Internet Explorer"], [12, 4, 2, 1, 1]),
+ "Windows": (
+ ["Chrome", "Firefox", "Opera", "Microsoft Edge", "Internet Explorer"],
+ [12, 4, 2, 1, 1],
+ ),
"Mac OS X": (["Chrome", "Firefox", "Opera", "Safari"], [4, 2, 1, 2]),
"Linux": (["Chrome", "Firefox", "Opera"], [3, 3, 1]),
"Chrome OS": (["Chrome"], [1]),
"iOS": (["Mobile Safari", "Chrome iOS", "Firefox iOS"], [8, 1, 1]),
- "Android": (["Chrome", "Android Mobile", "Samsung Internet", "Firefox"], [5, 3, 3, 1]),
+ "Android": (
+ ["Chrome", "Android Mobile", "Samsung Internet", "Firefox"],
+ [5, 3, 3, 1],
+ ),
}
INDUSTRY_POOL = (
diff --git a/posthog/demo/products/hedgebox/matrix.py b/posthog/demo/products/hedgebox/matrix.py
index ed863556ecbf4..5c169ad0afd28 100644
--- a/posthog/demo/products/hedgebox/matrix.py
+++ b/posthog/demo/products/hedgebox/matrix.py
@@ -4,7 +4,13 @@
from django.db import IntegrityError
-from posthog.constants import INSIGHT_TRENDS, PAGEVIEW_EVENT, RETENTION_FIRST_TIME, TRENDS_LINEAR, TRENDS_WORLD_MAP
+from posthog.constants import (
+ INSIGHT_TRENDS,
+ PAGEVIEW_EVENT,
+ RETENTION_FIRST_TIME,
+ TRENDS_LINEAR,
+ TRENDS_WORLD_MAP,
+)
from posthog.demo.matrix.matrix import Cluster, Matrix
from posthog.demo.matrix.randomization import Industry
from posthog.models import (
@@ -46,7 +52,8 @@ def __init__(self, *args, **kwargs):
is_company = self.random.random() < COMPANY_CLUSTERS_PROPORTION
if is_company:
             self.company = HedgeboxCompany(
- name=self.finance_provider.company(), industry=self.properties_provider.industry()
+ name=self.finance_provider.company(),
+ industry=self.properties_provider.industry(),
)
else:
self.company = None
@@ -81,7 +88,10 @@ def set_project_up(self, team, user):
# Actions
interacted_with_file_action = Action.objects.create(
- name="Interacted with file", team=team, description="Logged-in interaction with a file.", created_by=user
+ name="Interacted with file",
+ team=team,
+ description="Logged-in interaction with a file.",
+ created_by=user,
)
ActionStep.objects.bulk_create(
(
@@ -97,7 +107,18 @@ def set_project_up(self, team, user):
team=team,
name="Signed-up users",
created_by=user,
- groups=[{"properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "is_set",
+ "operator": "is_set",
+ }
+ ]
+ }
+ ],
)
real_users_cohort = Cohort.objects.create(
team=team,
@@ -105,14 +126,26 @@ def set_project_up(self, team, user):
description="People who don't belong to the Hedgebox team.",
created_by=user,
groups=[
- {"properties": [{"key": "email", "type": "person", "value": "@hedgebox.net$", "operator": "not_regex"}]}
+ {
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "@hedgebox.net$",
+ "operator": "not_regex",
+ }
+ ]
+ }
],
)
team.test_account_filters = [{"key": "id", "type": "cohort", "value": real_users_cohort.pk}]
# Dashboard: Key metrics (project home)
key_metrics_dashboard = Dashboard.objects.create(
- team=team, name="🔑 Key metrics", description="Company overview.", pinned=True
+ team=team,
+ name="🔑 Key metrics",
+ description="Company overview.",
+ pinned=True,
)
team.primary_dashboard = key_metrics_dashboard
weekly_signups_insight = Insight.objects.create(
@@ -137,7 +170,16 @@ def set_project_up(self, team, user):
color="blue",
layouts={
"sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 0,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
signups_by_country_insight = Insight.objects.create(
@@ -162,7 +204,16 @@ def set_project_up(self, team, user):
insight=signups_by_country_insight,
layouts={
"sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 5,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
activation_funnel = Insight.objects.create(
@@ -210,7 +261,16 @@ def set_project_up(self, team, user):
insight=activation_funnel,
layouts={
"sm": {"h": 5, "w": 6, "x": 0, "y": 5, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 10, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 10,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
new_user_retention = Insight.objects.create(
@@ -227,11 +287,23 @@ def set_project_up(self, team, user):
"values": [
{
"type": "AND",
- "values": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}],
+ "values": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "is_set",
+ "operator": "is_set",
+ }
+ ],
}
],
},
- "target_entity": {"id": EVENT_SIGNED_UP, "name": EVENT_SIGNED_UP, "type": "events", "order": 0},
+ "target_entity": {
+ "id": EVENT_SIGNED_UP,
+ "name": EVENT_SIGNED_UP,
+ "type": "events",
+ "order": 0,
+ },
"retention_type": RETENTION_FIRST_TIME,
"total_intervals": 9,
"returning_entity": {
@@ -249,7 +321,16 @@ def set_project_up(self, team, user):
insight=new_user_retention,
layouts={
"sm": {"h": 5, "w": 6, "x": 6, "y": 5, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 15, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 15,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
active_user_lifecycle = Insight.objects.create(
@@ -287,7 +368,16 @@ def set_project_up(self, team, user):
insight=active_user_lifecycle,
layouts={
"sm": {"h": 5, "w": 6, "x": 0, "y": 10, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 20, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 20,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
weekly_file_volume = Insight.objects.create(
@@ -333,7 +423,16 @@ def set_project_up(self, team, user):
insight=weekly_file_volume,
layouts={
"sm": {"h": 5, "w": 6, "x": 6, "y": 10, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 25, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 25,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
@@ -346,7 +445,13 @@ def set_project_up(self, team, user):
name="Monthly app revenue",
filters={
"events": [
- {"id": EVENT_PAID_BILL, "type": "events", "order": 0, "math": "sum", "math_property": "amount_usd"}
+ {
+ "id": EVENT_PAID_BILL,
+ "type": "events",
+ "order": 0,
+ "math": "sum",
+ "math_property": "amount_usd",
+ }
],
"actions": [],
"display": TRENDS_LINEAR,
@@ -362,7 +467,16 @@ def set_project_up(self, team, user):
insight=monthly_app_revenue_trends,
layouts={
"sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 0,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
bills_paid_trends = Insight.objects.create(
@@ -399,7 +513,16 @@ def set_project_up(self, team, user):
insight=bills_paid_trends,
layouts={
"sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 5,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
@@ -426,7 +549,16 @@ def set_project_up(self, team, user):
insight=daily_unique_visitors_trends,
layouts={
"sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 0,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
most_popular_pages_trends = Insight.objects.create(
@@ -435,7 +567,14 @@ def set_project_up(self, team, user):
saved=True,
name="Most popular pages",
filters={
- "events": [{"id": PAGEVIEW_EVENT, "math": "total", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": PAGEVIEW_EVENT,
+ "math": "total",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"actions": [],
"display": "ActionsTable",
"insight": "TRENDS",
@@ -469,7 +608,16 @@ def set_project_up(self, team, user):
insight=most_popular_pages_trends,
layouts={
"sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3},
- "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False},
+ "xs": {
+ "h": 5,
+ "w": 1,
+ "x": 0,
+ "y": 5,
+ "minH": 5,
+ "minW": 3,
+ "moved": False,
+ "static": False,
+ },
},
)
@@ -487,7 +635,12 @@ def set_project_up(self, team, user):
"type": "events",
"order": 0,
"properties": [
- {"key": "$current_url", "type": "event", "value": URL_HOME, "operator": "exact"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": URL_HOME,
+ "operator": "exact",
+ }
],
},
{
@@ -497,10 +650,21 @@ def set_project_up(self, team, user):
"type": "events",
"order": 1,
"properties": [
- {"key": "$current_url", "type": "event", "value": URL_SIGNUP, "operator": "regex"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": URL_SIGNUP,
+ "operator": "regex",
+ }
],
},
- {"custom_name": "Signed up", "id": "signed_up", "name": "signed_up", "type": "events", "order": 2},
+ {
+ "custom_name": "Signed up",
+ "id": "signed_up",
+ "name": "signed_up",
+ "type": "events",
+ "order": 2,
+ },
],
"actions": [],
"display": "FunnelViz",
@@ -563,7 +727,11 @@ def set_project_up(self, team, user):
user=user,
insight=insight,
last_viewed_at=(
- self.now - dt.timedelta(days=self.random.randint(0, 3), minutes=self.random.randint(5, 60))
+ self.now
+ - dt.timedelta(
+ days=self.random.randint(0, 3),
+ minutes=self.random.randint(5, 60),
+ )
),
)
for insight in Insight.objects.filter(team=team)
@@ -610,8 +778,14 @@ def set_project_up(self, team, user):
"groups": [{"properties": [], "rollout_percentage": None}],
"multivariate": {
"variants": [
- {"key": "control", "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT},
- {"key": "test", "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT},
+ {
+ "key": "control",
+ "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT,
+ },
+ {
+ "key": "test",
+ "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT,
+ },
]
},
},
@@ -632,10 +806,20 @@ def set_project_up(self, team, user):
"type": "events",
"order": 0,
"properties": [
- {"key": "$current_url", "type": "event", "value": URL_SIGNUP, "operator": "exact"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": URL_SIGNUP,
+ "operator": "exact",
+ }
],
},
- {"id": "signed_up", "name": "signed_up", "type": "events", "order": 1},
+ {
+ "id": "signed_up",
+ "name": "signed_up",
+ "type": "events",
+ "order": 1,
+ },
],
"actions": [],
"display": "FunnelViz",
@@ -646,8 +830,14 @@ def set_project_up(self, team, user):
},
parameters={
"feature_flag_variants": [
- {"key": "control", "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT},
- {"key": "test", "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT},
+ {
+ "key": "control",
+ "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT,
+ },
+ {
+ "key": "test",
+ "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT,
+ },
],
"recommended_sample_size": int(len(self.clusters) * 0.274),
"recommended_running_time": None,
diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py
index 132f3d6ac5f32..1c0a0e4ffd0da 100644
--- a/posthog/demo/products/hedgebox/models.py
+++ b/posthog/demo/products/hedgebox/models.py
@@ -279,9 +279,18 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]:
# The more files, the more likely to delete/download/share rather than upload
possible_intents_with_weights.extend(
[
- (HedgeboxSessionIntent.DELETE_FILE_S, math.log10(file_count) / 8 if file_count else 0),
- (HedgeboxSessionIntent.DOWNLOAD_OWN_FILE_S, math.log10(file_count + 1) if file_count else 0),
- (HedgeboxSessionIntent.SHARE_FILE, math.log10(file_count) / 3 if file_count else 0),
+ (
+ HedgeboxSessionIntent.DELETE_FILE_S,
+ math.log10(file_count) / 8 if file_count else 0,
+ ),
+ (
+ HedgeboxSessionIntent.DOWNLOAD_OWN_FILE_S,
+ math.log10(file_count + 1) if file_count else 0,
+ ),
+ (
+ HedgeboxSessionIntent.SHARE_FILE,
+ math.log10(file_count) / 3 if file_count else 0,
+ ),
]
)
if self.account.allocation_used_fraction < 0.99:
@@ -304,7 +313,8 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]:
if possible_intents_with_weights:
possible_intents, weights = zip(*possible_intents_with_weights)
return self.cluster.random.choices(
- cast(Tuple[HedgeboxSessionIntent], possible_intents), cast(Tuple[float], weights)
+ cast(Tuple[HedgeboxSessionIntent], possible_intents),
+ cast(Tuple[float], weights),
)[0]
else:
return None
@@ -526,7 +536,10 @@ def go_to_shared_file(self, file: HedgeboxFile):
self.active_client.capture_pageview(dyn_url_file(file.id))
self.advance_timer(0.5 + self.cluster.random.betavariate(1.2, 1.6) * 20)
if self.cluster.random.random() < 0.7:
- self.active_client.capture(EVENT_DOWNLOADED_FILE, {"file_type": file.type, "file_size_b": file.size_b})
+ self.active_client.capture(
+ EVENT_DOWNLOADED_FILE,
+ {"file_type": file.type, "file_size_b": file.size_b},
+ )
self.advance_timer(0.5 + self.cluster.random.betavariate(1.2, 2) * 80)
self.need += (self.cluster.random.betavariate(1.2, 1) - 0.5) * 0.08
if self.cluster.random.random() < 0.2:
@@ -537,13 +550,20 @@ def go_to_account_settings(self):
self.advance_timer(1 + self.cluster.random.betavariate(1.2, 1.2) * 5)
random = self.cluster.random.random()
if (
- self.active_session_intent in (HedgeboxSessionIntent.UPGRADE_PLAN, HedgeboxSessionIntent.DOWNGRADE_PLAN)
+ self.active_session_intent
+ in (
+ HedgeboxSessionIntent.UPGRADE_PLAN,
+ HedgeboxSessionIntent.DOWNGRADE_PLAN,
+ )
or random < 0.1
):
self.go_to_account_billing()
elif (
self.active_session_intent
- in (HedgeboxSessionIntent.INVITE_TEAM_MEMBER, HedgeboxSessionIntent.REMOVE_TEAM_MEMBER)
+ in (
+ HedgeboxSessionIntent.INVITE_TEAM_MEMBER,
+ HedgeboxSessionIntent.REMOVE_TEAM_MEMBER,
+ )
or random < 0.1
):
self.go_to_account_team()
@@ -609,7 +629,11 @@ def join_team(self):
raise ValueError("Cannot join team without an account")
self.active_client.capture(EVENT_SIGNED_UP, {"from_invite": True})
self.advance_timer(self.cluster.random.uniform(0.1, 0.2))
- self.active_client.group(GROUP_TYPE_ACCOUNT, self.account.id, {"team_size": len(self.account.team_members)})
+ self.active_client.group(
+ GROUP_TYPE_ACCOUNT,
+ self.account.id,
+ {"team_size": len(self.account.team_members)},
+ )
self.account.team_members.add(self)
def upload_file(self, file: HedgeboxFile):
@@ -618,12 +642,19 @@ def upload_file(self, file: HedgeboxFile):
self.account.files.add(file)
self.active_client.capture(
EVENT_UPLOADED_FILE,
- properties={"file_type": file.type, "file_size_b": file.size_b, "used_mb": self.account.current_used_mb},
+ properties={
+ "file_type": file.type,
+ "file_size_b": file.size_b,
+ "used_mb": self.account.current_used_mb,
+ },
)
self.active_client.group(
GROUP_TYPE_ACCOUNT,
self.account.id,
- {"used_mb": self.account.current_used_mb, "file_count": len(self.account.files)},
+ {
+ "used_mb": self.account.current_used_mb,
+ "file_count": len(self.account.files),
+ },
)
self.satisfaction += self.cluster.random.uniform(-0.19, 0.2)
if self.satisfaction > 0.9:
@@ -643,7 +674,10 @@ def delete_file(self, file: HedgeboxFile):
self.active_client.group(
GROUP_TYPE_ACCOUNT,
self.account.id,
- {"used_mb": self.account.current_used_mb, "file_count": len(self.account.files)},
+ {
+ "used_mb": self.account.current_used_mb,
+ "file_count": len(self.account.files),
+ },
)
def share_file(self, file: HedgeboxFile):
@@ -662,7 +696,8 @@ def upgrade_plan(self):
if new_plan is None:
raise ValueError("There's no successor plan")
self.active_client.capture(
- EVENT_UPGRADED_PLAN, {"previous_plan": str(previous_plan), "new_plan": str(new_plan)}
+ EVENT_UPGRADED_PLAN,
+ {"previous_plan": str(previous_plan), "new_plan": str(new_plan)},
)
self.advance_timer(self.cluster.random.betavariate(1.2, 1.2) * 2)
self.schedule_effect(
@@ -678,7 +713,11 @@ def upgrade_plan(self):
)
for i in range(future_months):
bill_timestamp = self.cluster.simulation_time + dt.timedelta(days=30 * i)
- self.schedule_effect(bill_timestamp, lambda person: person.bill_account(), Effect.Target.SELF)
+ self.schedule_effect(
+ bill_timestamp,
+ lambda person: person.bill_account(),
+ Effect.Target.SELF,
+ )
def downgrade_plan(self):
assert self.account is not None
@@ -687,7 +726,8 @@ def downgrade_plan(self):
if new_plan is None:
raise ValueError("There's no predecessor plan")
self.active_client.capture(
- EVENT_DOWNGRADED_PLAN, {"previous_plan": str(previous_plan), "new_plan": str(new_plan)}
+ EVENT_DOWNGRADED_PLAN,
+ {"previous_plan": str(previous_plan), "new_plan": str(new_plan)},
)
self.account.plan = new_plan
@@ -716,7 +756,10 @@ def bill_account(self):
if self.account and self.account.current_monthly_bill_usd:
self.cluster.matrix.server_client.capture(
EVENT_PAID_BILL,
- {"amount_usd": self.account.current_monthly_bill_usd, "plan": self.account.plan},
+ {
+ "amount_usd": self.account.current_monthly_bill_usd,
+ "plan": self.account.plan,
+ },
distinct_id=self.in_product_id,
)
diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py
index 99f0451c5485d..25770553ab613 100644
--- a/posthog/demo/test/test_matrix_manager.py
+++ b/posthog/demo/test/test_matrix_manager.py
@@ -55,7 +55,9 @@ class TestMatrixManager(ClickhouseDestroyTablesMixin):
def setUpTestData(cls):
super().setUpTestData()
cls.matrix = DummyMatrix(
- n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), days_future=0
+ n_clusters=3,
+ now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")),
+ days_future=0,
)
cls.matrix.simulate()
@@ -83,7 +85,10 @@ def test_run_on_team(self):
# At least one event for each cluster
assert (
- sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk})[0][0] >= 3
+ sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
+ 0
+ ][0]
+ >= 3
)
assert self.team.name == DummyMatrix.PRODUCT_NAME
@@ -95,5 +100,8 @@ def test_run_on_team_using_pre_save(self):
# At least one event for each cluster
assert sync_execute("SELECT count() FROM events WHERE team_id = 0")[0][0] >= 3
assert (
- sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk})[0][0] >= 3
+ sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
+ 0
+ ][0]
+ >= 3
)
diff --git a/posthog/email.py b/posthog/email.py
index 579d68cb0e0ac..93968b6c07844 100644
--- a/posthog/email.py
+++ b/posthog/email.py
@@ -59,7 +59,6 @@ def _send_email(
records: List = []
with transaction.atomic():
-
for dest in to:
record, _ = MessagingRecord.objects.get_or_create(raw_email=dest["raw_email"], campaign_key=campaign_key)
@@ -113,7 +112,11 @@ def _send_email(
try:
connection.close() # type: ignore
except Exception as err:
- print("Could not close email connection (this can be ignored):", err, file=sys.stderr)
+ print(
+ "Could not close email connection (this can be ignored):",
+ err,
+ file=sys.stderr,
+ )
class EmailMessage:
diff --git a/posthog/event_usage.py b/posthog/event_usage.py
index 7d238a29738e6..fa69f0c23662b 100644
--- a/posthog/event_usage.py
+++ b/posthog/event_usage.py
@@ -93,7 +93,8 @@ def report_user_joined_organization(organization: Organization, current_user: Us
def report_user_logged_in(
- user: User, social_provider: str = "" # which third-party provider processed the login (empty = no third-party)
+ user: User,
+ social_provider: str = "", # which third-party provider processed the login (empty = no third-party)
) -> None:
"""
Reports that a user has logged in to PostHog.
@@ -125,7 +126,9 @@ def report_user_password_reset(user: User) -> None:
Reports a user resetting their password.
"""
posthoganalytics.capture(
- user.distinct_id, "user password reset", groups=groups(user.current_organization, user.current_team)
+ user.distinct_id,
+ "user password reset",
+ groups=groups(user.current_organization, user.current_team),
)
@@ -195,13 +198,19 @@ def report_bulk_invited(
def report_user_action(user: User, event: str, properties: Dict = {}):
posthoganalytics.capture(
- user.distinct_id, event, properties=properties, groups=groups(user.current_organization, user.current_team)
+ user.distinct_id,
+ event,
+ properties=properties,
+ groups=groups(user.current_organization, user.current_team),
)
def report_organization_deleted(user: User, organization: Organization):
posthoganalytics.capture(
- user.distinct_id, "organization deleted", organization.get_analytics_metadata(), groups=groups(organization)
+ user.distinct_id,
+ "organization deleted",
+ organization.get_analytics_metadata(),
+ groups=groups(organization),
)
@@ -219,7 +228,12 @@ def groups(organization: Optional[Organization] = None, team: Optional[Team] = N
return result
-def report_team_action(team: Team, event: str, properties: Dict = {}, group_properties: Optional[Dict] = None):
+def report_team_action(
+ team: Team,
+ event: str,
+ properties: Dict = {},
+ group_properties: Optional[Dict] = None,
+):
"""
For capturing events where it is unclear which user was the core actor we can use the team instead
"""
@@ -230,13 +244,19 @@ def report_team_action(team: Team, event: str, properties: Dict = {}, group_prop
def report_organization_action(
- organization: Organization, event: str, properties: Dict = {}, group_properties: Optional[Dict] = None
+ organization: Organization,
+ event: str,
+ properties: Dict = {},
+ group_properties: Optional[Dict] = None,
):
"""
For capturing events where it is unclear which user was the core actor we can use the organization instead
"""
posthoganalytics.capture(
- str(organization.id), event, properties=properties, groups=groups(organization=organization)
+ str(organization.id),
+ event,
+ properties=properties,
+ groups=groups(organization=organization),
)
if group_properties:
diff --git a/posthog/exceptions.py b/posthog/exceptions.py
index 1cdcc5f1bf957..a38b334b566fb 100644
--- a/posthog/exceptions.py
+++ b/posthog/exceptions.py
@@ -75,6 +75,7 @@ def generate_exception_response(
from statshog.defaults.django import statsd
statsd.incr(
- f"posthog_cloud_raw_endpoint_exception", tags={"endpoint": endpoint, "code": code, "type": type, "attr": attr}
+ f"posthog_cloud_raw_endpoint_exception",
+ tags={"endpoint": endpoint, "code": code, "type": type, "attr": attr},
)
return JsonResponse({"type": type, "code": code, "detail": detail, "attr": attr}, status=status_code)
diff --git a/posthog/filters.py b/posthog/filters.py
index 0e8bb86cae7c6..ac098dea92c68 100644
--- a/posthog/filters.py
+++ b/posthog/filters.py
@@ -33,7 +33,12 @@ def get_search_terms(self, request: Request):
terms = terms.replace("\x00", "") # strip null characters
return list(filter(None, terms.split(" ")))
- def filter_queryset(self, request: Request, queryset: Union[QuerySet[_MT], RawQuerySet], view: APIView):
+ def filter_queryset(
+ self,
+ request: Request,
+ queryset: Union[QuerySet[_MT], RawQuerySet],
+ view: APIView,
+ ):
if isinstance(queryset, RawQuerySet):
return queryset
@@ -54,7 +59,9 @@ def filter_queryset(self, request: Request, queryset: Union[QuerySet[_MT], RawQu
def term_search_filter_sql(
- search_fields: List[str], search_terms: Optional[str] = "", search_extra: Optional[str] = ""
+ search_fields: List[str],
+ search_terms: Optional[str] = "",
+ search_extra: Optional[str] = "",
) -> Tuple[str, dict]:
if not search_fields or not search_terms:
return "", {}
diff --git a/posthog/health.py b/posthog/health.py
index 782fab5a2942a..a77e4d79718f3 100644
--- a/posthog/health.py
+++ b/posthog/health.py
@@ -59,7 +59,13 @@
# NOTE: we can be pretty picky about what the worker needs as by its nature
# of reading from a durable queue rather than being required to perform
# request/response, we are more resilient to service downtime.
- "worker": ["http", "postgres", "postgres_migrations_uptodate", "clickhouse", "celery_broker"],
+ "worker": [
+ "http",
+ "postgres",
+ "postgres_migrations_uptodate",
+ "clickhouse",
+ "celery_broker",
+ ],
"decide": ["http"],
}
diff --git a/posthog/helpers/dashboard_templates.py b/posthog/helpers/dashboard_templates.py
index 7a1179b3af5d3..cfaa2bac5e1d1 100644
--- a/posthog/helpers/dashboard_templates.py
+++ b/posthog/helpers/dashboard_templates.py
@@ -39,7 +39,9 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
dashboard.filters = {DATE_FROM: "-30d"}
if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING):
tag, _ = Tag.objects.get_or_create(
- name="marketing", team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id}
+ name="marketing",
+ team_id=dashboard.team_id,
+ defaults={"team_id": dashboard.team_id},
)
dashboard.tagged_items.create(tag_id=tag.id)
dashboard.save(update_fields=["filters"])
@@ -50,7 +52,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
name="Website Unique Users (Total)",
description="Shows the number of unique users that use your app every day.",
filters={
- TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}],
+ TREND_FILTER_TYPE_EVENTS: [
+ {
+ "id": "$pageview",
+ "math": UNIQUE_USERS,
+ "type": TREND_FILTER_TYPE_EVENTS,
+ }
+ ],
INTERVAL: "day",
INSIGHT: INSIGHT_TRENDS,
DATE_FROM: "-30d",
@@ -77,7 +85,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
name="Organic SEO Unique Users (Total)",
description="",
filters={
- TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}],
+ TREND_FILTER_TYPE_EVENTS: [
+ {
+ "id": "$pageview",
+ "math": UNIQUE_USERS,
+ "type": TREND_FILTER_TYPE_EVENTS,
+ }
+ ],
INTERVAL: "day",
INSIGHT: INSIGHT_TRENDS,
DATE_FROM: "-30d",
@@ -89,8 +103,18 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
{
"type": "AND",
"values": [
- {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"},
- {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"},
+ {
+ "key": "$referring_domain",
+ "type": "event",
+ "value": "google",
+ "operator": "icontains",
+ },
+ {
+ "key": "utm_source",
+ "type": "event",
+ "value": "is_not_set",
+ "operator": "is_not_set",
+ },
],
}
],
@@ -117,7 +141,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
name="Website Unique Users (Breakdown)",
description="",
filters={
- TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}],
+ TREND_FILTER_TYPE_EVENTS: [
+ {
+ "id": "$pageview",
+ "math": UNIQUE_USERS,
+ "type": TREND_FILTER_TYPE_EVENTS,
+ }
+ ],
INTERVAL: "week",
INSIGHT: INSIGHT_TRENDS,
DATE_FROM: "-30d",
@@ -149,8 +179,18 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
"math": UNIQUE_USERS,
"type": TREND_FILTER_TYPE_EVENTS,
PROPERTIES: [
- {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"},
- {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"},
+ {
+ "key": "$referring_domain",
+ "type": "event",
+ "value": "google",
+ "operator": "icontains",
+ },
+ {
+ "key": "utm_source",
+ "type": "event",
+ "value": "is_not_set",
+ "operator": "is_not_set",
+ },
],
}
],
@@ -283,7 +323,14 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
"values": [
{
"type": "AND",
- "values": [{"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"}],
+ "values": [
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "?",
+ "operator": "not_icontains",
+ }
+ ],
}
],
},
@@ -329,8 +376,18 @@ def _create_website_dashboard(dashboard: Dashboard) -> None:
{
"type": "AND",
"values": [
- {"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"},
- {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"},
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "?",
+ "operator": "not_icontains",
+ },
+ {
+ "key": "$referring_domain",
+ "type": "event",
+ "value": "google",
+ "operator": "icontains",
+ },
],
}
],
@@ -403,7 +460,9 @@ def create_from_template(dashboard: Dashboard, template: DashboardTemplate) -> N
if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING):
for template_tag in template.tags or []:
tag, _ = Tag.objects.get_or_create(
- name=template_tag, team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id}
+ name=template_tag,
+ team_id=dashboard.team_id,
+ defaults={"team_id": dashboard.team_id},
)
dashboard.tagged_items.create(tag_id=tag.id)
dashboard.save()
@@ -490,7 +549,9 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
dashboard.filters = {DATE_FROM: "-30d"}
if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING):
tag, _ = Tag.objects.get_or_create(
- name="feature flags", team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id}
+ name="feature flags",
+ team_id=dashboard.team_id,
+ defaults={"team_id": dashboard.team_id},
)
dashboard.tagged_items.create(tag_id=tag.id)
dashboard.save(update_fields=["filters"])
@@ -502,7 +563,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
description="Shows the number of total calls made on feature flag with key: " + feature_flag.key,
filters={
TREND_FILTER_TYPE_EVENTS: [
- {"id": "$feature_flag_called", "name": "$feature_flag_called", "type": TREND_FILTER_TYPE_EVENTS}
+ {
+ "id": "$feature_flag_called",
+ "name": "$feature_flag_called",
+ "type": TREND_FILTER_TYPE_EVENTS,
+ }
],
INTERVAL: "day",
INSIGHT: INSIGHT_TRENDS,
@@ -514,7 +579,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
{
"type": "AND",
"values": [
- {"key": "$feature_flag", "type": "event", "value": feature_flag.key},
+ {
+ "key": "$feature_flag",
+ "type": "event",
+ "value": feature_flag.key,
+ },
],
}
],
@@ -562,7 +631,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None:
{
"type": "AND",
"values": [
- {"key": "$feature_flag", "type": "event", "value": feature_flag.key},
+ {
+ "key": "$feature_flag",
+ "type": "event",
+ "value": feature_flag.key,
+ },
],
}
],
@@ -595,7 +668,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das
description="Shows the total number of times this feature was viewed and interacted with",
filters={
TREND_FILTER_TYPE_EVENTS: [
- {"id": "$feature_view", "name": "Feature View - Total", "type": TREND_FILTER_TYPE_EVENTS},
+ {
+ "id": "$feature_view",
+ "name": "Feature View - Total",
+ "type": TREND_FILTER_TYPE_EVENTS,
+ },
{
"id": "$feature_view",
"name": "Feature View - Unique users",
@@ -613,7 +690,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das
{
"type": "AND",
"values": [
- {"key": "feature_flag", "type": "event", "value": feature_flag.key},
+ {
+ "key": "feature_flag",
+ "type": "event",
+ "value": feature_flag.key,
+ },
],
}
],
@@ -630,7 +711,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das
description="Shows the total number of times this feature was viewed and interacted with",
filters={
TREND_FILTER_TYPE_EVENTS: [
- {"id": "$feature_interaction", "name": "Feature Interaction - Total", "type": TREND_FILTER_TYPE_EVENTS},
+ {
+ "id": "$feature_interaction",
+ "name": "Feature Interaction - Total",
+ "type": TREND_FILTER_TYPE_EVENTS,
+ },
{
"id": "$feature_interaction",
"name": "Feature Interaction - Unique users",
@@ -648,7 +733,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das
{
"type": "AND",
"values": [
- {"key": "feature_flag", "type": "event", "value": feature_flag.key},
+ {
+ "key": "feature_flag",
+ "type": "event",
+ "value": feature_flag.key,
+ },
],
}
],
diff --git a/posthog/helpers/tests/test_multi_property_breakdown.py b/posthog/helpers/tests/test_multi_property_breakdown.py
index 311cd465ad9b8..cc2dad4bbc57f 100644
--- a/posthog/helpers/tests/test_multi_property_breakdown.py
+++ b/posthog/helpers/tests/test_multi_property_breakdown.py
@@ -1,7 +1,9 @@
from typing import Any, Dict, List
from unittest import TestCase
-from posthog.helpers.multi_property_breakdown import protect_old_clients_from_multi_property_default
+from posthog.helpers.multi_property_breakdown import (
+ protect_old_clients_from_multi_property_default,
+)
class TestMultiPropertyBreakdown(TestCase):
@@ -15,7 +17,11 @@ def test_handles_empty_inputs(self):
assert False, "should not raise any KeyError"
def test_handles_empty_breakdowns_array(self):
- data: Dict[str, Any] = {"breakdowns": [], "insight": "FUNNELS", "breakdown_type": "event"}
+ data: Dict[str, Any] = {
+ "breakdowns": [],
+ "insight": "FUNNELS",
+ "breakdown_type": "event",
+ }
result: List = []
try:
@@ -24,7 +30,11 @@ def test_handles_empty_breakdowns_array(self):
assert False, "should not raise any KeyError"
def test_keeps_multi_property_breakdown_for_multi_property_requests(self):
- data: Dict[str, Any] = {"breakdowns": ["a", "b"], "insight": "FUNNELS", "breakdown_type": "event"}
+ data: Dict[str, Any] = {
+ "breakdowns": ["a", "b"],
+ "insight": "FUNNELS",
+ "breakdown_type": "event",
+ }
result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1", "b1"], "breakdown_value": ["a1", "b1"]}]]
actual = protect_old_clients_from_multi_property_default(data, result)
@@ -38,7 +48,11 @@ def test_keeps_multi_property_breakdown_for_multi_property_requests(self):
assert "breakdown" not in data
def test_flattens_multi_property_breakdown_for_single_property_requests(self):
- data: Dict[str, Any] = {"breakdown": "a", "insight": "FUNNELS", "breakdown_type": "event"}
+ data: Dict[str, Any] = {
+ "breakdown": "a",
+ "insight": "FUNNELS",
+ "breakdown_type": "event",
+ }
result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1"], "breakdown_value": ["a1", "b1"]}]]
actual = protect_old_clients_from_multi_property_default(data, result)
diff --git a/posthog/hogql/ai.py b/posthog/hogql/ai.py
index 915d03b77e49c..c53e9814d807a 100644
--- a/posthog/hogql/ai.py
+++ b/posthog/hogql/ai.py
@@ -85,7 +85,11 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t
]
if current_query:
messages.insert(
- -1, {"role": "user", "content": CURRENT_QUERY_MESSAGE.format(current_query_input=current_query)}
+ -1,
+ {
+ "role": "user",
+ "content": CURRENT_QUERY_MESSAGE.format(current_query_input=current_query),
+ },
)
candidate_sql: Optional[str] = None
@@ -116,7 +120,12 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t
print_ast(parse_select(candidate_sql), context=context, dialect="clickhouse")
except HogQLException as e:
messages.append({"role": "assistant", "content": candidate_sql})
- messages.append({"role": "user", "content": f"That query has this problem: {e}. Return fixed query."})
+ messages.append(
+ {
+ "role": "user",
+ "content": f"That query has this problem: {e}. Return fixed query.",
+ }
+ )
else:
generated_valid_hogql = True
break
diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/bytecode.py
index ab468338ca803..7a4a6904527b9 100644
--- a/posthog/hogql/bytecode.py
+++ b/posthog/hogql/bytecode.py
@@ -3,7 +3,11 @@
from posthog.hogql import ast
from posthog.hogql.errors import NotImplementedException
from posthog.hogql.visitor import Visitor
-from hogvm.python.operation import Operation, HOGQL_BYTECODE_IDENTIFIER, SUPPORTED_FUNCTIONS
+from hogvm.python.operation import (
+ Operation,
+ HOGQL_BYTECODE_IDENTIFIER,
+ SUPPORTED_FUNCTIONS,
+)
COMPARE_OPERATIONS = {
ast.CompareOperationOp.Eq: Operation.EQ,
@@ -74,7 +78,11 @@ def visit_compare_operation(self, node: ast.CompareOperation):
return [*self.visit(node.right), *self.visit(node.left), operation]
def visit_arithmetic_operation(self, node: ast.ArithmeticOperation):
- return [*self.visit(node.right), *self.visit(node.left), ARITHMETIC_OPERATIONS[node.op]]
+ return [
+ *self.visit(node.right),
+ *self.visit(node.left),
+ ARITHMETIC_OPERATIONS[node.op],
+ ]
def visit_field(self, node: ast.Field):
chain = []
diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py
index cd08da81fca30..0a2806ca99878 100644
--- a/posthog/hogql/constants.py
+++ b/posthog/hogql/constants.py
@@ -4,7 +4,16 @@
from pydantic import ConfigDict, BaseModel
ConstantDataType: TypeAlias = Literal[
- "int", "float", "str", "bool", "array", "tuple", "date", "datetime", "uuid", "unknown"
+ "int",
+ "float",
+ "str",
+ "bool",
+ "array",
+ "tuple",
+ "date",
+ "datetime",
+ "uuid",
+ "unknown",
]
ConstantSupportedPrimitive: TypeAlias = int | float | str | bool | date | datetime | UUID | None
ConstantSupportedData: TypeAlias = (
diff --git a/posthog/hogql/context.py b/posthog/hogql/context.py
index 65c17ba7006be..7f45d66fa4f83 100644
--- a/posthog/hogql/context.py
+++ b/posthog/hogql/context.py
@@ -55,7 +55,11 @@ def add_sensitive_value(self, value: Any) -> str:
return f"%({key})s"
def add_notice(
- self, message: str, start: Optional[int] = None, end: Optional[int] = None, fix: Optional[str] = None
+ self,
+ message: str,
+ start: Optional[int] = None,
+ end: Optional[int] = None,
+ fix: Optional[str] = None,
):
if not any(n.start == start and n.end == end and n.message == message and n.fix == fix for n in self.notices):
self.notices.append(HogQLNotice(start=start, end=end, message=message, fix=fix))
diff --git a/posthog/hogql/database/argmax.py b/posthog/hogql/database/argmax.py
index a46b068513e6b..0302ac14ddb26 100644
--- a/posthog/hogql/database/argmax.py
+++ b/posthog/hogql/database/argmax.py
@@ -18,7 +18,12 @@ def argmax_select(
fields_to_select: List[ast.Expr] = []
for name, chain in select_fields.items():
if name not in group_fields:
- fields_to_select.append(ast.Alias(alias=name, expr=argmax_version(ast.Field(chain=[table_name] + chain))))
+ fields_to_select.append(
+ ast.Alias(
+ alias=name,
+ expr=argmax_version(ast.Field(chain=[table_name] + chain)),
+ )
+ )
for key in group_fields:
fields_to_group.append(ast.Field(chain=[table_name, key]))
fields_to_select.append(ast.Alias(alias=key, expr=ast.Field(chain=[table_name, key])))
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py
index 1e5b8dd2cc390..db2791c348d76 100644
--- a/posthog/hogql/database/database.py
+++ b/posthog/hogql/database/database.py
@@ -27,10 +27,19 @@
from posthog.hogql.database.schema.events import EventsTable
from posthog.hogql.database.schema.groups import GroupsTable, RawGroupsTable
from posthog.hogql.database.schema.numbers import NumbersTable
-from posthog.hogql.database.schema.person_distinct_ids import PersonDistinctIdsTable, RawPersonDistinctIdsTable
+from posthog.hogql.database.schema.person_distinct_ids import (
+ PersonDistinctIdsTable,
+ RawPersonDistinctIdsTable,
+)
from posthog.hogql.database.schema.persons import PersonsTable, RawPersonsTable
-from posthog.hogql.database.schema.person_overrides import PersonOverridesTable, RawPersonOverridesTable
-from posthog.hogql.database.schema.session_replay_events import RawSessionReplayEventsTable, SessionReplayEventsTable
+from posthog.hogql.database.schema.person_overrides import (
+ PersonOverridesTable,
+ RawPersonOverridesTable,
+)
+from posthog.hogql.database.schema.session_replay_events import (
+ RawSessionReplayEventsTable,
+ SessionReplayEventsTable,
+)
from posthog.hogql.database.schema.static_cohort_people import StaticCohortPeople
from posthog.hogql.errors import HogQLException
from posthog.models.group_type_mapping import GroupTypeMapping
@@ -52,10 +61,10 @@ class Database(BaseModel):
cohort_people: CohortPeople = CohortPeople()
static_cohort_people: StaticCohortPeople = StaticCohortPeople()
log_entries: LogEntriesTable = LogEntriesTable()
- console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable()
+ console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = (
+ ReplayConsoleLogsLogEntriesTable()
+ )
batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable()
- raw_session_replay_events: RawSessionReplayEventsTable = RawSessionReplayEventsTable()
+ raw_session_replay_events: RawSessionReplayEventsTable = (
+ RawSessionReplayEventsTable()
+ )
raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable()
raw_persons: RawPersonsTable = RawPersonsTable()
raw_groups: RawGroupsTable = RawGroupsTable()
@@ -111,7 +120,11 @@ def add_warehouse_tables(self, **field_definitions: Any):
def create_hogql_database(team_id: int, modifiers: Optional[HogQLQueryModifiers] = None) -> Database:
from posthog.models import Team
from posthog.hogql.query import create_default_modifiers_for_team
- from posthog.warehouse.models import DataWarehouseTable, DataWarehouseSavedQuery, DataWarehouseViewLink
+ from posthog.warehouse.models import (
+ DataWarehouseTable,
+ DataWarehouseSavedQuery,
+ DataWarehouseViewLink,
+ )
team = Team.objects.get(pk=team_id)
modifiers = create_default_modifiers_for_team(team, modifiers)
@@ -164,29 +177,6 @@ def create_hogql_database(team_id: int, modifiers: Optional[HogQLQueryModifiers]
return database
-def determine_join_function(view):
- def join_function(from_table: str, to_table: str, requested_fields: Dict[str, Any]):
- from posthog.hogql import ast
- from posthog.hogql.parser import parse_select
-
- if not requested_fields:
- raise HogQLException(f"No fields requested from {to_table}")
-
- join_expr = ast.JoinExpr(table=parse_select(view.saved_query.query["query"]))
- join_expr.join_type = "INNER JOIN"
- join_expr.alias = to_table
- join_expr.constraint = ast.JoinConstraint(
- expr=ast.CompareOperation(
- op=ast.CompareOperationOp.Eq,
- left=ast.Field(chain=[from_table, view.from_join_key]),
- right=ast.Field(chain=[to_table, view.to_join_key]),
- )
- )
- return join_expr
-
- return join_function
-
-
class _SerializedFieldBase(TypedDict):
key: str
type: Literal[
diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py
index 9c7fcac1e8703..91a3b436a5df0 100644
--- a/posthog/hogql/database/models.py
+++ b/posthog/hogql/database/models.py
@@ -6,6 +6,7 @@
if TYPE_CHECKING:
from posthog.hogql.context import HogQLContext
+ from posthog.hogql.ast import SelectQuery
class FieldOrTable(BaseModel):
@@ -101,7 +102,7 @@ def get_asterisk(self):
class LazyJoin(FieldOrTable):
model_config = ConfigDict(extra="forbid")
- join_function: Callable[[str, str, Dict[str, Any], HogQLQueryModifiers], Any]
+ join_function: Callable[[str, str, Dict[str, Any], "HogQLContext", "SelectQuery"], Any]
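+ # Called with (from_table, to_table, requested_fields, context, node) when the lazy join is resolved.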
join_table: Table
from_field: str
diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py
index 097e74856f410..4cce926a61350 100644
--- a/posthog/hogql/database/schema/cohort_people.py
+++ b/posthog/hogql/database/schema/cohort_people.py
@@ -15,7 +15,11 @@
"person_id": StringDatabaseField(name="person_id"),
"cohort_id": IntegerDatabaseField(name="cohort_id"),
"team_id": IntegerDatabaseField(name="team_id"),
- "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table),
+ "person": LazyJoin(
+ from_field="person_id",
+ join_table=PersonsTable(),
+ join_function=join_with_persons_table,
+ ),
}
@@ -25,7 +29,11 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str]]):
table_name = "raw_cohort_people"
# must always include the person and cohort ids regardless of what other fields are requested
- requested_fields = {"person_id": ["person_id"], "cohort_id": ["cohort_id"], **requested_fields}
+ requested_fields = {
+ "person_id": ["person_id"],
+ "cohort_id": ["cohort_id"],
+ **requested_fields,
+ }
fields: List[ast.Expr] = [ast.Field(chain=[table_name] + chain) for name, chain in requested_fields.items()]
return ast.SelectQuery(
diff --git a/posthog/hogql/database/schema/event_sessions.py b/posthog/hogql/database/schema/event_sessions.py
new file mode 100644
index 0000000000000..a6951478ada4a
--- /dev/null
+++ b/posthog/hogql/database/schema/event_sessions.py
@@ -0,0 +1,178 @@
+from copy import deepcopy
+from typing import Any, Dict, List, Optional
+from posthog.hogql import ast
+from posthog.hogql.context import HogQLContext
+from posthog.hogql.database.models import (
+ FieldOrTable,
+ IntegerDatabaseField,
+ StringDatabaseField,
+ VirtualTable,
+)
+from posthog.hogql.parser import parse_select
+from posthog.hogql.resolver_utils import get_long_table_name, lookup_field_by_name
+from posthog.hogql.visitor import CloningVisitor, TraversingVisitor
+
+
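+# Virtual "session" table exposed on events: just $session_id plus a derived
+# session_duration, still printed as the plain events table in generated SQL.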
+class EventsSessionSubTable(VirtualTable):
+ fields: Dict[str, FieldOrTable] = {
+ "$session_id": StringDatabaseField(name="$session_id"),
+ "session_duration": IntegerDatabaseField(name="session_duration"),
+ }
+
+ def to_printed_clickhouse(self, context):
+ return "events"
+
+ def to_printed_hogql(self):
+ return "events"
+
+
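+# Collects every ast.Field referenced anywhere within an expression.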
+class GetFieldsTraverser(TraversingVisitor):
+ fields: List[ast.Field]
+
+ def __init__(self, expr: ast.Expr):
+ super().__init__()
+ self.fields = []
+ super().visit(expr)
+
+ def visit_field(self, node: ast.Field):
+ self.fields.append(node)
+
+
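+# Drops the table name/alias from field chains (e.g. ["e", "event"] -> ["event"]) so
+# predicates extracted from the outer query stay valid inside the joined subquery.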
+class CleanTableNameFromChain(CloningVisitor):
+ def __init__(self, table_name: str, select_query_type: ast.SelectQueryType):
+ super().__init__()
+ self.table_name = table_name
+ self.select_query_type = select_query_type
+
+ def visit_field(self, node: ast.Field):
+ if len(node.chain) > 1 and str(node.chain[0]) in self.select_query_type.tables:
+ type = self.select_query_type.tables[str(node.chain[0])]
+
+ name = get_long_table_name(self.select_query_type, type)
+ if name == self.table_name:
+ node.chain.pop(0)
+
+ return super().visit_field(node)
+
+
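+# Extracts the parts of a WHERE clause that only reference the given table, so they
+# can be pushed down into the lazily joined session subquery.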
+class WhereClauseExtractor:
+ compare_operators: List[ast.Expr]
+
+ def __init__(
+ self,
+ where_expression: ast.Expr,
+ from_table_name: str,
+ select_query_type: ast.SelectQueryType,
+ ):
+ self.table_name = from_table_name
+ self.select_query_type = select_query_type
+ self.compare_operators = self.run(deepcopy(where_expression))
+
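+ # True only when every link of the field's chain resolves on the target table.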
+ def _is_field_on_table(self, field: ast.Field) -> bool:
+ if len(field.chain) == 0:
+ return False
+
+ type: Optional[ast.Type] = None
+
+ # If the field contains at least two parts, the first might be a table.
+ if len(field.chain) > 1 and str(field.chain[0]) in self.select_query_type.tables:
+ type = self.select_query_type.tables[str(field.chain[0])]
+
+ name = get_long_table_name(self.select_query_type, type)
+ if name != self.table_name:
+ return False
+
+ # Field in scope
+ if not type:
+ type = lookup_field_by_name(self.select_query_type, str(field.chain[0]))
+
+ if not type:
+ return False
+
+ # Recursively resolve the rest of the chain until we can point to the deepest node.
+ loop_type = type
+ chain_to_parse = field.chain[1:]
+ while True:
+ if isinstance(loop_type, ast.FieldTraverserType):
+ chain_to_parse = loop_type.chain + chain_to_parse
+ loop_type = loop_type.table_type
+ continue
+ if len(chain_to_parse) == 0:
+ break
+ next_chain = chain_to_parse.pop(0)
+ loop_type = loop_type.get_child(str(next_chain))
+ if loop_type is None:
+ return False
+
+ return True
+
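+ # Keep top-level AND-ed comparisons whose fields all belong to this table; OR branches are ignored for now.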
+ def run(self, expr: ast.Expr) -> List[ast.Expr]:
+ exprs_to_apply: List[ast.Expr] = []
+
+ if isinstance(expr, ast.And):
+ for expression in expr.exprs:
+ if not isinstance(expression, ast.CompareOperation):
+ continue
+
+ fields = GetFieldsTraverser(expression).fields
+ res = [self._is_field_on_table(field) for field in fields]
+ if all(res):
+ exprs_to_apply.append(expression)
+ elif isinstance(expr, ast.CompareOperation):
+ exprs_to_apply.extend(self.run(ast.And(exprs=[expr])))
+ elif isinstance(expr, ast.Or):
+ pass # Ignore for now
+
+ # Clone field nodes and remove table name from field chains
+ return [
+ CleanTableNameFromChain(self.table_name, self.select_query_type).visit(
+ CloningVisitor(clear_types=True, clear_locations=True).visit(e)
+ )
+ for e in exprs_to_apply
+ ]
+
+
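+# Lazy-join implementation backing `events.session`: it aggregates a per-$session_id
+# session_duration from events and pushes applicable WHERE predicates from the outer
+# query down into the subquery (so e.g. `SELECT session.session_duration FROM events ...`
+# filters the subquery like the outer query instead of scanning all events).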
+def join_with_events_table_session_duration(
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, Any],
+ context: HogQLContext,
+ node: ast.SelectQuery,
+):
+ select_query = parse_select(
+ """
+ select "$session_id", dateDiff('second', min(timestamp), max(timestamp)) as session_duration
+ from events
+ group by "$session_id"
+ """
+ )
+
+ if isinstance(select_query, ast.SelectQuery):
+ compare_operators = (
+ WhereClauseExtractor(node.where, from_table, node.type).compare_operators
+ if node.where and node.type
+ else []
+ )
+ select_query.where = ast.And(
+ exprs=[
+ *compare_operators,
+ ast.CompareOperation(
+ left=ast.Field(chain=["$session_id"]),
+ op=ast.CompareOperationOp.NotEq,
+ right=ast.Constant(value=""),
+ ),
+ ]
+ )
+
+ join_expr = ast.JoinExpr(table=select_query)
+ join_expr.join_type = "INNER JOIN"
+ join_expr.alias = to_table
+ join_expr.constraint = ast.JoinConstraint(
+ expr=ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Field(chain=[from_table, "$session_id"]),
+ right=ast.Field(chain=[to_table, "$session_id"]),
+ )
+ )
+
+ return join_expr
diff --git a/posthog/hogql/database/schema/events.py b/posthog/hogql/database/schema/events.py
index ba27ff7c5e158..e90142c290c72 100644
--- a/posthog/hogql/database/schema/events.py
+++ b/posthog/hogql/database/schema/events.py
@@ -11,12 +11,19 @@
FieldTraverser,
FieldOrTable,
)
+from posthog.hogql.database.schema.event_sessions import (
+ EventsSessionSubTable,
+ join_with_events_table_session_duration,
+)
from posthog.hogql.database.schema.groups import GroupsTable, join_with_group_n_table
from posthog.hogql.database.schema.person_distinct_ids import (
PersonDistinctIdsTable,
join_with_person_distinct_ids_table,
)
-from posthog.hogql.database.schema.person_overrides import PersonOverridesTable, join_with_person_overrides_table
+from posthog.hogql.database.schema.person_overrides import (
+ PersonOverridesTable,
+ join_with_person_overrides_table,
+)
class EventsPersonSubTable(VirtualTable):
@@ -88,15 +95,40 @@ class EventsTable(Table):
"person": FieldTraverser(chain=["pdi", "person"]),
"person_id": FieldTraverser(chain=["pdi", "person_id"]),
"$group_0": StringDatabaseField(name="$group_0"),
- "group_0": LazyJoin(from_field="$group_0", join_table=GroupsTable(), join_function=join_with_group_n_table(0)),
+ "group_0": LazyJoin(
+ from_field="$group_0",
+ join_table=GroupsTable(),
+ join_function=join_with_group_n_table(0),
+ ),
"$group_1": StringDatabaseField(name="$group_1"),
- "group_1": LazyJoin(from_field="$group_1", join_table=GroupsTable(), join_function=join_with_group_n_table(1)),
+ "group_1": LazyJoin(
+ from_field="$group_1",
+ join_table=GroupsTable(),
+ join_function=join_with_group_n_table(1),
+ ),
"$group_2": StringDatabaseField(name="$group_2"),
- "group_2": LazyJoin(from_field="$group_2", join_table=GroupsTable(), join_function=join_with_group_n_table(2)),
+ "group_2": LazyJoin(
+ from_field="$group_2",
+ join_table=GroupsTable(),
+ join_function=join_with_group_n_table(2),
+ ),
"$group_3": StringDatabaseField(name="$group_3"),
- "group_3": LazyJoin(from_field="$group_3", join_table=GroupsTable(), join_function=join_with_group_n_table(3)),
+ "group_3": LazyJoin(
+ from_field="$group_3",
+ join_table=GroupsTable(),
+ join_function=join_with_group_n_table(3),
+ ),
"$group_4": StringDatabaseField(name="$group_4"),
- "group_4": LazyJoin(from_field="$group_4", join_table=GroupsTable(), join_function=join_with_group_n_table(4)),
+ "group_4": LazyJoin(
+ from_field="$group_4",
+ join_table=GroupsTable(),
+ join_function=join_with_group_n_table(4),
+ ),
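+ # Virtual session table, lazily joined via schema/event_sessions.py.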
+ "session": LazyJoin(
+ from_field="$session_id",
+ join_table=EventsSessionSubTable(),
+ join_function=join_with_events_table_session_duration,
+ ),
}
def to_printed_clickhouse(self, context):
diff --git a/posthog/hogql/database/schema/groups.py b/posthog/hogql/database/schema/groups.py
index 9b3fc1f28c176..39382b246349b 100644
--- a/posthog/hogql/database/schema/groups.py
+++ b/posthog/hogql/database/schema/groups.py
@@ -1,4 +1,6 @@
from typing import Any, Dict, List
+from posthog.hogql.ast import SelectQuery
+from posthog.hogql.context import HogQLContext
from posthog.hogql.database.argmax import argmax_select
from posthog.hogql.database.models import (
@@ -34,7 +36,11 @@ def select_from_groups_table(requested_fields: Dict[str, List[str]]):
def join_with_group_n_table(group_index: int):
def join_with_group_table(
- from_table: str, to_table: str, requested_fields: Dict[str, Any], modifiers: HogQLQueryModifiers
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, Any],
+ context: HogQLContext,
+ node: SelectQuery,
):
from posthog.hogql import ast
@@ -43,7 +49,9 @@ def join_with_group_table(
select_query = select_from_groups_table(requested_fields)
select_query.where = ast.CompareOperation(
- left=ast.Field(chain=["index"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value=group_index)
+ left=ast.Field(chain=["index"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value=group_index),
)
join_expr = ast.JoinExpr(table=select_query)
diff --git a/posthog/hogql/database/schema/person_distinct_ids.py b/posthog/hogql/database/schema/person_distinct_ids.py
index 3765c44673890..d5785bef98c49 100644
--- a/posthog/hogql/database/schema/person_distinct_ids.py
+++ b/posthog/hogql/database/schema/person_distinct_ids.py
@@ -1,4 +1,6 @@
from typing import Dict, List
+from posthog.hogql.ast import SelectQuery
+from posthog.hogql.context import HogQLContext
from posthog.hogql.database.argmax import argmax_select
from posthog.hogql.database.models import (
@@ -18,7 +20,11 @@
"team_id": IntegerDatabaseField(name="team_id"),
"distinct_id": StringDatabaseField(name="distinct_id"),
"person_id": StringDatabaseField(name="person_id"),
- "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table),
+ "person": LazyJoin(
+ from_field="person_id",
+ join_table=PersonsTable(),
+ join_function=join_with_persons_table,
+ ),
}
@@ -36,7 +42,11 @@ def select_from_person_distinct_ids_table(requested_fields: Dict[str, List[str]]
def join_with_person_distinct_ids_table(
- from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, List[str]],
+ context: HogQLContext,
+ node: SelectQuery,
):
from posthog.hogql import ast
diff --git a/posthog/hogql/database/schema/person_overrides.py b/posthog/hogql/database/schema/person_overrides.py
index 9e2e92656867c..800e902d343fe 100644
--- a/posthog/hogql/database/schema/person_overrides.py
+++ b/posthog/hogql/database/schema/person_overrides.py
@@ -1,4 +1,6 @@
from typing import Any, Dict, List
+from posthog.hogql.ast import SelectQuery
+from posthog.hogql.context import HogQLContext
from posthog.hogql.database.argmax import argmax_select
from posthog.hogql.database.models import (
@@ -32,7 +34,11 @@ def select_from_person_overrides_table(requested_fields: Dict[str, List[str]]):
def join_with_person_overrides_table(
- from_table: str, to_table: str, requested_fields: Dict[str, Any], modifiers: HogQLQueryModifiers
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, Any],
+ context: HogQLContext,
+ node: SelectQuery,
):
from posthog.hogql import ast
diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py
index 6df5513f316cf..1a1d79123436d 100644
--- a/posthog/hogql/database/schema/persons.py
+++ b/posthog/hogql/database/schema/persons.py
@@ -1,6 +1,8 @@
from typing import Dict, List
+from posthog.hogql.ast import SelectQuery
from posthog.hogql.constants import HogQLQuerySettings
+from posthog.hogql.context import HogQLContext
from posthog.hogql.database.argmax import argmax_select
from posthog.hogql.database.models import (
Table,
@@ -81,13 +83,17 @@ def select_from_persons_table(requested_fields: Dict[str, List[str]], modifiers:
def join_with_persons_table(
- from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, List[str]],
+ context: HogQLContext,
+ node: SelectQuery,
):
from posthog.hogql import ast
if not requested_fields:
raise HogQLException("No fields requested from persons table")
- join_expr = ast.JoinExpr(table=select_from_persons_table(requested_fields, modifiers))
+ join_expr = ast.JoinExpr(table=select_from_persons_table(requested_fields, context.modifiers))
join_expr.join_type = "INNER JOIN"
join_expr.alias = to_table
join_expr.constraint = ast.JoinConstraint(
diff --git a/posthog/hogql/database/schema/persons_pdi.py b/posthog/hogql/database/schema/persons_pdi.py
index 8f83234b6bed3..9c7fcf9e03e43 100644
--- a/posthog/hogql/database/schema/persons_pdi.py
+++ b/posthog/hogql/database/schema/persons_pdi.py
@@ -1,4 +1,6 @@
from typing import Dict, List
+from posthog.hogql.ast import SelectQuery
+from posthog.hogql.context import HogQLContext
from posthog.hogql.database.argmax import argmax_select
from posthog.hogql.database.models import (
@@ -29,7 +31,11 @@ def persons_pdi_select(requested_fields: Dict[str, List[str]]):
# :NOTE: We already have person_distinct_ids.py, which most tables link to. This persons_pdi.py is a hack to
# make "select persons.pdi.distinct_id from persons" work while avoiding circular imports. Don't use directly.
def persons_pdi_join(
- from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers
+ from_table: str,
+ to_table: str,
+ requested_fields: Dict[str, List[str]],
+ context: HogQLContext,
+ node: SelectQuery,
):
from posthog.hogql import ast
diff --git a/posthog/hogql/database/schema/static_cohort_people.py b/posthog/hogql/database/schema/static_cohort_people.py
index d09a7479f080d..c9737f86c6af9 100644
--- a/posthog/hogql/database/schema/static_cohort_people.py
+++ b/posthog/hogql/database/schema/static_cohort_people.py
@@ -1,6 +1,12 @@
from typing import Dict
-from posthog.hogql.database.models import Table, StringDatabaseField, IntegerDatabaseField, LazyJoin, FieldOrTable
+from posthog.hogql.database.models import (
+ Table,
+ StringDatabaseField,
+ IntegerDatabaseField,
+ LazyJoin,
+ FieldOrTable,
+)
from posthog.hogql.database.schema.persons import PersonsTable, join_with_persons_table
@@ -9,7 +15,11 @@ class StaticCohortPeople(Table):
"person_id": StringDatabaseField(name="person_id"),
"cohort_id": IntegerDatabaseField(name="cohort_id"),
"team_id": IntegerDatabaseField(name="team_id"),
- "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table),
+ "person": LazyJoin(
+ from_field="person_id",
+ join_table=PersonsTable(),
+ join_function=join_with_persons_table,
+ ),
}
def avoid_asterisk_fields(self):
diff --git a/posthog/hogql/database/schema/test/test_event_sessions.py b/posthog/hogql/database/schema/test/test_event_sessions.py
new file mode 100644
index 0000000000000..268180a773e6c
--- /dev/null
+++ b/posthog/hogql/database/schema/test/test_event_sessions.py
@@ -0,0 +1,220 @@
+from typing import List, cast
+from posthog.hogql import ast
+from posthog.hogql.context import HogQLContext
+from posthog.hogql.database.database import create_hogql_database
+from posthog.hogql.database.schema.event_sessions import (
+ CleanTableNameFromChain,
+ WhereClauseExtractor,
+)
+from posthog.hogql.parser import parse_expr, parse_select
+from posthog.hogql.resolver import resolve_types
+from posthog.hogql.visitor import clone_expr
+from posthog.test.base import BaseTest
+
+
+class TestWhereClauseExtractor(BaseTest):
+ def setUp(self):
+ self.database = create_hogql_database(self.team.pk)
+ self.context = HogQLContext(database=self.database, team_id=self.team.pk)
+
+ def _select(self, query: str) -> ast.SelectQuery:
+ select_query = cast(ast.SelectQuery, clone_expr(parse_select(query), clear_locations=True))
+ return cast(ast.SelectQuery, resolve_types(select_query, self.context))
+
+ def _compare_operators(self, query: ast.SelectQuery, table_name: str) -> List[ast.Expr]:
+ assert query.where is not None and query.type is not None
+ return WhereClauseExtractor(query.where, table_name, query.type).compare_operators
+
+ def test_with_simple_equality_clause(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ WHERE event = '$pageview'
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "events")
+
+ assert len(compare_operators) == 1
+ assert compare_operators[0] == ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$pageview"),
+ )
+
+ def test_with_timestamps(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ WHERE timestamp > '2023-01-01'
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "events")
+
+ assert len(compare_operators) == 1
+ assert compare_operators[0] == ast.CompareOperation(
+ left=ast.Field(chain=["timestamp"]),
+ op=ast.CompareOperationOp.Gt,
+ right=ast.Constant(value="2023-01-01"),
+ )
+
+ def test_with_alias_table(self):
+ query = self._select(
+ """
+ SELECT e.event
+ FROM events e
+ WHERE e.event = '$pageview'
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "e")
+
+ assert len(compare_operators) == 1
+ assert compare_operators[0] == ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$pageview"),
+ )
+
+ def test_with_multiple_clauses(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ WHERE event = '$pageview' AND timestamp > '2023-01-01'
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "events")
+
+ assert len(compare_operators) == 2
+ assert compare_operators[0] == ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$pageview"),
+ )
+ assert compare_operators[1] == ast.CompareOperation(
+ left=ast.Field(chain=["timestamp"]),
+ op=ast.CompareOperationOp.Gt,
+ right=ast.Constant(value="2023-01-01"),
+ )
+
+ def test_with_join(self):
+ query = self._select(
+ """
+ SELECT e.event, p.id
+ FROM events e
+ LEFT JOIN persons p
+ ON e.person_id = p.id
+ WHERE e.event = '$pageview' and p.is_identified = 0
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "e")
+
+ assert len(compare_operators) == 1
+ assert compare_operators[0] == ast.CompareOperation(
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="$pageview"),
+ )
+
+ def test_with_ignoring_ors(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ WHERE event = '$pageleave' OR event = '$pageview'
+ """
+ )
+
+ compare_operators = self._compare_operators(query, "events")
+
+ assert len(compare_operators) == 0
+
+
+class TestCleanTableNameFromChain(BaseTest):
+ def setUp(self):
+ self.database = create_hogql_database(self.team.pk)
+ self.context = HogQLContext(database=self.database, team_id=self.team.pk)
+
+ def _select(self, query: str) -> ast.SelectQuery:
+ select_query = cast(ast.SelectQuery, clone_expr(parse_select(query), clear_locations=True))
+ return cast(ast.SelectQuery, resolve_types(select_query, self.context))
+
+ def _clean(self, table_name: str, query: ast.SelectQuery, expr: ast.Expr) -> ast.Expr:
+ assert query.type is not None
+ return CleanTableNameFromChain(table_name, query.type).visit(expr)
+
+ def test_table_with_no_alias(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ """
+ )
+
+ expr = parse_expr('event = "$pageview"')
+ cleaned_expr = cast(ast.CompareOperation, self._clean("events", query, expr))
+ expr_left = cast(ast.Field, cleaned_expr.left)
+
+ assert expr_left.chain == ["event"]
+
+ def test_table_with_alias(self):
+ query = self._select(
+ """
+ SELECT e.event
+ FROM events e
+ """
+ )
+
+ expr = parse_expr('e.event = "$pageview"')
+ cleaned_expr = cast(ast.CompareOperation, self._clean("e", query, expr))
+ expr_left = cast(ast.Field, cleaned_expr.left)
+
+ assert expr_left.chain == ["event"]
+
+ def test_field_with_properties(self):
+ query = self._select(
+ """
+ SELECT event
+ FROM events
+ """
+ )
+
+ expr = parse_expr('properties.$browser = "Chrome"')
+ cleaned_expr = cast(ast.CompareOperation, self._clean("events", query, expr))
+ expr_left = cast(ast.Field, cleaned_expr.left)
+
+ assert expr_left.chain == ["properties", "$browser"]
+
+ def test_table_alias_and_field_with_properties(self):
+ query = self._select(
+ """
+ SELECT e.event
+ FROM events e
+ """
+ )
+
+ expr = parse_expr('e.properties.$browser = "Chrome"')
+ cleaned_expr = cast(ast.CompareOperation, self._clean("e", query, expr))
+ expr_left = cast(ast.Field, cleaned_expr.left)
+
+ assert expr_left.chain == ["properties", "$browser"]
+
+ def test_with_incorrect_alias(self):
+ query = self._select(
+ """
+ SELECT e.event
+ FROM events e
+ """
+ )
+
+ expr = parse_expr('e.event = "$pageview"')
+ cleaned_expr = cast(ast.CompareOperation, self._clean("some_other_alias", query, expr))
+ expr_left = cast(ast.Field, cleaned_expr.left)
+
+ assert expr_left.chain == ["e", "event"]
diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr
index 1a0efafd1a4c6..90bc08c457891 100644
--- a/posthog/hogql/database/test/__snapshots__/test_database.ambr
+++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr
@@ -226,6 +226,15 @@
"updated_at",
"properties"
]
+ },
+ {
+ "key": "session",
+ "type": "lazy_table",
+ "table": "events",
+ "fields": [
+ "$session_id",
+ "session_duration"
+ ]
}
],
"groups": [
@@ -1007,6 +1016,15 @@
"updated_at",
"properties"
]
+ },
+ {
+ "key": "session",
+ "type": "lazy_table",
+ "table": "events",
+ "fields": [
+ "$session_id",
+ "session_duration"
+ ]
}
],
"groups": [
diff --git a/posthog/hogql/database/test/tables.py b/posthog/hogql/database/test/tables.py
index f675f3c8d194d..f3328091791b7 100644
--- a/posthog/hogql/database/test/tables.py
+++ b/posthog/hogql/database/test/tables.py
@@ -1,4 +1,8 @@
-from posthog.hogql.database.models import DateDatabaseField, IntegerDatabaseField, FloatDatabaseField
+from posthog.hogql.database.models import (
+ DateDatabaseField,
+ IntegerDatabaseField,
+ FloatDatabaseField,
+)
from posthog.hogql.database.s3_table import S3Table
from posthog.hogql.database.models import SavedQuery
diff --git a/posthog/hogql/database/test/test_argmax.py b/posthog/hogql/database/test/test_argmax.py
index 535c17cae97ed..8c61ecd4a29c4 100644
--- a/posthog/hogql/database/test/test_argmax.py
+++ b/posthog/hogql/database/test/test_argmax.py
@@ -58,7 +58,10 @@ def test_argmax_select_deleted(self):
op=ast.CompareOperationOp.Eq,
left=ast.Call(
name="argMax",
- args=[ast.Field(chain=["raw_persons", "is_deleted"]), ast.Field(chain=["raw_persons", "version"])],
+ args=[
+ ast.Field(chain=["raw_persons", "is_deleted"]),
+ ast.Field(chain=["raw_persons", "version"]),
+ ],
),
right=ast.Constant(value=0),
),
diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py
index 1ea0583c4e349..16bfeb9e4a392 100644
--- a/posthog/hogql/database/test/test_database.py
+++ b/posthog/hogql/database/test/test_database.py
@@ -51,7 +51,11 @@ def test_database_with_warehouse_tables(self, patch_execute):
team=self.team, access_key="_accesskey", access_secret="_secret"
)
DataWarehouseTable.objects.create(
- name="whatever", team=self.team, columns={"id": "String"}, credential=credential, url_pattern=""
+ name="whatever",
+ team=self.team,
+ columns={"id": "String"},
+ credential=credential,
+ url_pattern="",
)
create_hogql_database(team_id=self.team.pk)
diff --git a/posthog/hogql/database/test/test_s3_table.py b/posthog/hogql/database/test/test_s3_table.py
index 1711aebb688a6..72b5dfa6cf3c0 100644
--- a/posthog/hogql/database/test/test_s3_table.py
+++ b/posthog/hogql/database/test/test_s3_table.py
@@ -27,7 +27,10 @@ def test_s3_table_select(self):
self._init_database()
hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql")
- self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10")
+ self.assertEqual(
+ hogql,
+ "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10",
+ )
clickhouse = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="clickhouse")
@@ -80,7 +83,8 @@ def test_s3_table_select_join_with_alias(self):
dialect="hogql",
)
self.assertEqual(
- hogql, "SELECT a.High, a.Low FROM aapl_stock AS a JOIN aapl_stock AS b ON equals(a.High, b.High) LIMIT 10"
+ hogql,
+ "SELECT a.High, a.Low FROM aapl_stock AS a JOIN aapl_stock AS b ON equals(a.High, b.High) LIMIT 10",
)
clickhouse = self._select(
@@ -180,7 +184,8 @@ def test_s3_table_select_in(self):
self._init_database()
hogql = self._select(
- query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", dialect="hogql"
+ query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)",
+ dialect="hogql",
)
self.assertEqual(
hogql,
@@ -188,7 +193,8 @@ def test_s3_table_select_in(self):
)
clickhouse = self._select(
- query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", dialect="clickhouse"
+ query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)",
+ dialect="clickhouse",
)
self.assertEqual(
diff --git a/posthog/hogql/database/test/test_saved_query.py b/posthog/hogql/database/test/test_saved_query.py
index 5e64f9760fcbf..7c7f534c66f21 100644
--- a/posthog/hogql/database/test/test_saved_query.py
+++ b/posthog/hogql/database/test/test_saved_query.py
@@ -35,7 +35,10 @@ def test_saved_query_table_select(self):
self._init_database()
hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql")
- self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10")
+ self.assertEqual(
+ hogql,
+ "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10",
+ )
clickhouse = self._select(query="SELECT * FROM aapl_stock_view LIMIT 10", dialect="clickhouse")
@@ -48,9 +51,15 @@ def test_saved_query_with_alias(self):
self._init_database()
hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql")
- self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10")
+ self.assertEqual(
+ hogql,
+ "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10",
+ )
- clickhouse = self._select(query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", dialect="clickhouse")
+ clickhouse = self._select(
+ query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10",
+ dialect="clickhouse",
+ )
self.assertEqual(
clickhouse,
diff --git a/posthog/hogql/database/test/test_view.py b/posthog/hogql/database/test/test_view.py
index 3d773314e1f8f..26ce89e10653c 100644
--- a/posthog/hogql/database/test/test_view.py
+++ b/posthog/hogql/database/test/test_view.py
@@ -35,7 +35,10 @@ def test_view_table_select(self):
self._init_database()
hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql")
- self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10")
+ self.assertEqual(
+ hogql,
+ "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10",
+ )
clickhouse = self._select(query="SELECT * FROM aapl_stock_view LIMIT 10", dialect="clickhouse")
@@ -48,9 +51,15 @@ def test_view_with_alias(self):
self._init_database()
hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql")
- self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10")
+ self.assertEqual(
+ hogql,
+ "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10",
+ )
- clickhouse = self._select(query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", dialect="clickhouse")
+ clickhouse = self._select(
+ query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10",
+ dialect="clickhouse",
+ )
self.assertEqual(
clickhouse,
diff --git a/posthog/hogql/errors.py b/posthog/hogql/errors.py
index 5dd36c2bf7143..4035e30eed173 100644
--- a/posthog/hogql/errors.py
+++ b/posthog/hogql/errors.py
@@ -11,7 +11,12 @@ class HogQLException(Exception):
end: Optional[int]
def __init__(
- self, message: str, *, start: Optional[int] = None, end: Optional[int] = None, node: Optional["Expr"] = None
+ self,
+ message: str,
+ *,
+ start: Optional[int] = None,
+ end: Optional[int] = None,
+ node: Optional["Expr"] = None,
):
super().__init__(message)
if node is not None and node.start is not None and node.end is not None:
diff --git a/posthog/hogql/escape_sql.py b/posthog/hogql/escape_sql.py
index 68e326ede1437..d6c9b4bfefd99 100644
--- a/posthog/hogql/escape_sql.py
+++ b/posthog/hogql/escape_sql.py
@@ -54,19 +54,25 @@ def escape_clickhouse_identifier(identifier: str) -> str:
def escape_hogql_string(
- name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, timezone: Optional[str] = None
+ name: float | int | str | list | tuple | date | datetime | UUID | UUIDT,
+ timezone: Optional[str] = None,
) -> str:
return SQLValueEscaper(timezone=timezone, dialect="hogql").visit(name)
def escape_clickhouse_string(
- name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, timezone: Optional[str] = None
+ name: float | int | str | list | tuple | date | datetime | UUID | UUIDT,
+ timezone: Optional[str] = None,
) -> str:
return SQLValueEscaper(timezone=timezone, dialect="clickhouse").visit(name)
class SQLValueEscaper:
- def __init__(self, timezone: Optional[str] = None, dialect: Literal["hogql", "clickhouse"] = "clickhouse"):
+ def __init__(
+ self,
+ timezone: Optional[str] = None,
+ dialect: Literal["hogql", "clickhouse"] = "clickhouse",
+ ):
self._timezone = timezone or "UTC"
self._dialect = dialect
diff --git a/posthog/hogql/filters.py b/posthog/hogql/filters.py
index 61f992ac86688..c900ac1bc5ea6 100644
--- a/posthog/hogql/filters.py
+++ b/posthog/hogql/filters.py
@@ -59,7 +59,12 @@ def visit_placeholder(self, node):
parsed_date = isoparse(dateTo)
except ValueError:
parsed_date = relative_date_parse(dateTo, self.team.timezone_info)
- exprs.append(parse_expr("timestamp < {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}))
+ exprs.append(
+ parse_expr(
+ "timestamp < {timestamp}",
+ {"timestamp": ast.Constant(value=parsed_date)},
+ )
+ )
# limit to the last 30d by default
dateFrom = self.filters.dateRange.date_from if self.filters.dateRange else None
@@ -68,7 +73,12 @@ def visit_placeholder(self, node):
parsed_date = isoparse(dateFrom)
except ValueError:
parsed_date = relative_date_parse(dateFrom, self.team.timezone_info)
- exprs.append(parse_expr("timestamp >= {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}))
+ exprs.append(
+ parse_expr(
+ "timestamp >= {timestamp}",
+ {"timestamp": ast.Constant(value=parsed_date)},
+ )
+ )
if len(exprs) == 0:
return ast.Constant(value=True)
diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py
index 0e4ba1623b28a..8d8fca037f21a 100644
--- a/posthog/hogql/functions/mapping.py
+++ b/posthog/hogql/functions/mapping.py
@@ -144,7 +144,11 @@ class HogQLFunctionMeta:
"toFloat": HogQLFunctionMeta("toFloat64OrNull", 1, 1),
"toDecimal": HogQLFunctionMeta("toDecimal64OrNull", 1, 1),
"toDate": HogQLFunctionMeta(
- "toDateOrNull", 1, 1, overloads=[((ast.DateTimeType, ast.DateType), "toDate")], tz_aware=True
+ "toDateOrNull",
+ 1,
+ 1,
+ overloads=[((ast.DateTimeType, ast.DateType), "toDate")],
+ tz_aware=True,
),
"toDateTime": HogQLFunctionMeta(
"parseDateTime64BestEffortOrNull",
@@ -731,6 +735,17 @@ class HogQLFunctionMeta:
# TODO: Make the below details part of function meta
# Functions where we use a -OrNull variant by default
-ADD_OR_NULL_DATETIME_FUNCTIONS = ("toDateTime", "parseDateTime", "parseDateTimeBestEffort")
+ADD_OR_NULL_DATETIME_FUNCTIONS = (
+ "toDateTime",
+ "parseDateTime",
+ "parseDateTimeBestEffort",
+)
# Functions where the first argument needs to be DateTime and not DateTime64
-FIRST_ARG_DATETIME_FUNCTIONS = ("tumble", "tumbleStart", "tumbleEnd", "hop", "hopStart", "hopEnd")
+FIRST_ARG_DATETIME_FUNCTIONS = (
+ "tumble",
+ "tumbleStart",
+ "tumbleEnd",
+ "hop",
+ "hopStart",
+ "hopEnd",
+)
diff --git a/posthog/hogql/functions/test/test_cohort.py b/posthog/hogql/functions/test/test_cohort.py
index c9adaffbba8a0..f893eea1e5e68 100644
--- a/posthog/hogql/functions/test/test_cohort.py
+++ b/posthog/hogql/functions/test/test_cohort.py
@@ -8,7 +8,12 @@
from posthog.models.cohort.util import recalculate_cohortpeople
from posthog.models.utils import UUIDT
from posthog.schema import HogQLQueryModifiers
-from posthog.test.base import BaseTest, _create_person, _create_event, flush_persons_and_events
+from posthog.test.base import (
+ BaseTest,
+ _create_person,
+ _create_event,
+ flush_persons_and_events,
+)
elements_chain_match = lambda x: parse_expr("match(elements_chain, {regex})", {"regex": ast.Constant(value=str(x))})
not_call = lambda x: ast.Call(name="not", args=[x])
@@ -33,7 +38,8 @@ def _create_random_events(self) -> str:
def test_in_cohort_dynamic(self):
random_uuid = self._create_random_events()
cohort = Cohort.objects.create(
- team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}]
+ team=self.team,
+ groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}],
)
recalculate_cohortpeople(cohort, pending_version=0)
response = execute_hogql_query(
@@ -100,5 +106,8 @@ def test_in_cohort_error(self):
self.assertEqual(str(e.exception), "cohort() takes exactly one string or integer argument")
with self.assertRaises(HogQLException) as e:
- execute_hogql_query(f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", self.team)
+ execute_hogql_query(
+ f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'",
+ self.team,
+ )
self.assertEqual(str(e.exception), "Could not find a cohort with the name 'blabla'")
diff --git a/posthog/hogql/functions/test/test_sparkline.py b/posthog/hogql/functions/test/test_sparkline.py
index febffcf2b8948..2a5c24d90b1af 100644
--- a/posthog/hogql/functions/test/test_sparkline.py
+++ b/posthog/hogql/functions/test/test_sparkline.py
@@ -14,7 +14,10 @@ def test_sparkline(self):
response.hogql,
f"SELECT tuple('__hogql_chart_type', 'sparkline', 'results', [1, 2, 3]) LIMIT 100",
)
- self.assertEqual(response.results[0][0], ("__hogql_chart_type", "sparkline", "results", [1, 2, 3]))
+ self.assertEqual(
+ response.results[0][0],
+ ("__hogql_chart_type", "sparkline", "results", [1, 2, 3]),
+ )
def test_sparkline_error(self):
with self.assertRaises(HogQLException) as e:
diff --git a/posthog/hogql/hogql.py b/posthog/hogql/hogql.py
index 87a2e0ee8f47e..6410bdc6a7d46 100644
--- a/posthog/hogql/hogql.py
+++ b/posthog/hogql/hogql.py
@@ -3,7 +3,11 @@
from posthog.hogql import ast
from posthog.hogql.context import HogQLContext
from posthog.hogql.database.database import create_hogql_database
-from posthog.hogql.errors import HogQLException, NotImplementedException, SyntaxException
+from posthog.hogql.errors import (
+ HogQLException,
+ NotImplementedException,
+ SyntaxException,
+)
from posthog.hogql.parser import parse_expr
from posthog.hogql.printer import prepare_ast_for_printing, print_prepared_ast
@@ -38,7 +42,10 @@ def translate_hogql(
prepare_ast_for_printing(select_query, context=context, dialect=dialect, stack=[select_query]),
)
return print_prepared_ast(
- prepared_select_query.select[0], context=context, dialect=dialect, stack=[prepared_select_query]
+ prepared_select_query.select[0],
+ context=context,
+ dialect=dialect,
+ stack=[prepared_select_query],
)
except (NotImplementedException, SyntaxException):
raise
diff --git a/posthog/hogql/metadata.py b/posthog/hogql/metadata.py
index de044ed2c4743..29a9b11075ab0 100644
--- a/posthog/hogql/metadata.py
+++ b/posthog/hogql/metadata.py
@@ -31,7 +31,9 @@ def get_hogql_metadata(
translate_hogql(query.expr, context=context, table=query.table or "events")
elif isinstance(query.select, str):
context = HogQLContext(
- team_id=team.pk, modifiers=create_default_modifiers_for_team(team), enable_select_queries=True
+ team_id=team.pk,
+ modifiers=create_default_modifiers_for_team(team),
+ enable_select_queries=True,
)
select_ast = parse_select(query.select)
diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py
index cacae5eefec95..d1d8ef2a1b7a7 100644
--- a/posthog/hogql/parser.py
+++ b/posthog/hogql/parser.py
@@ -7,7 +7,11 @@
from posthog.hogql import ast
from posthog.hogql.base import AST
from posthog.hogql.constants import RESERVED_KEYWORDS
-from posthog.hogql.errors import NotImplementedException, HogQLException, SyntaxException
+from posthog.hogql.errors import (
+ NotImplementedException,
+ HogQLException,
+ SyntaxException,
+)
from posthog.hogql.grammar.HogQLLexer import HogQLLexer
from posthog.hogql.grammar.HogQLParser import HogQLParser
from posthog.hogql.parse_string import parse_string, parse_string_literal
@@ -211,7 +215,11 @@ def visitSelectStmt(self, ctx: HogQLParser.SelectStmtContext):
select_query.array_join_list = self.visit(array_join_clause.columnExprList())
for expr in select_query.array_join_list:
if not isinstance(expr, ast.Alias):
- raise SyntaxException("ARRAY JOIN arrays must have an alias", start=expr.start, end=expr.end)
+ raise SyntaxException(
+ "ARRAY JOIN arrays must have an alias",
+ start=expr.start,
+ end=expr.end,
+ )
if ctx.topClause():
raise NotImplementedException(f"Unsupported: SelectStmt.topClause()")
@@ -382,7 +390,8 @@ def visitRatioExpr(self, ctx: HogQLParser.RatioExprContext):
right = number_literals[1] if ctx.SLASH() and len(number_literals) > 1 else None
return ast.RatioExpr(
- left=self.visitNumberLiteral(left), right=self.visitNumberLiteral(right) if right else None
+ left=self.visitNumberLiteral(left),
+ right=self.visitNumberLiteral(right) if right else None,
)
def visitSettingExprList(self, ctx: HogQLParser.SettingExprListContext):
@@ -455,7 +464,11 @@ def visitColumnExprList(self, ctx: HogQLParser.ColumnExprListContext):
def visitColumnExprTernaryOp(self, ctx: HogQLParser.ColumnExprTernaryOpContext):
return ast.Call(
name="if",
- args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1)), self.visit(ctx.columnExpr(2))],
+ args=[
+ self.visit(ctx.columnExpr(0)),
+ self.visit(ctx.columnExpr(1)),
+ self.visit(ctx.columnExpr(2)),
+ ],
)
def visitColumnExprAlias(self, ctx: HogQLParser.ColumnExprAliasContext):
@@ -480,7 +493,9 @@ def visitColumnExprExtract(self, ctx: HogQLParser.ColumnExprExtractContext):
def visitColumnExprNegate(self, ctx: HogQLParser.ColumnExprNegateContext):
return ast.ArithmeticOperation(
- op=ast.ArithmeticOperationOp.Sub, left=ast.Constant(value=0), right=self.visit(ctx.columnExpr())
+ op=ast.ArithmeticOperationOp.Sub,
+ left=ast.Constant(value=0),
+ right=self.visit(ctx.columnExpr()),
)
def visitColumnExprSubquery(self, ctx: HogQLParser.ColumnExprSubqueryContext):
@@ -737,7 +752,8 @@ def visitColumnArgExpr(self, ctx: HogQLParser.ColumnArgExprContext):
def visitColumnLambdaExpr(self, ctx: HogQLParser.ColumnLambdaExprContext):
return ast.Lambda(
- args=[self.visit(identifier) for identifier in ctx.identifier()], expr=self.visit(ctx.columnExpr())
+ args=[self.visit(identifier) for identifier in ctx.identifier()],
+ expr=self.visit(ctx.columnExpr()),
)
def visitWithExprList(self, ctx: HogQLParser.WithExprListContext):
@@ -863,4 +879,7 @@ def visitEnumValue(self, ctx: HogQLParser.EnumValueContext):
raise NotImplementedException(f"Unsupported node: EnumValue")
def visitColumnExprNullish(self, ctx: HogQLParser.ColumnExprNullishContext):
- return ast.Call(name="ifNull", args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1))])
+ return ast.Call(
+ name="ifNull",
+ args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1))],
+ )
diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py
index 21cf3ef978060..fa55d34e586a8 100644
--- a/posthog/hogql/printer.py
+++ b/posthog/hogql/printer.py
@@ -30,7 +30,8 @@
escape_hogql_string,
)
from posthog.hogql.functions.mapping import ALL_EXPOSED_FUNCTION_NAMES, validate_function_args
-from posthog.hogql.resolver import ResolverException, lookup_field_by_name, resolve_types
+from posthog.hogql.resolver import ResolverException, resolve_types
+from posthog.hogql.resolver_utils import lookup_field_by_name
from posthog.hogql.transforms.in_cohort import resolve_in_cohorts
from posthog.hogql.transforms.lazy_tables import resolve_lazy_tables
from posthog.hogql.transforms.property_types import resolve_property_types
@@ -74,7 +75,12 @@ def print_ast(
) -> str:
prepared_ast = prepare_ast_for_printing(node=node, context=context, dialect=dialect, stack=stack, settings=settings)
return print_prepared_ast(
- node=prepared_ast, context=context, dialect=dialect, stack=stack, settings=settings, pretty=pretty
+ node=prepared_ast,
+ context=context,
+ dialect=dialect,
+ stack=stack,
+ settings=settings,
+ pretty=pretty,
)
@@ -121,9 +127,13 @@ def print_prepared_ast(
) -> str:
with context.timings.measure("printer"):
# _Printer also adds a team_id guard if printing clickhouse
- return _Printer(context=context, dialect=dialect, stack=stack or [], settings=settings, pretty=pretty).visit(
- node
- )
+ return _Printer(
+ context=context,
+ dialect=dialect,
+ stack=stack or [],
+ settings=settings,
+ pretty=pretty,
+ ).visit(node)
@dataclass
@@ -238,7 +248,11 @@ def visit_select_query(self, node: ast.SelectQuery):
array_join = ""
if node.array_join_op is not None:
- if node.array_join_op not in ("ARRAY JOIN", "LEFT ARRAY JOIN", "INNER ARRAY JOIN"):
+ if node.array_join_op not in (
+ "ARRAY JOIN",
+ "LEFT ARRAY JOIN",
+ "INNER ARRAY JOIN",
+ ):
raise HogQLException(f"Invalid ARRAY JOIN operation: {node.array_join_op}")
array_join = node.array_join_op
if len(node.array_join_list) == 0:
@@ -266,7 +280,10 @@ def visit_select_query(self, node: ast.SelectQuery):
if isinstance(limit, ast.Constant) and isinstance(limit.value, int):
limit.value = min(limit.value, MAX_SELECT_RETURNED_ROWS)
else:
- limit = ast.Call(name="min2", args=[ast.Constant(value=MAX_SELECT_RETURNED_ROWS), limit])
+ limit = ast.Call(
+ name="min2",
+ args=[ast.Constant(value=MAX_SELECT_RETURNED_ROWS), limit],
+ )
else:
limit = ast.Constant(value=MAX_SELECT_RETURNED_ROWS)
@@ -642,7 +659,11 @@ def visit_call(self, node: ast.Call):
func_meta = HOGQL_AGGREGATIONS[node.name]
validate_function_args(
- node.args, func_meta.min_args, func_meta.max_args, node.name, function_term="aggregation"
+ node.args,
+ func_meta.min_args,
+ func_meta.max_args,
+ node.name,
+ function_term="aggregation",
)
if func_meta.min_params:
if node.params is None:
@@ -678,7 +699,11 @@ def visit_call(self, node: ast.Call):
if node.params is None:
raise HogQLException(f"Function '{node.name}' requires parameters in addition to arguments")
validate_function_args(
- node.params, func_meta.min_params, func_meta.max_params, node.name, argument_term="parameter"
+ node.params,
+ func_meta.min_params,
+ func_meta.max_params,
+ node.name,
+ argument_term="parameter",
)
if self.dialect == "clickhouse":
@@ -724,7 +749,10 @@ def visit_call(self, node: ast.Call):
)
if first_arg_constant_type is not None:
- for overload_types, overload_clickhouse_name in func_meta.overloads:
+ for (
+ overload_types,
+ overload_clickhouse_name,
+ ) in func_meta.overloads:
if isinstance(first_arg_constant_type, overload_types):
relevant_clickhouse_name = overload_clickhouse_name
                             break  # Found an overload matching the first function arg
@@ -801,7 +829,8 @@ def visit_field_type(self, type: ast.FieldType):
return self.visit(
ast.AsteriskType(
table_type=ast.TableAliasType(
- table_type=ast.TableType(table=resolved_field), alias=type.table_type.alias
+ table_type=ast.TableType(table=resolved_field),
+ alias=type.table_type.alias,
)
)
)
diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py
index c0341461e1293..9d619c23175b6 100644
--- a/posthog/hogql/property.py
+++ b/posthog/hogql/property.py
@@ -10,12 +10,24 @@
from posthog.hogql.errors import NotImplementedException
from posthog.hogql.parser import parse_expr
from posthog.hogql.visitor import TraversingVisitor, clone_expr
-from posthog.models import Action, ActionStep, Cohort, Property, Team, PropertyDefinition
+from posthog.models import (
+ Action,
+ ActionStep,
+ Cohort,
+ Property,
+ Team,
+ PropertyDefinition,
+)
from posthog.models.event import Selector
from posthog.models.property import PropertyGroup
from posthog.models.property.util import build_selector_regex
from posthog.models.property_definition import PropertyType
-from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator
+from posthog.schema import (
+ PropertyOperator,
+ PropertyGroupFilter,
+ PropertyGroupFilterValue,
+ FilterLogicalOperator,
+)
def has_aggregation(expr: AST) -> bool:
@@ -116,7 +128,14 @@ def property_to_expr(
else:
exprs = [
property_to_expr(
- Property(type=property.type, key=property.key, operator=property.operator, value=v), team, scope
+ Property(
+ type=property.type,
+ key=property.key,
+ operator=property.operator,
+ value=v,
+ ),
+ team,
+ scope,
)
for v in value
]
@@ -133,12 +152,25 @@ def property_to_expr(
properties_field = ast.Field(chain=chain)
if operator == PropertyOperator.is_set:
- return ast.CompareOperation(op=ast.CompareOperationOp.NotEq, left=field, right=ast.Constant(value=None))
+ return ast.CompareOperation(
+ op=ast.CompareOperationOp.NotEq,
+ left=field,
+ right=ast.Constant(value=None),
+ )
elif operator == PropertyOperator.is_not_set:
return ast.Or(
exprs=[
- ast.CompareOperation(op=ast.CompareOperationOp.Eq, left=field, right=ast.Constant(value=None)),
- ast.Not(expr=ast.Call(name="JSONHas", args=[properties_field, ast.Constant(value=property.key)])),
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq,
+ left=field,
+ right=ast.Constant(value=None),
+ ),
+ ast.Not(
+ expr=ast.Call(
+ name="JSONHas",
+ args=[properties_field, ast.Constant(value=property.key)],
+ )
+ ),
]
)
elif operator == PropertyOperator.icontains:
@@ -156,7 +188,10 @@ def property_to_expr(
elif operator == PropertyOperator.regex:
return ast.Call(name="match", args=[field, ast.Constant(value=value)])
elif operator == PropertyOperator.not_regex:
- return ast.Call(name="not", args=[ast.Call(name="match", args=[field, ast.Constant(value=value)])])
+ return ast.Call(
+ name="not",
+ args=[ast.Call(name="match", args=[field, ast.Constant(value=value)])],
+ )
elif operator == PropertyOperator.exact or operator == PropertyOperator.is_date_exact:
op = ast.CompareOperationOp.Eq
elif operator == PropertyOperator.is_not:
@@ -207,7 +242,14 @@ def property_to_expr(
else:
exprs = [
property_to_expr(
- Property(type=property.type, key=property.key, operator=property.operator, value=v), team, scope
+ Property(
+ type=property.type,
+ key=property.key,
+ operator=property.operator,
+ value=v,
+ ),
+ team,
+ scope,
)
for v in value
]
@@ -287,11 +329,20 @@ def action_to_expr(action: Action) -> ast.Expr:
if step.url:
if step.url_matching == ActionStep.EXACT:
- expr = parse_expr("properties.$current_url = {url}", {"url": ast.Constant(value=step.url)})
+ expr = parse_expr(
+ "properties.$current_url = {url}",
+ {"url": ast.Constant(value=step.url)},
+ )
elif step.url_matching == ActionStep.REGEX:
- expr = parse_expr("properties.$current_url =~ {regex}", {"regex": ast.Constant(value=step.url)})
+ expr = parse_expr(
+ "properties.$current_url =~ {regex}",
+ {"regex": ast.Constant(value=step.url)},
+ )
else:
- expr = parse_expr("properties.$current_url like {url}", {"url": ast.Constant(value=f"%{step.url}%")})
+ expr = parse_expr(
+ "properties.$current_url like {url}",
+ {"url": ast.Constant(value=f"%{step.url}%")},
+ )
exprs.append(expr)
if step.properties:
diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py
index 723476b0ab5e4..697305d0ae964 100644
--- a/posthog/hogql/query.py
+++ b/posthog/hogql/query.py
@@ -8,7 +8,11 @@
from posthog.hogql.modifiers import create_default_modifiers_for_team
from posthog.hogql.parser import parse_select
from posthog.hogql.placeholders import replace_placeholders, find_placeholders
-from posthog.hogql.printer import prepare_ast_for_printing, print_ast, print_prepared_ast
+from posthog.hogql.printer import (
+ prepare_ast_for_printing,
+ print_ast,
+ print_prepared_ast,
+)
from posthog.hogql.filters import replace_filters
from posthog.hogql.timings import HogQLTimings
from posthog.hogql.visitor import clone_expr
@@ -61,7 +65,10 @@ def execute_hogql_query(
select_query = replace_placeholders(select_query, placeholders)
with timings.measure("max_limit"):
- from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS
+ from posthog.hogql.constants import (
+ DEFAULT_RETURNED_ROWS,
+ MAX_SELECT_RETURNED_ROWS,
+ )
select_queries = (
select_query.select_queries if isinstance(select_query, ast.SelectUnionQuery) else [select_query]
@@ -104,7 +111,10 @@ def execute_hogql_query(
else:
print_columns.append(
print_prepared_ast(
- node=node, context=hogql_query_context, dialect="hogql", stack=[select_query_hogql]
+ node=node,
+ context=hogql_query_context,
+ dialect="hogql",
+ stack=[select_query_hogql],
)
)
@@ -117,7 +127,10 @@ def execute_hogql_query(
modifiers=query_modifiers,
)
clickhouse_sql = print_ast(
- select_query, context=clickhouse_context, dialect="clickhouse", settings=settings or HogQLGlobalSettings()
+ select_query,
+ context=clickhouse_context,
+ dialect="clickhouse",
+ settings=settings or HogQLGlobalSettings(),
)
timings_dict = timings.to_dict()
diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py
index 48ea712bb9e13..8d971084aa583 100644
--- a/posthog/hogql/resolver.py
+++ b/posthog/hogql/resolver.py
@@ -6,11 +6,17 @@
from posthog.hogql.ast import FieldTraverserType, ConstantType
from posthog.hogql.functions import HOGQL_POSTHOG_FUNCTIONS, cohort
from posthog.hogql.context import HogQLContext
-from posthog.hogql.database.models import StringJSONDatabaseField, FunctionCallTable, LazyTable, SavedQuery
+from posthog.hogql.database.models import (
+ StringJSONDatabaseField,
+ FunctionCallTable,
+ LazyTable,
+ SavedQuery,
+)
from posthog.hogql.errors import ResolverException
from posthog.hogql.functions.mapping import validate_function_args
from posthog.hogql.functions.sparkline import sparkline
from posthog.hogql.parser import parse_select
+from posthog.hogql.resolver_utils import lookup_cte_by_name, lookup_field_by_name
from posthog.hogql.visitor import CloningVisitor, clone_expr
from posthog.models.utils import UUIDT
from posthog.hogql.database.schema.events import EventsTable
@@ -47,7 +53,9 @@ def resolve_constant_data_type(constant: Any) -> ConstantType:
def resolve_types(
- node: ast.Expr, context: HogQLContext, scopes: Optional[List[ast.SelectQueryType]] = None
+ node: ast.Expr,
+ context: HogQLContext,
+ scopes: Optional[List[ast.SelectQueryType]] = None,
) -> ast.Expr:
return Resolver(scopes=scopes, context=context).visit(node)
@@ -331,7 +339,10 @@ def visit_call(self, node: ast.Call):
else:
param_types.append(ast.UnknownType())
node.type = ast.CallType(
- name=node.name, arg_types=arg_types, param_types=param_types, return_type=ast.UnknownType()
+ name=node.name,
+ arg_types=arg_types,
+ param_types=param_types,
+ return_type=ast.UnknownType(),
)
return node
@@ -453,7 +464,10 @@ def visit_array_access(self, node: ast.ArrayAccess):
(isinstance(node.array.type, ast.PropertyType))
or (
isinstance(node.array.type, ast.FieldType)
- and isinstance(node.array.type.resolve_database_field(), StringJSONDatabaseField)
+ and isinstance(
+ node.array.type.resolve_database_field(),
+ StringJSONDatabaseField,
+ )
)
)
):
@@ -563,30 +577,3 @@ def _is_next_s3(self, node: Optional[ast.JoinExpr]):
if isinstance(node.type, ast.TableAliasType):
return isinstance(node.type.table_type.table, S3Table)
return False
-
-
-def lookup_field_by_name(scope: ast.SelectQueryType, name: str) -> Optional[ast.Type]:
- """Looks for a field in the scope's list of aliases and children for each joined table."""
- if name in scope.aliases:
- return scope.aliases[name]
- else:
- named_tables = [table for table in scope.tables.values() if table.has_child(name)]
- anonymous_tables = [table for table in scope.anonymous_tables if table.has_child(name)]
- tables_with_field = named_tables + anonymous_tables
-
- if len(tables_with_field) > 1:
- raise ResolverException(f"Ambiguous query. Found multiple sources for field: {name}")
- elif len(tables_with_field) == 1:
- return tables_with_field[0].get_child(name)
-
- if scope.parent:
- return lookup_field_by_name(scope.parent, name)
-
- return None
-
-
-def lookup_cte_by_name(scopes: List[ast.SelectQueryType], name: str) -> Optional[ast.CTE]:
- for scope in reversed(scopes):
- if scope and scope.ctes and name in scope.ctes:
- return scope.ctes[name]
- return None
diff --git a/posthog/hogql/resolver_utils.py b/posthog/hogql/resolver_utils.py
new file mode 100644
index 0000000000000..2fb8fd6d814f7
--- /dev/null
+++ b/posthog/hogql/resolver_utils.py
@@ -0,0 +1,47 @@
+from typing import List, Optional
+from posthog.hogql import ast
+from posthog.hogql.errors import HogQLException, ResolverException
+
+
+def lookup_field_by_name(scope: ast.SelectQueryType, name: str) -> Optional[ast.Type]:
+ """Looks for a field in the scope's list of aliases and children for each joined table."""
+ if name in scope.aliases:
+ return scope.aliases[name]
+ else:
+ named_tables = [table for table in scope.tables.values() if table.has_child(name)]
+ anonymous_tables = [table for table in scope.anonymous_tables if table.has_child(name)]
+ tables_with_field = named_tables + anonymous_tables
+
+ if len(tables_with_field) > 1:
+ raise ResolverException(f"Ambiguous query. Found multiple sources for field: {name}")
+ elif len(tables_with_field) == 1:
+ return tables_with_field[0].get_child(name)
+
+ if scope.parent:
+ return lookup_field_by_name(scope.parent, name)
+
+ return None
+
+
+def lookup_cte_by_name(scopes: List[ast.SelectQueryType], name: str) -> Optional[ast.CTE]:
+ for scope in reversed(scopes):
+ if scope and scope.ctes and name in scope.ctes:
+ return scope.ctes[name]
+ return None
+
+
+def get_long_table_name(select: ast.SelectQueryType, type: ast.Type) -> str:
+ if isinstance(type, ast.TableType):
+ return select.get_alias_for_table_type(type) or ""
+ elif isinstance(type, ast.LazyTableType):
+ return type.table.to_printed_hogql()
+ elif isinstance(type, ast.TableAliasType):
+ return type.alias
+ elif isinstance(type, ast.SelectQueryAliasType):
+ return type.alias
+ elif isinstance(type, ast.LazyJoinType):
+ return f"{get_long_table_name(select, type.table_type)}__{type.field}"
+ elif isinstance(type, ast.VirtualTableType):
+ return f"{get_long_table_name(select, type.table_type)}__{type.field}"
+ else:
+ raise HogQLException(f"Unknown table type in LazyTableResolver: {type.__class__.__name__}")
diff --git a/posthog/hogql/test/_test_parser.py b/posthog/hogql/test/_test_parser.py
index 765d4fbaab4de..9b5fa20dcf910 100644
--- a/posthog/hogql/test/_test_parser.py
+++ b/posthog/hogql/test/_test_parser.py
@@ -57,7 +57,9 @@ def test_conditional(self):
name="if",
args=[
ast.CompareOperation(
- op=ast.CompareOperationOp.Gt, left=ast.Constant(value=1), right=ast.Constant(value=2)
+ op=ast.CompareOperationOp.Gt,
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
),
ast.Constant(value=1),
ast.Constant(value=2),
@@ -69,11 +71,15 @@ def test_arrays(self):
self.assertEqual(self._expr("[]"), ast.Array(exprs=[]))
self.assertEqual(self._expr("[1]"), ast.Array(exprs=[ast.Constant(value=1)]))
self.assertEqual(
- self._expr("[1, avg()]"), ast.Array(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])])
+ self._expr("[1, avg()]"),
+ ast.Array(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]),
)
self.assertEqual(
self._expr("properties['value']"),
- ast.ArrayAccess(array=ast.Field(chain=["properties"]), property=ast.Constant(value="value")),
+ ast.ArrayAccess(
+ array=ast.Field(chain=["properties"]),
+ property=ast.Constant(value="value"),
+ ),
)
self.assertEqual(
self._expr("properties[(select 'value')]"),
@@ -98,7 +104,8 @@ def test_arrays(self):
def test_tuples(self):
self.assertEqual(
- self._expr("(1, avg())"), ast.Tuple(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])])
+ self._expr("(1, avg())"),
+ ast.Tuple(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]),
)
# needs at least two values to be a tuple
self.assertEqual(self._expr("(1)"), ast.Constant(value=1))
@@ -165,44 +172,58 @@ def test_arithmetic_operations(self):
self.assertEqual(
self._expr("1 + 2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Add,
),
)
self.assertEqual(
self._expr("1 + -2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=-2), op=ast.ArithmeticOperationOp.Add
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=-2),
+ op=ast.ArithmeticOperationOp.Add,
),
)
self.assertEqual(
self._expr("1 - 2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Sub
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Sub,
),
)
self.assertEqual(
self._expr("1 * 2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mult
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Mult,
),
)
self.assertEqual(
self._expr("1 / 2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Div
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Div,
),
)
self.assertEqual(
self._expr("1 % 2"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mod
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Mod,
),
)
self.assertEqual(
self._expr("1 + 2 + 2"),
ast.ArithmeticOperation(
left=ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Add,
),
right=ast.Constant(value=2),
op=ast.ArithmeticOperationOp.Add,
@@ -212,7 +233,9 @@ def test_arithmetic_operations(self):
self._expr("1 * 1 * 2"),
ast.ArithmeticOperation(
left=ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Mult
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=1),
+ op=ast.ArithmeticOperationOp.Mult,
),
right=ast.Constant(value=2),
op=ast.ArithmeticOperationOp.Mult,
@@ -223,7 +246,9 @@ def test_arithmetic_operations(self):
ast.ArithmeticOperation(
left=ast.Constant(value=1),
right=ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mult
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.ArithmeticOperationOp.Mult,
),
op=ast.ArithmeticOperationOp.Add,
),
@@ -232,7 +257,9 @@ def test_arithmetic_operations(self):
self._expr("1 * 1 + 2"),
ast.ArithmeticOperation(
left=ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Mult
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=1),
+ op=ast.ArithmeticOperationOp.Mult,
),
right=ast.Constant(value=2),
op=ast.ArithmeticOperationOp.Add,
@@ -243,43 +270,57 @@ def test_math_comparison_operations(self):
self.assertEqual(
self._expr("1 = 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Eq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.Eq,
),
)
self.assertEqual(
self._expr("1 == 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Eq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.Eq,
),
)
self.assertEqual(
self._expr("1 != 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.NotEq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.NotEq,
),
)
self.assertEqual(
self._expr("1 < 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Lt
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.Lt,
),
)
self.assertEqual(
self._expr("1 <= 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.LtEq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.LtEq,
),
)
self.assertEqual(
self._expr("1 > 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Gt
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.Gt,
),
)
self.assertEqual(
self._expr("1 >= 2"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.GtEq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
+ op=ast.CompareOperationOp.GtEq,
),
)
@@ -287,13 +328,17 @@ def test_null_comparison_operations(self):
self.assertEqual(
self._expr("1 is null"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=None), op=ast.CompareOperationOp.Eq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=None),
+ op=ast.CompareOperationOp.Eq,
),
)
self.assertEqual(
self._expr("1 is not null"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=None), op=ast.CompareOperationOp.NotEq
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=None),
+ op=ast.CompareOperationOp.NotEq,
),
)
@@ -301,25 +346,33 @@ def test_like_comparison_operations(self):
self.assertEqual(
self._expr("1 like 'a%sd'"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.Like
+ left=ast.Constant(value=1),
+ right=ast.Constant(value="a%sd"),
+ op=ast.CompareOperationOp.Like,
),
)
self.assertEqual(
self._expr("1 not like 'a%sd'"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.NotLike
+ left=ast.Constant(value=1),
+ right=ast.Constant(value="a%sd"),
+ op=ast.CompareOperationOp.NotLike,
),
)
self.assertEqual(
self._expr("1 ilike 'a%sd'"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.ILike
+ left=ast.Constant(value=1),
+ right=ast.Constant(value="a%sd"),
+ op=ast.CompareOperationOp.ILike,
),
)
self.assertEqual(
self._expr("1 not ilike 'a%sd'"),
ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.NotILike
+ left=ast.Constant(value=1),
+ right=ast.Constant(value="a%sd"),
+ op=ast.CompareOperationOp.NotILike,
),
)
@@ -335,7 +388,10 @@ def test_and_or(self):
self.assertEqual(
self._expr("true and not false"),
ast.And(
- exprs=[ast.Constant(value=True), ast.Not(expr=ast.Constant(value=False))],
+ exprs=[
+ ast.Constant(value=True),
+ ast.Not(expr=ast.Constant(value=False)),
+ ],
),
)
self.assertEqual(
@@ -355,7 +411,10 @@ def test_and_or(self):
exprs=[
ast.Constant(value=True),
ast.And(
- exprs=[ast.Constant(value=False), ast.Not(expr=ast.Constant(value=True))],
+ exprs=[
+ ast.Constant(value=False),
+ ast.Not(expr=ast.Constant(value=True)),
+ ],
),
ast.Constant(value=2),
],
@@ -376,7 +435,9 @@ def test_parens(self):
self.assertEqual(
self._expr("(1 + 1)"),
ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Add
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=1),
+ op=ast.ArithmeticOperationOp.Add,
),
)
self.assertEqual(
@@ -384,7 +445,9 @@ def test_parens(self):
ast.ArithmeticOperation(
left=ast.Constant(value=1),
right=ast.ArithmeticOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Add
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=1),
+ op=ast.ArithmeticOperationOp.Add,
),
op=ast.ArithmeticOperationOp.Add,
),
@@ -398,7 +461,9 @@ def test_field_access(self):
self.assertEqual(
self._expr("event like '$%'"),
ast.CompareOperation(
- left=ast.Field(chain=["event"]), right=ast.Constant(value="$%"), op=ast.CompareOperationOp.Like
+ left=ast.Field(chain=["event"]),
+ right=ast.Constant(value="$%"),
+ op=ast.CompareOperationOp.Like,
),
)
@@ -435,13 +500,24 @@ def test_calls(self):
)
self.assertEqual(
self._expr("avg(1,2,3)"),
- ast.Call(name="avg", args=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]),
+ ast.Call(
+ name="avg",
+ args=[
+ ast.Constant(value=1),
+ ast.Constant(value=2),
+ ast.Constant(value=3),
+ ],
+ ),
)
def test_calls_with_params(self):
self.assertEqual(
self._expr("quantile(0.95)(foo)"),
- ast.Call(name="quantile", args=[ast.Field(chain=["foo"])], params=[ast.Constant(value=0.95)]),
+ ast.Call(
+ name="quantile",
+ args=[ast.Field(chain=["foo"])],
+ params=[ast.Constant(value=0.95)],
+ ),
)
def test_alias(self):
@@ -513,15 +589,25 @@ def test_intervals(self):
)
def test_select_columns(self):
- self.assertEqual(self._select("select 1"), ast.SelectQuery(select=[ast.Constant(value=1)]))
+ self.assertEqual(
+ self._select("select 1"),
+ ast.SelectQuery(select=[ast.Constant(value=1)]),
+ )
self.assertEqual(
self._select("select 1, 4, 'string'"),
- ast.SelectQuery(select=[ast.Constant(value=1), ast.Constant(value=4), ast.Constant(value="string")]),
+ ast.SelectQuery(
+ select=[
+ ast.Constant(value=1),
+ ast.Constant(value=4),
+ ast.Constant(value="string"),
+ ]
+ ),
)
def test_select_columns_distinct(self):
self.assertEqual(
- self._select("select distinct 1"), ast.SelectQuery(select=[ast.Constant(value=1)], distinct=True)
+ self._select("select distinct 1"),
+ ast.SelectQuery(select=[ast.Constant(value=1)], distinct=True),
)
def test_select_where(self):
@@ -534,7 +620,9 @@ def test_select_where(self):
ast.SelectQuery(
select=[ast.Constant(value=1)],
where=ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2)
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
),
),
)
@@ -549,7 +637,9 @@ def test_select_prewhere(self):
ast.SelectQuery(
select=[ast.Constant(value=1)],
prewhere=ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2)
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
),
),
)
@@ -564,7 +654,9 @@ def test_select_having(self):
ast.SelectQuery(
select=[ast.Constant(value=1)],
having=ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2)
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
),
),
)
@@ -575,10 +667,14 @@ def test_select_complex_wheres(self):
ast.SelectQuery(
select=[ast.Constant(value=1)],
where=ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2)
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=2),
),
prewhere=ast.CompareOperation(
- op=ast.CompareOperationOp.NotEq, left=ast.Constant(value=2), right=ast.Constant(value=3)
+ op=ast.CompareOperationOp.NotEq,
+ left=ast.Constant(value=2),
+ right=ast.Constant(value=3),
),
having=ast.CompareOperation(
op=ast.CompareOperationOp.Like,
@@ -592,7 +688,8 @@ def test_select_from(self):
self.assertEqual(
self._select("select 1 from events"),
ast.SelectQuery(
- select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"]))
+ select=[ast.Constant(value=1)],
+ select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
),
)
self.assertEqual(
@@ -636,7 +733,8 @@ def test_select_from(self):
select=[ast.Constant(value=1)],
select_from=ast.JoinExpr(
table=ast.SelectQuery(
- select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"]))
+ select=[ast.Constant(value=1)],
+ select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
)
),
),
@@ -647,7 +745,8 @@ def test_select_from(self):
select=[ast.Constant(value=1)],
select_from=ast.JoinExpr(
table=ast.SelectQuery(
- select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"]))
+ select=[ast.Constant(value=1)],
+ select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
),
alias="sq",
),
@@ -663,7 +762,10 @@ def test_select_from_placeholder(self):
),
)
self.assertEqual(
- self._select("select 1 from {placeholder}", {"placeholder": ast.Field(chain=["events"])}),
+ self._select(
+ "select 1 from {placeholder}",
+ {"placeholder": ast.Field(chain=["events"])},
+ ),
ast.SelectQuery(
select=[ast.Constant(value=1)],
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
@@ -830,7 +932,13 @@ def test_select_array_join(self):
array_join_op="ARRAY JOIN",
array_join_list=[
ast.Alias(
- expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]),
+ expr=ast.Array(
+ exprs=[
+ ast.Constant(value=1),
+ ast.Constant(value=2),
+ ast.Constant(value=3),
+ ]
+ ),
alias="a",
)
],
@@ -844,7 +952,13 @@ def test_select_array_join(self):
array_join_op="INNER ARRAY JOIN",
array_join_list=[
ast.Alias(
- expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]),
+ expr=ast.Array(
+ exprs=[
+ ast.Constant(value=1),
+ ast.Constant(value=2),
+ ast.Constant(value=3),
+ ]
+ ),
alias="a",
)
],
@@ -858,11 +972,23 @@ def test_select_array_join(self):
array_join_op="LEFT ARRAY JOIN",
array_join_list=[
ast.Alias(
- expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]),
+ expr=ast.Array(
+ exprs=[
+ ast.Constant(value=1),
+ ast.Constant(value=2),
+ ast.Constant(value=3),
+ ]
+ ),
alias="a",
),
ast.Alias(
- expr=ast.Array(exprs=[ast.Constant(value=4), ast.Constant(value=5), ast.Constant(value=6)]),
+ expr=ast.Array(
+ exprs=[
+ ast.Constant(value=4),
+ ast.Constant(value=5),
+ ast.Constant(value=6),
+ ]
+ ),
alias="b",
),
],
@@ -878,7 +1004,10 @@ def test_select_array_join_errors(self):
with self.assertRaises(HogQLException) as e:
self._select("select a ARRAY JOIN [1,2,3]")
- self.assertEqual(str(e.exception), "Using ARRAY JOIN without a FROM clause is not permitted")
+ self.assertEqual(
+ str(e.exception),
+ "Using ARRAY JOIN without a FROM clause is not permitted",
+ )
self.assertEqual(e.exception.start, 0)
self.assertEqual(e.exception.end, 27)
@@ -895,15 +1024,30 @@ def test_select_group_by(self):
def test_order_by(self):
self.assertEqual(
parse_order_expr("1 ASC"),
- ast.OrderExpr(expr=ast.Constant(value=1, start=0, end=1), order="ASC", start=0, end=5),
+ ast.OrderExpr(
+ expr=ast.Constant(value=1, start=0, end=1),
+ order="ASC",
+ start=0,
+ end=5,
+ ),
)
self.assertEqual(
parse_order_expr("event"),
- ast.OrderExpr(expr=ast.Field(chain=["event"], start=0, end=5), order="ASC", start=0, end=5),
+ ast.OrderExpr(
+ expr=ast.Field(chain=["event"], start=0, end=5),
+ order="ASC",
+ start=0,
+ end=5,
+ ),
)
self.assertEqual(
parse_order_expr("timestamp DESC"),
- ast.OrderExpr(expr=ast.Field(chain=["timestamp"], start=0, end=9), order="DESC", start=0, end=14),
+ ast.OrderExpr(
+ expr=ast.Field(chain=["timestamp"], start=0, end=9),
+ order="DESC",
+ start=0,
+ end=14,
+ ),
)
def test_select_order_by(self):
@@ -993,7 +1137,10 @@ def test_select_placeholders(self):
),
)
self.assertEqual(
- self._select("select 1 where 1 == {hogql_val_1}", {"hogql_val_1": ast.Constant(value="bar")}),
+ self._select(
+ "select 1 where 1 == {hogql_val_1}",
+ {"hogql_val_1": ast.Constant(value="bar")},
+ ),
ast.SelectQuery(
select=[ast.Constant(value=1)],
where=ast.CompareOperation(
@@ -1082,7 +1229,13 @@ def test_select_with_columns(self):
self.assertEqual(
self._select("with event as boo select boo from events"),
ast.SelectQuery(
- ctes={"boo": ast.CTE(name="boo", expr=ast.Field(chain=["event"]), cte_type="column")},
+ ctes={
+ "boo": ast.CTE(
+ name="boo",
+ expr=ast.Field(chain=["event"]),
+ cte_type="column",
+ )
+ },
select=[ast.Field(chain=["boo"])],
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
),
@@ -1090,7 +1243,13 @@ def test_select_with_columns(self):
self.assertEqual(
self._select("with count() as kokku select kokku from events"),
ast.SelectQuery(
- ctes={"kokku": ast.CTE(name="kokku", expr=ast.Call(name="count", args=[]), cte_type="column")},
+ ctes={
+ "kokku": ast.CTE(
+ name="kokku",
+ expr=ast.Call(name="count", args=[]),
+ cte_type="column",
+ )
+ },
select=[ast.Field(chain=["kokku"])],
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
),
@@ -1169,7 +1328,14 @@ def test_ctes_subquery_recursion(self):
def test_case_when(self):
self.assertEqual(
self._expr("case when 1 then 2 else 3 end"),
- ast.Call(name="if", args=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]),
+ ast.Call(
+ name="if",
+ args=[
+ ast.Constant(value=1),
+ ast.Constant(value=2),
+ ast.Constant(value=3),
+ ],
+ ),
)
def test_case_when_many(self):
@@ -1214,7 +1380,12 @@ def test_window_functions(self):
args=[ast.Field(chain=["timestamp"])],
over_expr=ast.WindowExpr(
partition_by=[ast.Field(chain=["person", "id"])],
- order_by=[ast.OrderExpr(expr=ast.Field(chain=["timestamp"]), order="DESC")],
+ order_by=[
+ ast.OrderExpr(
+ expr=ast.Field(chain=["timestamp"]),
+ order="DESC",
+ )
+ ],
frame_method="ROWS",
frame_start=ast.WindowFrameExpr(frame_type="PRECEDING", frame_value=None),
frame_end=ast.WindowFrameExpr(frame_type="PRECEDING", frame_value=1),
@@ -1257,7 +1428,8 @@ def test_window_functions_with_window(self):
def test_property_access_with_arrays_zero_index_error(self):
query = f"SELECT properties.something[0] FROM events"
with self.assertRaisesMessage(
- SyntaxException, "SQL indexes start from one, not from zero. E.g: array[1]"
+ SyntaxException,
+ "SQL indexes start from one, not from zero. E.g: array[1]",
) as e:
self._select(query)
self.assertEqual(e.exception.start, 7)
@@ -1266,7 +1438,8 @@ def test_property_access_with_arrays_zero_index_error(self):
def test_property_access_with_tuples_zero_index_error(self):
query = f"SELECT properties.something.0 FROM events"
with self.assertRaisesMessage(
- SyntaxException, "SQL indexes start from one, not from zero. E.g: array[1]"
+ SyntaxException,
+ "SQL indexes start from one, not from zero. E.g: array[1]",
) as e:
self._select(query)
self.assertEqual(e.exception.start, 7)
@@ -1275,7 +1448,8 @@ def test_property_access_with_tuples_zero_index_error(self):
def test_reserved_keyword_alias_error(self):
query = f"SELECT 0 AS trUE FROM events"
with self.assertRaisesMessage(
- SyntaxException, '"trUE" cannot be an alias or identifier, as it\'s a reserved keyword'
+ SyntaxException,
+ '"trUE" cannot be an alias or identifier, as it\'s a reserved keyword',
) as e:
self._select(query)
self.assertEqual(e.exception.start, 7)
@@ -1284,7 +1458,8 @@ def test_reserved_keyword_alias_error(self):
def test_malformed_sql(self):
query = "SELEC 2"
with self.assertRaisesMessage(
- SyntaxException, "mismatched input 'SELEC' expecting {SELECT, WITH, '('}"
+ SyntaxException,
+ "mismatched input 'SELEC' expecting {SELECT, WITH, '('}",
) as e:
self._select(query)
self.assertEqual(e.exception.start, 0)
diff --git a/posthog/hogql/test/test_bytecode.py b/posthog/hogql/test/test_bytecode.py
index bbd90608fe0c6..7fee12533d6da 100644
--- a/posthog/hogql/test/test_bytecode.py
+++ b/posthog/hogql/test/test_bytecode.py
@@ -11,20 +11,54 @@ def test_bytecode_create(self):
self.assertEqual(to_bytecode("1 or 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2])
self.assertEqual(
to_bytecode("1 or (2 and 1) or 2"),
- [_H, op.INTEGER, 2, op.INTEGER, 1, op.INTEGER, 2, op.AND, 2, op.INTEGER, 1, op.OR, 3],
+ [
+ _H,
+ op.INTEGER,
+ 2,
+ op.INTEGER,
+ 1,
+ op.INTEGER,
+ 2,
+ op.AND,
+ 2,
+ op.INTEGER,
+ 1,
+ op.OR,
+ 3,
+ ],
)
self.assertEqual(
to_bytecode("(1 or 2) and (1 or 2)"),
- [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2, op.AND, 2],
+ [
+ _H,
+ op.INTEGER,
+ 2,
+ op.INTEGER,
+ 1,
+ op.OR,
+ 2,
+ op.INTEGER,
+ 2,
+ op.INTEGER,
+ 1,
+ op.OR,
+ 2,
+ op.AND,
+ 2,
+ ],
)
self.assertEqual(to_bytecode("not true"), [_H, op.TRUE, op.NOT])
self.assertEqual(to_bytecode("true"), [_H, op.TRUE])
self.assertEqual(to_bytecode("false"), [_H, op.FALSE])
self.assertEqual(to_bytecode("null"), [_H, op.NULL])
self.assertEqual(to_bytecode("3.14"), [_H, op.FLOAT, 3.14])
- self.assertEqual(to_bytecode("properties.bla"), [_H, op.STRING, "bla", op.STRING, "properties", op.FIELD, 2])
self.assertEqual(
- to_bytecode("concat('arg', 'another')"), [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "concat", 2]
+ to_bytecode("properties.bla"),
+ [_H, op.STRING, "bla", op.STRING, "properties", op.FIELD, 2],
+ )
+ self.assertEqual(
+ to_bytecode("concat('arg', 'another')"),
+ [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "concat", 2],
)
self.assertEqual(to_bytecode("1 = 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ])
self.assertEqual(to_bytecode("1 == 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ])
@@ -36,32 +70,58 @@ def test_bytecode_create(self):
self.assertEqual(to_bytecode("1 like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.LIKE])
self.assertEqual(to_bytecode("1 ilike 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.ILIKE])
self.assertEqual(to_bytecode("1 not like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_LIKE])
- self.assertEqual(to_bytecode("1 not ilike 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE])
+ self.assertEqual(
+ to_bytecode("1 not ilike 2"),
+ [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE],
+ )
self.assertEqual(to_bytecode("1 in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.IN])
self.assertEqual(to_bytecode("1 not in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_IN])
- self.assertEqual(to_bytecode("'string' ~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.REGEX])
- self.assertEqual(to_bytecode("'string' =~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.REGEX])
self.assertEqual(
- to_bytecode("'string' !~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX]
+ to_bytecode("'string' ~ 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.REGEX],
+ )
+ self.assertEqual(
+ to_bytecode("'string' =~ 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.REGEX],
+ )
+ self.assertEqual(
+ to_bytecode("'string' !~ 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX],
)
- self.assertEqual(to_bytecode("'string' ~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX])
- self.assertEqual(to_bytecode("'string' =~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX])
self.assertEqual(
- to_bytecode("'string' !~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.NOT_IREGEX]
+ to_bytecode("'string' ~* 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX],
)
self.assertEqual(
- to_bytecode("match('test', 'e.*')"), [_H, op.STRING, "e.*", op.STRING, "test", op.CALL, "match", 2]
+ to_bytecode("'string' =~* 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX],
)
self.assertEqual(
- to_bytecode("match('test', '^e.*')"), [_H, op.STRING, "^e.*", op.STRING, "test", op.CALL, "match", 2]
+ to_bytecode("'string' !~* 'regex'"),
+ [_H, op.STRING, "regex", op.STRING, "string", op.NOT_IREGEX],
)
self.assertEqual(
- to_bytecode("match('test', 'x.*')"), [_H, op.STRING, "x.*", op.STRING, "test", op.CALL, "match", 2]
+ to_bytecode("match('test', 'e.*')"),
+ [_H, op.STRING, "e.*", op.STRING, "test", op.CALL, "match", 2],
+ )
+ self.assertEqual(
+ to_bytecode("match('test', '^e.*')"),
+ [_H, op.STRING, "^e.*", op.STRING, "test", op.CALL, "match", 2],
+ )
+ self.assertEqual(
+ to_bytecode("match('test', 'x.*')"),
+ [_H, op.STRING, "x.*", op.STRING, "test", op.CALL, "match", 2],
)
self.assertEqual(to_bytecode("not('test')"), [_H, op.STRING, "test", op.NOT])
self.assertEqual(to_bytecode("not 'test'"), [_H, op.STRING, "test", op.NOT])
- self.assertEqual(to_bytecode("or('test', 'test2')"), [_H, op.STRING, "test2", op.STRING, "test", op.OR, 2])
- self.assertEqual(to_bytecode("and('test', 'test2')"), [_H, op.STRING, "test2", op.STRING, "test", op.AND, 2])
+ self.assertEqual(
+ to_bytecode("or('test', 'test2')"),
+ [_H, op.STRING, "test2", op.STRING, "test", op.OR, 2],
+ )
+ self.assertEqual(
+ to_bytecode("and('test', 'test2')"),
+ [_H, op.STRING, "test2", op.STRING, "test", op.AND, 2],
+ )
def test_bytecode_create_error(self):
with self.assertRaises(NotImplementedException) as e:
diff --git a/posthog/hogql/test/test_escape_sql.py b/posthog/hogql/test/test_escape_sql.py
index 8f541f05aab40..0e24d8d8116f5 100644
--- a/posthog/hogql/test/test_escape_sql.py
+++ b/posthog/hogql/test/test_escape_sql.py
@@ -65,7 +65,10 @@ def test_sanitize_clickhouse_string(self):
uuid = UUIDT()
self.assertEqual(escape_clickhouse_string(uuid), f"toUUIDOrNull('{str(uuid)}')")
date = datetime.fromisoformat("2020-02-02 02:02:02")
- self.assertEqual(escape_clickhouse_string(date), "toDateTime64('2020-02-02 02:02:02.000000', 6, 'UTC')")
+ self.assertEqual(
+ escape_clickhouse_string(date),
+ "toDateTime64('2020-02-02 02:02:02.000000', 6, 'UTC')",
+ )
self.assertEqual(
escape_clickhouse_string(date, timezone="Europe/Brussels"),
"toDateTime64('2020-02-02 03:02:02.000000', 6, 'Europe/Brussels')",
@@ -80,7 +83,10 @@ def test_sanitize_clickhouse_string(self):
self.assertEqual(escape_clickhouse_string(float("123.123")), "123.123")
self.assertEqual(escape_clickhouse_string(float("-123.123")), "-123.123")
self.assertEqual(escape_clickhouse_string(float("0.000000000000000001")), "1e-18")
- self.assertEqual(escape_clickhouse_string(float("234732482374928374923")), "2.3473248237492837e+20")
+ self.assertEqual(
+ escape_clickhouse_string(float("234732482374928374923")),
+ "2.3473248237492837e+20",
+ )
def test_sanitize_hogql_string(self):
self.assertEqual(escape_hogql_string("a"), "'a'")
@@ -101,7 +107,8 @@ def test_sanitize_hogql_string(self):
date = datetime.fromisoformat("2020-02-02 02:02:02")
self.assertEqual(escape_hogql_string(date), "toDateTime('2020-02-02 02:02:02.000000')")
self.assertEqual(
- escape_hogql_string(date, timezone="Europe/Brussels"), "toDateTime('2020-02-02 03:02:02.000000')"
+ escape_hogql_string(date, timezone="Europe/Brussels"),
+ "toDateTime('2020-02-02 03:02:02.000000')",
)
self.assertEqual(escape_hogql_string(date.date()), "toDate('2020-02-02')")
self.assertEqual(escape_hogql_string(1), "1")
@@ -113,7 +120,10 @@ def test_sanitize_hogql_string(self):
self.assertEqual(escape_hogql_string(float("123.123")), "123.123")
self.assertEqual(escape_hogql_string(float("-123.123")), "-123.123")
self.assertEqual(escape_hogql_string(float("0.000000000000000001")), "1e-18")
- self.assertEqual(escape_hogql_string(float("234732482374928374923")), "2.3473248237492837e+20")
+ self.assertEqual(
+ escape_hogql_string(float("234732482374928374923")),
+ "2.3473248237492837e+20",
+ )
def test_escape_hogql_identifier_errors(self):
with self.assertRaises(HogQLException) as context:
diff --git a/posthog/hogql/test/test_filters.py b/posthog/hogql/test/test_filters.py
index b7c20e67e4f7e..98b319bb31694 100644
--- a/posthog/hogql/test/test_filters.py
+++ b/posthog/hogql/test/test_filters.py
@@ -6,7 +6,12 @@
from posthog.hogql.parser import parse_expr, parse_select
from posthog.hogql.printer import print_ast
from posthog.hogql.visitor import clear_locations
-from posthog.schema import HogQLFilters, EventPropertyFilter, PersonPropertyFilter, DateRange
+from posthog.schema import (
+ HogQLFilters,
+ EventPropertyFilter,
+ PersonPropertyFilter,
+ DateRange,
+)
from posthog.test.base import BaseTest
@@ -20,14 +25,20 @@ def _parse_select(self, select: str, placeholders: Dict[str, Any] = None):
return clear_locations(parse_select(select, placeholders=placeholders))
def _print_ast(self, node: ast.Expr):
- return print_ast(node, dialect="hogql", context=HogQLContext(team_id=self.team.pk, enable_select_queries=True))
+ return print_ast(
+ node,
+ dialect="hogql",
+ context=HogQLContext(team_id=self.team.pk, enable_select_queries=True),
+ )
def test_replace_filters(self):
select = replace_filters(self._parse_select("SELECT event FROM events"), HogQLFilters(), self.team)
self.assertEqual(self._print_ast(select), "SELECT event FROM events LIMIT 10000")
select = replace_filters(
- self._parse_select("SELECT event FROM events where {filters}"), HogQLFilters(), self.team
+ self._parse_select("SELECT event FROM events where {filters}"),
+ HogQLFilters(),
+ self.team,
)
self.assertEqual(self._print_ast(select), "SELECT event FROM events WHERE true LIMIT 10000")
@@ -59,7 +70,8 @@ def test_replace_filters(self):
self.team,
)
self.assertEqual(
- self._print_ast(select), "SELECT event FROM events WHERE equals(properties.random_uuid, '123') LIMIT 10000"
+ self._print_ast(select),
+ "SELECT event FROM events WHERE equals(properties.random_uuid, '123') LIMIT 10000",
)
select = replace_filters(
diff --git a/posthog/hogql/test/test_metadata.py b/posthog/hogql/test/test_metadata.py
index 46f9e13cc04f3..fe440243e909d 100644
--- a/posthog/hogql/test/test_metadata.py
+++ b/posthog/hogql/test/test_metadata.py
@@ -9,12 +9,14 @@ class TestMetadata(ClickhouseTestMixin, APIBaseTest):
def _expr(self, query: str, table: str = "events") -> HogQLMetadataResponse:
return get_hogql_metadata(
- query=HogQLMetadata(kind="HogQLMetadata", expr=query, table=table, response=None), team=self.team
+ query=HogQLMetadata(kind="HogQLMetadata", expr=query, table=table, response=None),
+ team=self.team,
)
def _select(self, query: str) -> HogQLMetadataResponse:
return get_hogql_metadata(
- query=HogQLMetadata(kind="HogQLMetadata", select=query, response=None), team=self.team
+ query=HogQLMetadata(kind="HogQLMetadata", select=query, response=None),
+ team=self.team,
)
def test_metadata_valid_expr_select(self):
@@ -26,7 +28,14 @@ def test_metadata_valid_expr_select(self):
"isValid": False,
"inputExpr": "select 1",
"inputSelect": None,
- "errors": [{"message": "extraneous input '1' expecting
", "start": 7, "end": 8, "fix": None}],
+ "errors": [
+ {
+ "message": "extraneous input '1' expecting ",
+ "start": 7,
+ "end": 8,
+ "fix": None,
+ }
+ ],
},
)
diff --git a/posthog/hogql/test/test_modifiers.py b/posthog/hogql/test/test_modifiers.py
index b876d7ada529d..ba5ed58e84882 100644
--- a/posthog/hogql/test/test_modifiers.py
+++ b/posthog/hogql/test/test_modifiers.py
@@ -13,11 +13,13 @@ def test_create_default_modifiers_for_team_init(self):
modifiers = create_default_modifiers_for_team(self.team)
assert modifiers.personsOnEventsMode == PersonsOnEventsMode.disabled # NB! not a None
modifiers = create_default_modifiers_for_team(
- self.team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled)
+ self.team,
+ HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled),
)
assert modifiers.personsOnEventsMode == PersonsOnEventsMode.v1_enabled
modifiers = create_default_modifiers_for_team(
- self.team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v2_enabled)
+ self.team,
+ HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v2_enabled),
)
assert modifiers.personsOnEventsMode == PersonsOnEventsMode.v2_enabled
@@ -26,13 +28,17 @@ def test_modifiers_persons_on_events_mode_v1_enabled(self):
# Control
response = execute_hogql_query(
- query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.disabled)
+ query,
+ team=self.team,
+ modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.disabled),
)
assert " JOIN " in response.clickhouse
# Test
response = execute_hogql_query(
- query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled)
+ query,
+ team=self.team,
+ modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled),
)
assert " JOIN " not in response.clickhouse
@@ -70,9 +76,11 @@ def test_modifiers_persons_on_events_mode_mapping(self):
),
]
- for (mode, *expected) in test_cases:
+ for mode, *expected in test_cases:
response = execute_hogql_query(
- query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=mode)
+ query,
+ team=self.team,
+ modifiers=HogQLQueryModifiers(personsOnEventsMode=mode),
)
assert f"SELECT {', '.join(expected)} FROM" in response.clickhouse, f"PoE mode: {mode}"
@@ -80,11 +88,19 @@ def test_modifiers_persons_argmax_version_v2(self):
query = "SELECT * FROM persons"
# Control (v1)
- response = execute_hogql_query(query, team=self.team, modifiers=HogQLQueryModifiers(personsArgMaxVersion="v1"))
+ response = execute_hogql_query(
+ query,
+ team=self.team,
+ modifiers=HogQLQueryModifiers(personsArgMaxVersion="v1"),
+ )
assert "in(tuple(person.id, person.version)" not in response.clickhouse
# Test (v2)
- response = execute_hogql_query(query, team=self.team, modifiers=HogQLQueryModifiers(personsArgMaxVersion="v2"))
+ response = execute_hogql_query(
+ query,
+ team=self.team,
+ modifiers=HogQLQueryModifiers(personsArgMaxVersion="v2"),
+ )
assert "in(tuple(person.id, person.version)" in response.clickhouse
def test_modifiers_persons_argmax_version_auto(self):
diff --git a/posthog/hogql/test/test_placeholders.py b/posthog/hogql/test/test_placeholders.py
index 6906104795775..88c92ebfc8fe8 100644
--- a/posthog/hogql/test/test_placeholders.py
+++ b/posthog/hogql/test/test_placeholders.py
@@ -26,11 +26,15 @@ def test_replace_placeholders_error(self):
expr = ast.Placeholder(field="foo")
with self.assertRaises(HogQLException) as context:
replace_placeholders(expr, {})
- self.assertEqual("Placeholders, such as {foo}, are not supported in this context", str(context.exception))
+ self.assertEqual(
+ "Placeholders, such as {foo}, are not supported in this context",
+ str(context.exception),
+ )
with self.assertRaises(HogQLException) as context:
replace_placeholders(expr, {"bar": ast.Constant(value=123)})
self.assertEqual(
- "Placeholder {foo} is not available in this context. You can use the following: bar", str(context.exception)
+ "Placeholder {foo} is not available in this context. You can use the following: bar",
+ str(context.exception),
)
def test_replace_placeholders_comparison(self):
@@ -61,4 +65,7 @@ def test_assert_no_placeholders(self):
expr = ast.Placeholder(field="foo")
with self.assertRaises(HogQLException) as context:
replace_placeholders(expr, None)
- self.assertEqual("Placeholders, such as {foo}, are not supported in this context", str(context.exception))
+ self.assertEqual(
+ "Placeholders, such as {foo}, are not supported in this context",
+ str(context.exception),
+ )
diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py
index 3861bd77fce42..75f182a618f54 100644
--- a/posthog/hogql/test/test_printer.py
+++ b/posthog/hogql/test/test_printer.py
@@ -23,13 +23,19 @@ class TestPrinter(BaseTest):
# Helper to always translate HogQL with a blank context
def _expr(
- self, query: str, context: Optional[HogQLContext] = None, dialect: Literal["hogql", "clickhouse"] = "clickhouse"
+ self,
+ query: str,
+ context: Optional[HogQLContext] = None,
+ dialect: Literal["hogql", "clickhouse"] = "clickhouse",
) -> str:
return translate_hogql(query, context or HogQLContext(team_id=self.team.pk), dialect)
# Helper to always translate HogQL with a blank context,
def _select(
- self, query: str, context: Optional[HogQLContext] = None, placeholders: Optional[Dict[str, ast.Expr]] = None
+ self,
+ query: str,
+ context: Optional[HogQLContext] = None,
+ placeholders: Optional[Dict[str, ast.Expr]] = None,
) -> str:
return print_ast(
parse_select(query, placeholders=placeholders),
@@ -37,7 +43,12 @@ def _select(
"clickhouse",
)
- def _assert_expr_error(self, expr, expected_error, dialect: Literal["hogql", "clickhouse"] = "clickhouse"):
+ def _assert_expr_error(
+ self,
+ expr,
+ expected_error,
+ dialect: Literal["hogql", "clickhouse"] = "clickhouse",
+ ):
with self.assertRaises(HogQLException) as context:
self._expr(expr, None, dialect)
if expected_error not in str(context.exception):
@@ -90,9 +101,13 @@ def test_tuples(self):
self.assertEqual(self._expr("(1,2,[])"), "tuple(1, 2, [])")
def test_lambdas(self):
- self.assertEqual(self._expr("arrayMap(x -> x*2, [1,2,3])"), "arrayMap(x -> multiply(x, 2), [1, 2, 3])")
self.assertEqual(
- self._expr("arrayMap((x, y) -> x*y, [1,2,3])"), "arrayMap((x, y) -> multiply(x, y), [1, 2, 3])"
+ self._expr("arrayMap(x -> x*2, [1,2,3])"),
+ "arrayMap(x -> multiply(x, 2), [1, 2, 3])",
+ )
+ self.assertEqual(
+ self._expr("arrayMap((x, y) -> x*y, [1,2,3])"),
+ "arrayMap((x, y) -> multiply(x, y), [1, 2, 3])",
)
def test_equals_null(self):
@@ -162,7 +177,11 @@ def test_hogql_properties(self):
"person",
)
self.assertEqual(
- self._expr("person.properties.$browser", HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ "person.properties.$browser",
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"person.properties.$browser",
)
self.assertEqual(
@@ -170,23 +189,43 @@ def test_hogql_properties(self):
"properties.$browser",
)
self.assertEqual(
- self._expr("properties.`$browser with a space`", HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ "properties.`$browser with a space`",
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"properties.`$browser with a space`",
)
self.assertEqual(
- self._expr('properties."$browser with a space"', HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ 'properties."$browser with a space"',
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"properties.`$browser with a space`",
)
self.assertEqual(
- self._expr("properties['$browser with a space']", HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ "properties['$browser with a space']",
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"properties.`$browser with a space`",
)
self.assertEqual(
- self._expr("properties['$browser with a ` tick']", HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ "properties['$browser with a ` tick']",
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"properties.`$browser with a \\` tick`",
)
self.assertEqual(
- self._expr("properties['$browser \\\\with a \\n` tick']", HogQLContext(team_id=self.team.pk), "hogql"),
+ self._expr(
+ "properties['$browser \\\\with a \\n` tick']",
+ HogQLContext(team_id=self.team.pk),
+ "hogql",
+ ),
"properties.`$browser \\\\with a \\n\\` tick`",
)
# "dot NUMBER" means "tuple access" in clickhouse. To access strings properties, wrap them in `backquotes`
@@ -198,7 +237,11 @@ def test_hogql_properties(self):
self._expr("properties.`1`", HogQLContext(team_id=self.team.pk), "hogql"),
"properties.`1`",
)
- self._assert_expr_error("properties.'no strings'", "no viable alternative at input '.'no strings'", "hogql")
+ self._assert_expr_error(
+ "properties.'no strings'",
+ "no viable alternative at input '.'no strings'",
+ "hogql",
+ )
def test_hogql_properties_json(self):
context = HogQLContext(team_id=self.team.pk)
@@ -206,7 +249,10 @@ def test_hogql_properties_json(self):
self._expr("properties.nomat.json.yet", context),
"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_0)s, %(hogql_val_1)s, %(hogql_val_2)s), ''), 'null'), '^\"|\"$', '')",
)
- self.assertEqual(context.values, {"hogql_val_0": "nomat", "hogql_val_1": "json", "hogql_val_2": "yet"})
+ self.assertEqual(
+ context.values,
+ {"hogql_val_0": "nomat", "hogql_val_1": "json", "hogql_val_2": "yet"},
+ )
def test_hogql_properties_materialized_json_access(self):
try:
@@ -232,11 +278,15 @@ def test_materialized_fields_and_properties(self):
self.assertEqual(1 + 2, 3)
return
materialize("events", "$browser")
- self.assertEqual(self._expr("properties['$browser']"), "nullIf(nullIf(events.`mat_$browser`, ''), 'null')")
+ self.assertEqual(
+ self._expr("properties['$browser']"),
+ "nullIf(nullIf(events.`mat_$browser`, ''), 'null')",
+ )
materialize("events", "withoutdollar")
self.assertEqual(
- self._expr("properties['withoutdollar']"), "nullIf(nullIf(events.mat_withoutdollar, ''), 'null')"
+ self._expr("properties['withoutdollar']"),
+ "nullIf(nullIf(events.mat_withoutdollar, ''), 'null')",
)
materialize("events", "$browser and string")
@@ -247,13 +297,17 @@ def test_materialized_fields_and_properties(self):
materialize("events", "$browser%%%#@!@")
self.assertEqual(
- self._expr("properties['$browser%%%#@!@']"), "nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')"
+ self._expr("properties['$browser%%%#@!@']"),
+ "nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')",
)
def test_methods(self):
self.assertEqual(self._expr("count()"), "count()")
self.assertEqual(self._expr("count(distinct event)"), "count(DISTINCT events.event)")
- self.assertEqual(self._expr("countIf(distinct event, 1 == 2)"), "countIf(DISTINCT events.event, 0)")
+ self.assertEqual(
+ self._expr("countIf(distinct event, 1 == 2)"),
+ "countIf(DISTINCT events.event, 0)",
+ )
self.assertEqual(self._expr("sumIf(1, 1 == 2)"), "sumIf(1, 0)")
def test_functions(self):
@@ -269,32 +323,49 @@ def test_expr_parse_errors(self):
self._assert_expr_error("avg(bla)", "Unable to resolve field: bla")
self._assert_expr_error("count(1,2,3,4)", "Aggregation 'count' expects at most 1 argument, found 4")
self._assert_expr_error("countIf()", "Aggregation 'countIf' expects at least 1 argument, found 0")
- self._assert_expr_error("countIf(2,3,4)", "Aggregation 'countIf' expects at most 2 arguments, found 3")
+ self._assert_expr_error(
+ "countIf(2,3,4)",
+ "Aggregation 'countIf' expects at most 2 arguments, found 3",
+ )
self._assert_expr_error("uniq()", "Aggregation 'uniq' expects at least 1 argument, found 0")
self._assert_expr_error(
- "quantile(event)", "Aggregation 'quantile' requires parameters in addition to arguments"
+ "quantile(event)",
+ "Aggregation 'quantile' requires parameters in addition to arguments",
)
self._assert_expr_error(
- "quantile()(event)", "Aggregation 'quantile' requires parameters in addition to arguments"
+ "quantile()(event)",
+ "Aggregation 'quantile' requires parameters in addition to arguments",
+ )
+ self._assert_expr_error(
+ "quantile(0.5, 2)(event)",
+ "Aggregation 'quantile' expects 1 parameter, found 2",
)
- self._assert_expr_error("quantile(0.5, 2)(event)", "Aggregation 'quantile' expects 1 parameter, found 2")
self._assert_expr_error("sparkline()", "Function 'sparkline' expects 1 argument, found 0")
self._assert_expr_error("hamburger(event)", "Unsupported function call 'hamburger(...)'")
self._assert_expr_error("mad(event)", "Unsupported function call 'mad(...)'")
- self._assert_expr_error("noway(event)", "Unsupported function call 'noway(...)'. Perhaps you meant 'now(...)'?")
self._assert_expr_error(
- "tostring(event)", "Unsupported function call 'tostring(...)'. Perhaps you meant 'toString(...)'?"
+ "noway(event)",
+ "Unsupported function call 'noway(...)'. Perhaps you meant 'now(...)'?",
+ )
+ self._assert_expr_error(
+ "tostring(event)",
+ "Unsupported function call 'tostring(...)'. Perhaps you meant 'toString(...)'?",
)
self._assert_expr_error("yeet.the.cloud", "Unable to resolve field: yeet")
self._assert_expr_error("chipotle", "Unable to resolve field: chipotle")
self._assert_expr_error(
- "avg(avg(properties.bla))", "Aggregation 'avg' cannot be nested inside another aggregation 'avg'."
+ "avg(avg(properties.bla))",
+ "Aggregation 'avg' cannot be nested inside another aggregation 'avg'.",
)
self._assert_expr_error("person.chipotle", "Field not found: chipotle")
self._assert_expr_error("properties.0", "SQL indexes start from one, not from zero. E.g: array[1]")
- self._assert_expr_error("properties.id.0", "SQL indexes start from one, not from zero. E.g: array[1]")
self._assert_expr_error(
- "event as `as%d`", 'The HogQL identifier "as%d" is not permitted as it contains the "%" character'
+ "properties.id.0",
+ "SQL indexes start from one, not from zero. E.g: array[1]",
+ )
+ self._assert_expr_error(
+ "event as `as%d`",
+ 'The HogQL identifier "as%d" is not permitted as it contains the "%" character',
)
@override_settings(PERSON_ON_EVENTS_OVERRIDE=True, PERSON_ON_EVENTS_V2_OVERRIDE=True)
@@ -337,24 +408,55 @@ def test_logic(self):
def test_comparisons(self):
context = HogQLContext(team_id=self.team.pk)
self.assertEqual(self._expr("event == 'E'", context), "equals(events.event, %(hogql_val_0)s)")
- self.assertEqual(self._expr("event != 'E'", context), "notEquals(events.event, %(hogql_val_1)s)")
+ self.assertEqual(
+ self._expr("event != 'E'", context),
+ "notEquals(events.event, %(hogql_val_1)s)",
+ )
self.assertEqual(self._expr("event > 'E'", context), "greater(events.event, %(hogql_val_2)s)")
- self.assertEqual(self._expr("event >= 'E'", context), "greaterOrEquals(events.event, %(hogql_val_3)s)")
+ self.assertEqual(
+ self._expr("event >= 'E'", context),
+ "greaterOrEquals(events.event, %(hogql_val_3)s)",
+ )
self.assertEqual(self._expr("event < 'E'", context), "less(events.event, %(hogql_val_4)s)")
- self.assertEqual(self._expr("event <= 'E'", context), "lessOrEquals(events.event, %(hogql_val_5)s)")
+ self.assertEqual(
+ self._expr("event <= 'E'", context),
+ "lessOrEquals(events.event, %(hogql_val_5)s)",
+ )
self.assertEqual(self._expr("event like 'E'", context), "like(events.event, %(hogql_val_6)s)")
- self.assertEqual(self._expr("event not like 'E'", context), "notLike(events.event, %(hogql_val_7)s)")
- self.assertEqual(self._expr("event ilike 'E'", context), "ilike(events.event, %(hogql_val_8)s)")
- self.assertEqual(self._expr("event not ilike 'E'", context), "notILike(events.event, %(hogql_val_9)s)")
+ self.assertEqual(
+ self._expr("event not like 'E'", context),
+ "notLike(events.event, %(hogql_val_7)s)",
+ )
+ self.assertEqual(
+ self._expr("event ilike 'E'", context),
+ "ilike(events.event, %(hogql_val_8)s)",
+ )
+ self.assertEqual(
+ self._expr("event not ilike 'E'", context),
+ "notILike(events.event, %(hogql_val_9)s)",
+ )
self.assertEqual(self._expr("event in 'E'", context), "in(events.event, %(hogql_val_10)s)")
- self.assertEqual(self._expr("event not in 'E'", context), "notIn(events.event, %(hogql_val_11)s)")
+ self.assertEqual(
+ self._expr("event not in 'E'", context),
+ "notIn(events.event, %(hogql_val_11)s)",
+ )
self.assertEqual(self._expr("event ~ 'E'", context), "match(events.event, %(hogql_val_12)s)")
self.assertEqual(self._expr("event =~ 'E'", context), "match(events.event, %(hogql_val_13)s)")
- self.assertEqual(self._expr("event !~ 'E'", context), "not(match(events.event, %(hogql_val_14)s))")
- self.assertEqual(self._expr("event ~* 'E'", context), "match(events.event, concat('(?i)', %(hogql_val_15)s))")
- self.assertEqual(self._expr("event =~* 'E'", context), "match(events.event, concat('(?i)', %(hogql_val_16)s))")
self.assertEqual(
- self._expr("event !~* 'E'", context), "not(match(events.event, concat('(?i)', %(hogql_val_17)s)))"
+ self._expr("event !~ 'E'", context),
+ "not(match(events.event, %(hogql_val_14)s))",
+ )
+ self.assertEqual(
+ self._expr("event ~* 'E'", context),
+ "match(events.event, concat('(?i)', %(hogql_val_15)s))",
+ )
+ self.assertEqual(
+ self._expr("event =~* 'E'", context),
+ "match(events.event, concat('(?i)', %(hogql_val_16)s))",
+ )
+ self.assertEqual(
+ self._expr("event !~* 'E'", context),
+ "not(match(events.event, concat('(?i)', %(hogql_val_17)s)))",
)
def test_comments(self):
@@ -369,31 +471,48 @@ def test_values(self):
self._expr("coalesce(4.2, 5, 'lol', 'hoo')", context),
"coalesce(4.2, 5, %(hogql_val_1)s, %(hogql_val_2)s)",
)
- self.assertEqual(context.values, {"hogql_val_0": "E", "hogql_val_1": "lol", "hogql_val_2": "hoo"})
+ self.assertEqual(
+ context.values,
+ {"hogql_val_0": "E", "hogql_val_1": "lol", "hogql_val_2": "hoo"},
+ )
def test_alias_keywords(self):
self._assert_expr_error(
- "1 as team_id", '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword'
+ "1 as team_id",
+ '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword',
+ )
+ self._assert_expr_error(
+ "1 as true",
+ '"true" cannot be an alias or identifier, as it\'s a reserved keyword',
)
- self._assert_expr_error("1 as true", '"true" cannot be an alias or identifier, as it\'s a reserved keyword')
self._assert_select_error(
- "select 1 as team_id from events", '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword'
+ "select 1 as team_id from events",
+ '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword',
)
self.assertEqual(
self._select("select 1 as `-- select team_id` from events"),
f"SELECT 1 AS `-- select team_id` FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000",
)
# Some aliases are funny, but that's what the antlr syntax permits, and ClickHouse doesn't complain either
- self.assertEqual(self._expr("event makes little sense"), "((events.event AS makes) AS little) AS sense")
+ self.assertEqual(
+ self._expr("event makes little sense"),
+ "((events.event AS makes) AS little) AS sense",
+ )
def test_case_when(self):
self.assertEqual(self._expr("case when 1 then 2 else 3 end"), "if(1, 2, 3)")
def test_case_when_many(self):
- self.assertEqual(self._expr("case when 1 then 2 when 3 then 4 else 5 end"), "multiIf(1, 2, 3, 4, 5)")
+ self.assertEqual(
+ self._expr("case when 1 then 2 when 3 then 4 else 5 end"),
+ "multiIf(1, 2, 3, 4, 5)",
+ )
def test_case_when_case(self):
- self.assertEqual(self._expr("case 0 when 1 then 2 when 3 then 4 else 5 end"), "transform(0, [1, 3], [2, 4], 5)")
+ self.assertEqual(
+ self._expr("case 0 when 1 then 2 when 3 then 4 else 5 end"),
+ "transform(0, [1, 3], [2, 4], 5)",
+ )
def test_select(self):
self.assertEqual(self._select("select 1"), "SELECT 1 LIMIT 10000")
@@ -421,19 +540,29 @@ def test_select_from(self):
def test_select_from_placeholder(self):
self.assertEqual(
- self._select("select 1 from {placeholder}", placeholders={"placeholder": ast.Field(chain=["events"])}),
- f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000",
- )
- with self.assertRaises(HogQLException) as error_context:
self._select(
"select 1 from {placeholder}",
- placeholders={
- "placeholder": ast.CompareOperation(
- left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.CompareOperationOp.Eq
- )
- },
+ placeholders={"placeholder": ast.Field(chain=["events"])},
),
- self.assertEqual(str(error_context.exception), "JoinExpr with table of type CompareOperation not supported")
+ f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000",
+ )
+ with self.assertRaises(HogQLException) as error_context:
+ (
+ self._select(
+ "select 1 from {placeholder}",
+ placeholders={
+ "placeholder": ast.CompareOperation(
+ left=ast.Constant(value=1),
+ right=ast.Constant(value=1),
+ op=ast.CompareOperationOp.Eq,
+ )
+ },
+ ),
+ )
+ self.assertEqual(
+ str(error_context.exception),
+ "JoinExpr with table of type CompareOperation not supported",
+ )
def test_select_cross_join(self):
self.assertEqual(
@@ -702,13 +831,16 @@ def test_count_if_distinct(self):
def test_print_timezone(self):
context = HogQLContext(
- team_id=self.team.pk, enable_select_queries=True, database=Database(None, WeekStartDay.SUNDAY)
+ team_id=self.team.pk,
+ enable_select_queries=True,
+ database=Database(None, WeekStartDay.SUNDAY),
)
context.database.events.fields["test_date"] = DateDatabaseField(name="test_date") # type: ignore
self.assertEqual(
self._select(
- "SELECT now(), toDateTime(timestamp), toDate(test_date), toDateTime('2020-02-02') FROM events", context
+ "SELECT now(), toDateTime(timestamp), toDate(test_date), toDateTime('2020-02-02') FROM events",
+ context,
),
f"SELECT now64(6, %(hogql_val_0)s), toDateTime(toTimeZone(events.timestamp, %(hogql_val_1)s), %(hogql_val_2)s), toDate(events.test_date, %(hogql_val_3)s), parseDateTime64BestEffortOrNull(%(hogql_val_4)s, 6, %(hogql_val_5)s) FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000",
)
@@ -729,7 +861,10 @@ def test_print_timezone_custom(self):
self.team.save()
context = HogQLContext(team_id=self.team.pk, enable_select_queries=True)
self.assertEqual(
- self._select("SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", context),
+ self._select(
+ "SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events",
+ context,
+ ),
f"SELECT now64(6, %(hogql_val_0)s), toDateTime(toTimeZone(events.timestamp, %(hogql_val_1)s), %(hogql_val_2)s), parseDateTime64BestEffortOrNull(%(hogql_val_3)s, 6, %(hogql_val_4)s) FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000",
)
self.assertEqual(
@@ -749,7 +884,10 @@ def test_print_timezone_gibberish(self):
context = HogQLContext(team_id=self.team.pk, enable_select_queries=True)
with self.assertRaises(HogQLException) as error_context:
- self._select("SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", context)
+ self._select(
+ "SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events",
+ context,
+ )
self.assertEqual(str(error_context.exception), "Unknown timezone: 'Europe/PostHogLandia'")
def test_window_functions(self):
@@ -907,7 +1045,11 @@ def test_print_global_settings(self):
def test_print_query_level_settings(self):
query = parse_select("SELECT 1 FROM events")
query.settings = HogQLQuerySettings(optimize_aggregation_in_order=True)
- printed = print_ast(query, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse")
+ printed = print_ast(
+ query,
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True),
+ "clickhouse",
+ )
self.assertEqual(
printed,
f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS optimize_aggregation_in_order=1",
diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py
index 1e57589805645..c0ed528ea4da9 100644
--- a/posthog/hogql/test/test_property.py
+++ b/posthog/hogql/test/test_property.py
@@ -12,7 +12,14 @@
tag_name_to_expr,
)
from posthog.hogql.visitor import clear_locations
-from posthog.models import Action, ActionStep, Cohort, Property, PropertyDefinition, Team
+from posthog.models import (
+ Action,
+ ActionStep,
+ Cohort,
+ Property,
+ PropertyDefinition,
+ Team,
+)
from posthog.models.property import PropertyGroup
from posthog.models.property_definition import PropertyType
from posthog.schema import HogQLPropertyFilter, PropertyOperator
@@ -49,8 +56,14 @@ def test_has_aggregation(self):
def test_property_to_expr_hogql(self):
self.assertEqual(self._property_to_expr({"type": "hogql", "key": "1"}), ast.Constant(value=1))
- self.assertEqual(self._property_to_expr(Property(type="hogql", key="1")), ast.Constant(value=1))
- self.assertEqual(self._property_to_expr(HogQLPropertyFilter(type="hogql", key="1")), ast.Constant(value=1))
+ self.assertEqual(
+ self._property_to_expr(Property(type="hogql", key="1")),
+ ast.Constant(value=1),
+ )
+ self.assertEqual(
+ self._property_to_expr(HogQLPropertyFilter(type="hogql", key="1")),
+ ast.Constant(value=1),
+ )
def test_property_to_expr_event(self):
self.assertEqual(
@@ -128,7 +141,10 @@ def test_property_to_expr_boolean(self):
property_type=PropertyType.String,
)
self.assertEqual(
- self._property_to_expr({"type": "event", "key": "boolean_prop", "value": "true"}, team=self.team),
+ self._property_to_expr(
+ {"type": "event", "key": "boolean_prop", "value": "true"},
+ team=self.team,
+ ),
self._parse_expr("properties.boolean_prop = true"),
)
self.assertEqual(
@@ -136,7 +152,10 @@ def test_property_to_expr_boolean(self):
self._parse_expr("properties.string_prop = 'true'"),
)
self.assertEqual(
- self._property_to_expr({"type": "event", "key": "unknown_prop", "value": "true"}, team=self.team),
+ self._property_to_expr(
+ {"type": "event", "key": "unknown_prop", "value": "true"},
+ team=self.team,
+ ),
self._parse_expr("properties.unknown_prop = true"),
)
@@ -147,7 +166,14 @@ def test_property_to_expr_event_list(self):
self._parse_expr("properties.a = 'b' or properties.a = 'c'"),
)
self.assertEqual(
- self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "icontains"}),
+ self._property_to_expr(
+ {
+ "type": "event",
+ "key": "a",
+ "value": ["b", "c"],
+ "operator": "icontains",
+ }
+ ),
self._parse_expr("properties.a ilike '%b%' or properties.a ilike '%c%'"),
)
self.assertEqual(
@@ -160,11 +186,25 @@ def test_property_to_expr_event_list(self):
self._parse_expr("properties.a != 'b' and properties.a != 'c'"),
)
self.assertEqual(
- self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "not_icontains"}),
+ self._property_to_expr(
+ {
+ "type": "event",
+ "key": "a",
+ "value": ["b", "c"],
+ "operator": "not_icontains",
+ }
+ ),
self._parse_expr("properties.a not ilike '%b%' and properties.a not ilike '%c%'"),
)
self.assertEqual(
- self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "not_regex"}),
+ self._property_to_expr(
+ {
+ "type": "event",
+ "key": "a",
+ "value": ["b", "c"],
+ "operator": "not_regex",
+ }
+ ),
self._parse_expr("not(match(properties.a, 'b')) and not(match(properties.a, 'c'))"),
)
@@ -182,27 +222,69 @@ def test_property_to_expr_person(self):
def test_property_to_expr_element(self):
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "selector", "value": "div", "operator": "exact"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "selector",
+ "value": "div",
+ "operator": "exact",
+ }
+ ),
self._selector_to_expr("div"),
)
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "selector", "value": "div", "operator": "is_not"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "selector",
+ "value": "div",
+ "operator": "is_not",
+ }
+ ),
clear_locations(not_call(self._selector_to_expr("div"))),
)
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "tag_name", "value": "div", "operator": "exact"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "tag_name",
+ "value": "div",
+ "operator": "exact",
+ }
+ ),
clear_locations(tag_name_to_expr("div")),
)
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "tag_name", "value": "div", "operator": "is_not"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "tag_name",
+ "value": "div",
+ "operator": "is_not",
+ }
+ ),
clear_locations(not_call(tag_name_to_expr("div"))),
)
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "href", "value": "href-text.", "operator": "exact"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "href",
+ "value": "href-text.",
+ "operator": "exact",
+ }
+ ),
clear_locations(element_chain_key_filter("href", "href-text.", PropertyOperator.exact)),
)
self.assertEqual(
- self._property_to_expr({"type": "element", "key": "text", "value": "text-text.", "operator": "regex"}),
+ self._property_to_expr(
+ {
+ "type": "element",
+ "key": "text",
+ "value": "text-text.",
+ "operator": "regex",
+ }
+ ),
clear_locations(element_chain_key_filter("text", "text-text.", PropertyOperator.regex)),
)
@@ -259,7 +341,8 @@ def test_property_groups_single(self):
self.assertEqual(
self._property_to_expr(
PropertyGroup(
- type=PropertyOperatorType.OR, values=[Property(type="event", key="e", value="b", operator="exact")]
+ type=PropertyOperatorType.OR,
+ values=[Property(type="event", key="e", value="b", operator="exact")],
)
),
self._parse_expr("properties.e = 'b'"),
@@ -277,8 +360,18 @@ def test_property_groups_combined(self):
PropertyGroup(
type=PropertyOperatorType.OR,
values=[
- Property(type="person", key="a", value="b", operator="exact"),
- Property(type="event", key="e", value="b", operator="exact"),
+ Property(
+ type="person",
+ key="a",
+ value="b",
+ operator="exact",
+ ),
+ Property(
+ type="event",
+ key="e",
+ value="b",
+ operator="exact",
+ ),
],
),
],
@@ -290,7 +383,8 @@ def test_property_groups_combined(self):
def test_tag_name_to_expr(self):
self.assertEqual(
- clear_locations(tag_name_to_expr("a")), clear_locations(elements_chain_match("(^|;)a(\\.|$|;|:)"))
+ clear_locations(tag_name_to_expr("a")),
+ clear_locations(elements_chain_match("(^|;)a(\\.|$|;|:)")),
)
def test_selector_to_expr(self):
@@ -379,7 +473,12 @@ def test_elements_chain_key_filter(self):
def test_action_to_expr(self):
action1 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$autocapture", action=action1, selector="a.nav-link.active", tag_name="a")
+ ActionStep.objects.create(
+ event="$autocapture",
+ action=action1,
+ selector="a.nav-link.active",
+ tag_name="a",
+ )
self.assertEqual(
clear_locations(action_to_expr(action1)),
self._parse_expr(
@@ -394,15 +493,30 @@ def test_action_to_expr(self):
)
action2 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$pageview", action=action2, url="https://example.com", url_matching="contains")
+ ActionStep.objects.create(
+ event="$pageview",
+ action=action2,
+ url="https://example.com",
+ url_matching="contains",
+ )
self.assertEqual(
clear_locations(action_to_expr(action2)),
self._parse_expr("event = '$pageview' and properties.$current_url like '%https://example.com%'"),
)
action3 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$pageview", action=action3, url="https://example2.com", url_matching="regex")
- ActionStep.objects.create(event="custom", action=action3, url="https://example3.com", url_matching="exact")
+ ActionStep.objects.create(
+ event="$pageview",
+ action=action3,
+ url="https://example2.com",
+ url_matching="regex",
+ )
+ ActionStep.objects.create(
+ event="custom",
+ action=action3,
+ url="https://example3.com",
+ url_matching="exact",
+ )
self.assertEqual(
clear_locations(action_to_expr(action3)),
self._parse_expr(
@@ -435,7 +549,8 @@ def test_cohort_filter_static(self):
def test_cohort_filter_dynamic(self):
cohort = Cohort.objects.create(
- team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}]
+ team=self.team,
+ groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}],
)
self.assertEqual(
self._property_to_expr({"type": "cohort", "key": "id", "value": cohort.pk}, self.team),
@@ -444,15 +559,25 @@ def test_cohort_filter_dynamic(self):
def test_person_scope(self):
self.assertEqual(
- self._property_to_expr({"type": "person", "key": "a", "value": "b", "operator": "exact"}, scope="event"),
+ self._property_to_expr(
+ {"type": "person", "key": "a", "value": "b", "operator": "exact"},
+ scope="event",
+ ),
self._parse_expr("person.properties.a = 'b'"),
)
self.assertEqual(
- self._property_to_expr({"type": "person", "key": "a", "value": "b", "operator": "exact"}, scope="person"),
+ self._property_to_expr(
+ {"type": "person", "key": "a", "value": "b", "operator": "exact"},
+ scope="person",
+ ),
self._parse_expr("properties.a = 'b'"),
)
with self.assertRaises(Exception) as e:
- self._property_to_expr({"type": "event", "key": "a", "value": "b", "operator": "exact"}, scope="person")
+ self._property_to_expr(
+ {"type": "event", "key": "a", "value": "b", "operator": "exact"},
+ scope="person",
+ )
self.assertEqual(
- str(e.exception), "The 'event' property filter only works in 'event' scope, not in 'person' scope"
+ str(e.exception),
+ "The 'event' property filter only works in 'event' scope, not in 'person' scope",
)
diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py
index 475a346ff2b5e..0d6cf4342dead 100644
--- a/posthog/hogql/test/test_query.py
+++ b/posthog/hogql/test/test_query.py
@@ -15,9 +15,17 @@
from posthog.models import Cohort
from posthog.models.cohort.util import recalculate_cohortpeople
from posthog.models.utils import UUIDT
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.schema import HogQLFilters, EventPropertyFilter, DateRange, QueryTiming
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_person,
+ flush_persons_and_events,
+)
from posthog.warehouse.models import DataWarehouseSavedQuery, DataWarehouseViewLink
@@ -38,7 +46,11 @@ def _create_random_events(self) -> str:
distinct_id="bla",
event="random event",
team=self.team,
- properties={"random_prop": "don't include", "random_uuid": random_uuid, "index": index},
+ properties={
+ "random_prop": "don't include",
+ "random_uuid": random_uuid,
+ "index": index,
+ },
)
flush_persons_and_events()
return random_uuid
@@ -241,7 +253,10 @@ def test_query_joins_pdi_persons(self):
)
assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot
self.assertEqual(response.results[0][0], "bla")
- self.assertEqual(response.results[0][1], datetime.datetime(2020, 1, 10, 0, 0, tzinfo=timezone.utc))
+ self.assertEqual(
+ response.results[0][1],
+ datetime.datetime(2020, 1, 10, 0, 0, tzinfo=timezone.utc),
+ )
@pytest.mark.usefixtures("unittest_snapshot")
def test_query_joins_pdi_person_properties(self):
@@ -402,20 +417,42 @@ def test_query_select_person_with_poe_without_joins(self):
def test_prop_cohort_basic(self):
with freeze_time("2020-01-10"):
- _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"})
+ _create_person(
+ distinct_ids=["some_other_id"],
+ team_id=self.team.pk,
+ properties={"$some_prop": "something"},
+ )
_create_person(
distinct_ids=["some_id"],
team_id=self.team.pk,
properties={"$some_prop": "something", "$another_prop": "something"},
)
_create_person(distinct_ids=["no_match"], team_id=self.team.pk)
- _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"})
_create_event(
- event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"}
+ event="$pageview",
+ team=self.team,
+ distinct_id="some_id",
+ properties={"attr": "some_val"},
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id="some_other_id",
+ properties={"attr": "some_val"},
)
cohort = Cohort.objects.create(
team=self.team,
- groups=[{"properties": [{"key": "$some_prop", "value": "something", "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "$some_prop",
+ "value": "something",
+ "type": "person",
+ }
+ ]
+ }
+ ],
name="cohort",
)
recalculate_cohortpeople(cohort, pending_version=0)
@@ -425,7 +462,8 @@ def test_prop_cohort_basic(self):
team=self.team,
placeholders={
"cohort_filter": property_to_expr(
- {"type": "cohort", "key": "id", "value": cohort.pk}, self.team
+ {"type": "cohort", "key": "id", "value": cohort.pk},
+ self.team,
)
},
)
@@ -441,7 +479,8 @@ def test_prop_cohort_basic(self):
team=self.team,
placeholders={
"cohort_filter": property_to_expr(
- {"type": "cohort", "key": "id", "value": cohort.pk}, self.team
+ {"type": "cohort", "key": "id", "value": cohort.pk},
+ self.team,
)
},
)
@@ -456,16 +495,28 @@ def test_prop_cohort_basic(self):
def test_prop_cohort_static(self):
with freeze_time("2020-01-10"):
- _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"})
+ _create_person(
+ distinct_ids=["some_other_id"],
+ team_id=self.team.pk,
+ properties={"$some_prop": "something"},
+ )
_create_person(
distinct_ids=["some_id"],
team_id=self.team.pk,
properties={"$some_prop": "something", "$another_prop": "something"},
)
_create_person(distinct_ids=["no_match"], team_id=self.team.pk)
- _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"})
_create_event(
- event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"}
+ event="$pageview",
+ team=self.team,
+ distinct_id="some_id",
+ properties={"attr": "some_val"},
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id="some_other_id",
+ properties={"attr": "some_val"},
)
cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True)
cohort.insert_users_by_list(["some_id"])
@@ -476,7 +527,8 @@ def test_prop_cohort_static(self):
team=self.team,
placeholders={
"cohort_filter": property_to_expr(
- {"type": "cohort", "key": "id", "value": cohort.pk}, self.team
+ {"type": "cohort", "key": "id", "value": cohort.pk},
+ self.team,
)
},
)
@@ -493,7 +545,8 @@ def test_prop_cohort_static(self):
team=self.team,
placeholders={
"cohort_filter": property_to_expr(
- {"type": "cohort", "key": "id", "value": cohort.pk}, self.team
+ {"type": "cohort", "key": "id", "value": cohort.pk},
+ self.team,
)
},
)
@@ -505,7 +558,11 @@ def test_prop_cohort_static(self):
def test_join_with_property_materialized_session_id(self):
with freeze_time("2020-01-10"):
- _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": "something"})
+ _create_person(
+ distinct_ids=["some_id"],
+ team_id=self.team.pk,
+ properties={"$some_prop": "something"},
+ )
_create_event(
event="$pageview",
team=self.team,
@@ -519,7 +576,10 @@ def test_join_with_property_materialized_session_id(self):
properties={"attr": "some_val", "$session_id": "111"},
)
produce_replay_summary(
- distinct_id="some_id", session_id="111", first_timestamp=timezone.now(), team_id=self.team.pk
+ distinct_id="some_id",
+ session_id="111",
+ first_timestamp=timezone.now(),
+ team_id=self.team.pk,
)
response = execute_hogql_query(
@@ -544,7 +604,11 @@ def test_join_with_property_materialized_session_id(self):
def test_join_with_property_not_materialized(self):
with freeze_time("2020-01-10"):
- _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": "something"})
+ _create_person(
+ distinct_ids=["some_id"],
+ team_id=self.team.pk,
+ properties={"$some_prop": "something"},
+ )
_create_event(
event="$pageview",
team=self.team,
@@ -558,7 +622,10 @@ def test_join_with_property_not_materialized(self):
properties={"attr": "some_val", "$$$session_id": "111"},
)
produce_replay_summary(
- distinct_id="some_id", session_id="111", first_timestamp=timezone.now(), team_id=self.team.pk
+ distinct_id="some_id",
+ session_id="111",
+ first_timestamp=timezone.now(),
+ team_id=self.team.pk,
)
response = execute_hogql_query(
@@ -625,7 +692,10 @@ def test_tuple_access(self):
query,
team=self.team,
)
- self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])])
+ self.assertEqual(
+ response.results,
+ [("0", [("random event", 1)]), ("1", [("random event", 1)])],
+ )
assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot
def test_null_properties(self):
@@ -635,7 +705,12 @@ def test_null_properties(self):
distinct_id="bla",
event="empty event",
team=self.team,
- properties={"empty_string": "", "null": None, "str_zero": "0", "num_zero": 0},
+ properties={
+ "empty_string": "",
+ "null": None,
+ "str_zero": "0",
+ "num_zero": 0,
+ },
)
query = """
@@ -881,7 +956,10 @@ def test_with_pivot_table_1_level(self):
query,
team=self.team,
)
- self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])])
+ self.assertEqual(
+ response.results,
+ [("0", [("random event", 1)]), ("1", [("random event", 1)])],
+ )
assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot
@pytest.mark.usefixtures("unittest_snapshot")
@@ -917,7 +995,10 @@ def test_with_pivot_table_2_levels(self):
query,
team=self.team,
)
- self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])])
+ self.assertEqual(
+ response.results,
+ [("0", [("random event", 1)]), ("1", [("random event", 1)])],
+ )
assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot
def test_property_access_with_arrays(self):
@@ -1308,7 +1389,11 @@ def test_view_link(self):
saved_query = DataWarehouseSavedQuery.objects.get(pk=saved_query_response["id"])
DataWarehouseViewLink.objects.create(
- saved_query=saved_query, table="events", to_join_key="fake", from_join_key="distinct_id", team=self.team
+ saved_query=saved_query,
+ table="events",
+ to_join_key="fake",
+ from_join_key="distinct_id",
+ team=self.team,
)
response = execute_hogql_query("SELECT event_view.fake FROM events", team=self.team)
@@ -1366,7 +1451,10 @@ def test_hogql_query_filters_double_error(self):
query = "SELECT event from events where {filters}"
with self.assertRaises(HogQLException) as e:
execute_hogql_query(
- query, team=self.team, filters=HogQLFilters(), placeholders={"filters": ast.Constant(value=True)}
+ query,
+ team=self.team,
+ filters=HogQLFilters(),
+ placeholders={"filters": ast.Constant(value=True)},
)
self.assertEqual(
str(e.exception),
@@ -1378,7 +1466,14 @@ def test_hogql_query_filters_alias(self):
random_uuid = self._create_random_events()
query = "SELECT event, distinct_id from events e WHERE {filters}"
filters = HogQLFilters(
- properties=[EventPropertyFilter(key="random_uuid", operator="exact", value=random_uuid, type="event")]
+ properties=[
+ EventPropertyFilter(
+ key="random_uuid",
+ operator="exact",
+ value=random_uuid,
+ type="event",
+ )
+ ]
)
response = execute_hogql_query(query, team=self.team, filters=filters)
self.assertEqual(
diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py
index 7ed33e37291d2..1a946c2c3e769 100644
--- a/posthog/hogql/test/test_resolver.py
+++ b/posthog/hogql/test/test_resolver.py
@@ -27,11 +27,18 @@ class TestResolver(BaseTest):
maxDiff = None
def _select(self, query: str, placeholders: Optional[Dict[str, ast.Expr]] = None) -> ast.SelectQuery:
- return cast(ast.SelectQuery, clone_expr(parse_select(query, placeholders=placeholders), clear_locations=True))
+ return cast(
+ ast.SelectQuery,
+ clone_expr(parse_select(query, placeholders=placeholders), clear_locations=True),
+ )
def _print_hogql(self, select: str):
expr = self._select(select)
- return print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "hogql")
+ return print_ast(
+ expr,
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True),
+ "hogql",
+ )
def setUp(self):
self.database = create_hogql_database(self.team.pk)
@@ -80,7 +87,8 @@ def test_will_not_run_twice(self):
with self.assertRaises(ResolverException) as context:
expr = resolve_types(expr, self.context)
self.assertEqual(
- str(context.exception), "Type already resolved for SelectQuery (SelectQueryType). Can't run again."
+ str(context.exception),
+ "Type already resolved for SelectQuery (SelectQueryType). Can't run again.",
)
def test_resolve_events_table_alias(self):
@@ -134,11 +142,17 @@ def test_resolve_events_table_column_alias(self):
select_query_type = ast.SelectQueryType(
aliases={
"ee": ast.FieldAliasType(alias="ee", type=event_field_type),
- "e": ast.FieldAliasType(alias="e", type=ast.FieldAliasType(alias="ee", type=event_field_type)),
+ "e": ast.FieldAliasType(
+ alias="e",
+ type=ast.FieldAliasType(alias="ee", type=event_field_type),
+ ),
},
columns={
"ee": ast.FieldAliasType(alias="ee", type=event_field_type),
- "e": ast.FieldAliasType(alias="e", type=ast.FieldAliasType(alias="ee", type=event_field_type)),
+ "e": ast.FieldAliasType(
+ alias="e",
+ type=ast.FieldAliasType(alias="ee", type=event_field_type),
+ ),
"timestamp": timestamp_field_type,
},
tables={"e": events_table_alias_type},
@@ -184,7 +198,8 @@ def test_resolve_events_table_column_alias_inside_subquery(self):
expr = resolve_types(expr, self.context)
inner_events_table_type = ast.TableType(table=self.database.events)
inner_event_field_type = ast.FieldAliasType(
- alias="b", type=ast.FieldType(name="event", table_type=inner_events_table_type)
+ alias="b",
+ type=ast.FieldType(name="event", table_type=inner_events_table_type),
)
timestamp_field_type = ast.FieldType(name="timestamp", table_type=inner_events_table_type)
timstamp_alias_type = ast.FieldAliasType(alias="c", type=timestamp_field_type)
@@ -288,13 +303,25 @@ def test_resolve_constant_type(self):
ast.Constant(value=1.1232, type=ast.FloatType()),
ast.Constant(value=None, type=ast.UnknownType()),
ast.Constant(value=date(2020, 1, 10), type=ast.DateType()),
- ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc), type=ast.DateTimeType()),
- ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000"), type=ast.UUIDType()),
+ ast.Constant(
+ value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc),
+ type=ast.DateTimeType(),
+ ),
+ ast.Constant(
+ value=UUID("00000000-0000-4000-8000-000000000000"),
+ type=ast.UUIDType(),
+ ),
ast.Constant(value=[], type=ast.ArrayType(item_type=ast.UnknownType())),
ast.Constant(value=[1, 2], type=ast.ArrayType(item_type=ast.IntegerType())),
ast.Constant(
value=(1, 2, 3),
- type=ast.TupleType(item_types=[ast.IntegerType(), ast.IntegerType(), ast.IntegerType()]),
+ type=ast.TupleType(
+ item_types=[
+ ast.IntegerType(),
+ ast.IntegerType(),
+ ast.IntegerType(),
+ ]
+ ),
),
],
type=ast.SelectQueryType(aliases={}, columns={}, tables={}),
@@ -634,7 +661,9 @@ def test_resolve_virtual_events_poe(self):
type=ast.FieldType(
name="id",
table_type=ast.VirtualTableType(
- table_type=events_table_type, field="poe", virtual_table=self.database.events.fields["poe"]
+ table_type=events_table_type,
+ field="poe",
+ virtual_table=self.database.events.fields["poe"],
),
),
),
@@ -674,15 +703,27 @@ def test_resolve_union_all(self):
self.assertEqual(
node.select_queries[0].select,
[
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)),
- ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)),
+ ast.Field(
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=events_table_type),
+ ),
],
)
self.assertEqual(
node.select_queries[1].select,
[
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)),
- ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)),
+ ast.Field(
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=events_table_type),
+ ),
],
)
@@ -693,11 +734,18 @@ def test_call_type(self):
ast.Call(
name="max",
# NB! timestamp was resolved to a DateTimeType for the Call's arg type.
- type=ast.CallType(name="max", arg_types=[ast.DateTimeType()], return_type=ast.UnknownType()),
+ type=ast.CallType(
+ name="max",
+ arg_types=[ast.DateTimeType()],
+ return_type=ast.UnknownType(),
+ ),
args=[
ast.Field(
chain=["timestamp"],
- type=ast.FieldType(name="timestamp", table_type=ast.TableType(table=self.database.events)),
+ type=ast.FieldType(
+ name="timestamp",
+ table_type=ast.TableType(table=self.database.events),
+ ),
)
],
),
@@ -776,21 +824,58 @@ def test_asterisk_expander_table(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=events_table_type)),
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)),
- ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=events_table_type)),
- ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)),
- ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=events_table_type)),
- ast.Field(
- chain=["elements_chain"], type=ast.FieldType(name="elements_chain", table_type=events_table_type)
- ),
- ast.Field(chain=["created_at"], type=ast.FieldType(name="created_at", table_type=events_table_type)),
- ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=events_table_type)),
- ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=events_table_type)),
- ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=events_table_type)),
- ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=events_table_type)),
- ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=events_table_type)),
- ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=events_table_type)),
+ ast.Field(
+ chain=["uuid"],
+ type=ast.FieldType(name="uuid", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["properties"],
+ type=ast.FieldType(name="properties", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["distinct_id"],
+ type=ast.FieldType(name="distinct_id", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["elements_chain"],
+ type=ast.FieldType(name="elements_chain", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["created_at"],
+ type=ast.FieldType(name="created_at", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$session_id"],
+ type=ast.FieldType(name="$session_id", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$group_0"],
+ type=ast.FieldType(name="$group_0", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$group_1"],
+ type=ast.FieldType(name="$group_1", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$group_2"],
+ type=ast.FieldType(name="$group_2", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$group_3"],
+ type=ast.FieldType(name="$group_3", table_type=events_table_type),
+ ),
+ ast.Field(
+ chain=["$group_4"],
+ type=ast.FieldType(name="$group_4", table_type=events_table_type),
+ ),
],
)
@@ -805,32 +890,58 @@ def test_asterisk_expander_table_alias(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=events_table_alias_type)),
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_alias_type)),
ast.Field(
- chain=["properties"], type=ast.FieldType(name="properties", table_type=events_table_alias_type)
+ chain=["uuid"],
+ type=ast.FieldType(name="uuid", table_type=events_table_alias_type),
),
ast.Field(
- chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_alias_type)
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=events_table_alias_type),
),
ast.Field(
- chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=events_table_alias_type)
+ chain=["properties"],
+ type=ast.FieldType(name="properties", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["distinct_id"],
+ type=ast.FieldType(name="distinct_id", table_type=events_table_alias_type),
),
ast.Field(
chain=["elements_chain"],
type=ast.FieldType(name="elements_chain", table_type=events_table_alias_type),
),
ast.Field(
- chain=["created_at"], type=ast.FieldType(name="created_at", table_type=events_table_alias_type)
+ chain=["created_at"],
+ type=ast.FieldType(name="created_at", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["$session_id"],
+ type=ast.FieldType(name="$session_id", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["$group_0"],
+ type=ast.FieldType(name="$group_0", table_type=events_table_alias_type),
),
ast.Field(
- chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=events_table_alias_type)
+ chain=["$group_1"],
+ type=ast.FieldType(name="$group_1", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["$group_2"],
+ type=ast.FieldType(name="$group_2", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["$group_3"],
+ type=ast.FieldType(name="$group_3", table_type=events_table_alias_type),
+ ),
+ ast.Field(
+ chain=["$group_4"],
+ type=ast.FieldType(name="$group_4", table_type=events_table_alias_type),
),
- ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=events_table_alias_type)),
- ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=events_table_alias_type)),
- ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=events_table_alias_type)),
- ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=events_table_alias_type)),
- ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=events_table_alias_type)),
],
)
@@ -852,8 +963,14 @@ def test_asterisk_expander_subquery(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["a"], type=ast.FieldType(name="a", table_type=select_subquery_type)),
- ast.Field(chain=["b"], type=ast.FieldType(name="b", table_type=select_subquery_type)),
+ ast.Field(
+ chain=["a"],
+ type=ast.FieldType(name="a", table_type=select_subquery_type),
+ ),
+ ast.Field(
+ chain=["b"],
+ type=ast.FieldType(name="b", table_type=select_subquery_type),
+ ),
],
)
@@ -878,8 +995,14 @@ def test_asterisk_expander_subquery_alias(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["a"], type=ast.FieldType(name="a", table_type=select_subquery_type)),
- ast.Field(chain=["b"], type=ast.FieldType(name="b", table_type=select_subquery_type)),
+ ast.Field(
+ chain=["a"],
+ type=ast.FieldType(name="a", table_type=select_subquery_type),
+ ),
+ ast.Field(
+ chain=["b"],
+ type=ast.FieldType(name="b", table_type=select_subquery_type),
+ ),
],
)
@@ -914,22 +1037,58 @@ def test_asterisk_expander_from_subquery_table(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=inner_select_type)),
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=inner_select_type)),
- ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=inner_select_type)),
- ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=inner_select_type)),
- ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=inner_select_type)),
+ ast.Field(
+ chain=["uuid"],
+ type=ast.FieldType(name="uuid", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["properties"],
+ type=ast.FieldType(name="properties", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["distinct_id"],
+ type=ast.FieldType(name="distinct_id", table_type=inner_select_type),
+ ),
ast.Field(
chain=["elements_chain"],
type=ast.FieldType(name="elements_chain", table_type=inner_select_type),
),
- ast.Field(chain=["created_at"], type=ast.FieldType(name="created_at", table_type=inner_select_type)),
- ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=inner_select_type)),
- ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=inner_select_type)),
- ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=inner_select_type)),
- ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=inner_select_type)),
- ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=inner_select_type)),
- ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=inner_select_type)),
+ ast.Field(
+ chain=["created_at"],
+ type=ast.FieldType(name="created_at", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$session_id"],
+ type=ast.FieldType(name="$session_id", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_0"],
+ type=ast.FieldType(name="$group_0", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_1"],
+ type=ast.FieldType(name="$group_1", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_2"],
+ type=ast.FieldType(name="$group_2", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_3"],
+ type=ast.FieldType(name="$group_3", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_4"],
+ type=ast.FieldType(name="$group_4", table_type=inner_select_type),
+ ),
],
)
@@ -938,7 +1097,8 @@ def test_asterisk_expander_multiple_table_error(self):
with self.assertRaises(ResolverException) as e:
resolve_types(node, self.context)
self.assertEqual(
- str(e.exception), "Cannot use '*' without table name when there are multiple tables in the query"
+ str(e.exception),
+ "Cannot use '*' without table name when there are multiple tables in the query",
)
@override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False)
@@ -977,22 +1137,58 @@ def test_asterisk_expander_select_union(self):
self.assertEqual(
node.select,
[
- ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=inner_select_type)),
- ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=inner_select_type)),
- ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=inner_select_type)),
- ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=inner_select_type)),
- ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=inner_select_type)),
+ ast.Field(
+ chain=["uuid"],
+ type=ast.FieldType(name="uuid", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["event"],
+ type=ast.FieldType(name="event", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["properties"],
+ type=ast.FieldType(name="properties", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["timestamp"],
+ type=ast.FieldType(name="timestamp", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["distinct_id"],
+ type=ast.FieldType(name="distinct_id", table_type=inner_select_type),
+ ),
ast.Field(
chain=["elements_chain"],
type=ast.FieldType(name="elements_chain", table_type=inner_select_type),
),
- ast.Field(chain=["created_at"], type=ast.FieldType(name="created_at", table_type=inner_select_type)),
- ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=inner_select_type)),
- ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=inner_select_type)),
- ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=inner_select_type)),
- ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=inner_select_type)),
- ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=inner_select_type)),
- ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=inner_select_type)),
+ ast.Field(
+ chain=["created_at"],
+ type=ast.FieldType(name="created_at", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$session_id"],
+ type=ast.FieldType(name="$session_id", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_0"],
+ type=ast.FieldType(name="$group_0", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_1"],
+ type=ast.FieldType(name="$group_1", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_2"],
+ type=ast.FieldType(name="$group_2", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_3"],
+ type=ast.FieldType(name="$group_3", table_type=inner_select_type),
+ ),
+ ast.Field(
+ chain=["$group_4"],
+ type=ast.FieldType(name="$group_4", table_type=inner_select_type),
+ ),
],
)
diff --git a/posthog/hogql/test/test_visitor.py b/posthog/hogql/test/test_visitor.py
index d946af02073dd..78b2d6dc42536 100644
--- a/posthog/hogql/test/test_visitor.py
+++ b/posthog/hogql/test/test_visitor.py
@@ -66,7 +66,10 @@ def test_everything_visitor(self):
],
)
),
- ast.Alias(expr=ast.SelectQuery(select=[ast.Field(chain=["timestamp"])]), alias="f"),
+ ast.Alias(
+ expr=ast.SelectQuery(select=[ast.Field(chain=["timestamp"])]),
+ alias="f",
+ ),
ast.SelectQuery(
select=[ast.Field(chain=["a"])],
select_from=ast.JoinExpr(
diff --git a/posthog/hogql/transforms/in_cohort.py b/posthog/hogql/transforms/in_cohort.py
index aa1fe0e3a23ee..670d0a8e73c2a 100644
--- a/posthog/hogql/transforms/in_cohort.py
+++ b/posthog/hogql/transforms/in_cohort.py
@@ -9,12 +9,20 @@
from posthog.hogql.visitor import TraversingVisitor, clone_expr
-def resolve_in_cohorts(node: ast.Expr, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None):
+def resolve_in_cohorts(
+ node: ast.Expr,
+ stack: Optional[List[ast.SelectQuery]] = None,
+ context: HogQLContext = None,
+):
InCohortResolver(stack=stack, context=context).visit(node)
class InCohortResolver(TraversingVisitor):
- def __init__(self, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None):
+ def __init__(
+ self,
+ stack: Optional[List[ast.SelectQuery]] = None,
+ context: HogQLContext = None,
+ ):
super().__init__()
self.stack: List[ast.SelectQuery] = stack or []
self.context = context
@@ -80,7 +88,12 @@ def visit_compare_operation(self, node: ast.CompareOperation):
self.visit(node.right)
def _add_join_for_cohort(
- self, cohort_id: int, is_static: bool, select: ast.SelectQuery, compare: ast.CompareOperation, negative: bool
+ self,
+ cohort_id: int,
+ is_static: bool,
+ select: ast.SelectQuery,
+ compare: ast.CompareOperation,
+ negative: bool,
):
must_add_join = True
last_join = select.select_from
@@ -115,9 +128,14 @@ def _add_join_for_cohort(
)
),
)
- new_join = cast(ast.JoinExpr, resolve_types(new_join, self.context, [self.stack[-1].type]))
+ new_join = cast(
+ ast.JoinExpr,
+ resolve_types(new_join, self.context, [self.stack[-1].type]),
+ )
new_join.constraint.expr.left = resolve_types(
- ast.Field(chain=[f"in_cohort__{cohort_id}", "person_id"]), self.context, [self.stack[-1].type]
+ ast.Field(chain=[f"in_cohort__{cohort_id}", "person_id"]),
+ self.context,
+ [self.stack[-1].type],
)
new_join.constraint.expr.right = clone_expr(compare.left)
if last_join:
@@ -127,6 +145,8 @@ def _add_join_for_cohort(
compare.op = ast.CompareOperationOp.NotEq if negative else ast.CompareOperationOp.Eq
compare.left = resolve_types(
- ast.Field(chain=[f"in_cohort__{cohort_id}", "matched"]), self.context, [self.stack[-1].type]
+ ast.Field(chain=[f"in_cohort__{cohort_id}", "matched"]),
+ self.context,
+ [self.stack[-1].type],
)
compare.right = resolve_types(ast.Constant(value=1), self.context, [self.stack[-1].type])
diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py
index d2bd4c1398aa9..48018cd789264 100644
--- a/posthog/hogql/transforms/lazy_tables.py
+++ b/posthog/hogql/transforms/lazy_tables.py
@@ -6,10 +6,15 @@
from posthog.hogql.database.models import LazyJoin, LazyTable
from posthog.hogql.errors import HogQLException
from posthog.hogql.resolver import resolve_types
+from posthog.hogql.resolver_utils import get_long_table_name
from posthog.hogql.visitor import TraversingVisitor
-def resolve_lazy_tables(node: ast.Expr, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None):
+def resolve_lazy_tables(
+ node: ast.Expr,
+ stack: Optional[List[ast.SelectQuery]] = None,
+ context: HogQLContext = None,
+):
LazyTableResolver(stack=stack, context=context).visit(node)
@@ -28,27 +33,15 @@ class TableToAdd:
class LazyTableResolver(TraversingVisitor):
- def __init__(self, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None):
+ def __init__(
+ self,
+ stack: Optional[List[ast.SelectQuery]] = None,
+ context: HogQLContext = None,
+ ):
super().__init__()
self.stack_of_fields: List[List[ast.FieldType | ast.PropertyType]] = [[]] if stack else []
self.context = context
- def _get_long_table_name(self, select: ast.SelectQueryType, type: ast.BaseTableType) -> str:
- if isinstance(type, ast.TableType):
- return select.get_alias_for_table_type(type)
- elif isinstance(type, ast.LazyTableType):
- return type.table.to_printed_hogql()
- elif isinstance(type, ast.TableAliasType):
- return type.alias
- elif isinstance(type, ast.SelectQueryAliasType):
- return type.alias
- elif isinstance(type, ast.LazyJoinType):
- return f"{self._get_long_table_name(select, type.table_type)}__{type.field}"
- elif isinstance(type, ast.VirtualTableType):
- return f"{self._get_long_table_name(select, type.table_type)}__{type.field}"
- else:
- raise HogQLException(f"Unknown table type in LazyTableResolver: {type.__class__.__name__}")
-
def visit_property_type(self, node: ast.PropertyType):
if node.joined_subquery is not None:
# we have already visited this property
@@ -110,7 +103,7 @@ def visit_select_query(self, node: ast.SelectQuery):
if field_or_property.field_type.table_type == join.table.type:
fields.append(field_or_property)
if len(fields) == 0:
- table_name = join.alias or self._get_long_table_name(select_type, join.table.type)
+ table_name = join.alias or get_long_table_name(select_type, join.table.type)
tables_to_add[table_name] = TableToAdd(fields_accessed={}, lazy_table=join.table.type.table)
join = join.next_join
@@ -139,8 +132,8 @@ def visit_select_query(self, node: ast.SelectQuery):
# Loop over the collected lazy tables in reverse order to create the joins
for table_type in reversed(table_types):
if isinstance(table_type, ast.LazyJoinType):
- from_table = self._get_long_table_name(select_type, table_type.table_type)
- to_table = self._get_long_table_name(select_type, table_type)
+ from_table = get_long_table_name(select_type, table_type.table_type)
+ to_table = get_long_table_name(select_type, table_type)
if to_table not in joins_to_add:
joins_to_add[to_table] = JoinToAdd(
fields_accessed={}, # collect here all fields accessed on this table
@@ -159,7 +152,7 @@ def visit_select_query(self, node: ast.SelectQuery):
else:
new_join.fields_accessed[field.name] = chain
elif isinstance(table_type, ast.LazyTableType):
- table_name = self._get_long_table_name(select_type, table_type)
+ table_name = get_long_table_name(select_type, table_type)
if table_name not in tables_to_add:
tables_to_add[table_name] = TableToAdd(
fields_accessed={}, # collect here all fields accessed on this table
@@ -203,9 +196,14 @@ def visit_select_query(self, node: ast.SelectQuery):
# For all the collected joins, create the join subqueries, and add them to the table.
for to_table, join_scope in joins_to_add.items():
join_to_add: ast.JoinExpr = join_scope.lazy_join.join_function(
- join_scope.from_table, join_scope.to_table, join_scope.fields_accessed, self.context.modifiers
+ join_scope.from_table,
+ join_scope.to_table,
+ join_scope.fields_accessed,
+ self.context,
+ node,
)
join_to_add = cast(ast.JoinExpr, resolve_types(join_to_add, self.context, [node.type]))
+
select_type.tables[to_table] = join_to_add.type
join_ptr = node.select_from
@@ -239,7 +237,7 @@ def visit_select_query(self, node: ast.SelectQuery):
else:
raise HogQLException("Should not be reachable")
- table_name = self._get_long_table_name(select_type, table_type)
+ table_name = get_long_table_name(select_type, table_type)
table_type = select_type.tables[table_name]
if isinstance(field_or_property, ast.FieldType):
diff --git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py
index be46d24873a91..a2fe60c9aaacd 100644
--- a/posthog/hogql/transforms/property_types.py
+++ b/posthog/hogql/transforms/property_types.py
@@ -46,7 +46,10 @@ def resolve_property_types(node: ast.Expr, context: HogQLContext = None) -> ast.
timezone = context.database.get_timezone() if context and context.database else "UTC"
property_swapper = PropertySwapper(
- timezone=timezone, event_properties=event_properties, person_properties=person_properties, context=context
+ timezone=timezone,
+ event_properties=event_properties,
+ person_properties=person_properties,
+ context=context,
)
return property_swapper.visit(node)
@@ -83,7 +86,11 @@ def visit_field(self, node: ast.Field):
class PropertySwapper(CloningVisitor):
def __init__(
- self, timezone: str, event_properties: Dict[str, str], person_properties: Dict[str, str], context: HogQLContext
+ self,
+ timezone: str,
+ event_properties: Dict[str, str],
+ person_properties: Dict[str, str],
+ context: HogQLContext,
):
super().__init__(clear_types=False)
self.timezone = timezone
@@ -98,7 +105,9 @@ def visit_field(self, node: ast.Field):
name="toTimeZone",
args=[node, ast.Constant(value=self.timezone)],
type=ast.CallType(
- name="toTimeZone", arg_types=[ast.DateTimeType()], return_type=ast.DateTimeType()
+ name="toTimeZone",
+ arg_types=[ast.DateTimeType()],
+ return_type=ast.DateTimeType(),
),
)
@@ -128,7 +137,10 @@ def visit_field(self, node: ast.Field):
return node
def _convert_string_property_to_type(
- self, node: ast.Field, property_type: Literal["event", "person"], property_name: str
+ self,
+ node: ast.Field,
+ property_type: Literal["event", "person"],
+ property_name: str,
):
posthog_field_type = (
self.person_properties.get(property_name)
@@ -146,7 +158,12 @@ def _convert_string_property_to_type(
return parse_expr("{node} = 'true'", {"node": node})
return node
- def _add_property_notice(self, node: ast.Field, property_type: Literal["event", "person"], field_type: str) -> str:
+ def _add_property_notice(
+ self,
+ node: ast.Field,
+ property_type: Literal["event", "person"],
+ field_type: str,
+ ) -> str:
property_name = node.chain[-1]
if property_type == "person":
if self.context.modifiers.personsOnEventsMode != PersonOnEventsMode.DISABLED:
diff --git a/posthog/hogql/transforms/test/test_in_cohort.py b/posthog/hogql/transforms/test/test_in_cohort.py
index dbef0b685aadf..26e2e18b66af7 100644
--- a/posthog/hogql/transforms/test/test_in_cohort.py
+++ b/posthog/hogql/transforms/test/test_in_cohort.py
@@ -8,7 +8,12 @@
from posthog.models.cohort.util import recalculate_cohortpeople
from posthog.models.utils import UUIDT
from posthog.schema import HogQLQueryModifiers
-from posthog.test.base import BaseTest, _create_person, _create_event, flush_persons_and_events
+from posthog.test.base import (
+ BaseTest,
+ _create_person,
+ _create_event,
+ flush_persons_and_events,
+)
elements_chain_match = lambda x: parse_expr("match(elements_chain, {regex})", {"regex": ast.Constant(value=str(x))})
not_call = lambda x: ast.Call(name="not", args=[x])
@@ -33,7 +38,8 @@ def _create_random_events(self) -> str:
def test_in_cohort_dynamic(self):
random_uuid = self._create_random_events()
cohort = Cohort.objects.create(
- team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}]
+ team=self.team,
+ groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}],
)
recalculate_cohortpeople(cohort, pending_version=0)
response = execute_hogql_query(
@@ -100,5 +106,8 @@ def test_in_cohort_error(self):
self.assertEqual(str(e.exception), "cohort() takes exactly one string or integer argument")
with self.assertRaises(HogQLException) as e:
- execute_hogql_query(f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", self.team)
+ execute_hogql_query(
+ f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'",
+ self.team,
+ )
self.assertEqual(str(e.exception), "Could not find a cohort with the name 'blabla'")
diff --git a/posthog/hogql/transforms/test/test_lazy_tables.py b/posthog/hogql/transforms/test/test_lazy_tables.py
index aad1dbae3fb1c..131fcb227fbbc 100644
--- a/posthog/hogql/transforms/test/test_lazy_tables.py
+++ b/posthog/hogql/transforms/test/test_lazy_tables.py
@@ -80,5 +80,9 @@ def test_select_count_from_lazy_table(self):
def _print_select(self, select: str):
expr = parse_select(select)
- query = print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse")
+ query = print_ast(
+ expr,
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True),
+ "clickhouse",
+ )
return pretty_print_in_tests(query, self.team.pk)
diff --git a/posthog/hogql/transforms/test/test_property_types.py b/posthog/hogql/transforms/test/test_property_types.py
index c50f19a0a792d..10d8bf27cc97b 100644
--- a/posthog/hogql/transforms/test/test_property_types.py
+++ b/posthog/hogql/transforms/test/test_property_types.py
@@ -30,10 +30,16 @@ def setUp(self):
defaults={"property_type": "Numeric"},
)
PropertyDefinition.objects.get_or_create(
- team=self.team, type=PropertyDefinition.Type.EVENT, name="bool", defaults={"property_type": "Boolean"}
+ team=self.team,
+ type=PropertyDefinition.Type.EVENT,
+ name="bool",
+ defaults={"property_type": "Boolean"},
)
PropertyDefinition.objects.get_or_create(
- team=self.team, type=PropertyDefinition.Type.PERSON, name="tickets", defaults={"property_type": "Numeric"}
+ team=self.team,
+ type=PropertyDefinition.Type.PERSON,
+ name="tickets",
+ defaults={"property_type": "Numeric"},
)
PropertyDefinition.objects.get_or_create(
team=self.team,
@@ -89,5 +95,9 @@ def test_resolve_property_types_event_person_poe_on(self):
def _print_select(self, select: str):
expr = parse_select(select)
- query = print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse")
+ query = print_ast(
+ expr,
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True),
+ "clickhouse",
+ )
return pretty_print_in_tests(query, self.team.pk)
diff --git a/posthog/hogql/visitor.py b/posthog/hogql/visitor.py
index c8e1a5a57a789..db6b1ef6fb72e 100644
--- a/posthog/hogql/visitor.py
+++ b/posthog/hogql/visitor.py
@@ -128,8 +128,8 @@ def visit_select_query(self, node: ast.SelectQuery):
self.visit(expr)
for expr in node.limit_by or []:
self.visit(expr)
- self.visit(node.limit),
- self.visit(node.offset),
+ self.visit(node.limit)
+ self.visit(node.offset)
for expr in (node.window_exprs or {}).values():
self.visit(expr)
@@ -248,7 +248,11 @@ def visit_join_constraint(self, node: ast.JoinConstraint):
class CloningVisitor(Visitor):
"""Visitor that traverses and clones the AST tree. Clears types."""
- def __init__(self, clear_types: Optional[bool] = True, clear_locations: Optional[bool] = False):
+ def __init__(
+ self,
+ clear_types: Optional[bool] = True,
+ clear_locations: Optional[bool] = False,
+ ):
self.clear_types = clear_types
self.clear_locations = clear_locations
diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py
index ff85691b983d3..d85d251684fa1 100644
--- a/posthog/hogql_queries/events_query_runner.py
+++ b/posthog/hogql_queries/events_query_runner.py
@@ -99,7 +99,9 @@ def to_query(self) -> ast.SelectQuery:
with self.timings.measure("event"):
where_exprs.append(
parse_expr(
- "event = {event}", {"event": ast.Constant(value=self.query.event)}, timings=self.timings
+ "event = {event}",
+ {"event": ast.Constant(value=self.query.event)},
+ timings=self.timings,
)
)
if self.query.actionId:
@@ -118,7 +120,9 @@ def to_query(self) -> ast.SelectQuery:
ids_list = list(map(str, distinct_ids))
where_exprs.append(
parse_expr(
- "distinct_id in {list}", {"list": ast.Constant(value=ids_list)}, timings=self.timings
+ "distinct_id in {list}",
+ {"list": ast.Constant(value=ids_list)},
+ timings=self.timings,
)
)
@@ -131,7 +135,9 @@ def to_query(self) -> ast.SelectQuery:
parsed_date = relative_date_parse(before, self.team.timezone_info)
where_exprs.append(
parse_expr(
- "timestamp < {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}, timings=self.timings
+ "timestamp < {timestamp}",
+ {"timestamp": ast.Constant(value=parsed_date)},
+ timings=self.timings,
)
)
@@ -261,7 +267,10 @@ def select_input_raw(self) -> List[str]:
def limit(self) -> int:
# importing locally so we could override in a test
- from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS
+ from posthog.hogql.constants import (
+ DEFAULT_RETURNED_ROWS,
+ MAX_SELECT_RETURNED_ROWS,
+ )
# adding +1 to the limit to check if there's a "next page" after the requested results
return (
diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py
index 815822ce894c6..576419fdff967 100644
--- a/posthog/hogql_queries/hogql_query_runner.py
+++ b/posthog/hogql_queries/hogql_query_runner.py
@@ -10,7 +10,13 @@
from posthog.hogql.timings import HogQLTimings
from posthog.hogql_queries.query_runner import QueryRunner
from posthog.models import Team
-from posthog.schema import HogQLQuery, HogQLQueryResponse, DashboardFilter, HogQLFilters, DateRange
+from posthog.schema import (
+ HogQLQuery,
+ HogQLQueryResponse,
+ DashboardFilter,
+ HogQLFilters,
+ DateRange,
+)
class HogQLQueryRunner(QueryRunner):
diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py
index ffa274958ceb0..87a8a345a8462 100644
--- a/posthog/hogql_queries/insights/lifecycle_query_runner.py
+++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py
@@ -3,7 +3,10 @@
from typing import Optional, Any, Dict, List
from django.utils.timezone import datetime
-from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL
+from posthog.caching.insights_api import (
+ BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL,
+ REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL,
+)
from posthog.caching.utils import is_stale
from posthog.hogql import ast
@@ -16,7 +19,12 @@
from posthog.models import Team, Action
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.models.filters.mixins.utils import cached_property
-from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse
+from posthog.schema import (
+ LifecycleQuery,
+ ActionsNode,
+ EventsNode,
+ LifecycleQueryResponse,
+)
class LifecycleQueryRunner(QueryRunner):
@@ -139,7 +147,10 @@ def calculate(self):
@cached_property
def query_date_range(self):
return QueryDateRange(
- date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now()
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=self.query.interval,
+ now=datetime.now(),
)
@cached_property
diff --git a/posthog/hogql_queries/insights/test/test_events_query.py b/posthog/hogql_queries/insights/test/test_events_query.py
index 707891d424a41..927829290367f 100644
--- a/posthog/hogql_queries/insights/test/test_events_query.py
+++ b/posthog/hogql_queries/insights/test/test_events_query.py
@@ -8,7 +8,12 @@
EventPropertyFilter,
PropertyOperator,
)
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_person,
+)
class TestEventsQueryRunner(ClickhouseTestMixin, APIBaseTest):
diff --git a/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py b/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py
index 75637d5216ebd..1dba61d970e6c 100644
--- a/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py
+++ b/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py
@@ -6,7 +6,13 @@
from posthog.hogql_queries.insights.lifecycle_query_runner import LifecycleQueryRunner
from posthog.models.utils import UUIDT
from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_person,
+ flush_persons_and_events,
+)
class TestLifecycleQueryRunner(ClickhouseTestMixin, APIBaseTest):
@@ -26,7 +32,11 @@ def _create_random_events(self) -> str:
distinct_id="bla",
event="random event",
team=self.team,
- properties={"random_prop": "don't include", "random_uuid": random_uuid, "index": index},
+ properties={
+ "random_prop": "don't include",
+ "random_uuid": random_uuid,
+ "index": index,
+ },
)
flush_persons_and_events()
return random_uuid
@@ -39,7 +49,10 @@ def _create_events(self, data, event="$pageview"):
_create_person(
team_id=self.team.pk,
distinct_ids=[id],
- properties={"name": id, **({"email": "test@posthog.com"} if id == "p1" else {})},
+ properties={
+ "name": id,
+ **({"email": "test@posthog.com"} if id == "p1" else {}),
+ },
)
)
for timestamp in timestamps:
@@ -69,7 +82,9 @@ def _create_test_events(self):
def _create_query_runner(self, date_from, date_to, interval) -> LifecycleQueryRunner:
series = [EventsNode(event="$pageview")]
query = LifecycleQuery(
- dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series
+ dateRange=DateRange(date_from=date_from, date_to=date_to),
+ interval=interval,
+ series=series,
)
return LifecycleQueryRunner(team=self.team, query=query)
diff --git a/posthog/hogql_queries/insights/trends/aggregation_operations.py b/posthog/hogql_queries/insights/trends/aggregation_operations.py
index f585fc313dc70..3920344cbfd52 100644
--- a/posthog/hogql_queries/insights/trends/aggregation_operations.py
+++ b/posthog/hogql_queries/insights/trends/aggregation_operations.py
@@ -121,7 +121,10 @@ def _events_query(self, events_where_clause: ast.Expr, sample_value: ast.RatioEx
timestamp,
actor_id
""",
- placeholders={"events_where_clause": events_where_clause, "sample": sample_value},
+ placeholders={
+ "events_where_clause": events_where_clause,
+ "sample": sample_value,
+ },
)
def get_query_orchestrator(self, events_where_clause: ast.Expr, sample_value: str):
diff --git a/posthog/hogql_queries/insights/trends/breakdown.py b/posthog/hogql_queries/insights/trends/breakdown.py
index 403c5be4da536..a713cb09dcee1 100644
--- a/posthog/hogql_queries/insights/trends/breakdown.py
+++ b/posthog/hogql_queries/insights/trends/breakdown.py
@@ -4,7 +4,10 @@
from posthog.hogql.timings import HogQLTimings
from posthog.hogql_queries.insights.trends.breakdown_session import BreakdownSession
from posthog.hogql_queries.insights.trends.breakdown_values import BreakdownValues
-from posthog.hogql_queries.insights.trends.utils import get_properties_chain, series_event_name
+from posthog.hogql_queries.insights.trends.utils import (
+ get_properties_chain,
+ series_event_name,
+)
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.team.team import Team
@@ -143,7 +146,6 @@ def _get_breakdown_histogram_multi_if(self) -> ast.Expr:
buckets = self._get_breakdown_histogram_buckets()
for lower_bound, upper_bound in buckets:
-
multi_if_exprs.extend(
[
ast.And(
diff --git a/posthog/hogql_queries/insights/trends/breakdown_values.py b/posthog/hogql_queries/insights/trends/breakdown_values.py
index 72ae54d0286be..37d9f7168e121 100644
--- a/posthog/hogql_queries/insights/trends/breakdown_values.py
+++ b/posthog/hogql_queries/insights/trends/breakdown_values.py
@@ -122,7 +122,12 @@ def _where_filter(self) -> ast.Expr:
)
if self.event_name is not None:
- filters.append(parse_expr("event = {event}", placeholders={"event": ast.Constant(value=self.event_name)}))
+ filters.append(
+ parse_expr(
+ "event = {event}",
+ placeholders={"event": ast.Constant(value=self.event_name)},
+ )
+ )
return ast.And(exprs=filters)
diff --git a/posthog/hogql_queries/insights/trends/query_builder.py b/posthog/hogql_queries/insights/trends/query_builder.py
index 3c0cd7d9356c7..0a90cae985dba 100644
--- a/posthog/hogql_queries/insights/trends/query_builder.py
+++ b/posthog/hogql_queries/insights/trends/query_builder.py
@@ -3,7 +3,9 @@
from posthog.hogql.parser import parse_expr, parse_select
from posthog.hogql.property import property_to_expr
from posthog.hogql.timings import HogQLTimings
-from posthog.hogql_queries.insights.trends.aggregation_operations import AggregationOperations
+from posthog.hogql_queries.insights.trends.aggregation_operations import (
+ AggregationOperations,
+)
from posthog.hogql_queries.insights.trends.breakdown import Breakdown
from posthog.hogql_queries.insights.trends.breakdown_session import BreakdownSession
from posthog.hogql_queries.insights.trends.utils import series_event_name
@@ -157,7 +159,8 @@ def _get_events_subquery(self) -> ast.SelectQuery:
# Just complex series aggregation
elif self._aggregation_operation.requires_query_orchestration():
return self._aggregation_operation.get_query_orchestrator(
- events_where_clause=self._events_filter(), sample_value=self._sample_value()
+ events_where_clause=self._events_filter(),
+ sample_value=self._sample_value(),
).build()
return default_query
@@ -222,7 +225,8 @@ def _events_filter(self) -> ast.Expr:
if series_event_name(self.series) is not None:
filters.append(
parse_expr(
- "event = {event}", placeholders={"event": ast.Constant(value=series_event_name(self.series))}
+ "event = {event}",
+ placeholders={"event": ast.Constant(value=series_event_name(self.series))},
)
)
diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
index 760c55577d7db..88e012672e12d 100644
--- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
@@ -3,8 +3,19 @@
from freezegun import freeze_time
from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner
-from posthog.schema import DateRange, EventsNode, IntervalType, TrendsFilter, TrendsQuery
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person
+from posthog.schema import (
+ DateRange,
+ EventsNode,
+ IntervalType,
+ TrendsFilter,
+ TrendsQuery,
+)
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_person,
+)
@dataclass
@@ -41,7 +52,12 @@ def _create_events(self, data: List[SeriesTestData]):
)
for event in person.events:
for timestamp in event.timestamps:
- _create_event(team=self.team, event=event.event, distinct_id=id, timestamp=timestamp)
+ _create_event(
+ team=self.team,
+ event=event.event,
+ distinct_id=id,
+ timestamp=timestamp,
+ )
return person_result
def _create_test_events(self):
@@ -74,7 +90,10 @@ def _create_test_events(self):
SeriesTestData(
distinct_id="p2",
events=[
- Series(event="$pageview", timestamps=["2020-01-09T12:00:00Z", "2020-01-12T12:00:00Z"]),
+ Series(
+ event="$pageview",
+ timestamps=["2020-01-09T12:00:00Z", "2020-01-12T12:00:00Z"],
+ ),
Series(
event="$pageleave",
timestamps=[
@@ -111,7 +130,12 @@ def _create_query_runner(self, date_from, date_to, interval, series, trends_filt
return TrendsQueryRunner(team=self.team, query=query)
def _run_trends_query(
- self, date_from, date_to, interval, series=None, trends_filters: Optional[TrendsFilter] = None
+ self,
+ date_from,
+ date_to,
+ interval,
+ series=None,
+ trends_filters: Optional[TrendsFilter] = None,
):
return self._create_query_runner(date_from, date_to, interval, series, trends_filters).calculate()
@@ -221,7 +245,11 @@ def test_trends_query_compare(self):
self._create_test_events()
response = self._run_trends_query(
- "2020-01-15", "2020-01-19", IntervalType.day, [EventsNode(event="$pageview")], TrendsFilter(compare=True)
+ "2020-01-15",
+ "2020-01-19",
+ IntervalType.day,
+ [EventsNode(event="$pageview")],
+ TrendsFilter(compare=True),
)
self.assertEqual(2, len(response.results))
diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py
index 9c1dc4eca64f5..cfbcb60fdf28e 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py
@@ -6,7 +6,10 @@
from typing import List, Optional, Any, Dict
from django.utils.timezone import datetime
-from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL
+from posthog.caching.insights_api import (
+ BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL,
+ REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL,
+)
from posthog.caching.utils import is_stale
from posthog.hogql import ast
@@ -17,12 +20,20 @@
from posthog.hogql_queries.query_runner import QueryRunner
from posthog.hogql_queries.utils.formula_ast import FormulaAST
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
-from posthog.hogql_queries.utils.query_previous_period_date_range import QueryPreviousPeriodDateRange
+from posthog.hogql_queries.utils.query_previous_period_date_range import (
+ QueryPreviousPeriodDateRange,
+)
from posthog.models import Team
from posthog.models.cohort.cohort import Cohort
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.property_definition import PropertyDefinition
-from posthog.schema import ActionsNode, EventsNode, HogQLQueryResponse, TrendsQuery, TrendsQueryResponse
+from posthog.schema import (
+ ActionsNode,
+ EventsNode,
+ HogQLQueryResponse,
+ TrendsQuery,
+ TrendsQueryResponse,
+)
class TrendsQueryRunner(QueryRunner):
@@ -141,7 +152,10 @@ def build_series_response(self, response: HogQLQueryResponse, series: SeriesWith
# Modifications for when comparing to previous period
if self.query.trendsFilter is not None and self.query.trendsFilter.compare:
labels = [
- "{} {}".format(self.query.interval if self.query.interval is not None else "day", i)
+ "{} {}".format(
+ self.query.interval if self.query.interval is not None else "day",
+ i,
+ )
for i in range(len(series_object["labels"]))
]
@@ -171,13 +185,19 @@ def build_series_response(self, response: HogQLQueryResponse, series: SeriesWith
@cached_property
def query_date_range(self):
return QueryDateRange(
- date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now()
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=self.query.interval,
+ now=datetime.now(),
)
@cached_property
def query_previous_date_range(self):
return QueryPreviousPeriodDateRange(
- date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now()
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=self.query.interval,
+ now=datetime.now(),
)
def series_event(self, series: EventsNode | ActionsNode) -> str | None:
@@ -209,12 +229,16 @@ def setup_series(self) -> List[SeriesWithExtras]:
for series in series_with_extras:
updated_series.append(
SeriesWithExtras(
- series=series.series, is_previous_period_series=False, overriden_query=series.overriden_query
+ series=series.series,
+ is_previous_period_series=False,
+ overriden_query=series.overriden_query,
)
)
updated_series.append(
SeriesWithExtras(
- series=series.series, is_previous_period_series=True, overriden_query=series.overriden_query
+ series=series.series,
+ is_previous_period_series=True,
+ overriden_query=series.overriden_query,
)
)
series_with_extras = updated_series
@@ -265,7 +289,9 @@ def _is_breakdown_field_boolean(self):
property_type = PropertyDefinition.Type.EVENT
field_type = self._event_property(
- self.query.breakdown.breakdown, property_type, self.query.breakdown.breakdown_group_type_index
+ self.query.breakdown.breakdown,
+ property_type,
+ self.query.breakdown.breakdown_group_type_index,
)
return field_type == "Boolean"
@@ -273,7 +299,12 @@ def _convert_boolean(self, value: any):
bool_map = {1: "true", 0: "false", "": ""}
return bool_map.get(value) or value
- def _event_property(self, field: str, field_type: PropertyDefinition.Type, group_type_index: Optional[int]):
+ def _event_property(
+ self,
+ field: str,
+ field_type: PropertyDefinition.Type,
+ group_type_index: Optional[int],
+ ):
return PropertyDefinition.objects.get(
name=field,
team=self.team,
diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py
index f8941c3899125..ce490cadfc834 100644
--- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py
+++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py
@@ -240,11 +240,17 @@ def _properties(filter: Dict):
if raw_properties is None or len(raw_properties) == 0:
return {}
elif isinstance(raw_properties, list):
- raw_properties = {"type": "AND", "values": [{"type": "AND", "values": raw_properties}]}
+ raw_properties = {
+ "type": "AND",
+ "values": [{"type": "AND", "values": raw_properties}],
+ }
return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))}
elif is_old_style_properties(raw_properties):
raw_properties = transform_old_style_properties(raw_properties)
- raw_properties = {"type": "AND", "values": [{"type": "AND", "values": raw_properties}]}
+ raw_properties = {
+ "type": "AND",
+ "values": [{"type": "AND", "values": raw_properties}],
+ }
return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))}
else:
return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))}
diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py
index f07405b248976..9a130faa9774f 100644
--- a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py
+++ b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py
@@ -58,8 +58,20 @@
}
insight_2 = {
"events": [
- {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"},
- {"id": "upgraded_plan", "name": "upgraded_plan", "type": "events", "order": 4, "custom_name": "Upgraded plan"},
+ {
+ "id": "signed_up",
+ "name": "signed_up",
+ "type": "events",
+ "order": 2,
+ "custom_name": "Signed up",
+ },
+ {
+ "id": "upgraded_plan",
+ "name": "upgraded_plan",
+ "type": "events",
+ "order": 4,
+ "custom_name": "Upgraded plan",
+ },
],
"actions": [{"id": 1, "name": "Interacted with file", "type": "actions", "order": 3}],
"display": "FunnelViz",
@@ -76,17 +88,45 @@
"properties": {
"type": "AND",
"values": [
- {"type": "AND", "values": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}]}
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "is_set",
+ "operator": "is_set",
+ }
+ ],
+ }
],
},
- "target_entity": {"id": "signed_up", "name": "signed_up", "type": "events", "order": 0},
+ "target_entity": {
+ "id": "signed_up",
+ "name": "signed_up",
+ "type": "events",
+ "order": 0,
+ },
"retention_type": "retention_first_time",
"total_intervals": 9,
- "returning_entity": {"id": 1, "name": "Interacted with file", "type": "actions", "order": 0},
+ "returning_entity": {
+ "id": 1,
+ "name": "Interacted with file",
+ "type": "actions",
+ "order": 0,
+ },
}
insight_4 = {
"events": [],
- "actions": [{"id": 1, "math": "total", "name": "Interacted with file", "type": "actions", "order": 0}],
+ "actions": [
+ {
+ "id": 1,
+ "math": "total",
+ "name": "Interacted with file",
+ "type": "actions",
+ "order": 0,
+ }
+ ],
"compare": False,
"display": "ActionsLineGraph",
"insight": "LIFECYCLE",
@@ -128,7 +168,15 @@
"filter_test_accounts": True,
}
insight_6 = {
- "events": [{"id": "paid_bill", "math": "sum", "type": "events", "order": 0, "math_property": "amount_usd"}],
+ "events": [
+ {
+ "id": "paid_bill",
+ "math": "sum",
+ "type": "events",
+ "order": 0,
+ "math_property": "amount_usd",
+ }
+ ],
"actions": [],
"display": "ActionsLineGraph",
"insight": "TRENDS",
@@ -170,7 +218,14 @@
"values": [
{
"type": "AND",
- "values": [{"key": "$current_url", "type": "event", "value": "/files/", "operator": "not_icontains"}],
+ "values": [
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "/files/",
+ "operator": "not_icontains",
+ }
+ ],
}
],
},
@@ -184,7 +239,12 @@
"type": "events",
"order": 0,
"properties": [
- {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/", "operator": "exact"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "https://hedgebox.net/",
+ "operator": "exact",
+ }
],
"custom_name": "Viewed homepage",
},
@@ -194,11 +254,22 @@
"type": "events",
"order": 1,
"properties": [
- {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/signup/", "operator": "regex"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "https://hedgebox.net/signup/",
+ "operator": "regex",
+ }
],
"custom_name": "Viewed signup page",
},
- {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"},
+ {
+ "id": "signed_up",
+ "name": "signed_up",
+ "type": "events",
+ "order": 2,
+ "custom_name": "Signed up",
+ },
],
"actions": [],
"display": "FunnelViz",
@@ -279,9 +350,24 @@
}
insight_17 = {
"events": [
- {"id": "$pageview", "type": "events", "order": 0, "custom_name": "First page view"},
- {"id": "$pageview", "type": "events", "order": 1, "custom_name": "Second page view"},
- {"id": "$pageview", "type": "events", "order": 2, "custom_name": "Third page view"},
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "custom_name": "First page view",
+ },
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 1,
+ "custom_name": "Second page view",
+ },
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 2,
+ "custom_name": "Third page view",
+ },
],
"layout": "horizontal",
"display": "FunnelViz",
@@ -303,7 +389,14 @@
"name": "Pageviews",
"type": "actions",
"order": 0,
- "properties": [{"key": "$browser", "type": "event", "value": "Chrome", "operator": None}],
+ "properties": [
+ {
+ "key": "$browser",
+ "type": "event",
+ "value": "Chrome",
+ "operator": None,
+ }
+ ],
"math_property": None,
}
],
@@ -392,7 +485,14 @@
"interval": "day",
"shown_as": "Volume",
"breakdown": False,
- "properties": [{"key": "$current_url", "type": "event", "value": "https://example.com/", "operator": "icontains"}],
+ "properties": [
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "https://example.com/",
+ "operator": "icontains",
+ }
+ ],
"breakdown_type": "undefined",
}
insight_24 = {
@@ -471,7 +571,12 @@
"type": "events",
"order": 1,
"properties": [
- {"key": "$current_url", "type": "event", "value": "posthog.com/signup$", "operator": "regex"}
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "posthog.com/signup$",
+ "operator": "regex",
+ }
],
"custom_name": "Views on signup page",
},
@@ -491,7 +596,15 @@
"breakdown_group_type_index": 0,
}
insight_31 = {
- "events": [{"id": "$autocapture", "math": "total", "name": "$autocapture", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "$autocapture",
+ "math": "total",
+ "name": "$autocapture",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"insight": "STICKINESS",
"entity_type": "events",
}
@@ -592,7 +705,12 @@ def test_base_insights(filter: dict):
properties_1 = [{"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"}]
properties_2 = [
{"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"},
- {"key": "$current_url", "type": "event", "value": "/path", "operator": "not_icontains"},
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "/path",
+ "operator": "not_icontains",
+ },
]
properties_3 = {}
properties_4 = {"type": "AND", "values": []}
@@ -603,8 +721,18 @@ def test_base_insights(filter: dict):
{
"type": "AND",
"values": [
- {"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"},
- {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"},
+ {
+ "key": "$current_url",
+ "type": "event",
+ "value": "?",
+ "operator": "not_icontains",
+ },
+ {
+ "key": "$referring_domain",
+ "type": "event",
+ "value": "google",
+ "operator": "icontains",
+ },
],
}
],
@@ -612,10 +740,19 @@ def test_base_insights(filter: dict):
properties_7 = {
"type": "AND",
"values": [
- {"type": "AND", "values": [{"type": "AND", "values": []}, {"type": "AND", "values": []}]},
{
"type": "AND",
- "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}],
+ "values": [{"type": "AND", "values": []}, {"type": "AND", "values": []}],
+ },
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "dateDiff('minute', timestamp, now()) < 5",
+ "type": "hogql",
+ "value": None,
+ }
+ ],
},
],
}
@@ -624,11 +761,23 @@ def test_base_insights(filter: dict):
"values": [
{
"type": "AND",
- "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}],
+ "values": [
+ {
+ "key": "dateDiff('minute', timestamp, now()) < 5",
+ "type": "hogql",
+ "value": None,
+ }
+ ],
},
{
"type": "AND",
- "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}],
+ "values": [
+ {
+ "key": "dateDiff('minute', timestamp, now()) < 5",
+ "type": "hogql",
+ "value": None,
+ }
+ ],
},
],
}
@@ -638,9 +787,24 @@ def test_base_insights(filter: dict):
{
"type": "AND",
"values": [
- {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"},
- {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "person"},
- {"key": "$feature/hogql-insights", "value": ["true"], "operator": "exact", "type": "event"},
+ {
+ "key": "$browser",
+ "value": ["Chrome"],
+ "operator": "exact",
+ "type": "event",
+ },
+ {
+ "key": "$browser",
+ "value": ["Chrome"],
+ "operator": "exact",
+ "type": "person",
+ },
+ {
+ "key": "$feature/hogql-insights",
+ "value": ["true"],
+ "operator": "exact",
+ "type": "event",
+ },
{
"key": "site_url",
"value": ["http://localhost:8000"],
@@ -649,8 +813,18 @@ def test_base_insights(filter: dict):
"group_type_index": 1,
},
{"key": "id", "value": 2, "type": "cohort"},
- {"key": "tag_name", "value": ["elem"], "operator": "exact", "type": "element"},
- {"key": "$session_duration", "value": None, "operator": "gt", "type": "session"},
+ {
+ "key": "tag_name",
+ "value": ["elem"],
+ "operator": "exact",
+ "type": "element",
+ },
+ {
+ "key": "$session_duration",
+ "value": None,
+ "operator": "gt",
+ "type": "session",
+ },
{"type": "hogql", "key": "properties.name", "value": None},
],
},
@@ -659,7 +833,14 @@ def test_base_insights(filter: dict):
}
properties_10 = [{"key": "id", "type": "cohort", "value": 71, "operator": None}]
properties_11 = [{"key": [498], "type": "cohort", "value": 498, "operator": None}]
-properties_12 = [{"key": "userId", "type": "event", "values": ["63ffaeae99ac3c4240976d60"], "operator": "exact"}]
+properties_12 = [
+ {
+ "key": "userId",
+ "type": "event",
+ "values": ["63ffaeae99ac3c4240976d60"],
+ "operator": "exact",
+ }
+]
properties_13 = {"plan": "premium"}
properties_14 = {"$current_url__icontains": "signin"}
@@ -783,7 +964,10 @@ def test_series_custom(self):
def test_series_order(self):
filter = {
- "events": [{"id": "$pageview", "order": 1}, {"id": "$pageview", "math": "dau", "order": 2}],
+ "events": [
+ {"id": "$pageview", "order": 1},
+ {"id": "$pageview", "math": "dau", "order": 2},
+ ],
"actions": [{"id": 1, "order": 3}, {"id": 1, "math": "dau", "order": 0}],
}
@@ -803,9 +987,20 @@ def test_series_math(self):
filter = {
"events": [
{"id": "$pageview", "math": "dau"}, # base math type
- {"id": "$pageview", "math": "median", "math_property": "$math_prop"}, # property math type
- {"id": "$pageview", "math": "avg_count_per_actor"}, # count per actor math type
- {"id": "$pageview", "math": "unique_group", "math_group_type_index": 0}, # unique group
+ {
+ "id": "$pageview",
+ "math": "median",
+ "math_property": "$math_prop",
+ }, # property math type
+ {
+ "id": "$pageview",
+ "math": "avg_count_per_actor",
+ }, # count per actor math type
+ {
+ "id": "$pageview",
+ "math": "unique_group",
+ "math_group_type_index": 0,
+ }, # unique group
{
"id": "$pageview",
"math": "hogql",
@@ -821,10 +1016,22 @@ def test_series_math(self):
[
EventsNode(event="$pageview", name="$pageview", math=BaseMathType.dau),
EventsNode(
- event="$pageview", name="$pageview", math=PropertyMathType.median, math_property="$math_prop"
+ event="$pageview",
+ name="$pageview",
+ math=PropertyMathType.median,
+ math_property="$math_prop",
+ ),
+ EventsNode(
+ event="$pageview",
+ name="$pageview",
+ math=CountPerActorMathType.avg_count_per_actor,
+ ),
+ EventsNode(
+ event="$pageview",
+ name="$pageview",
+ math="unique_group",
+ math_group_type_index=0,
),
- EventsNode(event="$pageview", name="$pageview", math=CountPerActorMathType.avg_count_per_actor),
- EventsNode(event="$pageview", name="$pageview", math="unique_group", math_group_type_index=0),
EventsNode(
event="$pageview",
name="$pageview",
@@ -840,21 +1047,52 @@ def test_series_properties(self):
{"id": "$pageview", "properties": []}, # smoke test
{
"id": "$pageview",
- "properties": [{"key": "success", "type": "event", "value": ["true"], "operator": "exact"}],
+ "properties": [
+ {
+ "key": "success",
+ "type": "event",
+ "value": ["true"],
+ "operator": "exact",
+ }
+ ],
},
{
"id": "$pageview",
- "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}],
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "is_set",
+ "operator": "is_set",
+ }
+ ],
},
{
"id": "$pageview",
- "properties": [{"key": "text", "value": ["some text"], "operator": "exact", "type": "element"}],
+ "properties": [
+ {
+ "key": "text",
+ "value": ["some text"],
+ "operator": "exact",
+ "type": "element",
+ }
+ ],
+ },
+ {
+ "id": "$pageview",
+ "properties": [
+ {
+ "key": "$session_duration",
+ "value": 1,
+ "operator": "gt",
+ "type": "session",
+ }
+ ],
},
{
"id": "$pageview",
- "properties": [{"key": "$session_duration", "value": 1, "operator": "gt", "type": "session"}],
+ "properties": [{"key": "id", "value": 2, "type": "cohort"}],
},
- {"id": "$pageview", "properties": [{"key": "id", "value": 2, "type": "cohort"}]},
{
"id": "$pageview",
"properties": [
@@ -870,14 +1108,28 @@ def test_series_properties(self):
{
"id": "$pageview",
"properties": [
- {"key": "dateDiff('minute', timestamp, now()) < 30", "type": "hogql", "value": None}
+ {
+ "key": "dateDiff('minute', timestamp, now()) < 30",
+ "type": "hogql",
+ "value": None,
+ }
],
},
{
"id": "$pageview",
"properties": [
- {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"},
- {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"},
+ {
+ "key": "$referring_domain",
+ "type": "event",
+ "value": "google",
+ "operator": "icontains",
+ },
+ {
+ "key": "utm_source",
+ "type": "event",
+ "value": "is_not_set",
+ "operator": "is_not_set",
+ },
],
},
]
@@ -892,18 +1144,34 @@ def test_series_properties(self):
EventsNode(
event="$pageview",
name="$pageview",
- properties=[EventPropertyFilter(key="success", value=["true"], operator=PropertyOperator.exact)],
+ properties=[
+ EventPropertyFilter(
+ key="success",
+ value=["true"],
+ operator=PropertyOperator.exact,
+ )
+ ],
),
EventsNode(
event="$pageview",
name="$pageview",
- properties=[PersonPropertyFilter(key="email", value="is_set", operator=PropertyOperator.is_set)],
+ properties=[
+ PersonPropertyFilter(
+ key="email",
+ value="is_set",
+ operator=PropertyOperator.is_set,
+ )
+ ],
),
EventsNode(
event="$pageview",
name="$pageview",
properties=[
- ElementPropertyFilter(key=Key.text, value=["some text"], operator=PropertyOperator.exact)
+ ElementPropertyFilter(
+ key=Key.text,
+ value=["some text"],
+ operator=PropertyOperator.exact,
+ )
],
),
EventsNode(
@@ -911,13 +1179,20 @@ def test_series_properties(self):
name="$pageview",
properties=[SessionPropertyFilter(value=1, operator=PropertyOperator.gt)],
),
- EventsNode(event="$pageview", name="$pageview", properties=[CohortPropertyFilter(value=2)]),
+ EventsNode(
+ event="$pageview",
+ name="$pageview",
+ properties=[CohortPropertyFilter(value=2)],
+ ),
EventsNode(
event="$pageview",
name="$pageview",
properties=[
GroupPropertyFilter(
- key="name", value=["Hedgebox Inc."], operator=PropertyOperator.exact, group_type_index=2
+ key="name",
+ value=["Hedgebox Inc."],
+ operator=PropertyOperator.exact,
+ group_type_index=2,
)
],
),
@@ -931,9 +1206,15 @@ def test_series_properties(self):
name="$pageview",
properties=[
EventPropertyFilter(
- key="$referring_domain", value="google", operator=PropertyOperator.icontains
+ key="$referring_domain",
+ value="google",
+ operator=PropertyOperator.icontains,
+ ),
+ EventPropertyFilter(
+ key="utm_source",
+ value="is_not_set",
+ operator=PropertyOperator.is_not_set,
),
- EventPropertyFilter(key="utm_source", value="is_not_set", operator=PropertyOperator.is_not_set),
],
),
],
@@ -1070,7 +1351,11 @@ def test_retention_filter(self):
"retention_type": "retention_first_time",
# retention_reference="previous",
"total_intervals": 12,
- "returning_entity": {"id": "$pageview", "name": "$pageview", "type": "events"},
+ "returning_entity": {
+ "id": "$pageview",
+ "name": "$pageview",
+ "type": "events",
+ },
"target_entity": {"id": "$pageview", "name": "$pageview", "type": "events"},
"period": "Week",
}
@@ -1119,7 +1404,13 @@ def test_paths_filter(self):
"funnel_filter": {
"insight": "FUNNELS",
"events": [
- {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"},
+ {
+ "type": "events",
+ "id": "$pageview",
+ "order": 0,
+ "name": "$pageview",
+ "math": "total",
+ },
{"type": "events", "id": None, "order": 1, "math": "total"},
],
"funnel_viz_type": "steps",
@@ -1152,7 +1443,13 @@ def test_paths_filter(self):
funnel_filter={
"insight": "FUNNELS",
"events": [
- {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"},
+ {
+ "type": "events",
+ "id": "$pageview",
+ "order": 0,
+ "name": "$pageview",
+ "math": "total",
+ },
{"type": "events", "id": None, "order": 1, "math": "total"},
],
"funnel_viz_type": "steps",
diff --git a/posthog/hogql_queries/persons_query_runner.py b/posthog/hogql_queries/persons_query_runner.py
index a373a1acbf7d9..d597f4bab1c2a 100644
--- a/posthog/hogql_queries/persons_query_runner.py
+++ b/posthog/hogql_queries/persons_query_runner.py
@@ -69,7 +69,11 @@ def filter_conditions(self) -> List[ast.Expr]:
source_query_runner = get_query_runner(source, self.team, self.timings)
source_query = source_query_runner.to_persons_query()
where_exprs.append(
- ast.CompareOperation(left=ast.Field(chain=["id"]), op=ast.CompareOperationOp.In, right=source_query)
+ ast.CompareOperation(
+ left=ast.Field(chain=["id"]),
+ op=ast.CompareOperationOp.In,
+ right=source_query,
+ )
)
except NotImplementedError:
raise ValueError(f"Queries of type '{source.kind}' are not implemented as a PersonsQuery sources.")
@@ -113,7 +117,10 @@ def input_columns(self) -> List[str]:
return self.query.select or ["person", "id", "created_at", "person.$delete"]
def query_limit(self) -> int:
- return min(MAX_SELECT_RETURNED_ROWS, DEFAULT_RETURNED_ROWS if self.query.limit is None else self.query.limit)
+ return min(
+ MAX_SELECT_RETURNED_ROWS,
+ DEFAULT_RETURNED_ROWS if self.query.limit is None else self.query.limit,
+ )
def to_query(self) -> ast.SelectQuery:
with self.timings.measure("columns"):
@@ -175,7 +182,8 @@ def to_query(self) -> ast.SelectQuery:
ast.OrderExpr(
expr=ast.Field(chain=["properties", order_property]),
order=cast(
- Literal["ASC", "DESC"], "DESC" if self.query.orderBy[0] == "person DESC" else "ASC"
+ Literal["ASC", "DESC"],
+ "DESC" if self.query.orderBy[0] == "person DESC" else "ASC",
),
)
]
diff --git a/posthog/hogql_queries/test/test_hogql_query_runner.py b/posthog/hogql_queries/test/test_hogql_query_runner.py
index 6af80f638e3ba..badc27efef3bf 100644
--- a/posthog/hogql_queries/test/test_hogql_query_runner.py
+++ b/posthog/hogql_queries/test/test_hogql_query_runner.py
@@ -3,7 +3,13 @@
from posthog.hogql_queries.hogql_query_runner import HogQLQueryRunner
from posthog.models.utils import UUIDT
from posthog.schema import HogQLPropertyFilter, HogQLQuery, HogQLFilters
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, flush_persons_and_events, _create_event
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_person,
+ flush_persons_and_events,
+ _create_event,
+)
class TestHogQLQueryRunner(ClickhouseTestMixin, APIBaseTest):
@@ -24,7 +30,11 @@ def _create_random_persons(self) -> str:
distinct_ids=[f"id-{random_uuid}-{index}"],
is_identified=True,
)
- _create_event(distinct_id=f"id-{random_uuid}-{index}", event=f"clicky-{index}", team=self.team)
+ _create_event(
+ distinct_id=f"id-{random_uuid}-{index}",
+ event=f"clicky-{index}",
+ team=self.team,
+ )
flush_persons_and_events()
return random_uuid
@@ -60,7 +70,9 @@ def test_hogql_query_filters(self):
select=[ast.Call(name="count", args=[ast.Field(chain=["event"])])],
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
where=ast.CompareOperation(
- left=ast.Field(chain=["event"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value="clicky-3")
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="clicky-3"),
),
)
self.assertEqual(clear_locations(query), expected)
@@ -69,7 +81,10 @@ def test_hogql_query_filters(self):
def test_hogql_query_values(self):
runner = self._create_runner(
- HogQLQuery(query="select count(event) from events where event={e}", values={"e": "clicky-3"})
+ HogQLQuery(
+ query="select count(event) from events where event={e}",
+ values={"e": "clicky-3"},
+ )
)
query = runner.to_query()
query = clear_locations(query)
@@ -77,7 +92,9 @@ def test_hogql_query_values(self):
select=[ast.Call(name="count", args=[ast.Field(chain=["event"])])],
select_from=ast.JoinExpr(table=ast.Field(chain=["events"])),
where=ast.CompareOperation(
- left=ast.Field(chain=["event"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value="clicky-3")
+ left=ast.Field(chain=["event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value="clicky-3"),
),
)
self.assertEqual(clear_locations(query), expected)
diff --git a/posthog/hogql_queries/test/test_persons_query_runner.py b/posthog/hogql_queries/test/test_persons_query_runner.py
index fbe65319a5912..7460d8cd728b7 100644
--- a/posthog/hogql_queries/test/test_persons_query_runner.py
+++ b/posthog/hogql_queries/test/test_persons_query_runner.py
@@ -13,7 +13,13 @@
EventsNode,
IntervalType,
)
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, flush_persons_and_events, _create_event
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_person,
+ flush_persons_and_events,
+ _create_event,
+)
from freezegun import freeze_time
@@ -35,7 +41,11 @@ def _create_random_persons(self) -> str:
distinct_ids=[f"id-{random_uuid}-{index}"],
is_identified=True,
)
- _create_event(distinct_id=f"id-{random_uuid}-{index}", event=f"clicky-{index}", team=self.team)
+ _create_event(
+ distinct_id=f"id-{random_uuid}-{index}",
+ event=f"clicky-{index}",
+ team=self.team,
+ )
flush_persons_and_events()
return random_uuid
@@ -81,7 +91,11 @@ def test_persons_query_properties(self):
runner = self._create_runner(
PersonsQuery(
properties=[
- PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact),
+ PersonPropertyFilter(
+ key="random_uuid",
+ value=self.random_uuid,
+ operator=PropertyOperator.exact,
+ ),
HogQLPropertyFilter(key="toInt(properties.index) > 5"),
]
)
@@ -93,7 +107,11 @@ def test_persons_query_fixed_properties(self):
runner = self._create_runner(
PersonsQuery(
fixedProperties=[
- PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact),
+ PersonPropertyFilter(
+ key="random_uuid",
+ value=self.random_uuid,
+ operator=PropertyOperator.exact,
+ ),
HogQLPropertyFilter(key="toInt(properties.index) < 2"),
]
)
@@ -144,7 +162,12 @@ def test_persons_query_limit(self):
self.assertEqual(response.hasMore, True)
runner = self._create_runner(
- PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], limit=1, offset=2)
+ PersonsQuery(
+ select=["properties.email"],
+ orderBy=["properties.email DESC"],
+ limit=1,
+ offset=2,
+ )
)
response = runner.calculate()
self.assertEqual(response.results, [[f"jacob7@{self.random_uuid}.posthog.com"]])
@@ -153,7 +176,11 @@ def test_persons_query_limit(self):
def test_source_hogql_query(self):
self.random_uuid = self._create_random_persons()
source_query = HogQLQuery(query="SELECT distinct person_id FROM events WHERE event='clicky-4'")
- query = PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], source=source_query)
+ query = PersonsQuery(
+ select=["properties.email"],
+ orderBy=["properties.email DESC"],
+ source=source_query,
+ )
runner = self._create_runner(query)
response = runner.calculate()
self.assertEqual(response.results, [[f"jacob4@{self.random_uuid}.posthog.com"]])
@@ -165,12 +192,20 @@ def test_source_lifecycle_query(self):
source_query = LifecycleQuery(
series=[EventsNode(event="clicky-4")],
properties=[
- PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact)
+ PersonPropertyFilter(
+ key="random_uuid",
+ value=self.random_uuid,
+ operator=PropertyOperator.exact,
+ )
],
interval=IntervalType.day,
dateRange=DateRange(date_from="-7d"),
)
- query = PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], source=source_query)
+ query = PersonsQuery(
+ select=["properties.email"],
+ orderBy=["properties.email DESC"],
+ source=source_query,
+ )
runner = self._create_runner(query)
response = runner.calculate()
self.assertEqual(response.results, [[f"jacob4@{self.random_uuid}.posthog.com"]])
diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py
index 9ac9cb5956df2..5b82b0fae5af9 100644
--- a/posthog/hogql_queries/test/test_query_runner.py
+++ b/posthog/hogql_queries/test/test_query_runner.py
@@ -6,7 +6,11 @@
from freezegun import freeze_time
from pydantic import BaseModel
-from posthog.hogql_queries.query_runner import QueryResponse, QueryRunner, RunnableQueryNode
+from posthog.hogql_queries.query_runner import (
+ QueryResponse,
+ QueryRunner,
+ RunnableQueryNode,
+)
from posthog.models.team.team import Team
from posthog.test.base import BaseTest
diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py
index 9c2a99e62d61f..be4e993326486 100644
--- a/posthog/hogql_queries/utils/query_date_range.py
+++ b/posthog/hogql_queries/utils/query_date_range.py
@@ -11,7 +11,11 @@
from posthog.models.team import Team
from posthog.queries.util import get_earliest_timestamp
from posthog.schema import DateRange, IntervalType
-from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
+from posthog.utils import (
+ DEFAULT_DATE_FROM_DAYS,
+ relative_date_parse,
+ relative_date_parse_with_delta_mapping,
+)
# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
@@ -24,7 +28,11 @@ class QueryDateRange:
_now_without_timezone: datetime
def __init__(
- self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
+ self,
+ date_range: Optional[DateRange],
+ team: Team,
+ interval: Optional[IntervalType],
+ now: datetime,
) -> None:
self._team = team
self._date_range = date_range
@@ -40,7 +48,10 @@ def date_to(self) -> datetime:
if self._date_range and self._date_range.date_to:
date_to, delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone
+ self._date_range.date_to,
+ self._team.timezone_info,
+ always_truncate=True,
+ now=self.now_with_timezone,
)
is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
@@ -60,7 +71,9 @@ def date_from(self) -> datetime:
date_from = self.get_earliest_timestamp()
elif self._date_range and isinstance(self._date_range.date_from, str):
date_from = relative_date_parse(
- self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone
+ self._date_range.date_from,
+ self._team.timezone_info,
+ now=self.now_with_timezone,
)
else:
date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
@@ -106,25 +119,38 @@ def interval_name(self) -> str:
def date_to_as_hogql(self) -> ast.Expr:
return ast.Call(
- name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])]
+ name="assumeNotNull",
+ args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])],
)
def date_from_as_hogql(self) -> ast.Expr:
return ast.Call(
- name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])]
+ name="assumeNotNull",
+ args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])],
)
def previous_period_date_from_as_hogql(self) -> ast.Expr:
return ast.Call(
name="assumeNotNull",
- args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.previous_period_date_from_str))])],
+ args=[
+ ast.Call(
+ name="toDateTime",
+ args=[(ast.Constant(value=self.previous_period_date_from_str))],
+ )
+ ],
)
def one_interval_period(self) -> ast.Expr:
- return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)])
+ return ast.Call(
+ name=f"toInterval{self.interval_name.capitalize()}",
+ args=[ast.Constant(value=1)],
+ )
def number_interval_periods(self) -> ast.Expr:
- return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])])
+ return ast.Call(
+ name=f"toInterval{self.interval_name.capitalize()}",
+ args=[ast.Field(chain=["number"])],
+ )
def interval_period_string_as_hogql_constant(self) -> ast.Expr:
return ast.Constant(value=self.interval_name)
@@ -143,7 +169,13 @@ def to_properties(self, field: Optional[List[str]] = None) -> List[ast.Expr]:
field = ["timestamp"]
return [
ast.CompareOperation(
- left=ast.Field(chain=field), op=CompareOperationOp.LtEq, right=self.date_to_as_hogql()
+ left=ast.Field(chain=field),
+ op=CompareOperationOp.LtEq,
+ right=self.date_to_as_hogql(),
+ ),
+ ast.CompareOperation(
+ left=ast.Field(chain=field),
+ op=CompareOperationOp.Gt,
+ right=self.date_to_as_hogql(),
),
- ast.CompareOperation(left=ast.Field(chain=field), op=CompareOperationOp.Gt, right=self.date_to_as_hogql()),
]
diff --git a/posthog/hogql_queries/utils/query_previous_period_date_range.py b/posthog/hogql_queries/utils/query_previous_period_date_range.py
index ac16f0b9eec10..c127ac3e36d07 100644
--- a/posthog/hogql_queries/utils/query_previous_period_date_range.py
+++ b/posthog/hogql_queries/utils/query_previous_period_date_range.py
@@ -4,7 +4,10 @@
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.models.team import Team
from posthog.schema import DateRange, IntervalType
-from posthog.utils import get_compare_period_dates, relative_date_parse_with_delta_mapping
+from posthog.utils import (
+ get_compare_period_dates,
+ relative_date_parse_with_delta_mapping,
+)
# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
@@ -17,14 +20,20 @@ class QueryPreviousPeriodDateRange(QueryDateRange):
_now_without_timezone: datetime
def __init__(
- self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
+ self,
+ date_range: Optional[DateRange],
+ team: Team,
+ interval: Optional[IntervalType],
+ now: datetime,
) -> None:
super().__init__(date_range, team, interval, now)
def date_from_delta_mappings(self) -> Dict[str, int] | None:
if self._date_range and isinstance(self._date_range.date_from, str) and self._date_range.date_from != "all":
delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone
+ self._date_range.date_from,
+ self._team.timezone_info,
+ now=self.now_with_timezone,
)[1]
return delta_mapping
@@ -33,7 +42,10 @@ def date_from_delta_mappings(self) -> Dict[str, int] | None:
def date_to_delta_mappings(self) -> Dict[str, int] | None:
if self._date_range and self._date_range.date_to:
delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone
+ self._date_range.date_to,
+ self._team.timezone_info,
+ always_truncate=True,
+ now=self.now_with_timezone,
)[1]
return delta_mapping
return None
diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py
index 12e739413f2d1..dbd92defd2814 100644
--- a/posthog/hogql_queries/web_analytics/stats_table.py
+++ b/posthog/hogql_queries/web_analytics/stats_table.py
@@ -5,7 +5,9 @@
COUNTS_CTE,
BOUNCE_RATE_CTE,
)
-from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner
+from posthog.hogql_queries.web_analytics.web_analytics_query_runner import (
+ WebAnalyticsQueryRunner,
+)
from posthog.schema import (
WebStatsTableQuery,
WebStatsBreakdown,
diff --git a/posthog/hogql_queries/web_analytics/top_clicks.py b/posthog/hogql_queries/web_analytics/top_clicks.py
index 004cad7947c93..1693f2c1d86ce 100644
--- a/posthog/hogql_queries/web_analytics/top_clicks.py
+++ b/posthog/hogql_queries/web_analytics/top_clicks.py
@@ -4,7 +4,9 @@
from posthog.hogql.parser import parse_select
from posthog.hogql.query import execute_hogql_query
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
-from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner
+from posthog.hogql_queries.web_analytics.web_analytics_query_runner import (
+ WebAnalyticsQueryRunner,
+)
from posthog.models.filters.mixins.utils import cached_property
from posthog.schema import WebTopClicksQuery, WebTopClicksQueryResponse
@@ -51,9 +53,17 @@ def calculate(self):
)
return WebTopClicksQueryResponse(
- columns=response.columns, results=response.results, timings=response.timings, types=response.types
+ columns=response.columns,
+ results=response.results,
+ timings=response.timings,
+ types=response.types,
)
@cached_property
def query_date_range(self):
- return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now())
+ return QueryDateRange(
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=None,
+ now=datetime.now(),
+ )
diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
index a9d0092565f59..16f31272d43a4 100644
--- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
+++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
@@ -35,7 +35,12 @@ def _refresh_frequency(self):
@cached_property
def query_date_range(self):
- return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now())
+ return QueryDateRange(
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=None,
+ now=datetime.now(),
+ )
@cached_property
def pathname_property_filter(self) -> Optional[EventPropertyFilter]:
diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py
index 062bf72d2968e..19a587245443d 100644
--- a/posthog/hogql_queries/web_analytics/web_overview.py
+++ b/posthog/hogql_queries/web_analytics/web_overview.py
@@ -7,7 +7,9 @@
from posthog.hogql.property import property_to_expr
from posthog.hogql.query import execute_hogql_query
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
-from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner
+from posthog.hogql_queries.web_analytics.web_analytics_query_runner import (
+ WebAnalyticsQueryRunner,
+)
from posthog.models.filters.mixins.utils import cached_property
from posthog.schema import WebOverviewQueryResponse, WebOverviewQuery
@@ -118,14 +120,23 @@ def calculate(self):
@cached_property
def query_date_range(self):
- return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now())
+ return QueryDateRange(
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=None,
+ now=datetime.now(),
+ )
def event_properties(self) -> ast.Expr:
return property_to_expr(self.query.properties, team=self.team)
def to_data(
- key: str, kind: str, value: Optional[float], previous: Optional[float], is_increase_bad: Optional[bool] = None
+ key: str,
+ kind: str,
+ value: Optional[float],
+ previous: Optional[float],
+ is_increase_bad: Optional[bool] = None,
) -> dict:
if kind == "percentage":
if value is not None:
diff --git a/posthog/jwt.py b/posthog/jwt.py
index 73d42c80c3ee1..fa458ab2f5e3f 100644
--- a/posthog/jwt.py
+++ b/posthog/jwt.py
@@ -20,7 +20,11 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie
raise Exception("Audience must be in the list of PostHog-supported audiences")
encoded_jwt = jwt.encode(
- {**payload, "exp": datetime.now(tz=timezone.utc) + expiry_delta, "aud": audience.value},
+ {
+ **payload,
+ "exp": datetime.now(tz=timezone.utc) + expiry_delta,
+ "aud": audience.value,
+ },
settings.SECRET_KEY,
algorithm="HS256",
)
diff --git a/posthog/kafka_client/client.py b/posthog/kafka_client/client.py
index 2de052c8e73f8..a22de73a8fffe 100644
--- a/posthog/kafka_client/client.py
+++ b/posthog/kafka_client/client.py
@@ -6,7 +6,11 @@
from django.conf import settings
from kafka import KafkaConsumer as KC
from kafka import KafkaProducer as KP
-from kafka.producer.future import FutureProduceResult, FutureRecordMetadata, RecordMetadata
+from kafka.producer.future import (
+ FutureProduceResult,
+ FutureRecordMetadata,
+ RecordMetadata,
+)
from kafka.structs import TopicPartition
from statshog.defaults.django import statsd
from structlog import get_logger
@@ -24,7 +28,13 @@ class KafkaProducerForTests:
def __init__(self):
pass
- def send(self, topic: str, value: Any, key: Any = None, headers: Optional[List[Tuple[str, bytes]]] = None):
+ def send(
+ self,
+ topic: str,
+ value: Any,
+ key: Any = None,
+ headers: Optional[List[Tuple[str, bytes]]] = None,
+ ):
produce_future = FutureProduceResult(topic_partition=TopicPartition(topic, 1))
future = FutureRecordMetadata(
produce_future=produce_future,
@@ -81,7 +91,10 @@ class _KafkaSecurityProtocol(str, Enum):
def _sasl_params():
- if settings.KAFKA_SECURITY_PROTOCOL in [_KafkaSecurityProtocol.SASL_PLAINTEXT, _KafkaSecurityProtocol.SASL_SSL]:
+ if settings.KAFKA_SECURITY_PROTOCOL in [
+ _KafkaSecurityProtocol.SASL_PLAINTEXT,
+ _KafkaSecurityProtocol.SASL_SSL,
+ ]:
return {
"sasl_mechanism": settings.KAFKA_SASL_MECHANISM,
"sasl_plain_username": settings.KAFKA_SASL_USER,
@@ -135,7 +148,10 @@ def on_send_success(self, record_metadata: RecordMetadata):
statsd.incr("posthog_cloud_kafka_send_success", tags={"topic": record_metadata.topic})
def on_send_failure(self, topic: str, exc: Exception):
- statsd.incr("posthog_cloud_kafka_send_failure", tags={"topic": topic, "exception": exc.__class__.__name__})
+ statsd.incr(
+ "posthog_cloud_kafka_send_failure",
+ tags={"topic": topic, "exception": exc.__class__.__name__},
+ )
def produce(
self,
@@ -208,7 +224,10 @@ def build_kafka_consumer(
):
if test:
consumer = KafkaConsumerForTests(
- topic=topic, auto_offset_reset=auto_offset_reset, max=10, consumer_timeout_ms=consumer_timeout_ms
+ topic=topic,
+ auto_offset_reset=auto_offset_reset,
+ max=10,
+ consumer_timeout_ms=consumer_timeout_ms,
)
elif settings.KAFKA_BASE64_KEYS:
consumer = helper.get_kafka_consumer(
diff --git a/posthog/logging/timing.py b/posthog/logging/timing.py
index b736450e0d8d7..d83b692fb2894 100644
--- a/posthog/logging/timing.py
+++ b/posthog/logging/timing.py
@@ -34,7 +34,8 @@ def wrapper(*args, **kwargs):
finally:
duration = round((time() - start) * 1000, 1)
print( # noqa T201
- f"Timed function: {fn_name} took {duration}ms with args", {"args": args, "kwargs": kwargs}
+ f"Timed function: {fn_name} took {duration}ms with args",
+ {"args": args, "kwargs": kwargs},
)
return wrapper
diff --git a/posthog/management/commands/backfill_persons_and_groups_on_events.py b/posthog/management/commands/backfill_persons_and_groups_on_events.py
index 04880e7fa32dc..b7fb2fcbc46e9 100644
--- a/posthog/management/commands/backfill_persons_and_groups_on_events.py
+++ b/posthog/management/commands/backfill_persons_and_groups_on_events.py
@@ -138,7 +138,6 @@ def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, que
def run_backfill(options):
-
if not options["team_id"]:
logger.error("You must specify --team-id to run this script")
exit(1)
@@ -149,12 +148,20 @@ def run_backfill(options):
print("Dry run. Queries to run:", end="\n\n")
print_and_execute_query(GROUPS_DICTIONARY_SQL, "GROUPS_DICTIONARY_SQL", dry_run)
- print_and_execute_query(PERSON_DISTINCT_IDS_DICTIONARY_SQL, "PERSON_DISTINCT_IDS_DICTIONARY_SQL", dry_run)
+ print_and_execute_query(
+ PERSON_DISTINCT_IDS_DICTIONARY_SQL,
+ "PERSON_DISTINCT_IDS_DICTIONARY_SQL",
+ dry_run,
+ )
print_and_execute_query(PERSONS_DICTIONARY_SQL, "PERSONS_DICTIONARY_SQL", dry_run)
tag_queries(kind="backfill", id=backfill_query_id)
print_and_execute_query(
- BACKFILL_SQL, "BACKFILL_SQL", dry_run, 0, {"team_id": options["team_id"], "id": backfill_query_id}
+ BACKFILL_SQL,
+ "BACKFILL_SQL",
+ dry_run,
+ 0,
+ {"team_id": options["team_id"], "id": backfill_query_id},
)
reset_query_tags()
@@ -177,11 +184,17 @@ class Command(BaseCommand):
help = "Backfill persons and groups data on events for a given team"
def add_arguments(self, parser):
-
- parser.add_argument("--team-id", default=None, type=str, help="Specify a team to backfill data for.")
+ parser.add_argument(
+ "--team-id",
+ default=None,
+ type=str,
+ help="Specify a team to backfill data for.",
+ )
parser.add_argument(
- "--live-run", action="store_true", help="Opts out of default 'dry run' mode and actually runs the queries."
+ "--live-run",
+ action="store_true",
+ help="Opts out of default 'dry run' mode and actually runs the queries.",
)
def handle(self, *args, **options):
diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py
index 510e0f3dbfa4d..6fa577f582c55 100644
--- a/posthog/management/commands/create_batch_export_from_app.py
+++ b/posthog/management/commands/create_batch_export_from_app.py
@@ -15,7 +15,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
"""Add arguments to the parser."""
parser.add_argument(
- "--plugin-config-id", type=int, help="The ID of the PluginConfig to use as a base for the new BatchExport"
+ "--plugin-config-id",
+ type=int,
+ help="The ID of the PluginConfig to use as a base for the new BatchExport",
)
parser.add_argument(
"--team-id",
@@ -116,7 +118,11 @@ def handle(self, *args, **options):
end_at = dt.datetime.utcnow()
start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1))
backfill_export(
- client, batch_export_id=str(batch_export.id), team_id=team_id, start_at=start_at, end_at=end_at
+ client,
+ batch_export_id=str(batch_export.id),
+ team_id=team_id,
+ start_at=start_at,
+ end_at=end_at,
)
self.stdout.write(f"Triggered backfill for BatchExport '{name}'.")
diff --git a/posthog/management/commands/create_ch_migration.py b/posthog/management/commands/create_ch_migration.py
index 3b5334498589b..3f4495a0825b9 100644
--- a/posthog/management/commands/create_ch_migration.py
+++ b/posthog/management/commands/create_ch_migration.py
@@ -10,6 +10,7 @@
operations = []
"""
+
# ex: python manage.py create_ch_migration
class Command(BaseCommand):
help = "Create blank clickhouse migration"
diff --git a/posthog/management/commands/execute_temporal_workflow.py b/posthog/management/commands/execute_temporal_workflow.py
index 73d61979ab909..df9f5d993fc07 100644
--- a/posthog/management/commands/execute_temporal_workflow.py
+++ b/posthog/management/commands/execute_temporal_workflow.py
@@ -31,17 +31,45 @@ def add_arguments(self, parser):
"Set an ID in order to limit concurrency."
),
)
- parser.add_argument("--temporal-host", default=settings.TEMPORAL_HOST, help="Hostname for Temporal Scheduler")
- parser.add_argument("--temporal-port", default=settings.TEMPORAL_PORT, help="Port for Temporal Scheduler")
- parser.add_argument("--namespace", default=settings.TEMPORAL_NAMESPACE, help="Namespace to connect to")
- parser.add_argument("--task-queue", default=settings.TEMPORAL_TASK_QUEUE, help="Task queue to service")
parser.add_argument(
- "--server-root-ca-cert", default=settings.TEMPORAL_CLIENT_ROOT_CA, help="Optional root server CA cert"
+ "--temporal-host",
+ default=settings.TEMPORAL_HOST,
+ help="Hostname for Temporal Scheduler",
)
- parser.add_argument("--client-cert", default=settings.TEMPORAL_CLIENT_CERT, help="Optional client cert")
- parser.add_argument("--client-key", default=settings.TEMPORAL_CLIENT_KEY, help="Optional client key")
parser.add_argument(
- "--max-attempts", default=settings.TEMPORAL_WORKFLOW_MAX_ATTEMPTS, help="Number of max attempts"
+ "--temporal-port",
+ default=settings.TEMPORAL_PORT,
+ help="Port for Temporal Scheduler",
+ )
+ parser.add_argument(
+ "--namespace",
+ default=settings.TEMPORAL_NAMESPACE,
+ help="Namespace to connect to",
+ )
+ parser.add_argument(
+ "--task-queue",
+ default=settings.TEMPORAL_TASK_QUEUE,
+ help="Task queue to service",
+ )
+ parser.add_argument(
+ "--server-root-ca-cert",
+ default=settings.TEMPORAL_CLIENT_ROOT_CA,
+ help="Optional root server CA cert",
+ )
+ parser.add_argument(
+ "--client-cert",
+ default=settings.TEMPORAL_CLIENT_CERT,
+ help="Optional client cert",
+ )
+ parser.add_argument(
+ "--client-key",
+ default=settings.TEMPORAL_CLIENT_KEY,
+ help="Optional client key",
+ )
+ parser.add_argument(
+ "--max-attempts",
+ default=settings.TEMPORAL_WORKFLOW_MAX_ATTEMPTS,
+ help="Number of max attempts",
)
def handle(self, *args, **options):
diff --git a/posthog/management/commands/generate_demo_data.py b/posthog/management/commands/generate_demo_data.py
index 3948813b854ad..2a6e27c992345 100644
--- a/posthog/management/commands/generate_demo_data.py
+++ b/posthog/management/commands/generate_demo_data.py
@@ -20,7 +20,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--seed", type=str, help="Simulation seed for deterministic output")
parser.add_argument(
- "--now", type=dt.datetime.fromisoformat, help="Simulation 'now' datetime in ISO format (default: now)"
+ "--now",
+ type=dt.datetime.fromisoformat,
+ help="Simulation 'now' datetime in ISO format (default: now)",
)
parser.add_argument(
"--days-past",
@@ -34,7 +36,12 @@ def add_arguments(self, parser):
default=30,
help="At how many days after 'now' should the simulation end (default: 30)",
)
- parser.add_argument("--n-clusters", type=int, default=500, help="Number of clusters (default: 500)")
+ parser.add_argument(
+ "--n-clusters",
+ type=int,
+ default=500,
+ help="Number of clusters (default: 500)",
+ )
parser.add_argument("--dry-run", action="store_true", help="Don't save simulation results")
parser.add_argument(
"--team-id",
@@ -43,10 +50,16 @@ def add_arguments(self, parser):
help="If specified, an existing project with this ID will be used, and no new user will be created. If the ID is 0, data will be generated for the master project (but insights etc. won't be created)",
)
parser.add_argument(
- "--email", type=str, default="test@posthog.com", help="Email of the demo user (default: test@posthog.com)"
+ "--email",
+ type=str,
+ default="test@posthog.com",
+ help="Email of the demo user (default: test@posthog.com)",
)
parser.add_argument(
- "--password", type=str, default="12345678", help="Password of the demo user (default: 12345678)"
+ "--password",
+ type=str,
+ default="12345678",
+ help="Password of the demo user (default: 12345678)",
)
def handle(self, *args, **options):
@@ -74,7 +87,12 @@ def handle(self, *args, **options):
)
print("Running simulation...")
matrix.simulate()
- self.print_results(matrix, seed=seed, duration=monotonic() - timer, verbosity=options["verbosity"])
+ self.print_results(
+ matrix,
+ seed=seed,
+ duration=monotonic() - timer,
+ verbosity=options["verbosity"],
+ )
if not options["dry_run"]:
email = options["email"]
password = options["password"]
@@ -89,7 +107,11 @@ def handle(self, *args, **options):
matrix_manager.run_on_team(team, existing_user)
else:
matrix_manager.ensure_account_and_save(
- email, "Employee 427", "Hedgebox Inc.", password=password, disallow_collision=True
+ email,
+ "Employee 427",
+ "Hedgebox Inc.",
+ password=password,
+ disallow_collision=True,
)
except exceptions.ValidationError as e:
print(f"Error: {e}")
diff --git a/posthog/management/commands/makemigrations.py b/posthog/management/commands/makemigrations.py
index 3ab70d9bc0800..8ff0a37bfaa34 100644
--- a/posthog/management/commands/makemigrations.py
+++ b/posthog/management/commands/makemigrations.py
@@ -1,6 +1,8 @@
"""Cause git to detect a merge conflict when two branches have migrations."""
-from django.core.management.commands.makemigrations import Command as MakeMigrationsCommand
+from django.core.management.commands.makemigrations import (
+ Command as MakeMigrationsCommand,
+)
from django.db.migrations.loader import MigrationLoader
diff --git a/posthog/management/commands/migrate_clickhouse.py b/posthog/management/commands/migrate_clickhouse.py
index 82da287a1743d..b9a4d31eea3d9 100644
--- a/posthog/management/commands/migrate_clickhouse.py
+++ b/posthog/management/commands/migrate_clickhouse.py
@@ -6,7 +6,12 @@
from infi.clickhouse_orm.migrations import MigrationHistory
from infi.clickhouse_orm.utils import import_submodules
-from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_HTTP_URL, CLICKHOUSE_PASSWORD, CLICKHOUSE_USER
+from posthog.settings import (
+ CLICKHOUSE_DATABASE,
+ CLICKHOUSE_HTTP_URL,
+ CLICKHOUSE_PASSWORD,
+ CLICKHOUSE_USER,
+)
from posthog.settings.data_stores import CLICKHOUSE_CLUSTER
MIGRATIONS_PACKAGE_NAME = "posthog.clickhouse.migrations"
@@ -17,14 +22,25 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- "--upto", default=99_999, type=int, help="Database state will be brought to the state after that migration."
+ "--upto",
+ default=99_999,
+ type=int,
+ help="Database state will be brought to the state after that migration.",
)
- parser.add_argument("--fake", action="store_true", help="Mark migrations as run without actually running them.")
parser.add_argument(
- "--check", action="store_true", help="Exits with a non-zero status if unapplied migrations exist."
+ "--fake",
+ action="store_true",
+ help="Mark migrations as run without actually running them.",
)
parser.add_argument(
- "--plan", action="store_true", help="Shows a list of the migration actions that will be performed."
+ "--check",
+ action="store_true",
+ help="Exits with a non-zero status if unapplied migrations exist.",
+ )
+ parser.add_argument(
+ "--plan",
+ action="store_true",
+ help="Shows a list of the migration actions that will be performed.",
)
parser.add_argument(
"--print-sql",
diff --git a/posthog/management/commands/notify_helm_install.py b/posthog/management/commands/notify_helm_install.py
index ce0b5c3b333f1..684261cdae418 100644
--- a/posthog/management/commands/notify_helm_install.py
+++ b/posthog/management/commands/notify_helm_install.py
@@ -25,5 +25,10 @@ def handle(self, *args, **options):
posthoganalytics.api_key = "sTMFPsFhdP1Ssg"
disabled = posthoganalytics.disabled
posthoganalytics.disabled = False
- posthoganalytics.capture(get_machine_id(), "helm_install", report, groups={"instance": settings.SITE_URL})
+ posthoganalytics.capture(
+ get_machine_id(),
+ "helm_install",
+ report,
+ groups={"instance": settings.SITE_URL},
+ )
posthoganalytics.disabled = disabled
diff --git a/posthog/management/commands/partition.py b/posthog/management/commands/partition.py
index 68a24aef0efc0..b17e958b0c1e1 100644
--- a/posthog/management/commands/partition.py
+++ b/posthog/management/commands/partition.py
@@ -18,7 +18,6 @@ def add_arguments(self, parser):
parser.add_argument("--reverse", action="store_true", help="unpartition event table")
def handle(self, *args, **options):
-
if options["reverse"]:
print("Reversing partitions...")
with connection.cursor() as cursor:
diff --git a/posthog/management/commands/plugin_server_load_test.py b/posthog/management/commands/plugin_server_load_test.py
index 3fe197c154393..4adfe8941e644 100644
--- a/posthog/management/commands/plugin_server_load_test.py
+++ b/posthog/management/commands/plugin_server_load_test.py
@@ -32,7 +32,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--seed", type=str, help="Simulation seed for deterministic output")
parser.add_argument(
- "--now", type=dt.datetime.fromisoformat, help="Simulation 'now' datetime in ISO format (default: now)"
+ "--now",
+ type=dt.datetime.fromisoformat,
+ help="Simulation 'now' datetime in ISO format (default: now)",
)
parser.add_argument(
"--days-past",
@@ -46,9 +48,17 @@ def add_arguments(self, parser):
default=30,
help="At how many days after 'now' should the simulation end (default: 30)",
)
- parser.add_argument("--n-clusters", type=int, default=500, help="Number of clusters (default: 500)")
parser.add_argument(
- "--team-id", type=str, default="1", help="The team to which the events should be associated."
+ "--n-clusters",
+ type=int,
+ default=500,
+ help="Number of clusters (default: 500)",
+ )
+ parser.add_argument(
+ "--team-id",
+ type=str,
+ default="1",
+ help="The team to which the events should be associated.",
)
def handle(self, *args, **options):
@@ -83,7 +93,8 @@ def handle(self, *args, **options):
# Make sure events are ordered by time to simulate how they would be
# ingested in real life.
ordered_events = sorted(
- chain.from_iterable(person.all_events for person in matrix.people), key=lambda e: e.timestamp
+ chain.from_iterable(person.all_events for person in matrix.people),
+ key=lambda e: e.timestamp,
)
start_time = time.monotonic()
@@ -107,7 +118,11 @@ def handle(self, *args, **options):
offsets = admin.list_consumer_group_offsets(group_id="clickhouse-ingestion")
end_offsets = consumer.end_offsets([TopicPartition(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0)])
if end_offsets is None:
- logger.error("no_end_offsets", topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0)
+ logger.error(
+ "no_end_offsets",
+ topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC,
+ partition=0,
+ )
sys.exit(1)
end_offset = end_offsets[TopicPartition(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0)]
diff --git a/posthog/management/commands/run_async_migrations.py b/posthog/management/commands/run_async_migrations.py
index e0b9cfef5cb20..611c6038fd43b 100644
--- a/posthog/management/commands/run_async_migrations.py
+++ b/posthog/management/commands/run_async_migrations.py
@@ -6,8 +6,16 @@
from django.core.management.base import BaseCommand
from semantic_version.base import Version
-from posthog.async_migrations.runner import complete_migration, is_migration_dependency_fulfilled, start_async_migration
-from posthog.async_migrations.setup import ALL_ASYNC_MIGRATIONS, setup_async_migrations, setup_model
+from posthog.async_migrations.runner import (
+ complete_migration,
+ is_migration_dependency_fulfilled,
+ start_async_migration,
+)
+from posthog.async_migrations.setup import (
+ ALL_ASYNC_MIGRATIONS,
+ setup_async_migrations,
+ setup_model,
+)
from posthog.constants import FROZEN_POSTHOG_VERSION
from posthog.models.async_migration import (
AsyncMigration,
@@ -41,7 +49,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- "--check", action="store_true", help="Exits with a non-zero status if required unapplied migrations exist."
+ "--check",
+ action="store_true",
+ help="Exits with a non-zero status if required unapplied migrations exist.",
)
parser.add_argument(
"--plan",
diff --git a/posthog/management/commands/send_usage_report.py b/posthog/management/commands/send_usage_report.py
index 03e4b4a102da4..cfcd7c8758516 100644
--- a/posthog/management/commands/send_usage_report.py
+++ b/posthog/management/commands/send_usage_report.py
@@ -9,11 +9,21 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it")
parser.add_argument("--date", type=str, help="The date to be ran in format YYYY-MM-DD")
- parser.add_argument("--event-name", type=str, help="Override the event name to be sent - for testing")
parser.add_argument(
- "--skip-capture-event", type=str, help="Skip the posthog capture events - for retrying to billing service"
+ "--event-name",
+ type=str,
+ help="Override the event name to be sent - for testing",
+ )
+ parser.add_argument(
+ "--skip-capture-event",
+ type=str,
+ help="Skip the posthog capture events - for retrying to billing service",
+ )
+ parser.add_argument(
+ "--organization-id",
+ type=str,
+ help="Only send the report for this organization ID",
)
- parser.add_argument("--organization-id", type=str, help="Only send the report for this organization ID")
parser.add_argument("--async", type=bool, help="Run the task asynchronously")
def handle(self, *args, **options):
@@ -26,11 +36,19 @@ def handle(self, *args, **options):
if run_async:
send_all_org_usage_reports.delay(
- dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
+ dry_run,
+ date,
+ event_name,
+ skip_capture_event=skip_capture_event,
+ only_organization_id=organization_id,
)
else:
send_all_org_usage_reports(
- dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
+ dry_run,
+ date,
+ event_name,
+ skip_capture_event=skip_capture_event,
+ only_organization_id=organization_id,
)
if dry_run:
diff --git a/posthog/management/commands/setup_dev.py b/posthog/management/commands/setup_dev.py
index 09281d2b6c39f..42d6d33be512f 100644
--- a/posthog/management/commands/setup_dev.py
+++ b/posthog/management/commands/setup_dev.py
@@ -2,7 +2,15 @@
from django.db import transaction
from posthog.demo.legacy import ORGANIZATION_NAME, TEAM_NAME, create_demo_data
-from posthog.models import EventProperty, PersonalAPIKey, Plugin, PluginConfig, PluginSourceFile, Team, User
+from posthog.models import (
+ EventProperty,
+ PersonalAPIKey,
+ Plugin,
+ PluginConfig,
+ PluginSourceFile,
+ Team,
+ User,
+)
from posthog.models.event_definition import EventDefinition
from posthog.models.personal_api_key import hash_key_value
from posthog.models.property_definition import PropertyDefinition
@@ -13,7 +21,11 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--no-data", action="store_true", help="Create demo account without data")
- parser.add_argument("--create-e2e-test-plugin", action="store_true", help="Create plugin for charts E2E test")
+ parser.add_argument(
+ "--create-e2e-test-plugin",
+ action="store_true",
+ help="Create plugin for charts E2E test",
+ )
def handle(self, *args, **options):
print("\n⚠️ setup_dev is deprecated. Use the more robust generate_demo_data command instead.\n") # noqa T201
@@ -43,7 +55,9 @@ def handle(self, *args, **options):
PropertyDefinition.objects.create(name="is_demo", type=PropertyDefinition.Type.PERSON, team=team)
PersonalAPIKey.objects.create(
- user=user, label="e2e_demo_api_key key", secure_value=hash_key_value("e2e_demo_api_key")
+ user=user,
+ label="e2e_demo_api_key key",
+ secure_value=hash_key_value("e2e_demo_api_key"),
)
if not options["no_data"]:
create_demo_data(team)
@@ -62,7 +76,9 @@ def create_plugin(self, team):
plugin_config = PluginConfig.objects.create(plugin=plugin, team=team, order=1, config={})
PluginSourceFile.objects.update_or_create(
- plugin=plugin, filename="plugin.json", source='{ "name": "e2e test plugin", "config": [] }'
+ plugin=plugin,
+ filename="plugin.json",
+ source='{ "name": "e2e test plugin", "config": [] }',
)
PluginSourceFile.objects.update_or_create(
plugin=plugin,
diff --git a/posthog/management/commands/split_person.py b/posthog/management/commands/split_person.py
index a7b52be1bc786..f32804b14b33c 100644
--- a/posthog/management/commands/split_person.py
+++ b/posthog/management/commands/split_person.py
@@ -18,10 +18,18 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--team-id", default=None, type=int, help="Specify a team to fix data for.")
- parser.add_argument("--person-id", default=None, type=int, help="Specify the person ID to split.")
+ parser.add_argument(
+ "--person-id",
+ default=None,
+ type=int,
+ help="Specify the person ID to split.",
+ )
parser.add_argument("--live-run", action="store_true", help="Run changes, default is dry-run")
parser.add_argument(
- "--max-splits", default=None, type=int, help="Only split off a given number of distinct_ids and exit."
+ "--max-splits",
+ default=None,
+ type=int,
+ help="Only split off a given number of distinct_ids and exit.",
)
def handle(self, *args, **options):
diff --git a/posthog/management/commands/start_temporal_worker.py b/posthog/management/commands/start_temporal_worker.py
index 0ea2feea50f85..6e10a28b31b7d 100644
--- a/posthog/management/commands/start_temporal_worker.py
+++ b/posthog/management/commands/start_temporal_worker.py
@@ -14,15 +14,41 @@ class Command(BaseCommand):
help = "Start Temporal Python Django-aware Worker"
def add_arguments(self, parser):
- parser.add_argument("--temporal_host", default=settings.TEMPORAL_HOST, help="Hostname for Temporal Scheduler")
- parser.add_argument("--temporal_port", default=settings.TEMPORAL_PORT, help="Port for Temporal Scheduler")
- parser.add_argument("--namespace", default=settings.TEMPORAL_NAMESPACE, help="Namespace to connect to")
- parser.add_argument("--task-queue", default=settings.TEMPORAL_TASK_QUEUE, help="Task queue to service")
parser.add_argument(
- "--server-root-ca-cert", default=settings.TEMPORAL_CLIENT_ROOT_CA, help="Optional root server CA cert"
+ "--temporal_host",
+ default=settings.TEMPORAL_HOST,
+ help="Hostname for Temporal Scheduler",
+ )
+ parser.add_argument(
+ "--temporal_port",
+ default=settings.TEMPORAL_PORT,
+ help="Port for Temporal Scheduler",
+ )
+ parser.add_argument(
+ "--namespace",
+ default=settings.TEMPORAL_NAMESPACE,
+ help="Namespace to connect to",
+ )
+ parser.add_argument(
+ "--task-queue",
+ default=settings.TEMPORAL_TASK_QUEUE,
+ help="Task queue to service",
+ )
+ parser.add_argument(
+ "--server-root-ca-cert",
+ default=settings.TEMPORAL_CLIENT_ROOT_CA,
+ help="Optional root server CA cert",
+ )
+ parser.add_argument(
+ "--client-cert",
+ default=settings.TEMPORAL_CLIENT_CERT,
+ help="Optional client cert",
+ )
+ parser.add_argument(
+ "--client-key",
+ default=settings.TEMPORAL_CLIENT_KEY,
+ help="Optional client key",
)
- parser.add_argument("--client-cert", default=settings.TEMPORAL_CLIENT_CERT, help="Optional client cert")
- parser.add_argument("--client-key", default=settings.TEMPORAL_CLIENT_KEY, help="Optional client key")
def handle(self, *args, **options):
temporal_host = options["temporal_host"]
diff --git a/posthog/management/commands/sync_available_features.py b/posthog/management/commands/sync_available_features.py
index 516e1eed78490..841cf210cffdf 100644
--- a/posthog/management/commands/sync_available_features.py
+++ b/posthog/management/commands/sync_available_features.py
@@ -3,7 +3,9 @@
import structlog
from django.core.management.base import BaseCommand
-from posthog.tasks.sync_all_organization_available_features import sync_all_organization_available_features
+from posthog.tasks.sync_all_organization_available_features import (
+ sync_all_organization_available_features,
+)
logger = structlog.get_logger(__name__)
logger.setLevel(logging.INFO)
diff --git a/posthog/management/commands/sync_feature_flags.py b/posthog/management/commands/sync_feature_flags.py
index 2459d7f2c80c9..186316bb6a2df 100644
--- a/posthog/management/commands/sync_feature_flags.py
+++ b/posthog/management/commands/sync_feature_flags.py
@@ -56,14 +56,26 @@ def handle(self, *args, **options):
"groups": [{"properties": [], "rollout_percentage": None}],
"multivariate": {
"variants": [
- {"key": "control", "name": "Control", "rollout_percentage": 0},
- {"key": "test", "name": "Test", "rollout_percentage": 100},
+ {
+ "key": "control",
+ "name": "Control",
+ "rollout_percentage": 0,
+ },
+ {
+ "key": "test",
+ "name": "Test",
+ "rollout_percentage": 100,
+ },
]
},
},
)
else:
FeatureFlag.objects.create(
- team=team, rollout_percentage=100, name=flag, key=flag, created_by=first_user
+ team=team,
+ rollout_percentage=100,
+ name=flag,
+ key=flag,
+ created_by=first_user,
)
print(f"Created feature flag '{flag} for team {team.id} {' - ' + team.name if team.name else ''}")
diff --git a/posthog/management/commands/sync_persons_to_clickhouse.py b/posthog/management/commands/sync_persons_to_clickhouse.py
index 9e3af26deb3b5..6bf7639fcfa33 100644
--- a/posthog/management/commands/sync_persons_to_clickhouse.py
+++ b/posthog/management/commands/sync_persons_to_clickhouse.py
@@ -36,7 +36,9 @@ def add_arguments(self, parser):
parser.add_argument("--person-override", action="store_true", help="Sync person overrides")
parser.add_argument("--group", action="store_true", help="Sync groups")
parser.add_argument(
- "--deletes", action="store_true", help="process deletes for data in ClickHouse but not Postgres"
+ "--deletes",
+ action="store_true",
+ help="process deletes for data in ClickHouse but not Postgres",
)
parser.add_argument("--live-run", action="store_true", help="Run changes, default is dry-run")
diff --git a/posthog/management/commands/sync_replicated_schema.py b/posthog/management/commands/sync_replicated_schema.py
index 35b73e2808378..40d4ab8d32ca5 100644
--- a/posthog/management/commands/sync_replicated_schema.py
+++ b/posthog/management/commands/sync_replicated_schema.py
@@ -24,7 +24,9 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- "--dry-run", action="store_true", help="Exits with a non-zero status if schema changes would be required."
+ "--dry-run",
+ action="store_true",
+ help="Exits with a non-zero status if schema changes would be required.",
)
def handle(self, *args, **options):
@@ -35,7 +37,10 @@ def handle(self, *args, **options):
_, create_table_queries, out_of_sync_hosts = self.analyze_cluster_tables()
if len(out_of_sync_hosts) > 0:
- logger.info("Schema out of sync on some clickhouse nodes!", out_of_sync_hosts=out_of_sync_hosts)
+ logger.info(
+ "Schema out of sync on some clickhouse nodes!",
+ out_of_sync_hosts=out_of_sync_hosts,
+ )
if options.get("dry_run"):
exit(1)
@@ -81,7 +86,9 @@ def get_out_of_sync_hosts(self, host_tables: Dict[HostName, Set[TableName]]) ->
return out_of_sync
def create_missing_tables(
- self, out_of_sync_hosts: Dict[HostName, Set[TableName]], create_table_queries: Dict[TableName, Query]
+ self,
+ out_of_sync_hosts: Dict[HostName, Set[TableName]],
+ create_table_queries: Dict[TableName, Query],
):
missing_tables = set(table for tables in out_of_sync_hosts.values() for table in tables)
@@ -95,5 +102,5 @@ def run_on_cluster(self, create_table_query: Query) -> Query:
r"^CREATE TABLE (\S+)",
f"CREATE TABLE IF NOT EXISTS \\1 ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'",
create_table_query,
- 1,
+ count=1,
)
diff --git a/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py b/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py
index d67b74e1ab466..3f410beef2372 100644
--- a/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py
+++ b/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py
@@ -6,7 +6,9 @@
from posthog.client import sync_execute
from posthog.conftest import create_clickhouse_tables
-from posthog.management.commands.backfill_persons_and_groups_on_events import run_backfill
+from posthog.management.commands.backfill_persons_and_groups_on_events import (
+ run_backfill,
+)
from posthog.models.event.sql import EVENTS_DATA_TABLE
from posthog.test.base import BaseTest, ClickhouseTestMixin
@@ -73,7 +75,11 @@ def test_person_backfill(self):
events_after = sync_execute("select event, person_id, person_properties from events")
self.assertEqual(
- events_after, [("event1", person_id, '{ "foo": "bar" }'), ("event2", person_id, '{ "foo": "bar" }')]
+ events_after,
+ [
+ ("event1", person_id, '{ "foo": "bar" }'),
+ ("event2", person_id, '{ "foo": "bar" }'),
+ ],
)
def test_groups_backfill(self):
@@ -99,4 +105,7 @@ def test_groups_backfill(self):
sleep(10)
events_after = sync_execute("select event, $group_0, group0_properties from events")
- self.assertEqual(events_after, [("event1", "my_group", group_props), ("event2", "my_group", group_props)])
+ self.assertEqual(
+ events_after,
+ [("event1", "my_group", group_props), ("event2", "my_group", group_props)],
+ )
diff --git a/posthog/management/commands/test/test_create_batch_export_from_app.py b/posthog/management/commands/test/test_create_batch_export_from_app.py
index fb216dc4f2bb4..aabe5ad511c99 100644
--- a/posthog/management/commands/test/test_create_batch_export_from_app.py
+++ b/posthog/management/commands/test/test_create_batch_export_from_app.py
@@ -92,7 +92,11 @@ def config(request):
@pytest.fixture
def snowflake_plugin_config(snowflake_plugin, team) -> typing.Generator[PluginConfig, None, None]:
plugin_config = PluginConfig.objects.create(
- plugin=snowflake_plugin, order=1, team=team, enabled=True, config=test_snowflake_config
+ plugin=snowflake_plugin,
+ order=1,
+ team=team,
+ enabled=True,
+ config=test_snowflake_config,
)
yield plugin_config
plugin_config.delete()
diff --git a/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py b/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py
index 2698af803934c..954dac77d9c06 100644
--- a/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py
+++ b/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py
@@ -24,7 +24,11 @@ class TestFixPersonDistinctIdsAfterDelete(BaseTest, ClickhouseTestMixin):
def test_dry_run(self, mocked_ch_call):
# clickhouse only deleted person and distinct id that should be updated
ch_only_deleted_person_uuid = create_person(
- uuid=str(uuid4()), team_id=self.team.pk, is_deleted=True, version=5, sync=True
+ uuid=str(uuid4()),
+ team_id=self.team.pk,
+ is_deleted=True,
+ version=5,
+ sync=True,
)
create_person_distinct_id(
team_id=self.team.pk,
@@ -39,7 +43,10 @@ def test_dry_run(self, mocked_ch_call):
team_id=self.team.pk, properties={"abcdefg": 11112}, version=1, uuid=uuid4()
)
PersonDistinctId.objects.create(
- team=self.team, person=person_linked_to_after, distinct_id="distinct_id", version=0
+ team=self.team,
+ person=person_linked_to_after,
+ distinct_id="distinct_id",
+ version=0,
)
options = {"live_run": False, "team_id": self.team.pk, "new_version": 2500}
run(options, True)
@@ -61,7 +68,13 @@ def test_dry_run(self, mocked_ch_call):
self.assertEqual(
ch_person_distinct_ids,
[
- (UUID(ch_only_deleted_person_uuid), self.team.pk, "distinct_id", 7, True),
+ (
+ UUID(ch_only_deleted_person_uuid),
+ self.team.pk,
+ "distinct_id",
+ 7,
+ True,
+ ),
],
)
mocked_ch_call.assert_not_called()
@@ -73,7 +86,11 @@ def test_dry_run(self, mocked_ch_call):
def test_live_run(self, mocked_ch_call):
# clickhouse only deleted person and distinct id that should be updated
ch_only_deleted_person_uuid = create_person(
- uuid=str(uuid4()), team_id=self.team.pk, is_deleted=True, version=5, sync=True
+ uuid=str(uuid4()),
+ team_id=self.team.pk,
+ is_deleted=True,
+ version=5,
+ sync=True,
)
create_person_distinct_id(
team_id=self.team.pk,
@@ -96,10 +113,16 @@ def test_live_run(self, mocked_ch_call):
team_id=self.team.pk, properties={"abcdefg": 11112}, version=1, uuid=uuid4()
)
PersonDistinctId.objects.create(
- team=self.team, person=person_linked_to_after, distinct_id="distinct_id", version=0
+ team=self.team,
+ person=person_linked_to_after,
+ distinct_id="distinct_id",
+ version=0,
)
PersonDistinctId.objects.create(
- team=self.team, person=person_linked_to_after, distinct_id="distinct_id-2", version=0
+ team=self.team,
+ person=person_linked_to_after,
+ distinct_id="distinct_id-2",
+ version=0,
)
options = {"live_run": True, "team_id": self.team.pk, "new_version": 2500}
run(options, True)
@@ -110,7 +133,8 @@ def test_live_run(self, mocked_ch_call):
self.assertEqual(pg_distinct_ids[0].version, 2500)
self.assertEqual(pg_distinct_ids[1].version, 2500)
self.assertEqual(
- {pg_distinct_ids[0].distinct_id, pg_distinct_ids[1].distinct_id}, {"distinct_id", "distinct_id-2"}
+ {pg_distinct_ids[0].distinct_id, pg_distinct_ids[1].distinct_id},
+ {"distinct_id", "distinct_id-2"},
)
self.assertEqual(pg_distinct_ids[0].person.uuid, person_linked_to_after.uuid)
self.assertEqual(pg_distinct_ids[1].person.uuid, person_linked_to_after.uuid)
@@ -126,7 +150,13 @@ def test_live_run(self, mocked_ch_call):
ch_person_distinct_ids,
[
(person_linked_to_after.uuid, self.team.pk, "distinct_id", 2500, False),
- (person_linked_to_after.uuid, self.team.pk, "distinct_id-2", 2500, False),
+ (
+ person_linked_to_after.uuid,
+ self.team.pk,
+ "distinct_id-2",
+ 2500,
+ False,
+ ),
],
)
self.assertEqual(mocked_ch_call.call_count, 2)
@@ -145,7 +175,10 @@ def test_no_op(self, mocked_ch_call):
# distinct id no update
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_1, distinct_id="distinct_id-1", version=0
+ team=self.team,
+ person=person_not_changed_1,
+ distinct_id="distinct_id-1",
+ version=0,
)
# deleted person not re-used
@@ -153,7 +186,10 @@ def test_no_op(self, mocked_ch_call):
team_id=self.team.pk, properties={"abcdef": 1111}, version=0, uuid=uuid4()
)
PersonDistinctId.objects.create(
- team=self.team, person=person_deleted_1, distinct_id="distinct_id-del-1", version=16
+ team=self.team,
+ person=person_deleted_1,
+ distinct_id="distinct_id-del-1",
+ version=16,
)
person_deleted_1.delete()
diff --git a/posthog/management/commands/test/test_migrate_kafka_data.py b/posthog/management/commands/test/test_migrate_kafka_data.py
index 0053c7201b876..05bf9f0c47c3e 100644
--- a/posthog/management/commands/test/test_migrate_kafka_data.py
+++ b/posthog/management/commands/test/test_migrate_kafka_data.py
@@ -34,7 +34,12 @@ def test_can_migrate_data_from_one_topic_to_another_on_a_different_cluster():
_create_topic(new_events_topic)
# Put some data to the old topic
- _send_message(old_events_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ old_events_topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
migrate_kafka_data(
"--from-topic",
@@ -95,7 +100,12 @@ def test_we_do_not_migrate_when_dry_run_is_set():
_create_topic(new_events_topic)
# Put some data to the old topic
- _send_message(old_events_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ old_events_topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
migrate_kafka_data(
"--from-topic",
@@ -128,7 +138,12 @@ def test_cannot_send_data_back_into_same_topic_on_same_cluster():
_commit_offsets_for_topic(topic, consumer_group_id)
# Put some data to the topic
- _send_message(topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
try:
migrate_kafka_data(
@@ -161,7 +176,12 @@ def test_that_the_command_fails_if_the_specified_consumer_group_does_not_exist()
_create_topic(new_topic)
# Put some data to the topic
- _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ old_topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
try:
migrate_kafka_data(
@@ -195,7 +215,12 @@ def test_that_we_error_if_the_target_topic_doesnt_exist():
_commit_offsets_for_topic(old_topic, consumer_group_id)
# Put some data to the topic
- _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ old_topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
try:
migrate_kafka_data(
@@ -231,7 +256,12 @@ def test_we_fail_on_send_errors_to_new_topic():
_commit_offsets_for_topic(old_topic, consumer_group_id)
# Put some data to the topic
- _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")])
+ _send_message(
+ old_topic,
+ b'{ "event": "test" }',
+ key=message_key.encode("utf-8"),
+ headers=[("foo", b"bar")],
+ )
with mock.patch("kafka.KafkaProducer.send") as mock_send:
produce_future = FutureProduceResult(topic_partition=TopicPartition(new_topic, 1))
diff --git a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py
index 56e956d04f73b..acde0c4630f19 100644
--- a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py
+++ b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py
@@ -29,7 +29,11 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin):
def test_persons_sync(self):
with mute_selected_signals(): # without creating/updating in clickhouse
person = Person.objects.create(
- team_id=self.team.pk, properties={"a": 1234}, is_identified=True, version=4, uuid=uuid4()
+ team_id=self.team.pk,
+ properties={"a": 1234},
+ is_identified=True,
+ version=4,
+ uuid=uuid4(),
)
run_person_sync(self.team.pk, live_run=True, deletes=False, sync=True)
@@ -45,7 +49,11 @@ def test_persons_sync(self):
def test_persons_sync_with_null_version(self):
with mute_selected_signals(): # without creating/updating in clickhouse
person = Person.objects.create(
- team_id=self.team.pk, properties={"a": 1234}, is_identified=True, version=None, uuid=uuid4()
+ team_id=self.team.pk,
+ properties={"a": 1234},
+ is_identified=True,
+ version=None,
+ uuid=uuid4(),
)
run_person_sync(self.team.pk, live_run=True, deletes=False, sync=True)
@@ -59,7 +67,13 @@ def test_persons_sync_with_null_version(self):
self.assertEqual(ch_persons, [(person.uuid, self.team.pk, '{"a": 1234}', True, 0, False)])
def test_persons_deleted(self):
- uuid = create_person(uuid=str(uuid4()), team_id=self.team.pk, version=5, properties={"abc": 123}, sync=True)
+ uuid = create_person(
+ uuid=str(uuid4()),
+ team_id=self.team.pk,
+ version=5,
+ properties={"abc": 123},
+ sync=True,
+ )
run_person_sync(self.team.pk, live_run=True, deletes=True, sync=True)
@@ -104,7 +118,12 @@ def test_distinct_ids_sync_with_null_version(self):
def test_distinct_ids_deleted(self):
uuid = uuid4()
create_person_distinct_id(
- team_id=self.team.pk, distinct_id="test-id-7", person_id=str(uuid), is_deleted=False, version=7, sync=True
+ team_id=self.team.pk,
+ distinct_id="test-id-7",
+ person_id=str(uuid),
+ is_deleted=False,
+ version=7,
+ sync=True,
)
run_distinct_id_sync(self.team.pk, live_run=True, deletes=True, sync=True)
@@ -114,7 +133,10 @@ def test_distinct_ids_deleted(self):
""",
{"team_id": self.team.pk},
)
- self.assertEqual(ch_person_distinct_ids, [(UUID(int=0), self.team.pk, "test-id-7", 107, True)])
+ self.assertEqual(
+ ch_person_distinct_ids,
+ [(UUID(int=0), self.team.pk, "test-id-7", 107, True)],
+ )
@mock.patch(
f"{posthog.management.commands.sync_persons_to_clickhouse.__name__}.raw_create_group_ch",
@@ -156,7 +178,13 @@ def test_group_sync(self, mocked_ch_call):
wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch,
)
def test_group_sync_updates_group(self, mocked_ch_call):
- group = create_group(self.team.pk, 2, "group-key", {"a": 5}, timestamp=datetime.utcnow() - timedelta(hours=3))
+ group = create_group(
+ self.team.pk,
+ 2,
+ "group-key",
+ {"a": 5},
+ timestamp=datetime.utcnow() - timedelta(hours=3),
+ )
group.group_properties = {"a": 5, "b": 3}
group.save()
@@ -175,9 +203,18 @@ def test_group_sync_updates_group(self, mocked_ch_call):
self.assertEqual(ch_group[0], 2)
self.assertEqual(ch_group[1], "group-key")
self.assertEqual(ch_group[2], '{"a": 5, "b": 3}')
- self.assertEqual(ch_group[3].strftime("%Y-%m-%d %H:%M:%S"), group.created_at.strftime("%Y-%m-%d %H:%M:%S"))
- self.assertGreaterEqual(ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), ts_before.strftime("%Y-%m-%d %H:%M:%S"))
- self.assertLessEqual(ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
+ self.assertEqual(
+ ch_group[3].strftime("%Y-%m-%d %H:%M:%S"),
+ group.created_at.strftime("%Y-%m-%d %H:%M:%S"),
+ )
+ self.assertGreaterEqual(
+ ch_group[4].strftime("%Y-%m-%d %H:%M:%S"),
+ ts_before.strftime("%Y-%m-%d %H:%M:%S"),
+ )
+ self.assertLessEqual(
+ ch_group[4].strftime("%Y-%m-%d %H:%M:%S"),
+ datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
+ )
# second time it's a no-op
run_group_sync(self.team.pk, live_run=True, sync=True)
@@ -256,15 +293,24 @@ def everything_test_run(self, live_run):
# 2 persons who should be created
with mute_selected_signals(): # without creating/updating in clickhouse
person_should_be_created_1 = Person.objects.create(
- team_id=self.team.pk, properties={"abcde": 12553633}, version=2, uuid=uuid4()
+ team_id=self.team.pk,
+ properties={"abcde": 12553633},
+ version=2,
+ uuid=uuid4(),
)
person_should_be_created_2 = Person.objects.create(
- team_id=self.team.pk, properties={"abcdeit34": 12553633}, version=3, uuid=uuid4()
+ team_id=self.team.pk,
+ properties={"abcdeit34": 12553633},
+ version=3,
+ uuid=uuid4(),
)
# 2 persons who have updates
person_should_update_1 = Person.objects.create(
- team_id=self.team.pk, properties={"abcde": 12553}, version=5, uuid=uuid4()
+ team_id=self.team.pk,
+ properties={"abcde": 12553},
+ version=5,
+ uuid=uuid4(),
)
person_should_update_2 = Person.objects.create(
team_id=self.team.pk, properties={"abc": 125}, version=7, uuid=uuid4()
@@ -286,35 +332,61 @@ def everything_test_run(self, live_run):
# 2 persons need to be deleted
deleted_person_1_uuid = create_person(
- uuid=str(uuid4()), team_id=self.team.pk, version=7, properties={"abcd": 123}, sync=True
+ uuid=str(uuid4()),
+ team_id=self.team.pk,
+ version=7,
+ properties={"abcd": 123},
+ sync=True,
)
deleted_person_2_uuid = create_person(
- uuid=str(uuid4()), team_id=self.team.pk, version=8, properties={"abcef": 123}, sync=True
+ uuid=str(uuid4()),
+ team_id=self.team.pk,
+ version=8,
+ properties={"abcef": 123},
+ sync=True,
)
# 2 distinct id no update
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_1, distinct_id="distinct_id", version=0
+ team=self.team,
+ person=person_not_changed_1,
+ distinct_id="distinct_id",
+ version=0,
)
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_1, distinct_id="distinct_id-9", version=9
+ team=self.team,
+ person=person_not_changed_1,
+ distinct_id="distinct_id-9",
+ version=9,
)
# # 2 distinct id to be created
with mute_selected_signals(): # without creating/updating in clickhouse
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_1, distinct_id="distinct_id-10", version=10
+ team=self.team,
+ person=person_not_changed_1,
+ distinct_id="distinct_id-10",
+ version=10,
)
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_1, distinct_id="distinct_id-11", version=11
+ team=self.team,
+ person=person_not_changed_1,
+ distinct_id="distinct_id-11",
+ version=11,
)
# 2 distinct id that need to update
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_2, distinct_id="distinct_id-12", version=13
+ team=self.team,
+ person=person_not_changed_2,
+ distinct_id="distinct_id-12",
+ version=13,
)
PersonDistinctId.objects.create(
- team=self.team, person=person_not_changed_2, distinct_id="distinct_id-14", version=15
+ team=self.team,
+ person=person_not_changed_2,
+ distinct_id="distinct_id-14",
+ version=15,
)
create_person_distinct_id(
team_id=self.team.pk,
@@ -397,23 +469,95 @@ def everything_test_run(self, live_run):
self.assertEqual(
ch_persons,
[
- (person_not_changed_1.uuid, self.team.pk, '{"abcdef": 1111}', False, 0, False),
- (person_not_changed_2.uuid, self.team.pk, '{"abcdefg": 11112}', False, 1, False),
- (person_should_update_1.uuid, self.team.pk, '{"a": 13}', False, 4, False),
- (person_should_update_2.uuid, self.team.pk, '{"a": 1}', False, 6, False),
- (UUID(deleted_person_1_uuid), self.team.pk, '{"abcd": 123}', False, 7, False),
- (UUID(deleted_person_2_uuid), self.team.pk, '{"abcef": 123}', False, 8, False),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ '{"abcdef": 1111}',
+ False,
+ 0,
+ False,
+ ),
+ (
+ person_not_changed_2.uuid,
+ self.team.pk,
+ '{"abcdefg": 11112}',
+ False,
+ 1,
+ False,
+ ),
+ (
+ person_should_update_1.uuid,
+ self.team.pk,
+ '{"a": 13}',
+ False,
+ 4,
+ False,
+ ),
+ (
+ person_should_update_2.uuid,
+ self.team.pk,
+ '{"a": 1}',
+ False,
+ 6,
+ False,
+ ),
+ (
+ UUID(deleted_person_1_uuid),
+ self.team.pk,
+ '{"abcd": 123}',
+ False,
+ 7,
+ False,
+ ),
+ (
+ UUID(deleted_person_2_uuid),
+ self.team.pk,
+ '{"abcef": 123}',
+ False,
+ 8,
+ False,
+ ),
],
)
self.assertEqual(
ch_person_distinct_ids,
[
(person_not_changed_1.uuid, self.team.pk, "distinct_id", 0, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-9", 9, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-12", 12, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-14", 14, False),
- (deleted_distinct_id_1_uuid, self.team.pk, "distinct_id-17", 17, False),
- (deleted_distinct_id_2_uuid, self.team.pk, "distinct_id-18", 18, False),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-9",
+ 9,
+ False,
+ ),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-12",
+ 12,
+ False,
+ ),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-14",
+ 14,
+ False,
+ ),
+ (
+ deleted_distinct_id_1_uuid,
+ self.team.pk,
+ "distinct_id-17",
+ 17,
+ False,
+ ),
+ (
+ deleted_distinct_id_2_uuid,
+ self.team.pk,
+ "distinct_id-18",
+ 18,
+ False,
+ ),
],
)
self.assertEqual(len(ch_groups), 0)
@@ -421,12 +565,54 @@ def everything_test_run(self, live_run):
self.assertEqual(
ch_persons,
[
- (person_not_changed_1.uuid, self.team.pk, '{"abcdef": 1111}', False, 0, False),
- (person_not_changed_2.uuid, self.team.pk, '{"abcdefg": 11112}', False, 1, False),
- (person_should_be_created_1.uuid, self.team.pk, '{"abcde": 12553633}', False, 2, False),
- (person_should_be_created_2.uuid, self.team.pk, '{"abcdeit34": 12553633}', False, 3, False),
- (person_should_update_1.uuid, self.team.pk, '{"abcde": 12553}', False, 5, False),
- (person_should_update_2.uuid, self.team.pk, '{"abc": 125}', False, 7, False),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ '{"abcdef": 1111}',
+ False,
+ 0,
+ False,
+ ),
+ (
+ person_not_changed_2.uuid,
+ self.team.pk,
+ '{"abcdefg": 11112}',
+ False,
+ 1,
+ False,
+ ),
+ (
+ person_should_be_created_1.uuid,
+ self.team.pk,
+ '{"abcde": 12553633}',
+ False,
+ 2,
+ False,
+ ),
+ (
+ person_should_be_created_2.uuid,
+ self.team.pk,
+ '{"abcdeit34": 12553633}',
+ False,
+ 3,
+ False,
+ ),
+ (
+ person_should_update_1.uuid,
+ self.team.pk,
+ '{"abcde": 12553}',
+ False,
+ 5,
+ False,
+ ),
+ (
+ person_should_update_2.uuid,
+ self.team.pk,
+ '{"abc": 125}',
+ False,
+ 7,
+ False,
+ ),
(UUID(deleted_person_1_uuid), self.team.pk, "{}", False, 107, True),
(UUID(deleted_person_2_uuid), self.team.pk, "{}", False, 108, True),
],
@@ -435,11 +621,41 @@ def everything_test_run(self, live_run):
ch_person_distinct_ids,
[
(person_not_changed_1.uuid, self.team.pk, "distinct_id", 0, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-9", 9, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-10", 10, False),
- (person_not_changed_1.uuid, self.team.pk, "distinct_id-11", 11, False),
- (person_not_changed_2.uuid, self.team.pk, "distinct_id-12", 13, False),
- (person_not_changed_2.uuid, self.team.pk, "distinct_id-14", 15, False),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-9",
+ 9,
+ False,
+ ),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-10",
+ 10,
+ False,
+ ),
+ (
+ person_not_changed_1.uuid,
+ self.team.pk,
+ "distinct_id-11",
+ 11,
+ False,
+ ),
+ (
+ person_not_changed_2.uuid,
+ self.team.pk,
+ "distinct_id-12",
+ 13,
+ False,
+ ),
+ (
+ person_not_changed_2.uuid,
+ self.team.pk,
+ "distinct_id-14",
+ 15,
+ False,
+ ),
(UUID(int=0), self.team.pk, "distinct_id-17", 117, True),
(UUID(int=0), self.team.pk, "distinct_id-18", 118, True),
],
diff --git a/posthog/management/commands/test/test_sync_replicated_schema.py b/posthog/management/commands/test/test_sync_replicated_schema.py
index 83ba19901b229..8b51b9259b5c0 100644
--- a/posthog/management/commands/test/test_sync_replicated_schema.py
+++ b/posthog/management/commands/test/test_sync_replicated_schema.py
@@ -21,7 +21,11 @@ def recreate_database(self, create_tables=True):
def test_analyze_test_cluster(self):
self.recreate_database(create_tables=True)
- host_tables, create_table_queries, out_of_sync_hosts = Command().analyze_cluster_tables()
+ (
+ host_tables,
+ create_table_queries,
+ out_of_sync_hosts,
+ ) = Command().analyze_cluster_tables()
self.assertEqual(len(host_tables), 1)
self.assertGreater(len(create_table_queries), 0)
@@ -34,7 +38,11 @@ def test_analyze_test_cluster(self):
def test_analyze_empty_cluster(self):
self.recreate_database(create_tables=False)
- host_tables, create_table_queries, out_of_sync_hosts = Command().analyze_cluster_tables()
+ (
+ host_tables,
+ create_table_queries,
+ out_of_sync_hosts,
+ ) = Command().analyze_cluster_tables()
self.assertEqual(host_tables, {})
self.assertEqual(create_table_queries, {})
diff --git a/posthog/middleware.py b/posthog/middleware.py
index 406197944996e..b480580afad40 100644
--- a/posthog/middleware.py
+++ b/posthog/middleware.py
@@ -13,7 +13,11 @@
from django.middleware.csrf import CsrfViewMiddleware
from django.urls import resolve
from django.utils.cache import add_never_cache_headers
-from django_prometheus.middleware import Metrics, PrometheusAfterMiddleware, PrometheusBeforeMiddleware
+from django_prometheus.middleware import (
+ Metrics,
+ PrometheusAfterMiddleware,
+ PrometheusBeforeMiddleware,
+)
from rest_framework import status
from statshog.defaults.django import statsd
@@ -26,12 +30,11 @@
from posthog.metrics import LABEL_TEAM_ID
from posthog.models import Action, Cohort, Dashboard, FeatureFlag, Insight, Team, User
from posthog.rate_limit import DecideRateThrottle
-from posthog.settings import SITE_URL
+from posthog.settings import SITE_URL, DEBUG
from posthog.settings.statsd import STATSD_HOST
from posthog.user_permissions import UserPermissions
-from .utils_cors import cors_response
-
from .auth import PersonalAPIKeyAuthentication
+from .utils_cors import cors_response
ALWAYS_ALLOWED_ENDPOINTS = [
"decide",
@@ -45,6 +48,10 @@
"_health",
]
+if DEBUG:
+ # /i/ is the new root path for capture endpoints
+ ALWAYS_ALLOWED_ENDPOINTS.append("i")
+
default_cookie_options = {
"max_age": 365 * 24 * 60 * 60, # one year
"expires": None,
@@ -113,6 +120,8 @@ def process_view(self, request, callback, callback_args, callback_kwargs):
# if super().process_view did not find a valid CSRF token, try looking for a personal API key
if result is not None and PersonalAPIKeyAuthentication.find_key_with_source(request) is not None:
return self._accept(request)
+ if DEBUG and request.path.split("/")[1] in ALWAYS_ALLOWED_ENDPOINTS:
+ return self._accept(request)
return result
def _accept(self, request):
@@ -228,7 +237,10 @@ def __call__(self, request: HttpRequest):
response: HttpResponse = self.get_response(request)
if "api/" in request.path and "capture" not in request.path:
- statsd.incr("http_api_request_response", tags={"id": route_id, "status_code": response.status_code})
+ statsd.incr(
+ "http_api_request_response",
+ tags={"id": route_id, "status_code": response.status_code},
+ )
return response
finally:
@@ -243,7 +255,13 @@ def _get_param(self, request: HttpRequest, name: str):
class QueryTimeCountingMiddleware:
- ALLOW_LIST_ROUTES = ["dashboard", "insight", "property_definitions", "properties", "person"]
+ ALLOW_LIST_ROUTES = [
+ "dashboard",
+ "insight",
+ "property_definitions",
+ "properties",
+ "person",
+ ]
def __init__(self, get_response):
self.get_response = get_response
@@ -286,7 +304,8 @@ class ShortCircuitMiddleware:
def __init__(self, get_response):
self.get_response = get_response
self.decide_throttler = DecideRateThrottle(
- replenish_rate=settings.DECIDE_BUCKET_REPLENISH_RATE, bucket_capacity=settings.DECIDE_BUCKET_CAPACITY
+ replenish_rate=settings.DECIDE_BUCKET_REPLENISH_RATE,
+ bucket_capacity=settings.DECIDE_BUCKET_CAPACITY,
)
def __call__(self, request: HttpRequest):
@@ -391,7 +410,12 @@ def __call__(self, request: HttpRequest):
resolver_match = resolve(request.path)
request.resolver_match = resolver_match
for middleware in self.CAPTURE_MIDDLEWARE:
- middleware.process_view(request, resolver_match.func, resolver_match.args, resolver_match.kwargs)
+ middleware.process_view(
+ request,
+ resolver_match.func,
+ resolver_match.args,
+ resolver_match.kwargs,
+ )
response: HttpResponse = get_event(request)
diff --git a/posthog/migrations/0001_initial.py b/posthog/migrations/0001_initial.py
index a196986062dd6..4a4f27e763557 100644
--- a/posthog/migrations/0001_initial.py
+++ b/posthog/migrations/0001_initial.py
@@ -10,7 +10,6 @@
class Migration(migrations.Migration):
-
initial = True
dependencies = [
diff --git a/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py b/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py
index cab1b248467f5..d862ca30ecf22 100644
--- a/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py
+++ b/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py
@@ -324,11 +324,28 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="User",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("password", models.CharField(max_length=128, verbose_name="password")),
- ("last_login", models.DateTimeField(blank=True, null=True, verbose_name="last login")),
- ("first_name", models.CharField(blank=True, max_length=150, verbose_name="first name")),
- ("last_name", models.CharField(blank=True, max_length=150, verbose_name="last name")),
+ (
+ "last_login",
+ models.DateTimeField(blank=True, null=True, verbose_name="last login"),
+ ),
+ (
+ "first_name",
+ models.CharField(blank=True, max_length=150, verbose_name="first name"),
+ ),
+ (
+ "last_name",
+ models.CharField(blank=True, max_length=150, verbose_name="last name"),
+ ),
(
"is_staff",
models.BooleanField(
@@ -345,7 +362,10 @@ class Migration(migrations.Migration):
verbose_name="active",
),
),
- ("date_joined", models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined")),
+ (
+ "date_joined",
+ models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"),
+ ),
# NOTE: to achieve parity with the constraint names from the
# unsquashed migration, we need to apply uniqueness separately
# as Django appears to have different behaviour in these cases.
@@ -362,10 +382,22 @@ class Migration(migrations.Migration):
"temporary_token",
models.CharField(blank=True, max_length=200, null=True),
), # NOTE: we make this unique later
- ("distinct_id", models.CharField(blank=True, max_length=200)), # NOTE: we make this unique later
- ("email_opt_in", models.BooleanField(blank=True, default=False, null=True)),
- ("partial_notification_settings", models.JSONField(blank=True, null=True)),
- ("anonymize_data", models.BooleanField(blank=True, default=False, null=True)),
+ (
+ "distinct_id",
+ models.CharField(blank=True, max_length=200),
+ ), # NOTE: we make this unique later
+ (
+ "email_opt_in",
+ models.BooleanField(blank=True, default=False, null=True),
+ ),
+ (
+ "partial_notification_settings",
+ models.JSONField(blank=True, null=True),
+ ),
+ (
+ "anonymize_data",
+ models.BooleanField(blank=True, default=False, null=True),
+ ),
(
"toolbar_mode",
models.CharField(
@@ -376,7 +408,10 @@ class Migration(migrations.Migration):
null=True,
),
),
- ("events_column_config", models.JSONField(default=posthog.models.user.events_column_config_default)),
+ (
+ "events_column_config",
+ models.JSONField(default=posthog.models.user.events_column_config_default),
+ ),
],
options={
"verbose_name": "user",
@@ -414,22 +449,44 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Action",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
("description", models.TextField(blank=True, default="")),
("created_at", models.DateTimeField(auto_now_add=True)),
("deleted", models.BooleanField(default=False)),
("post_to_slack", models.BooleanField(default=False)),
- ("slack_message_format", models.CharField(blank=True, default="", max_length=600)),
+ (
+ "slack_message_format",
+ models.CharField(blank=True, default="", max_length=600),
+ ),
("is_calculating", models.BooleanField(default=False)),
("updated_at", models.DateTimeField(auto_now=True)),
- ("last_calculated_at", models.DateTimeField(blank=True, default=django.utils.timezone.now)),
+ (
+ "last_calculated_at",
+ models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
],
),
migrations.CreateModel(
name="ActionStep",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("tag_name", models.CharField(blank=True, max_length=400, null=True)),
("text", models.CharField(blank=True, max_length=400, null=True)),
("href", models.CharField(blank=True, max_length=65535, null=True)),
@@ -439,7 +496,11 @@ class Migration(migrations.Migration):
"url_matching",
models.CharField(
blank=True,
- choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")],
+ choices=[
+ ("contains", "contains"),
+ ("regex", "regex"),
+ ("exact", "exact"),
+ ],
default="contains",
max_length=400,
null=True,
@@ -456,7 +517,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("team_id", models.PositiveIntegerField(null=True)),
@@ -468,7 +532,8 @@ class Migration(migrations.Migration):
(
"detail",
models.JSONField(
- encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, null=True
+ encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder,
+ null=True,
),
),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
@@ -477,9 +542,20 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Annotation",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("content", models.CharField(blank=True, max_length=400, null=True)),
- ("created_at", models.DateTimeField(default=django.utils.timezone.now, null=True)),
+ (
+ "created_at",
+ models.DateTimeField(default=django.utils.timezone.now, null=True),
+ ),
("updated_at", models.DateTimeField(auto_now=True)),
(
"scope",
@@ -495,7 +571,11 @@ class Migration(migrations.Migration):
),
(
"creation_type",
- models.CharField(choices=[("USR", "user"), ("GIT", "GitHub")], default="USR", max_length=3),
+ models.CharField(
+ choices=[("USR", "user"), ("GIT", "GitHub")],
+ default="USR",
+ max_length=3,
+ ),
),
("date_marker", models.DateTimeField(blank=True, null=True)),
("deleted", models.BooleanField(default=False)),
@@ -523,16 +603,28 @@ class Migration(migrations.Migration):
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False)),
("name", models.CharField(max_length=50)),
- ("description", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "description",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("progress", models.PositiveSmallIntegerField(default=0)),
("status", models.PositiveSmallIntegerField(default=0)),
- ("current_operation_index", models.PositiveSmallIntegerField(default=0)),
+ (
+ "current_operation_index",
+ models.PositiveSmallIntegerField(default=0),
+ ),
("current_query_id", models.CharField(default="", max_length=100)),
("celery_task_id", models.CharField(default="", max_length=100)),
("started_at", models.DateTimeField(blank=True, null=True)),
("finished_at", models.DateTimeField(blank=True, null=True)),
- ("posthog_min_version", models.CharField(blank=True, max_length=20, null=True)),
- ("posthog_max_version", models.CharField(blank=True, max_length=20, null=True)),
+ (
+ "posthog_min_version",
+ models.CharField(blank=True, max_length=20, null=True),
+ ),
+ (
+ "posthog_max_version",
+ models.CharField(blank=True, max_length=20, null=True),
+ ),
("parameters", models.JSONField(default=dict)),
],
),
@@ -555,7 +647,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Cohort",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
("description", models.CharField(blank=True, max_length=1000)),
("deleted", models.BooleanField(default=False)),
@@ -563,7 +663,10 @@ class Migration(migrations.Migration):
("version", models.IntegerField(blank=True, null=True)),
("pending_version", models.IntegerField(blank=True, null=True)),
("count", models.IntegerField(blank=True, null=True)),
- ("created_at", models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True)),
+ (
+ "created_at",
+ models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True),
+ ),
("is_calculating", models.BooleanField(default=False)),
("last_calculation", models.DateTimeField(blank=True, null=True)),
("errors_calculating", models.IntegerField(default=0)),
@@ -581,7 +684,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Dashboard",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
("description", models.TextField(blank=True)),
("pinned", models.BooleanField(default=False)),
@@ -592,7 +703,11 @@ class Migration(migrations.Migration):
(
"creation_mode",
models.CharField(
- choices=[("default", "Default"), ("template", "Template"), ("duplicate", "Duplicate")],
+ choices=[
+ ("default", "Default"),
+ ("template", "Template"),
+ ("duplicate", "Duplicate"),
+ ],
default="default",
max_length=16,
),
@@ -610,7 +725,11 @@ class Migration(migrations.Migration):
(
"deprecated_tags",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=list,
+ null=True,
+ size=None,
),
),
(
@@ -624,17 +743,31 @@ class Migration(migrations.Migration):
size=None,
),
),
- ("share_token", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "share_token",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("is_shared", models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name="DashboardTile",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("layouts", models.JSONField(default=dict)),
("color", models.CharField(blank=True, max_length=400, null=True)),
- ("filters_hash", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "filters_hash",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("last_refresh", models.DateTimeField(blank=True, null=True)),
("refreshing", models.BooleanField(null=True)),
("refresh_attempt", models.IntegerField(blank=True, null=True)),
@@ -644,7 +777,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Element",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("text", models.CharField(blank=True, max_length=10000, null=True)),
("tag_name", models.CharField(blank=True, max_length=1000, null=True)),
("href", models.CharField(blank=True, max_length=10000, null=True)),
@@ -652,7 +793,10 @@ class Migration(migrations.Migration):
(
"attr_class",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(blank=True, max_length=200), blank=True, null=True, size=None
+ base_field=models.CharField(blank=True, max_length=200),
+ blank=True,
+ null=True,
+ size=None,
),
),
("nth_child", models.IntegerField(blank=True, null=True)),
@@ -664,20 +808,42 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="ElementGroup",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("hash", models.CharField(blank=True, max_length=400, null=True)),
],
),
migrations.CreateModel(
name="Event",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
("event", models.CharField(blank=True, max_length=200, null=True)),
("distinct_id", models.CharField(max_length=200)),
("properties", models.JSONField(default=dict)),
- ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)),
- ("elements_hash", models.CharField(blank=True, max_length=200, null=True)),
+ (
+ "timestamp",
+ models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ (
+ "elements_hash",
+ models.CharField(blank=True, max_length=200, null=True),
+ ),
("site_url", models.CharField(blank=True, max_length=200, null=True)),
("elements", models.JSONField(blank=True, default=list, null=True)),
],
@@ -685,7 +851,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="EventBuffer",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("event", models.JSONField(blank=True, null=True)),
("process_at", models.DateTimeField()),
("locked", models.BooleanField()),
@@ -697,11 +871,17 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=400)),
- ("created_at", models.DateTimeField(default=django.utils.timezone.now, null=True)),
+ (
+ "created_at",
+ models.DateTimeField(default=django.utils.timezone.now, null=True),
+ ),
("last_seen_at", models.DateTimeField(default=None, null=True)),
("volume_30_day", models.IntegerField(default=None, null=True)),
("query_usage_30_day", models.IntegerField(default=None, null=True)),
@@ -710,7 +890,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="EventProperty",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("event", models.CharField(max_length=400)),
("property", models.CharField(max_length=400)),
],
@@ -718,9 +906,20 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Experiment",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(max_length=400)),
- ("description", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "description",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("filters", models.JSONField(default=dict)),
("parameters", models.JSONField(default=dict, null=True)),
("secondary_metrics", models.JSONField(default=list, null=True)),
@@ -734,7 +933,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="ExportedAsset",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
(
"export_format",
models.CharField(
@@ -749,7 +956,10 @@ class Migration(migrations.Migration):
("content", models.BinaryField(null=True)),
("created_at", models.DateTimeField(auto_now_add=True)),
("export_context", models.JSONField(blank=True, null=True)),
- ("content_location", models.TextField(blank=True, max_length=1000, null=True)),
+ (
+ "content_location",
+ models.TextField(blank=True, max_length=1000, null=True),
+ ),
(
"access_token",
models.CharField(
@@ -764,7 +974,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="FeatureFlag",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=400)),
("name", models.TextField(blank=True)),
("filters", models.JSONField(default=dict)),
@@ -774,13 +992,24 @@ class Migration(migrations.Migration):
("active", models.BooleanField(default=True)),
("rollback_conditions", models.JSONField(blank=True, null=True)),
("performed_rollback", models.BooleanField(blank=True, null=True)),
- ("ensure_experience_continuity", models.BooleanField(blank=True, default=False, null=True)),
+ (
+ "ensure_experience_continuity",
+ models.BooleanField(blank=True, default=False, null=True),
+ ),
],
),
migrations.CreateModel(
name="FeatureFlagHashKeyOverride",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("feature_flag_key", models.CharField(max_length=400)),
("hash_key", models.CharField(max_length=400)),
],
@@ -788,14 +1017,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="FeatureFlagOverride",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("override_value", models.JSONField()),
],
),
migrations.CreateModel(
name="Group",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("group_key", models.CharField(max_length=400)),
("group_type_index", models.IntegerField()),
("group_properties", models.JSONField(default=dict)),
@@ -808,22 +1053,53 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="GroupTypeMapping",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("group_type", models.CharField(max_length=400)),
("group_type_index", models.IntegerField()),
- ("name_singular", models.CharField(blank=True, max_length=400, null=True)),
- ("name_plural", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "name_singular",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
+ (
+ "name_plural",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
],
),
migrations.CreateModel(
name="Insight",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
- ("derived_name", models.CharField(blank=True, max_length=400, null=True)),
- ("description", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "derived_name",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
+ (
+ "description",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("filters", models.JSONField(default=dict)),
- ("filters_hash", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "filters_hash",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("order", models.IntegerField(blank=True, null=True)),
("deleted", models.BooleanField(default=False)),
("saved", models.BooleanField(default=False)),
@@ -831,10 +1107,20 @@ class Migration(migrations.Migration):
("last_refresh", models.DateTimeField(blank=True, null=True)),
("refreshing", models.BooleanField(default=False)),
("is_sample", models.BooleanField(default=False)),
- ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)),
+ (
+ "short_id",
+ models.CharField(
+ blank=True,
+ default=posthog.utils.generate_short_id,
+ max_length=12,
+ ),
+ ),
("favorited", models.BooleanField(default=False)),
("refresh_attempt", models.IntegerField(blank=True, null=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("layouts", models.JSONField(default=dict)),
("color", models.CharField(blank=True, max_length=400, null=True)),
("updated_at", models.DateTimeField(auto_now=True)),
@@ -843,7 +1129,11 @@ class Migration(migrations.Migration):
(
"deprecated_tags",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=list,
+ null=True,
+ size=None,
),
),
(
@@ -868,7 +1158,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("cache_key", models.CharField(max_length=400)),
@@ -883,14 +1176,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="InsightViewed",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("last_viewed_at", models.DateTimeField()),
],
),
migrations.CreateModel(
name="InstanceSetting",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=128)),
("raw_value", models.CharField(blank=True, max_length=1024)),
],
@@ -901,26 +1210,44 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=64)),
- ("slug", posthog.models.utils.LowercaseSlugField(max_length=48, unique=True)),
+ (
+ "slug",
+ posthog.models.utils.LowercaseSlugField(max_length=48, unique=True),
+ ),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"plugins_access_level",
models.PositiveSmallIntegerField(
- choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")], default=3
+ choices=[
+ (0, "none"),
+ (3, "config"),
+ (6, "install"),
+ (9, "root"),
+ ],
+ default=3,
),
),
("for_internal_metrics", models.BooleanField(default=False)),
("is_member_join_email_enabled", models.BooleanField(default=True)),
- ("customer_id", models.CharField(blank=True, max_length=200, null=True)),
+ (
+ "customer_id",
+ models.CharField(blank=True, max_length=200, null=True),
+ ),
(
"available_features",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=64), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=64),
+ blank=True,
+ default=list,
+ size=None,
),
),
("usage", models.JSONField(blank=True, null=True)),
@@ -929,7 +1256,10 @@ class Migration(migrations.Migration):
(
"domain_whitelist",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=256), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=256),
+ blank=True,
+ default=list,
+ size=None,
),
),
],
@@ -937,18 +1267,39 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Person",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("properties_last_updated_at", models.JSONField(blank=True, default=dict, null=True)),
+ (
+ "properties_last_updated_at",
+ models.JSONField(blank=True, default=dict, null=True),
+ ),
("properties_last_operation", models.JSONField(blank=True, null=True)),
("properties", models.JSONField(default=dict)),
("is_identified", models.BooleanField(default=False)),
- ("uuid", models.UUIDField(db_index=True, default=posthog.models.utils.UUIDT, editable=False)),
+ (
+ "uuid",
+ models.UUIDField(
+ db_index=True,
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ ),
+ ),
("version", models.BigIntegerField(blank=True, null=True)),
(
"is_user",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
),
),
],
@@ -956,7 +1307,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Plugin",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
(
"plugin_type",
models.CharField(
@@ -974,7 +1333,10 @@ class Migration(migrations.Migration):
),
("is_global", models.BooleanField(default=False)),
("is_preinstalled", models.BooleanField(default=False)),
- ("is_stateless", models.BooleanField(blank=True, default=False, null=True)),
+ (
+ "is_stateless",
+ models.BooleanField(blank=True, default=False, null=True),
+ ),
("name", models.CharField(blank=True, max_length=200, null=True)),
("description", models.TextField(blank=True, null=True)),
("url", models.CharField(blank=True, max_length=800, null=True)),
@@ -1008,7 +1370,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PluginConfig",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("enabled", models.BooleanField(default=False)),
("order", models.IntegerField()),
("config", models.JSONField(default=dict)),
@@ -1016,27 +1386,49 @@ class Migration(migrations.Migration):
("web_token", models.CharField(default=None, max_length=64, null=True)),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
- ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")),
+ (
+ "plugin",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"),
+ ),
],
),
migrations.CreateModel(
name="Prompt",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("step", models.IntegerField()),
("type", models.CharField(max_length=200)),
("title", models.CharField(max_length=200)),
("text", models.CharField(max_length=1000)),
("placement", models.CharField(default="top", max_length=200)),
("buttons", models.JSONField()),
- ("reference", models.CharField(default=None, max_length=200, null=True)),
+ (
+ "reference",
+ models.CharField(default=None, max_length=200, null=True),
+ ),
("icon", models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name="PromptSequence",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("type", models.CharField(max_length=200)),
(
@@ -1050,7 +1442,10 @@ class Migration(migrations.Migration):
("status", models.CharField(max_length=200)),
("requires_opt_in", models.BooleanField(default=False)),
("autorun", models.BooleanField(default=True)),
- ("must_have_completed", models.ManyToManyField(blank=True, to="posthog.PromptSequence")),
+ (
+ "must_have_completed",
+ models.ManyToManyField(blank=True, to="posthog.PromptSequence"),
+ ),
("prompts", models.ManyToManyField(to="posthog.Prompt")),
],
),
@@ -1060,7 +1455,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=400)),
@@ -1086,7 +1484,10 @@ class Migration(migrations.Migration):
blank=True,
choices=[
("unix_timestamp", "Unix Timestamp in seconds"),
- ("unix_timestamp_milliseconds", "Unix Timestamp in milliseconds"),
+ (
+ "unix_timestamp_milliseconds",
+ "Unix Timestamp in milliseconds",
+ ),
("YYYY-MM-DDThh:mm:ssZ", "YYYY-MM-DDThh:mm:ssZ"),
("YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss"),
("DD-MM-YYYY hh:mm:ss", "DD-MM-YYYY hh:mm:ss"),
@@ -1105,21 +1506,45 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingPlaylist",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
- ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "short_id",
+ models.CharField(
+ blank=True,
+ default=posthog.utils.generate_short_id,
+ max_length=12,
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
- ("derived_name", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "derived_name",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("description", models.TextField(blank=True)),
("pinned", models.BooleanField(default=False)),
("deleted", models.BooleanField(default=False)),
("filters", models.JSONField(default=dict)),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("is_static", models.BooleanField(default=False)),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
@@ -1140,7 +1565,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=255)),
@@ -1149,7 +1577,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Team",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
(
"uuid",
models.UUIDField(default=posthog.models.utils.UUIDT, editable=False),
@@ -1161,7 +1597,8 @@ class Migration(migrations.Migration):
max_length=200,
validators=[
django.core.validators.MinLengthValidator(
- 10, "Project's API token must be at least 10 characters long!"
+ 10,
+ "Project's API token must be at least 10 characters long!",
)
],
),
@@ -1169,7 +1606,10 @@ class Migration(migrations.Migration):
(
"app_urls",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=200, null=True), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=200, null=True),
+ blank=True,
+ default=list,
+ size=None,
),
),
(
@@ -1180,21 +1620,36 @@ class Migration(migrations.Migration):
validators=[django.core.validators.MinLengthValidator(1, "Project must have a name!")],
),
),
- ("slack_incoming_webhook", models.CharField(blank=True, max_length=500, null=True)),
+ (
+ "slack_incoming_webhook",
+ models.CharField(blank=True, max_length=500, null=True),
+ ),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("anonymize_ips", models.BooleanField(default=False)),
("completed_snippet_onboarding", models.BooleanField(default=False)),
("ingested_event", models.BooleanField(default=False)),
("session_recording_opt_in", models.BooleanField(default=False)),
- ("capture_console_log_opt_in", models.BooleanField(blank=True, null=True)),
- ("signup_token", models.CharField(blank=True, max_length=200, null=True)),
+ (
+ "capture_console_log_opt_in",
+ models.BooleanField(blank=True, null=True),
+ ),
+ (
+ "signup_token",
+ models.CharField(blank=True, max_length=200, null=True),
+ ),
("is_demo", models.BooleanField(default=False)),
("access_control", models.BooleanField(default=False)),
("inject_web_apps", models.BooleanField(null=True)),
("test_account_filters", models.JSONField(default=list)),
- ("test_account_filters_default_checked", models.BooleanField(blank=True, null=True)),
- ("path_cleaning_filters", models.JSONField(blank=True, default=list, null=True)),
+ (
+ "test_account_filters_default_checked",
+ models.BooleanField(blank=True, null=True),
+ ),
+ (
+ "path_cleaning_filters",
+ models.JSONField(blank=True, default=list, null=True),
+ ),
(
"timezone",
models.CharField(
@@ -1256,16 +1711,34 @@ class Migration(migrations.Migration):
("America/Anguilla", "America/Anguilla"),
("America/Antigua", "America/Antigua"),
("America/Araguaina", "America/Araguaina"),
- ("America/Argentina/Buenos_Aires", "America/Argentina/Buenos_Aires"),
- ("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
+ (
+ "America/Argentina/Buenos_Aires",
+ "America/Argentina/Buenos_Aires",
+ ),
+ (
+ "America/Argentina/Catamarca",
+ "America/Argentina/Catamarca",
+ ),
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
- ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
+ (
+ "America/Argentina/La_Rioja",
+ "America/Argentina/La_Rioja",
+ ),
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
- ("America/Argentina/Rio_Gallegos", "America/Argentina/Rio_Gallegos"),
+ (
+ "America/Argentina/Rio_Gallegos",
+ "America/Argentina/Rio_Gallegos",
+ ),
("America/Argentina/Salta", "America/Argentina/Salta"),
- ("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
- ("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
+ (
+ "America/Argentina/San_Juan",
+ "America/Argentina/San_Juan",
+ ),
+ (
+ "America/Argentina/San_Luis",
+ "America/Argentina/San_Luis",
+ ),
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
("America/Aruba", "America/Aruba"),
@@ -1314,10 +1787,16 @@ class Migration(migrations.Migration):
("America/Halifax", "America/Halifax"),
("America/Havana", "America/Havana"),
("America/Hermosillo", "America/Hermosillo"),
- ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
+ (
+ "America/Indiana/Indianapolis",
+ "America/Indiana/Indianapolis",
+ ),
("America/Indiana/Knox", "America/Indiana/Knox"),
("America/Indiana/Marengo", "America/Indiana/Marengo"),
- ("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
+ (
+ "America/Indiana/Petersburg",
+ "America/Indiana/Petersburg",
+ ),
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
("America/Indiana/Vevay", "America/Indiana/Vevay"),
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
@@ -1326,8 +1805,14 @@ class Migration(migrations.Migration):
("America/Iqaluit", "America/Iqaluit"),
("America/Jamaica", "America/Jamaica"),
("America/Juneau", "America/Juneau"),
- ("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
- ("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
+ (
+ "America/Kentucky/Louisville",
+ "America/Kentucky/Louisville",
+ ),
+ (
+ "America/Kentucky/Monticello",
+ "America/Kentucky/Monticello",
+ ),
("America/Kralendijk", "America/Kralendijk"),
("America/La_Paz", "America/La_Paz"),
("America/Lima", "America/Lima"),
@@ -1354,9 +1839,18 @@ class Migration(migrations.Migration):
("America/Nipigon", "America/Nipigon"),
("America/Nome", "America/Nome"),
("America/Noronha", "America/Noronha"),
- ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
- ("America/North_Dakota/Center", "America/North_Dakota/Center"),
- ("America/North_Dakota/New_Salem", "America/North_Dakota/New_Salem"),
+ (
+ "America/North_Dakota/Beulah",
+ "America/North_Dakota/Beulah",
+ ),
+ (
+ "America/North_Dakota/Center",
+ "America/North_Dakota/Center",
+ ),
+ (
+ "America/North_Dakota/New_Salem",
+ "America/North_Dakota/New_Salem",
+ ),
("America/Nuuk", "America/Nuuk"),
("America/Ojinaga", "America/Ojinaga"),
("America/Panama", "America/Panama"),
@@ -1643,11 +2137,17 @@ class Migration(migrations.Migration):
max_length=240,
),
),
- ("data_attributes", models.JSONField(default=posthog.models.team.team.get_default_data_attributes)),
+ (
+ "data_attributes",
+ models.JSONField(default=posthog.models.team.team.get_default_data_attributes),
+ ),
(
"person_display_name_properties",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=400), blank=True, null=True, size=None
+ base_field=models.CharField(max_length=400),
+ blank=True,
+ null=True,
+ size=None,
),
),
(
@@ -1659,11 +2159,20 @@ class Migration(migrations.Migration):
(
"recording_domains",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=200, null=True), blank=True, null=True, size=None
+ base_field=models.CharField(max_length=200, null=True),
+ blank=True,
+ null=True,
+ size=None,
),
),
- ("correlation_config", models.JSONField(blank=True, default=dict, null=True)),
- ("session_recording_retention_period_days", models.IntegerField(blank=True, default=None, null=True)),
+ (
+ "correlation_config",
+ models.JSONField(blank=True, default=dict, null=True),
+ ),
+ (
+ "session_recording_retention_period_days",
+ models.IntegerField(blank=True, default=None, null=True),
+ ),
("plugins_opt_in", models.BooleanField(default=False)),
("opt_out_capture", models.BooleanField(default=False)),
("event_names", models.JSONField(default=list)),
@@ -1720,16 +2229,36 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="UserPromptState",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
- ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "last_updated_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("step", models.IntegerField(default=None, null=True)),
("completed", models.BooleanField(default=False)),
("dismissed", models.BooleanField(default=False)),
(
"sequence",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.promptsequence"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.promptsequence",
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
@@ -1738,20 +2267,35 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("media_location", models.TextField(blank=True, max_length=1000, null=True)),
- ("content_type", models.TextField(blank=True, max_length=100, null=True)),
+ (
+ "media_location",
+ models.TextField(blank=True, max_length=1000, null=True),
+ ),
+ (
+ "content_type",
+ models.TextField(blank=True, max_length=100, null=True),
+ ),
("file_name", models.TextField(blank=True, max_length=1000, null=True)),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"abstract": False,
@@ -1760,13 +2304,27 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Text",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("body", models.CharField(blank=True, max_length=4000, null=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
@@ -1779,7 +2337,10 @@ class Migration(migrations.Migration):
to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.CreateModel(
@@ -1788,7 +2349,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -1844,7 +2408,9 @@ class Migration(migrations.Migration):
(
"tag",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="tagged_items", to="posthog.tag"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="tagged_items",
+ to="posthog.tag",
),
),
],
@@ -1857,12 +2423,25 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Subscription",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("title", models.CharField(blank=True, max_length=100, null=True)),
(
"target_type",
models.CharField(
- choices=[("email", "Email"), ("slack", "Slack"), ("webhook", "Webhook")], max_length=10
+ choices=[
+ ("email", "Email"),
+ ("slack", "Slack"),
+ ("webhook", "Webhook"),
+ ],
+ max_length=10,
),
),
("target_value", models.TextField()),
@@ -1910,24 +2489,46 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
"dashboard",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
(
"insight",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.CreateModel(
name="SharingConfiguration",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True)),
("enabled", models.BooleanField(default=False)),
(
@@ -1942,29 +2543,65 @@ class Migration(migrations.Migration):
),
(
"dashboard",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
(
"insight",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.CreateModel(
name="SessionRecordingViewed",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
("session_id", models.CharField(max_length=200)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
migrations.CreateModel(
name="SessionRecordingPlaylistItem",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("session_id", models.CharField(max_length=200)),
("created_at", models.DateTimeField(auto_now_add=True)),
("deleted", models.BooleanField(blank=True, null=True)),
@@ -1986,14 +2623,28 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingEvent",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
- ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)),
+ (
+ "timestamp",
+ models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
("distinct_id", models.CharField(max_length=200)),
("session_id", models.CharField(max_length=200)),
("window_id", models.CharField(blank=True, max_length=200, null=True)),
("snapshot_data", models.JSONField(default=dict)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddField(
@@ -2009,12 +2660,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PluginStorage",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("value", models.TextField(blank=True, null=True)),
(
"plugin_config",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.pluginconfig",
+ ),
),
],
),
@@ -2024,7 +2686,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("filename", models.CharField(max_length=200)),
@@ -2032,7 +2697,11 @@ class Migration(migrations.Migration):
(
"status",
models.CharField(
- choices=[("LOCKED", "locked"), ("TRANSPILED", "transpiled"), ("ERROR", "error")],
+ choices=[
+ ("LOCKED", "locked"),
+ ("TRANSPILED", "transpiled"),
+ ("ERROR", "error"),
+ ],
max_length=20,
null=True,
),
@@ -2040,18 +2709,33 @@ class Migration(migrations.Migration):
("transpiled", models.TextField(blank=True, null=True)),
("error", models.TextField(blank=True, null=True)),
("updated_at", models.DateTimeField(blank=True, null=True)),
- ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")),
+ (
+ "plugin",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"),
+ ),
],
),
migrations.AddField(
model_name="pluginconfig",
name="team",
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
+ ),
),
migrations.CreateModel(
name="PluginAttachment",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("content_type", models.CharField(max_length=200)),
("file_name", models.CharField(max_length=200)),
@@ -2060,22 +2744,46 @@ class Migration(migrations.Migration):
(
"plugin_config",
models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig"
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.pluginconfig",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
),
),
- ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.CreateModel(
name="PersonDistinctId",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("distinct_id", models.CharField(max_length=400)),
("version", models.BigIntegerField(blank=True, null=True)),
- ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")),
+ (
+ "person",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"),
+ ),
(
"team",
- models.ForeignKey(db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ models.ForeignKey(
+ db_index=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
+ ),
),
],
),
@@ -2092,8 +2800,20 @@ class Migration(migrations.Migration):
),
),
("label", models.CharField(max_length=40)),
- ("value", models.CharField(blank=True, editable=False, max_length=50, null=True, unique=True)),
- ("secure_value", models.CharField(editable=False, max_length=300, null=True, unique=True)),
+ (
+ "value",
+ models.CharField(
+ blank=True,
+ editable=False,
+ max_length=50,
+ null=True,
+ unique=True,
+ ),
+ ),
+ (
+ "secure_value",
+ models.CharField(editable=False, max_length=300, null=True, unique=True),
+ ),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
("last_used_at", models.DateTimeField(blank=True, null=True)),
(
@@ -2127,13 +2847,17 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
"level",
models.PositiveSmallIntegerField(
- choices=[(1, "member"), (8, "administrator"), (15, "owner")], default=1
+ choices=[(1, "member"), (8, "administrator"), (15, "owner")],
+ default=1,
),
),
("joined_at", models.DateTimeField(auto_now_add=True)),
@@ -2164,10 +2888,16 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
- ("target_email", models.EmailField(db_index=True, max_length=254, null=True)),
+ (
+ "target_email",
+ models.EmailField(db_index=True, max_length=254, null=True),
+ ),
("first_name", models.CharField(blank=True, default="", max_length=30)),
("emailing_attempt_made", models.BooleanField(default=False)),
("created_at", models.DateTimeField(auto_now_add=True)),
@@ -2203,27 +2933,45 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("domain", models.CharField(max_length=128, unique=True)),
(
"verification_challenge",
models.CharField(
- default=posthog.models.organization_domain.generate_verification_challenge, max_length=128
+ default=posthog.models.organization_domain.generate_verification_challenge,
+ max_length=128,
),
),
- ("verified_at", models.DateTimeField(blank=True, default=None, null=True)),
- ("last_verification_retry", models.DateTimeField(blank=True, default=None, null=True)),
+ (
+ "verified_at",
+ models.DateTimeField(blank=True, default=None, null=True),
+ ),
+ (
+ "last_verification_retry",
+ models.DateTimeField(blank=True, default=None, null=True),
+ ),
("jit_provisioning_enabled", models.BooleanField(default=False)),
("sso_enforcement", models.CharField(blank=True, max_length=28)),
- ("saml_entity_id", models.CharField(blank=True, max_length=512, null=True)),
- ("saml_acs_url", models.CharField(blank=True, max_length=512, null=True)),
+ (
+ "saml_entity_id",
+ models.CharField(blank=True, max_length=512, null=True),
+ ),
+ (
+ "saml_acs_url",
+ models.CharField(blank=True, max_length=512, null=True),
+ ),
("saml_x509_cert", models.TextField(blank=True, null=True)),
(
"organization",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="posthog.organization"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="domains",
+ to="posthog.organization",
),
),
],
@@ -2247,14 +2995,19 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("last_viewed_activity_date", models.DateTimeField(default=None)),
(
"user",
models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
],
@@ -2265,7 +3018,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("email_hash", models.CharField(max_length=1024)),
@@ -2280,7 +3036,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Integration",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("kind", models.CharField(choices=[("slack", "Slack")], max_length=10)),
("config", models.JSONField(default=dict)),
("sensitive_config", models.JSONField(default=dict)),
@@ -2289,10 +3053,16 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
@@ -2328,7 +3098,9 @@ class Migration(migrations.Migration):
model_name="insightcachingstate",
name="insight",
field=models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="caching_states", to="posthog.insight"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="caching_states",
+ to="posthog.insight",
),
),
migrations.AddField(
@@ -2340,7 +3112,10 @@ class Migration(migrations.Migration):
model_name="insight",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
@@ -2358,7 +3133,10 @@ class Migration(migrations.Migration):
model_name="insight",
name="dive_dashboard",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="posthog.dashboard",
),
),
migrations.AddField(
@@ -2426,18 +3204,29 @@ class Migration(migrations.Migration):
model_name="exportedasset",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="exportedasset",
name="dashboard",
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
migrations.AddField(
model_name="exportedasset",
name="insight",
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
),
migrations.AddField(
model_name="exportedasset",
@@ -2488,21 +3277,29 @@ class Migration(migrations.Migration):
model_name="element",
name="event",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.event"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.event",
),
),
migrations.AddField(
model_name="element",
name="group",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.elementgroup"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.elementgroup",
),
),
migrations.AddField(
model_name="dashboardtile",
name="dashboard",
field=models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="tiles", to="posthog.dashboard"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="tiles",
+ to="posthog.dashboard",
),
),
migrations.AddField(
@@ -2529,14 +3326,20 @@ class Migration(migrations.Migration):
model_name="dashboard",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="dashboard",
name="insights",
field=models.ManyToManyField(
- blank=True, related_name="dashboards", through="posthog.DashboardTile", to="posthog.Insight"
+ blank=True,
+ related_name="dashboards",
+ through="posthog.DashboardTile",
+ to="posthog.Insight",
),
),
migrations.AddField(
@@ -2558,7 +3361,10 @@ class Migration(migrations.Migration):
model_name="cohort",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
@@ -2580,27 +3386,39 @@ class Migration(migrations.Migration):
model_name="asyncdeletion",
name="created_by",
field=models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="annotation",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="annotation",
name="dashboard_item",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.insight"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="posthog.insight",
),
),
migrations.AddField(
model_name="annotation",
name="organization",
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.organization"),
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.organization",
+ ),
),
migrations.AddField(
model_name="annotation",
@@ -2611,21 +3429,28 @@ class Migration(migrations.Migration):
model_name="activitylog",
name="user",
field=models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
model_name="actionstep",
name="action",
field=models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="steps", to="posthog.action"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="steps",
+ to="posthog.action",
),
),
migrations.AddField(
model_name="action",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
@@ -2772,7 +3597,16 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name="taggeditem",
- unique_together={("tag", "dashboard", "insight", "event_definition", "property_definition", "action")},
+ unique_together={
+ (
+ "tag",
+ "dashboard",
+ "insight",
+ "event_definition",
+ "property_definition",
+ "action",
+ )
+ },
),
migrations.AlterUniqueTogether(
name="tag",
@@ -2780,7 +3614,10 @@ class Migration(migrations.Migration):
),
migrations.AddIndex(
model_name="sessionrecordingviewed",
- index=models.Index(fields=["team_id", "user_id", "session_id"], name="posthog_ses_team_id_465af1_idx"),
+ index=models.Index(
+ fields=["team_id", "user_id", "session_id"],
+ name="posthog_ses_team_id_465af1_idx",
+ ),
),
migrations.AlterUniqueTogether(
name="sessionrecordingviewed",
@@ -2801,14 +3638,17 @@ class Migration(migrations.Migration):
migrations.AddIndex(
model_name="sessionrecordingevent",
index=models.Index(
- fields=["team_id", "distinct_id", "timestamp", "session_id"], name="posthog_ses_team_id_46392f_idx"
+ fields=["team_id", "distinct_id", "timestamp", "session_id"],
+ name="posthog_ses_team_id_46392f_idx",
),
),
TrigramExtension(),
migrations.AddIndex(
model_name="propertydefinition",
index=django.contrib.postgres.indexes.GinIndex(
- fields=["name"], name="index_property_definition_name", opclasses=["gin_trgm_ops"]
+ fields=["name"],
+ name="index_property_definition_name",
+ opclasses=["gin_trgm_ops"],
),
),
migrations.AddConstraint(
@@ -2829,7 +3669,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="pluginstorage",
constraint=models.UniqueConstraint(
- fields=("plugin_config_id", "key"), name="posthog_unique_plugin_storage_key"
+ fields=("plugin_config_id", "key"),
+ name="posthog_unique_plugin_storage_key",
),
),
migrations.AddConstraint(
@@ -2851,13 +3692,16 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="organizationmembership",
constraint=models.UniqueConstraint(
- fields=("organization_id", "user_id"), name="unique_organization_membership"
+ fields=("organization_id", "user_id"),
+ name="unique_organization_membership",
),
),
migrations.AddConstraint(
model_name="organizationmembership",
constraint=models.UniqueConstraint(
- condition=models.Q(("level", 15)), fields=("organization_id",), name="only_one_owner_per_organization"
+ condition=models.Q(("level", 15)),
+ fields=("organization_id",),
+ name="only_one_owner_per_organization",
),
),
migrations.AddConstraint(
@@ -2874,7 +3718,10 @@ class Migration(migrations.Migration):
),
migrations.AddIndex(
model_name="insightviewed",
- index=models.Index(fields=["team_id", "user_id", "-last_viewed_at"], name="posthog_ins_team_id_339ee0_idx"),
+ index=models.Index(
+ fields=["team_id", "user_id", "-last_viewed_at"],
+ name="posthog_ins_team_id_339ee0_idx",
+ ),
),
migrations.AddConstraint(
model_name="insightviewed",
@@ -2903,13 +3750,15 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="grouptypemapping",
constraint=models.UniqueConstraint(
- fields=("team", "group_type_index"), name="unique event column indexes for team"
+ fields=("team", "group_type_index"),
+ name="unique event column indexes for team",
),
),
migrations.AddConstraint(
model_name="grouptypemapping",
constraint=models.CheckConstraint(
- check=models.Q(("group_type_index__lte", 5)), name="group_type_index is less than or equal 5"
+ check=models.Q(("group_type_index__lte", 5)),
+ name="group_type_index is less than or equal 5",
),
),
migrations.AddConstraint(
@@ -2922,13 +3771,15 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="featureflagoverride",
constraint=models.UniqueConstraint(
- fields=("user", "feature_flag", "team"), name="unique feature flag for a user/team combo"
+ fields=("user", "feature_flag", "team"),
+ name="unique feature flag for a user/team combo",
),
),
migrations.AddConstraint(
model_name="featureflaghashkeyoverride",
constraint=models.UniqueConstraint(
- fields=("team", "person", "feature_flag_key"), name="Unique hash_key for a user/team/feature_flag combo"
+ fields=("team", "person", "feature_flag_key"),
+ name="Unique hash_key for a user/team/feature_flag combo",
),
),
migrations.AddConstraint(
@@ -2946,13 +3797,16 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="eventproperty",
constraint=models.UniqueConstraint(
- fields=("team", "event", "property"), name="posthog_event_property_unique_team_event_property"
+ fields=("team", "event", "property"),
+ name="posthog_event_property_unique_team_event_property",
),
),
migrations.AddIndex(
model_name="eventdefinition",
index=django.contrib.postgres.indexes.GinIndex(
- fields=["name"], name="index_event_definition_name", opclasses=["gin_trgm_ops"]
+ fields=["name"],
+ name="index_event_definition_name",
+ opclasses=["gin_trgm_ops"],
),
),
migrations.AlterUniqueTogether(
@@ -2965,7 +3819,10 @@ class Migration(migrations.Migration):
),
migrations.AddIndex(
model_name="event",
- index=models.Index(fields=["timestamp", "team_id", "event"], name="posthog_eve_timesta_1f6a8c_idx"),
+ index=models.Index(
+ fields=["timestamp", "team_id", "event"],
+ name="posthog_eve_timesta_1f6a8c_idx",
+ ),
),
migrations.AddConstraint(
model_name="elementgroup",
@@ -2986,7 +3843,9 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="dashboardtile",
constraint=models.UniqueConstraint(
- condition=models.Q(("text__isnull", False)), fields=("dashboard", "text"), name="unique_dashboard_text"
+ condition=models.Q(("text__isnull", False)),
+ fields=("dashboard", "text"),
+ name="unique_dashboard_text",
),
),
migrations.AddConstraint(
@@ -3019,17 +3878,25 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="asyncdeletion",
constraint=models.UniqueConstraint(
- fields=("deletion_type", "key", "group_type_index"), name="unique deletion for groups"
+ fields=("deletion_type", "key", "group_type_index"),
+ name="unique deletion for groups",
),
),
migrations.AddIndex(
model_name="activitylog",
- index=models.Index(fields=["team_id", "scope", "item_id"], name="posthog_act_team_id_13a0a8_idx"),
+ index=models.Index(
+ fields=["team_id", "scope", "item_id"],
+ name="posthog_act_team_id_13a0a8_idx",
+ ),
),
migrations.AddConstraint(
model_name="activitylog",
constraint=models.CheckConstraint(
- check=models.Q(("team_id__isnull", False), ("organization_id__isnull", False), _connector="OR"),
+ check=models.Q(
+ ("team_id__isnull", False),
+ ("organization_id__isnull", False),
+ _connector="OR",
+ ),
name="must_have_team_or_organization_id",
),
),
@@ -3060,7 +3927,9 @@ class Migration(migrations.Migration):
model_name="persondistinctid",
name="team",
field=models.ForeignKey(
- db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"
+ db_index=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
),
),
],
diff --git a/posthog/migrations/0002_person.py b/posthog/migrations/0002_person.py
index b7c46ca5c2675..00676ae9e5077 100644
--- a/posthog/migrations/0002_person.py
+++ b/posthog/migrations/0002_person.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0001_initial"),
]
diff --git a/posthog/migrations/0003_person_is_user.py b/posthog/migrations/0003_person_is_user.py
index d894d52ddf69e..e9ebff11822ba 100644
--- a/posthog/migrations/0003_person_is_user.py
+++ b/posthog/migrations/0003_person_is_user.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0002_person"),
]
diff --git a/posthog/migrations/0004_auto_20200125_0415.py b/posthog/migrations/0004_auto_20200125_0415.py
index 2c182399e54d7..7a504bd6a4261 100644
--- a/posthog/migrations/0004_auto_20200125_0415.py
+++ b/posthog/migrations/0004_auto_20200125_0415.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0003_person_is_user"),
]
diff --git a/posthog/migrations/0005_remove_person_distinct_ids.py b/posthog/migrations/0005_remove_person_distinct_ids.py
index 8355585fac3e4..e1b6f7a96f75a 100644
--- a/posthog/migrations/0005_remove_person_distinct_ids.py
+++ b/posthog/migrations/0005_remove_person_distinct_ids.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0004_auto_20200125_0415"),
]
diff --git a/posthog/migrations/0006_person_distinct_ids.py b/posthog/migrations/0006_person_distinct_ids.py
index 4c13b697fa97b..c193d33a9916e 100644
--- a/posthog/migrations/0006_person_distinct_ids.py
+++ b/posthog/migrations/0006_person_distinct_ids.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0005_remove_person_distinct_ids"),
]
diff --git a/posthog/migrations/0007_element.py b/posthog/migrations/0007_element.py
index ca419df97b1cc..e190ac2f5bec5 100644
--- a/posthog/migrations/0007_element.py
+++ b/posthog/migrations/0007_element.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0006_person_distinct_ids"),
]
diff --git a/posthog/migrations/0008_action_actionstep.py b/posthog/migrations/0008_action_actionstep.py
index 4720db487a565..1d17b8f064470 100644
--- a/posthog/migrations/0008_action_actionstep.py
+++ b/posthog/migrations/0008_action_actionstep.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0007_element"),
]
diff --git a/posthog/migrations/0009_auto_20200127_0018.py b/posthog/migrations/0009_auto_20200127_0018.py
index 3319c69a0cb13..8828294ee9399 100644
--- a/posthog/migrations/0009_auto_20200127_0018.py
+++ b/posthog/migrations/0009_auto_20200127_0018.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0008_action_actionstep"),
]
diff --git a/posthog/migrations/0010_funnel_funnelstep.py b/posthog/migrations/0010_funnel_funnelstep.py
index 2d7d45f7ed572..3bcafa4035942 100644
--- a/posthog/migrations/0010_funnel_funnelstep.py
+++ b/posthog/migrations/0010_funnel_funnelstep.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0009_auto_20200127_0018"),
]
diff --git a/posthog/migrations/0011_auto_20200127_2105.py b/posthog/migrations/0011_auto_20200127_2105.py
index b52911c657af8..c9f83a19c57b4 100644
--- a/posthog/migrations/0011_auto_20200127_2105.py
+++ b/posthog/migrations/0011_auto_20200127_2105.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0010_funnel_funnelstep"),
]
diff --git a/posthog/migrations/0012_team_app_url.py b/posthog/migrations/0012_team_app_url.py
index dd087ab0d35fe..c6916c0818f36 100644
--- a/posthog/migrations/0012_team_app_url.py
+++ b/posthog/migrations/0012_team_app_url.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0011_auto_20200127_2105"),
]
diff --git a/posthog/migrations/0013_element_attr_class.py b/posthog/migrations/0013_element_attr_class.py
index 44d25b77c683b..b5ec08787553e 100644
--- a/posthog/migrations/0013_element_attr_class.py
+++ b/posthog/migrations/0013_element_attr_class.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0012_team_app_url"),
]
diff --git a/posthog/migrations/0014_auto_20200129_0703.py b/posthog/migrations/0014_auto_20200129_0703.py
index d936899632ca3..4be8151c42872 100644
--- a/posthog/migrations/0014_auto_20200129_0703.py
+++ b/posthog/migrations/0014_auto_20200129_0703.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0013_element_attr_class"),
]
diff --git a/posthog/migrations/0015_actionstep_event.py b/posthog/migrations/0015_actionstep_event.py
index d2eef953a2311..7b2927a2813b2 100644
--- a/posthog/migrations/0015_actionstep_event.py
+++ b/posthog/migrations/0015_actionstep_event.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0014_auto_20200129_0703"),
]
diff --git a/posthog/migrations/0016_user_temporary_token.py b/posthog/migrations/0016_user_temporary_token.py
index 926e5aaef0bbd..d2bcda6de4e23 100644
--- a/posthog/migrations/0016_user_temporary_token.py
+++ b/posthog/migrations/0016_user_temporary_token.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0015_actionstep_event"),
]
diff --git a/posthog/migrations/0017_dashboarditem.py b/posthog/migrations/0017_dashboarditem.py
index b6ee906d4c455..51eb088ba2840 100644
--- a/posthog/migrations/0017_dashboarditem.py
+++ b/posthog/migrations/0017_dashboarditem.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0016_user_temporary_token"),
]
diff --git a/posthog/migrations/0018_funnel_deleted.py b/posthog/migrations/0018_funnel_deleted.py
index 6560947c7d27b..43f596fcf7376 100644
--- a/posthog/migrations/0018_funnel_deleted.py
+++ b/posthog/migrations/0018_funnel_deleted.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0017_dashboarditem"),
]
diff --git a/posthog/migrations/0019_team_name.py b/posthog/migrations/0019_team_name.py
index b3328cc23fb81..5b73755da2f73 100644
--- a/posthog/migrations/0019_team_name.py
+++ b/posthog/migrations/0019_team_name.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0018_funnel_deleted"),
]
diff --git a/posthog/migrations/0020_auto_20200210_0212.py b/posthog/migrations/0020_auto_20200210_0212.py
index c5737413424f1..f9278fecde9a9 100644
--- a/posthog/migrations/0020_auto_20200210_0212.py
+++ b/posthog/migrations/0020_auto_20200210_0212.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0019_team_name"),
]
diff --git a/posthog/migrations/0021_user_distinct_id.py b/posthog/migrations/0021_user_distinct_id.py
index 062702ddbdc03..95881bcc9225d 100644
--- a/posthog/migrations/0021_user_distinct_id.py
+++ b/posthog/migrations/0021_user_distinct_id.py
@@ -18,7 +18,6 @@ def reverse_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0020_auto_20200210_0212"),
]
diff --git a/posthog/migrations/0022_action_deleted.py b/posthog/migrations/0022_action_deleted.py
index a9878f6b01754..d9340dc0be1ea 100644
--- a/posthog/migrations/0022_action_deleted.py
+++ b/posthog/migrations/0022_action_deleted.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0021_user_distinct_id"),
]
diff --git a/posthog/migrations/0023_team_opt_out_capture.py b/posthog/migrations/0023_team_opt_out_capture.py
index ecd04b3b91dda..25dab1b607a4c 100644
--- a/posthog/migrations/0023_team_opt_out_capture.py
+++ b/posthog/migrations/0023_team_opt_out_capture.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0022_action_deleted"),
]
diff --git a/posthog/migrations/0025_cohort.py b/posthog/migrations/0025_cohort.py
index 06f93085d95a9..e330d91cb45ce 100644
--- a/posthog/migrations/0025_cohort.py
+++ b/posthog/migrations/0025_cohort.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0024_add_event_distinct_id_index"),
]
diff --git a/posthog/migrations/0027_move_elements_to_group.py b/posthog/migrations/0027_move_elements_to_group.py
index a52d14506c71a..51a65b1f5da39 100644
--- a/posthog/migrations/0027_move_elements_to_group.py
+++ b/posthog/migrations/0027_move_elements_to_group.py
@@ -61,5 +61,10 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunPython(forwards, reverse_code=backwards, hints={"target_db": "default"}, elidable=True),
+ migrations.RunPython(
+ forwards,
+ reverse_code=backwards,
+ hints={"target_db": "default"},
+ elidable=True,
+ ),
]
diff --git a/posthog/migrations/0028_actionstep_url_matching.py b/posthog/migrations/0028_actionstep_url_matching.py
index a127710db6dbf..4100495be04e0 100644
--- a/posthog/migrations/0028_actionstep_url_matching.py
+++ b/posthog/migrations/0028_actionstep_url_matching.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0027_move_elements_to_group"),
]
diff --git a/posthog/migrations/0029_migrate_dashboard_actions.py b/posthog/migrations/0029_migrate_dashboard_actions.py
index 89e8eea0ecbc9..7ddce5a0bce9d 100644
--- a/posthog/migrations/0029_migrate_dashboard_actions.py
+++ b/posthog/migrations/0029_migrate_dashboard_actions.py
@@ -18,7 +18,6 @@ def migrate_to_array(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0028_actionstep_url_matching"),
]
diff --git a/posthog/migrations/0030_migrate_dashboard_days.py b/posthog/migrations/0030_migrate_dashboard_days.py
index 4edf8c1a38633..a3516a251ef6e 100644
--- a/posthog/migrations/0030_migrate_dashboard_days.py
+++ b/posthog/migrations/0030_migrate_dashboard_days.py
@@ -18,7 +18,6 @@ def migrate_to_array(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0029_migrate_dashboard_actions"),
]
diff --git a/posthog/migrations/0031_team_signup_token.py b/posthog/migrations/0031_team_signup_token.py
index 63b6e208d5322..3e8a66ff19931 100644
--- a/posthog/migrations/0031_team_signup_token.py
+++ b/posthog/migrations/0031_team_signup_token.py
@@ -17,7 +17,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0030_migrate_dashboard_days"),
]
diff --git a/posthog/migrations/0032_team_multiple_app_urls.py b/posthog/migrations/0032_team_multiple_app_urls.py
index 7efbf6b31218c..13173c75b6376 100644
--- a/posthog/migrations/0032_team_multiple_app_urls.py
+++ b/posthog/migrations/0032_team_multiple_app_urls.py
@@ -5,7 +5,6 @@
def migrate_to_array(apps, schema_editor):
-
Team = apps.get_model("posthog", "Team")
for mm in Team.objects.all():
@@ -14,7 +13,6 @@ def migrate_to_array(apps, schema_editor):
def rollback_to_string(apps, schema_editor):
-
Team = apps.get_model("posthog", "Team")
for mm in Team.objects.all():
@@ -23,7 +21,6 @@ def rollback_to_string(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0031_team_signup_token"),
]
diff --git a/posthog/migrations/0033_auto_20200316_1655.py b/posthog/migrations/0033_auto_20200316_1655.py
index b51694a2941b3..83fa47260ad7a 100644
--- a/posthog/migrations/0033_auto_20200316_1655.py
+++ b/posthog/migrations/0033_auto_20200316_1655.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0032_team_multiple_app_urls"),
]
diff --git a/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py b/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py
index 4fa956d1b61be..a48ff23a7ffc5 100644
--- a/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py
+++ b/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0033_auto_20200316_1655"),
]
diff --git a/posthog/migrations/0036_remove_current_url_index.py b/posthog/migrations/0036_remove_current_url_index.py
index a705491e1bb18..00d8ed6871125 100644
--- a/posthog/migrations/0036_remove_current_url_index.py
+++ b/posthog/migrations/0036_remove_current_url_index.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0035_current_url_index_20200318_1459"),
]
diff --git a/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py b/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py
index 66be9b4b9bc3a..b69d237cf84ec 100644
--- a/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py
+++ b/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0036_remove_current_url_index"),
]
diff --git a/posthog/migrations/0038_migrate_actions_to_precalculate_events.py b/posthog/migrations/0038_migrate_actions_to_precalculate_events.py
index 172ce9d3e81ef..3cbb4c8e06082 100644
--- a/posthog/migrations/0038_migrate_actions_to_precalculate_events.py
+++ b/posthog/migrations/0038_migrate_actions_to_precalculate_events.py
@@ -12,7 +12,6 @@ def rollback(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0037_action_step_url_matching_can_be_null_20200402_1351"),
]
diff --git a/posthog/migrations/0039_populate_event_ip_property.py b/posthog/migrations/0039_populate_event_ip_property.py
index 9fb98ba2a7f30..d1e4166a1982e 100644
--- a/posthog/migrations/0039_populate_event_ip_property.py
+++ b/posthog/migrations/0039_populate_event_ip_property.py
@@ -2,7 +2,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0038_migrate_actions_to_precalculate_events"),
]
diff --git a/posthog/migrations/0039_user_email_opt_in.py b/posthog/migrations/0039_user_email_opt_in.py
index f5132cde4b4f6..3f8a25572715b 100644
--- a/posthog/migrations/0039_user_email_opt_in.py
+++ b/posthog/migrations/0039_user_email_opt_in.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0038_migrate_actions_to_precalculate_events"),
]
diff --git a/posthog/migrations/0040_remove_event_ip.py b/posthog/migrations/0040_remove_event_ip.py
index 741cf5232a3df..944ae3c63c847 100644
--- a/posthog/migrations/0040_remove_event_ip.py
+++ b/posthog/migrations/0040_remove_event_ip.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0039_populate_event_ip_property"),
]
diff --git a/posthog/migrations/0041_merge_20200407_1805.py b/posthog/migrations/0041_merge_20200407_1805.py
index aa209ddaadd9a..a73e12aa07488 100644
--- a/posthog/migrations/0041_merge_20200407_1805.py
+++ b/posthog/migrations/0041_merge_20200407_1805.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0040_remove_event_ip"),
("posthog", "0039_user_email_opt_in"),
diff --git a/posthog/migrations/0042_add_type_dashboarditems.py b/posthog/migrations/0042_add_type_dashboarditems.py
index 37728e73ebb87..33a721640d5e8 100644
--- a/posthog/migrations/0042_add_type_dashboarditems.py
+++ b/posthog/migrations/0042_add_type_dashboarditems.py
@@ -33,7 +33,6 @@ def reverse_filter_types(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0041_merge_20200407_1805"),
]
diff --git a/posthog/migrations/0043_slack_webhooks.py b/posthog/migrations/0043_slack_webhooks.py
index bc68ade92b1e5..097da3742916d 100644
--- a/posthog/migrations/0043_slack_webhooks.py
+++ b/posthog/migrations/0043_slack_webhooks.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0042_add_type_dashboarditems"),
]
diff --git a/posthog/migrations/0044_auto_20200413_1936.py b/posthog/migrations/0044_auto_20200413_1936.py
index eda91a6b76ae0..fdda5eaf560ec 100644
--- a/posthog/migrations/0044_auto_20200413_1936.py
+++ b/posthog/migrations/0044_auto_20200413_1936.py
@@ -33,7 +33,6 @@ def revert_funnel_steps(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0043_slack_webhooks"),
]
diff --git a/posthog/migrations/0045_add_timestamp_index.py b/posthog/migrations/0045_add_timestamp_index.py
index 497a8dca04f7d..b6598fe802492 100644
--- a/posthog/migrations/0045_add_timestamp_index.py
+++ b/posthog/migrations/0045_add_timestamp_index.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0044_auto_20200413_1936"),
]
diff --git a/posthog/migrations/0046_event_names_properties_to_team.py b/posthog/migrations/0046_event_names_properties_to_team.py
index a90b2c2e1c526..7350a5a8de701 100644
--- a/posthog/migrations/0046_event_names_properties_to_team.py
+++ b/posthog/migrations/0046_event_names_properties_to_team.py
@@ -30,7 +30,6 @@ def noop(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0045_add_timestamp_index"),
]
diff --git a/posthog/migrations/0047_auto_20200416_1631.py b/posthog/migrations/0047_auto_20200416_1631.py
index 2e2c359df1d3b..fd236cf551358 100644
--- a/posthog/migrations/0047_auto_20200416_1631.py
+++ b/posthog/migrations/0047_auto_20200416_1631.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0046_event_names_properties_to_team"),
]
diff --git a/posthog/migrations/0048_auto_20200420_1051.py b/posthog/migrations/0048_auto_20200420_1051.py
index e29ed78cc695f..29248b69fb693 100644
--- a/posthog/migrations/0048_auto_20200420_1051.py
+++ b/posthog/migrations/0048_auto_20200420_1051.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0047_auto_20200416_1631"),
]
diff --git a/posthog/migrations/0049_delete_funnelstep.py b/posthog/migrations/0049_delete_funnelstep.py
index a29722f0e313a..e66988922c931 100644
--- a/posthog/migrations/0049_delete_funnelstep.py
+++ b/posthog/migrations/0049_delete_funnelstep.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0048_auto_20200420_1051"),
]
diff --git a/posthog/migrations/0050_dashboards.py b/posthog/migrations/0050_dashboards.py
index 8d74bcf7394e7..70d1559591be7 100644
--- a/posthog/migrations/0050_dashboards.py
+++ b/posthog/migrations/0050_dashboards.py
@@ -26,7 +26,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0049_delete_funnelstep"),
]
@@ -73,7 +72,12 @@ class Migration(migrations.Migration):
to="posthog.Dashboard",
),
),
- migrations.RunPython(forwards, reverse_code=backwards, hints={"target_db": "default"}, elidable=True),
+ migrations.RunPython(
+ forwards,
+ reverse_code=backwards,
+ hints={"target_db": "default"},
+ elidable=True,
+ ),
migrations.AlterField(
model_name="dashboarditem",
name="dashboard",
diff --git a/posthog/migrations/0051_precalculate_cohorts.py b/posthog/migrations/0051_precalculate_cohorts.py
index 4e4f2224c6927..0d2e2f83981b1 100644
--- a/posthog/migrations/0051_precalculate_cohorts.py
+++ b/posthog/migrations/0051_precalculate_cohorts.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0050_dashboards"),
]
diff --git a/posthog/migrations/0052_data_precalculate_cohorts.py b/posthog/migrations/0052_data_precalculate_cohorts.py
index b6e01cd1e2a42..d2c7990399a2b 100644
--- a/posthog/migrations/0052_data_precalculate_cohorts.py
+++ b/posthog/migrations/0052_data_precalculate_cohorts.py
@@ -15,7 +15,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0051_precalculate_cohorts"),
]
diff --git a/posthog/migrations/0053_dashboard_item_layouts.py b/posthog/migrations/0053_dashboard_item_layouts.py
index 77fcfa4df7275..f514a48fc5352 100644
--- a/posthog/migrations/0053_dashboard_item_layouts.py
+++ b/posthog/migrations/0053_dashboard_item_layouts.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0052_data_precalculate_cohorts"),
]
diff --git a/posthog/migrations/0054_dashboard_item_color.py b/posthog/migrations/0054_dashboard_item_color.py
index 052811a71b0c0..1ba803e0baa84 100644
--- a/posthog/migrations/0054_dashboard_item_color.py
+++ b/posthog/migrations/0054_dashboard_item_color.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0053_dashboard_item_layouts"),
]
diff --git a/posthog/migrations/0055_user_anonymize_data.py b/posthog/migrations/0055_user_anonymize_data.py
index cea3694da7149..412e2f08666b9 100644
--- a/posthog/migrations/0055_user_anonymize_data.py
+++ b/posthog/migrations/0055_user_anonymize_data.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0054_dashboard_item_color"),
]
diff --git a/posthog/migrations/0056_auto_20200522_1024.py b/posthog/migrations/0056_auto_20200522_1024.py
index 861d269961096..ae09c0519a391 100644
--- a/posthog/migrations/0056_auto_20200522_1024.py
+++ b/posthog/migrations/0056_auto_20200522_1024.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0055_user_anonymize_data"),
]
diff --git a/posthog/migrations/0057_action_updated_at.py b/posthog/migrations/0057_action_updated_at.py
index 2cdf65ea2b683..1acfa631c6425 100644
--- a/posthog/migrations/0057_action_updated_at.py
+++ b/posthog/migrations/0057_action_updated_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0056_auto_20200522_1024"),
]
diff --git a/posthog/migrations/0058_dashboarditem_last_refresh.py b/posthog/migrations/0058_dashboarditem_last_refresh.py
index ad71e982e45ca..428995375c6e4 100644
--- a/posthog/migrations/0058_dashboarditem_last_refresh.py
+++ b/posthog/migrations/0058_dashboarditem_last_refresh.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0057_action_updated_at"),
]
diff --git a/posthog/migrations/0059_dashboarditem_refreshing.py b/posthog/migrations/0059_dashboarditem_refreshing.py
index f5e82621b32a0..1a873e17817b2 100644
--- a/posthog/migrations/0059_dashboarditem_refreshing.py
+++ b/posthog/migrations/0059_dashboarditem_refreshing.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0058_dashboarditem_last_refresh"),
]
diff --git a/posthog/migrations/0060_auto_20200616_0746.py b/posthog/migrations/0060_auto_20200616_0746.py
index 3c6acbdc31a2a..cfea859ea3f47 100644
--- a/posthog/migrations/0060_auto_20200616_0746.py
+++ b/posthog/migrations/0060_auto_20200616_0746.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0059_dashboarditem_refreshing"),
]
diff --git a/posthog/migrations/0061_featureflag.py b/posthog/migrations/0061_featureflag.py
index ee2456e8c172c..d7a05ea799558 100644
--- a/posthog/migrations/0061_featureflag.py
+++ b/posthog/migrations/0061_featureflag.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0060_auto_20200616_0746"),
]
diff --git a/posthog/migrations/0062_team_anonymize_ips.py b/posthog/migrations/0062_team_anonymize_ips.py
index cdd7fd305169f..d5234b8b47310 100644
--- a/posthog/migrations/0062_team_anonymize_ips.py
+++ b/posthog/migrations/0062_team_anonymize_ips.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0061_featureflag"),
]
diff --git a/posthog/migrations/0063_team_completed_snippet_onboarding.py b/posthog/migrations/0063_team_completed_snippet_onboarding.py
index d9341aead8321..1a3819af10dae 100644
--- a/posthog/migrations/0063_team_completed_snippet_onboarding.py
+++ b/posthog/migrations/0063_team_completed_snippet_onboarding.py
@@ -17,7 +17,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0062_team_anonymize_ips"),
]
diff --git a/posthog/migrations/0064_toolbar_mode.py b/posthog/migrations/0064_toolbar_mode.py
index e8031d315fc79..566ad520e284e 100644
--- a/posthog/migrations/0064_toolbar_mode.py
+++ b/posthog/migrations/0064_toolbar_mode.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0063_team_completed_snippet_onboarding"),
]
diff --git a/posthog/migrations/0065_auto_20200624_1842.py b/posthog/migrations/0065_auto_20200624_1842.py
index c44d120ca2517..6941eb7672045 100644
--- a/posthog/migrations/0065_auto_20200624_1842.py
+++ b/posthog/migrations/0065_auto_20200624_1842.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0064_toolbar_mode"),
]
diff --git a/posthog/migrations/0066_team_created_at.py b/posthog/migrations/0066_team_created_at.py
index e2b76d8be195c..0e9b0f2f0c4bd 100644
--- a/posthog/migrations/0066_team_created_at.py
+++ b/posthog/migrations/0066_team_created_at.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0065_auto_20200624_1842"),
]
diff --git a/posthog/migrations/0067_team_updated_at.py b/posthog/migrations/0067_team_updated_at.py
index 31dfbb25225ed..5545097a96e05 100644
--- a/posthog/migrations/0067_team_updated_at.py
+++ b/posthog/migrations/0067_team_updated_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0066_team_created_at"),
]
diff --git a/posthog/migrations/0068_auto_20200629_1322.py b/posthog/migrations/0068_auto_20200629_1322.py
index 216a5c3f1c499..fc6c23eb79aa5 100644
--- a/posthog/migrations/0068_auto_20200629_1322.py
+++ b/posthog/migrations/0068_auto_20200629_1322.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0067_team_updated_at"),
]
diff --git a/posthog/migrations/0069_auto_20200714_1642.py b/posthog/migrations/0069_auto_20200714_1642.py
index ff7cb3d92860a..a9a8091b0d32e 100644
--- a/posthog/migrations/0069_auto_20200714_1642.py
+++ b/posthog/migrations/0069_auto_20200714_1642.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0068_auto_20200629_1322"),
]
diff --git a/posthog/migrations/0070_team_event_properties_numerical.py b/posthog/migrations/0070_team_event_properties_numerical.py
index 3b0d300c5785a..032e9a336179f 100644
--- a/posthog/migrations/0070_team_event_properties_numerical.py
+++ b/posthog/migrations/0070_team_event_properties_numerical.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0069_auto_20200714_1642"),
]
diff --git a/posthog/migrations/0071_cache_dashboard_items.py b/posthog/migrations/0071_cache_dashboard_items.py
index 909daf452e801..9b9d1b2b72a78 100644
--- a/posthog/migrations/0071_cache_dashboard_items.py
+++ b/posthog/migrations/0071_cache_dashboard_items.py
@@ -30,7 +30,6 @@ def reverse_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0070_team_event_properties_numerical"),
]
diff --git a/posthog/migrations/0072_action_step_url_matching_regex.py b/posthog/migrations/0072_action_step_url_matching_regex.py
index e7a624c3e06fb..2afb583ce5097 100644
--- a/posthog/migrations/0072_action_step_url_matching_regex.py
+++ b/posthog/migrations/0072_action_step_url_matching_regex.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0071_cache_dashboard_items"),
]
diff --git a/posthog/migrations/0073_update_dashboard_item_filters.py b/posthog/migrations/0073_update_dashboard_item_filters.py
index 3f204ce499a30..d4310df88ef1c 100644
--- a/posthog/migrations/0073_update_dashboard_item_filters.py
+++ b/posthog/migrations/0073_update_dashboard_item_filters.py
@@ -20,7 +20,6 @@ def reverse_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0072_action_step_url_matching_regex"),
]
diff --git a/posthog/migrations/0074_toolbar_default_on.py b/posthog/migrations/0074_toolbar_default_on.py
index 2804bc5edb060..9fcc672bb598f 100644
--- a/posthog/migrations/0074_toolbar_default_on.py
+++ b/posthog/migrations/0074_toolbar_default_on.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0073_update_dashboard_item_filters"),
]
diff --git a/posthog/migrations/0075_action_slack_message_format.py b/posthog/migrations/0075_action_slack_message_format.py
index 7404c7965efd9..9eb7d8c7cda26 100644
--- a/posthog/migrations/0075_action_slack_message_format.py
+++ b/posthog/migrations/0075_action_slack_message_format.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0074_toolbar_default_on"),
]
diff --git a/posthog/migrations/0076_auto_20200819_1214.py b/posthog/migrations/0076_auto_20200819_1214.py
index d450022fc29bc..a02021d092451 100644
--- a/posthog/migrations/0076_auto_20200819_1214.py
+++ b/posthog/migrations/0076_auto_20200819_1214.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0075_action_slack_message_format"),
]
diff --git a/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py b/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py
index 4c80a3f2f2469..ae77683b0ce12 100644
--- a/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py
+++ b/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0076_auto_20200819_1214"),
]
diff --git a/posthog/migrations/0078_auto_20200731_1323.py b/posthog/migrations/0078_auto_20200731_1323.py
index 0d171ef26858e..0a67bdd4874ad 100644
--- a/posthog/migrations/0078_auto_20200731_1323.py
+++ b/posthog/migrations/0078_auto_20200731_1323.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0077_cohortpeople_id_to_bigautofield"),
]
diff --git a/posthog/migrations/0079_move_funnels_to_insights.py b/posthog/migrations/0079_move_funnels_to_insights.py
index 9a381f2af3b26..d4466b0cccb5f 100644
--- a/posthog/migrations/0079_move_funnels_to_insights.py
+++ b/posthog/migrations/0079_move_funnels_to_insights.py
@@ -39,7 +39,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0078_auto_20200731_1323"),
]
diff --git a/posthog/migrations/0080_update_dashboard_funnel_filters.py b/posthog/migrations/0080_update_dashboard_funnel_filters.py
index db3f6771e8ff6..09bdafafbb262 100644
--- a/posthog/migrations/0080_update_dashboard_funnel_filters.py
+++ b/posthog/migrations/0080_update_dashboard_funnel_filters.py
@@ -37,7 +37,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0079_move_funnels_to_insights"),
]
diff --git a/posthog/migrations/0081_person_is_identified.py b/posthog/migrations/0081_person_is_identified.py
index d5b37b3255d6c..40443fd8b91ff 100644
--- a/posthog/migrations/0081_person_is_identified.py
+++ b/posthog/migrations/0081_person_is_identified.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0080_update_dashboard_funnel_filters"),
]
diff --git a/posthog/migrations/0082_personalapikey.py b/posthog/migrations/0082_personalapikey.py
index ce4ad061c3928..95520b06dd452 100644
--- a/posthog/migrations/0082_personalapikey.py
+++ b/posthog/migrations/0082_personalapikey.py
@@ -9,7 +9,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0081_person_is_identified"),
]
diff --git a/posthog/migrations/0083_auto_20200826_1504.py b/posthog/migrations/0083_auto_20200826_1504.py
index aa0ba2b90251e..07ccc49fc93a9 100644
--- a/posthog/migrations/0083_auto_20200826_1504.py
+++ b/posthog/migrations/0083_auto_20200826_1504.py
@@ -15,7 +15,6 @@ def create_uuid(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0082_personalapikey"),
]
diff --git a/posthog/migrations/0084_person_uuid.py b/posthog/migrations/0084_person_uuid.py
index f32f3e14ae3a9..f6bd7480de94e 100644
--- a/posthog/migrations/0084_person_uuid.py
+++ b/posthog/migrations/0084_person_uuid.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0083_auto_20200826_1504"),
]
diff --git a/posthog/migrations/0085_org_models.py b/posthog/migrations/0085_org_models.py
index 25e8b90aefd2c..7483d43a62394 100644
--- a/posthog/migrations/0085_org_models.py
+++ b/posthog/migrations/0085_org_models.py
@@ -51,7 +51,6 @@ def reverse_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0084_person_uuid"),
]
diff --git a/posthog/migrations/0086_team_session_recording_opt_in.py b/posthog/migrations/0086_team_session_recording_opt_in.py
index f26a94a383e7b..4fcd8c23c6d47 100644
--- a/posthog/migrations/0086_team_session_recording_opt_in.py
+++ b/posthog/migrations/0086_team_session_recording_opt_in.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0085_org_models"),
]
diff --git a/posthog/migrations/0087_fix_annotation_created_at.py b/posthog/migrations/0087_fix_annotation_created_at.py
index 0c4483174d1c4..3b77ccea233ae 100644
--- a/posthog/migrations/0087_fix_annotation_created_at.py
+++ b/posthog/migrations/0087_fix_annotation_created_at.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0086_team_session_recording_opt_in"),
]
diff --git a/posthog/migrations/0088_toolbar_disabled.py b/posthog/migrations/0088_toolbar_disabled.py
index 071ba75c8dcfd..64a2f05a274b5 100644
--- a/posthog/migrations/0088_toolbar_disabled.py
+++ b/posthog/migrations/0088_toolbar_disabled.py
@@ -15,7 +15,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0087_fix_annotation_created_at"),
]
diff --git a/posthog/migrations/0089_auto_20201015_1031.py b/posthog/migrations/0089_auto_20201015_1031.py
index 72b6195c5d0d7..f19ad04b047d2 100644
--- a/posthog/migrations/0089_auto_20201015_1031.py
+++ b/posthog/migrations/0089_auto_20201015_1031.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0088_toolbar_disabled"),
]
diff --git a/posthog/migrations/0090_org_live.py b/posthog/migrations/0090_org_live.py
index 79dd818bd2d86..d63d3f545de00 100644
--- a/posthog/migrations/0090_org_live.py
+++ b/posthog/migrations/0090_org_live.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0089_auto_20201015_1031"),
]
@@ -29,14 +28,19 @@ class Migration(migrations.Migration):
model_name="team",
name="api_token",
field=models.CharField(
- default=posthog.models.utils.generate_random_token, max_length=200, null=True, unique=True
+ default=posthog.models.utils.generate_random_token,
+ max_length=200,
+ null=True,
+ unique=True,
),
),
migrations.AlterField(
model_name="team",
name="users",
field=models.ManyToManyField(
- blank=True, related_name="teams_deprecated_relationship", to=settings.AUTH_USER_MODEL
+ blank=True,
+ related_name="teams_deprecated_relationship",
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
diff --git a/posthog/migrations/0091_messagingrecord.py b/posthog/migrations/0091_messagingrecord.py
index f00d2ee1da4b5..669c582ed6893 100644
--- a/posthog/migrations/0091_messagingrecord.py
+++ b/posthog/migrations/0091_messagingrecord.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0090_org_live"),
]
@@ -18,7 +17,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("email_hash", models.CharField(max_length=1024)),
diff --git a/posthog/migrations/0093_remove_user_is_superuser.py b/posthog/migrations/0093_remove_user_is_superuser.py
index 4b797ea341ab8..c56685c477ea8 100644
--- a/posthog/migrations/0093_remove_user_is_superuser.py
+++ b/posthog/migrations/0093_remove_user_is_superuser.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0092_rename_projects_to_default"),
]
diff --git a/posthog/migrations/0094_description_on_dashboard_items.py b/posthog/migrations/0094_description_on_dashboard_items.py
index 612b57160c044..24dde3b3926f4 100644
--- a/posthog/migrations/0094_description_on_dashboard_items.py
+++ b/posthog/migrations/0094_description_on_dashboard_items.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0093_remove_user_is_superuser"),
]
diff --git a/posthog/migrations/0095_session_recording_event_table.py b/posthog/migrations/0095_session_recording_event_table.py
index a972e792e3041..94b9d9848dc23 100644
--- a/posthog/migrations/0095_session_recording_event_table.py
+++ b/posthog/migrations/0095_session_recording_event_table.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0094_description_on_dashboard_items"),
]
@@ -16,13 +15,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingEvent",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
- ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)),
+ (
+ "timestamp",
+ models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
("session_id", models.CharField(max_length=200)),
("distinct_id", models.CharField(max_length=200)),
- ("snapshot_data", django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")),
+ (
+ "snapshot_data",
+ django.contrib.postgres.fields.jsonb.JSONField(default=dict),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"),
+ ),
],
),
migrations.AddIndex(
@@ -32,7 +48,8 @@ class Migration(migrations.Migration):
migrations.AddIndex(
model_name="sessionrecordingevent",
index=models.Index(
- fields=["team_id", "distinct_id", "timestamp", "session_id"], name="posthog_ses_team_id_46392f_idx"
+ fields=["team_id", "distinct_id", "timestamp", "session_id"],
+ name="posthog_ses_team_id_46392f_idx",
),
),
]
diff --git a/posthog/migrations/0096_plugins.py b/posthog/migrations/0096_plugins.py
index 765e13e122a65..8ae7167596371 100644
--- a/posthog/migrations/0096_plugins.py
+++ b/posthog/migrations/0096_plugins.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0095_session_recording_event_table"),
]
diff --git a/posthog/migrations/0097_invite_emails.py b/posthog/migrations/0097_invite_emails.py
index f12ac859111ef..ff6aa476f5e30 100644
--- a/posthog/migrations/0097_invite_emails.py
+++ b/posthog/migrations/0097_invite_emails.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0096_plugins"),
]
diff --git a/posthog/migrations/0098_events_property_usage.py b/posthog/migrations/0098_events_property_usage.py
index ed45de9c99645..99f56f0bb1387 100644
--- a/posthog/migrations/0098_events_property_usage.py
+++ b/posthog/migrations/0098_events_property_usage.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0097_invite_emails"),
]
diff --git a/posthog/migrations/0099_plugin_attachment.py b/posthog/migrations/0099_plugin_attachment.py
index 6cb474f0a6bac..4f49ebf712858 100644
--- a/posthog/migrations/0099_plugin_attachment.py
+++ b/posthog/migrations/0099_plugin_attachment.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0098_events_property_usage"),
]
@@ -14,7 +13,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PluginAttachment",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("content_type", models.CharField(max_length=200)),
("file_name", models.CharField(max_length=200)),
@@ -22,9 +29,19 @@ class Migration(migrations.Migration):
("contents", models.BinaryField()),
(
"plugin_config",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.PluginConfig",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.Team",
+ ),
),
- ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")),
],
),
]
diff --git a/posthog/migrations/0100_action_step_max_length.py b/posthog/migrations/0100_action_step_max_length.py
index a14256c139273..f81ac84ae64d3 100644
--- a/posthog/migrations/0100_action_step_max_length.py
+++ b/posthog/migrations/0100_action_step_max_length.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0099_plugin_attachment"),
]
diff --git a/posthog/migrations/0101_org_owners.py b/posthog/migrations/0101_org_owners.py
index f9e04e97fb4e1..93effd980a703 100644
--- a/posthog/migrations/0101_org_owners.py
+++ b/posthog/migrations/0101_org_owners.py
@@ -20,7 +20,6 @@ def make_owners_administrators_again(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0100_action_step_max_length"),
]
@@ -36,8 +35,14 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="organizationmembership",
constraint=models.UniqueConstraint(
- condition=models.Q(level=15), fields=("organization_id",), name="only_one_owner_per_organization"
+ condition=models.Q(level=15),
+ fields=("organization_id",),
+ name="only_one_owner_per_organization",
),
),
- migrations.RunPython(make_first_administrators_owners, make_owners_administrators_again, elidable=True),
+ migrations.RunPython(
+ make_first_administrators_owners,
+ make_owners_administrators_again,
+ elidable=True,
+ ),
]
diff --git a/posthog/migrations/0102_dashboarditem_filters_hash.py b/posthog/migrations/0102_dashboarditem_filters_hash.py
index 188859c466359..9d3d12c9b6e88 100644
--- a/posthog/migrations/0102_dashboarditem_filters_hash.py
+++ b/posthog/migrations/0102_dashboarditem_filters_hash.py
@@ -24,7 +24,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0101_org_owners"),
]
diff --git a/posthog/migrations/0103_retention_remove_date.py b/posthog/migrations/0103_retention_remove_date.py
index 45b20942c8a9b..ed00fec812576 100644
--- a/posthog/migrations/0103_retention_remove_date.py
+++ b/posthog/migrations/0103_retention_remove_date.py
@@ -7,7 +7,9 @@
def forward(apps, schema_editor):
DashboardItem = apps.get_model("posthog", "DashboardItem")
for item in DashboardItem.objects.filter(
- filters__insight="RETENTION", filters__selectedDate__isnull=False, dashboard__isnull=False
+ filters__insight="RETENTION",
+ filters__selectedDate__isnull=False,
+ dashboard__isnull=False,
):
item.filters.pop("selectedDate")
item.save()
@@ -18,7 +20,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0102_dashboarditem_filters_hash"),
]
diff --git a/posthog/migrations/0104_auto_20201208_1052.py b/posthog/migrations/0104_auto_20201208_1052.py
index ecda95ed3a63a..15a0f8e90b9d1 100644
--- a/posthog/migrations/0104_auto_20201208_1052.py
+++ b/posthog/migrations/0104_auto_20201208_1052.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0103_retention_remove_date"),
]
diff --git a/posthog/migrations/0105_cohort_errors_calculating.py b/posthog/migrations/0105_cohort_errors_calculating.py
index 9fb142a129edc..d0254fa311159 100644
--- a/posthog/migrations/0105_cohort_errors_calculating.py
+++ b/posthog/migrations/0105_cohort_errors_calculating.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0104_auto_20201208_1052"),
]
diff --git a/posthog/migrations/0106_dashboard_item_type_to_display.py b/posthog/migrations/0106_dashboard_item_type_to_display.py
index dd456b59717ae..8a47a57f3c167 100644
--- a/posthog/migrations/0106_dashboard_item_type_to_display.py
+++ b/posthog/migrations/0106_dashboard_item_type_to_display.py
@@ -23,7 +23,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0105_cohort_errors_calculating"),
]
diff --git a/posthog/migrations/0107_plugin_source.py b/posthog/migrations/0107_plugin_source.py
index 3e24136c29984..b8d380da63624 100644
--- a/posthog/migrations/0107_plugin_source.py
+++ b/posthog/migrations/0107_plugin_source.py
@@ -15,7 +15,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0106_dashboard_item_type_to_display"),
]
@@ -26,7 +25,12 @@ class Migration(migrations.Migration):
name="plugin_type",
field=models.CharField(
blank=True,
- choices=[("local", "local"), ("custom", "custom"), ("repository", "repository"), ("source", "source")],
+ choices=[
+ ("local", "local"),
+ ("custom", "custom"),
+ ("repository", "repository"),
+ ("source", "source"),
+ ],
default=None,
max_length=200,
null=True,
diff --git a/posthog/migrations/0108_plugin_organization.py b/posthog/migrations/0108_plugin_organization.py
index e66b63ca91f79..36a422017b66e 100644
--- a/posthog/migrations/0108_plugin_organization.py
+++ b/posthog/migrations/0108_plugin_organization.py
@@ -11,7 +11,6 @@ def set_plugin_organization(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0107_plugin_source"),
]
diff --git a/posthog/migrations/0109_fix_retention_filters.py b/posthog/migrations/0109_fix_retention_filters.py
index 087994342e5f2..b313aa87c7d00 100644
--- a/posthog/migrations/0109_fix_retention_filters.py
+++ b/posthog/migrations/0109_fix_retention_filters.py
@@ -17,7 +17,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0108_plugin_organization"),
]
diff --git a/posthog/migrations/0111_plugin_storage.py b/posthog/migrations/0111_plugin_storage.py
index 5d26fe5444f90..2016036bca333 100644
--- a/posthog/migrations/0111_plugin_storage.py
+++ b/posthog/migrations/0111_plugin_storage.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0110_sessionrecordingeventbyteamandtimestamp"),
]
@@ -14,19 +13,31 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PluginStorage",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("value", models.TextField(blank=True, null=True)),
(
"plugin_config",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.PluginConfig",
+ ),
),
],
),
migrations.AddConstraint(
model_name="pluginstorage",
constraint=models.UniqueConstraint(
- fields=("plugin_config_id", "key"), name="posthog_unique_plugin_storage_key"
+ fields=("plugin_config_id", "key"),
+ name="posthog_unique_plugin_storage_key",
),
),
]
diff --git a/posthog/migrations/0112_sessions_filter.py b/posthog/migrations/0112_sessions_filter.py
index e681e0ee54775..2667a1f5e04cb 100644
--- a/posthog/migrations/0112_sessions_filter.py
+++ b/posthog/migrations/0112_sessions_filter.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0111_plugin_storage"),
]
@@ -16,18 +15,35 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionsFilter",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=False)),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
- ("filters", django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
+ (
+ "filters",
+ django.contrib.postgres.fields.jsonb.JSONField(default=dict),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"),
+ ),
],
),
migrations.AddIndex(
diff --git a/posthog/migrations/0113_cohort_is_static.py b/posthog/migrations/0113_cohort_is_static.py
index cb76e16a26b8a..4e47813670209 100644
--- a/posthog/migrations/0113_cohort_is_static.py
+++ b/posthog/migrations/0113_cohort_is_static.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0112_sessions_filter"),
]
diff --git a/posthog/migrations/0114_fix_team_event_names.py b/posthog/migrations/0114_fix_team_event_names.py
index 6b9143bf4c69c..a9803e62f9d4c 100644
--- a/posthog/migrations/0114_fix_team_event_names.py
+++ b/posthog/migrations/0114_fix_team_event_names.py
@@ -9,7 +9,9 @@ def fix_team_event_names(apps, schema_editor):
old_event_names = team.event_names
team.event_names = [event for event in old_event_names if isinstance(event, str)]
if len(team.event_names) != len(old_event_names):
- from posthog.tasks.calculate_event_property_usage import calculate_event_property_usage_for_team
+ from posthog.tasks.calculate_event_property_usage import (
+ calculate_event_property_usage_for_team,
+ )
team.save()
calculate_event_property_usage_for_team(team.pk)
@@ -20,7 +22,6 @@ def backwards(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0113_cohort_is_static"),
]
diff --git a/posthog/migrations/0115_session_recording_viewed.py b/posthog/migrations/0115_session_recording_viewed.py
index af7c4adfffa86..695ce6592e144 100644
--- a/posthog/migrations/0115_session_recording_viewed.py
+++ b/posthog/migrations/0115_session_recording_viewed.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0114_fix_team_event_names"),
]
@@ -15,16 +14,36 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingViewed",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
("session_id", models.CharField(max_length=200)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
migrations.AddIndex(
model_name="sessionrecordingviewed",
- index=models.Index(fields=["team_id", "user_id", "session_id"], name="posthog_ses_team_id_465af1_idx"),
+ index=models.Index(
+ fields=["team_id", "user_id", "session_id"],
+ name="posthog_ses_team_id_465af1_idx",
+ ),
),
migrations.AlterUniqueTogether(
name="sessionrecordingviewed",
diff --git a/posthog/migrations/0116_plugin_latest_tag.py b/posthog/migrations/0116_plugin_latest_tag.py
index d9206f419e8c0..43bdaf4b6c293 100644
--- a/posthog/migrations/0116_plugin_latest_tag.py
+++ b/posthog/migrations/0116_plugin_latest_tag.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0115_session_recording_viewed"),
]
diff --git a/posthog/migrations/0116_session_recording_retention_period.py b/posthog/migrations/0116_session_recording_retention_period.py
index 7c22507ebb464..fcba843db267f 100644
--- a/posthog/migrations/0116_session_recording_retention_period.py
+++ b/posthog/migrations/0116_session_recording_retention_period.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0115_session_recording_viewed"),
]
diff --git a/posthog/migrations/0117_merge_20210126_0917.py b/posthog/migrations/0117_merge_20210126_0917.py
index 58d4c58b420ed..215b295e771a2 100644
--- a/posthog/migrations/0117_merge_20210126_0917.py
+++ b/posthog/migrations/0117_merge_20210126_0917.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0116_plugin_latest_tag"),
("posthog", "0116_session_recording_retention_period"),
diff --git a/posthog/migrations/0118_is_demo.py b/posthog/migrations/0118_is_demo.py
index ad156872b060d..a8ee78022e0ab 100644
--- a/posthog/migrations/0118_is_demo.py
+++ b/posthog/migrations/0118_is_demo.py
@@ -13,7 +13,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0117_merge_20210126_0917"),
]
diff --git a/posthog/migrations/0119_mandatory_plugin_order.py b/posthog/migrations/0119_mandatory_plugin_order.py
index 0d11fe4c5dba3..d357ac97b0c5d 100644
--- a/posthog/migrations/0119_mandatory_plugin_order.py
+++ b/posthog/migrations/0119_mandatory_plugin_order.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0118_is_demo"),
]
diff --git a/posthog/migrations/0120_organization_personalization.py b/posthog/migrations/0120_organization_personalization.py
index dbba7585b1562..d8bbda6227122 100644
--- a/posthog/migrations/0120_organization_personalization.py
+++ b/posthog/migrations/0120_organization_personalization.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0119_mandatory_plugin_order"),
]
diff --git a/posthog/migrations/0122_organization_setup_section_2_completed.py b/posthog/migrations/0122_organization_setup_section_2_completed.py
index 1f08e6c3eb76c..36d060caa991e 100644
--- a/posthog/migrations/0122_organization_setup_section_2_completed.py
+++ b/posthog/migrations/0122_organization_setup_section_2_completed.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0121_person_email_index"),
]
diff --git a/posthog/migrations/0123_organizationinvite_first_name.py b/posthog/migrations/0123_organizationinvite_first_name.py
index f20482d789e85..a16a8e1f23030 100644
--- a/posthog/migrations/0123_organizationinvite_first_name.py
+++ b/posthog/migrations/0123_organizationinvite_first_name.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0122_organization_setup_section_2_completed"),
]
diff --git a/posthog/migrations/0124_unset_is_calculating_static_cohorts.py b/posthog/migrations/0124_unset_is_calculating_static_cohorts.py
index ed1a3000d6a57..cb7a3bc8176dc 100644
--- a/posthog/migrations/0124_unset_is_calculating_static_cohorts.py
+++ b/posthog/migrations/0124_unset_is_calculating_static_cohorts.py
@@ -13,7 +13,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0123_organizationinvite_first_name"),
]
diff --git a/posthog/migrations/0125_longer_webhook_url.py b/posthog/migrations/0125_longer_webhook_url.py
index c87c15ae5438f..3ea0beab6aabc 100644
--- a/posthog/migrations/0125_longer_webhook_url.py
+++ b/posthog/migrations/0125_longer_webhook_url.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0124_unset_is_calculating_static_cohorts"),
]
diff --git a/posthog/migrations/0126_fix_funnels_insights_links.py b/posthog/migrations/0126_fix_funnels_insights_links.py
index 074b95c34ebfa..5379cf8920a89 100644
--- a/posthog/migrations/0126_fix_funnels_insights_links.py
+++ b/posthog/migrations/0126_fix_funnels_insights_links.py
@@ -15,7 +15,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0125_longer_webhook_url"),
]
diff --git a/posthog/migrations/0127_add_dashboard_filters.py b/posthog/migrations/0127_add_dashboard_filters.py
index 73052b72778a5..1cd1b2d0bbc69 100644
--- a/posthog/migrations/0127_add_dashboard_filters.py
+++ b/posthog/migrations/0127_add_dashboard_filters.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0126_fix_funnels_insights_links"),
]
diff --git a/posthog/migrations/0127_stricter_team_data.py b/posthog/migrations/0127_stricter_team_data.py
index 64d4d15703251..3a2e6a0ba733d 100644
--- a/posthog/migrations/0127_stricter_team_data.py
+++ b/posthog/migrations/0127_stricter_team_data.py
@@ -17,11 +17,14 @@ def adjust_teams_for_stricter_requirements(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0126_fix_funnels_insights_links"),
]
operations = [
- migrations.RunPython(adjust_teams_for_stricter_requirements, migrations.RunPython.noop, elidable=True),
+ migrations.RunPython(
+ adjust_teams_for_stricter_requirements,
+ migrations.RunPython.noop,
+ elidable=True,
+ ),
]
diff --git a/posthog/migrations/0128_stricter_team_schema.py b/posthog/migrations/0128_stricter_team_schema.py
index c974080e643da..26f0210798515 100644
--- a/posthog/migrations/0128_stricter_team_schema.py
+++ b/posthog/migrations/0128_stricter_team_schema.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0127_stricter_team_data"),
]
diff --git a/posthog/migrations/0129_merge_20210223_0757.py b/posthog/migrations/0129_merge_20210223_0757.py
index 919bfb5ada376..4f28607b0bec2 100644
--- a/posthog/migrations/0129_merge_20210223_0757.py
+++ b/posthog/migrations/0129_merge_20210223_0757.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0128_stricter_team_schema"),
("posthog", "0127_add_dashboard_filters"),
diff --git a/posthog/migrations/0130_dashboard_creation_mode.py b/posthog/migrations/0130_dashboard_creation_mode.py
index a163da42b2e05..cf102ef97a5c9 100644
--- a/posthog/migrations/0130_dashboard_creation_mode.py
+++ b/posthog/migrations/0130_dashboard_creation_mode.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0129_merge_20210223_0757"),
]
@@ -14,7 +13,11 @@ class Migration(migrations.Migration):
model_name="dashboard",
name="creation_mode",
field=models.CharField(
- choices=[("default", "Default"), ("template", "Template"), ("duplicate", "Duplicate")],
+ choices=[
+ ("default", "Default"),
+ ("template", "Template"),
+ ("duplicate", "Duplicate"),
+ ],
default="default",
max_length=16,
),
diff --git a/posthog/migrations/0131_add_plugins_updated_created_at.py b/posthog/migrations/0131_add_plugins_updated_created_at.py
index 873214abf6a73..1c3ef33ab1b0d 100644
--- a/posthog/migrations/0131_add_plugins_updated_created_at.py
+++ b/posthog/migrations/0131_add_plugins_updated_created_at.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0130_dashboard_creation_mode"),
]
diff --git a/posthog/migrations/0132_team_test_account_filters.py b/posthog/migrations/0132_team_test_account_filters.py
index 339ac4ec5ceee..313de9f3355e4 100644
--- a/posthog/migrations/0132_team_test_account_filters.py
+++ b/posthog/migrations/0132_team_test_account_filters.py
@@ -40,7 +40,12 @@ def forward(apps, schema_editor):
{
"key": "$host",
"operator": "is_not",
- "value": ["localhost:8000", "localhost:5000", "127.0.0.1:8000", "127.0.0.1:3000"],
+ "value": [
+ "localhost:8000",
+ "localhost:5000",
+ "127.0.0.1:8000",
+ "127.0.0.1:3000",
+ ],
},
]
if team.organization:
@@ -51,7 +56,12 @@ def forward(apps, schema_editor):
example_email = re.search(r"@[\w.]+", example_emails[0])
if example_email:
filters += [
- {"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"},
+ {
+ "key": "email",
+ "operator": "not_icontains",
+ "value": example_email.group(),
+ "type": "person",
+ },
]
team.test_account_filters = filters
team.save()
@@ -62,7 +72,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0131_add_plugins_updated_created_at"),
]
diff --git a/posthog/migrations/0133_plugins_access_control.py b/posthog/migrations/0133_plugins_access_control.py
index 824853fdb0666..0e10347b18393 100644
--- a/posthog/migrations/0133_plugins_access_control.py
+++ b/posthog/migrations/0133_plugins_access_control.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0132_team_test_account_filters"),
]
@@ -33,7 +32,10 @@ class Migration(migrations.Migration):
model_name="team",
name="app_urls",
field=fields.ArrayField(
- base_field=models.CharField(max_length=200, null=True), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=200, null=True),
+ blank=True,
+ default=list,
+ size=None,
),
),
migrations.AlterField(
diff --git a/posthog/migrations/0134_event_site_url.py b/posthog/migrations/0134_event_site_url.py
index 0096edcf1b74f..c70de913a77b8 100644
--- a/posthog/migrations/0134_event_site_url.py
+++ b/posthog/migrations/0134_event_site_url.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0133_plugins_access_control"),
]
diff --git a/posthog/migrations/0135_plugins_on_cloud.py b/posthog/migrations/0135_plugins_on_cloud.py
index 38afccfb7dfdf..bbf7165f956bf 100644
--- a/posthog/migrations/0135_plugins_on_cloud.py
+++ b/posthog/migrations/0135_plugins_on_cloud.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0134_event_site_url"),
]
diff --git a/posthog/migrations/0136_global_plugin_attachments.py b/posthog/migrations/0136_global_plugin_attachments.py
index cd269c296886b..b0207e75bb878 100644
--- a/posthog/migrations/0136_global_plugin_attachments.py
+++ b/posthog/migrations/0136_global_plugin_attachments.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0135_plugins_on_cloud"),
]
@@ -14,6 +13,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name="pluginattachment",
name="plugin_config",
- field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"),
+ field=models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.PluginConfig",
+ ),
),
]
diff --git a/posthog/migrations/0137_team_timezone.py b/posthog/migrations/0137_team_timezone.py
index 09c9077a4172c..b7b31ff04f75c 100644
--- a/posthog/migrations/0137_team_timezone.py
+++ b/posthog/migrations/0137_team_timezone.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0136_global_plugin_attachments"),
]
@@ -72,13 +71,19 @@ class Migration(migrations.Migration):
("America/Anguilla", "America/Anguilla"),
("America/Antigua", "America/Antigua"),
("America/Araguaina", "America/Araguaina"),
- ("America/Argentina/Buenos_Aires", "America/Argentina/Buenos_Aires"),
+ (
+ "America/Argentina/Buenos_Aires",
+ "America/Argentina/Buenos_Aires",
+ ),
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
- ("America/Argentina/Rio_Gallegos", "America/Argentina/Rio_Gallegos"),
+ (
+ "America/Argentina/Rio_Gallegos",
+ "America/Argentina/Rio_Gallegos",
+ ),
("America/Argentina/Salta", "America/Argentina/Salta"),
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
@@ -172,7 +177,10 @@ class Migration(migrations.Migration):
("America/Noronha", "America/Noronha"),
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
("America/North_Dakota/Center", "America/North_Dakota/Center"),
- ("America/North_Dakota/New_Salem", "America/North_Dakota/New_Salem"),
+ (
+ "America/North_Dakota/New_Salem",
+ "America/North_Dakota/New_Salem",
+ ),
("America/Nuuk", "America/Nuuk"),
("America/Ojinaga", "America/Ojinaga"),
("America/Panama", "America/Panama"),
diff --git a/posthog/migrations/0138_featureflag_name_optional.py b/posthog/migrations/0138_featureflag_name_optional.py
index 91cea1870b0f4..95e892cc419fb 100644
--- a/posthog/migrations/0138_featureflag_name_optional.py
+++ b/posthog/migrations/0138_featureflag_name_optional.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0137_team_timezone"),
]
diff --git a/posthog/migrations/0139_dashboard_tagging.py b/posthog/migrations/0139_dashboard_tagging.py
index f16d90cd0933c..f5b1d24d1db8b 100644
--- a/posthog/migrations/0139_dashboard_tagging.py
+++ b/posthog/migrations/0139_dashboard_tagging.py
@@ -5,13 +5,16 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0138_featureflag_name_optional"),
]
operations = [
- migrations.AddField(model_name="dashboard", name="description", field=models.TextField(blank=True)),
+ migrations.AddField(
+ model_name="dashboard",
+ name="description",
+ field=models.TextField(blank=True),
+ ),
migrations.AddField(
model_name="dashboard",
name="tags",
diff --git a/posthog/migrations/0140_team_data_attributes.py b/posthog/migrations/0140_team_data_attributes.py
index 4d832106ebe5b..3a53999977c36 100644
--- a/posthog/migrations/0140_team_data_attributes.py
+++ b/posthog/migrations/0140_team_data_attributes.py
@@ -10,7 +10,6 @@ def set_default_data_attributes(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0139_dashboard_tagging"),
]
diff --git a/posthog/migrations/0142_fix_team_data_attributes_default.py b/posthog/migrations/0142_fix_team_data_attributes_default.py
index be1c263053aac..1e24b7d580234 100644
--- a/posthog/migrations/0142_fix_team_data_attributes_default.py
+++ b/posthog/migrations/0142_fix_team_data_attributes_default.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0141_events_created_at_index"),
]
diff --git a/posthog/migrations/0143_user_uuid.py b/posthog/migrations/0143_user_uuid.py
index 484c78c1db1d3..8e3d2cb7ae384 100644
--- a/posthog/migrations/0143_user_uuid.py
+++ b/posthog/migrations/0143_user_uuid.py
@@ -17,7 +17,6 @@ def backwards(app, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0142_fix_team_data_attributes_default"),
]
diff --git a/posthog/migrations/0144_update_django_3_1_8.py b/posthog/migrations/0144_update_django_3_1_8.py
index a063dbfaf77d0..375a532b7456c 100644
--- a/posthog/migrations/0144_update_django_3_1_8.py
+++ b/posthog/migrations/0144_update_django_3_1_8.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0143_user_uuid"),
]
diff --git a/posthog/migrations/0145_eventdefinition_propertydefinition.py b/posthog/migrations/0145_eventdefinition_propertydefinition.py
index 6ebf7328ae3e4..e97e52b591120 100644
--- a/posthog/migrations/0145_eventdefinition_propertydefinition.py
+++ b/posthog/migrations/0145_eventdefinition_propertydefinition.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0144_update_django_3_1_8"),
]
@@ -19,7 +18,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=400)),
@@ -46,7 +48,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=400)),
diff --git a/posthog/migrations/0146_eventproperty_sync.py b/posthog/migrations/0146_eventproperty_sync.py
index a83f455f1bbf9..a53527c2ad2bf 100644
--- a/posthog/migrations/0146_eventproperty_sync.py
+++ b/posthog/migrations/0146_eventproperty_sync.py
@@ -12,7 +12,6 @@
def sync_event_and_properties_definitions(team_uuid: str, Team, EventDefinition, PropertyDefinition) -> None:
-
team = None
# It is possible that the team was deleted before the task could run
@@ -64,11 +63,14 @@ def sync_team_event_names_and_properties(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0145_eventdefinition_propertydefinition"),
]
operations = [
- migrations.RunPython(sync_team_event_names_and_properties, migrations.RunPython.noop, elidable=True),
+ migrations.RunPython(
+ sync_team_event_names_and_properties,
+ migrations.RunPython.noop,
+ elidable=True,
+ ),
]
diff --git a/posthog/migrations/0147_fix_stickiness_dashboard_items.py b/posthog/migrations/0147_fix_stickiness_dashboard_items.py
index 212aa8a489867..ba7954637f05b 100644
--- a/posthog/migrations/0147_fix_stickiness_dashboard_items.py
+++ b/posthog/migrations/0147_fix_stickiness_dashboard_items.py
@@ -11,7 +11,6 @@ def update_stickiness(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0146_eventproperty_sync"),
]
diff --git a/posthog/migrations/0147_plugin_logs.py b/posthog/migrations/0147_plugin_logs.py
index e5163836374b4..5d2844aa81c80 100644
--- a/posthog/migrations/0147_plugin_logs.py
+++ b/posthog/migrations/0147_plugin_logs.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0146_eventproperty_sync"),
]
@@ -20,7 +19,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
@@ -50,16 +52,28 @@ class Migration(migrations.Migration):
),
("message", models.TextField(db_index=True)),
("instance_id", models.UUIDField()),
- ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")),
+ (
+ "plugin",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"),
+ ),
(
"plugin_config",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.pluginconfig",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.AddIndex(
model_name="pluginlogentry",
- index=models.Index(fields=["plugin_config_id", "timestamp"], name="posthog_plu_plugin__736133_idx"),
+ index=models.Index(
+ fields=["plugin_config_id", "timestamp"],
+ name="posthog_plu_plugin__736133_idx",
+ ),
),
]
diff --git a/posthog/migrations/0148_merge_20210506_0823.py b/posthog/migrations/0148_merge_20210506_0823.py
index 3bb39d35e22a1..3e880cf836478 100644
--- a/posthog/migrations/0148_merge_20210506_0823.py
+++ b/posthog/migrations/0148_merge_20210506_0823.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0147_plugin_logs"),
("posthog", "0147_fix_stickiness_dashboard_items"),
diff --git a/posthog/migrations/0149_fix_lifecycle_dashboard_items.py b/posthog/migrations/0149_fix_lifecycle_dashboard_items.py
index 1bd984dda628d..0890b104ec6f4 100644
--- a/posthog/migrations/0149_fix_lifecycle_dashboard_items.py
+++ b/posthog/migrations/0149_fix_lifecycle_dashboard_items.py
@@ -11,7 +11,6 @@ def update_lifecycle(apps, _):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0148_merge_20210506_0823"),
]
diff --git a/posthog/migrations/0150_increase_element_varchars.py b/posthog/migrations/0150_increase_element_varchars.py
index ad622bd0f727d..93f4df18194b4 100644
--- a/posthog/migrations/0150_increase_element_varchars.py
+++ b/posthog/migrations/0150_increase_element_varchars.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0149_fix_lifecycle_dashboard_items"),
]
diff --git a/posthog/migrations/0151_plugin_preinstalled.py b/posthog/migrations/0151_plugin_preinstalled.py
index f008fcf5c506e..d42fc8ede9904 100644
--- a/posthog/migrations/0151_plugin_preinstalled.py
+++ b/posthog/migrations/0151_plugin_preinstalled.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0150_increase_element_varchars"),
]
diff --git a/posthog/migrations/0152_user_events_column_config.py b/posthog/migrations/0152_user_events_column_config.py
index e543eb2dbe967..0b8a311f2f2e3 100644
--- a/posthog/migrations/0152_user_events_column_config.py
+++ b/posthog/migrations/0152_user_events_column_config.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0151_plugin_preinstalled"),
]
diff --git a/posthog/migrations/0153_plugin_capabilities.py b/posthog/migrations/0153_plugin_capabilities.py
index 1543275859024..a5b14ba10f758 100644
--- a/posthog/migrations/0153_plugin_capabilities.py
+++ b/posthog/migrations/0153_plugin_capabilities.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0152_user_events_column_config"),
]
diff --git a/posthog/migrations/0154_organization_for_internal_metrics.py b/posthog/migrations/0154_organization_for_internal_metrics.py
index 0e27e01bc65c3..af56792e73be0 100644
--- a/posthog/migrations/0154_organization_for_internal_metrics.py
+++ b/posthog/migrations/0154_organization_for_internal_metrics.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0153_plugin_capabilities"),
]
diff --git a/posthog/migrations/0155_organization_available_features.py b/posthog/migrations/0155_organization_available_features.py
index 2b2fe5de8e3c6..d0bda03ae5a79 100644
--- a/posthog/migrations/0155_organization_available_features.py
+++ b/posthog/migrations/0155_organization_available_features.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0154_organization_for_internal_metrics"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="organization",
name="available_features",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=64), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=64),
+ blank=True,
+ default=list,
+ size=None,
),
),
]
diff --git a/posthog/migrations/0157_plugin_metrics.py b/posthog/migrations/0157_plugin_metrics.py
index 64ce3cdf26008..a33e2e9c3c993 100644
--- a/posthog/migrations/0157_plugin_metrics.py
+++ b/posthog/migrations/0157_plugin_metrics.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0156_insight_short_id"),
]
diff --git a/posthog/migrations/0158_new_token_format.py b/posthog/migrations/0158_new_token_format.py
index d4118cb976040..165a7b9b2fcb5 100644
--- a/posthog/migrations/0158_new_token_format.py
+++ b/posthog/migrations/0158_new_token_format.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0157_plugin_metrics"),
]
@@ -17,7 +16,10 @@ class Migration(migrations.Migration):
model_name="personalapikey",
name="value",
field=models.CharField(
- default=posthog.models.utils.generate_random_token_personal, editable=False, max_length=50, unique=True
+ default=posthog.models.utils.generate_random_token_personal,
+ editable=False,
+ max_length=50,
+ unique=True,
),
),
migrations.AlterField(
diff --git a/posthog/migrations/0160_organization_domain_whitelist.py b/posthog/migrations/0160_organization_domain_whitelist.py
index f277a8b3b4865..03307b3a5efc9 100644
--- a/posthog/migrations/0160_organization_domain_whitelist.py
+++ b/posthog/migrations/0160_organization_domain_whitelist.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0159_remove_funnels_with_breakdown"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="organization",
name="domain_whitelist",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=256), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=256),
+ blank=True,
+ default=list,
+ size=None,
),
),
]
diff --git a/posthog/migrations/0161_property_defs_search.py b/posthog/migrations/0161_property_defs_search.py
index 96dd9f6d9b713..5963799289e95 100644
--- a/posthog/migrations/0161_property_defs_search.py
+++ b/posthog/migrations/0161_property_defs_search.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0160_organization_domain_whitelist"),
]
@@ -15,10 +14,18 @@ class Migration(migrations.Migration):
TrigramExtension(),
migrations.AddIndex(
model_name="eventdefinition",
- index=GinIndex(fields=["name"], name="index_event_definition_name", opclasses=["gin_trgm_ops"]),
+ index=GinIndex(
+ fields=["name"],
+ name="index_event_definition_name",
+ opclasses=["gin_trgm_ops"],
+ ),
),
migrations.AddIndex(
model_name="propertydefinition",
- index=GinIndex(fields=["name"], name="index_property_definition_name", opclasses=["gin_trgm_ops"]),
+ index=GinIndex(
+ fields=["name"],
+ name="index_property_definition_name",
+ opclasses=["gin_trgm_ops"],
+ ),
),
]
diff --git a/posthog/migrations/0162_organization_is_member_join_email_enabled.py b/posthog/migrations/0162_organization_is_member_join_email_enabled.py
index 1277559492597..0d632f1231b78 100644
--- a/posthog/migrations/0162_organization_is_member_join_email_enabled.py
+++ b/posthog/migrations/0162_organization_is_member_join_email_enabled.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0161_property_defs_search"),
]
diff --git a/posthog/migrations/0163_insights_favorited_updatedat_tags.py b/posthog/migrations/0163_insights_favorited_updatedat_tags.py
index c5a40dcc94e66..2d84e5b65fa2c 100644
--- a/posthog/migrations/0163_insights_favorited_updatedat_tags.py
+++ b/posthog/migrations/0163_insights_favorited_updatedat_tags.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0162_organization_is_member_join_email_enabled"),
]
@@ -20,7 +19,10 @@ class Migration(migrations.Migration):
model_name="dashboarditem",
name="tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=list, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=list,
+ size=None,
),
),
migrations.AddField(
diff --git a/posthog/migrations/0165_dashboarditem_dive_dashboard.py b/posthog/migrations/0165_dashboarditem_dive_dashboard.py
index a38a150f89772..6ac346a7cddb1 100644
--- a/posthog/migrations/0165_dashboarditem_dive_dashboard.py
+++ b/posthog/migrations/0165_dashboarditem_dive_dashboard.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0164_person_index_by_team_and_id"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="dashboarditem",
name="dive_dashboard",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="posthog.dashboard",
),
),
]
diff --git a/posthog/migrations/0166_plugin_public_jobs.py b/posthog/migrations/0166_plugin_public_jobs.py
index 082bd8e5be961..2a1ea2976b7e5 100644
--- a/posthog/migrations/0166_plugin_public_jobs.py
+++ b/posthog/migrations/0166_plugin_public_jobs.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0165_dashboarditem_dive_dashboard"),
]
diff --git a/posthog/migrations/0167_feature_flag_override.py b/posthog/migrations/0167_feature_flag_override.py
index 3dc9007b090a9..d8612997f9aea 100644
--- a/posthog/migrations/0167_feature_flag_override.py
+++ b/posthog/migrations/0167_feature_flag_override.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0166_plugin_public_jobs"),
]
@@ -15,20 +14,41 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="FeatureFlagOverride",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("override_value", models.JSONField()),
(
"feature_flag",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.featureflag",
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.AddConstraint(
model_name="featureflagoverride",
constraint=models.UniqueConstraint(
- fields=("user", "feature_flag", "team"), name="unique feature flag for a user/team combo"
+ fields=("user", "feature_flag", "team"),
+ name="unique feature flag for a user/team combo",
),
),
]
diff --git a/posthog/migrations/0169_person_properties_last_updated_at.py b/posthog/migrations/0169_person_properties_last_updated_at.py
index cec9034911e9e..381bc7a25c81d 100644
--- a/posthog/migrations/0169_person_properties_last_updated_at.py
+++ b/posthog/migrations/0169_person_properties_last_updated_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0168_action_step_empty_string_reset"),
]
diff --git a/posthog/migrations/0170_project_based_permissioning.py b/posthog/migrations/0170_project_based_permissioning.py
index 7f214bc84f48e..0f774199b0001 100644
--- a/posthog/migrations/0170_project_based_permissioning.py
+++ b/posthog/migrations/0170_project_based_permissioning.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0169_person_properties_last_updated_at"),
]
diff --git a/posthog/migrations/0171_cohort_description.py b/posthog/migrations/0171_cohort_description.py
index 951f992947026..70f78f35e30a5 100644
--- a/posthog/migrations/0171_cohort_description.py
+++ b/posthog/migrations/0171_cohort_description.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0170_project_based_permissioning"),
]
diff --git a/posthog/migrations/0172_person_properties_last_operation.py b/posthog/migrations/0172_person_properties_last_operation.py
index c2d8fbb5b6543..de91895bf629d 100644
--- a/posthog/migrations/0172_person_properties_last_operation.py
+++ b/posthog/migrations/0172_person_properties_last_operation.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0171_cohort_description"),
]
diff --git a/posthog/migrations/0173_should_update_person_props_function.py b/posthog/migrations/0173_should_update_person_props_function.py
index 57c0b37fbe87e..dae88cbdfbf13 100644
--- a/posthog/migrations/0173_should_update_person_props_function.py
+++ b/posthog/migrations/0173_should_update_person_props_function.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0172_person_properties_last_operation"),
]
diff --git a/posthog/migrations/0174_organization_slug.py b/posthog/migrations/0174_organization_slug.py
index 8d79133fb2b51..8bdd611d3b9e0 100644
--- a/posthog/migrations/0174_organization_slug.py
+++ b/posthog/migrations/0174_organization_slug.py
@@ -28,7 +28,6 @@ def slugify_all(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0173_should_update_person_props_function"),
]
diff --git a/posthog/migrations/0175_should_update_person_props_function.py b/posthog/migrations/0175_should_update_person_props_function.py
index b90a896e5c139..356028422d461 100644
--- a/posthog/migrations/0175_should_update_person_props_function.py
+++ b/posthog/migrations/0175_should_update_person_props_function.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0174_organization_slug"),
]
diff --git a/posthog/migrations/0176_update_person_props_function.py b/posthog/migrations/0176_update_person_props_function.py
index 3f0f1f6e919ad..974fb90062026 100644
--- a/posthog/migrations/0176_update_person_props_function.py
+++ b/posthog/migrations/0176_update_person_props_function.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0175_should_update_person_props_function"),
]
diff --git a/posthog/migrations/0177_path_cleaning_filters.py b/posthog/migrations/0177_path_cleaning_filters.py
index 3e731d7787655..adaf7819b135c 100644
--- a/posthog/migrations/0177_path_cleaning_filters.py
+++ b/posthog/migrations/0177_path_cleaning_filters.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0176_update_person_props_function"),
]
diff --git a/posthog/migrations/0178_rename_dashboard_item_to_insight.py b/posthog/migrations/0178_rename_dashboard_item_to_insight.py
index 1c6bf777abf49..2cea98aedaa7c 100644
--- a/posthog/migrations/0178_rename_dashboard_item_to_insight.py
+++ b/posthog/migrations/0178_rename_dashboard_item_to_insight.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0177_path_cleaning_filters"),
]
diff --git a/posthog/migrations/0179_add_group_type_mapping.py b/posthog/migrations/0179_add_group_type_mapping.py
index e977ee52079f9..47311ff1756ab 100644
--- a/posthog/migrations/0179_add_group_type_mapping.py
+++ b/posthog/migrations/0179_add_group_type_mapping.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0178_rename_dashboard_item_to_insight"),
]
@@ -14,10 +13,21 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="GroupTypeMapping",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("group_type", models.CharField(max_length=400)),
("group_type_index", models.IntegerField()),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
@@ -27,13 +37,15 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="grouptypemapping",
constraint=models.UniqueConstraint(
- fields=("team", "group_type_index"), name="unique event column indexes for team"
+ fields=("team", "group_type_index"),
+ name="unique event column indexes for team",
),
),
migrations.AddConstraint(
model_name="grouptypemapping",
constraint=models.CheckConstraint(
- check=models.Q(("group_type_index__lte", 5)), name="group_type_index is less than or equal 5"
+ check=models.Q(("group_type_index__lte", 5)),
+ name="group_type_index is less than or equal 5",
),
),
]
diff --git a/posthog/migrations/0180_person_version.py b/posthog/migrations/0180_person_version.py
index 46d008ccf5f35..d6d48d7643f78 100644
--- a/posthog/migrations/0180_person_version.py
+++ b/posthog/migrations/0180_person_version.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0179_add_group_type_mapping"),
]
diff --git a/posthog/migrations/0181_team_correlation_config.py b/posthog/migrations/0181_team_correlation_config.py
index 5d8dc4f3a80bd..9bc7be124d8c6 100644
--- a/posthog/migrations/0181_team_correlation_config.py
+++ b/posthog/migrations/0181_team_correlation_config.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0180_person_version"),
]
diff --git a/posthog/migrations/0182_sessionrecordingevent_window_id.py b/posthog/migrations/0182_sessionrecordingevent_window_id.py
index 1bfcb9b9b71a9..ba59120a0bbb9 100644
--- a/posthog/migrations/0182_sessionrecordingevent_window_id.py
+++ b/posthog/migrations/0182_sessionrecordingevent_window_id.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0181_team_correlation_config"),
]
diff --git a/posthog/migrations/0183_groups_pg.py b/posthog/migrations/0183_groups_pg.py
index 2924c8a581c05..ff5d69ffb0cce 100644
--- a/posthog/migrations/0183_groups_pg.py
+++ b/posthog/migrations/0183_groups_pg.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0182_sessionrecordingevent_window_id"),
]
@@ -14,7 +13,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Group",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("group_key", models.CharField(max_length=400)),
("group_type_index", models.IntegerField()),
("group_properties", models.JSONField(default=dict)),
@@ -22,7 +29,10 @@ class Migration(migrations.Migration):
("properties_last_updated_at", models.JSONField(default=dict)),
("properties_last_operation", models.JSONField(default=dict)),
("version", models.BigIntegerField()),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0184_delete_sessionsfilter.py b/posthog/migrations/0184_delete_sessionsfilter.py
index 4d5c6d7fda3f1..44df92d321541 100644
--- a/posthog/migrations/0184_delete_sessionsfilter.py
+++ b/posthog/migrations/0184_delete_sessionsfilter.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0183_groups_pg"),
]
diff --git a/posthog/migrations/0185_special_migrations.py b/posthog/migrations/0185_special_migrations.py
index ef779c7aafcde..3c95dcb6e2a40 100644
--- a/posthog/migrations/0185_special_migrations.py
+++ b/posthog/migrations/0185_special_migrations.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0184_delete_sessionsfilter"),
]
@@ -15,17 +14,29 @@ class Migration(migrations.Migration):
fields=[
("id", models.BigAutoField(primary_key=True, serialize=False)),
("name", models.CharField(max_length=50)),
- ("description", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "description",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("progress", models.PositiveSmallIntegerField(default=0)),
("status", models.PositiveSmallIntegerField(default=0)),
- ("current_operation_index", models.PositiveSmallIntegerField(default=0)),
+ (
+ "current_operation_index",
+ models.PositiveSmallIntegerField(default=0),
+ ),
("current_query_id", models.CharField(default="", max_length=100)),
("celery_task_id", models.CharField(default="", max_length=100)),
("started_at", models.DateTimeField(blank=True, null=True)),
("finished_at", models.DateTimeField(blank=True, null=True)),
("last_error", models.TextField(blank=True, null=True)),
- ("posthog_min_version", models.CharField(blank=True, max_length=20, null=True)),
- ("posthog_max_version", models.CharField(blank=True, max_length=20, null=True)),
+ (
+ "posthog_min_version",
+ models.CharField(blank=True, max_length=20, null=True),
+ ),
+ (
+ "posthog_max_version",
+ models.CharField(blank=True, max_length=20, null=True),
+ ),
],
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0186_insight_refresh_attempt.py b/posthog/migrations/0186_insight_refresh_attempt.py
index 052ef134cbb68..2f643dfe7d070 100644
--- a/posthog/migrations/0186_insight_refresh_attempt.py
+++ b/posthog/migrations/0186_insight_refresh_attempt.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0185_special_migrations"),
]
diff --git a/posthog/migrations/0187_stale_events.py b/posthog/migrations/0187_stale_events.py
index c7aad3faeec0e..3fa16bd4457fa 100644
--- a/posthog/migrations/0187_stale_events.py
+++ b/posthog/migrations/0187_stale_events.py
@@ -4,7 +4,6 @@
def set_created_at(apps, schema_editor):
-
try:
from posthog.client import sync_execute
except ImportError:
@@ -32,7 +31,6 @@ def set_created_at(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0186_insight_refresh_attempt"),
]
diff --git a/posthog/migrations/0188_person_distinct_id_version.py b/posthog/migrations/0188_person_distinct_id_version.py
index 0d068def31b40..57f2ab732499d 100644
--- a/posthog/migrations/0188_person_distinct_id_version.py
+++ b/posthog/migrations/0188_person_distinct_id_version.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0187_stale_events"),
]
diff --git a/posthog/migrations/0189_alter_annotation_scope.py b/posthog/migrations/0189_alter_annotation_scope.py
index 2b0bc6b7ce2f1..c8eb52286e0c5 100644
--- a/posthog/migrations/0189_alter_annotation_scope.py
+++ b/posthog/migrations/0189_alter_annotation_scope.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0188_person_distinct_id_version"),
]
@@ -14,7 +13,11 @@ class Migration(migrations.Migration):
model_name="annotation",
name="scope",
field=models.CharField(
- choices=[("dashboard_item", "insight"), ("project", "project"), ("organization", "organization")],
+ choices=[
+ ("dashboard_item", "insight"),
+ ("project", "project"),
+ ("organization", "organization"),
+ ],
default="dashboard_item",
max_length=24,
),
diff --git a/posthog/migrations/0190_experiment.py b/posthog/migrations/0190_experiment.py
index cec03a5e882d5..071722ba18e6c 100644
--- a/posthog/migrations/0190_experiment.py
+++ b/posthog/migrations/0190_experiment.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0189_alter_annotation_scope"),
]
@@ -16,9 +15,20 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Experiment",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("name", models.CharField(max_length=400)),
- ("description", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "description",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("filters", models.JSONField(default=dict)),
("parameters", models.JSONField(default=dict, null=True)),
("start_date", models.DateTimeField(null=True)),
@@ -27,13 +37,22 @@ class Migration(migrations.Migration):
("updated_at", models.DateTimeField(auto_now=True)),
(
"created_by",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
),
(
"feature_flag",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.featureflag",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
]
diff --git a/posthog/migrations/0191_rename_specialmigration_asyncmigration.py b/posthog/migrations/0191_rename_specialmigration_asyncmigration.py
index 89455a47093bc..518b6cd3032a1 100644
--- a/posthog/migrations/0191_rename_specialmigration_asyncmigration.py
+++ b/posthog/migrations/0191_rename_specialmigration_asyncmigration.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0190_experiment"),
]
diff --git a/posthog/migrations/0192_event_properties.py b/posthog/migrations/0192_event_properties.py
index d2d1c0afd023d..90cd831c338e0 100644
--- a/posthog/migrations/0192_event_properties.py
+++ b/posthog/migrations/0192_event_properties.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0191_rename_specialmigration_asyncmigration"),
]
@@ -15,7 +14,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="EventProperty",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("event", models.CharField(max_length=400)),
("property", models.CharField(max_length=400)),
(
@@ -38,7 +45,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="eventproperty",
constraint=models.UniqueConstraint(
- fields=("team", "event", "property"), name="posthog_event_property_unique_team_event_property"
+ fields=("team", "event", "property"),
+ name="posthog_event_property_unique_team_event_property",
),
),
]
diff --git a/posthog/migrations/0193_auto_20211222_0912.py b/posthog/migrations/0193_auto_20211222_0912.py
index c5236c5300b02..a028cb6a7649f 100644
--- a/posthog/migrations/0193_auto_20211222_0912.py
+++ b/posthog/migrations/0193_auto_20211222_0912.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0192_event_properties"),
]
@@ -44,8 +43,14 @@ class Migration(migrations.Migration):
constraint=models.CheckConstraint(
check=models.Q(
models.Q(
- ("property_type__in", ["DateTime", "String", "Numeric", "Boolean"]),
- ("property_type_format__in", ["unix_timestamp", "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD"]),
+ (
+ "property_type__in",
+ ["DateTime", "String", "Numeric", "Boolean"],
+ ),
+ (
+ "property_type_format__in",
+ ["unix_timestamp", "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD"],
+ ),
)
),
name="property_type_and_format_are_valid",
diff --git a/posthog/migrations/0194_set_property_type_for_time.py b/posthog/migrations/0194_set_property_type_for_time.py
index 40a11c5cf10c9..452c46f18a144 100644
--- a/posthog/migrations/0194_set_property_type_for_time.py
+++ b/posthog/migrations/0194_set_property_type_for_time.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0193_auto_20211222_0912"),
]
diff --git a/posthog/migrations/0195_group_type_name.py b/posthog/migrations/0195_group_type_name.py
index 4f818f3d3384c..c100c26b761f9 100644
--- a/posthog/migrations/0195_group_type_name.py
+++ b/posthog/migrations/0195_group_type_name.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0194_set_property_type_for_time"),
]
diff --git a/posthog/migrations/0196_update_property_types.py b/posthog/migrations/0196_update_property_types.py
index 790cfed311ae3..230a536e0254c 100644
--- a/posthog/migrations/0196_update_property_types.py
+++ b/posthog/migrations/0196_update_property_types.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0195_group_type_name"),
]
@@ -39,7 +38,10 @@ class Migration(migrations.Migration):
constraint=models.CheckConstraint(
check=models.Q(
models.Q(
- ("property_type__in", ["DateTime", "String", "Numeric", "Boolean"]),
+ (
+ "property_type__in",
+ ["DateTime", "String", "Numeric", "Boolean"],
+ ),
(
"property_type_format__in",
[
diff --git a/posthog/migrations/0197_plugin_is_stateless.py b/posthog/migrations/0197_plugin_is_stateless.py
index 0b09f00404905..32d9018c89257 100644
--- a/posthog/migrations/0197_plugin_is_stateless.py
+++ b/posthog/migrations/0197_plugin_is_stateless.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0196_update_property_types"),
]
diff --git a/posthog/migrations/0198_async_migration_error.py b/posthog/migrations/0198_async_migration_error.py
index 1fca61f08fca8..bd1932c772b76 100644
--- a/posthog/migrations/0198_async_migration_error.py
+++ b/posthog/migrations/0198_async_migration_error.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0197_plugin_is_stateless"),
]
@@ -23,7 +22,10 @@ class Migration(migrations.Migration):
("description", models.CharField(max_length=400)),
(
"async_migration",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.asyncmigration"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.asyncmigration",
+ ),
),
],
),
diff --git a/posthog/migrations/0199_update_experiment_model.py b/posthog/migrations/0199_update_experiment_model.py
index dc1d1aa4cf5ff..eac2ce551d81c 100644
--- a/posthog/migrations/0199_update_experiment_model.py
+++ b/posthog/migrations/0199_update_experiment_model.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0198_async_migration_error"),
]
diff --git a/posthog/migrations/0200_insight_last_modified.py b/posthog/migrations/0200_insight_last_modified.py
index 61335edd71dff..6b568c4381480 100644
--- a/posthog/migrations/0200_insight_last_modified.py
+++ b/posthog/migrations/0200_insight_last_modified.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0199_update_experiment_model"),
]
diff --git a/posthog/migrations/0201_remove_property_type_format_constraint.py b/posthog/migrations/0201_remove_property_type_format_constraint.py
index 83435e998f337..4451b2d81b1b7 100644
--- a/posthog/migrations/0201_remove_property_type_format_constraint.py
+++ b/posthog/migrations/0201_remove_property_type_format_constraint.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0200_insight_last_modified"),
]
diff --git a/posthog/migrations/0202_descriptions_for_action.py b/posthog/migrations/0202_descriptions_for_action.py
index d446196299d02..cfda7fdf75f07 100644
--- a/posthog/migrations/0202_descriptions_for_action.py
+++ b/posthog/migrations/0202_descriptions_for_action.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0201_remove_property_type_format_constraint"),
]
diff --git a/posthog/migrations/0203_dashboard_permissions.py b/posthog/migrations/0203_dashboard_permissions.py
index 58936560d68c2..b029b2aeb06f2 100644
--- a/posthog/migrations/0203_dashboard_permissions.py
+++ b/posthog/migrations/0203_dashboard_permissions.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0202_descriptions_for_action"),
]
diff --git a/posthog/migrations/0204_remove_duplicate_plugin_configs.py b/posthog/migrations/0204_remove_duplicate_plugin_configs.py
index c076acc3ef8dd..a9be75e301fe4 100644
--- a/posthog/migrations/0204_remove_duplicate_plugin_configs.py
+++ b/posthog/migrations/0204_remove_duplicate_plugin_configs.py
@@ -23,7 +23,6 @@ def remove_duplicate_plugin_configs(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0203_dashboard_permissions"),
]
diff --git a/posthog/migrations/0205_auto_20220204_1748.py b/posthog/migrations/0205_auto_20220204_1748.py
index 8fc138fbd95ea..1b09b6736692b 100644
--- a/posthog/migrations/0205_auto_20220204_1748.py
+++ b/posthog/migrations/0205_auto_20220204_1748.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0204_remove_duplicate_plugin_configs"),
]
diff --git a/posthog/migrations/0206_global_tags_setup.py b/posthog/migrations/0206_global_tags_setup.py
index f977c5106d110..f5927c2baea49 100644
--- a/posthog/migrations/0206_global_tags_setup.py
+++ b/posthog/migrations/0206_global_tags_setup.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0205_auto_20220204_1748"),
]
@@ -19,11 +18,17 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=255)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.CreateModel(
@@ -32,7 +37,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -48,7 +56,9 @@ class Migration(migrations.Migration):
(
"tag",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="tagged_items", to="posthog.tag"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="tagged_items",
+ to="posthog.tag",
),
),
],
@@ -56,7 +66,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="taggeditem",
constraint=models.CheckConstraint(
- check=models.Q(models.Q(("action__isnull", False)), _connector="OR"), name="exactly_one_related_object"
+ check=models.Q(models.Q(("action__isnull", False)), _connector="OR"),
+ name="exactly_one_related_object",
),
),
migrations.AlterUniqueTogether(
diff --git a/posthog/migrations/0207_cohort_count.py b/posthog/migrations/0207_cohort_count.py
index 95e985f8c0ec0..794ed5add5ea7 100644
--- a/posthog/migrations/0207_cohort_count.py
+++ b/posthog/migrations/0207_cohort_count.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0206_global_tags_setup"),
]
diff --git a/posthog/migrations/0208_alter_plugin_updated_at.py b/posthog/migrations/0208_alter_plugin_updated_at.py
index 81e0492d6d74d..2e30b98562a35 100644
--- a/posthog/migrations/0208_alter_plugin_updated_at.py
+++ b/posthog/migrations/0208_alter_plugin_updated_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0207_cohort_count"),
]
diff --git a/posthog/migrations/0209_plugin_logs_disabled.py b/posthog/migrations/0209_plugin_logs_disabled.py
index 31b8423790ea4..72cbcbb80cbb4 100644
--- a/posthog/migrations/0209_plugin_logs_disabled.py
+++ b/posthog/migrations/0209_plugin_logs_disabled.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0208_alter_plugin_updated_at"),
]
diff --git a/posthog/migrations/0210_drop_update_person_functions.py b/posthog/migrations/0210_drop_update_person_functions.py
index 6d70f4c0cf32b..c036799c5429f 100644
--- a/posthog/migrations/0210_drop_update_person_functions.py
+++ b/posthog/migrations/0210_drop_update_person_functions.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0209_plugin_logs_disabled"),
]
diff --git a/posthog/migrations/0211_async_migrations_errors_length.py b/posthog/migrations/0211_async_migrations_errors_length.py
index 445b85183e7a9..a29e6bd862808 100644
--- a/posthog/migrations/0211_async_migrations_errors_length.py
+++ b/posthog/migrations/0211_async_migrations_errors_length.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0210_drop_update_person_functions"),
]
diff --git a/posthog/migrations/0212_alter_persondistinctid_team.py b/posthog/migrations/0212_alter_persondistinctid_team.py
index 89995547f70db..3f0b7eb908fac 100644
--- a/posthog/migrations/0212_alter_persondistinctid_team.py
+++ b/posthog/migrations/0212_alter_persondistinctid_team.py
@@ -48,7 +48,9 @@ class Migration(migrations.Migration):
model_name="persondistinctid",
name="team",
field=models.ForeignKey(
- db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"
+ db_index=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
),
)
],
diff --git a/posthog/migrations/0213_deprecated_old_tags.py b/posthog/migrations/0213_deprecated_old_tags.py
index d713f0aaa5f45..2efc99288852c 100644
--- a/posthog/migrations/0213_deprecated_old_tags.py
+++ b/posthog/migrations/0213_deprecated_old_tags.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0212_alter_persondistinctid_team"),
]
@@ -18,14 +17,22 @@ class Migration(migrations.Migration):
model_name="dashboard",
name="deprecated_tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=list,
+ null=True,
+ size=None,
),
),
migrations.AlterField(
model_name="insight",
name="deprecated_tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=list,
+ null=True,
+ size=None,
),
),
migrations.RemoveConstraint(
@@ -78,7 +85,16 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name="taggeditem",
- unique_together={("tag", "dashboard", "insight", "event_definition", "property_definition", "action")},
+ unique_together={
+ (
+ "tag",
+ "dashboard",
+ "insight",
+ "event_definition",
+ "property_definition",
+ "action",
+ )
+ },
),
migrations.AddConstraint(
model_name="taggeditem",
diff --git a/posthog/migrations/0215_add_tags_back.py b/posthog/migrations/0215_add_tags_back.py
index 0bd14fee6f5ac..bc66b0997cd60 100644
--- a/posthog/migrations/0215_add_tags_back.py
+++ b/posthog/migrations/0215_add_tags_back.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0214_migrate_dashboard_insight_tags"),
]
@@ -15,14 +14,22 @@ class Migration(migrations.Migration):
model_name="dashboard",
name="tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=None,
+ null=True,
+ size=None,
),
),
migrations.AddField(
model_name="insight",
name="tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None
+ base_field=models.CharField(max_length=32),
+ blank=True,
+ default=None,
+ null=True,
+ size=None,
),
),
]
diff --git a/posthog/migrations/0216_insight_placeholder_name.py b/posthog/migrations/0216_insight_placeholder_name.py
index 43e0a93447550..7d5956a0cfbc6 100644
--- a/posthog/migrations/0216_insight_placeholder_name.py
+++ b/posthog/migrations/0216_insight_placeholder_name.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0215_add_tags_back"),
]
diff --git a/posthog/migrations/0217_team_primary_dashboard.py b/posthog/migrations/0217_team_primary_dashboard.py
index 98a8f32763983..cb164adf85455 100644
--- a/posthog/migrations/0217_team_primary_dashboard.py
+++ b/posthog/migrations/0217_team_primary_dashboard.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0216_insight_placeholder_name"),
]
diff --git a/posthog/migrations/0219_migrate_tags_v2.py b/posthog/migrations/0219_migrate_tags_v2.py
index 1895d3b86c59d..ecc4f4312a812 100644
--- a/posthog/migrations/0219_migrate_tags_v2.py
+++ b/posthog/migrations/0219_migrate_tags_v2.py
@@ -33,7 +33,11 @@ def forwards(apps, schema_editor):
)
for insight_page in insight_paginator.page_range:
- logger.info("insight_tag_batch_get_start", limit=batch_size, offset=(insight_page - 1) * batch_size)
+ logger.info(
+ "insight_tag_batch_get_start",
+ limit=batch_size,
+ offset=(insight_page - 1) * batch_size,
+ )
insights = iter(insight_paginator.get_page(insight_page))
for tags, team_id, insight_id in insights:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
@@ -55,13 +59,22 @@ def forwards(apps, schema_editor):
)
for dashboard_page in dashboard_paginator.page_range:
- logger.info("dashboard_tag_batch_get_start", limit=batch_size, offset=(dashboard_page - 1) * batch_size)
+ logger.info(
+ "dashboard_tag_batch_get_start",
+ limit=batch_size,
+ offset=(dashboard_page - 1) * batch_size,
+ )
dashboards = iter(dashboard_paginator.get_page(dashboard_page))
for tags, team_id, dashboard_id in dashboards:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
for tag in unique_tags:
temp_tag = Tag(name=tag, team_id=team_id)
- createables.append((temp_tag, TaggedItem(dashboard_id=dashboard_id, tag_id=temp_tag.id)))
+ createables.append(
+ (
+ temp_tag,
+ TaggedItem(dashboard_id=dashboard_id, tag_id=temp_tag.id),
+ )
+ )
logger.info("dashboard_tag_get_end", tags_count=len(createables) - num_insight_tags)
@@ -94,7 +107,9 @@ def forwards(apps, schema_editor):
# Create tag <-> item relationships, ignoring conflicts
TaggedItem.objects.bulk_create(
- [tagged_item for (_, tagged_item) in createable_batch], ignore_conflicts=True, batch_size=batch_size
+ [tagged_item for (_, tagged_item) in createable_batch],
+ ignore_conflicts=True,
+ batch_size=batch_size,
)
logger.info("posthog/0219_migrate_tags_v2_end")
diff --git a/posthog/migrations/0220_backfill_primary_dashboards.py b/posthog/migrations/0220_backfill_primary_dashboards.py
index 4633c81b90bbf..f32def59bc25c 100644
--- a/posthog/migrations/0220_backfill_primary_dashboards.py
+++ b/posthog/migrations/0220_backfill_primary_dashboards.py
@@ -10,7 +10,6 @@ def backfill_primary_dashboards(apps, _):
team_dashboards = []
with connection.cursor() as cursor:
-
# Fetch a list of teams and the id of the dashboard that should be set as the primary dashboard
# The primary dashboard should be the oldest pinned dashboard, if one exists
# or the oldest dashboard, if no pinned dashboards exist
diff --git a/posthog/migrations/0221_add_activity_log_model.py b/posthog/migrations/0221_add_activity_log_model.py
index dda3d6001c30a..077951fbfda0f 100644
--- a/posthog/migrations/0221_add_activity_log_model.py
+++ b/posthog/migrations/0221_add_activity_log_model.py
@@ -10,7 +10,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0220_backfill_primary_dashboards"),
]
@@ -22,7 +21,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("team_id", models.PositiveIntegerField(null=True)),
@@ -33,26 +35,36 @@ class Migration(migrations.Migration):
(
"detail",
models.JSONField(
- encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, null=True
+ encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder,
+ null=True,
),
),
("created_at", models.DateTimeField(default=django.utils.timezone.now)),
(
"user",
models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
],
),
migrations.AddIndex(
model_name="activitylog",
- index=models.Index(fields=["team_id", "scope", "item_id"], name="posthog_act_team_id_13a0a8_idx"),
+ index=models.Index(
+ fields=["team_id", "scope", "item_id"],
+ name="posthog_act_team_id_13a0a8_idx",
+ ),
),
migrations.AddConstraint(
model_name="activitylog",
constraint=models.CheckConstraint(
- check=models.Q(("team_id__isnull", False), ("organization_id__isnull", False), _connector="OR"),
+ check=models.Q(
+ ("team_id__isnull", False),
+ ("organization_id__isnull", False),
+ _connector="OR",
+ ),
name="must_have_team_or_organization_id",
),
),
diff --git a/posthog/migrations/0222_fix_deleted_primary_dashboards.py b/posthog/migrations/0222_fix_deleted_primary_dashboards.py
index 7869597e61475..a65df9e39f2f7 100644
--- a/posthog/migrations/0222_fix_deleted_primary_dashboards.py
+++ b/posthog/migrations/0222_fix_deleted_primary_dashboards.py
@@ -15,7 +15,6 @@ def fix_for_deleted_primary_dashboards(apps, _):
expected_team_dashboards = []
with connection.cursor() as cursor:
-
# Fetch a list of teams and the id of the dashboard that should be set as the primary dashboard
# The primary dashboard should be the oldest pinned dashboard, if one exists
# or the oldest dashboard, if no pinned dashboards exist
diff --git a/posthog/migrations/0223_organizationdomain.py b/posthog/migrations/0223_organizationdomain.py
index 30cef1aca53a0..c46349689ff69 100644
--- a/posthog/migrations/0223_organizationdomain.py
+++ b/posthog/migrations/0223_organizationdomain.py
@@ -15,12 +15,14 @@ def migrate_domain_whitelist(apps, schema_editor):
for organization in Organization.objects.exclude(domain_whitelist=[]):
for domain in organization.domain_whitelist:
OrganizationDomain.objects.create(
- organization=organization, domain=domain, verified_at=timezone.now(), jit_provisioning_enabled=True
+ organization=organization,
+ domain=domain,
+ verified_at=timezone.now(),
+ jit_provisioning_enabled=True,
)
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0222_fix_deleted_primary_dashboards"),
]
@@ -32,18 +34,28 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("domain", models.CharField(max_length=128, unique=True)),
(
"verification_challenge",
models.CharField(
- default=posthog.models.organization_domain.generate_verification_challenge, max_length=128
+ default=posthog.models.organization_domain.generate_verification_challenge,
+ max_length=128,
),
),
- ("verified_at", models.DateTimeField(blank=True, default=None, null=True)),
- ("last_verification_retry", models.DateTimeField(blank=True, default=None, null=True)),
+ (
+ "verified_at",
+ models.DateTimeField(blank=True, default=None, null=True),
+ ),
+ (
+ "last_verification_retry",
+ models.DateTimeField(blank=True, default=None, null=True),
+ ),
(
"jit_provisioning_enabled",
models.BooleanField(default=False),
@@ -52,7 +64,9 @@ class Migration(migrations.Migration):
(
"organization",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="posthog.organization"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="domains",
+ to="posthog.organization",
),
),
],
diff --git a/posthog/migrations/0224_saml_multitenant.py b/posthog/migrations/0224_saml_multitenant.py
index 2f942a4caf0d2..b2813acd95827 100644
--- a/posthog/migrations/0224_saml_multitenant.py
+++ b/posthog/migrations/0224_saml_multitenant.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0223_organizationdomain"),
]
diff --git a/posthog/migrations/0225_insight_viewed.py b/posthog/migrations/0225_insight_viewed.py
index e0f1b58456766..7d4195a4fb681 100644
--- a/posthog/migrations/0225_insight_viewed.py
+++ b/posthog/migrations/0225_insight_viewed.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0224_saml_multitenant"),
]
@@ -15,16 +14,42 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="InsightViewed",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("last_viewed_at", models.DateTimeField()),
- ("insight", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.insight")),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ (
+ "insight",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
migrations.AddIndex(
model_name="insightviewed",
- index=models.Index(fields=["team_id", "user_id", "-last_viewed_at"], name="posthog_ins_team_id_339ee0_idx"),
+ index=models.Index(
+ fields=["team_id", "user_id", "-last_viewed_at"],
+ name="posthog_ins_team_id_339ee0_idx",
+ ),
),
migrations.AddConstraint(
model_name="insightviewed",
diff --git a/posthog/migrations/0226_longer_action_slack_message_format.py b/posthog/migrations/0226_longer_action_slack_message_format.py
index 8f6a1968f2492..b6d0d5d448ecf 100644
--- a/posthog/migrations/0226_longer_action_slack_message_format.py
+++ b/posthog/migrations/0226_longer_action_slack_message_format.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0225_insight_viewed"),
]
diff --git a/posthog/migrations/0227_add_dashboard_tiles.py b/posthog/migrations/0227_add_dashboard_tiles.py
index 88e2258ca455f..5ced1caad6326 100644
--- a/posthog/migrations/0227_add_dashboard_tiles.py
+++ b/posthog/migrations/0227_add_dashboard_tiles.py
@@ -58,7 +58,6 @@ def reverse(apps, _) -> None:
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0226_longer_action_slack_message_format"),
]
@@ -67,9 +66,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="DashboardTile",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
- ("dashboard", models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.dashboard")),
- ("insight", models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.insight")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "dashboard",
+ models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.dashboard"),
+ ),
+ (
+ "insight",
+ models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.insight"),
+ ),
("layouts", models.JSONField(default=dict)),
("color", models.CharField(blank=True, max_length=400, null=True)),
],
@@ -78,7 +91,10 @@ class Migration(migrations.Migration):
model_name="dashboard",
name="insights",
field=models.ManyToManyField(
- blank=True, related_name="dashboards", through="posthog.DashboardTile", to="posthog.Insight"
+ blank=True,
+ related_name="dashboards",
+ through="posthog.DashboardTile",
+ to="posthog.Insight",
),
),
migrations.RunPython(migrate_dashboard_insight_relations, reverse, elidable=True),
diff --git a/posthog/migrations/0228_fix_tile_layouts.py b/posthog/migrations/0228_fix_tile_layouts.py
index bea976781fe8b..f819390449f04 100644
--- a/posthog/migrations/0228_fix_tile_layouts.py
+++ b/posthog/migrations/0228_fix_tile_layouts.py
@@ -42,7 +42,6 @@ def migrate_dashboard_insight_relations(apps, _) -> None:
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0227_add_dashboard_tiles"),
]
diff --git a/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py b/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py
index fdb078043a99a..7bbd818dbbafa 100644
--- a/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py
+++ b/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0228_fix_tile_layouts"),
]
diff --git a/posthog/migrations/0230_cohort_filters.py b/posthog/migrations/0230_cohort_filters.py
index 96dff620cea4c..c695d413372eb 100644
--- a/posthog/migrations/0230_cohort_filters.py
+++ b/posthog/migrations/0230_cohort_filters.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0229_add_filters_hash_to_dashboard_table"),
]
diff --git a/posthog/migrations/0231_add_refreshing_data_to_tiles.py b/posthog/migrations/0231_add_refreshing_data_to_tiles.py
index 90e4171ba8f65..18a31765beead 100644
--- a/posthog/migrations/0231_add_refreshing_data_to_tiles.py
+++ b/posthog/migrations/0231_add_refreshing_data_to_tiles.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0230_cohort_filters"),
]
diff --git a/posthog/migrations/0232_add_team_person_display_name_properties.py b/posthog/migrations/0232_add_team_person_display_name_properties.py
index 1508ea860d7ed..56402418d2a46 100644
--- a/posthog/migrations/0232_add_team_person_display_name_properties.py
+++ b/posthog/migrations/0232_add_team_person_display_name_properties.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0231_add_refreshing_data_to_tiles"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="team",
name="person_display_name_properties",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=400), blank=True, null=True, size=None
+ base_field=models.CharField(max_length=400),
+ blank=True,
+ null=True,
+ size=None,
),
),
]
diff --git a/posthog/migrations/0233_plugin_source_file.py b/posthog/migrations/0233_plugin_source_file.py
index 2686bb99ca8bc..0e5a13827425a 100644
--- a/posthog/migrations/0233_plugin_source_file.py
+++ b/posthog/migrations/0233_plugin_source_file.py
@@ -33,7 +33,6 @@ def migrate_plugin_source(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0232_add_team_person_display_name_properties"),
]
@@ -45,12 +44,18 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("filename", models.CharField(max_length=200)),
("source", models.TextField(blank=True, null=True)),
- ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")),
+ (
+ "plugin",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"),
+ ),
],
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0234_create_plugin_jsons.py b/posthog/migrations/0234_create_plugin_jsons.py
index 6d6e3420efbb5..36b12068613e3 100644
--- a/posthog/migrations/0234_create_plugin_jsons.py
+++ b/posthog/migrations/0234_create_plugin_jsons.py
@@ -27,7 +27,6 @@ def migrate_plugin_source(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0233_plugin_source_file"),
]
diff --git a/posthog/migrations/0235_plugin_source_transpilation.py b/posthog/migrations/0235_plugin_source_transpilation.py
index 767db8bd4fd6b..a657113cf57ce 100644
--- a/posthog/migrations/0235_plugin_source_transpilation.py
+++ b/posthog/migrations/0235_plugin_source_transpilation.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0234_create_plugin_jsons"),
]
@@ -20,7 +19,11 @@ class Migration(migrations.Migration):
name="status",
field=models.CharField(
null=True,
- choices=[("LOCKED", "locked"), ("TRANSPILED", "transpiled"), ("ERROR", "error")],
+ choices=[
+ ("LOCKED", "locked"),
+ ("TRANSPILED", "transpiled"),
+ ("ERROR", "error"),
+ ],
max_length=20,
),
),
diff --git a/posthog/migrations/0236_add_instance_setting_model.py b/posthog/migrations/0236_add_instance_setting_model.py
index b42aa22cba4da..f41fae6f6f5fd 100644
--- a/posthog/migrations/0236_add_instance_setting_model.py
+++ b/posthog/migrations/0236_add_instance_setting_model.py
@@ -33,7 +33,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="InstanceSetting",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=128)),
("raw_value", models.CharField(blank=True, max_length=1024)),
],
diff --git a/posthog/migrations/0237_remove_timezone_from_teams.py b/posthog/migrations/0237_remove_timezone_from_teams.py
index 4bd8f99cf24c4..e4ff58e555d6d 100644
--- a/posthog/migrations/0237_remove_timezone_from_teams.py
+++ b/posthog/migrations/0237_remove_timezone_from_teams.py
@@ -8,7 +8,6 @@ def reset_team_timezone_to_UTC(apps, _) -> None:
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0236_add_instance_setting_model"),
]
diff --git a/posthog/migrations/0238_exportedasset.py b/posthog/migrations/0238_exportedasset.py
index 92eef7b7faefa..f1ace97313135 100644
--- a/posthog/migrations/0238_exportedasset.py
+++ b/posthog/migrations/0238_exportedasset.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0237_remove_timezone_from_teams"),
]
@@ -16,7 +15,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="ExportedAsset",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
(
"export_format",
models.CharField(
@@ -41,13 +48,24 @@ class Migration(migrations.Migration):
),
(
"dashboard",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
(
"insight",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
]
diff --git a/posthog/migrations/0239_delete_postgres_pluginlogentry.py b/posthog/migrations/0239_delete_postgres_pluginlogentry.py
index 34dc7a34ca8c8..4161c47e28212 100644
--- a/posthog/migrations/0239_delete_postgres_pluginlogentry.py
+++ b/posthog/migrations/0239_delete_postgres_pluginlogentry.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0238_exportedasset"),
]
diff --git a/posthog/migrations/0240_organizationinvite_message.py b/posthog/migrations/0240_organizationinvite_message.py
index 17cf1a9c22a06..cefc8aa46b5f9 100644
--- a/posthog/migrations/0240_organizationinvite_message.py
+++ b/posthog/migrations/0240_organizationinvite_message.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0239_delete_postgres_pluginlogentry"),
]
diff --git a/posthog/migrations/0241_subscription.py b/posthog/migrations/0241_subscription.py
index 5c85060da6553..8d00dbef43f2d 100644
--- a/posthog/migrations/0241_subscription.py
+++ b/posthog/migrations/0241_subscription.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0240_organizationinvite_message"),
]
@@ -16,9 +15,20 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Subscription",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("title", models.CharField(blank=True, max_length=100, null=True)),
- ("target_type", models.CharField(choices=[("email", "Email")], max_length=10)),
+ (
+ "target_type",
+ models.CharField(choices=[("email", "Email")], max_length=10),
+ ),
("target_value", models.TextField()),
(
"frequency",
@@ -64,18 +74,32 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
"dashboard",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
(
"insight",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
]
diff --git a/posthog/migrations/0242_team_live_events_columns.py b/posthog/migrations/0242_team_live_events_columns.py
index 8dee3fe2fd900..e29c482738406 100644
--- a/posthog/migrations/0242_team_live_events_columns.py
+++ b/posthog/migrations/0242_team_live_events_columns.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0241_subscription"),
]
diff --git a/posthog/migrations/0243_unpack_plugin_source_files.py b/posthog/migrations/0243_unpack_plugin_source_files.py
index d0e10ef482302..58aac54e753a9 100644
--- a/posthog/migrations/0243_unpack_plugin_source_files.py
+++ b/posthog/migrations/0243_unpack_plugin_source_files.py
@@ -56,7 +56,9 @@ def sync_from_plugin_archive(plugin):
)
else:
logger.debug(
- "Migration 0243 - extracted and saved code of plugin.", plugin=plugin.name, plugin_id=plugin.id
+ "Migration 0243 - extracted and saved code of plugin.",
+ plugin=plugin.name,
+ plugin_id=plugin.id,
)
logger.info("Migration 0243 - finished")
@@ -70,7 +72,6 @@ def reverse_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0242_team_live_events_columns"),
]
diff --git a/posthog/migrations/0244_drop_should_update_person_prop.py b/posthog/migrations/0244_drop_should_update_person_prop.py
index 40dc0f451e916..210e46ce4d36d 100644
--- a/posthog/migrations/0244_drop_should_update_person_prop.py
+++ b/posthog/migrations/0244_drop_should_update_person_prop.py
@@ -2,7 +2,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0243_unpack_plugin_source_files"),
]
diff --git a/posthog/migrations/0245_silence_deprecated_tags_warnings.py b/posthog/migrations/0245_silence_deprecated_tags_warnings.py
index f39278900d75c..56c2a042db092 100644
--- a/posthog/migrations/0245_silence_deprecated_tags_warnings.py
+++ b/posthog/migrations/0245_silence_deprecated_tags_warnings.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0244_drop_should_update_person_prop"),
]
diff --git a/posthog/migrations/0246_integrations.py b/posthog/migrations/0246_integrations.py
index d80dd3e6040d7..9bc1f0cb4b6eb 100644
--- a/posthog/migrations/0246_integrations.py
+++ b/posthog/migrations/0246_integrations.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0245_silence_deprecated_tags_warnings"),
]
@@ -16,13 +15,26 @@ class Migration(migrations.Migration):
model_name="subscription",
name="target_type",
field=models.CharField(
- choices=[("email", "Email"), ("slack", "Slack"), ("webhook", "Webhook")], max_length=10
+ choices=[
+ ("email", "Email"),
+ ("slack", "Slack"),
+ ("webhook", "Webhook"),
+ ],
+ max_length=10,
),
),
migrations.CreateModel(
name="Integration",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("kind", models.CharField(choices=[("slack", "Slack")], max_length=10)),
("config", models.JSONField(default=dict)),
("sensitive_config", models.JSONField(default=dict)),
@@ -31,10 +43,16 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
]
diff --git a/posthog/migrations/0247_feature_flags_experience_continuity.py b/posthog/migrations/0247_feature_flags_experience_continuity.py
index 31a5c471c1fdb..f23365acf60fe 100644
--- a/posthog/migrations/0247_feature_flags_experience_continuity.py
+++ b/posthog/migrations/0247_feature_flags_experience_continuity.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0246_integrations"),
]
@@ -19,17 +18,32 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="FeatureFlagHashKeyOverride",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("feature_flag_key", models.CharField(max_length=400)),
("hash_key", models.CharField(max_length=400)),
- ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "person",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
model_name="featureflaghashkeyoverride",
constraint=models.UniqueConstraint(
- fields=("team", "person", "feature_flag_key"), name="Unique hash_key for a user/team/feature_flag combo"
+ fields=("team", "person", "feature_flag_key"),
+ name="Unique hash_key for a user/team/feature_flag combo",
),
),
]
diff --git a/posthog/migrations/0248_add_context_for_csv_exports.py b/posthog/migrations/0248_add_context_for_csv_exports.py
index ad5891165b54e..9b86fee68a8f2 100644
--- a/posthog/migrations/0248_add_context_for_csv_exports.py
+++ b/posthog/migrations/0248_add_context_for_csv_exports.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0247_feature_flags_experience_continuity"),
]
diff --git a/posthog/migrations/0249_add_sharingconfiguration.py b/posthog/migrations/0249_add_sharingconfiguration.py
index 8ee425789c583..ad3f6ccf209f3 100644
--- a/posthog/migrations/0249_add_sharingconfiguration.py
+++ b/posthog/migrations/0249_add_sharingconfiguration.py
@@ -29,7 +29,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0248_add_context_for_csv_exports"),
]
@@ -38,7 +37,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SharingConfiguration",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True)),
("enabled", models.BooleanField(default=False)),
(
@@ -53,13 +60,24 @@ class Migration(migrations.Migration):
),
(
"dashboard",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
),
(
"insight",
- models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"),
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.insight",
+ ),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
],
),
migrations.RunPython(create_sharing_configurations, reverse, elidable=True),
diff --git a/posthog/migrations/0250_exportedasset_created_by.py b/posthog/migrations/0250_exportedasset_created_by.py
index e0f21ea5a447e..72a9984ab53fd 100644
--- a/posthog/migrations/0250_exportedasset_created_by.py
+++ b/posthog/migrations/0250_exportedasset_created_by.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0249_add_sharingconfiguration"),
]
@@ -16,7 +15,10 @@ class Migration(migrations.Migration):
model_name="exportedasset",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
]
diff --git a/posthog/migrations/0251_event_buffer.py b/posthog/migrations/0251_event_buffer.py
index af6b8f3b105b6..c0323f3057ce8 100644
--- a/posthog/migrations/0251_event_buffer.py
+++ b/posthog/migrations/0251_event_buffer.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0250_exportedasset_created_by"),
]
@@ -13,7 +12,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="EventBuffer",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("event", models.JSONField(null=True, blank=True)),
("process_at", models.DateTimeField()),
("locked", models.BooleanField()),
diff --git a/posthog/migrations/0252_reset_insight_refreshing_status.py b/posthog/migrations/0252_reset_insight_refreshing_status.py
index 140128632f575..abfef85709b30 100644
--- a/posthog/migrations/0252_reset_insight_refreshing_status.py
+++ b/posthog/migrations/0252_reset_insight_refreshing_status.py
@@ -16,7 +16,6 @@ def reverse(_apps, _schema_editor) -> None:
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0251_event_buffer"),
]
diff --git a/posthog/migrations/0253_add_async_migration_parameters.py b/posthog/migrations/0253_add_async_migration_parameters.py
index a81e43700dee4..d96735c67c483 100644
--- a/posthog/migrations/0253_add_async_migration_parameters.py
+++ b/posthog/migrations/0253_add_async_migration_parameters.py
@@ -10,7 +10,6 @@ def describe(self):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0252_reset_insight_refreshing_status"),
]
diff --git a/posthog/migrations/0254_prompt_sequence_state.py b/posthog/migrations/0254_prompt_sequence_state.py
index 94b920a964a8a..e10e4a2530b4d 100644
--- a/posthog/migrations/0254_prompt_sequence_state.py
+++ b/posthog/migrations/0254_prompt_sequence_state.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0253_add_async_migration_parameters"),
]
@@ -15,20 +14,38 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PromptSequenceState",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=400)),
- ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_updated_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("step", models.IntegerField(default=0)),
("completed", models.BooleanField(default=False)),
("dismissed", models.BooleanField(default=False)),
- ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "person",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"),
+ ),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
model_name="promptsequencestate",
constraint=models.UniqueConstraint(
- fields=("team", "person", "key"), name="unique sequence key for person for team"
+ fields=("team", "person", "key"),
+ name="unique sequence key for person for team",
),
),
]
diff --git a/posthog/migrations/0255_user_prompt_sequence_state.py b/posthog/migrations/0255_user_prompt_sequence_state.py
index 9c1d2ec48f7fe..ebfe3dc5e6363 100644
--- a/posthog/migrations/0255_user_prompt_sequence_state.py
+++ b/posthog/migrations/0255_user_prompt_sequence_state.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0254_prompt_sequence_state"),
]
@@ -16,13 +15,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="UserPromptSequenceState",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=400)),
- ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_updated_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("step", models.IntegerField(default=0)),
("completed", models.BooleanField(default=False)),
("dismissed", models.BooleanField(default=False)),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0256_add_async_deletion_model.py b/posthog/migrations/0256_add_async_deletion_model.py
index dcc035f70bc08..636fe554b15d5 100644
--- a/posthog/migrations/0256_add_async_deletion_model.py
+++ b/posthog/migrations/0256_add_async_deletion_model.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0255_user_prompt_sequence_state"),
]
@@ -24,10 +23,15 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddIndex(
@@ -45,7 +49,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="asyncdeletion",
constraint=models.UniqueConstraint(
- fields=("deletion_type", "key", "group_type_index"), name="unique deletion for groups"
+ fields=("deletion_type", "key", "group_type_index"),
+ name="unique deletion for groups",
),
),
]
diff --git a/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py b/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py
index ccf6d725f15a3..b513fb4713dd5 100644
--- a/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py
+++ b/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0256_add_async_deletion_model"),
]
diff --git a/posthog/migrations/0258_team_recording_domains.py b/posthog/migrations/0258_team_recording_domains.py
index 510cf5500bb26..9ae7931f1048a 100644
--- a/posthog/migrations/0258_team_recording_domains.py
+++ b/posthog/migrations/0258_team_recording_domains.py
@@ -3,7 +3,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0257_add_default_checked_for_test_filters_on_team"),
]
@@ -13,7 +12,10 @@ class Migration(migrations.Migration):
model_name="team",
name="recording_domains",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=200, null=True), blank=True, null=True, size=None
+ base_field=models.CharField(max_length=200, null=True),
+ blank=True,
+ null=True,
+ size=None,
),
),
]
diff --git a/posthog/migrations/0259_backfill_team_recording_domains.py b/posthog/migrations/0259_backfill_team_recording_domains.py
index 8589dcc83de02..1f0dcba4f08f8 100644
--- a/posthog/migrations/0259_backfill_team_recording_domains.py
+++ b/posthog/migrations/0259_backfill_team_recording_domains.py
@@ -45,7 +45,6 @@ def reverse(apps, _):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0258_team_recording_domains"),
]
diff --git a/posthog/migrations/0260_pak_v2.py b/posthog/migrations/0260_pak_v2.py
index abd6b490b303c..02fbd842ed39a 100644
--- a/posthog/migrations/0260_pak_v2.py
+++ b/posthog/migrations/0260_pak_v2.py
@@ -20,7 +20,6 @@ def hash_all_keys(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0259_backfill_team_recording_domains"),
]
diff --git a/posthog/migrations/0261_team_capture_console_log_opt_in.py b/posthog/migrations/0261_team_capture_console_log_opt_in.py
index 9bca3ca244582..92f202606cea9 100644
--- a/posthog/migrations/0261_team_capture_console_log_opt_in.py
+++ b/posthog/migrations/0261_team_capture_console_log_opt_in.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0260_pak_v2"),
]
diff --git a/posthog/migrations/0262_track_viewed_notifications.py b/posthog/migrations/0262_track_viewed_notifications.py
index 441d6472514f2..a333ce35688ea 100644
--- a/posthog/migrations/0262_track_viewed_notifications.py
+++ b/posthog/migrations/0262_track_viewed_notifications.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0261_team_capture_console_log_opt_in"),
]
@@ -20,14 +19,19 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("last_viewed_activity_date", models.DateTimeField(default=None)),
(
"user",
models.ForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
],
diff --git a/posthog/migrations/0263_plugin_config_web_token.py b/posthog/migrations/0263_plugin_config_web_token.py
index 1600ef55c4f5d..79aabbaf0d3c9 100644
--- a/posthog/migrations/0263_plugin_config_web_token.py
+++ b/posthog/migrations/0263_plugin_config_web_token.py
@@ -14,7 +14,6 @@ def forwards_func(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0262_track_viewed_notifications"),
]
diff --git a/posthog/migrations/0264_user_partial_notification_settings.py b/posthog/migrations/0264_user_partial_notification_settings.py
index 53984cb854b21..c8f8b9c727fd8 100644
--- a/posthog/migrations/0264_user_partial_notification_settings.py
+++ b/posthog/migrations/0264_user_partial_notification_settings.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0263_plugin_config_web_token"),
]
diff --git a/posthog/migrations/0265_related_tiles.py b/posthog/migrations/0265_related_tiles.py
index a161e58ec4d6e..55a5958054c00 100644
--- a/posthog/migrations/0265_related_tiles.py
+++ b/posthog/migrations/0265_related_tiles.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0264_user_partial_notification_settings"),
]
@@ -16,7 +15,9 @@ class Migration(migrations.Migration):
model_name="dashboardtile",
name="dashboard",
field=models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="tiles", to="posthog.dashboard"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="tiles",
+ to="posthog.dashboard",
),
),
]
diff --git a/posthog/migrations/0266_add_is_system_field_to_activity_log.py b/posthog/migrations/0266_add_is_system_field_to_activity_log.py
index 539307c29a4db..b6716d09b3c55 100644
--- a/posthog/migrations/0266_add_is_system_field_to_activity_log.py
+++ b/posthog/migrations/0266_add_is_system_field_to_activity_log.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0265_related_tiles"),
]
diff --git a/posthog/migrations/0267_add_text_tiles.py b/posthog/migrations/0267_add_text_tiles.py
index 808f7020e6500..b33ffea3d4905 100644
--- a/posthog/migrations/0267_add_text_tiles.py
+++ b/posthog/migrations/0267_add_text_tiles.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0266_add_is_system_field_to_activity_log"),
]
@@ -16,9 +15,20 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Text",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("body", models.CharField(blank=True, max_length=4000, null=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
],
),
# allow null and add related name to the field
@@ -36,7 +46,10 @@ class Migration(migrations.Migration):
model_name="text",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
@@ -78,7 +91,9 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="dashboardtile",
constraint=models.UniqueConstraint(
- condition=models.Q(("text__isnull", False)), fields=("dashboard", "text"), name="unique_dashboard_text"
+ condition=models.Q(("text__isnull", False)),
+ fields=("dashboard", "text"),
+ name="unique_dashboard_text",
),
),
# can't have both insight and text on a tile
diff --git a/posthog/migrations/0268_plugin_source_file_updated_at.py b/posthog/migrations/0268_plugin_source_file_updated_at.py
index 29bb3d78e98c0..c9e23fd601222 100644
--- a/posthog/migrations/0268_plugin_source_file_updated_at.py
+++ b/posthog/migrations/0268_plugin_source_file_updated_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0267_add_text_tiles"),
]
diff --git a/posthog/migrations/0269_soft_delete_tiles.py b/posthog/migrations/0269_soft_delete_tiles.py
index fd12b437a9b90..6c02e4aba196a 100644
--- a/posthog/migrations/0269_soft_delete_tiles.py
+++ b/posthog/migrations/0269_soft_delete_tiles.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0268_plugin_source_file_updated_at"),
]
diff --git a/posthog/migrations/0270_add_uploaded_media.py b/posthog/migrations/0270_add_uploaded_media.py
index eac6c04e6c5d5..4b202c85757e8 100644
--- a/posthog/migrations/0270_add_uploaded_media.py
+++ b/posthog/migrations/0270_add_uploaded_media.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0269_soft_delete_tiles"),
]
@@ -20,20 +19,35 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("media_location", models.TextField(blank=True, max_length=1000, null=True)),
- ("content_type", models.TextField(blank=True, max_length=100, null=True)),
+ (
+ "media_location",
+ models.TextField(blank=True, max_length=1000, null=True),
+ ),
+ (
+ "content_type",
+ models.TextField(blank=True, max_length=100, null=True),
+ ),
("file_name", models.TextField(blank=True, max_length=1000, null=True)),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"abstract": False,
diff --git a/posthog/migrations/0271_delete_promptsequencestate.py b/posthog/migrations/0271_delete_promptsequencestate.py
index cd1df66344aa7..479482c1981b5 100644
--- a/posthog/migrations/0271_delete_promptsequencestate.py
+++ b/posthog/migrations/0271_delete_promptsequencestate.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0270_add_uploaded_media"),
]
diff --git a/posthog/migrations/0272_alter_organization_plugins_access_level.py b/posthog/migrations/0272_alter_organization_plugins_access_level.py
index 13f56d6f21b90..5f7fee6db4de3 100644
--- a/posthog/migrations/0272_alter_organization_plugins_access_level.py
+++ b/posthog/migrations/0272_alter_organization_plugins_access_level.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0271_delete_promptsequencestate"),
]
@@ -15,7 +14,8 @@ class Migration(migrations.Migration):
model_name="organization",
name="plugins_access_level",
field=models.PositiveSmallIntegerField(
- choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")], default=3
+ choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")],
+ default=3,
),
),
]
diff --git a/posthog/migrations/0273_mark_inactive_exports_as_finished.py b/posthog/migrations/0273_mark_inactive_exports_as_finished.py
index 324a6fdcd2683..fcf024f5e6281 100644
--- a/posthog/migrations/0273_mark_inactive_exports_as_finished.py
+++ b/posthog/migrations/0273_mark_inactive_exports_as_finished.py
@@ -32,7 +32,12 @@ def should_verify_if_ongoing(start_entry, finished_exports):
else:
finished_exports.add(key(entry))
- start_entries = list(filter(lambda entry: should_verify_if_ongoing(entry, finished_exports), start_entries))
+ start_entries = list(
+ filter(
+ lambda entry: should_verify_if_ongoing(entry, finished_exports),
+ start_entries,
+ )
+ )
for entry in start_entries:
expected_running_job_id = entry.detail["trigger"]["job_id"]
diff --git a/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py b/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py
index ee0b095199239..683937de53ac8 100644
--- a/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py
+++ b/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py
@@ -2,7 +2,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0273_mark_inactive_exports_as_finished"),
]
diff --git a/posthog/migrations/0275_feature_flag_rollback_fields.py b/posthog/migrations/0275_feature_flag_rollback_fields.py
index 6e938205fcad4..9078a175497a9 100644
--- a/posthog/migrations/0275_feature_flag_rollback_fields.py
+++ b/posthog/migrations/0275_feature_flag_rollback_fields.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0274_add_plugin_icon_and_rewrite_urls"),
]
diff --git a/posthog/migrations/0276_organization_usage.py b/posthog/migrations/0276_organization_usage.py
index 1aa80ff6c5cf1..0e46fb7f50e7f 100644
--- a/posthog/migrations/0276_organization_usage.py
+++ b/posthog/migrations/0276_organization_usage.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0275_feature_flag_rollback_fields"),
]
diff --git a/posthog/migrations/0277_recording_playlist_model.py b/posthog/migrations/0277_recording_playlist_model.py
index 137e9c0c75727..3a7b1ea2fd68c 100644
--- a/posthog/migrations/0277_recording_playlist_model.py
+++ b/posthog/migrations/0277_recording_playlist_model.py
@@ -9,7 +9,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0276_organization_usage"),
]
@@ -18,20 +17,44 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingPlaylist",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
- ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "short_id",
+ models.CharField(
+ blank=True,
+ default=posthog.utils.generate_short_id,
+ max_length=12,
+ ),
+ ),
("name", models.CharField(blank=True, max_length=400, null=True)),
- ("derived_name", models.CharField(blank=True, max_length=400, null=True)),
+ (
+ "derived_name",
+ models.CharField(blank=True, max_length=400, null=True),
+ ),
("description", models.TextField(blank=True)),
("pinned", models.BooleanField(default=False)),
("deleted", models.BooleanField(default=False)),
("filters", models.JSONField(default=dict)),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
@@ -44,7 +67,10 @@ class Migration(migrations.Migration):
to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"unique_together": {("team", "short_id")},
diff --git a/posthog/migrations/0278_organization_customer_id.py b/posthog/migrations/0278_organization_customer_id.py
index bebede0dcb936..76e65bf416d02 100644
--- a/posthog/migrations/0278_organization_customer_id.py
+++ b/posthog/migrations/0278_organization_customer_id.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0277_recording_playlist_model"),
]
diff --git a/posthog/migrations/0279_recording_playlist_item_model.py b/posthog/migrations/0279_recording_playlist_item_model.py
index cc8a359c9cca8..6bf9e98877bcd 100644
--- a/posthog/migrations/0279_recording_playlist_item_model.py
+++ b/posthog/migrations/0279_recording_playlist_item_model.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0278_organization_customer_id"),
]
@@ -19,7 +18,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="SessionRecordingPlaylistItem",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("session_id", models.CharField(max_length=200)),
("created_at", models.DateTimeField(auto_now_add=True)),
("deleted", models.BooleanField(blank=True, null=True)),
diff --git a/posthog/migrations/0280_fix_async_deletion_team.py b/posthog/migrations/0280_fix_async_deletion_team.py
index 98c1b3a81b9a9..9b218d0aad5eb 100644
--- a/posthog/migrations/0280_fix_async_deletion_team.py
+++ b/posthog/migrations/0280_fix_async_deletion_team.py
@@ -10,7 +10,6 @@ def describe(self):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0279_recording_playlist_item_model"),
]
diff --git a/posthog/migrations/0281_create_insight_caching_state_model.py b/posthog/migrations/0281_create_insight_caching_state_model.py
index d8fb19f00a79c..9d176840bd954 100644
--- a/posthog/migrations/0281_create_insight_caching_state_model.py
+++ b/posthog/migrations/0281_create_insight_caching_state_model.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0280_fix_async_deletion_team"),
]
@@ -19,7 +18,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("cache_key", models.CharField(max_length=400)),
@@ -41,10 +43,15 @@ class Migration(migrations.Migration):
(
"insight",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, related_name="caching_state", to="posthog.insight"
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="caching_state",
+ to="posthog.insight",
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddIndex(
diff --git a/posthog/migrations/0282_fix_insight_caching_state_model.py b/posthog/migrations/0282_fix_insight_caching_state_model.py
index c8f06becdcf7c..68606d0329dce 100644
--- a/posthog/migrations/0282_fix_insight_caching_state_model.py
+++ b/posthog/migrations/0282_fix_insight_caching_state_model.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0281_create_insight_caching_state_model"),
]
@@ -35,14 +34,19 @@ class Migration(migrations.Migration):
model_name="insightcachingstate",
name="dashboard_tile",
field=models.ForeignKey(
- null=True, on_delete=models.deletion.CASCADE, related_name="caching_states", to="posthog.dashboardtile"
+ null=True,
+ on_delete=models.deletion.CASCADE,
+ related_name="caching_states",
+ to="posthog.dashboardtile",
),
),
migrations.AlterField(
model_name="insightcachingstate",
name="insight",
field=models.ForeignKey(
- on_delete=models.deletion.CASCADE, related_name="caching_states", to="posthog.insight"
+ on_delete=models.deletion.CASCADE,
+ related_name="caching_states",
+ to="posthog.insight",
),
),
]
diff --git a/posthog/migrations/0283_prompt_sequence_model.py b/posthog/migrations/0283_prompt_sequence_model.py
index 49378f62d32dd..16d29c076e483 100644
--- a/posthog/migrations/0283_prompt_sequence_model.py
+++ b/posthog/migrations/0283_prompt_sequence_model.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0282_fix_insight_caching_state_model"),
]
@@ -17,21 +16,40 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="Prompt",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("step", models.IntegerField()),
("type", models.CharField(max_length=200)),
("title", models.CharField(max_length=200)),
("text", models.CharField(max_length=1000)),
("placement", models.CharField(default="top", max_length=200)),
("buttons", models.JSONField()),
- ("reference", models.CharField(default=None, max_length=200, null=True)),
+ (
+ "reference",
+ models.CharField(default=None, max_length=200, null=True),
+ ),
("icon", models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name="PromptSequence",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("key", models.CharField(max_length=200)),
("type", models.CharField(max_length=200)),
(
@@ -45,23 +63,46 @@ class Migration(migrations.Migration):
("status", models.CharField(max_length=200)),
("requires_opt_in", models.BooleanField(default=False)),
("autorun", models.BooleanField(default=True)),
- ("must_have_completed", models.ManyToManyField(blank=True, to="posthog.PromptSequence")),
+ (
+ "must_have_completed",
+ models.ManyToManyField(blank=True, to="posthog.PromptSequence"),
+ ),
("prompts", models.ManyToManyField(to="posthog.Prompt")),
],
),
migrations.CreateModel(
name="UserPromptState",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
- ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "last_updated_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
("step", models.IntegerField(default=None, null=True)),
("completed", models.BooleanField(default=False)),
("dismissed", models.BooleanField(default=False)),
(
"sequence",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.promptsequence"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.promptsequence",
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
),
- ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.DeleteModel(
diff --git a/posthog/migrations/0285_capture_performance_opt_in.py b/posthog/migrations/0285_capture_performance_opt_in.py
index ba1673cef18f2..9f478625b7be2 100644
--- a/posthog/migrations/0285_capture_performance_opt_in.py
+++ b/posthog/migrations/0285_capture_performance_opt_in.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0284_improved_caching_state_idx"),
]
diff --git a/posthog/migrations/0287_add_session_recording_model.py b/posthog/migrations/0287_add_session_recording_model.py
index 28bcd68907815..ca2ecb40a642c 100644
--- a/posthog/migrations/0287_add_session_recording_model.py
+++ b/posthog/migrations/0287_add_session_recording_model.py
@@ -24,7 +24,10 @@ def migrate_playlist_item_recording_relations(apps, _) -> None:
Recording.objects.bulk_create(
[
- Recording(session_id=playlist_item_object.session_id, team=playlist_item_object.playlist.team)
+ Recording(
+ session_id=playlist_item_object.session_id,
+ team=playlist_item_object.playlist.team,
+ )
for playlist_item_object in playlist_items
],
ignore_conflicts=True,
@@ -44,7 +47,6 @@ def reverse(apps, _) -> None:
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0286_index_insightcachingstate_lookup"),
]
@@ -72,12 +74,18 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("session_id", models.CharField(max_length=200, unique=True)),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"unique_together": {("team", "session_id")},
diff --git a/posthog/migrations/0288_add_session_recording_persistence.py b/posthog/migrations/0288_add_session_recording_persistence.py
index 3bf5226e91025..785346ecc2c10 100644
--- a/posthog/migrations/0288_add_session_recording_persistence.py
+++ b/posthog/migrations/0288_add_session_recording_persistence.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0287_add_session_recording_model"),
]
diff --git a/posthog/migrations/0289_add_tags_to_feature_flags.py b/posthog/migrations/0289_add_tags_to_feature_flags.py
index debf52c006c03..913cbf6c99a36 100644
--- a/posthog/migrations/0289_add_tags_to_feature_flags.py
+++ b/posthog/migrations/0289_add_tags_to_feature_flags.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0288_add_session_recording_persistence"),
]
@@ -29,7 +28,15 @@ class Migration(migrations.Migration):
migrations.AlterUniqueTogether(
name="taggeditem",
unique_together={
- ("tag", "dashboard", "insight", "event_definition", "property_definition", "action", "feature_flag")
+ (
+ "tag",
+ "dashboard",
+ "insight",
+ "event_definition",
+ "property_definition",
+ "action",
+ "feature_flag",
+ )
},
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0290_add_dashboard_templates.py b/posthog/migrations/0290_add_dashboard_templates.py
index 65f8835a89cdd..736a495c8747e 100644
--- a/posthog/migrations/0290_add_dashboard_templates.py
+++ b/posthog/migrations/0290_add_dashboard_templates.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0289_add_tags_to_feature_flags"),
]
@@ -20,7 +19,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("template_name", models.CharField(max_length=400, null=True)),
@@ -30,11 +32,20 @@ class Migration(migrations.Migration):
(
"tags",
django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=255), default=list, size=None
+ base_field=models.CharField(max_length=255),
+ default=list,
+ size=None,
),
),
("github_url", models.CharField(max_length=8201, null=True)),
- ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
+ ),
+ ),
],
),
migrations.AddConstraint(
diff --git a/posthog/migrations/0291_create_person_override_model.py b/posthog/migrations/0291_create_person_override_model.py
index 59b7b9e5268fb..81c4191a25be3 100644
--- a/posthog/migrations/0291_create_person_override_model.py
+++ b/posthog/migrations/0291_create_person_override_model.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0290_add_dashboard_templates"),
]
@@ -14,18 +13,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PersonOverride",
fields=[
- ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("old_person_id", models.UUIDField(db_index=True)),
("override_person_id", models.UUIDField(db_index=True)),
("oldest_event", models.DateTimeField()),
("version", models.BigIntegerField(blank=True, null=True)),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
model_name="personoverride",
constraint=models.UniqueConstraint(
- fields=("team", "old_person_id"), name="unique override per old_person_id"
+ fields=("team", "old_person_id"),
+ name="unique override per old_person_id",
),
),
]
diff --git a/posthog/migrations/0292_property_definitions_persons_and_groups_support.py b/posthog/migrations/0292_property_definitions_persons_and_groups_support.py
index cde16d28f1840..d57a95ffa75de 100644
--- a/posthog/migrations/0292_property_definitions_persons_and_groups_support.py
+++ b/posthog/migrations/0292_property_definitions_persons_and_groups_support.py
@@ -35,7 +35,9 @@ class Migration(migrations.Migration):
model_name="propertydefinition",
constraint=models.CheckConstraint(
check=models.Q(
- models.Q(("type", 3), _negated=True), ("group_type_index__isnull", False), _connector="OR"
+ models.Q(("type", 3), _negated=True),
+ ("group_type_index__isnull", False),
+ _connector="OR",
),
name="group_type_index_set",
),
diff --git a/posthog/migrations/0293_property_definitions_drop_old_constraint.py b/posthog/migrations/0293_property_definitions_drop_old_constraint.py
index f1d623521c903..9da2dbdfbe217 100644
--- a/posthog/migrations/0293_property_definitions_drop_old_constraint.py
+++ b/posthog/migrations/0293_property_definitions_drop_old_constraint.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0292_property_definitions_persons_and_groups_support"),
]
diff --git a/posthog/migrations/0294_plugin_blank_fields.py b/posthog/migrations/0294_plugin_blank_fields.py
index 053fbb1d48ac0..7f519b9c021d0 100644
--- a/posthog/migrations/0294_plugin_blank_fields.py
+++ b/posthog/migrations/0294_plugin_blank_fields.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0293_property_definitions_drop_old_constraint"),
]
diff --git a/posthog/migrations/0295_plugin_allow_blank_config_schema.py b/posthog/migrations/0295_plugin_allow_blank_config_schema.py
index 8952f8a252fad..4c8de8d40ef26 100644
--- a/posthog/migrations/0295_plugin_allow_blank_config_schema.py
+++ b/posthog/migrations/0295_plugin_allow_blank_config_schema.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0294_plugin_blank_fields"),
]
diff --git a/posthog/migrations/0296_team_allow_blank_fields.py b/posthog/migrations/0296_team_allow_blank_fields.py
index f6c9065580709..9c593b68404ba 100644
--- a/posthog/migrations/0296_team_allow_blank_fields.py
+++ b/posthog/migrations/0296_team_allow_blank_fields.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0295_plugin_allow_blank_config_schema"),
]
diff --git a/posthog/migrations/0298_add_insight_queries.py b/posthog/migrations/0298_add_insight_queries.py
index 48d3475819d91..82c04e7388164 100644
--- a/posthog/migrations/0298_add_insight_queries.py
+++ b/posthog/migrations/0298_add_insight_queries.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0297_property_definitions_index_query"),
]
diff --git a/posthog/migrations/0299_set_templates_global.py b/posthog/migrations/0299_set_templates_global.py
index 4cdaf2f2c645f..7ca39b1814f35 100644
--- a/posthog/migrations/0299_set_templates_global.py
+++ b/posthog/migrations/0299_set_templates_global.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0298_add_insight_queries"),
]
diff --git a/posthog/migrations/0300_add_constraints_to_person_override.py b/posthog/migrations/0300_add_constraints_to_person_override.py
index 91716cd8cc292..1f54ee839514a 100644
--- a/posthog/migrations/0300_add_constraints_to_person_override.py
+++ b/posthog/migrations/0300_add_constraints_to_person_override.py
@@ -31,7 +31,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0299_set_templates_global"),
]
@@ -45,7 +44,11 @@ class Migration(migrations.Migration):
model_name="personoverride",
constraint=models.CheckConstraint(
check=models.Q(
- ("old_person_id__exact", django.db.models.expressions.F("override_person_id")), _negated=True
+ (
+ "old_person_id__exact",
+ django.db.models.expressions.F("override_person_id"),
+ ),
+ _negated=True,
),
name="old_person_id_different_from_override_person_id",
),
diff --git a/posthog/migrations/0301_organization_enforce_2fa.py b/posthog/migrations/0301_organization_enforce_2fa.py
index 21885de4fd954..43ae649c9d298 100644
--- a/posthog/migrations/0301_organization_enforce_2fa.py
+++ b/posthog/migrations/0301_organization_enforce_2fa.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0300_add_constraints_to_person_override"),
]
diff --git a/posthog/migrations/0302_add_user_pending_email_and_is_verified.py b/posthog/migrations/0302_add_user_pending_email_and_is_verified.py
index e318a48cce19b..1c779dc12df73 100644
--- a/posthog/migrations/0302_add_user_pending_email_and_is_verified.py
+++ b/posthog/migrations/0302_add_user_pending_email_and_is_verified.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0301_organization_enforce_2fa"),
]
@@ -19,7 +18,10 @@ class Migration(migrations.Migration):
model_name="user",
name="pending_email",
field=models.EmailField(
- blank=True, max_length=254, null=True, verbose_name="pending email address awaiting verification"
+ blank=True,
+ max_length=254,
+ null=True,
+ verbose_name="pending email address awaiting verification",
),
),
]
diff --git a/posthog/migrations/0303_team_session_recording_version.py b/posthog/migrations/0303_team_session_recording_version.py
index b0517f1506d38..29469557f3b5f 100644
--- a/posthog/migrations/0303_team_session_recording_version.py
+++ b/posthog/migrations/0303_team_session_recording_version.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0302_add_user_pending_email_and_is_verified"),
]
diff --git a/posthog/migrations/0304_store_dashboard_template_in_db.py b/posthog/migrations/0304_store_dashboard_template_in_db.py
index 6097f8761a29c..997ce8aab1bb8 100644
--- a/posthog/migrations/0304_store_dashboard_template_in_db.py
+++ b/posthog/migrations/0304_store_dashboard_template_in_db.py
@@ -13,7 +13,6 @@ def describe(self):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0303_team_session_recording_version"),
]
@@ -28,7 +27,10 @@ class Migration(migrations.Migration):
model_name="dashboardtemplate",
name="created_by",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
migrations.AddField(
@@ -66,7 +68,10 @@ class Migration(migrations.Migration):
model_name="dashboardtemplate",
name="tags",
field=django.contrib.postgres.fields.ArrayField(
- base_field=models.CharField(max_length=255), blank=True, null=True, size=None
+ base_field=models.CharField(max_length=255),
+ blank=True,
+ null=True,
+ size=None,
),
),
AlterFieldNullSafe(
diff --git a/posthog/migrations/0305_rework_person_overrides.py b/posthog/migrations/0305_rework_person_overrides.py
index 3afd6d4154b54..e5da39fce39b4 100644
--- a/posthog/migrations/0305_rework_person_overrides.py
+++ b/posthog/migrations/0305_rework_person_overrides.py
@@ -2,7 +2,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0304_store_dashboard_template_in_db"),
]
diff --git a/posthog/migrations/0306_featureflag_dashboard.py b/posthog/migrations/0306_featureflag_dashboard.py
index 87d6332e8f08d..b465eda4bee5e 100644
--- a/posthog/migrations/0306_featureflag_dashboard.py
+++ b/posthog/migrations/0306_featureflag_dashboard.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0305_rework_person_overrides"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="featureflag",
name="usage_dashboard",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
),
),
]
diff --git a/posthog/migrations/0307_pluginconfig_admin.py b/posthog/migrations/0307_pluginconfig_admin.py
index 2716382f4fc62..2a901a1492bb1 100644
--- a/posthog/migrations/0307_pluginconfig_admin.py
+++ b/posthog/migrations/0307_pluginconfig_admin.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0306_featureflag_dashboard"),
]
diff --git a/posthog/migrations/0308_add_indirect_person_override_constraints.py b/posthog/migrations/0308_add_indirect_person_override_constraints.py
index 757cc46f80126..78231ace0834e 100644
--- a/posthog/migrations/0308_add_indirect_person_override_constraints.py
+++ b/posthog/migrations/0308_add_indirect_person_override_constraints.py
@@ -20,7 +20,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0307_pluginconfig_admin"),
]
@@ -29,7 +28,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="PersonOverrideMapping",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("uuid", models.UUIDField()),
("team_id", models.BigIntegerField()),
],
@@ -73,14 +80,19 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="personoverride",
constraint=models.UniqueConstraint(
- fields=("team", "old_person_id"), name="unique override per old_person_id"
+ fields=("team", "old_person_id"),
+ name="unique override per old_person_id",
),
),
migrations.AddConstraint(
model_name="personoverride",
constraint=models.CheckConstraint(
check=models.Q(
- ("old_person_id__exact", django.db.models.expressions.F("override_person_id")), _negated=True
+ (
+ "old_person_id__exact",
+ django.db.models.expressions.F("override_person_id"),
+ ),
+ _negated=True,
),
name="old_person_id_different_from_override_person_id",
),
diff --git a/posthog/migrations/0309_team_autocapture_opt_out.py b/posthog/migrations/0309_team_autocapture_opt_out.py
index 5f77749d65eed..11b8e8a9cb0d7 100644
--- a/posthog/migrations/0309_team_autocapture_opt_out.py
+++ b/posthog/migrations/0309_team_autocapture_opt_out.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0308_add_indirect_person_override_constraints"),
]
diff --git a/posthog/migrations/0310_add_starter_dashboard_template.py b/posthog/migrations/0310_add_starter_dashboard_template.py
index 70957615b1975..2d1fc6972b517 100644
--- a/posthog/migrations/0310_add_starter_dashboard_template.py
+++ b/posthog/migrations/0310_add_starter_dashboard_template.py
@@ -148,7 +148,6 @@ def create_starter_template(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0309_team_autocapture_opt_out"),
]
diff --git a/posthog/migrations/0311_dashboard_template_scope.py b/posthog/migrations/0311_dashboard_template_scope.py
index 6843e09d68511..41e34afa83109 100644
--- a/posthog/migrations/0311_dashboard_template_scope.py
+++ b/posthog/migrations/0311_dashboard_template_scope.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0310_add_starter_dashboard_template"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="dashboardtemplate",
name="scope",
field=models.CharField(
- choices=[("team", "Only team"), ("global", "Global")], max_length=24, null=True, blank=True
+ choices=[("team", "Only team"), ("global", "Global")],
+ max_length=24,
+ null=True,
+ blank=True,
),
),
migrations.RunSQL(
diff --git a/posthog/migrations/0312_organization_available_product_features.py b/posthog/migrations/0312_organization_available_product_features.py
index c5b2eb170f9c4..2459cd9726c07 100644
--- a/posthog/migrations/0312_organization_available_product_features.py
+++ b/posthog/migrations/0312_organization_available_product_features.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0311_dashboard_template_scope"),
]
diff --git a/posthog/migrations/0313_early_access_feature.py b/posthog/migrations/0313_early_access_feature.py
index 20d1dfe22d479..e5bc07942ff24 100644
--- a/posthog/migrations/0313_early_access_feature.py
+++ b/posthog/migrations/0313_early_access_feature.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0312_organization_available_product_features"),
]
@@ -19,7 +18,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=200)),
diff --git a/posthog/migrations/0314_sharingconfiguration_recording.py b/posthog/migrations/0314_sharingconfiguration_recording.py
index 940a09db8d46f..d4ce07d2ffebc 100644
--- a/posthog/migrations/0314_sharingconfiguration_recording.py
+++ b/posthog/migrations/0314_sharingconfiguration_recording.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0313_early_access_feature"),
]
diff --git a/posthog/migrations/0315_notebook.py b/posthog/migrations/0315_notebook.py
index b14d66d59c3f7..b02a15842a7a1 100644
--- a/posthog/migrations/0315_notebook.py
+++ b/posthog/migrations/0315_notebook.py
@@ -9,7 +9,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0314_sharingconfiguration_recording"),
]
@@ -21,20 +20,36 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
+ ),
+ ),
+ (
+ "short_id",
+ models.CharField(
+ blank=True,
+ default=posthog.utils.generate_short_id,
+ max_length=12,
),
),
- ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)),
("title", models.CharField(blank=True, max_length=256, null=True)),
("content", models.JSONField(blank=True, default=None, null=True)),
("deleted", models.BooleanField(default=False)),
("version", models.IntegerField(default=0)),
("created_at", models.DateTimeField(auto_now_add=True)),
- ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)),
+ (
+ "last_modified_at",
+ models.DateTimeField(default=django.utils.timezone.now),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
@@ -47,7 +62,10 @@ class Migration(migrations.Migration):
to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"unique_together": {("team", "short_id")},
diff --git a/posthog/migrations/0316_action_href_text_matching.py b/posthog/migrations/0316_action_href_text_matching.py
index 870c045ad6a34..fdded1fd8cbf0 100644
--- a/posthog/migrations/0316_action_href_text_matching.py
+++ b/posthog/migrations/0316_action_href_text_matching.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0315_notebook"),
]
@@ -15,7 +14,11 @@ class Migration(migrations.Migration):
name="href_matching",
field=models.CharField(
blank=True,
- choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")],
+ choices=[
+ ("contains", "contains"),
+ ("regex", "regex"),
+ ("exact", "exact"),
+ ],
max_length=400,
null=True,
),
@@ -25,7 +28,11 @@ class Migration(migrations.Migration):
name="text_matching",
field=models.CharField(
blank=True,
- choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")],
+ choices=[
+ ("contains", "contains"),
+ ("regex", "regex"),
+ ("exact", "exact"),
+ ],
max_length=400,
null=True,
),
diff --git a/posthog/migrations/0317_batch_export_models.py b/posthog/migrations/0317_batch_export_models.py
index b8feb24b4b75e..f17bf9293ad34 100644
--- a/posthog/migrations/0317_batch_export_models.py
+++ b/posthog/migrations/0317_batch_export_models.py
@@ -17,7 +17,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -39,13 +42,15 @@ class Migration(migrations.Migration):
(
"created_at",
models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportDestination was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportDestination was created.",
),
),
(
"last_updated_at",
models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportDestination was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportDestination was last updated.",
),
),
],
@@ -59,7 +64,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -70,7 +78,10 @@ class Migration(migrations.Migration):
to="posthog.team",
),
),
- ("name", models.TextField(help_text="A human-readable name for this BatchExport.")),
+ (
+ "name",
+ models.TextField(help_text="A human-readable name for this BatchExport."),
+ ),
(
"destination",
models.ForeignKey(
@@ -88,21 +99,32 @@ class Migration(migrations.Migration):
max_length=64,
),
),
- ("paused", models.BooleanField(default=False, help_text="Whether this BatchExport is paused or not.")),
+ (
+ "paused",
+ models.BooleanField(
+ default=False,
+ help_text="Whether this BatchExport is paused or not.",
+ ),
+ ),
(
"deleted",
- models.BooleanField(default=False, help_text="Whether this BatchExport is deleted or not."),
+ models.BooleanField(
+ default=False,
+ help_text="Whether this BatchExport is deleted or not.",
+ ),
),
(
"created_at",
models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExport was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExport was created.",
),
),
(
"last_updated_at",
models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExport was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExport was last updated.",
),
),
],
@@ -116,7 +138,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -138,31 +163,52 @@ class Migration(migrations.Migration):
),
(
"records_completed",
- models.IntegerField(help_text="The number of records that have been exported.", null=True),
+ models.IntegerField(
+ help_text="The number of records that have been exported.",
+ null=True,
+ ),
),
(
"latest_error",
- models.TextField(help_text="The latest error that occurred during this run.", null=True),
+ models.TextField(
+ help_text="The latest error that occurred during this run.",
+ null=True,
+ ),
+ ),
+ (
+ "data_interval_start",
+ models.DateTimeField(help_text="The start of the data interval."),
+ ),
+ (
+ "data_interval_end",
+ models.DateTimeField(help_text="The end of the data interval."),
+ ),
+ (
+ "cursor",
+ models.TextField(
+ help_text="An opaque cursor that may be used to resume.",
+ null=True,
+ ),
),
- ("data_interval_start", models.DateTimeField(help_text="The start of the data interval.")),
- ("data_interval_end", models.DateTimeField(help_text="The end of the data interval.")),
- ("cursor", models.TextField(help_text="An opaque cursor that may be used to resume.", null=True)),
(
"created_at",
models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportRun was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportRun was created.",
),
),
(
"finished_at",
models.DateTimeField(
- help_text="The timestamp at which this BatchExportRun finished, successfully or not.", null=True
+ help_text="The timestamp at which this BatchExportRun finished, successfully or not.",
+ null=True,
),
),
(
"last_updated_at",
models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportRun was last updated.",
),
),
(
diff --git a/posthog/migrations/0318_alter_earlyaccessfeature_stage.py b/posthog/migrations/0318_alter_earlyaccessfeature_stage.py
index 2657a38695868..92abd1afd5d86 100644
--- a/posthog/migrations/0318_alter_earlyaccessfeature_stage.py
+++ b/posthog/migrations/0318_alter_earlyaccessfeature_stage.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0317_batch_export_models"),
]
diff --git a/posthog/migrations/0319_user_requested_password_reset_at.py b/posthog/migrations/0319_user_requested_password_reset_at.py
index 0b51cd0063256..7de6560f71fec 100644
--- a/posthog/migrations/0319_user_requested_password_reset_at.py
+++ b/posthog/migrations/0319_user_requested_password_reset_at.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0318_alter_earlyaccessfeature_stage"),
]
diff --git a/posthog/migrations/0320_survey.py b/posthog/migrations/0320_survey.py
index 9e8fea849ef9b..8dff33ee768db 100644
--- a/posthog/migrations/0320_survey.py
+++ b/posthog/migrations/0320_survey.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0319_user_requested_password_reset_at"),
]
@@ -19,7 +18,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=400)),
diff --git a/posthog/migrations/0321_add_exception_autocapture_optin.py b/posthog/migrations/0321_add_exception_autocapture_optin.py
index a1adce2374eb6..c15700964f90d 100644
--- a/posthog/migrations/0321_add_exception_autocapture_optin.py
+++ b/posthog/migrations/0321_add_exception_autocapture_optin.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0320_survey"),
]
diff --git a/posthog/migrations/0322_auto_20230531_1904.py b/posthog/migrations/0322_auto_20230531_1904.py
index c7b774f365c67..687d77316a99f 100644
--- a/posthog/migrations/0322_auto_20230531_1904.py
+++ b/posthog/migrations/0322_auto_20230531_1904.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0321_add_exception_autocapture_optin"),
]
@@ -14,13 +13,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name="FeatureFlagDashboards",
fields=[
- ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
("created_at", models.DateTimeField(auto_now_add=True, null=True)),
("updated_at", models.DateTimeField(auto_now=True, null=True)),
- ("dashboard", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard")),
+ (
+ "dashboard",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
+ ),
+ ),
(
"feature_flag",
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"),
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.featureflag",
+ ),
),
],
),
@@ -37,7 +53,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="featureflagdashboards",
constraint=models.UniqueConstraint(
- fields=("feature_flag", "dashboard"), name="unique feature flag for a dashboard"
+ fields=("feature_flag", "dashboard"),
+ name="unique feature flag for a dashboard",
),
),
]
diff --git a/posthog/migrations/0324_user_has_seen_product_intro_for.py b/posthog/migrations/0324_user_has_seen_product_intro_for.py
index 5c75b6a7472e4..6c5142cdf47a6 100644
--- a/posthog/migrations/0324_user_has_seen_product_intro_for.py
+++ b/posthog/migrations/0324_user_has_seen_product_intro_for.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0323_alter_batchexportdestination_type"),
]
diff --git a/posthog/migrations/0325_alter_dashboardtemplate_scope.py b/posthog/migrations/0325_alter_dashboardtemplate_scope.py
index cccfc358af848..d6829a963ac7c 100644
--- a/posthog/migrations/0325_alter_dashboardtemplate_scope.py
+++ b/posthog/migrations/0325_alter_dashboardtemplate_scope.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0324_user_has_seen_product_intro_for"),
]
@@ -15,7 +14,11 @@ class Migration(migrations.Migration):
name="scope",
field=models.CharField(
blank=True,
- choices=[("team", "Only team"), ("global", "Global"), ("feature_flag", "Feature Flag")],
+ choices=[
+ ("team", "Only team"),
+ ("global", "Global"),
+ ("feature_flag", "Feature Flag"),
+ ],
max_length=24,
null=True,
),
diff --git a/posthog/migrations/0326_team_extra_settings.py b/posthog/migrations/0326_team_extra_settings.py
index 1ce5ca0886c75..62deb954703eb 100644
--- a/posthog/migrations/0326_team_extra_settings.py
+++ b/posthog/migrations/0326_team_extra_settings.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0325_alter_dashboardtemplate_scope"),
]
diff --git a/posthog/migrations/0327_alter_earlyaccessfeature_stage.py b/posthog/migrations/0327_alter_earlyaccessfeature_stage.py
index c9d2454d40d21..f5376b89e999c 100644
--- a/posthog/migrations/0327_alter_earlyaccessfeature_stage.py
+++ b/posthog/migrations/0327_alter_earlyaccessfeature_stage.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0326_team_extra_settings"),
]
diff --git a/posthog/migrations/0328_add_starter_feature_flag_template.py b/posthog/migrations/0328_add_starter_feature_flag_template.py
index adf9d5b971a60..eef2e038dc7a9 100644
--- a/posthog/migrations/0328_add_starter_feature_flag_template.py
+++ b/posthog/migrations/0328_add_starter_feature_flag_template.py
@@ -59,7 +59,6 @@ def create_starter_template(apps, schema_editor):
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0327_alter_earlyaccessfeature_stage"),
]
diff --git a/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py b/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py
index 7b1e88d018b8d..b3957067826df 100644
--- a/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py
+++ b/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0328_add_starter_feature_flag_template"),
]
@@ -21,18 +20,33 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
- ("access_key", encrypted_fields.fields.EncryptedTextField(max_length=500)),
- ("access_secret", encrypted_fields.fields.EncryptedTextField(max_length=500)),
+ (
+ "access_key",
+ encrypted_fields.fields.EncryptedTextField(max_length=500),
+ ),
+ (
+ "access_secret",
+ encrypted_fields.fields.EncryptedTextField(max_length=500),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"abstract": False,
@@ -46,11 +60,17 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("name", models.CharField(max_length=128)),
- ("format", models.CharField(choices=[("CSV", "CSV"), ("Parquet", "Parquet")], max_length=128)),
+ (
+ "format",
+ models.CharField(choices=[("CSV", "CSV"), ("Parquet", "Parquet")], max_length=128),
+ ),
("url_pattern", models.CharField(max_length=500)),
(
"columns",
@@ -64,7 +84,10 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
@@ -76,7 +99,10 @@ class Migration(migrations.Migration):
to="posthog.datawarehousecredential",
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"abstract": False,
diff --git a/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py b/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py
index ae830ee034d0d..71671bb4096f1 100644
--- a/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py
+++ b/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0329_datawarehousecredential_datawarehousetable"),
]
diff --git a/posthog/migrations/0331_add_missing_property_definition_index.py b/posthog/migrations/0331_add_missing_property_definition_index.py
index f4c0bcbca4fad..376cec5d0b3d7 100644
--- a/posthog/migrations/0331_add_missing_property_definition_index.py
+++ b/posthog/migrations/0331_add_missing_property_definition_index.py
@@ -18,6 +18,9 @@ class Migration(migrations.Migration):
operations = [
AddIndexConcurrently(
model_name="propertydefinition",
- index=models.Index(fields=["team_id", "type", "is_numerical"], name="posthog_pro_team_id_eac36d_idx"),
+ index=models.Index(
+ fields=["team_id", "type", "is_numerical"],
+ name="posthog_pro_team_id_eac36d_idx",
+ ),
),
]
diff --git a/posthog/migrations/0332_featureflag_has_enriched_analytics.py b/posthog/migrations/0332_featureflag_has_enriched_analytics.py
index 259845b925947..d12ca4079d5aa 100644
--- a/posthog/migrations/0332_featureflag_has_enriched_analytics.py
+++ b/posthog/migrations/0332_featureflag_has_enriched_analytics.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0331_add_missing_property_definition_index"),
]
diff --git a/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py b/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py
index aa9654a3ca275..b945693d75c20 100644
--- a/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py
+++ b/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0332_featureflag_has_enriched_analytics"),
]
@@ -14,21 +13,27 @@ class Migration(migrations.Migration):
model_name="batchexport",
name="end_at",
field=models.DateTimeField(
- default=None, help_text="Time after which any Batch Export runs won't be triggered.", null=True
+ default=None,
+ help_text="Time after which any Batch Export runs won't be triggered.",
+ null=True,
),
),
migrations.AddField(
model_name="batchexport",
name="last_paused_at",
field=models.DateTimeField(
- default=None, help_text="The timestamp at which this BatchExport was last paused.", null=True
+ default=None,
+ help_text="The timestamp at which this BatchExport was last paused.",
+ null=True,
),
),
migrations.AddField(
model_name="batchexport",
name="start_at",
field=models.DateTimeField(
- default=None, help_text="Time before which any Batch Export runs won't be triggered.", null=True
+ default=None,
+ help_text="Time before which any Batch Export runs won't be triggered.",
+ null=True,
),
),
]
diff --git a/posthog/migrations/0334_add_asset_ttl.py b/posthog/migrations/0334_add_asset_ttl.py
index ec77eedb9cd78..8a97eca2b11e4 100644
--- a/posthog/migrations/0334_add_asset_ttl.py
+++ b/posthog/migrations/0334_add_asset_ttl.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0333_add_timestamp_fields_to_batch_exports"),
]
diff --git a/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py b/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py
index 33f5294d206ef..36d8ddbccc372 100644
--- a/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py
+++ b/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0334_add_asset_ttl"),
]
@@ -14,7 +13,13 @@ class Migration(migrations.Migration):
model_name="asyncdeletion",
name="deletion_type",
field=models.PositiveSmallIntegerField(
- choices=[(0, "Team"), (1, "Person"), (2, "Group"), (3, "Cohort Stale"), (4, "Cohort Full")]
+ choices=[
+ (0, "Team"),
+ (1, "Person"),
+ (2, "Group"),
+ (3, "Cohort Stale"),
+ (4, "Cohort Full"),
+ ]
),
),
]
diff --git a/posthog/migrations/0336_alter_survey_type.py b/posthog/migrations/0336_alter_survey_type.py
index c432eb88eed46..8fe97122f5db7 100644
--- a/posthog/migrations/0336_alter_survey_type.py
+++ b/posthog/migrations/0336_alter_survey_type.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0335_alter_asyncdeletion_deletion_type"),
]
diff --git a/posthog/migrations/0337_more_session_recording_fields.py b/posthog/migrations/0337_more_session_recording_fields.py
index 66e59191c3b6d..c0396ef1417b5 100644
--- a/posthog/migrations/0337_more_session_recording_fields.py
+++ b/posthog/migrations/0337_more_session_recording_fields.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0336_alter_survey_type"),
]
diff --git a/posthog/migrations/0338_datawarehouse_saved_query.py b/posthog/migrations/0338_datawarehouse_saved_query.py
index eac5feef35a87..88ab851d0b1c8 100644
--- a/posthog/migrations/0338_datawarehouse_saved_query.py
+++ b/posthog/migrations/0338_datawarehouse_saved_query.py
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0337_more_session_recording_fields"),
]
@@ -22,7 +21,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
(
@@ -41,20 +43,30 @@ class Migration(migrations.Migration):
null=True,
),
),
- ("query", models.JSONField(blank=True, default=dict, help_text="HogQL query", null=True)),
+ (
+ "query",
+ models.JSONField(blank=True, default=dict, help_text="HogQL query", null=True),
+ ),
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
),
migrations.AddConstraint(
model_name="datawarehousesavedquery",
constraint=models.UniqueConstraint(
- fields=("team", "name"), name="posthog_datawarehouse_saved_query_unique_name"
+ fields=("team", "name"),
+ name="posthog_datawarehouse_saved_query_unique_name",
),
),
]
diff --git a/posthog/migrations/0339_add_user_scene_personalisation.py b/posthog/migrations/0339_add_user_scene_personalisation.py
index d38c1ec1da9a7..aede86b617e5a 100644
--- a/posthog/migrations/0339_add_user_scene_personalisation.py
+++ b/posthog/migrations/0339_add_user_scene_personalisation.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0338_datawarehouse_saved_query"),
]
@@ -19,20 +18,29 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("scene", models.CharField(max_length=200)),
(
"dashboard",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.dashboard",
),
),
(
"team",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.team",
),
),
(
@@ -50,7 +58,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="userscenepersonalisation",
constraint=models.UniqueConstraint(
- fields=("team", "user", "scene"), name="posthog_unique_scene_personalisation"
+ fields=("team", "user", "scene"),
+ name="posthog_unique_scene_personalisation",
),
),
]
diff --git a/posthog/migrations/0340_action_bytecode.py b/posthog/migrations/0340_action_bytecode.py
index c55a3678f0142..3603c83d8ef1a 100644
--- a/posthog/migrations/0340_action_bytecode.py
+++ b/posthog/migrations/0340_action_bytecode.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0339_add_user_scene_personalisation"),
]
diff --git a/posthog/migrations/0341_add_session_recording_storage_version.py b/posthog/migrations/0341_add_session_recording_storage_version.py
index 92828fd84168b..6e81b4105e6fc 100644
--- a/posthog/migrations/0341_add_session_recording_storage_version.py
+++ b/posthog/migrations/0341_add_session_recording_storage_version.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0340_action_bytecode"),
]
diff --git a/posthog/migrations/0342_alter_featureflag_usage_dashboard.py b/posthog/migrations/0342_alter_featureflag_usage_dashboard.py
index 942413fd5d49a..55feb73d8a362 100644
--- a/posthog/migrations/0342_alter_featureflag_usage_dashboard.py
+++ b/posthog/migrations/0342_alter_featureflag_usage_dashboard.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0341_add_session_recording_storage_version"),
]
@@ -15,7 +14,10 @@ class Migration(migrations.Migration):
model_name="featureflag",
name="usage_dashboard",
field=models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard"
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="posthog.dashboard",
),
),
]
diff --git a/posthog/migrations/0343_team_has_completed_onboarding_for.py b/posthog/migrations/0343_team_has_completed_onboarding_for.py
index c6fba2fc6334f..e3c1aab1edc01 100644
--- a/posthog/migrations/0343_team_has_completed_onboarding_for.py
+++ b/posthog/migrations/0343_team_has_completed_onboarding_for.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0342_alter_featureflag_usage_dashboard"),
]
diff --git a/posthog/migrations/0344_add_new_export_type.py b/posthog/migrations/0344_add_new_export_type.py
index 49155d32f940b..947c1cb537a40 100644
--- a/posthog/migrations/0344_add_new_export_type.py
+++ b/posthog/migrations/0344_add_new_export_type.py
@@ -13,7 +13,11 @@ class Migration(migrations.Migration):
model_name="batchexportdestination",
name="type",
field=models.CharField(
- choices=[("S3", "S3"), ("Snowflake", "Snowflake"), ("Postgres", "Postgres")],
+ choices=[
+ ("S3", "S3"),
+ ("Snowflake", "Snowflake"),
+ ("Postgres", "Postgres"),
+ ],
help_text="A choice of supported BatchExportDestination types.",
max_length=64,
),
diff --git a/posthog/migrations/0345_view_link_and_s3_table_update.py b/posthog/migrations/0345_view_link_and_s3_table_update.py
index 0e91d001128e9..5b0cbcc45b68b 100644
--- a/posthog/migrations/0345_view_link_and_s3_table_update.py
+++ b/posthog/migrations/0345_view_link_and_s3_table_update.py
@@ -7,7 +7,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0344_add_new_export_type"),
]
@@ -16,7 +15,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name="datawarehousesavedquery",
name="external_tables",
- field=models.JSONField(blank=True, default=list, help_text="List of all external tables", null=True),
+ field=models.JSONField(
+ blank=True,
+ default=list,
+ help_text="List of all external tables",
+ null=True,
+ ),
),
migrations.CreateModel(
name="DataWarehouseViewLink",
@@ -26,7 +30,10 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
("table", models.CharField(max_length=128)),
@@ -35,16 +42,23 @@ class Migration(migrations.Migration):
(
"created_by",
models.ForeignKey(
- blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to=settings.AUTH_USER_MODEL,
),
),
(
"saved_query",
models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="posthog.datawarehousesavedquery"
+ on_delete=django.db.models.deletion.CASCADE,
+ to="posthog.datawarehousesavedquery",
),
),
- ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")),
+ (
+ "team",
+ models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"),
+ ),
],
options={
"abstract": False,
diff --git a/posthog/migrations/0346_team_week_start_day.py b/posthog/migrations/0346_team_week_start_day.py
index d8f659ccbefae..716fd071fff04 100644
--- a/posthog/migrations/0346_team_week_start_day.py
+++ b/posthog/migrations/0346_team_week_start_day.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0345_view_link_and_s3_table_update"),
]
diff --git a/posthog/migrations/0347_add_bigquery_export_type.py b/posthog/migrations/0347_add_bigquery_export_type.py
index 862befb04723a..6f9b25a1b38de 100644
--- a/posthog/migrations/0347_add_bigquery_export_type.py
+++ b/posthog/migrations/0347_add_bigquery_export_type.py
@@ -13,7 +13,12 @@ class Migration(migrations.Migration):
model_name="batchexportdestination",
name="type",
field=models.CharField(
- choices=[("S3", "S3"), ("Snowflake", "Snowflake"), ("Postgres", "Postgres"), ("BigQuery", "Bigquery")],
+ choices=[
+ ("S3", "S3"),
+ ("Snowflake", "Snowflake"),
+ ("Postgres", "Postgres"),
+ ("BigQuery", "Bigquery"),
+ ],
help_text="A choice of supported BatchExportDestination types.",
max_length=64,
),
diff --git a/posthog/migrations/0348_alter_datawarehousetable_format.py b/posthog/migrations/0348_alter_datawarehousetable_format.py
index 72434bbc99fdb..d2f464830a957 100644
--- a/posthog/migrations/0348_alter_datawarehousetable_format.py
+++ b/posthog/migrations/0348_alter_datawarehousetable_format.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0347_add_bigquery_export_type"),
]
@@ -14,7 +13,12 @@ class Migration(migrations.Migration):
model_name="datawarehousetable",
name="format",
field=models.CharField(
- choices=[("CSV", "CSV"), ("Parquet", "Parquet"), ("JSONEachRow", "JSON")], max_length=128
+ choices=[
+ ("CSV", "CSV"),
+ ("Parquet", "Parquet"),
+ ("JSONEachRow", "JSON"),
+ ],
+ max_length=128,
),
),
]
diff --git a/posthog/migrations/0349_update_survey_query_name.py b/posthog/migrations/0349_update_survey_query_name.py
index cbcbbb3a0c954..13235cdc67fbc 100644
--- a/posthog/migrations/0349_update_survey_query_name.py
+++ b/posthog/migrations/0349_update_survey_query_name.py
@@ -5,7 +5,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0348_alter_datawarehousetable_format"),
]
diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py
index bfe4b079b9945..b2a5c0c14285f 100644
--- a/posthog/migrations/0350_add_notebook_text_content.py
+++ b/posthog/migrations/0350_add_notebook_text_content.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0349_update_survey_query_name"),
]
diff --git a/posthog/migrations/0351_team_surveys_opt_in.py b/posthog/migrations/0351_team_surveys_opt_in.py
index c1722b7a11000..207677ab8b36d 100644
--- a/posthog/migrations/0351_team_surveys_opt_in.py
+++ b/posthog/migrations/0351_team_surveys_opt_in.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0350_add_notebook_text_content"),
]
diff --git a/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py b/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py
index 014edcd509144..3b255ea8a4778 100644
--- a/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py
+++ b/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0352_auto_20230926_1833"),
]
@@ -14,7 +13,12 @@ class Migration(migrations.Migration):
model_name="batchexport",
name="interval",
field=models.CharField(
- choices=[("hour", "hour"), ("day", "day"), ("week", "week"), ("every 5 minutes", "every 5 minutes")],
+ choices=[
+ ("hour", "hour"),
+ ("day", "day"),
+ ("week", "week"),
+ ("every 5 minutes", "every 5 minutes"),
+ ],
default="hour",
help_text="The interval at which to export data.",
max_length=64,
diff --git a/posthog/migrations/0354_organization_never_drop_data.py b/posthog/migrations/0354_organization_never_drop_data.py
index 154446df669d5..560f1f518c612 100644
--- a/posthog/migrations/0354_organization_never_drop_data.py
+++ b/posthog/migrations/0354_organization_never_drop_data.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0353_add_5_minute_interval_to_batch_exports"),
]
diff --git a/posthog/migrations/0355_add_batch_export_backfill_model.py b/posthog/migrations/0355_add_batch_export_backfill_model.py
index c558d2a74d7f8..294ad7e019db6 100644
--- a/posthog/migrations/0355_add_batch_export_backfill_model.py
+++ b/posthog/migrations/0355_add_batch_export_backfill_model.py
@@ -33,11 +33,20 @@ class Migration(migrations.Migration):
(
"id",
models.UUIDField(
- default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+ default=posthog.models.utils.UUIDT,
+ editable=False,
+ primary_key=True,
+ serialize=False,
),
),
- ("start_at", models.DateTimeField(help_text="The start of the data interval.")),
- ("end_at", models.DateTimeField(help_text="The end of the data interval.")),
+ (
+ "start_at",
+ models.DateTimeField(help_text="The start of the data interval."),
+ ),
+ (
+ "end_at",
+ models.DateTimeField(help_text="The end of the data interval."),
+ ),
(
"status",
models.CharField(
@@ -58,7 +67,8 @@ class Migration(migrations.Migration):
(
"created_at",
models.DateTimeField(
- auto_now_add=True, help_text="The timestamp at which this BatchExportBackfill was created."
+ auto_now_add=True,
+ help_text="The timestamp at which this BatchExportBackfill was created.",
),
),
(
@@ -71,7 +81,8 @@ class Migration(migrations.Migration):
(
"last_updated_at",
models.DateTimeField(
- auto_now=True, help_text="The timestamp at which this BatchExportBackfill was last updated."
+ auto_now=True,
+ help_text="The timestamp at which this BatchExportBackfill was last updated.",
),
),
(
diff --git a/posthog/migrations/0356_add_replay_cost_control.py b/posthog/migrations/0356_add_replay_cost_control.py
index 96c5cb166f4f8..72e04e1d8a46f 100644
--- a/posthog/migrations/0356_add_replay_cost_control.py
+++ b/posthog/migrations/0356_add_replay_cost_control.py
@@ -6,7 +6,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("posthog", "0355_add_batch_export_backfill_model"),
]
diff --git a/posthog/models/__init__.py b/posthog/models/__init__.py
index 89432e0809984..b5f0586a349bb 100644
--- a/posthog/models/__init__.py
+++ b/posthog/models/__init__.py
@@ -37,7 +37,13 @@
from .organization_domain import OrganizationDomain
from .person import Person, PersonDistinctId, PersonOverride, PersonOverrideMapping
from .personal_api_key import PersonalAPIKey
-from .plugin import Plugin, PluginAttachment, PluginConfig, PluginSourceFile, PluginLogEntry
+from .plugin import (
+ Plugin,
+ PluginAttachment,
+ PluginConfig,
+ PluginSourceFile,
+ PluginLogEntry,
+)
from .prompt.prompt import Prompt, PromptSequence, UserPromptState
from .property import Property
from .property_definition import PropertyDefinition
@@ -51,8 +57,12 @@
from .user import User, UserManager
from .user_scene_personalisation import UserScenePersonalisation
from ..session_recordings.models.session_recording import SessionRecording
-from ..session_recordings.models.session_recording_playlist import SessionRecordingPlaylist
-from ..session_recordings.models.session_recording_playlist_item import SessionRecordingPlaylistItem
+from ..session_recordings.models.session_recording_playlist import (
+ SessionRecordingPlaylist,
+)
+from ..session_recordings.models.session_recording_playlist_item import (
+ SessionRecordingPlaylistItem,
+)
__all__ = [
"Action",
diff --git a/posthog/models/action/action.py b/posthog/models/action/action.py
index 28642eaedeb53..368100fcbc978 100644
--- a/posthog/models/action/action.py
+++ b/posthog/models/action/action.py
@@ -78,7 +78,10 @@ def refresh_bytecode(self):
@receiver(post_save, sender=Action)
def action_saved(sender, instance: Action, created, **kwargs):
- get_client().publish("reload-action", json.dumps({"teamId": instance.team_id, "actionId": instance.id}))
+ get_client().publish(
+ "reload-action",
+ json.dumps({"teamId": instance.team_id, "actionId": instance.id}),
+ )
@mutable_receiver(post_delete, sender=Action)
diff --git a/posthog/models/action/util.py b/posthog/models/action/util.py
index 7a1fcd007f0aa..b67ecf0115dc4 100644
--- a/posthog/models/action/util.py
+++ b/posthog/models/action/util.py
@@ -35,7 +35,9 @@ def format_action_filter(
conditions: List[str] = []
# filter element
if step.event == AUTOCAPTURE_EVENT:
- from posthog.models.property.util import filter_element # prevent circular import
+ from posthog.models.property.util import (
+ filter_element,
+ ) # prevent circular import
if step.selector:
element_condition, element_params = filter_element(
diff --git a/posthog/models/action_step.py b/posthog/models/action_step.py
index 2f792f1f74d85..036e6fad60d13 100644
--- a/posthog/models/action_step.py
+++ b/posthog/models/action_step.py
@@ -52,7 +52,8 @@ class ActionStep(models.Model):
def action_step_saved(sender, instance: ActionStep, created, **kwargs):
instance.action.refresh_bytecode()
get_client().publish(
- "reload-action", json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id})
+ "reload-action",
+ json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}),
)
@@ -60,5 +61,6 @@ def action_step_saved(sender, instance: ActionStep, created, **kwargs):
def action_step_deleted(sender, instance: ActionStep, **kwargs):
instance.action.refresh_bytecode()
get_client().publish(
- "reload-action", json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id})
+ "reload-action",
+ json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}),
)
diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py
index f3b36e2c3dbd0..94a9c0914faf6 100644
--- a/posthog/models/activity_logging/activity_log.py
+++ b/posthog/models/activity_logging/activity_log.py
@@ -99,8 +99,23 @@ class Meta:
field_exclusions: Dict[ActivityScope, List[str]] = {
- "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"],
- "FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"],
+ "Notebook": [
+ "id",
+ "last_modified_at",
+ "last_modified_by",
+ "created_at",
+ "created_by",
+ "text_content",
+ ],
+ "FeatureFlag": [
+ "id",
+ "created_at",
+ "created_by",
+ "is_simple_flag",
+ "experiment",
+ "team",
+ "featureflagoverride",
+ ],
"Person": [
"id",
"uuid",
@@ -143,7 +158,14 @@ class Meta:
"dashboardtile",
"caching_states",
],
- "SessionRecordingPlaylist": ["id", "short_id", "created_at", "created_by", "last_modified_at", "last_modified_by"],
+ "SessionRecordingPlaylist": [
+ "id",
+ "short_id",
+ "created_at",
+ "created_by",
+ "last_modified_at",
+ "last_modified_by",
+ ],
"EventDefinition": [
"eventdefinition_ptr_id",
"id",
@@ -246,13 +268,24 @@ def changes_between(
elif right is None and left is not None:
changes.append(Change(type=model_type, field=field, action="deleted", before=left))
elif left != right:
- changes.append(Change(type=model_type, field=field, action="changed", before=left, after=right))
+ changes.append(
+ Change(
+ type=model_type,
+ field=field,
+ action="changed",
+ before=left,
+ after=right,
+ )
+ )
return changes
def dict_changes_between(
- model_type: ActivityScope, previous: Dict[Any, Any], new: Dict[Any, Any], use_field_exclusions: bool = False
+ model_type: ActivityScope,
+ previous: Dict[Any, Any],
+ new: Dict[Any, Any],
+ use_field_exclusions: bool = False,
) -> List[Change]:
"""
Identifies changes between two dictionaries by comparing fields
@@ -276,10 +309,23 @@ def dict_changes_between(
if previous_value is None and new_value is not None:
changes.append(Change(type=model_type, field=field, action="created", after=new_value))
elif new_value is None and previous_value is not None:
- changes.append(Change(type=model_type, field=field, action="deleted", before=previous_value))
+ changes.append(
+ Change(
+ type=model_type,
+ field=field,
+ action="deleted",
+ before=previous_value,
+ )
+ )
elif previous_value != new_value:
changes.append(
- Change(type=model_type, field=field, action="changed", before=previous_value, after=new_value)
+ Change(
+ type=model_type,
+ field=field,
+ action="changed",
+ before=previous_value,
+ after=new_value,
+ )
)
return changes
@@ -350,7 +396,11 @@ def get_activity_page(activity_query: models.QuerySet, limit: int = 10, page: in
def load_activity(
- scope: ActivityScope, team_id: int, item_id: Optional[int] = None, limit: int = 10, page: int = 1
+ scope: ActivityScope,
+ team_id: int,
+ item_id: Optional[int] = None,
+ limit: int = 10,
+ page: int = 1,
) -> ActivityPage:
# TODO in follow-up to posthog #8931 selecting specific fields into a return type from this query
diff --git a/posthog/models/app_metrics/sql.py b/posthog/models/app_metrics/sql.py
index 3198b3226e52b..65d1de6de3060 100644
--- a/posthog/models/app_metrics/sql.py
+++ b/posthog/models/app_metrics/sql.py
@@ -1,7 +1,11 @@
from django.conf import settings
from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, kafka_engine
-from posthog.clickhouse.table_engines import AggregatingMergeTree, Distributed, ReplicationScheme
+from posthog.clickhouse.table_engines import (
+ AggregatingMergeTree,
+ Distributed,
+ ReplicationScheme,
+)
from posthog.kafka_client.topics import KAFKA_APP_METRICS
SHARDED_APP_METRICS_TABLE_ENGINE = lambda: AggregatingMergeTree(
diff --git a/posthog/models/async_deletion/async_deletion.py b/posthog/models/async_deletion/async_deletion.py
index 513657f73daae..a851fa513f526 100644
--- a/posthog/models/async_deletion/async_deletion.py
+++ b/posthog/models/async_deletion/async_deletion.py
@@ -21,7 +21,8 @@ class Meta:
condition=models.Q(group_type_index__isnull=True),
),
models.UniqueConstraint(
- name="unique deletion for groups", fields=["deletion_type", "key", "group_type_index"]
+ name="unique deletion for groups",
+ fields=["deletion_type", "key", "group_type_index"],
),
]
indexes = [models.Index(name="delete_verified_at index", fields=["delete_verified_at"])]
diff --git a/posthog/models/async_deletion/delete.py b/posthog/models/async_deletion/delete.py
index 378d655a02714..7774cf2384016 100644
--- a/posthog/models/async_deletion/delete.py
+++ b/posthog/models/async_deletion/delete.py
@@ -41,7 +41,10 @@ def mark_deletions_done(self):
AsyncDeletion.objects.filter(pk__in=[row.pk for row in to_verify]).update(delete_verified_at=timezone.now())
logger.warn(
"Updated `delete_verified_at` for AsyncDeletion",
- {"count": len(to_verify), "team_ids": list(set(row.team_id for row in to_verify))},
+ {
+ "count": len(to_verify),
+ "team_ids": list(set(row.team_id for row in to_verify)),
+ },
)
def _fetch_unverified_deletions_grouped(self):
diff --git a/posthog/models/async_deletion/delete_cohorts.py b/posthog/models/async_deletion/delete_cohorts.py
index a71f16ae26cd0..3a4737c221964 100644
--- a/posthog/models/async_deletion/delete_cohorts.py
+++ b/posthog/models/async_deletion/delete_cohorts.py
@@ -15,7 +15,10 @@ def process(self, deletions: List[AsyncDeletion]):
logger.warn(
"Starting AsyncDeletion on `cohortpeople` table in ClickHouse",
- {"count": len(deletions), "team_ids": list(set(row.team_id for row in deletions))},
+ {
+ "count": len(deletions),
+ "team_ids": list(set(row.team_id for row in deletions)),
+ },
)
conditions, args = self._conditions(deletions)
@@ -62,13 +65,20 @@ def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, D
version_param = f"version{suffix}"
if async_deletion.deletion_type == DeletionType.Cohort_full:
key, _ = async_deletion.key.split("_")
- return f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s )", {
- team_id_param: async_deletion.team_id,
- key_param: key,
- }
+ return (
+ f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s )",
+ {
+ team_id_param: async_deletion.team_id,
+ key_param: key,
+ },
+ )
else:
key, version = async_deletion.key.split("_")
return (
f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s AND version < %({version_param})s )",
- {team_id_param: async_deletion.team_id, version_param: version, key_param: key},
+ {
+ team_id_param: async_deletion.team_id,
+ version_param: version,
+ key_param: key,
+ },
)
diff --git a/posthog/models/async_deletion/delete_events.py b/posthog/models/async_deletion/delete_events.py
index 5529ca8a95843..cef9c97688f85 100644
--- a/posthog/models/async_deletion/delete_events.py
+++ b/posthog/models/async_deletion/delete_events.py
@@ -27,7 +27,10 @@ def process(self, deletions: List[AsyncDeletion]):
logger.info(
"Starting AsyncDeletion on `events` table in ClickHouse",
- {"count": len(deletions), "team_ids": list(set(row.team_id for row in deletions))},
+ {
+ "count": len(deletions),
+ "team_ids": list(set(row.team_id for row in deletions)),
+ },
)
conditions, args = self._conditions(deletions)
@@ -48,7 +51,10 @@ def process(self, deletions: List[AsyncDeletion]):
logger.info(
"Starting AsyncDeletion for teams on other tables",
- {"count": len(team_deletions), "team_ids": list(set(row.team_id for row in deletions))},
+ {
+ "count": len(team_deletions),
+ "team_ids": list(set(row.team_id for row in deletions)),
+ },
)
conditions, args = self._conditions(team_deletions)
for table in TABLES_TO_DELETE_TEAM_DATA_FROM:
@@ -97,5 +103,8 @@ def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, D
else:
return (
f"(team_id = %(team_id{suffix})s AND {self._column_name(async_deletion)} = %(key{suffix})s)",
- {f"team_id{suffix}": async_deletion.team_id, f"key{suffix}": async_deletion.key},
+ {
+ f"team_id{suffix}": async_deletion.team_id,
+ f"key{suffix}": async_deletion.key,
+ },
)
diff --git a/posthog/models/async_migration.py b/posthog/models/async_migration.py
index 885f7ce397931..ab60eed94d0c5 100644
--- a/posthog/models/async_migration.py
+++ b/posthog/models/async_migration.py
@@ -33,7 +33,7 @@ class Meta:
null=False, blank=False, default=MigrationStatus.NotStarted
)
- current_operation_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
+ current_operation_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
null=False, blank=False, default=0
)
current_query_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="")
diff --git a/posthog/models/cohort/cohort.py b/posthog/models/cohort/cohort.py
index b101a0fa68bf6..b907df41c934a 100644
--- a/posthog/models/cohort/cohort.py
+++ b/posthog/models/cohort/cohort.py
@@ -193,7 +193,12 @@ def calculate_people_ch(self, pending_version):
from posthog.models.cohort.util import recalculate_cohortpeople
from posthog.tasks.calculate_cohort import clear_stale_cohort
- logger.warn("cohort_calculation_started", id=self.pk, current_version=self.version, new_version=pending_version)
+ logger.warn(
+ "cohort_calculation_started",
+ id=self.pk,
+ current_version=self.version,
+ new_version=pending_version,
+ )
start_time = time.monotonic()
try:
@@ -237,7 +242,10 @@ def insert_users_by_list(self, items: List[str]) -> None:
"""
batchsize = 1000
- from posthog.models.cohort.util import insert_static_cohort, get_static_cohort_size
+ from posthog.models.cohort.util import (
+ insert_static_cohort,
+ get_static_cohort_size,
+ )
if TEST:
from posthog.test.base import flush_persons_and_events
@@ -251,15 +259,26 @@ def insert_users_by_list(self, items: List[str]) -> None:
batch = items[i : i + batchsize]
persons_query = (
Person.objects.filter(team_id=self.team_id)
- .filter(Q(persondistinctid__team_id=self.team_id, persondistinctid__distinct_id__in=batch))
+ .filter(
+ Q(
+ persondistinctid__team_id=self.team_id,
+ persondistinctid__distinct_id__in=batch,
+ )
+ )
.exclude(cohort__id=self.id)
)
- insert_static_cohort([p for p in persons_query.values_list("uuid", flat=True)], self.pk, self.team)
+ insert_static_cohort(
+ [p for p in persons_query.values_list("uuid", flat=True)],
+ self.pk,
+ self.team,
+ )
sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params()
query = UPDATE_QUERY.format(
cohort_id=self.pk,
values_query=sql.replace(
- 'FROM "posthog_person"', f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', 1
+ 'FROM "posthog_person"',
+ f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"',
+ 1,
),
)
cursor.execute(query, params)
@@ -294,7 +313,9 @@ def insert_users_list_by_uuid(self, items: List[str]) -> None:
query = UPDATE_QUERY.format(
cohort_id=self.pk,
values_query=sql.replace(
- 'FROM "posthog_person"', f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', 1
+ 'FROM "posthog_person"',
+ f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"',
+ 1,
),
)
cursor.execute(query, params)
diff --git a/posthog/models/cohort/sql.py b/posthog/models/cohort/sql.py
index b73662931aeb2..821e84e29fd37 100644
--- a/posthog/models/cohort/sql.py
+++ b/posthog/models/cohort/sql.py
@@ -19,7 +19,9 @@
Order By (team_id, cohort_id, person_id, version)
{storage_policy}
""".format(
- cluster=CLICKHOUSE_CLUSTER, engine=COHORTPEOPLE_TABLE_ENGINE(), storage_policy=""
+ cluster=CLICKHOUSE_CLUSTER,
+ engine=COHORTPEOPLE_TABLE_ENGINE(),
+ storage_policy="",
)
TRUNCATE_COHORTPEOPLE_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS cohortpeople ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
diff --git a/posthog/models/cohort/test/test_util.py b/posthog/models/cohort/test/test_util.py
index 7db7d6a5e0130..d8ff051a0bb41 100644
--- a/posthog/models/cohort/test/test_util.py
+++ b/posthog/models/cohort/test/test_util.py
@@ -1,5 +1,8 @@
from posthog.models.cohort import Cohort
-from posthog.models.cohort.util import get_dependent_cohorts, simplified_cohort_filter_properties
+from posthog.models.cohort.util import (
+ get_dependent_cohorts,
+ simplified_cohort_filter_properties,
+)
from posthog.test.base import BaseTest, _create_person, flush_persons_and_events
@@ -14,8 +17,11 @@ def _create_cohort(**kwargs):
class TestCohortUtils(BaseTest):
def test_simplified_cohort_filter_properties_static_cohort(self):
-
- _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"name": "test", "name": "test"},
+ )
cohort = _create_cohort(team=self.team, name="cohort1", groups=[], is_static=True)
flush_persons_and_events()
cohort.insert_users_by_list(["p1"])
@@ -24,12 +30,25 @@ def test_simplified_cohort_filter_properties_static_cohort(self):
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": False, "type": "static-cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": False,
+ "type": "static-cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
def test_simplified_cohort_filter_properties_static_cohort_with_negation(self):
-
- _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"name": "test", "name": "test"},
+ )
cohort = _create_cohort(team=self.team, name="cohort1", groups=[], is_static=True)
flush_persons_and_events()
cohort.insert_users_by_list(["p1"])
@@ -38,7 +57,17 @@ def test_simplified_cohort_filter_properties_static_cohort_with_negation(self):
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": True, "type": "static-cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": True,
+ "type": "static-cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
def test_simplified_cohort_filter_properties_precalculated_cohort(self):
@@ -57,7 +86,14 @@ def test_simplified_cohort_filter_properties_precalculated_cohort(self):
result.to_dict(),
{
"type": "AND",
- "values": [{"key": "id", "negation": False, "type": "precalculated-cohort", "value": cohort.pk}],
+ "values": [
+ {
+ "key": "id",
+ "negation": False,
+ "type": "precalculated-cohort",
+ "value": cohort.pk,
+ }
+ ],
},
)
@@ -77,7 +113,14 @@ def test_simplified_cohort_filter_properties_precalculated_cohort_negated(self):
result.to_dict(),
{
"type": "AND",
- "values": [{"key": "id", "negation": True, "type": "precalculated-cohort", "value": cohort.pk}],
+ "values": [
+ {
+ "key": "id",
+ "negation": True,
+ "type": "precalculated-cohort",
+ "value": cohort.pk,
+ }
+ ],
},
)
@@ -113,7 +156,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_behav
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": False, "type": "cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": False,
+ "type": "cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
# with negation
@@ -122,7 +175,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_behav
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": True,
+ "type": "cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohort_filter(self):
@@ -139,7 +202,12 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor
"type": "AND",
"values": [
{"key": "name", "value": "test", "type": "person"},
- {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True},
+ {
+ "key": "id",
+ "value": cohort1.pk,
+ "type": "cohort",
+ "negation": True,
+ },
],
}
},
@@ -154,11 +222,19 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor
{
"type": "AND",
"values": [
- {"type": "AND", "values": [{"key": "name", "value": "test", "type": "person"}]},
+ {
+ "type": "AND",
+ "values": [{"key": "name", "value": "test", "type": "person"}],
+ },
{
"type": "AND",
"values": [
- {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True},
+ {
+ "key": "id",
+ "value": cohort1.pk,
+ "type": "cohort",
+ "negation": True,
+ },
],
},
],
@@ -171,7 +247,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": True,
+ "type": "cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_person_property_filters(self):
@@ -182,7 +268,10 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_
"properties": {
"type": "OR",
"values": [
- {"type": "AND", "values": [{"key": "name", "value": "test", "type": "person"}]},
+ {
+ "type": "AND",
+ "values": [{"key": "name", "value": "test", "type": "person"}],
+ },
{
"type": "OR",
"values": [
@@ -204,7 +293,10 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_
{
"type": "OR",
"values": [
- {"type": "AND", "values": [{"key": "name", "value": "test", "type": "person"}]},
+ {
+ "type": "AND",
+ "values": [{"key": "name", "value": "test", "type": "person"}],
+ },
{
"type": "OR",
"values": [
@@ -222,7 +314,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_
self.assertEqual(
result.to_dict(),
- {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]},
+ {
+ "type": "AND",
+ "values": [
+ {
+ "key": "id",
+ "negation": True,
+ "type": "cohort",
+ "value": cohort.pk,
+ }
+ ],
+ },
)
@@ -268,7 +370,18 @@ def test_dependent_cohorts_for_deeply_nested_cohort(self):
cohort3 = _create_cohort(
team=self.team,
name="cohort3",
- groups=[{"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort2.pk,
+ "type": "cohort",
+ "negation": True,
+ }
+ ]
+ }
+ ],
)
self.assertEqual(get_dependent_cohorts(cohort1), [])
@@ -291,7 +404,18 @@ def test_dependent_cohorts_for_circular_nested_cohort(self):
cohort3 = _create_cohort(
team=self.team,
name="cohort1",
- groups=[{"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort2.pk,
+ "type": "cohort",
+ "negation": True,
+ }
+ ]
+ }
+ ],
)
cohort1.groups = [{"properties": [{"key": "id", "value": cohort3.pk, "type": "cohort"}]}]
@@ -328,7 +452,12 @@ def test_dependent_cohorts_for_complex_nested_cohort(self):
{
"properties": [
{"key": "name", "value": "test3", "type": "person"},
- {"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True},
+ {
+ "key": "id",
+ "value": cohort2.pk,
+ "type": "cohort",
+ "negation": True,
+ },
]
}
],
@@ -337,7 +466,18 @@ def test_dependent_cohorts_for_complex_nested_cohort(self):
cohort4 = _create_cohort(
team=self.team,
name="cohort1",
- groups=[{"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort1.pk,
+ "type": "cohort",
+ "negation": True,
+ }
+ ]
+ }
+ ],
)
cohort5 = _create_cohort(
@@ -346,8 +486,18 @@ def test_dependent_cohorts_for_complex_nested_cohort(self):
groups=[
{
"properties": [
- {"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True},
- {"key": "id", "value": cohort4.pk, "type": "cohort", "negation": True},
+ {
+ "key": "id",
+ "value": cohort2.pk,
+ "type": "cohort",
+ "negation": True,
+ },
+ {
+ "key": "id",
+ "value": cohort4.pk,
+ "type": "cohort",
+ "negation": True,
+ },
]
}
],
diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py
index c5b8c39ec4f1d..800b937d51f15 100644
--- a/posthog/models/cohort/util.py
+++ b/posthog/models/cohort/util.py
@@ -52,7 +52,11 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext)
from posthog.queries.cohort_query import CohortQuery
query_builder = CohortQuery(
- Filter(data={"properties": cohort.properties}, team=cohort.team, hogql_context=hogql_context),
+ Filter(
+ data={"properties": cohort.properties},
+ team=cohort.team,
+ hogql_context=hogql_context,
+ ),
cohort.team,
cohort_pk=cohort.pk,
)
@@ -72,7 +76,13 @@ def format_static_cohort_query(cohort: Cohort, index: int, prepend: str) -> Tupl
def format_precalculated_cohort_query(cohort: Cohort, index: int, prepend: str = "") -> Tuple[str, Dict[str, Any]]:
filter_query = GET_PERSON_ID_BY_PRECALCULATED_COHORT_ID.format(index=index, prepend=prepend)
- return (filter_query, {f"{prepend}_cohort_id_{index}": cohort.pk, f"{prepend}_version_{index}": cohort.version})
+ return (
+ filter_query,
+ {
+ f"{prepend}_cohort_id_{index}": cohort.pk,
+ f"{prepend}_version_{index}": cohort.version,
+ },
+ )
def get_count_operator(count_operator: Optional[str]) -> str:
@@ -102,7 +112,10 @@ def get_entity_query(
elif action_id:
action = Action.objects.get(pk=action_id, team_id=team_id)
action_filter_query, action_params = format_action_filter(
- team_id=team_id, action=action, prepend="_{}_action".format(group_idx), hogql_context=hogql_context
+ team_id=team_id,
+ action=action,
+ prepend="_{}_action".format(group_idx),
+ hogql_context=hogql_context,
)
return action_filter_query, action_params
else:
@@ -128,7 +141,10 @@ def parse_entity_timestamps_in_days(days: int) -> Tuple[str, Dict[str, str]]:
return (
"AND timestamp >= %(date_from)s AND timestamp <= %(date_to)s",
- {"date_from": start_time.strftime("%Y-%m-%d %H:%M:%S"), "date_to": curr_time.strftime("%Y-%m-%d %H:%M:%S")},
+ {
+ "date_from": start_time.strftime("%Y-%m-%d %H:%M:%S"),
+ "date_to": curr_time.strftime("%Y-%m-%d %H:%M:%S"),
+ },
)
@@ -142,7 +158,10 @@ def parse_cohort_timestamps(start_time: Optional[str], end_time: Optional[str])
params = {"date_from": datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S")}
if end_time:
clause += "timestamp <= %(date_to)s"
- params = {**params, "date_to": datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S")}
+ params = {
+ **params,
+ "date_to": datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S"),
+ }
return clause, params
@@ -177,7 +196,10 @@ def format_filter_query(
def format_cohort_subquery(
- cohort: Cohort, index: int, hogql_context: HogQLContext, custom_match_field="person_id"
+ cohort: Cohort,
+ index: int,
+ hogql_context: HogQLContext,
+ custom_match_field="person_id",
) -> Tuple[str, Dict[str, Any]]:
is_precalculated = is_precalculated_query(cohort)
if is_precalculated:
@@ -189,7 +211,12 @@ def format_cohort_subquery(
return person_query, params
-def get_person_ids_by_cohort_id(team: Team, cohort_id: int, limit: Optional[int] = None, offset: Optional[int] = None):
+def get_person_ids_by_cohort_id(
+ team: Team,
+ cohort_id: int,
+ limit: Optional[int] = None,
+ offset: Optional[int] = None,
+):
from posthog.models.property.util import parse_prop_grouped_clauses
filter = Filter(data={"properties": [{"key": "id", "value": cohort_id, "type": "cohort"}]})
@@ -254,7 +281,10 @@ def recalculate_cohortpeople(cohort: Cohort, pending_version: int) -> Optional[i
if before_count:
logger.warn(
- "Recalculating cohortpeople starting", team_id=cohort.team_id, cohort_id=cohort.pk, size_before=before_count
+ "Recalculating cohortpeople starting",
+ team_id=cohort.team_id,
+ cohort_id=cohort.pk,
+ size_before=before_count,
)
recalcluate_cohortpeople_sql = RECALCULATE_COHORT_BY_ID.format(cohort_filter=cohort_query)
@@ -289,7 +319,11 @@ def clear_stale_cohortpeople(cohort: Cohort, before_version: int) -> None:
if cohort.version and cohort.version > 0:
stale_count_result = sync_execute(
STALE_COHORTPEOPLE,
- {"cohort_id": cohort.pk, "team_id": cohort.team_id, "version": before_version},
+ {
+ "cohort_id": cohort.pk,
+ "team_id": cohort.team_id,
+ "version": before_version,
+ },
)
if stale_count_result and len(stale_count_result) and len(stale_count_result[0]):
@@ -333,7 +367,14 @@ def simplified_cohort_filter_properties(cohort: Cohort, team: Team, is_negated=F
if is_precalculated_query(cohort):
return PropertyGroup(
type=PropertyOperatorType.AND,
- values=[Property(type="precalculated-cohort", key="id", value=cohort.pk, negation=is_negated)],
+ values=[
+ Property(
+ type="precalculated-cohort",
+ key="id",
+ value=cohort.pk,
+ negation=is_negated,
+ )
+ ],
)
# Cohort can have multiple match groups.
@@ -356,7 +397,14 @@ def simplified_cohort_filter_properties(cohort: Cohort, team: Team, is_negated=F
if is_negated:
return PropertyGroup(
type=PropertyOperatorType.AND,
- values=[Property(type="cohort", key="id", value=cohort.pk, negation=is_negated)],
+ values=[
+ Property(
+ type="cohort",
+ key="id",
+ value=cohort.pk,
+ negation=is_negated,
+ )
+ ],
)
# :TRICKY: We need to ensure we don't have infinite loops in here
# guaranteed during cohort creation
@@ -390,7 +438,9 @@ def get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]:
def get_dependent_cohorts(
- cohort: Cohort, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None
+ cohort: Cohort,
+ using_database: str = "default",
+ seen_cohorts_cache: Optional[Dict[str, Cohort]] = None,
) -> List[Cohort]:
if seen_cohorts_cache is None:
seen_cohorts_cache = {}
diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py
index d2a477d97e0f8..f20fc9fdcb0f2 100644
--- a/posthog/models/dashboard.py
+++ b/posthog/models/dashboard.py
@@ -18,14 +18,23 @@ class Dashboard(models.Model):
class CreationMode(models.TextChoices):
DEFAULT = "default", "Default"
- TEMPLATE = "template", "Template" # dashboard was created from a predefined template
- DUPLICATE = "duplicate", "Duplicate" # dashboard was duplicated from another dashboard
+ TEMPLATE = (
+ "template",
+ "Template",
+ ) # dashboard was created from a predefined template
+ DUPLICATE = (
+ "duplicate",
+ "Duplicate",
+ ) # dashboard was duplicated from another dashboard
class RestrictionLevel(models.IntegerChoices):
"""Collaboration restriction level (which is a dashboard setting). Sync with PrivilegeLevel."""
EVERYONE_IN_PROJECT_CAN_EDIT = 21, "Everyone in the project can edit"
- ONLY_COLLABORATORS_CAN_EDIT = 37, "Only those invited to this dashboard can edit"
+ ONLY_COLLABORATORS_CAN_EDIT = (
+ 37,
+ "Only those invited to this dashboard can edit",
+ )
class PrivilegeLevel(models.IntegerChoices):
"""Collaboration privilege level (which is a user property). Sync with RestrictionLevel."""
@@ -43,15 +52,25 @@ class PrivilegeLevel(models.IntegerChoices):
last_accessed_at: models.DateTimeField = models.DateTimeField(blank=True, null=True)
filters: models.JSONField = models.JSONField(default=dict)
creation_mode: models.CharField = models.CharField(max_length=16, default="default", choices=CreationMode.choices)
- restriction_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
- default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, choices=RestrictionLevel.choices
+ restriction_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+ default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT,
+ choices=RestrictionLevel.choices,
+ )
+ insights = models.ManyToManyField(
+ "posthog.Insight",
+ related_name="dashboards",
+ through="DashboardTile",
+ blank=True,
)
- insights = models.ManyToManyField("posthog.Insight", related_name="dashboards", through="DashboardTile", blank=True)
# Deprecated in favour of app-wide tagging model. See EnterpriseTaggedItem
deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list)
deprecated_tags_v2: ArrayField = ArrayField(
- models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags"
+ models.CharField(max_length=32),
+ null=True,
+ blank=True,
+ default=None,
+ db_column="tags",
)
# DEPRECATED: using the new "sharing" relation instead
diff --git a/posthog/models/dashboard_tile.py b/posthog/models/dashboard_tile.py
index ed4a885bfc1c1..7cc6b2601cb62 100644
--- a/posthog/models/dashboard_tile.py
+++ b/posthog/models/dashboard_tile.py
@@ -16,7 +16,11 @@ class Text(models.Model):
created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True)
last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now)
last_modified_by: models.ForeignKey = models.ForeignKey(
- "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_text_tiles"
+ "User",
+ on_delete=models.SET_NULL,
+ null=True,
+ blank=True,
+ related_name="modified_text_tiles",
)
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE)
@@ -33,8 +37,18 @@ class DashboardTile(models.Model):
# Relations
dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.CASCADE, related_name="tiles")
- insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, related_name="dashboard_tiles", null=True)
- text = models.ForeignKey("posthog.Text", on_delete=models.CASCADE, related_name="dashboard_tiles", null=True)
+ insight = models.ForeignKey(
+ "posthog.Insight",
+ on_delete=models.CASCADE,
+ related_name="dashboard_tiles",
+ null=True,
+ )
+ text = models.ForeignKey(
+ "posthog.Text",
+ on_delete=models.CASCADE,
+ related_name="dashboard_tiles",
+ null=True,
+ )
# Tile layout and style
layouts: models.JSONField = models.JSONField(default=dict)
@@ -57,9 +71,14 @@ class Meta:
condition=Q(("insight__isnull", False)),
),
UniqueConstraint(
- fields=["dashboard", "text"], name=f"unique_dashboard_text", condition=Q(("text__isnull", False))
+ fields=["dashboard", "text"],
+ name=f"unique_dashboard_text",
+ condition=Q(("text__isnull", False)),
+ ),
+ models.CheckConstraint(
+ check=build_check(("insight", "text")),
+ name="dash_tile_exactly_one_related_object",
),
- models.CheckConstraint(check=build_check(("insight", "text")), name="dash_tile_exactly_one_related_object"),
]
@property
@@ -94,7 +113,11 @@ def save(self, *args, **kwargs) -> None:
def copy_to_dashboard(self, dashboard: Dashboard) -> None:
DashboardTile.objects.create(
- dashboard=dashboard, insight=self.insight, text=self.text, color=self.color, layouts=self.layouts
+ dashboard=dashboard,
+ insight=self.insight,
+ text=self.text,
+ color=self.color,
+ layouts=self.layouts,
)
@staticmethod
diff --git a/posthog/models/early_access_feature.py b/posthog/models/early_access_feature.py
index e73c463b96a09..3ec1c99543b9a 100644
--- a/posthog/models/early_access_feature.py
+++ b/posthog/models/early_access_feature.py
@@ -12,7 +12,10 @@ class Stage(models.TextChoices):
ARCHIVED = "archived", "archived"
team: models.ForeignKey = models.ForeignKey(
- "posthog.Team", on_delete=models.CASCADE, related_name="features", related_query_name="feature"
+ "posthog.Team",
+ on_delete=models.CASCADE,
+ related_name="features",
+ related_query_name="feature",
)
feature_flag: models.ForeignKey = models.ForeignKey(
"posthog.FeatureFlag",
diff --git a/posthog/models/element_group.py b/posthog/models/element_group.py
index d248ba9d25739..3d399f2559844 100644
--- a/posthog/models/element_group.py
+++ b/posthog/models/element_group.py
@@ -30,7 +30,8 @@ def create(self, *args: Any, **kwargs: Any):
group = super().create(*args, **kwargs)
except:
return ElementGroup.objects.get(
- hash=kwargs["hash"], team_id=kwargs["team"].pk if kwargs.get("team") else kwargs["team_id"]
+ hash=kwargs["hash"],
+ team_id=kwargs["team"].pk if kwargs.get("team") else kwargs["team_id"],
)
for element in elements:
element.group = group
diff --git a/posthog/models/entity/entity.py b/posthog/models/entity/entity.py
index 8f62e5ea98aba..aced3a18a8842 100644
--- a/posthog/models/entity/entity.py
+++ b/posthog/models/entity/entity.py
@@ -65,7 +65,10 @@ class Entity(PropertyMixin):
def __init__(self, data: Dict[str, Any]) -> None:
self.id = data.get("id")
- if data.get("type") not in [TREND_FILTER_TYPE_ACTIONS, TREND_FILTER_TYPE_EVENTS]:
+ if data.get("type") not in [
+ TREND_FILTER_TYPE_ACTIONS,
+ TREND_FILTER_TYPE_EVENTS,
+ ]:
raise ValueError("Type needs to be either TREND_FILTER_TYPE_ACTIONS or TREND_FILTER_TYPE_EVENTS")
self.type = data["type"]
order_provided = data.get("order")
@@ -150,7 +153,15 @@ def get_action(self) -> Action:
raise ValidationError(f"Action ID {self.id} does not exist!")
__repr__ = sane_repr(
- "id", "type", "order", "name", "custom_name", "math", "math_property", "math_hogql", "properties"
+ "id",
+ "type",
+ "order",
+ "name",
+ "custom_name",
+ "math",
+ "math_property",
+ "math_hogql",
+ "properties",
)
diff --git a/posthog/models/event/query_event_list.py b/posthog/models/event/query_event_list.py
index 527bfb62645ea..de70d511da156 100644
--- a/posthog/models/event/query_event_list.py
+++ b/posthog/models/event/query_event_list.py
@@ -87,7 +87,10 @@ def query_events_list(
tzinfo=team.timezone_info,
)
prop_filters, prop_filter_params = parse_prop_grouped_clauses(
- team_id=team.pk, property_group=filter.property_groups, has_person_id_joined=False, hogql_context=hogql_context
+ team_id=team.pk,
+ property_group=filter.property_groups,
+ has_person_id_joined=False,
+ hogql_context=hogql_context,
)
if action_id:
@@ -106,7 +109,10 @@ def query_events_list(
if prop_filters != "":
return insight_query_with_columns(
SELECT_EVENT_BY_TEAM_AND_CONDITIONS_FILTERS_SQL.format(
- conditions=conditions, limit=limit_sql, filters=prop_filters, order=order
+ conditions=conditions,
+ limit=limit_sql,
+ filters=prop_filters,
+ order=order,
),
{
"team_id": team.pk,
diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py
index 7deb2ee87b291..2cd36b34e1dd2 100644
--- a/posthog/models/event/util.py
+++ b/posthog/models/event/util.py
@@ -12,7 +12,11 @@
from posthog.kafka_client.client import ClickhouseProducer
from posthog.kafka_client.topics import KAFKA_EVENTS_JSON
from posthog.models import Group
-from posthog.models.element.element import Element, chain_to_elements, elements_to_string
+from posthog.models.element.element import (
+ Element,
+ chain_to_elements,
+ elements_to_string,
+)
from posthog.models.event.sql import BULK_INSERT_EVENT_SQL, INSERT_EVENT_SQL
from posthog.models.person import Person
from posthog.models.team import Team
@@ -167,7 +171,8 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
else:
try:
person = Person.objects.get(
- persondistinctid__distinct_id=event["distinct_id"], persondistinctid__team_id=team_id
+ persondistinctid__distinct_id=event["distinct_id"],
+ persondistinctid__team_id=team_id,
)
person_properties = person.properties
person_id = person.uuid
@@ -179,7 +184,10 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
event = {
**event,
- "person_properties": {**person_properties, **event.get("person_properties", {})},
+ "person_properties": {
+ **person_properties,
+ **event.get("person_properties", {}),
+ },
"person_id": person_id,
"person_created_at": person_created_at,
}
@@ -189,13 +197,20 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
if property_key.startswith("$group_"):
group_type_index = property_key[-1]
try:
- group = Group.objects.get(team_id=team_id, group_type_index=group_type_index, group_key=value)
+ group = Group.objects.get(
+ team_id=team_id,
+ group_type_index=group_type_index,
+ group_key=value,
+ )
group_property_key = f"group{group_type_index}_properties"
group_created_at_key = f"group{group_type_index}_created_at"
event = {
**event,
- group_property_key: {**group.group_properties, **event.get(group_property_key, {})},
+ group_property_key: {
+ **group.group_properties,
+ **event.get(group_property_key, {}),
+ },
group_created_at_key: event.get(group_created_at_key, datetime64_default_timestamp),
}
@@ -238,7 +253,10 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
else datetime64_default_timestamp,
}
- params = {**params, **{"{}_{}".format(key, index): value for key, value in event.items()}}
+ params = {
+ **params,
+ **{"{}_{}".format(key, index): value for key, value in event.items()},
+ }
sync_execute(BULK_INSERT_EVENT_SQL() + ", ".join(inserts), params, flush=False)
diff --git a/posthog/models/event_definition.py b/posthog/models/event_definition.py
index 7e2aa00d48819..5b22a9e6a2869 100644
--- a/posthog/models/event_definition.py
+++ b/posthog/models/event_definition.py
@@ -8,7 +8,10 @@
class EventDefinition(UUIDModel):
team: models.ForeignKey = models.ForeignKey(
- Team, on_delete=models.CASCADE, related_name="event_definitions", related_query_name="team"
+ Team,
+ on_delete=models.CASCADE,
+ related_name="event_definitions",
+ related_query_name="team",
)
name: models.CharField = models.CharField(max_length=400)
created_at: models.DateTimeField = models.DateTimeField(default=timezone.now, null=True)
@@ -26,7 +29,9 @@ class Meta:
unique_together = ("team", "name")
indexes = [
GinIndex(
- name="index_event_definition_name", fields=["name"], opclasses=["gin_trgm_ops"]
+ name="index_event_definition_name",
+ fields=["name"],
+ opclasses=["gin_trgm_ops"],
) # To speed up DB-based fuzzy searching
]
diff --git a/posthog/models/event_property.py b/posthog/models/event_property.py
index 458567c376ab4..4824248ddfce2 100644
--- a/posthog/models/event_property.py
+++ b/posthog/models/event_property.py
@@ -12,9 +12,13 @@ class EventProperty(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
- fields=["team", "event", "property"], name="posthog_event_property_unique_team_event_property"
+ fields=["team", "event", "property"],
+ name="posthog_event_property_unique_team_event_property",
)
]
- indexes = [models.Index(fields=["team", "event"]), models.Index(fields=["team", "property"])]
+ indexes = [
+ models.Index(fields=["team", "event"]),
+ models.Index(fields=["team", "property"]),
+ ]
__repr__ = sane_repr("event", "property", "team_id")
diff --git a/posthog/models/exported_asset.py b/posthog/models/exported_asset.py
index eb3bf961c9aaa..675245e867634 100644
--- a/posthog/models/exported_asset.py
+++ b/posthog/models/exported_asset.py
@@ -96,7 +96,11 @@ def file_ext(self):
return self.export_format.split("/")[1]
def get_analytics_metadata(self):
- return {"export_format": self.export_format, "dashboard_id": self.dashboard_id, "insight_id": self.insight_id}
+ return {
+ "export_format": self.export_format,
+ "dashboard_id": self.dashboard_id,
+ "insight_id": self.insight_id,
+ }
def get_public_content_url(self, expiry_delta: Optional[timedelta] = None):
token = get_public_access_token(self, expiry_delta)
@@ -112,7 +116,11 @@ def delete_expired_assets(cls):
def get_public_access_token(asset: ExportedAsset, expiry_delta: Optional[timedelta] = None) -> str:
if not expiry_delta:
expiry_delta = timedelta(days=PUBLIC_ACCESS_TOKEN_EXP_DAYS)
- return encode_jwt({"id": asset.id}, expiry_delta=expiry_delta, audience=PosthogJwtAudience.EXPORTED_ASSET)
+ return encode_jwt(
+ {"id": asset.id},
+ expiry_delta=expiry_delta,
+ audience=PosthogJwtAudience.EXPORTED_ASSET,
+ )
def asset_for_token(token: str) -> ExportedAsset:
@@ -153,7 +161,10 @@ def save_content(exported_asset: ExportedAsset, content: bytes) -> None:
except ObjectStorageError as ose:
capture_exception(ose)
logger.error(
- "exported_asset.object-storage-error", exported_asset_id=exported_asset.id, exception=ose, exc_info=True
+ "exported_asset.object-storage-error",
+ exported_asset_id=exported_asset.id,
+ exception=ose,
+ exc_info=True,
)
save_content_to_exported_asset(exported_asset, content)
diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py
index 29d8e89296e49..b45271bb16845 100644
--- a/posthog/models/feature_flag/feature_flag.py
+++ b/posthog/models/feature_flag/feature_flag.py
@@ -8,7 +8,10 @@
from django.utils import timezone
from sentry_sdk.api import capture_exception
-from posthog.constants import ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER, PropertyOperatorType
+from posthog.constants import (
+ ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER,
+ PropertyOperatorType,
+)
from posthog.models.cohort import Cohort
from posthog.models.experiment import Experiment
from posthog.models.property import GroupTypeIndex
@@ -120,12 +123,17 @@ def get_filters(self):
# We don't want to migrate to avoid /decide endpoint downtime until this code has been deployed
return {
"groups": [
- {"properties": self.filters.get("properties", []), "rollout_percentage": self.rollout_percentage}
+ {
+ "properties": self.filters.get("properties", []),
+ "rollout_percentage": self.rollout_percentage,
+ }
],
}
def transform_cohort_filters_for_easy_evaluation(
- self, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None
+ self,
+ using_database: str = "default",
+ seen_cohorts_cache: Optional[Dict[str, Cohort]] = None,
):
"""
Expands cohort filters into person property filters when possible.
@@ -248,7 +256,9 @@ def transform_cohort_filters_for_easy_evaluation(
return parsed_conditions
def get_cohort_ids(
- self, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None
+ self,
+ using_database: str = "default",
+ seen_cohorts_cache: Optional[Dict[str, Cohort]] = None,
) -> List[int]:
from posthog.models.cohort.util import get_dependent_cohorts
@@ -274,7 +284,9 @@ def get_cohort_ids(
[
dependent_cohort.pk
for dependent_cohort in get_dependent_cohorts(
- cohort, using_database=using_database, seen_cohorts_cache=seen_cohorts_cache
+ cohort,
+ using_database=using_database,
+ seen_cohorts_cache=seen_cohorts_cache,
)
]
)
@@ -310,7 +322,8 @@ class FeatureFlagHashKeyOverride(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
- fields=["team", "person", "feature_flag_key"], name="Unique hash_key for a user/team/feature_flag combo"
+ fields=["team", "person", "feature_flag_key"],
+ name="Unique hash_key for a user/team/feature_flag combo",
)
]
@@ -329,7 +342,8 @@ class FeatureFlagOverride(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
- fields=["user", "feature_flag", "team"], name="unique feature flag for a user/team combo"
+ fields=["user", "feature_flag", "team"],
+ name="unique feature flag for a user/team combo",
)
]
@@ -340,7 +354,9 @@ class Meta:
def set_feature_flags_for_team_in_cache(
- team_id: int, feature_flags: Optional[List[FeatureFlag]] = None, using_database: str = "default"
+ team_id: int,
+ feature_flags: Optional[List[FeatureFlag]] = None,
+ using_database: str = "default",
) -> List[FeatureFlag]:
from posthog.api.feature_flag import MinimalFeatureFlagSerializer
@@ -391,5 +407,8 @@ class FeatureFlagDashboards(models.Model):
class Meta:
constraints = [
- models.UniqueConstraint(fields=["feature_flag", "dashboard"], name="unique feature flag for a dashboard")
+ models.UniqueConstraint(
+ fields=["feature_flag", "dashboard"],
+ name="unique feature flag for a dashboard",
+ )
]
diff --git a/posthog/models/feature_flag/flag_analytics.py b/posthog/models/feature_flag/flag_analytics.py
index e949de479d166..367c836f75882 100644
--- a/posthog/models/feature_flag/flag_analytics.py
+++ b/posthog/models/feature_flag/flag_analytics.py
@@ -1,6 +1,8 @@
from typing import TYPE_CHECKING, Tuple
from posthog.constants import FlagRequestType
-from posthog.helpers.dashboard_templates import add_enriched_insights_to_feature_flag_dashboard
+from posthog.helpers.dashboard_templates import (
+ add_enriched_insights_to_feature_flag_dashboard,
+)
from posthog.models.feature_flag.feature_flag import FeatureFlag
from posthog.redis import redis, get_client
import time
@@ -68,9 +70,11 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str
with client.lock(f"{REDIS_LOCK_TOKEN}:{team_id}", timeout=60, blocking=False):
decide_key_name = get_team_request_key(team_id, FlagRequestType.DECIDE)
- total_decide_request_count, min_time, max_time = _extract_total_count_for_key_from_redis_hash(
- client, decide_key_name
- )
+ (
+ total_decide_request_count,
+ min_time,
+ max_time,
+ ) = _extract_total_count_for_key_from_redis_hash(client, decide_key_name)
if total_decide_request_count > 0 and settings.DECIDE_BILLING_ANALYTICS_TOKEN:
ph_client.capture(
@@ -87,9 +91,11 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str
)
local_evaluation_key_name = get_team_request_key(team_id, FlagRequestType.LOCAL_EVALUATION)
- total_local_evaluation_request_count, min_time, max_time = _extract_total_count_for_key_from_redis_hash(
- client, local_evaluation_key_name
- )
+ (
+ total_local_evaluation_request_count,
+ min_time,
+ max_time,
+ ) = _extract_total_count_for_key_from_redis_hash(client, local_evaluation_key_name)
if total_local_evaluation_request_count > 0 and settings.DECIDE_BILLING_ANALYTICS_TOKEN:
ph_client.capture(
@@ -113,7 +119,6 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str
def find_flags_with_enriched_analytics(begin: datetime, end: datetime):
-
result = sync_execute(
"""
SELECT team_id, JSONExtractString(properties, 'feature_flag') as flag_key
diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py
index 059b60d7211f8..05c5bbccb8f63 100644
--- a/posthog/models/feature_flag/flag_matching.py
+++ b/posthog/models/feature_flag/flag_matching.py
@@ -25,7 +25,10 @@
from posthog.models.cohort import Cohort
from posthog.models.utils import execute_with_timeout
from posthog.queries.base import match_property, properties_to_Q
-from posthog.database_healthcheck import postgres_healthcheck, DATABASE_FOR_FLAG_MATCHING
+from posthog.database_healthcheck import (
+ postgres_healthcheck,
+ DATABASE_FOR_FLAG_MATCHING,
+)
from posthog.utils import label_for_team_id_to_track
from .feature_flag import (
@@ -156,7 +159,11 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch:
# Match for boolean super condition first
if feature_flag.filters.get("super_groups", None):
- is_match, super_condition_value, evaluation_reason = self.is_super_condition_match(feature_flag)
+ (
+ is_match,
+ super_condition_value,
+ evaluation_reason,
+ ) = self.is_super_condition_match(feature_flag)
if is_match:
payload = self.get_matching_payload(super_condition_value, None, feature_flag)
return FeatureFlagMatch(
@@ -184,11 +191,18 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch:
payload = self.get_matching_payload(is_match, variant, feature_flag)
return FeatureFlagMatch(
- match=True, variant=variant, reason=evaluation_reason, condition_index=index, payload=payload
+ match=True,
+ variant=variant,
+ reason=evaluation_reason,
+ condition_index=index,
+ payload=payload,
)
- highest_priority_evaluation_reason, highest_priority_index = self.get_highest_priority_match_evaluation(
- highest_priority_evaluation_reason, highest_priority_index, evaluation_reason, index
+ (highest_priority_evaluation_reason, highest_priority_index,) = self.get_highest_priority_match_evaluation(
+ highest_priority_evaluation_reason,
+ highest_priority_index,
+ evaluation_reason,
+ index,
)
return FeatureFlagMatch(
@@ -227,7 +241,12 @@ def get_matches(self) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dic
faced_error_computing_flags = True
handle_feature_flag_exception(err, "[Feature Flags] Error computing flags")
- return flag_values, flag_evaluation_reasons, flag_payloads, faced_error_computing_flags
+ return (
+ flag_values,
+ flag_evaluation_reasons,
+ flag_payloads,
+ faced_error_computing_flags,
+ )
def get_matching_variant(self, feature_flag: FeatureFlag) -> Optional[str]:
for variant in self.variant_lookup_table(feature_flag):
@@ -259,7 +278,11 @@ def is_super_condition_match(self, feature_flag: FeatureFlag) -> Tuple[bool, boo
super_condition_value = self._super_condition_matches(feature_flag)
if super_condition_value_is_set:
- return True, super_condition_value, FeatureFlagMatchReason.SUPER_CONDITION_VALUE
+ return (
+ True,
+ super_condition_value,
+ FeatureFlagMatchReason.SUPER_CONDITION_VALUE,
+ )
# Evaluate if properties are empty
if feature_flag.super_conditions and len(feature_flag.super_conditions) > 0:
@@ -290,7 +313,8 @@ def is_condition_match(
target_properties = self.property_value_overrides
if feature_flag.aggregation_group_type_index is not None:
target_properties = self.group_property_value_overrides.get(
- self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], {}
+ self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index],
+ {},
)
condition_match = all(match_property(property, target_properties) for property in properties)
else:
@@ -344,7 +368,9 @@ def query_conditions(self) -> Dict[str, bool]:
all_conditions: Dict = {}
team_id = self.feature_flags[0].team_id
person_query: QuerySet = Person.objects.using(DATABASE_FOR_FLAG_MATCHING).filter(
- team_id=team_id, persondistinctid__distinct_id=self.distinct_id, persondistinctid__team_id=team_id
+ team_id=team_id,
+ persondistinctid__distinct_id=self.distinct_id,
+ persondistinctid__team_id=team_id,
)
basic_group_query: QuerySet = Group.objects.using(DATABASE_FOR_FLAG_MATCHING).filter(team_id=team_id)
group_query_per_group_type_mapping: Dict[GroupTypeIndex, Tuple[QuerySet, List[str]]] = {}
@@ -372,7 +398,8 @@ def condition_eval(key, condition):
target_properties = self.property_value_overrides
if feature_flag.aggregation_group_type_index is not None:
target_properties = self.group_property_value_overrides.get(
- self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], {}
+ self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index],
+ {},
)
expr = properties_to_Q(
Filter(data=condition).property_groups.flat,
@@ -401,7 +428,8 @@ def condition_eval(key, condition):
person_query = person_query.annotate(
**{
key: ExpressionWrapper(
- expr if expr else RawSQL("true", []), output_field=BooleanField()
+ expr if expr else RawSQL("true", []),
+ output_field=BooleanField(),
)
}
)
@@ -410,13 +438,15 @@ def condition_eval(key, condition):
if feature_flag.aggregation_group_type_index not in group_query_per_group_type_mapping:
# ignore flags that didn't have the right groups passed in
return
- group_query, group_fields = group_query_per_group_type_mapping[
- feature_flag.aggregation_group_type_index
- ]
+ (
+ group_query,
+ group_fields,
+ ) = group_query_per_group_type_mapping[feature_flag.aggregation_group_type_index]
group_query = group_query.annotate(
**{
key: ExpressionWrapper(
- expr if expr else RawSQL("true", []), output_field=BooleanField()
+ expr if expr else RawSQL("true", []),
+ output_field=BooleanField(),
)
}
)
@@ -468,7 +498,10 @@ def condition_eval(key, condition):
if len(person_query) > 0:
all_conditions = {**all_conditions, **person_query[0]}
- for group_query, group_fields in group_query_per_group_type_mapping.values():
+ for (
+ group_query,
+ group_fields,
+ ) in group_query_per_group_type_mapping.values():
group_query = group_query.values(*group_fields)
if len(group_query) > 0:
assert len(group_query) == 1, f"Expected 1 group query result, got {len(group_query)}"
@@ -514,7 +547,9 @@ def get_hash(self, feature_flag: FeatureFlag, salt="") -> float:
return hash_val / __LONG_SCALE__
def can_compute_locally(
- self, properties: List[Property], group_type_index: Optional[GroupTypeIndex] = None
+ self,
+ properties: List[Property],
+ group_type_index: Optional[GroupTypeIndex] = None,
) -> bool:
target_properties = self.property_value_overrides
if group_type_index is not None:
@@ -661,7 +696,10 @@ def get_all_feature_flags(
SELECT key FROM posthog_featureflag WHERE team_id = %(team_id)s AND ensure_experience_continuity = TRUE AND active = TRUE AND deleted = FALSE
AND key NOT IN (SELECT feature_flag_key FROM existing_overrides)
"""
- cursor.execute(query, {"team_id": team_id, "distinct_ids": tuple(distinct_ids)}) # type: ignore
+ cursor.execute(
+ query,
+ {"team_id": team_id, "distinct_ids": tuple(distinct_ids)}, # type: ignore
+ )
flags_with_no_overrides = [row[0] for row in cursor.fetchall()]
should_write_hash_key_override = len(flags_with_no_overrides) > 0
except Exception as e:
@@ -686,7 +724,8 @@ def get_all_feature_flags(
)
team_id_label = label_for_team_id_to_track(team_id)
FLAG_HASH_KEY_WRITES_COUNTER.labels(
- team_id=team_id_label, successful_write=writing_hash_key_override
+ team_id=team_id_label,
+ successful_write=writing_hash_key_override,
).inc()
except Exception as e:
# If the database is in read-only mode, we can't handle experience continuity flags,
@@ -695,7 +734,9 @@ def get_all_feature_flags(
# For this case, and for any other case, do not error out on decide, just continue assuming continuity couldn't happen.
# At the same time, don't set db down, because the read-replica might still be up.
handle_feature_flag_exception(
- e, "[Feature Flags] Error while setting feature flag hash key overrides", set_healthcheck=False
+ e,
+ "[Feature Flags] Error while setting feature flag hash key overrides",
+ set_healthcheck=False,
)
# This is the read-path for experience continuity. We need to get the overrides, and to do that, we get the person_id.
@@ -783,14 +824,24 @@ def set_feature_flag_hash_key_overrides(team_id: int, distinct_ids: List[str], h
# We don't want to return an error response for `/decide` just because of this.
# There can be cases where it's a different override (like a person on two different browser sending the same request at the same time),
# but we don't care about that case because first override wins.
- cursor.execute(query, {"team_id": team_id, "distinct_ids": tuple(distinct_ids), "hash_key_override": hash_key_override}) # type: ignore
+ cursor.execute(
+ query,
+ {
+ "team_id": team_id,
+ "distinct_ids": tuple(distinct_ids), # type: ignore
+ "hash_key_override": hash_key_override,
+ },
+ )
return cursor.rowcount > 0
except IntegrityError as e:
if "violates foreign key constraint" in str(e) and retry < max_retries - 1:
# This can happen if a person is deleted while we're trying to add overrides for it.
# This is the only case when we retry.
- logger.info("Retrying set_feature_flag_hash_key_overrides due to person deletion", exc_info=True)
+ logger.info(
+ "Retrying set_feature_flag_hash_key_overrides due to person deletion",
+ exc_info=True,
+ )
time.sleep(retry_delay)
else:
raise e
diff --git a/posthog/models/feature_flag/permissions.py b/posthog/models/feature_flag/permissions.py
index 3df6cc1fe16b4..95d39636c4c07 100644
--- a/posthog/models/feature_flag/permissions.py
+++ b/posthog/models/feature_flag/permissions.py
@@ -22,7 +22,8 @@ def can_user_edit_feature_flag(request, feature_flag):
all_role_memberships = request.user.role_memberships.select_related("role").all()
try:
feature_flag_resource_access = OrganizationResourceAccess.objects.get(
- organization=request.user.organization, resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS
+ organization=request.user.organization,
+ resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS,
)
if feature_flag_resource_access.access_level >= OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT:
return True
@@ -30,7 +31,10 @@ def can_user_edit_feature_flag(request, feature_flag):
except OrganizationResourceAccess.DoesNotExist:
org_level = OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT
- role_level = max([membership.role.feature_flags_access_level for membership in all_role_memberships], default=0)
+ role_level = max(
+ [membership.role.feature_flags_access_level for membership in all_role_memberships],
+ default=0,
+ )
if role_level == 0:
final_level = org_level
diff --git a/posthog/models/feature_flag/user_blast_radius.py b/posthog/models/feature_flag/user_blast_radius.py
index 317c12e8a18ac..5843e3513e6b1 100644
--- a/posthog/models/feature_flag/user_blast_radius.py
+++ b/posthog/models/feature_flag/user_blast_radius.py
@@ -9,15 +9,17 @@
from posthog.models.team.team import Team
-def get_user_blast_radius(team: Team, feature_flag_condition: dict, group_type_index: Optional[GroupTypeIndex] = None):
-
+def get_user_blast_radius(
+ team: Team,
+ feature_flag_condition: dict,
+ group_type_index: Optional[GroupTypeIndex] = None,
+):
from posthog.queries.person_query import PersonQuery
# No rollout % calculations here, since it makes more sense to compute that on the frontend
properties = feature_flag_condition.get("properties") or []
if group_type_index is not None:
-
try:
from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery
except Exception:
diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py
index 13938222d317e..b8a91b0b92527 100644
--- a/posthog/models/feedback/survey.py
+++ b/posthog/models/feedback/survey.py
@@ -16,7 +16,10 @@ class Meta:
constraints = [models.UniqueConstraint(fields=["team", "name"], name="unique survey name for team")]
team: models.ForeignKey = models.ForeignKey(
- "posthog.Team", on_delete=models.CASCADE, related_name="surveys", related_query_name="survey"
+ "posthog.Team",
+ on_delete=models.CASCADE,
+ related_name="surveys",
+ related_query_name="survey",
)
name: models.CharField = models.CharField(max_length=400)
description: models.TextField = models.TextField(blank=True)
@@ -57,7 +60,12 @@ class Meta:
@mutable_receiver([post_save, post_delete], sender=Survey)
def update_surveys_opt_in(sender, instance, **kwargs):
active_surveys_count = (
- Survey.objects.filter(team_id=instance.team_id, start_date__isnull=False, end_date__isnull=True, archived=False)
+ Survey.objects.filter(
+ team_id=instance.team_id,
+ start_date__isnull=False,
+ end_date__isnull=True,
+ archived=False,
+ )
.exclude(type="api")
.count()
)
diff --git a/posthog/models/filters/base_filter.py b/posthog/models/filters/base_filter.py
index 193c71f574b50..8b86de9b23129 100644
--- a/posthog/models/filters/base_filter.py
+++ b/posthog/models/filters/base_filter.py
@@ -40,7 +40,12 @@ def __init__(
elif request.data and request.data.get(PROPERTIES):
properties = request.data[PROPERTIES]
- data = {**request.GET.dict(), **request.data, **(data if data else {}), **({PROPERTIES: properties})}
+ data = {
+ **request.GET.dict(),
+ **request.data,
+ **(data if data else {}),
+ **({PROPERTIES: properties}),
+ }
elif data is None:
raise ValueError("You need to define either a data dict or a request")
@@ -50,7 +55,8 @@ def __init__(
# Set the HogQL context for the request
self.hogql_context = self.kwargs.get(
- "hogql_context", HogQLContext(within_non_hogql_query=True, team_id=self.team.pk if self.team else None)
+ "hogql_context",
+ HogQLContext(within_non_hogql_query=True, team_id=self.team.pk if self.team else None),
)
if self.team:
self.hogql_context.person_on_events_mode = self.team.person_on_events_mode
@@ -77,7 +83,8 @@ def toJSON(self):
def shallow_clone(self, overrides: Dict[str, Any]):
"Clone the filter's data while sharing the HogQL context"
return type(self)(
- data={**self._data, **overrides}, **{**self.kwargs, "team": self.team, "hogql_context": self.hogql_context}
+ data={**self._data, **overrides},
+ **{**self.kwargs, "team": self.team, "hogql_context": self.hogql_context},
)
def query_tags(self) -> Dict[str, Any]:
diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py
index bbb727407c6be..ae50d71f30656 100644
--- a/posthog/models/filters/mixins/common.py
+++ b/posthog/models/filters/mixins/common.py
@@ -50,7 +50,12 @@
)
from posthog.models.entity import Entity, ExclusionEntity, MathType
from posthog.models.filters.mixins.base import BaseParamMixin, BreakdownType
-from posthog.models.filters.mixins.utils import cached_property, include_dict, include_query_tags, process_bool
+from posthog.models.filters.mixins.utils import (
+ cached_property,
+ include_dict,
+ include_query_tags,
+ process_bool,
+)
from posthog.models.filters.utils import GroupTypeIndex, validate_group_type_index
from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse_with_delta_mapping
@@ -239,7 +244,10 @@ def breakdown_group_type_index(self) -> Optional[GroupTypeIndex]:
@include_dict
def breakdown_type_and_group_to_dict(self):
if self.breakdown_type == "group":
- return {BREAKDOWN_TYPE: self.breakdown_type, BREAKDOWN_GROUP_TYPE_INDEX: self.breakdown_group_type_index}
+ return {
+ BREAKDOWN_TYPE: self.breakdown_type,
+ BREAKDOWN_GROUP_TYPE_INDEX: self.breakdown_group_type_index,
+ }
elif self.breakdown_type:
return {BREAKDOWN_TYPE: self.breakdown_type}
else:
@@ -343,7 +351,11 @@ def date_from(self) -> Optional[datetime.datetime]:
if self._date_from == "all":
return None
elif isinstance(self._date_from, str):
- date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_from, self.team.timezone_info, always_truncate=True) # type: ignore
+ date, delta_mapping = relative_date_parse_with_delta_mapping(
+ self._date_from,
+ self.team.timezone_info, # type: ignore
+ always_truncate=True,
+ )
self.date_from_delta_mapping = delta_mapping
return date
else:
@@ -361,7 +373,11 @@ def date_to(self) -> datetime.datetime:
if isinstance(self._date_to, str):
try:
return datetime.datetime.strptime(self._date_to, "%Y-%m-%d").replace(
- hour=23, minute=59, second=59, microsecond=999999, tzinfo=ZoneInfo("UTC")
+ hour=23,
+ minute=59,
+ second=59,
+ microsecond=999999,
+ tzinfo=ZoneInfo("UTC"),
)
except ValueError:
try:
@@ -369,7 +385,11 @@ def date_to(self) -> datetime.datetime:
tzinfo=ZoneInfo("UTC")
)
except ValueError:
- date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_to, self.team.timezone_info, always_truncate=True) # type: ignore
+ date, delta_mapping = relative_date_parse_with_delta_mapping(
+ self._date_to,
+ self.team.timezone_info, # type: ignore
+ always_truncate=True,
+ )
self.date_to_delta_mapping = delta_mapping
return date
else:
diff --git a/posthog/models/filters/mixins/funnel.py b/posthog/models/filters/mixins/funnel.py
index 4c13029ec0fb6..91312a5030478 100644
--- a/posthog/models/filters/mixins/funnel.py
+++ b/posthog/models/filters/mixins/funnel.py
@@ -264,7 +264,11 @@ class FunnelTrendsPersonsMixin(BaseParamMixin):
@cached_property
def entrance_period_start(self) -> Optional[datetime.datetime]:
entrance_period_start_raw = self._data.get(ENTRANCE_PERIOD_START)
- return relative_date_parse(entrance_period_start_raw, self.team.timezone_info) if entrance_period_start_raw else None # type: ignore
+ return (
+ relative_date_parse(entrance_period_start_raw, self.team.timezone_info) # type: ignore
+ if entrance_period_start_raw
+ else None
+ )
@cached_property
def drop_off(self) -> Optional[bool]:
diff --git a/posthog/models/filters/mixins/property.py b/posthog/models/filters/mixins/property.py
index 5812967c035e2..7ca409d4897d1 100644
--- a/posthog/models/filters/mixins/property.py
+++ b/posthog/models/filters/mixins/property.py
@@ -5,7 +5,11 @@
from posthog.constants import PROPERTIES, PropertyOperatorType
from posthog.models.filters.mixins.base import BaseParamMixin
-from posthog.models.filters.mixins.utils import cached_property, include_dict, include_query_tags
+from posthog.models.filters.mixins.utils import (
+ cached_property,
+ include_dict,
+ include_query_tags,
+)
from posthog.models.property import Property, PropertyGroup
@@ -82,7 +86,10 @@ def _parse_properties(self, properties: Optional[Any]) -> List[Property]:
key_split = key.split("__")
ret.append(
Property(
- key=key_split[0], value=value, operator=key_split[1] if len(key_split) > 1 else None, type="event"
+ key=key_split[0],
+ value=value,
+ operator=key_split[1] if len(key_split) > 1 else None,
+ type="event",
)
)
return ret
@@ -90,7 +97,8 @@ def _parse_properties(self, properties: Optional[Any]) -> List[Property]:
def _parse_property_group(self, group: Optional[Dict]) -> PropertyGroup:
if group and "type" in group and "values" in group:
return PropertyGroup(
- PropertyOperatorType(group["type"].upper()), self._parse_property_group_list(group["values"])
+ PropertyOperatorType(group["type"].upper()),
+ self._parse_property_group_list(group["values"]),
)
return PropertyGroup(PropertyOperatorType.AND, cast(List[Property], []))
diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py
index 53146bf62a7b3..c2b55f3d30e2e 100644
--- a/posthog/models/filters/mixins/retention.py
+++ b/posthog/models/filters/mixins/retention.py
@@ -17,7 +17,11 @@
TREND_FILTER_TYPE_EVENTS,
)
from posthog.models.entity import Entity
-from posthog.models.filters.mixins.common import BaseParamMixin, DateMixin, EntitiesMixin
+from posthog.models.filters.mixins.common import (
+ BaseParamMixin,
+ DateMixin,
+ EntitiesMixin,
+)
from posthog.models.filters.mixins.utils import cached_property, include_dict
from posthog.utils import relative_date_parse
diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py
index 4afdc5dec64cf..4735a95e6a7d6 100644
--- a/posthog/models/filters/mixins/simplify.py
+++ b/posthog/models/filters/mixins/simplify.py
@@ -38,16 +38,23 @@ def simplify(self: T, team: "Team", **kwargs) -> T:
updated_entities = {}
if hasattr(result, "entities_to_dict"):
for entity_type, entities in result.entities_to_dict().items():
- updated_entities[entity_type] = [self._simplify_entity(team, entity_type, entity, **kwargs) for entity in entities] # type: ignore
+ updated_entities[entity_type] = [
+ self._simplify_entity(team, entity_type, entity, **kwargs) for entity in entities # type: ignore
+ ]
from posthog.models.property.util import clear_excess_levels
- prop_group = clear_excess_levels(self._simplify_property_group(team, result.property_groups, **kwargs), skip=True) # type: ignore
+ prop_group = clear_excess_levels(
+ self._simplify_property_group(team, result.property_groups, **kwargs), # type: ignore
+ skip=True,
+ )
prop_group = prop_group.to_dict() # type: ignore
new_group_props = []
if getattr(result, "aggregation_group_type_index", None) is not None:
- new_group_props.append(self._group_set_property(cast(int, result.aggregation_group_type_index)).to_dict()) # type: ignore
+ new_group_props.append(
+ self._group_set_property(cast(int, result.aggregation_group_type_index)).to_dict() # type: ignore
+ )
if new_group_props:
new_group = {"type": "AND", "values": new_group_props}
@@ -56,7 +63,11 @@ def simplify(self: T, team: "Team", **kwargs) -> T:
return result.shallow_clone({**updated_entities, "properties": prop_group})
def _simplify_entity(
- self, team: "Team", entity_type: Literal["events", "actions", "exclusions"], entity_params: Dict, **kwargs
+ self,
+ team: "Team",
+ entity_type: Literal["events", "actions", "exclusions"],
+ entity_params: Dict,
+ **kwargs,
) -> Dict:
from posthog.models.entity import Entity, ExclusionEntity
diff --git a/posthog/models/filters/mixins/test/test_interval.py b/posthog/models/filters/mixins/test/test_interval.py
index efeb33e3479d7..d47adfc6b3e81 100644
--- a/posthog/models/filters/mixins/test/test_interval.py
+++ b/posthog/models/filters/mixins/test/test_interval.py
@@ -26,10 +26,13 @@ def test_filter_interval_success(filter, expected_interval):
@pytest.mark.parametrize(
"filter,expected_error_message",
[
- (Filter(data={"interval": "foo"}), "Interval foo does not belong to SUPPORTED_INTERVAL_TYPES!"),
+ (
+ Filter(data={"interval": "foo"}),
+ "Interval foo does not belong to SUPPORTED_INTERVAL_TYPES!",
+ ),
(Filter(data={"interval": 123}), "Interval must be a string!"),
],
)
def test_filter_interval_errors(filter, expected_error_message):
with pytest.raises(ValueError, match=expected_error_message):
- filter.interval
+ filter.interval # noqa: B018
diff --git a/posthog/models/filters/mixins/test/test_property.py b/posthog/models/filters/mixins/test/test_property.py
index e1e250b5916a4..8f8b7c56721e2 100644
--- a/posthog/models/filters/mixins/test/test_property.py
+++ b/posthog/models/filters/mixins/test/test_property.py
@@ -13,7 +13,13 @@ def test_property_group_multi_level_parsing():
"properties": {
"type": "AND",
"values": [
- {"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]},
+ {
+ "type": "AND",
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
+ },
{"type": "OR", "values": [{"key": "attr", "value": "val_2"}]},
],
}
@@ -42,7 +48,10 @@ def test_property_group_simple_parsing():
data={
"properties": {
"type": "AND",
- "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}],
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
}
}
)
@@ -64,22 +73,23 @@ def test_property_group_empty_parsing():
def test_property_group_invalid_parsing():
-
filter = Filter(
data={
"properties": {
"type": "XaND",
- "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}],
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
}
}
)
with pytest.raises(ValidationError):
- filter.property_groups
+ filter.property_groups # noqa: B018
def test_property_group_includes_unhomogenous_groups():
-
filter = Filter(
data={
"properties": {
@@ -95,7 +105,7 @@ def test_property_group_includes_unhomogenous_groups():
)
with pytest.raises(ValidationError):
- filter.property_groups
+ filter.property_groups # noqa: B018
def test_property_multi_level_to_dict():
@@ -104,7 +114,13 @@ def test_property_multi_level_to_dict():
"properties": {
"type": "AND",
"values": [
- {"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]},
+ {
+ "type": "AND",
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
+ },
{"type": "OR", "values": [{"key": "attr", "value": "val_2"}]},
],
}
@@ -121,7 +137,10 @@ def test_property_multi_level_to_dict():
{"key": "attr_2", "value": "val_2", "type": "event"},
],
},
- {"type": "OR", "values": [{"key": "attr", "value": "val_2", "type": "event"}]},
+ {
+ "type": "OR",
+ "values": [{"key": "attr", "value": "val_2", "type": "event"}],
+ },
],
}
@@ -131,7 +150,10 @@ def test_property_group_simple_to_dict():
data={
"properties": {
"type": "AND",
- "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}],
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
}
}
)
@@ -149,7 +171,13 @@ def test_property_group_simple_json_parsing():
filter = Filter(
data={
"properties": json.dumps(
- {"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]}
+ {
+ "type": "AND",
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
+ }
)
}
)
@@ -173,7 +201,10 @@ def test_property_group_multi_level_json_parsing():
"values": [
{
"type": "AND",
- "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}],
+ "values": [
+ {"key": "attr", "value": "val_1"},
+ {"key": "attr_2", "value": "val_2"},
+ ],
},
{"type": "OR", "values": [{"key": "attr", "value": "val_2"}]},
],
diff --git a/posthog/models/filters/mixins/utils.py b/posthog/models/filters/mixins/utils.py
index 2d224ca98a716..a297cdcfa6320 100644
--- a/posthog/models/filters/mixins/utils.py
+++ b/posthog/models/filters/mixins/utils.py
@@ -5,6 +5,7 @@
T = TypeVar("T")
+
# can't use cached_property directly from functools because of 3.7 compatibilty
def cached_property(func: Callable[..., T]) -> T:
return property(lru_cache(maxsize=1)(func)) # type: ignore
diff --git a/posthog/models/filters/path_filter.py b/posthog/models/filters/path_filter.py
index 9fe71d5d6d16b..4373092b91520 100644
--- a/posthog/models/filters/path_filter.py
+++ b/posthog/models/filters/path_filter.py
@@ -17,7 +17,11 @@
SampleMixin,
SearchMixin,
)
-from .mixins.funnel import FunnelCorrelationMixin, FunnelPersonsStepMixin, FunnelWindowMixin
+from .mixins.funnel import (
+ FunnelCorrelationMixin,
+ FunnelPersonsStepMixin,
+ FunnelWindowMixin,
+)
from .mixins.groups import GroupsAggregationMixin
from .mixins.interval import IntervalMixin
from .mixins.paths import (
@@ -76,7 +80,12 @@ class PathFilter(
BaseFilter,
SampleMixin,
):
- def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None:
+ def __init__(
+ self,
+ data: Optional[Dict[str, Any]] = None,
+ request: Optional[Request] = None,
+ **kwargs,
+ ) -> None:
if data:
data["insight"] = INSIGHT_PATHS
else:
diff --git a/posthog/models/filters/retention_filter.py b/posthog/models/filters/retention_filter.py
index cd767606a6dd1..9cc3e8d0c7a08 100644
--- a/posthog/models/filters/retention_filter.py
+++ b/posthog/models/filters/retention_filter.py
@@ -18,7 +18,11 @@
from .mixins.funnel import FunnelCorrelationMixin
from .mixins.groups import GroupsAggregationMixin
from .mixins.property import PropertyMixin
-from .mixins.retention import EntitiesDerivedMixin, RetentionDateDerivedMixin, RetentionTypeMixin
+from .mixins.retention import (
+ EntitiesDerivedMixin,
+ RetentionDateDerivedMixin,
+ RetentionTypeMixin,
+)
from .mixins.simplify import SimplifyFilterMixin
from .mixins.utils import cached_property, include_dict
diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py
index dbabdd5e6897a..4674c4ceeb3d9 100644
--- a/posthog/models/filters/stickiness_filter.py
+++ b/posthog/models/filters/stickiness_filter.py
@@ -1,6 +1,11 @@
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union
-from django.db.models.functions.datetime import TruncDay, TruncHour, TruncMonth, TruncWeek
+from django.db.models.functions.datetime import (
+ TruncDay,
+ TruncHour,
+ TruncMonth,
+ TruncWeek,
+)
from rest_framework.exceptions import ValidationError
from rest_framework.request import Request
@@ -55,7 +60,12 @@ class StickinessFilter(
get_earliest_timestamp: Optional[Callable]
team: "Team"
- def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None:
+ def __init__(
+ self,
+ data: Optional[Dict[str, Any]] = None,
+ request: Optional[Request] = None,
+ **kwargs,
+ ) -> None:
if data:
data["insight"] = INSIGHT_STICKINESS
else:
diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py
index 3113cc3598000..d7f60b149b93b 100644
--- a/posthog/models/filters/test/test_filter.py
+++ b/posthog/models/filters/test/test_filter.py
@@ -60,7 +60,12 @@ def test_to_dict(self):
def test_simplify_test_accounts(self):
self.team.test_account_filters = [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ }
]
self.team.save()
@@ -70,7 +75,12 @@ def test_simplify_test_accounts(self):
self.assertEqual(
filter.properties_to_dict(),
- {"properties": {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]}},
+ {
+ "properties": {
+ "type": "AND",
+ "values": [{"key": "attr", "value": "some_val", "type": "event"}],
+ }
+ },
)
self.assertTrue(filter.is_simplified)
@@ -85,10 +95,18 @@ def test_simplify_test_accounts(self):
{
"type": "AND",
"values": [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ }
],
},
- {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]},
+ {
+ "type": "AND",
+ "values": [{"key": "attr", "value": "some_val", "type": "event"}],
+ },
],
}
},
@@ -104,10 +122,18 @@ def test_simplify_test_accounts(self):
{
"type": "AND",
"values": [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ }
],
},
- {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]},
+ {
+ "type": "AND",
+ "values": [{"key": "attr", "value": "some_val", "type": "event"}],
+ },
],
}
},
@@ -117,27 +143,61 @@ def test_simplify_test_accounts(self):
def property_to_Q_test_factory(filter_persons: Callable, person_factory):
class TestPropertiesToQ(BaseTest):
def test_simple_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com"},
+ )
person_factory(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1})
- person_factory(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}},
+ )
person_factory(team_id=self.team.pk, distinct_ids=["person4"])
- filter = Filter(data={"properties": [{"type": "person", "key": "url", "value": "https://whatever.com"}]})
+ filter = Filter(
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ }
+ ]
+ }
+ )
results = filter_persons(filter, self.team)
self.assertEqual(len(results), 1)
def test_multiple_equality_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com"},
+ )
person_factory(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1})
- person_factory(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}},
+ )
person_factory(team_id=self.team.pk, distinct_ids=["person4"])
- person_factory(team_id=self.team.pk, distinct_ids=["person5"], properties={"url": "https://example.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["person5"],
+ properties={"url": "https://example.com"},
+ )
filter = Filter(
data={
"properties": [
- {"type": "person", "key": "url", "value": ["https://whatever.com", "https://example.com"]}
+ {
+ "type": "person",
+ "key": "url",
+ "value": ["https://whatever.com", "https://example.com"],
+ }
]
}
)
@@ -147,7 +207,15 @@ def test_multiple_equality_persons(self):
def test_incomplete_data(self):
filter = Filter(
- data={"properties": [{"key": "$current_url", "operator": "not_icontains", "type": "event"}]}
+ data={
+ "properties": [
+ {
+ "key": "$current_url",
+ "operator": "not_icontains",
+ "type": "event",
+ }
+ ]
+ }
)
self.assertListEqual(filter.property_groups.values, [])
@@ -156,21 +224,60 @@ def test_numerical_person_properties(self):
person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"$a_number": 5})
person_factory(team_id=self.team.pk, distinct_ids=["p3"], properties={"$a_number": 6})
- filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 4, "operator": "gt"}]})
+ filter = Filter(
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "$a_number",
+ "value": 4,
+ "operator": "gt",
+ }
+ ]
+ }
+ )
self.assertEqual(len(filter_persons(filter, self.team)), 2)
filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 5}]})
self.assertEqual(len(filter_persons(filter, self.team)), 1)
- filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 6, "operator": "lt"}]})
+ filter = Filter(
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "$a_number",
+ "value": 6,
+ "operator": "lt",
+ }
+ ]
+ }
+ )
self.assertEqual(len(filter_persons(filter, self.team)), 2)
def test_contains_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"})
- person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"url": "https://whatever.com"},
+ )
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"url": "https://example.com"},
+ )
filter = Filter(
- data={"properties": [{"type": "person", "key": "url", "value": "whatever", "operator": "icontains"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": "whatever",
+ "operator": "icontains",
+ }
+ ]
+ }
)
results = filter_persons(filter, self.team)
@@ -179,49 +286,106 @@ def test_contains_persons(self):
def test_regex_persons(self):
p1_uuid = str(
person_factory(
- team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"}
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"url": "https://whatever.com"},
).uuid
)
p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid)
filter = Filter(
- data={"properties": [{"type": "person", "key": "url", "value": r"\.com$", "operator": "regex"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": r"\.com$",
+ "operator": "regex",
+ }
+ ]
+ }
)
results = filter_persons(filter, self.team)
self.assertCountEqual(results, [p1_uuid])
filter = Filter(
- data={"properties": [{"type": "person", "key": "url", "value": r"\.eee$", "operator": "not_regex"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": r"\.eee$",
+ "operator": "not_regex",
+ }
+ ]
+ }
)
results = filter_persons(filter, self.team)
self.assertCountEqual(results, [p1_uuid, p2_uuid])
def test_invalid_regex_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"})
- person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"url": "https://whatever.com"},
+ )
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"url": "https://example.com"},
+ )
filter = Filter(
- data={"properties": [{"type": "person", "key": "url", "value": r"?*", "operator": "regex"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": r"?*",
+ "operator": "regex",
+ }
+ ]
+ }
)
self.assertEqual(len(filter_persons(filter, self.team)), 0)
filter = Filter(
- data={"properties": [{"type": "person", "key": "url", "value": r"?*", "operator": "not_regex"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "url",
+ "value": r"?*",
+ "operator": "not_regex",
+ }
+ ]
+ }
)
self.assertEqual(len(filter_persons(filter, self.team)), 0)
def test_is_not_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"url": "https://whatever.com"},
+ )
p2_uuid = str(
person_factory(
- team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"}
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"url": "https://example.com"},
).uuid
)
filter = Filter(
data={
"properties": [
- {"type": "person", "key": "url", "value": "https://whatever.com", "operator": "is_not"}
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ "operator": "is_not",
+ }
]
}
)
@@ -229,10 +393,16 @@ def test_is_not_persons(self):
self.assertCountEqual(results, [p2_uuid])
def test_does_not_contain_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"url": "https://whatever.com"},
+ )
p2_uuid = str(
person_factory(
- team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"}
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"url": "https://example.com"},
).uuid
)
p3_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p3"]).uuid)
@@ -241,7 +411,12 @@ def test_does_not_contain_persons(self):
filter = Filter(
data={
"properties": [
- {"type": "person", "key": "url", "value": "whatever.com", "operator": "not_icontains"}
+ {
+ "type": "person",
+ "key": "url",
+ "value": "whatever.com",
+ "operator": "not_icontains",
+ }
]
}
)
@@ -256,12 +431,21 @@ def test_multiple_persons(self):
properties={"url": "https://whatever.com", "another_key": "value"},
).uuid
)
- person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://whatever.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"url": "https://whatever.com"},
+ )
filter = Filter(
data={
"properties": [
- {"type": "person", "key": "url", "value": "whatever.com", "operator": "icontains"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "whatever.com",
+ "operator": "icontains",
+ },
{"type": "person", "key": "another_key", "value": "value"},
]
}
@@ -271,7 +455,11 @@ def test_multiple_persons(self):
def test_boolean_filters_persons(self):
p1_uuid = str(
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True}).uuid
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"is_first_user": True},
+ ).uuid
)
person_factory(team_id=self.team.pk, distinct_ids=["p2"])
@@ -281,29 +469,62 @@ def test_boolean_filters_persons(self):
def test_is_not_set_and_is_set_persons(self):
p1_uuid = str(
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True}).uuid
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"is_first_user": True},
+ ).uuid
)
p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid)
filter = Filter(
- data={"properties": [{"type": "person", "key": "is_first_user", "value": "", "operator": "is_set"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "is_first_user",
+ "value": "",
+ "operator": "is_set",
+ }
+ ]
+ }
)
results = filter_persons(filter, self.team)
self.assertEqual(results, [p1_uuid])
filter = Filter(
- data={"properties": [{"type": "person", "key": "is_first_user", "value": "", "operator": "is_not_set"}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "is_first_user",
+ "value": "",
+ "operator": "is_not_set",
+ }
+ ]
+ }
)
results = filter_persons(filter, self.team)
self.assertEqual(results, [p2_uuid])
def test_is_not_true_false_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"is_first_user": True},
+ )
p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid)
filter = Filter(
data={
- "properties": [{"type": "person", "key": "is_first_user", "value": ["true"], "operator": "is_not"}]
+ "properties": [
+ {
+ "type": "person",
+ "key": "is_first_user",
+ "value": ["true"],
+ "operator": "is_not",
+ }
+ ]
}
)
results = filter_persons(filter, self.team)
@@ -312,15 +533,26 @@ def test_is_not_true_false_persons(self):
def test_is_date_before_persons(self):
p1_uuid = str(
person_factory(
- team_id=self.team.pk, distinct_ids=["p1"], properties={"some-timestamp": "2022-03-01"}
+ team_id=self.team.pk,
+ distinct_ids=["p1"],
+ properties={"some-timestamp": "2022-03-01"},
).uuid
)
- person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"some-timestamp": "2022-05-01"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["p2"],
+ properties={"some-timestamp": "2022-05-01"},
+ )
filter = Filter(
data={
"properties": [
- {"type": "person", "key": "some-timestamp", "value": "2022-04-01", "operator": "is_date_before"}
+ {
+ "type": "person",
+ "key": "some-timestamp",
+ "value": "2022-04-01",
+ "operator": "is_date_before",
+ }
]
}
)
@@ -348,14 +580,25 @@ def test_json_object(self):
self.assertEqual(results, [str(p1_uuid.uuid)])
def test_filter_out_team_members_persons(self):
- person_factory(team_id=self.team.pk, distinct_ids=["team_member"], properties={"email": "test@posthog.com"})
+ person_factory(
+ team_id=self.team.pk,
+ distinct_ids=["team_member"],
+ properties={"email": "test@posthog.com"},
+ )
p2_uuid = str(
person_factory(
- team_id=self.team.pk, distinct_ids=["random_user"], properties={"email": "test@gmail.com"}
+ team_id=self.team.pk,
+ distinct_ids=["random_user"],
+ properties={"email": "test@gmail.com"},
).uuid
)
self.team.test_account_filters = [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ }
]
self.team.save()
filter = Filter(data={FILTER_TEST_ACCOUNTS: True}, team=self.team)
@@ -373,7 +616,9 @@ def _filter_persons(filter: Filter, team: Team):
return [str(uuid) for uuid in persons.values_list("uuid", flat=True)]
-class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest): # type: ignore
+class TestDjangoPropertiesToQ(
+ property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest
+): # type: ignore
@snapshot_postgres_queries
def test_array_property_as_string_on_persons(self):
Person.objects.create(
@@ -388,7 +633,16 @@ def test_array_property_as_string_on_persons(self):
# some idiosyncrasies on how this works, but we shouldn't error out on this
filter = Filter(
- data={"properties": [{"type": "person", "key": "urls", "operator": "icontains", "value": '["abcd"]'}]}
+ data={
+ "properties": [
+ {
+ "type": "person",
+ "key": "urls",
+ "operator": "icontains",
+ "value": '["abcd"]',
+ }
+ ]
+ }
)
persons = Person.objects.filter(property_group_to_Q(filter.property_groups))
@@ -401,7 +655,9 @@ def test_array_property_as_string_on_persons(self):
def test_person_cohort_properties(self):
person1_distinct_id = "person1"
person1 = Person.objects.create(
- team=self.team, distinct_ids=[person1_distinct_id], properties={"$some_prop": 1}
+ team=self.team,
+ distinct_ids=[person1_distinct_id],
+ properties={"$some_prop": 1},
)
cohort1 = Cohort.objects.create(team=self.team, groups=[{"properties": {"$some_prop": 1}}], name="cohort1")
cohort1.people.add(person1)
@@ -410,7 +666,10 @@ def test_person_cohort_properties(self):
with self.assertNumQueries(2):
matched_person = (
- Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id)
+ Person.objects.filter(
+ team_id=self.team.pk,
+ persondistinctid__distinct_id=person1_distinct_id,
+ )
.filter(properties_to_Q(filter.property_groups.flat))
.exists()
)
@@ -419,7 +678,9 @@ def test_person_cohort_properties(self):
def test_person_cohort_properties_with_zero_value(self):
person1_distinct_id = "person1"
person1 = Person.objects.create(
- team=self.team, distinct_ids=[person1_distinct_id], properties={"$some_prop": 0}
+ team=self.team,
+ distinct_ids=[person1_distinct_id],
+ properties={"$some_prop": 0},
)
cohort1 = Cohort.objects.create(team=self.team, groups=[{"properties": {"$some_prop": 0}}], name="cohort1")
cohort1.people.add(person1)
@@ -428,7 +689,10 @@ def test_person_cohort_properties_with_zero_value(self):
with self.assertNumQueries(2):
matched_person = (
- Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id)
+ Person.objects.filter(
+ team_id=self.team.pk,
+ persondistinctid__distinct_id=person1_distinct_id,
+ )
.filter(properties_to_Q(filter.property_groups.flat))
.exists()
)
@@ -436,7 +700,11 @@ def test_person_cohort_properties_with_zero_value(self):
def test_person_cohort_properties_with_negation(self):
person1_distinct_id = "example_id"
- Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$some_prop": "matches"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["example_id"],
+ properties={"$some_prop": "matches"},
+ )
user_in = Cohort.objects.create(
team=self.team,
@@ -447,7 +715,11 @@ def test_person_cohort_properties_with_negation(self):
{
"type": "AND",
"values": [
- {"key": "$some_prop", "value": "matches", "type": "person"},
+ {
+ "key": "$some_prop",
+ "value": "matches",
+ "type": "person",
+ },
],
}
],
@@ -464,7 +736,11 @@ def test_person_cohort_properties_with_negation(self):
{
"type": "OR",
"values": [
- {"key": "$bad_prop", "value": "nomatchihope", "type": "person"},
+ {
+ "key": "$bad_prop",
+ "value": "nomatchihope",
+ "type": "person",
+ },
],
},
],
@@ -501,14 +777,28 @@ def test_person_cohort_properties_with_negation(self):
with self.assertNumQueries(4):
matched_person = (
- Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id)
+ Person.objects.filter(
+ team_id=self.team.pk,
+ persondistinctid__distinct_id=person1_distinct_id,
+ )
.filter(properties_to_Q(filter.property_groups.flat))
.exists()
)
self.assertTrue(matched_person)
def test_group_property_filters_direct(self):
- filter = Filter(data={"properties": [{"key": "some_prop", "value": 5, "type": "group", "group_type_index": 1}]})
+ filter = Filter(
+ data={
+ "properties": [
+ {
+ "key": "some_prop",
+ "value": 5,
+ "type": "group",
+ "group_type_index": 1,
+ }
+ ]
+ }
+ )
query_filter = properties_to_Q(filter.property_groups.flat)
self.assertEqual(
query_filter,
@@ -543,9 +833,17 @@ def filter_persons_with_property_group(
class TestDjangoPropertyGroupToQ(BaseTest, QueryMatchingTest):
def test_simple_property_group_to_q(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com"},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
filter = Filter(
@@ -553,7 +851,11 @@ def test_simple_property_group_to_q(self):
"properties": {
"type": "OR",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "url", "value": 1},
],
}
@@ -566,10 +868,20 @@ def test_simple_property_group_to_q(self):
def test_multiple_properties_property_group_to_q(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
filter = Filter(
@@ -577,7 +889,11 @@ def test_multiple_properties_property_group_to_q(self):
"properties": {
"type": "OR",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
],
}
@@ -590,10 +906,20 @@ def test_multiple_properties_property_group_to_q(self):
def test_nested_property_group_to_q(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
filter = Filter(
@@ -604,11 +930,18 @@ def test_nested_property_group_to_q(self):
{
"type": "AND",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
],
},
- {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]},
+ {
+ "type": "AND",
+ "values": [{"type": "person", "key": "bla", "value": 3}],
+ },
],
}
}
@@ -620,10 +953,20 @@ def test_nested_property_group_to_q(self):
def test_property_group_to_q_with_property_overrides(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
filter = Filter(
@@ -634,11 +977,18 @@ def test_property_group_to_q_with_property_overrides(self):
{
"type": "AND",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
],
},
- {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]},
+ {
+ "type": "AND",
+ "values": [{"type": "person", "key": "bla", "value": 3}],
+ },
],
}
}
@@ -651,10 +1001,20 @@ def test_property_group_to_q_with_property_overrides(self):
@snapshot_postgres_queries
def test_property_group_to_q_with_cohorts(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
cohort1 = Cohort.objects.create(
@@ -679,12 +1039,19 @@ def test_property_group_to_q_with_cohorts(self):
{
"type": "AND",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
{"type": "cohort", "key": "id", "value": cohort1.pk},
],
},
- {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]},
+ {
+ "type": "AND",
+ "values": [{"type": "person", "key": "bla", "value": 3}],
+ },
],
}
}
@@ -696,12 +1063,36 @@ def test_property_group_to_q_with_cohorts(self):
@snapshot_postgres_queries
def test_property_group_to_q_with_negation_cohorts(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"bla": 1, "other": 1})
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"bla": 2, "other": 1})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"bla": 3, "other": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person4"], properties={"bla": 4, "other": 1})
- _create_person(team_id=self.team.pk, distinct_ids=["person5"], properties={"bla": 5, "other": 1})
- _create_person(team_id=self.team.pk, distinct_ids=["person6"], properties={"bla": 6, "other": 1})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"bla": 1, "other": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"bla": 2, "other": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"bla": 3, "other": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person4"],
+ properties={"bla": 4, "other": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person5"],
+ properties={"bla": 5, "other": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person6"],
+ properties={"bla": 6, "other": 1},
+ )
cohort1 = Cohort.objects.create(
team=self.team,
@@ -750,8 +1141,18 @@ def test_property_group_to_q_with_negation_cohorts(self):
"properties": {
"type": "AND",
"values": [
- {"type": "cohort", "key": "id", "value": cohort1.pk, "negation": True},
- {"type": "cohort", "key": "id", "value": cohort2.pk, "negation": True},
+ {
+ "type": "cohort",
+ "key": "id",
+ "value": cohort1.pk,
+ "negation": True,
+ },
+ {
+ "type": "cohort",
+ "key": "id",
+ "value": cohort2.pk,
+ "negation": True,
+ },
{"type": "cohort", "key": "id", "value": cohort3.pk},
],
}
@@ -777,10 +1178,20 @@ def test_property_group_to_q_with_negation_cohorts(self):
@snapshot_postgres_queries
def test_property_group_to_q_with_cohorts_no_match(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
cohort1 = Cohort.objects.create(
@@ -805,12 +1216,19 @@ def test_property_group_to_q_with_cohorts_no_match(self):
{
"type": "AND",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
{"type": "cohort", "key": "id", "value": cohort1.pk},
],
},
- {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]},
+ {
+ "type": "AND",
+ "values": [{"type": "person", "key": "bla", "value": 3}],
+ },
],
}
}
@@ -822,13 +1240,27 @@ def test_property_group_to_q_with_cohorts_no_match(self):
def test_property_group_to_q_with_behavioural_cohort(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1}
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"url": "https://whatever.com", "bla": 1},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"url": 1, "bla": 2},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"url": {"bla": "bla"}, "bla": 3},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2})
- _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3})
_create_person(team_id=self.team.pk, distinct_ids=["person4"])
- cohort2 = Cohort.objects.create(team=self.team, groups=[{"event_id": "$pageview", "days": 7}], name="cohort2")
+ cohort2 = Cohort.objects.create(
+ team=self.team,
+ groups=[{"event_id": "$pageview", "days": 7}],
+ name="cohort2",
+ )
filter = Filter(
data={
@@ -838,12 +1270,19 @@ def test_property_group_to_q_with_behavioural_cohort(self):
{
"type": "AND",
"values": [
- {"type": "person", "key": "url", "value": "https://whatever.com"},
+ {
+ "type": "person",
+ "key": "url",
+ "value": "https://whatever.com",
+ },
{"type": "person", "key": "bla", "value": 1},
{"type": "cohort", "key": "id", "value": cohort2.pk},
],
},
- {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]},
+ {
+ "type": "AND",
+ "values": [{"type": "person", "key": "bla", "value": 3}],
+ },
],
}
}
diff --git a/posthog/models/filters/test/test_lifecycle_filter.py b/posthog/models/filters/test/test_lifecycle_filter.py
index 8bf5d904c66f4..9273a12e654d1 100644
--- a/posthog/models/filters/test/test_lifecycle_filter.py
+++ b/posthog/models/filters/test/test_lifecycle_filter.py
@@ -64,4 +64,7 @@ def test_filter_properties(self):
},
)
self.assertEqual(filter.lifecycle_type, lifecycle_type)
- self.assertEqual(filter.target_date, relative_date_parse(target_date, self.team.timezone_info))
+ self.assertEqual(
+ filter.target_date,
+ relative_date_parse(target_date, self.team.timezone_info),
+ )
diff --git a/posthog/models/filters/utils.py b/posthog/models/filters/utils.py
index 7c2f75331bc74..0b31b209afa69 100644
--- a/posthog/models/filters/utils.py
+++ b/posthog/models/filters/utils.py
@@ -33,19 +33,31 @@ def get_filter(team, data: dict = {}, request: Optional[Request] = None):
if insight == INSIGHT_RETENTION:
return RetentionFilter(data={**data, "insight": INSIGHT_RETENTION}, request=request, team=team)
elif insight == INSIGHT_STICKINESS or (insight == INSIGHT_TRENDS and data.get("shown_as") == "Stickiness"):
- return StickinessFilter(data=data, request=request, team=team, get_earliest_timestamp=earliest_timestamp_func)
+ return StickinessFilter(
+ data=data,
+ request=request,
+ team=team,
+ get_earliest_timestamp=earliest_timestamp_func,
+ )
elif insight == INSIGHT_PATHS:
return PathFilter(data={**data, "insight": INSIGHT_PATHS}, request=request, team=team)
elif insight == INSIGHT_FUNNELS:
return Filter(
- data={**data, **(request.data if request else {}), "insight": INSIGHT_FUNNELS}, request=request, team=team
+ data={
+ **data,
+ **(request.data if request else {}),
+ "insight": INSIGHT_FUNNELS,
+ },
+ request=request,
+ team=team,
)
return Filter(data=data, request=request, team=team)
def validate_group_type_index(param_name: str, value: Any, required=False) -> Optional[GroupTypeIndex]:
error = ValidationError(
- f"{param_name} is required to be at least 0 and less than {GROUP_TYPES_LIMIT}", code="invalid"
+ f"{param_name} is required to be at least 0 and less than {GROUP_TYPES_LIMIT}",
+ code="invalid",
)
if required and value is None:
diff --git a/posthog/models/group/sql.py b/posthog/models/group/sql.py
index 2eb5222859729..41b9e72cebcbb 100644
--- a/posthog/models/group/sql.py
+++ b/posthog/models/group/sql.py
@@ -36,7 +36,10 @@
)
KAFKA_GROUPS_TABLE_SQL = lambda: GROUPS_TABLE_BASE_SQL.format(
- table_name="kafka_" + GROUPS_TABLE, cluster=CLICKHOUSE_CLUSTER, engine=kafka_engine(KAFKA_GROUPS), extra_fields=""
+ table_name="kafka_" + GROUPS_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ engine=kafka_engine(KAFKA_GROUPS),
+ extra_fields="",
)
# You must include the database here because of a bug in clickhouse
diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py
index fa3520dc9912c..427c883a2e920 100644
--- a/posthog/models/group/util.py
+++ b/posthog/models/group/util.py
@@ -60,7 +60,15 @@ def create_group(
else:
timestamp = timestamp.astimezone(ZoneInfo("UTC"))
- raw_create_group_ch(team_id, group_type_index, group_key, properties, timestamp, timestamp=timestamp, sync=sync)
+ raw_create_group_ch(
+ team_id,
+ group_type_index,
+ group_key,
+ properties,
+ timestamp,
+ timestamp=timestamp,
+ sync=sync,
+ )
group = Group.objects.create(
team_id=team_id,
group_type_index=group_type_index,
@@ -73,7 +81,9 @@ def create_group(
def get_aggregation_target_field(
- aggregation_group_type_index: Optional[GroupTypeIndex], event_table_alias: str, default: str
+ aggregation_group_type_index: Optional[GroupTypeIndex],
+ event_table_alias: str,
+ default: str,
) -> str:
if aggregation_group_type_index is not None:
return f'{event_table_alias}."$group_{aggregation_group_type_index}"'
diff --git a/posthog/models/group_type_mapping.py b/posthog/models/group_type_mapping.py
index 80ebdebfdeaf6..ed4a19164f4fb 100644
--- a/posthog/models/group_type_mapping.py
+++ b/posthog/models/group_type_mapping.py
@@ -7,9 +7,13 @@ class GroupTypeMapping(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(fields=["team", "group_type"], name="unique group types for team"),
- models.UniqueConstraint(fields=["team", "group_type_index"], name="unique event column indexes for team"),
+ models.UniqueConstraint(
+ fields=["team", "group_type_index"],
+ name="unique event column indexes for team",
+ ),
models.CheckConstraint(
- check=models.Q(group_type_index__lte=5), name="group_type_index is less than or equal 5"
+ check=models.Q(group_type_index__lte=5),
+ name="group_type_index is less than or equal 5",
),
]
diff --git a/posthog/models/ingestion_warnings/sql.py b/posthog/models/ingestion_warnings/sql.py
index 55a631a0835c0..6f3023744f51f 100644
--- a/posthog/models/ingestion_warnings/sql.py
+++ b/posthog/models/ingestion_warnings/sql.py
@@ -1,7 +1,11 @@
from django.conf import settings
from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, kafka_engine
-from posthog.clickhouse.table_engines import Distributed, MergeTreeEngine, ReplicationScheme
+from posthog.clickhouse.table_engines import (
+ Distributed,
+ MergeTreeEngine,
+ ReplicationScheme,
+)
from posthog.kafka_client.topics import KAFKA_INGESTION_WARNINGS
INGESTION_WARNINGS_TABLE_BASE_SQL = """
diff --git a/posthog/models/insight.py b/posthog/models/insight.py
index 1c5d168ed7b5b..a3057cdb11c7d 100644
--- a/posthog/models/insight.py
+++ b/posthog/models/insight.py
@@ -52,12 +52,20 @@ class Insight(models.Model):
refresh_attempt: models.IntegerField = models.IntegerField(null=True, blank=True)
last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now)
last_modified_by: models.ForeignKey = models.ForeignKey(
- "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_insights"
+ "User",
+ on_delete=models.SET_NULL,
+ null=True,
+ blank=True,
+ related_name="modified_insights",
)
# DEPRECATED: using the new "dashboards" relation instead
dashboard: models.ForeignKey = models.ForeignKey(
- "Dashboard", related_name="items", on_delete=models.CASCADE, null=True, blank=True
+ "Dashboard",
+ related_name="items",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
)
# DEPRECATED: on dashboard_insight now
layouts: models.JSONField = models.JSONField(default=dict)
@@ -75,7 +83,11 @@ class Insight(models.Model):
deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list)
# DEPRECATED: now using app-wide tagging model. See EnterpriseTaggedItem
deprecated_tags_v2: ArrayField = ArrayField(
- models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags"
+ models.CharField(max_length=32),
+ null=True,
+ blank=True,
+ default=None,
+ db_column="tags",
)
# Changing these fields materially alters the Insight, so these count for the "last_modified_*" fields
@@ -141,7 +153,10 @@ def dashboard_filters(self, dashboard: Optional[Dashboard] = None):
elif self.filters.get("properties", {}).get("type"):
filters["properties"] = {
"type": "AND",
- "values": [self.filters["properties"], {"type": "AND", "values": dashboard_properties}],
+ "values": [
+ self.filters["properties"],
+ {"type": "AND", "values": dashboard_properties},
+ ],
}
elif not self.filters.get("properties"):
filters["properties"] = dashboard_properties
@@ -157,7 +172,9 @@ def dashboard_filters(self, dashboard: Optional[Dashboard] = None):
def dashboard_query(self, dashboard: Optional[Dashboard]) -> Optional[dict]:
if not dashboard or not self.query:
return self.query
- from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters
+ from posthog.hogql_queries.apply_dashboard_filters import (
+ apply_dashboard_filters,
+ )
return apply_dashboard_filters(self.query, dashboard.filters, self.team)
@@ -184,7 +201,9 @@ def generate_insight_cache_key(insight: Insight, dashboard: Optional[Dashboard])
dashboard_filters = dashboard.filters if dashboard else None
if dashboard_filters:
- from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters
+ from posthog.hogql_queries.apply_dashboard_filters import (
+ apply_dashboard_filters,
+ )
q = apply_dashboard_filters(insight.query, dashboard_filters, insight.team)
else:
diff --git a/posthog/models/insight_caching_state.py b/posthog/models/insight_caching_state.py
index 2c1382b637efd..9e6abc0b7b5a8 100644
--- a/posthog/models/insight_caching_state.py
+++ b/posthog/models/insight_caching_state.py
@@ -15,15 +15,24 @@ class Meta:
indexes = [models.Index(fields=["cache_key"], name="filter_by_cache_key_idx")]
constraints = [
UniqueConstraintByExpression(
- name="unique_insight_tile_idx", expression="(insight_id, coalesce(dashboard_tile_id, -1))"
+ name="unique_insight_tile_idx",
+ expression="(insight_id, coalesce(dashboard_tile_id, -1))",
)
]
team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE)
- insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, related_name="caching_states", null=False)
+ insight = models.ForeignKey(
+ "posthog.Insight",
+ on_delete=models.CASCADE,
+ related_name="caching_states",
+ null=False,
+ )
dashboard_tile = models.ForeignKey(
- "posthog.DashboardTile", on_delete=models.CASCADE, related_name="caching_states", null=True
+ "posthog.DashboardTile",
+ on_delete=models.CASCADE,
+ related_name="caching_states",
+ null=True,
)
cache_key: models.CharField = models.CharField(max_length=400, null=False, blank=False)
@@ -67,7 +76,11 @@ def sync_dashboard_updated(sender, instance: Dashboard, **kwargs):
from posthog.celery import sync_insight_caching_state
update_fields = kwargs.get("update_fields")
- if update_fields in [frozenset({"filters_hash"}), frozenset({"last_refresh"}), frozenset({"last_accessed_at"})]:
+ if update_fields in [
+ frozenset({"filters_hash"}),
+ frozenset({"last_refresh"}),
+ frozenset({"last_accessed_at"}),
+ ]:
return
for tile_id in DashboardTile.objects.filter(dashboard=instance).values_list("pk", flat=True):
diff --git a/posthog/models/integration.py b/posthog/models/integration.py
index 55ed06232445a..8ce1c9d6ef7c7 100644
--- a/posthog/models/integration.py
+++ b/posthog/models/integration.py
@@ -106,7 +106,11 @@ def integration_from_slack_response(cls, team_id: str, created_by: User, params:
integration, created = Integration.objects.update_or_create(
team_id=team_id,
kind="slack",
- defaults={"config": config, "sensitive_config": sensitive_config, "created_by": created_by},
+ defaults={
+ "config": config,
+ "sensitive_config": sensitive_config,
+ "created_by": created_by,
+ },
)
return integration
@@ -147,6 +151,12 @@ def validate_request(cls, request: Request):
@classmethod
@cache_for(timedelta(minutes=5))
def slack_config(cls):
- config = get_instance_settings(["SLACK_APP_CLIENT_ID", "SLACK_APP_CLIENT_SECRET", "SLACK_APP_SIGNING_SECRET"])
+ config = get_instance_settings(
+ [
+ "SLACK_APP_CLIENT_ID",
+ "SLACK_APP_CLIENT_SECRET",
+ "SLACK_APP_SIGNING_SECRET",
+ ]
+ )
return config
diff --git a/posthog/models/messaging.py b/posthog/models/messaging.py
index c1a787e30d309..5514f98baccb2 100644
--- a/posthog/models/messaging.py
+++ b/posthog/models/messaging.py
@@ -31,4 +31,7 @@ class MessagingRecord(UUIDModel):
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)
class Meta:
- unique_together = ("email_hash", "campaign_key") # can only send campaign once to each email
+ unique_together = (
+ "email_hash",
+ "campaign_key",
+ ) # can only send campaign once to each email
diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py
index 490645909df26..ec61ab1c22ed0 100644
--- a/posthog/models/notebook/notebook.py
+++ b/posthog/models/notebook/notebook.py
@@ -19,7 +19,11 @@ class Notebook(UUIDModel):
created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True)
last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now)
last_modified_by: models.ForeignKey = models.ForeignKey(
- "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_notebooks"
+ "User",
+ on_delete=models.SET_NULL,
+ null=True,
+ blank=True,
+ related_name="modified_notebooks",
)
class Meta:
diff --git a/posthog/models/organization.py b/posthog/models/organization.py
index cc4c07568312e..700fea47658f1 100644
--- a/posthog/models/organization.py
+++ b/posthog/models/organization.py
@@ -24,7 +24,12 @@
from posthog.cloud_utils import is_cloud
from posthog.constants import MAX_SLUG_LENGTH, AvailableFeature
from posthog.email import is_email_available
-from posthog.models.utils import LowercaseSlugField, UUIDModel, create_with_slug, sane_repr
+from posthog.models.utils import (
+ LowercaseSlugField,
+ UUIDModel,
+ create_with_slug,
+ sane_repr,
+)
from posthog.redis import get_client
from posthog.utils import absolute_uri
@@ -56,7 +61,11 @@ def create(self, *args: Any, **kwargs: Any):
return create_with_slug(super().create, *args, **kwargs)
def bootstrap(
- self, user: Optional["User"], *, team_fields: Optional[Dict[str, Any]] = None, **kwargs
+ self,
+ user: Optional["User"],
+ *,
+ team_fields: Optional[Dict[str, Any]] = None,
+ **kwargs,
) -> Tuple["Organization", Optional["OrganizationMembership"], "Team"]:
"""Instead of doing the legwork of creating an organization yourself, delegate the details with bootstrap."""
from .team import Team # Avoiding circular import
@@ -67,7 +76,9 @@ def bootstrap(
organization_membership: Optional[OrganizationMembership] = None
if user is not None:
organization_membership = OrganizationMembership.objects.create(
- organization=organization, user=user, level=OrganizationMembership.Level.OWNER
+ organization=organization,
+ user=user,
+ level=OrganizationMembership.Level.OWNER,
)
user.current_organization = organization
user.organization = user.current_organization # Update cached property
@@ -111,7 +122,7 @@ class PluginsAccessLevel(models.IntegerChoices):
slug: LowercaseSlugField = LowercaseSlugField(unique=True, max_length=MAX_SLUG_LENGTH)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)
updated_at: models.DateTimeField = models.DateTimeField(auto_now=True)
- plugins_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
+ plugins_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
default=PluginsAccessLevel.CONFIG,
choices=PluginsAccessLevel.choices,
)
@@ -222,8 +233,14 @@ def organization_about_to_be_created(sender, instance: Organization, raw, using,
def ensure_available_features_sync(sender, instance: Organization, **kwargs):
updated_fields = kwargs.get("update_fields") or []
if "available_features" in updated_fields:
- logger.info("Notifying plugin-server to reset available features cache.", {"organization_id": instance.id})
- get_client().publish("reset-available-features-cache", json.dumps({"organization_id": str(instance.id)}))
+ logger.info(
+ "Notifying plugin-server to reset available features cache.",
+ {"organization_id": instance.id},
+ )
+ get_client().publish(
+ "reset-available-features-cache",
+ json.dumps({"organization_id": str(instance.id)}),
+ )
class OrganizationMembership(UUIDModel):
@@ -235,7 +252,10 @@ class Level(models.IntegerChoices):
OWNER = 15, "owner"
organization: models.ForeignKey = models.ForeignKey(
- "posthog.Organization", on_delete=models.CASCADE, related_name="memberships", related_query_name="membership"
+ "posthog.Organization",
+ on_delete=models.CASCADE,
+ related_name="memberships",
+ related_query_name="membership",
)
user: models.ForeignKey = models.ForeignKey(
"posthog.User",
@@ -251,9 +271,14 @@ class Level(models.IntegerChoices):
class Meta:
constraints = [
- models.UniqueConstraint(fields=["organization_id", "user_id"], name="unique_organization_membership"),
models.UniqueConstraint(
- fields=["organization_id"], condition=models.Q(level=15), name="only_one_owner_per_organization"
+ fields=["organization_id", "user_id"],
+ name="unique_organization_membership",
+ ),
+ models.UniqueConstraint(
+ fields=["organization_id"],
+ condition=models.Q(level=15),
+ name="only_one_owner_per_organization",
),
]
@@ -261,7 +286,9 @@ def __str__(self):
return str(self.Level(self.level))
def validate_update(
- self, membership_being_updated: "OrganizationMembership", new_level: Optional[Level] = None
+ self,
+ membership_being_updated: "OrganizationMembership",
+ new_level: Optional[Level] = None,
) -> None:
if new_level is not None:
if membership_being_updated.id == self.id:
@@ -290,7 +317,10 @@ def validate_update(
class OrganizationInvite(UUIDModel):
organization: models.ForeignKey = models.ForeignKey(
- "posthog.Organization", on_delete=models.CASCADE, related_name="invites", related_query_name="invite"
+ "posthog.Organization",
+ on_delete=models.CASCADE,
+ related_name="invites",
+ related_query_name="invite",
)
target_email: models.EmailField = models.EmailField(null=True, db_index=True)
first_name: models.CharField = models.CharField(max_length=30, blank=True, default="")
@@ -326,7 +356,8 @@ def validate(
if self.is_expired():
raise exceptions.ValidationError(
- "This invite has expired. Please ask your admin for a new one.", code="expired"
+ "This invite has expired. Please ask your admin for a new one.",
+ code="expired",
)
if user is None and User.objects.filter(email=invite_email).exists():
@@ -334,7 +365,8 @@ def validate(
if OrganizationMembership.objects.filter(organization=self.organization, user=user).exists():
raise exceptions.ValidationError(
- "You already are a member of this organization.", code="user_already_member"
+ "You already are a member of this organization.",
+ code="user_already_member",
)
if OrganizationMembership.objects.filter(
@@ -352,7 +384,12 @@ def use(self, user: "User", *, prevalidated: bool = False) -> None:
if is_email_available(with_absolute_urls=True) and self.organization.is_member_join_email_enabled:
from posthog.tasks.email import send_member_join
- send_member_join.apply_async(kwargs={"invitee_uuid": user.uuid, "organization_id": self.organization_id})
+ send_member_join.apply_async(
+ kwargs={
+ "invitee_uuid": user.uuid,
+ "organization_id": self.organization_id,
+ }
+ )
OrganizationInvite.objects.filter(target_email__iexact=self.target_email).delete()
def is_expired(self) -> bool:
diff --git a/posthog/models/performance/sql.py b/posthog/models/performance/sql.py
index 14c5b1763cc08..31914e858b9b9 100644
--- a/posthog/models/performance/sql.py
+++ b/posthog/models/performance/sql.py
@@ -1,7 +1,16 @@
"""https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry"""
from posthog import settings
-from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, STORAGE_POLICY, kafka_engine, ttl_period
-from posthog.clickhouse.table_engines import Distributed, MergeTreeEngine, ReplicationScheme
+from posthog.clickhouse.kafka_engine import (
+ KAFKA_COLUMNS_WITH_PARTITION,
+ STORAGE_POLICY,
+ kafka_engine,
+ ttl_period,
+)
+from posthog.clickhouse.table_engines import (
+ Distributed,
+ MergeTreeEngine,
+ ReplicationScheme,
+)
from posthog.kafka_client.topics import KAFKA_PERFORMANCE_EVENTS
"""
diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py
index b2b3bb3e36725..cae5e450fa766 100644
--- a/posthog/models/person/person.py
+++ b/posthog/models/person/person.py
@@ -66,7 +66,10 @@ def split_person(self, main_distinct_id: Optional[str], max_splits: Optional[int
pdi.version = (pdi.version or 0) + 1
pdi.save(update_fields=["version", "person_id"])
- from posthog.models.person.util import create_person, create_person_distinct_id
+ from posthog.models.person.util import (
+ create_person,
+ create_person_distinct_id,
+ )
create_person_distinct_id(
team_id=self.team_id,
@@ -75,7 +78,11 @@ def split_person(self, main_distinct_id: Optional[str], max_splits: Optional[int
is_deleted=False,
version=pdi.version,
)
- create_person(team_id=self.team_id, uuid=str(person.uuid), version=person.version or 0)
+ create_person(
+ team_id=self.team_id,
+ uuid=str(person.uuid),
+ version=person.version or 0,
+ )
objects = PersonManager()
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True)
@@ -138,7 +145,10 @@ class PersonOverride(models.Model):
class Meta:
constraints = [
- models.UniqueConstraint(fields=["team", "old_person_id"], name="unique override per old_person_id"),
+ models.UniqueConstraint(
+ fields=["team", "old_person_id"],
+ name="unique override per old_person_id",
+ ),
models.CheckConstraint(
check=~Q(old_person_id__exact=F("override_person_id")),
name="old_person_id_different_from_override_person_id",
diff --git a/posthog/models/person/sql.py b/posthog/models/person/sql.py
index 61088e6c03761..ffb80869b9e9a 100644
--- a/posthog/models/person/sql.py
+++ b/posthog/models/person/sql.py
@@ -2,7 +2,11 @@
from posthog.clickhouse.indexes import index_by_kafka_timestamp
from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine
from posthog.clickhouse.table_engines import CollapsingMergeTree, ReplacingMergeTree
-from posthog.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA_PERSON_UNIQUE_ID
+from posthog.kafka_client.topics import (
+ KAFKA_PERSON,
+ KAFKA_PERSON_DISTINCT_ID,
+ KAFKA_PERSON_UNIQUE_ID,
+)
from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE
TRUNCATE_PERSON_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS person ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
@@ -48,7 +52,10 @@
)
KAFKA_PERSONS_TABLE_SQL = lambda: PERSONS_TABLE_BASE_SQL.format(
- table_name="kafka_" + PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, engine=kafka_engine(KAFKA_PERSON), extra_fields=""
+ table_name="kafka_" + PERSONS_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ engine=kafka_engine(KAFKA_PERSON),
+ extra_fields="",
)
# You must include the database here because of a bug in clickhouse
@@ -154,7 +161,9 @@
_offset
FROM {database}.kafka_{table_name}
""".format(
- table_name=PERSONS_DISTINCT_ID_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
+ table_name=PERSONS_DISTINCT_ID_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ database=CLICKHOUSE_DATABASE,
)
#
@@ -216,7 +225,9 @@
_partition
FROM {database}.kafka_{table_name}
""".format(
- table_name=PERSON_DISTINCT_ID2_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
+ table_name=PERSON_DISTINCT_ID2_TABLE,
+ cluster=CLICKHOUSE_CLUSTER,
+ database=CLICKHOUSE_DATABASE,
)
#
diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py
index 9af13bc6e9d05..7e8afc3db5e78 100644
--- a/posthog/models/person/util.py
+++ b/posthog/models/person/util.py
@@ -13,7 +13,11 @@
from posthog.client import sync_execute
from posthog.kafka_client.client import ClickhouseProducer
-from posthog.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA_PERSON_OVERRIDES
+from posthog.kafka_client.topics import (
+ KAFKA_PERSON,
+ KAFKA_PERSON_DISTINCT_ID,
+ KAFKA_PERSON_OVERRIDES,
+)
from posthog.models.person import Person, PersonDistinctId
from posthog.models.person.sql import (
BULK_INSERT_PERSON_DISTINCT_ID2,
@@ -53,12 +57,22 @@ def person_distinct_id_created(sender, instance: PersonDistinctId, created, **kw
@receiver(post_delete, sender=Person)
def person_deleted(sender, instance: Person, **kwargs):
- _delete_person(instance.team.id, instance.uuid, int(instance.version or 0), instance.created_at, sync=True)
+ _delete_person(
+ instance.team.id,
+ instance.uuid,
+ int(instance.version or 0),
+ instance.created_at,
+ sync=True,
+ )
@receiver(post_delete, sender=PersonDistinctId)
def person_distinct_id_deleted(sender, instance: PersonDistinctId, **kwargs):
_delete_ch_distinct_id(
- instance.team.pk, instance.person.uuid, instance.distinct_id, instance.version or 0, sync=True
+ instance.team.pk,
+ instance.person.uuid,
+ instance.distinct_id,
+ instance.version or 0,
+ sync=True,
)
try:
@@ -83,7 +97,11 @@ def bulk_create_persons(persons_list: List[Dict]):
for index, person in enumerate(inserted):
for distinct_id in persons_list[index]["distinct_ids"]:
distinct_ids.append(
- PersonDistinctId(person_id=person.pk, distinct_id=distinct_id, team_id=person.team_id)
+ PersonDistinctId(
+ person_id=person.pk,
+ distinct_id=distinct_id,
+ team_id=person.team_id,
+ )
)
distinct_id_inserts.append(f"('{distinct_id}', '{person.uuid}', {person.team_id}, 0, 0, now(), 0, 0)")
person_mapping[distinct_id] = person
@@ -96,7 +114,10 @@ def bulk_create_persons(persons_list: List[Dict]):
PersonDistinctId.objects.bulk_create(distinct_ids)
sync_execute(INSERT_PERSON_BULK_SQL + ", ".join(person_inserts), flush=False)
- sync_execute(BULK_INSERT_PERSON_DISTINCT_ID2 + ", ".join(distinct_id_inserts), flush=False)
+ sync_execute(
+ BULK_INSERT_PERSON_DISTINCT_ID2 + ", ".join(distinct_id_inserts),
+ flush=False,
+ )
return person_mapping
@@ -147,7 +168,12 @@ def create_person(
def create_person_distinct_id(
- team_id: int, distinct_id: str, person_id: str, version=0, is_deleted: bool = False, sync: bool = False
+ team_id: int,
+ distinct_id: str,
+ person_id: str,
+ version=0,
+ is_deleted: bool = False,
+ sync: bool = False,
) -> None:
p = ClickhouseProducer()
p.produce(
@@ -191,7 +217,9 @@ def create_person_override(
def get_persons_by_distinct_ids(team_id: int, distinct_ids: List[str]) -> QuerySet:
return Person.objects.filter(
- team_id=team_id, persondistinctid__team_id=team_id, persondistinctid__distinct_id__in=distinct_ids
+ team_id=team_id,
+ persondistinctid__team_id=team_id,
+ persondistinctid__distinct_id__in=distinct_ids,
)
@@ -208,7 +236,11 @@ def delete_person(person: Person, sync: bool = False) -> None:
def _delete_person(
- team_id: int, uuid: UUID, version: int, created_at: Optional[datetime.datetime] = None, sync: bool = False
+ team_id: int,
+ uuid: UUID,
+ version: int,
+ created_at: Optional[datetime.datetime] = None,
+ sync: bool = False,
) -> None:
create_person(
uuid=str(uuid),
diff --git a/posthog/models/person_overrides/sql.py b/posthog/models/person_overrides/sql.py
index 853988495f639..c518db6de0e11 100644
--- a/posthog/models/person_overrides/sql.py
+++ b/posthog/models/person_overrides/sql.py
@@ -14,7 +14,11 @@
from django.conf import settings
from posthog.kafka_client.topics import KAFKA_PERSON_OVERRIDE
-from posthog.settings.data_stores import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE, KAFKA_HOSTS
+from posthog.settings.data_stores import (
+ CLICKHOUSE_CLUSTER,
+ CLICKHOUSE_DATABASE,
+ KAFKA_HOSTS,
+)
PERSON_OVERRIDES_CREATE_TABLE_SQL = f"""
CREATE TABLE IF NOT EXISTS `{CLICKHOUSE_DATABASE}`.`person_overrides`
diff --git a/posthog/models/personal_api_key.py b/posthog/models/personal_api_key.py
index 7d42679a627a4..8692654e3861a 100644
--- a/posthog/models/personal_api_key.py
+++ b/posthog/models/personal_api_key.py
@@ -29,5 +29,9 @@ class PersonalAPIKey(models.Model):
# DEPRECATED: personal API keys are now specifically personal, without team affiliation
team = models.ForeignKey(
- "posthog.Team", on_delete=models.SET_NULL, related_name="personal_api_keys+", null=True, blank=True
+ "posthog.Team",
+ on_delete=models.SET_NULL,
+ related_name="personal_api_keys+",
+ null=True,
+ blank=True,
)
diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py
index 0f055f9d68c49..b8787dd3df344 100644
--- a/posthog/models/plugin.py
+++ b/posthog/models/plugin.py
@@ -135,12 +135,24 @@ def install(self, **kwargs) -> "Plugin":
class Plugin(models.Model):
class PluginType(models.TextChoices):
LOCAL = "local", "local" # url starts with "file:"
- CUSTOM = "custom", "custom" # github or npm url downloaded as zip or tar.gz into field "archive"
- REPOSITORY = "repository", "repository" # same, but originating from our plugins.json repository
- SOURCE = "source", "source" # coded inside the browser (versioned via plugin_source_version)
+ CUSTOM = (
+ "custom",
+ "custom",
+ ) # github or npm url downloaded as zip or tar.gz into field "archive"
+ REPOSITORY = (
+ "repository",
+ "repository",
+ ) # same, but originating from our plugins.json repository
+ SOURCE = (
+ "source",
+ "source",
+ ) # coded inside the browser (versioned via plugin_source_version)
organization: models.ForeignKey = models.ForeignKey(
- "posthog.Organization", on_delete=models.CASCADE, related_name="plugins", related_query_name="plugin"
+ "posthog.Organization",
+ on_delete=models.CASCADE,
+ related_name="plugins",
+ related_query_name="plugin",
)
plugin_type: models.CharField = models.CharField(
max_length=200, null=True, blank=True, choices=PluginType.choices, default=None
@@ -240,7 +252,10 @@ class PluginAttachment(models.Model):
class PluginStorage(models.Model):
class Meta:
constraints = [
- models.UniqueConstraint(fields=["plugin_config_id", "key"], name="posthog_unique_plugin_storage_key")
+ models.UniqueConstraint(
+ fields=["plugin_config_id", "key"],
+ name="posthog_unique_plugin_storage_key",
+ )
]
plugin_config: models.ForeignKey = models.ForeignKey("PluginConfig", on_delete=models.CASCADE)
@@ -266,7 +281,10 @@ class PluginSourceFileManager(models.Manager):
def sync_from_plugin_archive(
self, plugin: Plugin, plugin_json_parsed: Optional[Dict[str, Any]] = None
) -> Tuple[
- "PluginSourceFile", Optional["PluginSourceFile"], Optional["PluginSourceFile"], Optional["PluginSourceFile"]
+ "PluginSourceFile",
+ Optional["PluginSourceFile"],
+ Optional["PluginSourceFile"],
+ Optional["PluginSourceFile"],
]:
"""Create PluginSourceFile objects from a plugin that has an archive.
@@ -281,7 +299,12 @@ def sync_from_plugin_archive(
plugin_json_instance, _ = PluginSourceFile.objects.update_or_create(
plugin=plugin,
filename="plugin.json",
- defaults={"source": plugin_json, "transpiled": None, "status": None, "error": None},
+ defaults={
+ "source": plugin_json,
+ "transpiled": None,
+ "status": None,
+ "error": None,
+ },
)
# Save frontend.tsx
frontend_tsx_instance: Optional["PluginSourceFile"] = None
@@ -289,7 +312,12 @@ def sync_from_plugin_archive(
frontend_tsx_instance, _ = PluginSourceFile.objects.update_or_create(
plugin=plugin,
filename="frontend.tsx",
- defaults={"source": frontend_tsx, "transpiled": None, "status": None, "error": None},
+ defaults={
+ "source": frontend_tsx,
+ "transpiled": None,
+ "status": None,
+ "error": None,
+ },
)
else:
filenames_to_delete.append("frontend.tsx")
@@ -299,7 +327,12 @@ def sync_from_plugin_archive(
site_ts_instance, _ = PluginSourceFile.objects.update_or_create(
plugin=plugin,
filename="site.ts",
- defaults={"source": site_ts, "transpiled": None, "status": None, "error": None},
+ defaults={
+ "source": site_ts,
+ "transpiled": None,
+ "status": None,
+ "error": None,
+ },
)
else:
filenames_to_delete.append("site.ts")
@@ -311,7 +344,12 @@ def sync_from_plugin_archive(
index_ts_instance, _ = PluginSourceFile.objects.update_or_create(
plugin=plugin,
filename="index.ts",
- defaults={"source": index_ts, "transpiled": None, "status": None, "error": None},
+ defaults={
+ "source": index_ts,
+ "transpiled": None,
+ "status": None,
+ "error": None,
+ },
)
else:
filenames_to_delete.append("index.ts")
@@ -319,7 +357,12 @@ def sync_from_plugin_archive(
PluginSourceFile.objects.filter(plugin=plugin, filename__in=filenames_to_delete).delete()
# Trigger plugin server reload and code transpilation
plugin.save()
- return plugin_json_instance, index_ts_instance, frontend_tsx_instance, site_ts_instance
+ return (
+ plugin_json_instance,
+ index_ts_instance,
+ frontend_tsx_instance,
+ site_ts_instance,
+ )
class PluginSourceFile(UUIDModel):
@@ -431,7 +474,8 @@ def preinstall_plugins_for_new_organization(sender, instance: Organization, crea
)
except Exception as e:
print(
- f"⚠️ Cannot preinstall plugin from {plugin_url}, skipping it for organization {instance.name}:\n", e
+ f"⚠️ Cannot preinstall plugin from {plugin_url}, skipping it for organization {instance.name}:\n",
+ e,
)
@@ -439,7 +483,6 @@ def preinstall_plugins_for_new_organization(sender, instance: Organization, crea
def enable_preinstalled_plugins_for_new_team(sender, instance: Team, created: bool, **kwargs):
if created and can_configure_plugins(instance.organization):
for order, preinstalled_plugin in enumerate(Plugin.objects.filter(is_preinstalled=True)):
-
PluginConfig.objects.create(
team=instance,
plugin=preinstalled_plugin,
diff --git a/posthog/models/prompt/prompt.py b/posthog/models/prompt/prompt.py
index 74d55a0f43354..2d975a54b3e1a 100644
--- a/posthog/models/prompt/prompt.py
+++ b/posthog/models/prompt/prompt.py
@@ -4,7 +4,6 @@
class Prompt(models.Model):
-
step: models.IntegerField = models.IntegerField()
type: models.CharField = models.CharField(max_length=200) # tooltip, modal, etc
title: models.CharField = models.CharField(max_length=200)
diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py
index ff57e46b77e21..3b2b2decbc574 100644
--- a/posthog/models/property/property.py
+++ b/posthog/models/property/property.py
@@ -78,7 +78,12 @@ class BehavioralPropertyType(str, Enum):
PropertyIdentifier = Tuple[PropertyName, PropertyType, Optional[GroupTypeIndex]]
NEGATED_OPERATORS = ["is_not", "not_icontains", "not_regex", "is_not_set"]
-CLICKHOUSE_ONLY_PROPERTY_TYPES = ["static-cohort", "precalculated-cohort", "behavioral", "recording"]
+CLICKHOUSE_ONLY_PROPERTY_TYPES = [
+ "static-cohort",
+ "precalculated-cohort",
+ "behavioral",
+ "recording",
+]
VALIDATE_PROP_TYPES = {
"event": ["key", "value"],
@@ -95,7 +100,13 @@ class BehavioralPropertyType(str, Enum):
}
VALIDATE_BEHAVIORAL_PROP_TYPES = {
- BehavioralPropertyType.PERFORMED_EVENT: ["key", "value", "event_type", "time_value", "time_interval"],
+ BehavioralPropertyType.PERFORMED_EVENT: [
+ "key",
+ "value",
+ "event_type",
+ "time_value",
+ "time_interval",
+ ],
BehavioralPropertyType.PERFORMED_EVENT_MULTIPLE: [
"key",
"value",
@@ -104,7 +115,13 @@ class BehavioralPropertyType(str, Enum):
"time_interval",
"operator_value",
],
- BehavioralPropertyType.PERFORMED_EVENT_FIRST_TIME: ["key", "value", "event_type", "time_value", "time_interval"],
+ BehavioralPropertyType.PERFORMED_EVENT_FIRST_TIME: [
+ "key",
+ "value",
+ "event_type",
+ "time_value",
+ "time_interval",
+ ],
BehavioralPropertyType.PERFORMED_EVENT_SEQUENCE: [
"key",
"value",
@@ -282,7 +299,11 @@ class PropertyGroup:
type: PropertyOperatorType
values: Union[List[Property], List["PropertyGroup"]]
- def __init__(self, type: PropertyOperatorType, values: Union[List[Property], List["PropertyGroup"]]) -> None:
+ def __init__(
+ self,
+ type: PropertyOperatorType,
+ values: Union[List[Property], List["PropertyGroup"]],
+ ) -> None:
self.type = type
self.values = values
@@ -310,7 +331,10 @@ def to_dict(self):
if not self.values:
return {}
- return {"type": self.type.value, "values": [prop.to_dict() for prop in self.values]}
+ return {
+ "type": self.type.value,
+ "values": [prop.to_dict() for prop in self.values],
+ }
def __repr__(self):
params_repr = ", ".join(f"{repr(prop)}" for prop in self.values)
diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py
index 18368ac082f5d..b353eb11bb141 100644
--- a/posthog/models/property/util.py
+++ b/posthog/models/property/util.py
@@ -17,7 +17,10 @@
from posthog.clickhouse.client.escape import escape_param_for_clickhouse
from posthog.clickhouse.kafka_engine import trim_quotes_expr
-from posthog.clickhouse.materialized_columns import TableWithProperties, get_materialized_columns
+from posthog.clickhouse.materialized_columns import (
+ TableWithProperties,
+ get_materialized_columns,
+)
from posthog.constants import PropertyOperatorType
from posthog.hogql import ast
from posthog.hogql.hogql import HogQLContext
@@ -36,7 +39,10 @@
)
from posthog.models.event import Selector
from posthog.models.group.sql import GET_GROUP_IDS_BY_PROPERTY_SQL
-from posthog.models.person.sql import GET_DISTINCT_IDS_BY_PERSON_ID_FILTER, GET_DISTINCT_IDS_BY_PROPERTY_SQL
+from posthog.models.person.sql import (
+ GET_DISTINCT_IDS_BY_PERSON_ID_FILTER,
+ GET_DISTINCT_IDS_BY_PROPERTY_SQL,
+)
from posthog.models.property import (
NEGATED_OPERATORS,
OperatorType,
@@ -177,13 +183,19 @@ def parse_prop_clauses(
else:
if person_properties_mode == PersonPropertiesMode.USING_SUBQUERY:
person_id_query, cohort_filter_params = format_filter_query(
- cohort, idx, hogql_context, custom_match_field=person_id_joined_alias
+ cohort,
+ idx,
+ hogql_context,
+ custom_match_field=person_id_joined_alias,
)
params = {**params, **cohort_filter_params}
final.append(f"{property_operator} {table_formatted}distinct_id IN ({person_id_query})")
else:
person_id_query, cohort_filter_params = format_cohort_subquery(
- cohort, idx, hogql_context, custom_match_field=f"{person_id_joined_alias}"
+ cohort,
+ idx,
+ hogql_context,
+ custom_match_field=f"{person_id_joined_alias}",
)
params = {**params, **cohort_filter_params}
final.append(f"{property_operator} {person_id_query}")
@@ -236,7 +248,8 @@ def parse_prop_clauses(
final.append(
" {property_operator} {table_name}distinct_id IN ({filter_query})".format(
filter_query=GET_DISTINCT_IDS_BY_PROPERTY_SQL.format(
- filters=filter_query, GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id)
+ filters=filter_query,
+ GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id),
),
table_name=table_formatted,
property_operator=property_operator,
@@ -270,7 +283,10 @@ def parse_prop_clauses(
params.update(filter_params)
elif prop.type == "element":
query, filter_params = filter_element(
- cast(StringMatching, prop.key), prop.value, operator=prop.operator, prepend="{}_".format(prepend)
+ cast(StringMatching, prop.key),
+ prop.value,
+ operator=prop.operator,
+ prepend="{}_".format(prepend),
)
if query:
final.append(f"{property_operator} {query}")
@@ -278,7 +294,10 @@ def parse_prop_clauses(
elif (
prop.type == "group"
and person_properties_mode
- in [PersonPropertiesMode.DIRECT_ON_EVENTS, PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2]
+ in [
+ PersonPropertiesMode.DIRECT_ON_EVENTS,
+ PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2,
+ ]
and groups_on_events_querying_enabled()
):
group_column = f"group{prop.group_type_index}_properties"
@@ -308,7 +327,11 @@ def parse_prop_clauses(
else:
# :TRICKY: offer groups support for queries which don't support automatically joining with groups table yet (e.g. lifecycle)
filter_query, filter_params = prop_filter_json_extract(
- prop, idx, prepend, prop_var=f"group_properties", allow_denormalized_props=False
+ prop,
+ idx,
+ prepend,
+ prop_var=f"group_properties",
+ allow_denormalized_props=False,
)
group_type_index_var = f"{prepend}_group_type_index_{idx}"
groups_subquery = GET_GROUP_IDS_BY_PROPERTY_SQL.format(
@@ -335,7 +358,8 @@ def parse_prop_clauses(
else:
# :TODO: (performance) Avoid subqueries whenever possible, use joins instead
subquery = GET_DISTINCT_IDS_BY_PERSON_ID_FILTER.format(
- filters=filter_query, GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id)
+ filters=filter_query,
+ GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id),
)
final.append(f"{property_operator} {table_formatted}distinct_id IN ({subquery})")
params.update(filter_params)
@@ -415,28 +439,46 @@ def prop_filter_json_extract(
params: Dict[str, Any] = {}
if operator == "is_not":
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): box_value(prop.value)}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): box_value(prop.value),
+ }
return (
" {property_operator} NOT has(%(v{prepend}_{idx})s, {left})".format(
- idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator
+ idx=idx,
+ prepend=prepend,
+ left=property_expr,
+ property_operator=property_operator,
),
params,
)
elif operator == "icontains":
value = "%{}%".format(prop.value)
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): value,
+ }
return (
" {property_operator} {left} ILIKE %(v{prepend}_{idx})s".format(
- idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator
+ idx=idx,
+ prepend=prepend,
+ left=property_expr,
+ property_operator=property_operator,
),
params,
)
elif operator == "not_icontains":
value = "%{}%".format(prop.value)
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): value,
+ }
return (
" {property_operator} NOT ({left} ILIKE %(v{prepend}_{idx})s)".format(
- idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator
+ idx=idx,
+ prepend=prepend,
+ left=property_expr,
+ property_operator=property_operator,
),
params,
)
@@ -445,7 +487,10 @@ def prop_filter_json_extract(
# If OR'ing, shouldn't be a problem since nothing will match this specific clause
return f"{property_operator} 1 = 2", {}
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): prop.value,
+ }
return (
" {property_operator} {regex_function}({left}, %(v{prepend}_{idx})s)".format(
@@ -458,7 +503,10 @@ def prop_filter_json_extract(
params,
)
elif operator == "is_set":
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): prop.value,
+ }
if is_denormalized:
return (
" {property_operator} notEmpty({left})".format(left=property_expr, property_operator=property_operator),
@@ -466,12 +514,18 @@ def prop_filter_json_extract(
)
return (
" {property_operator} JSONHas({prop_var}, %(k{prepend}_{idx})s)".format(
- idx=idx, prepend=prepend, prop_var=prop_var, property_operator=property_operator
+ idx=idx,
+ prepend=prepend,
+ prop_var=prop_var,
+ property_operator=property_operator,
),
params,
)
elif operator == "is_not_set":
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): prop.value,
+ }
if is_denormalized:
return (
" {property_operator} empty({left})".format(left=property_expr, property_operator=property_operator),
@@ -479,7 +533,11 @@ def prop_filter_json_extract(
)
return (
" {property_operator} (isNull({left}) OR NOT JSONHas({prop_var}, %(k{prepend}_{idx})s))".format(
- idx=idx, prepend=prepend, prop_var=prop_var, left=property_expr, property_operator=property_operator
+ idx=idx,
+ prepend=prepend,
+ prop_var=prop_var,
+ left=property_expr,
+ property_operator=property_operator,
),
params,
)
@@ -496,7 +554,10 @@ def prop_filter_json_extract(
parseDateTimeBestEffortOrNull(substring({property_expr}, 1, 10))
)) = %({prop_value_param_key})s"""
- return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value})
+ return (
+ query,
+ {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value},
+ )
elif operator == "is_date_after":
# TODO introducing duplication in these branches now rather than refactor too early
assert isinstance(prop.value, str)
@@ -518,7 +579,10 @@ def prop_filter_json_extract(
query = f"""{property_operator} {first_of_date_or_timestamp} > {adjusted_value}"""
- return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value})
+ return (
+ query,
+ {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value},
+ )
elif operator == "is_date_before":
# TODO introducing duplication in these branches now rather than refactor too early
assert isinstance(prop.value, str)
@@ -528,11 +592,17 @@ def prop_filter_json_extract(
first_of_date_or_timestamp = f"coalesce({try_parse_as_date},{try_parse_as_timestamp})"
query = f"""{property_operator} {first_of_date_or_timestamp} < %({prop_value_param_key})s"""
- return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value})
+ return (
+ query,
+ {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value},
+ )
elif operator in ["gt", "lt", "gte", "lte"]:
count_operator = get_count_operator(operator)
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): prop.value,
+ }
extract_property_expr = trim_quotes_expr(f"replaceRegexpAll({property_expr}, ' ', '')")
return (
f" {property_operator} toFloat64OrNull({extract_property_expr}) {count_operator} %(v{prepend}_{idx})s",
@@ -547,10 +617,17 @@ def prop_filter_json_extract(
}
else:
clause = " {property_operator} has(%(v{prepend}_{idx})s, {left})"
- params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): box_value(prop.value)}
+ params = {
+ "k{}_{}".format(prepend, idx): prop.key,
+ "v{}_{}".format(prepend, idx): box_value(prop.value),
+ }
return (
clause.format(
- left=property_expr, idx=idx, prepend=prepend, prop_var=prop_var, property_operator=property_operator
+ left=property_expr,
+ idx=idx,
+ prepend=prepend,
+ prop_var=prop_var,
+ property_operator=property_operator,
),
params,
)
@@ -664,7 +741,10 @@ def get_property_string_expr(
and (property_name, materialised_table_column) in materialized_columns
and ("group" not in materialised_table_column or groups_on_events_querying_enabled())
):
- return f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"', True
+ return (
+ f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"',
+ True,
+ )
return trim_quotes_expr(f"JSONExtractRaw({table_string}{column}, {var})"), False
@@ -731,7 +811,10 @@ def filter_element(
raise ValueError(f'Invalid element filtering key "{key}"')
if combination_conditions:
- return f"{'NOT ' if operator in NEGATED_OPERATORS else ''}({' OR '.join(combination_conditions)})", params
+ return (
+ f"{'NOT ' if operator in NEGATED_OPERATORS else ''}({' OR '.join(combination_conditions)})",
+ params,
+ )
else:
# If there are no values to filter by, this either matches nothing (for non-negated operators like "equals"),
# or everything (for negated operators like "doesn't equal")
@@ -837,7 +920,10 @@ def get_session_property_filter_statement(prop: Property, idx: int, prepend: str
value = f"session_duration_value{prepend}_{idx}"
operator = get_count_operator(prop.operator)
- return (f"{SessionQuery.SESSION_TABLE_ALIAS}.session_duration {operator} %({value})s", {value: duration})
+ return (
+ f"{SessionQuery.SESSION_TABLE_ALIAS}.session_duration {operator} %({value})s",
+ {value: duration},
+ )
else:
raise exceptions.ValidationError(f"Property '{prop.key}' is not allowed in session property filters.")
diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py
index b295229a8cfcd..7747a17c71820 100644
--- a/posthog/models/property_definition.py
+++ b/posthog/models/property_definition.py
@@ -16,7 +16,10 @@ class PropertyType(models.TextChoices):
class PropertyFormat(models.TextChoices):
UnixTimestamp = "unix_timestamp", "Unix Timestamp in seconds"
- UnixTimestampMilliseconds = "unix_timestamp_milliseconds", "Unix Timestamp in milliseconds"
+ UnixTimestampMilliseconds = (
+ "unix_timestamp_milliseconds",
+ "Unix Timestamp in milliseconds",
+ )
ISO8601Date = "YYYY-MM-DDThh:mm:ssZ", "YYYY-MM-DDThh:mm:ssZ"
FullDate = "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss"
FullDateIncreasing = "DD-MM-YYYY hh:mm:ss", "DD-MM-YYYY hh:mm:ss"
@@ -33,7 +36,10 @@ class Type(models.IntegerChoices):
GROUP = 3, "group"
team: models.ForeignKey = models.ForeignKey(
- Team, on_delete=models.CASCADE, related_name="property_definitions", related_query_name="team"
+ Team,
+ on_delete=models.CASCADE,
+ related_name="property_definitions",
+ related_query_name="team",
)
name: models.CharField = models.CharField(max_length=400)
is_numerical: models.BooleanField = models.BooleanField(
@@ -45,7 +51,7 @@ class Type(models.IntegerChoices):
# :TRICKY: May be null for historical events
type: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(default=Type.EVENT, choices=Type.choices)
# Only populated for `Type.GROUP`
- group_type_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=True)
+ group_type_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(null=True)
# DEPRECATED
property_type_format = models.CharField(
@@ -76,15 +82,19 @@ class Meta:
models.Index(fields=["team_id", "type", "is_numerical"]),
] + [
GinIndex(
- name="index_property_definition_name", fields=["name"], opclasses=["gin_trgm_ops"]
+ name="index_property_definition_name",
+ fields=["name"],
+ opclasses=["gin_trgm_ops"],
) # To speed up DB-based fuzzy searching
]
constraints = [
models.CheckConstraint(
- name="property_type_is_valid", check=models.Q(property_type__in=PropertyType.values)
+ name="property_type_is_valid",
+ check=models.Q(property_type__in=PropertyType.values),
),
models.CheckConstraint(
- name="group_type_index_set", check=~models.Q(type=3) | models.Q(group_type_index__isnull=False)
+ name="group_type_index_set",
+ check=~models.Q(type=3) | models.Q(group_type_index__isnull=False),
),
UniqueConstraintByExpression(
name="posthog_propertydefinition_uniq",
diff --git a/posthog/models/sharing_configuration.py b/posthog/models/sharing_configuration.py
index 7dcdcb7e8f2b9..44cc70cbb7be4 100644
--- a/posthog/models/sharing_configuration.py
+++ b/posthog/models/sharing_configuration.py
@@ -26,7 +26,11 @@ class SharingConfiguration(models.Model):
enabled: models.BooleanField = models.BooleanField(default=False)
access_token: models.CharField = models.CharField(
- max_length=400, null=True, blank=True, default=get_default_access_token, unique=True
+ max_length=400,
+ null=True,
+ blank=True,
+ default=get_default_access_token,
+ unique=True,
)
def can_access_object(self, obj: models.Model):
diff --git a/posthog/models/subscription.py b/posthog/models/subscription.py
index e291f7c1b0490..3680155f7df27 100644
--- a/posthog/models/subscription.py
+++ b/posthog/models/subscription.py
@@ -92,7 +92,10 @@ def __init__(self, *args, **kwargs):
interval: models.IntegerField = models.IntegerField(default=1)
count: models.IntegerField = models.IntegerField(null=True)
byweekday: ArrayField = ArrayField(
- models.CharField(max_length=10, choices=SubscriptionByWeekDay.choices), null=True, blank=True, default=None
+ models.CharField(max_length=10, choices=SubscriptionByWeekDay.choices),
+ null=True,
+ blank=True,
+ default=None,
)
bysetpos: models.IntegerField = models.IntegerField(null=True)
start_date: models.DateTimeField = models.DateTimeField()
@@ -141,7 +144,9 @@ def url(self):
def resource_info(self) -> Optional[SubscriptionResourceInfo]:
if self.insight:
return SubscriptionResourceInfo(
- "Insight", f"{self.insight.name or self.insight.derived_name}", self.insight.url
+ "Insight",
+ f"{self.insight.name or self.insight.derived_name}",
+ self.insight.url,
)
elif self.dashboard:
return SubscriptionResourceInfo("Dashboard", self.dashboard.name, self.dashboard.url)
@@ -151,14 +156,25 @@ def resource_info(self) -> Optional[SubscriptionResourceInfo]:
@property
def summary(self):
try:
- human_frequency = {"daily": "day", "weekly": "week", "monthly": "month", "yearly": "year"}[self.frequency]
+ human_frequency = {
+ "daily": "day",
+ "weekly": "week",
+ "monthly": "month",
+ "yearly": "year",
+ }[self.frequency]
if self.interval > 1:
human_frequency = f"{human_frequency}s"
summary = f"sent every {str(self.interval) + ' ' if self.interval > 1 else ''}{human_frequency}"
if self.byweekday and self.bysetpos:
- human_bysetpos = {1: "first", 2: "second", 3: "third", 4: "fourth", -1: "last"}[self.bysetpos]
+ human_bysetpos = {
+ 1: "first",
+ 2: "second",
+ 3: "third",
+ 4: "fourth",
+ -1: "last",
+ }[self.bysetpos]
summary += (
f" on the {human_bysetpos} {self.byweekday[0].capitalize() if len(self.byweekday) == 1 else 'day'}"
)
diff --git a/posthog/models/tagged_item.py b/posthog/models/tagged_item.py
index 3d6b73383aaf8..4c55c4a663791 100644
--- a/posthog/models/tagged_item.py
+++ b/posthog/models/tagged_item.py
@@ -6,7 +6,14 @@
from posthog.models.utils import UUIDModel
-RELATED_OBJECTS = ("dashboard", "insight", "event_definition", "property_definition", "action", "feature_flag")
+RELATED_OBJECTS = (
+ "dashboard",
+ "insight",
+ "event_definition",
+ "property_definition",
+ "action",
+ "feature_flag",
+)
# Checks that exactly one object field is populated
@@ -14,7 +21,10 @@ def build_check(related_objects: Iterable[str]):
built_check_list: List[Union[Q, Q]] = []
for field in related_objects:
built_check_list.append(
- Q(*[(f"{other_field}__isnull", other_field != field) for other_field in related_objects], _connector="AND")
+ Q(
+ *[(f"{other_field}__isnull", other_field != field) for other_field in related_objects],
+ _connector="AND",
+ )
)
return Q(*built_check_list, _connector="OR")
@@ -23,7 +33,9 @@ def build_check(related_objects: Iterable[str]):
# uniqueness across null columns.
def build_partial_uniqueness_constraint(field: str):
return UniqueConstraint(
- fields=["tag", field], name=f"unique_{field}_tagged_item", condition=Q((f"{field}__isnull", False))
+ fields=["tag", field],
+ name=f"unique_{field}_tagged_item",
+ condition=Q((f"{field}__isnull", False)),
)
@@ -47,22 +59,46 @@ class TaggedItem(UUIDModel):
# When adding a new taggeditem-model relationship, make sure to add the foreign key field and append field name to
# the `RELATED_OBJECTS` tuple above.
dashboard: models.ForeignKey = models.ForeignKey(
- "Dashboard", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "Dashboard",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
insight: models.ForeignKey = models.ForeignKey(
- "Insight", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "Insight",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
event_definition: models.ForeignKey = models.ForeignKey(
- "EventDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "EventDefinition",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
property_definition: models.ForeignKey = models.ForeignKey(
- "PropertyDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "PropertyDefinition",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
action: models.ForeignKey = models.ForeignKey(
- "Action", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "Action",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
feature_flag: models.ForeignKey = models.ForeignKey(
- "FeatureFlag", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items"
+ "FeatureFlag",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="tagged_items",
)
class Meta:
diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py
index 4cd9ae773fdeb..bc458807b56a4 100644
--- a/posthog/models/team/team.py
+++ b/posthog/models/team/team.py
@@ -7,7 +7,11 @@
import pytz
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
-from django.core.validators import MinLengthValidator, MaxValueValidator, MinValueValidator
+from django.core.validators import (
+ MinLengthValidator,
+ MaxValueValidator,
+ MinValueValidator,
+)
from django.db import models
from django.db.models.signals import post_delete, post_save
from zoneinfo import ZoneInfo
@@ -20,7 +24,11 @@
from posthog.models.filters.utils import GroupTypeIndex
from posthog.models.instance_setting import get_instance_setting
from posthog.models.signals import mutable_receiver
-from posthog.models.utils import UUIDClassicModel, generate_random_token_project, sane_repr
+from posthog.models.utils import (
+ UUIDClassicModel,
+ generate_random_token_project,
+ sane_repr,
+)
from posthog.settings.utils import get_list
from posthog.utils import GenericEmails, PersonOnEventsMode
@@ -66,7 +74,12 @@ def set_test_account_filters(self, organization: Optional[Any]) -> List:
example_email = re.search(r"@[\w.]+", example_emails[0])
if example_email:
return [
- {"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"}
+ {
+ "key": "email",
+ "operator": "not_icontains",
+ "value": example_email.group(),
+ "type": "person",
+ }
] + filters
return filters
@@ -126,7 +139,10 @@ def clickhouse_mode(self) -> str:
class Team(UUIDClassicModel):
organization: models.ForeignKey = models.ForeignKey(
- "posthog.Organization", on_delete=models.CASCADE, related_name="teams", related_query_name="team"
+ "posthog.Organization",
+ on_delete=models.CASCADE,
+ related_name="teams",
+ related_query_name="team",
)
api_token: models.CharField = models.CharField(
max_length=200,
@@ -136,7 +152,9 @@ class Team(UUIDClassicModel):
)
app_urls: ArrayField = ArrayField(models.CharField(max_length=200, null=True), default=list, blank=True)
name: models.CharField = models.CharField(
- max_length=200, default="Default Project", validators=[MinLengthValidator(1, "Project must have a name!")]
+ max_length=200,
+ default="Default Project",
+ validators=[MinLengthValidator(1, "Project must have a name!")],
)
slack_incoming_webhook: models.CharField = models.CharField(max_length=500, null=True, blank=True)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)
@@ -157,8 +175,10 @@ class Team(UUIDClassicModel):
decimal_places=2,
validators=[MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(1))],
)
- session_recording_minimum_duration_milliseconds: models.IntegerField = models.IntegerField(
- null=True, blank=True, validators=[MinValueValidator(0), MaxValueValidator(15000)]
+ session_recording_minimum_duration_milliseconds: (models.IntegerField) = models.IntegerField(
+ null=True,
+ blank=True,
+ validators=[MinValueValidator(0), MaxValueValidator(15000)],
)
session_recording_linked_flag: models.JSONField = models.JSONField(null=True, blank=True)
capture_console_log_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True)
@@ -185,7 +205,11 @@ class Team(UUIDClassicModel):
recording_domains: ArrayField = ArrayField(models.CharField(max_length=200, null=True), blank=True, null=True)
primary_dashboard: models.ForeignKey = models.ForeignKey(
- "posthog.Dashboard", on_delete=models.SET_NULL, null=True, related_name="primary_dashboard_teams", blank=True
+ "posthog.Dashboard",
+ on_delete=models.SET_NULL,
+ null=True,
+ related_name="primary_dashboard_teams",
+ blank=True,
) # Dashboard shown on project homepage
# Generic field for storing any team-specific context that is more temporary in nature and thus
@@ -233,7 +257,10 @@ def person_on_events_mode(self) -> PersonOnEventsMode:
if self._person_on_events_querying_enabled:
# also tag person_on_events_enabled for legacy compatibility
- tag_queries(person_on_events_enabled=True, person_on_events_mode=PersonOnEventsMode.V1_ENABLED)
+ tag_queries(
+ person_on_events_enabled=True,
+ person_on_events_mode=PersonOnEventsMode.V1_ENABLED,
+ )
return PersonOnEventsMode.V1_ENABLED
return PersonOnEventsMode.DISABLED
@@ -259,7 +286,10 @@ def _person_on_events_querying_enabled(self) -> bool:
str(self.uuid),
groups={"organization": str(self.organization_id)},
group_properties={
- "organization": {"id": str(self.organization_id), "created_at": self.organization.created_at}
+ "organization": {
+ "id": str(self.organization_id),
+ "created_at": self.organization.created_at,
+ }
},
only_evaluate_locally=True,
send_feature_flag_events=False,
@@ -280,7 +310,10 @@ def _person_on_events_v2_querying_enabled(self) -> bool:
str(self.uuid),
groups={"organization": str(self.organization_id)},
group_properties={
- "organization": {"id": str(self.organization_id), "created_at": self.organization.created_at}
+ "organization": {
+ "id": str(self.organization_id),
+ "created_at": self.organization.created_at,
+ }
},
only_evaluate_locally=True,
send_feature_flag_events=False,
diff --git a/posthog/models/team/util.py b/posthog/models/team/util.py
index b2fa36b1430dd..ccaa249c559bf 100644
--- a/posthog/models/team/util.py
+++ b/posthog/models/team/util.py
@@ -49,6 +49,7 @@ def delete_batch_exports(team_ids: List[int]):
can_enable_actor_on_events = False
+
# :TRICKY: Avoid overly eagerly checking whether the migration is complete.
# We instead cache negative responses for a minute and a positive one forever.
def actor_on_events_ready() -> bool:
diff --git a/posthog/models/test/test_activity_logging.py b/posthog/models/test/test_activity_logging.py
index ebe161d5e3986..e7f3ed4c13663 100644
--- a/posthog/models/test/test_activity_logging.py
+++ b/posthog/models/test/test_activity_logging.py
@@ -13,6 +13,33 @@ def test_dict_changes_between(self):
self.assertEqual(len(changes), 3)
- self.assertIn(Change(type="Plugin", action="changed", field="change_field", before="foo", after="bar"), changes)
- self.assertIn(Change(type="Plugin", action="created", field="new_field", before=None, after="bar"), changes)
- self.assertIn(Change(type="Plugin", action="deleted", field="delete_field", before="foo", after=None), changes)
+ self.assertIn(
+ Change(
+ type="Plugin",
+ action="changed",
+ field="change_field",
+ before="foo",
+ after="bar",
+ ),
+ changes,
+ )
+ self.assertIn(
+ Change(
+ type="Plugin",
+ action="created",
+ field="new_field",
+ before=None,
+ after="bar",
+ ),
+ changes,
+ )
+ self.assertIn(
+ Change(
+ type="Plugin",
+ action="deleted",
+ field="delete_field",
+ before="foo",
+ after=None,
+ ),
+ changes,
+ )
diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py
index c1f94dc825ed0..abb057fd6b9fa 100644
--- a/posthog/models/test/test_async_deletion_model.py
+++ b/posthog/models/test/test_async_deletion_model.py
@@ -35,7 +35,10 @@ def setUp(self):
@snapshot_clickhouse_queries
def test_mark_team_deletions_done(self):
deletion = AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().mark_deletions_done()
@@ -48,7 +51,10 @@ def test_mark_deletions_done_team_when_not_done(self):
_create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1")
deletion = AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().mark_deletions_done()
@@ -58,11 +64,26 @@ def test_mark_deletions_done_team_when_not_done(self):
@snapshot_clickhouse_queries
def test_mark_deletions_done_person(self):
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid2)
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", person_id=uuid)
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ person_id=uuid2,
+ )
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[1],
+ distinct_id="1",
+ person_id=uuid,
+ )
deletion = AsyncDeletion.objects.create(
- deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user
+ deletion_type=DeletionType.Person,
+ team_id=self.teams[0].pk,
+ key=str(uuid),
+ created_by=self.user,
)
AsyncEventDeletion().mark_deletions_done()
@@ -72,10 +93,19 @@ def test_mark_deletions_done_person(self):
@snapshot_clickhouse_queries
def test_mark_deletions_done_person_when_not_done(self):
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid)
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ person_id=uuid,
+ )
deletion = AsyncDeletion.objects.create(
- deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user
+ deletion_type=DeletionType.Person,
+ team_id=self.teams[0].pk,
+ key=str(uuid),
+ created_by=self.user,
)
AsyncEventDeletion().mark_deletions_done()
@@ -86,13 +116,25 @@ def test_mark_deletions_done_person_when_not_done(self):
@snapshot_clickhouse_queries
def test_mark_deletions_done_groups(self):
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_1": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_1": "foo"},
)
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "bar"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_0": "bar"},
)
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", properties={"$group_0": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[1],
+ distinct_id="1",
+ properties={"$group_0": "foo"},
)
deletion = AsyncDeletion.objects.create(
@@ -111,7 +153,11 @@ def test_mark_deletions_done_groups(self):
@snapshot_clickhouse_queries
def test_mark_deletions_done_groups_when_not_done(self):
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_0": "foo"},
)
deletion = AsyncDeletion.objects.create(
@@ -132,7 +178,10 @@ def test_delete_teams(self):
_create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1")
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -144,7 +193,10 @@ def test_delete_teams_unrelated(self):
_create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1")
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -153,10 +205,19 @@ def test_delete_teams_unrelated(self):
@snapshot_clickhouse_alter_queries
def test_delete_person(self):
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid)
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ person_id=uuid,
+ )
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user
+ deletion_type=DeletionType.Person,
+ team_id=self.teams[0].pk,
+ key=str(uuid),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -165,11 +226,26 @@ def test_delete_person(self):
@snapshot_clickhouse_alter_queries
def test_delete_person_unrelated(self):
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid2)
- _create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", person_id=uuid)
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ person_id=uuid2,
+ )
+ _create_event(
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[1],
+ distinct_id="1",
+ person_id=uuid,
+ )
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user
+ deletion_type=DeletionType.Person,
+ team_id=self.teams[0].pk,
+ key=str(uuid),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -179,7 +255,11 @@ def test_delete_person_unrelated(self):
@snapshot_clickhouse_alter_queries
def test_delete_group(self):
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_0": "foo"},
)
AsyncDeletion.objects.create(
@@ -197,13 +277,25 @@ def test_delete_group(self):
@snapshot_clickhouse_alter_queries
def test_delete_group_unrelated(self):
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_1": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_1": "foo"},
)
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "bar"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[0],
+ distinct_id="1",
+ properties={"$group_0": "bar"},
)
_create_event(
- event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", properties={"$group_0": "foo"}
+ event_uuid=uuid4(),
+ event="event1",
+ team=self.teams[1],
+ distinct_id="1",
+ properties={"$group_0": "foo"},
)
AsyncDeletion.objects.create(
@@ -222,7 +314,12 @@ def test_delete_group_unrelated(self):
def test_delete_auxilary_models_via_team(self):
create_person(team_id=self.teams[0].pk, properties={"x": 0}, version=0, uuid=uuid)
create_person_distinct_id(self.teams[0].pk, "0", uuid)
- create_group(team_id=self.teams[0].pk, group_type_index=0, group_key="org:5", properties={})
+ create_group(
+ team_id=self.teams[0].pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={},
+ )
insert_static_cohort([uuid4()], 0, self.teams[0])
self._insert_cohortpeople_row(self.teams[0], uuid4(), 3)
create_plugin_log_entry(
@@ -236,7 +333,10 @@ def test_delete_auxilary_models_via_team(self):
)
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -252,7 +352,12 @@ def test_delete_auxilary_models_via_team(self):
def test_delete_auxilary_models_via_team_unrelated(self):
create_person(team_id=self.teams[1].pk, properties={"x": 0}, version=0, uuid=uuid)
create_person_distinct_id(self.teams[1].pk, "0", uuid)
- create_group(team_id=self.teams[1].pk, group_type_index=0, group_key="org:5", properties={})
+ create_group(
+ team_id=self.teams[1].pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={},
+ )
insert_static_cohort([uuid4()], 0, self.teams[1])
self._insert_cohortpeople_row(self.teams[1], uuid4(), 3)
create_plugin_log_entry(
@@ -266,7 +371,10 @@ def test_delete_auxilary_models_via_team_unrelated(self):
)
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user
+ deletion_type=DeletionType.Team,
+ team_id=self.teams[0].pk,
+ key=str(self.teams[0].pk),
+ created_by=self.user,
)
AsyncEventDeletion().run()
@@ -284,7 +392,10 @@ def test_delete_cohortpeople(self):
self._insert_cohortpeople_row(team, uuid4(), cohort_id)
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Cohort_full, team_id=team.pk, key=str(cohort_id) + "_0", created_by=self.user
+ deletion_type=DeletionType.Cohort_full,
+ team_id=team.pk,
+ key=str(cohort_id) + "_0",
+ created_by=self.user,
)
AsyncCohortDeletion().run()
@@ -298,7 +409,10 @@ def test_delete_cohortpeople_version(self):
self._insert_cohortpeople_row(team, uuid4(), cohort_id, 3)
AsyncDeletion.objects.create(
- deletion_type=DeletionType.Cohort_stale, team_id=team.pk, key=str(cohort_id) + "_3", created_by=self.user
+ deletion_type=DeletionType.Cohort_stale,
+ team_id=team.pk,
+ key=str(cohort_id) + "_3",
+ created_by=self.user,
)
AsyncCohortDeletion().run()
@@ -314,5 +428,10 @@ def _insert_cohortpeople_row(self, team: Team, person_id: UUID, cohort_id: int,
INSERT INTO cohortpeople (person_id, cohort_id, team_id, sign, version)
VALUES (%(person_id)s, %(cohort_id)s, %(team_id)s, 1, %(version)s)
""",
- {"person_id": str(person_id), "cohort_id": cohort_id, "team_id": team.pk, "version": version},
+ {
+ "person_id": str(person_id),
+ "cohort_id": cohort_id,
+ "team_id": team.pk,
+ "version": version,
+ },
)
diff --git a/posthog/models/test/test_dashboard_tile_model.py b/posthog/models/test/test_dashboard_tile_model.py
index fe88e813c1181..be13ba06975c3 100644
--- a/posthog/models/test/test_dashboard_tile_model.py
+++ b/posthog/models/test/test_dashboard_tile_model.py
@@ -5,7 +5,11 @@
from django.db.utils import IntegrityError
from posthog.models.dashboard import Dashboard
-from posthog.models.dashboard_tile import DashboardTile, Text, get_tiles_ordered_by_position
+from posthog.models.dashboard_tile import (
+ DashboardTile,
+ Text,
+ get_tiles_ordered_by_position,
+)
from posthog.models.exported_asset import ExportedAsset
from posthog.models.insight import Insight
from posthog.test.base import APIBaseTest
@@ -60,7 +64,6 @@ def test_cannot_add_a_tile_with_insight_and_text_on_validation(self) -> None:
DashboardTile.objects.create(dashboard=self.dashboard, insight=insight, text=text)
def test_cannot_set_caching_data_for_text_tiles(self) -> None:
-
tile_fields: List[Dict] = [
{"filters_hash": "123"},
{"refreshing": True},
diff --git a/posthog/models/test/test_entity_model.py b/posthog/models/test/test_entity_model.py
index 55d0de18fc01a..c11e5bd99e9fd 100644
--- a/posthog/models/test/test_entity_model.py
+++ b/posthog/models/test/test_entity_model.py
@@ -1,6 +1,10 @@
from django.test import TestCase
-from posthog.models.entity import TREND_FILTER_TYPE_ACTIONS, TREND_FILTER_TYPE_EVENTS, Entity
+from posthog.models.entity import (
+ TREND_FILTER_TYPE_ACTIONS,
+ TREND_FILTER_TYPE_EVENTS,
+ Entity,
+)
class TestEntity(TestCase):
@@ -16,7 +20,11 @@ def test_inclusion(self):
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
{"key": "email", "value": "test@posthog.com", "type": "person"},
- {"key": "current_url", "value": "test@posthog.com", "type": "element"},
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ },
],
}
)
@@ -24,7 +32,13 @@ def test_inclusion(self):
{
"id": "e1",
"type": TREND_FILTER_TYPE_EVENTS,
- "properties": [{"key": "current_url", "value": "test@posthog.com", "type": "element"}],
+ "properties": [
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ }
+ ],
}
)
@@ -38,7 +52,11 @@ def test_inclusion_unordered(self):
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
{"key": "browser", "value": "chrome", "type": "person"},
- {"key": "current_url", "value": "test@posthog.com", "type": "element"},
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ },
{"key": "email", "value": "test@posthog.com", "type": "person"},
],
}
@@ -47,7 +65,13 @@ def test_inclusion_unordered(self):
{
"id": "e1",
"type": TREND_FILTER_TYPE_EVENTS,
- "properties": [{"key": "current_url", "value": "test@posthog.com", "type": "element"}],
+ "properties": [
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ }
+ ],
}
)
@@ -55,7 +79,6 @@ def test_inclusion_unordered(self):
self.assertFalse(entity1.is_superset(entity2))
def test_equality_with_ids(self):
-
entity1 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_ACTIONS})
entity2 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_ACTIONS})
@@ -83,7 +106,11 @@ def test_equality_with_simple_properties(self):
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
{"key": "email", "value": "test@posthog.com", "type": "person"},
- {"key": "current_url", "value": "test@posthog.com", "type": "element"},
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ },
],
}
)
@@ -92,7 +119,11 @@ def test_equality_with_simple_properties(self):
"id": "e1",
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
- {"key": "current_url", "value": "test@posthog.com", "type": "element"},
+ {
+ "key": "current_url",
+ "value": "test@posthog.com",
+ "type": "element",
+ },
{"key": "email", "value": "test@posthog.com", "type": "person"},
],
}
@@ -105,7 +136,11 @@ def test_equality_with_simple_properties(self):
"id": "e1",
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
- {"key": "current$url", "value": "test@posthog.com", "type": "element"},
+ {
+ "key": "current$url",
+ "value": "test@posthog.com",
+ "type": "element",
+ },
{"key": "email", "value": "test@posthog.com", "type": "person"},
],
}
@@ -120,8 +155,18 @@ def test_equality_with_complex_operator_properties(self):
"type": TREND_FILTER_TYPE_EVENTS,
"properties": [
{"key": "count", "operator": "lt", "value": 12, "type": "element"},
- {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"},
- {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"},
+ {
+ "key": "email",
+ "operator": "in",
+ "value": ["a, b"],
+ "type": "person",
+ },
+ {
+ "key": "selector",
+ "value": [".btn"],
+ "operator": "exact",
+ "type": "element",
+ },
{"key": "test_prop", "value": 1.2, "operator": "gt"},
],
}
@@ -133,8 +178,18 @@ def test_equality_with_complex_operator_properties(self):
"properties": [
{"key": "test_prop", "value": 1.20, "operator": "gt"},
{"key": "count", "operator": "lt", "value": 12, "type": "element"},
- {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"},
- {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"},
+ {
+ "key": "selector",
+ "value": [".btn"],
+ "operator": "exact",
+ "type": "element",
+ },
+ {
+ "key": "email",
+ "operator": "in",
+ "value": ["a, b"],
+ "type": "person",
+ },
],
}
)
@@ -149,8 +204,18 @@ def test_equality_with_complex_operator_properties(self):
"properties": [
{"key": "test_prop", "value": 1.200, "operator": "gt"},
{"key": "count", "operator": "lt", "value": 12, "type": "element"},
- {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"},
- {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"},
+ {
+ "key": "selector",
+ "value": [".btn"],
+ "operator": "exact",
+ "type": "element",
+ },
+ {
+ "key": "email",
+ "operator": "in",
+ "value": ["a, b"],
+ "type": "person",
+ },
],
}
)
@@ -164,8 +229,18 @@ def test_equality_with_complex_operator_properties(self):
"properties": [
{"key": "test_prop", "value": 1.2001, "operator": "gt"},
{"key": "count", "operator": "lt", "value": 12, "type": "element"},
- {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"},
- {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"},
+ {
+ "key": "selector",
+ "value": [".btn"],
+ "operator": "exact",
+ "type": "element",
+ },
+ {
+ "key": "email",
+ "operator": "in",
+ "value": ["a, b"],
+ "type": "person",
+ },
],
}
)
@@ -173,9 +248,19 @@ def test_equality_with_complex_operator_properties(self):
self.assertFalse(entity1.equals(entity2))
def test_equality_with_old_style_and_new_style_properties(self):
- entity1 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": {"key": "value"}})
+ entity1 = Entity(
+ {
+ "id": "e1",
+ "type": TREND_FILTER_TYPE_EVENTS,
+ "properties": {"key": "value"},
+ }
+ )
entity2 = Entity(
- {"id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": [{"key": "key", "value": "value"}]}
+ {
+ "id": "e1",
+ "type": TREND_FILTER_TYPE_EVENTS,
+ "properties": [{"key": "key", "value": "value"}],
+ }
)
self.assertTrue(entity1.equals(entity2))
diff --git a/posthog/models/test/test_event_model.py b/posthog/models/test/test_event_model.py
index d5343e9141949..8c0f2ab8994b3 100644
--- a/posthog/models/test/test_event_model.py
+++ b/posthog/models/test/test_event_model.py
@@ -20,7 +20,10 @@ def test_filter_with_selector_direct_decendant_ordering(self):
self.team,
[
{"event": "$autocapture", "selector": "div > div > a"},
- {"event": "$autocapture", "selector": "div > a.somethingthatdoesntexist"},
+ {
+ "event": "$autocapture",
+ "selector": "div > a.somethingthatdoesntexist",
+ },
],
)
@@ -28,7 +31,10 @@ def test_filter_with_selector_direct_decendant_ordering(self):
def test_filter_with_selector_nth_child(self):
all_events = self._setup_action_selector_events()
- action = _create_action(self.team, [{"event": "$autocapture", "selector": "div > a:nth-child(2)"}])
+ action = _create_action(
+ self.team,
+ [{"event": "$autocapture", "selector": "div > a:nth-child(2)"}],
+ )
self.assertActionEventsMatch(action, [all_events[1]])
@@ -58,7 +64,13 @@ def _setup_action_selector_events(self):
team=self.team,
distinct_id="whatever",
elements=[
- Element(tag_name="a", href="/a-url", nth_child=1, nth_of_type=0, attr_class=["one-class"]),
+ Element(
+ tag_name="a",
+ href="/a-url",
+ nth_child=1,
+ nth_of_type=0,
+ attr_class=["one-class"],
+ ),
Element(tag_name="button", nth_child=0, nth_of_type=0),
Element(tag_name="div", nth_child=0, nth_of_type=0),
Element(tag_name="div", nth_child=0, nth_of_type=0, attr_id="nested"),
@@ -126,14 +138,30 @@ def test_with_normal_filters(self):
team=self.team,
event="$autocapture",
distinct_id="whatever",
- elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="a",
+ href="/a-url",
+ text="some_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
event2_uuid = _create_event(
team=self.team,
event="$autocapture",
distinct_id="whatever2",
- elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="a",
+ href="/a-url",
+ text="some_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
event3_uuid = _create_event(
@@ -141,9 +169,20 @@ def test_with_normal_filters(self):
event="$autocapture",
distinct_id="whatever",
elements=[
- Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="a",
+ href="/a-url-2",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
# make sure elements don't get double counted if they're part of the same event
- Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="div",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
],
)
@@ -152,9 +191,20 @@ def test_with_normal_filters(self):
event="$autocapture",
distinct_id="whatever2",
elements=[
- Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="a",
+ href="/a-url-2",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
# make sure elements don't get double counted if they're part of the same event
- Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="div",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
],
)
@@ -163,13 +213,29 @@ def test_with_normal_filters(self):
team=team2,
event="$autocapture",
distinct_id="whatever2",
- elements=[Element(tag_name="a", href="/a-url", text="some_other_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="a",
+ href="/a-url",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
_create_event(
team=team2,
event="$autocapture",
distinct_id="whatever2",
- elements=[Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="a",
+ href="/a-url-2",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
events = _get_events_for_action(action1)
@@ -184,14 +250,26 @@ def test_with_href_contains(self):
action1 = Action.objects.create(team=self.team)
ActionStep.objects.create(
- event="$autocapture", action=action1, href="/a-url", href_matching="contains", selector="a"
+ event="$autocapture",
+ action=action1,
+ href="/a-url",
+ href_matching="contains",
+ selector="a",
)
event1_uuid = _create_event(
team=self.team,
event="$autocapture",
distinct_id="whatever",
- elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="a",
+ href="/a-url",
+ text="some_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
event2_uuid = _create_event(
@@ -199,7 +277,13 @@ def test_with_href_contains(self):
event="$autocapture",
distinct_id="whatever2",
elements=[
- Element(tag_name="a", href="https://google.com/a-url", text="some_text", nth_child=0, nth_of_type=0)
+ Element(
+ tag_name="a",
+ href="https://google.com/a-url",
+ text="some_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
],
)
@@ -208,9 +292,20 @@ def test_with_href_contains(self):
event="$autocapture",
distinct_id="whatever",
elements=[
- Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="a",
+ href="/a-url-2",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
# make sure elements don't get double counted if they're part of the same event
- Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="div",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
],
)
@@ -219,9 +314,20 @@ def test_with_href_contains(self):
event="$autocapture",
distinct_id="whatever2",
elements=[
- Element(tag_name="a", href="/b-url", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="a",
+ href="/b-url",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
# make sure elements don't get double counted if they're part of the same event
- Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0),
+ Element(
+ tag_name="div",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ ),
],
)
@@ -234,7 +340,12 @@ def test_with_href_contains(self):
def test_with_class(self):
_create_person(distinct_ids=["whatever"], team=self.team)
action1 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$autocapture", action=action1, selector="a.nav-link.active", tag_name="a")
+ ActionStep.objects.create(
+ event="$autocapture",
+ action=action1,
+ selector="a.nav-link.active",
+ tag_name="a",
+ )
event1_uuid = _create_event(
event="$autocapture",
team=self.team,
@@ -250,7 +361,10 @@ def test_with_class(self):
event="$autocapture",
team=self.team,
distinct_id="whatever",
- elements=[Element(tag_name="span", attr_class=None), Element(tag_name="a", attr_class=None)],
+ elements=[
+ Element(tag_name="span", attr_class=None),
+ Element(tag_name="a", attr_class=None),
+ ],
)
events = _get_events_for_action(action1)
@@ -260,7 +374,12 @@ def test_with_class(self):
def test_with_class_with_escaped_symbols(self):
_create_person(distinct_ids=["whatever"], team=self.team)
action1 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$autocapture", action=action1, selector="a.na\\v-link:b@ld", tag_name="a")
+ ActionStep.objects.create(
+ event="$autocapture",
+ action=action1,
+ selector="a.na\\v-link:b@ld",
+ tag_name="a",
+ )
event1_uuid = _create_event(
event="$autocapture",
team=self.team,
@@ -279,7 +398,10 @@ def test_with_class_with_escaped_slashes(self):
_create_person(distinct_ids=["whatever"], team=self.team)
action1 = Action.objects.create(team=self.team)
ActionStep.objects.create(
- event="$autocapture", action=action1, selector="a.na\\\\\\v-link:b@ld", tag_name="a"
+ event="$autocapture",
+ action=action1,
+ selector="a.na\\\\\\v-link:b@ld",
+ tag_name="a",
)
event1_uuid = _create_event(
event="$autocapture",
@@ -323,15 +445,28 @@ def test_filter_events_by_url(self):
ActionStep.objects.create(event="$autocapture", action=action1, href="/a-url-2")
action2 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$autocapture", action=action2, url="123", url_matching=ActionStep.CONTAINS)
+ ActionStep.objects.create(
+ event="$autocapture",
+ action=action2,
+ url="123",
+ url_matching=ActionStep.CONTAINS,
+ )
action3 = Action.objects.create(team=self.team)
ActionStep.objects.create(
- event="$autocapture", action=action3, url="https://posthog.com/%/123", url_matching=ActionStep.CONTAINS
+ event="$autocapture",
+ action=action3,
+ url="https://posthog.com/%/123",
+ url_matching=ActionStep.CONTAINS,
)
action4 = Action.objects.create(team=self.team)
- ActionStep.objects.create(event="$autocapture", action=action4, url="/123$", url_matching=ActionStep.REGEX)
+ ActionStep.objects.create(
+ event="$autocapture",
+ action=action4,
+ url="/123$",
+ url_matching=ActionStep.REGEX,
+ )
_create_event(team=self.team, distinct_id="whatever", event="$autocapture")
event2_uuid = _create_event(
@@ -339,7 +474,14 @@ def test_filter_events_by_url(self):
team=self.team,
distinct_id="whatever",
properties={"$current_url": "https://posthog.com/feedback/123"},
- elements=[Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0)],
+ elements=[
+ Element(
+ tag_name="div",
+ text="some_other_text",
+ nth_child=0,
+ nth_of_type=0,
+ )
+ ],
)
events = _get_events_for_action(action1)
@@ -360,7 +502,12 @@ def test_filter_events_by_url(self):
def test_person_with_different_distinct_id(self):
action_watch_movie = Action.objects.create(team=self.team, name="watched movie")
- ActionStep.objects.create(action=action_watch_movie, tag_name="a", href="/movie", event="$autocapture")
+ ActionStep.objects.create(
+ action=action_watch_movie,
+ tag_name="a",
+ href="/movie",
+ event="$autocapture",
+ )
_create_person(distinct_ids=["anonymous_user", "is_now_signed_up"], team=self.team)
_create_event(
@@ -396,13 +543,19 @@ def test_no_person_leakage_from_other_teams(self):
self.assertEqual(events[0].distinct_id, "anonymous_user")
def test_person_property(self):
- _create_person(team=self.team, distinct_ids=["person1"], properties={"$browser": "Chrome"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1"],
+ properties={"$browser": "Chrome"},
+ )
_create_person(team=self.team, distinct_ids=["person2"])
_create_event(event="$pageview", distinct_id="person1", team=self.team)
_create_event(event="$pageview", distinct_id="person2", team=self.team)
action = Action.objects.create(name="pageview", team=self.team)
ActionStep.objects.create(
- action=action, event="$pageview", properties=[{"key": "$browser", "value": "Chrome", "type": "person"}]
+ action=action,
+ event="$pageview",
+ properties=[{"key": "$browser", "value": "Chrome", "type": "person"}],
)
events = _get_events_for_action(action)
self.assertEqual(len(events), 1)
@@ -482,7 +635,10 @@ def test_selector_attribute(self):
self.assertEqual(selector1.parts[0].direct_descendant, False)
self.assertEqual(selector1.parts[0].unique_order, 0)
- self.assertEqual(selector1.parts[1].data, {"tag_name": "div", "attributes__attr__data-id": "5"})
+ self.assertEqual(
+ selector1.parts[1].data,
+ {"tag_name": "div", "attributes__attr__data-id": "5"},
+ )
self.assertEqual(selector1.parts[1].direct_descendant, True)
self.assertEqual(selector1.parts[1].unique_order, 0)
@@ -518,7 +674,10 @@ def test_class(self):
self.assertEqual(selector1.parts[0].direct_descendant, False)
self.assertEqual(selector1.parts[0].unique_order, 0)
- self.assertEqual(selector1.parts[1].data, {"tag_name": "div", "attr_class__contains": ["classone", "classtwo"]})
+ self.assertEqual(
+ selector1.parts[1].data,
+ {"tag_name": "div", "attr_class__contains": ["classone", "classtwo"]},
+ )
self.assertEqual(selector1.parts[1].direct_descendant, True)
self.assertEqual(selector1.parts[1].unique_order, 0)
diff --git a/posthog/models/test/test_exported_asset_model.py b/posthog/models/test/test_exported_asset_model.py
index 40337c6c50635..f17808caadd54 100644
--- a/posthog/models/test/test_exported_asset_model.py
+++ b/posthog/models/test/test_exported_asset_model.py
@@ -68,7 +68,10 @@ def test_delete_expired_assets(self) -> None:
ExportedAsset.delete_expired_assets()
- assert list(ExportedAsset.objects.all()) == [asset_that_is_not_expired, asset_that_has_no_expiry]
+ assert list(ExportedAsset.objects.all()) == [
+ asset_that_is_not_expired,
+ asset_that_has_no_expiry,
+ ]
assert list(ExportedAsset.objects_including_ttl_deleted.all()) == [
asset_that_is_not_expired,
asset_that_has_no_expiry,
diff --git a/posthog/models/test/test_insight_caching_state.py b/posthog/models/test/test_insight_caching_state.py
index 65b4086f9443f..2727d67f582f6 100644
--- a/posthog/models/test/test_insight_caching_state.py
+++ b/posthog/models/test/test_insight_caching_state.py
@@ -2,7 +2,13 @@
from django.utils.timezone import now
-from posthog.models import Dashboard, DashboardTile, Insight, InsightCachingState, SharingConfiguration
+from posthog.models import (
+ Dashboard,
+ DashboardTile,
+ Insight,
+ InsightCachingState,
+ SharingConfiguration,
+)
from posthog.models.signals import mute_selected_signals
from posthog.test.base import BaseTest
diff --git a/posthog/models/test/test_insight_model.py b/posthog/models/test/test_insight_model.py
index 08d82d0a416ac..2519b8a79cb0a 100644
--- a/posthog/models/test/test_insight_model.py
+++ b/posthog/models/test/test_insight_model.py
@@ -102,7 +102,8 @@ def test_dashboard_with_date_from_changes_filters_hash(self) -> None:
def test_query_hash_matches_same_query_source(self) -> None:
insight_with_query_at_top_level = Insight.objects.create(team=self.team, query={"kind": "EventsQuery"})
insight_with_query_in_source = Insight.objects.create(
- team=self.team, query={"kind": "DataTable", "source": {"kind": "EventsQuery"}}
+ team=self.team,
+ query={"kind": "DataTable", "source": {"kind": "EventsQuery"}},
)
filters_hash_one = generate_insight_cache_key(insight_with_query_at_top_level, None)
@@ -141,25 +142,37 @@ def test_dashboard_with_query_insight_and_filters(self) -> None:
# test that query filters are equal when there are no dashboard filters
{"dateRange": {"date_from": "-14d", "date_to": "-7d"}},
{},
- {"dateRange": {"date_from": "-14d", "date_to": "-7d"}, "properties": None},
+ {
+ "dateRange": {"date_from": "-14d", "date_to": "-7d"},
+ "properties": None,
+ },
),
(
# test that dashboard filters are used when there are no query filters
{},
{"date_from": "-14d", "date_to": "-7d"},
- {"dateRange": {"date_from": "-14d", "date_to": "-7d"}, "properties": None},
+ {
+ "dateRange": {"date_from": "-14d", "date_to": "-7d"},
+ "properties": None,
+ },
),
(
# test that dashboard filters take priority
{"dateRange": {"date_from": "-2d", "date_to": "-1d"}},
{"date_from": "-4d", "date_to": "-3d"},
- {"dateRange": {"date_from": "-4d", "date_to": "-3d"}, "properties": None},
+ {
+ "dateRange": {"date_from": "-4d", "date_to": "-3d"},
+ "properties": None,
+ },
),
(
# test that dashboard filters take priority, even if only one value is set, the other is set to None
{"dateRange": {"date_from": "-14d", "date_to": "-7d"}},
{"date_from": "all"},
- {"dateRange": {"date_from": "all", "date_to": None}, "properties": None},
+ {
+ "dateRange": {"date_from": "all", "date_to": None},
+ "properties": None,
+ },
),
(
# test that if no filters are set then none are outputted
@@ -171,13 +184,19 @@ def test_dashboard_with_query_insight_and_filters(self) -> None:
# test that properties from the query are used when there are no dashboard properties
{"properties": [browser_equals_firefox]},
{},
- {"dateRange": {"date_from": None, "date_to": None}, "properties": [browser_equals_firefox]},
+ {
+ "dateRange": {"date_from": None, "date_to": None},
+ "properties": [browser_equals_firefox],
+ },
),
(
# test that properties from the dashboard are used when there are no query properties
{},
{"properties": [browser_equals_chrome]},
- {"dateRange": {"date_from": None, "date_to": None}, "properties": [browser_equals_chrome]},
+ {
+ "dateRange": {"date_from": None, "date_to": None},
+ "properties": [browser_equals_chrome],
+ },
),
(
# test that properties are merged when set in both query and dashboard
diff --git a/posthog/models/test/test_organization_model.py b/posthog/models/test/test_organization_model.py
index 0f9c29904e4ab..f140dcc862f26 100644
--- a/posthog/models/test/test_organization_model.py
+++ b/posthog/models/test/test_organization_model.py
@@ -29,16 +29,19 @@ def test_plugins_are_preinstalled_on_self_hosted(self, mock_get):
with self.is_cloud(False):
with self.settings(PLUGINS_PREINSTALLED_URLS=["https://github.com/PostHog/helloworldplugin/"]):
new_org, _, _ = Organization.objects.bootstrap(
- self.user, plugins_access_level=Organization.PluginsAccessLevel.INSTALL
+ self.user,
+ plugins_access_level=Organization.PluginsAccessLevel.INSTALL,
)
self.assertEqual(Plugin.objects.filter(organization=new_org, is_preinstalled=True).count(), 1)
self.assertEqual(
- Plugin.objects.filter(organization=new_org, is_preinstalled=True).get().name, "helloworldplugin"
+ Plugin.objects.filter(organization=new_org, is_preinstalled=True).get().name,
+ "helloworldplugin",
)
self.assertEqual(mock_get.call_count, 2)
mock_get.assert_any_call(
- f"https://github.com/PostHog/helloworldplugin/archive/{HELLO_WORLD_PLUGIN_GITHUB_ZIP[0]}.zip", headers={}
+ f"https://github.com/PostHog/helloworldplugin/archive/{HELLO_WORLD_PLUGIN_GITHUB_ZIP[0]}.zip",
+ headers={},
)
@mock.patch("requests.get", side_effect=mocked_plugin_requests_get)
@@ -46,7 +49,8 @@ def test_plugins_are_not_preinstalled_on_cloud(self, mock_get):
with self.is_cloud(True):
with self.settings(PLUGINS_PREINSTALLED_URLS=["https://github.com/PostHog/helloworldplugin/"]):
new_org, _, _ = Organization.objects.bootstrap(
- self.user, plugins_access_level=Organization.PluginsAccessLevel.INSTALL
+ self.user,
+ plugins_access_level=Organization.PluginsAccessLevel.INSTALL,
)
self.assertEqual(Plugin.objects.filter(organization=new_org, is_preinstalled=True).count(), 0)
diff --git a/posthog/models/test/test_person_override_model.py b/posthog/models/test/test_person_override_model.py
index e3365adaf524a..13f3b0a8511ab 100644
--- a/posthog/models/test/test_person_override_model.py
+++ b/posthog/models/test/test_person_override_model.py
@@ -416,7 +416,13 @@ def create_connection(alias=DEFAULT_DB_ALIAS):
def _merge_people(
- team, cursor, old_person_uuid, override_person_uuid, oldest_event, can_lock_event=None, done_event=None
+ team,
+ cursor,
+ old_person_uuid,
+ override_person_uuid,
+ oldest_event,
+ can_lock_event=None,
+ done_event=None,
):
"""
Merge two people together, using the override_person_id as the canonical
@@ -592,7 +598,13 @@ def test_person_override_allow_consecutive_merges(people, team, oldest_event):
with create_connection() as second_cursor:
second_cursor.execute("BEGIN")
- _merge_people(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event)
+ _merge_people(
+ team,
+ second_cursor,
+ override_person.uuid,
+ new_override_person.uuid,
+ oldest_event,
+ )
second_cursor.execute("COMMIT")
assert [_[0] for _ in PersonOverrideMapping.objects.all().values_list("uuid")] == [
@@ -648,12 +660,24 @@ def test_person_override_disallows_concurrent_merge(people, team, oldest_event):
done_t2_event = Event()
t1 = Thread(
target=_merge_people,
- args=(team, first_cursor, old_person.uuid, override_person.uuid, oldest_event),
+ args=(
+ team,
+ first_cursor,
+ old_person.uuid,
+ override_person.uuid,
+ oldest_event,
+ ),
kwargs={"can_lock_event": can_lock_event, "done_event": done_t1_event},
)
t2 = Thread(
target=_merge_people,
- args=(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event),
+ args=(
+ team,
+ second_cursor,
+ override_person.uuid,
+ new_override_person.uuid,
+ oldest_event,
+ ),
kwargs={"done_event": done_t2_event},
)
t1.start()
@@ -708,12 +732,24 @@ def test_person_override_disallows_concurrent_merge_different_order(people, team
done_t2_event = Event()
t1 = Thread(
target=_merge_people,
- args=(team, first_cursor, old_person.uuid, override_person.uuid, oldest_event),
+ args=(
+ team,
+ first_cursor,
+ old_person.uuid,
+ override_person.uuid,
+ oldest_event,
+ ),
kwargs={"done_event": done_t1_event},
)
t2 = Thread(
target=_merge_people,
- args=(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event),
+ args=(
+ team,
+ second_cursor,
+ override_person.uuid,
+ new_override_person.uuid,
+ oldest_event,
+ ),
kwargs={"can_lock_event": can_lock_event, "done_event": done_t2_event},
)
t1.start()
diff --git a/posthog/models/test/test_subscription_model.py b/posthog/models/test/test_subscription_model.py
index bc9bf583e6f15..8552d8bca795a 100644
--- a/posthog/models/test/test_subscription_model.py
+++ b/posthog/models/test/test_subscription_model.py
@@ -80,7 +80,12 @@ def test_generating_token(self):
token = get_unsubscribe_token(subscription, "test2@posthog.com")
assert token.startswith("ey")
- info = jwt.decode(token, "not-so-secret", audience=PosthogJwtAudience.UNSUBSCRIBE.value, algorithms=["HS256"])
+ info = jwt.decode(
+ token,
+ "not-so-secret",
+ audience=PosthogJwtAudience.UNSUBSCRIBE.value,
+ algorithms=["HS256"],
+ )
assert info["id"] == subscription.id
assert info["email"] == "test2@posthog.com"
@@ -137,7 +142,10 @@ def test_unsubscribe_deletes_subscription_if_last_subscriber(self):
def test_complex_rrule_configuration(self):
# Equivalent to the last wednesday and friday of every other month
subscription = self._create_insight_subscription(
- interval=2, frequency="monthly", bysetpos=-1, byweekday=["wednesday", "friday"]
+ interval=2,
+ frequency="monthly",
+ bysetpos=-1,
+ byweekday=["wednesday", "friday"],
)
# Last wed or fri of 01.22 is Fri 28th
@@ -156,7 +164,15 @@ def test_should_work_for_nth_days(self):
interval=1,
frequency="monthly",
bysetpos=3,
- byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"],
+ byweekday=[
+ "monday",
+ "tuesday",
+ "wednesday",
+ "thursday",
+ "friday",
+ "saturday",
+ "sunday",
+ ],
)
subscription.save()
assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
@@ -185,7 +201,15 @@ def test_subscription_summary(self):
subscription = self._create_insight_subscription(
interval=1,
frequency="monthly",
- byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"],
+ byweekday=[
+ "monday",
+ "tuesday",
+ "wednesday",
+ "thursday",
+ "friday",
+ "saturday",
+ "sunday",
+ ],
bysetpos=3,
)
assert subscription.summary == "sent every month on the third day"
diff --git a/posthog/models/test/test_user_model.py b/posthog/models/test/test_user_model.py
index 7b805157266f7..fe26931522eac 100644
--- a/posthog/models/test/test_user_model.py
+++ b/posthog/models/test/test_user_model.py
@@ -12,7 +12,10 @@ def test_create_user_with_distinct_id(self):
def test_analytics_metadata(self):
# One org, one team, anonymized
organization, team, user = User.objects.bootstrap(
- organization_name="Test Org", email="test_org@posthog.com", password="12345678", anonymize_data=True
+ organization_name="Test Org",
+ email="test_org@posthog.com",
+ password="12345678",
+ anonymize_data=True,
)
with self.is_cloud(True):
diff --git a/posthog/models/uploaded_media.py b/posthog/models/uploaded_media.py
index 0a25f452495f1..0161b71beb4f6 100644
--- a/posthog/models/uploaded_media.py
+++ b/posthog/models/uploaded_media.py
@@ -35,11 +35,19 @@ def get_absolute_url(self) -> str:
@classmethod
def save_content(
- cls, team: Team, created_by: User, file_name: str, content_type: str, content: bytes
+ cls,
+ team: Team,
+ created_by: User,
+ file_name: str,
+ content_type: str,
+ content: bytes,
) -> Optional["UploadedMedia"]:
try:
media = UploadedMedia.objects.create(
- team=team, created_by=created_by, file_name=file_name, content_type=content_type
+ team=team,
+ created_by=created_by,
+ file_name=file_name,
+ content_type=content_type,
)
if settings.OBJECT_STORAGE_ENABLED:
save_content_to_object_storage(media, content)
@@ -54,7 +62,11 @@ def save_content(
except ObjectStorageError as ose:
capture_exception(ose)
logger.error(
- "uploaded_media.object-storage-error", file_name=file_name, team=team.pk, exception=ose, exc_info=True
+ "uploaded_media.object-storage-error",
+ file_name=file_name,
+ team=team.pk,
+ exception=ose,
+ exc_info=True,
)
return None
diff --git a/posthog/models/user.py b/posthog/models/user.py
index b385f4b0fc8ab..423936747e2cc 100644
--- a/posthog/models/user.py
+++ b/posthog/models/user.py
@@ -78,7 +78,11 @@ def bootstrap(
organization_fields.setdefault("name", organization_name)
organization = Organization.objects.create(**organization_fields)
user = self.create_user(
- email=email, password=password, first_name=first_name, is_staff=is_staff, **user_fields
+ email=email,
+ password=password,
+ first_name=first_name,
+ is_staff=is_staff,
+ **user_fields,
)
if create_team:
team = create_team(organization, user)
@@ -129,7 +133,10 @@ class User(AbstractUser, UUIDClassicModel):
TOOLBAR_CHOICES = [(DISABLED, DISABLED), (TOOLBAR, TOOLBAR)]
current_organization = models.ForeignKey(
- "posthog.Organization", models.SET_NULL, null=True, related_name="users_currently+"
+ "posthog.Organization",
+ models.SET_NULL,
+ null=True,
+ related_name="users_currently+",
)
current_team = models.ForeignKey("posthog.Team", models.SET_NULL, null=True, related_name="teams_currently+")
email = models.EmailField(_("email address"), unique=True)
@@ -168,7 +175,8 @@ def teams(self):
"""
teams = Team.objects.filter(organization__members=self)
if Organization.objects.filter(
- members=self, available_features__contains=[AvailableFeature.PROJECT_BASED_PERMISSIONING]
+ members=self,
+ available_features__contains=[AvailableFeature.PROJECT_BASED_PERMISSIONING],
).exists():
try:
from ee.models import ExplicitTeamMembership
@@ -210,7 +218,10 @@ def team(self) -> Optional[Team]:
return self.current_team
def join(
- self, *, organization: Organization, level: OrganizationMembership.Level = OrganizationMembership.Level.MEMBER
+ self,
+ *,
+ organization: Organization,
+ level: OrganizationMembership.Level = OrganizationMembership.Level.MEMBER,
) -> OrganizationMembership:
with transaction.atomic():
membership = OrganizationMembership.objects.create(user=self, organization=organization, level=level)
diff --git a/posthog/models/user_scene_personalisation.py b/posthog/models/user_scene_personalisation.py
index cf4d3c6ed724f..8b745f67a6808 100644
--- a/posthog/models/user_scene_personalisation.py
+++ b/posthog/models/user_scene_personalisation.py
@@ -8,10 +8,17 @@ class UserScenePersonalisation(UUIDModel):
dashboard: models.ForeignKey = models.ForeignKey("Dashboard", on_delete=models.CASCADE, null=True, blank=True)
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True, blank=True)
user: models.ForeignKey = models.ForeignKey(
- "User", on_delete=models.CASCADE, null=True, blank=True, related_name="scene_personalisation"
+ "User",
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ related_name="scene_personalisation",
)
class Meta:
constraints = [
- models.UniqueConstraint(fields=["team", "user", "scene"], name="posthog_unique_scene_personalisation")
+ models.UniqueConstraint(
+ fields=["team", "user", "scene"],
+ name="posthog_unique_scene_personalisation",
+ )
]
diff --git a/posthog/models/utils.py b/posthog/models/utils.py
index 0452084be4c4d..b00a87eb881c5 100644
--- a/posthog/models/utils.py
+++ b/posthog/models/utils.py
@@ -78,7 +78,7 @@ def get_series(cls, unix_time_ms: int) -> int:
@classmethod
def is_valid_uuid(cls, candidate: Any) -> bool:
- if type(candidate) != str:
+ if not isinstance(candidate, str):
return False
hex = candidate.replace("urn:", "").replace("uuid:", "")
hex = hex.strip("{}").replace("-", "")
@@ -205,7 +205,9 @@ def create_with_slug(create_func: Callable[..., T], default_slug: str = "", *arg
def get_deferred_field_set_for_model(
- model: Type[models.Model], fields_not_deferred: Set[str] = set(), field_prefix: str = ""
+ model: Type[models.Model],
+ fields_not_deferred: Set[str] = set(),
+ field_prefix: str = "",
) -> Set[str]:
"""Return a set of field names to be deferred for a given model. Used with `.defer()` after `select_related`
diff --git a/posthog/permissions.py b/posthog/permissions.py
index 2a6339601cd9a..229a69a311b50 100644
--- a/posthog/permissions.py
+++ b/posthog/permissions.py
@@ -119,7 +119,6 @@ class OrganizationAdminWritePermissions(BasePermission):
message = "Your organization access level is insufficient."
def has_permission(self, request: Request, view) -> bool:
-
if request.method in SAFE_METHODS:
return True
@@ -136,7 +135,6 @@ def has_permission(self, request: Request, view) -> bool:
)
def has_object_permission(self, request: Request, view, object: Model) -> bool:
-
if request.method in SAFE_METHODS:
return True
@@ -156,7 +154,7 @@ class TeamMemberAccessPermission(BasePermission):
def has_permission(self, request, view) -> bool:
try:
- view.team
+ view.team # noqa: B018
except Team.DoesNotExist:
return True # This will be handled as a 404 in the viewset
requesting_level = view.user_permissions.current_team.effective_membership_level
diff --git a/posthog/plugins/site.py b/posthog/plugins/site.py
index 18b1dcbc947a4..9cb2b3023f80e 100644
--- a/posthog/plugins/site.py
+++ b/posthog/plugins/site.py
@@ -32,7 +32,13 @@ def get_transpiled_site_source(id: int, token: str) -> Optional[WebJsSource]:
plugin__pluginsourcefile__filename="site.ts",
plugin__pluginsourcefile__status=PluginSourceFile.Status.TRANSPILED,
)
- .values_list("id", "plugin__pluginsourcefile__transpiled", "web_token", "plugin__config_schema", "config")
+ .values_list(
+ "id",
+ "plugin__pluginsourcefile__transpiled",
+ "web_token",
+ "plugin__config_schema",
+ "config",
+ )
.first()
)
@@ -53,7 +59,13 @@ def get_decide_site_apps(team: "Team", using_database: str = "default") -> List[
plugin__pluginsourcefile__filename="site.ts",
plugin__pluginsourcefile__status=PluginSourceFile.Status.TRANSPILED,
)
- .values_list("id", "web_token", "plugin__pluginsourcefile__updated_at", "plugin__updated_at", "updated_at")
+ .values_list(
+ "id",
+ "web_token",
+ "plugin__pluginsourcefile__updated_at",
+ "plugin__updated_at",
+ "updated_at",
+ )
.all()
)
diff --git a/posthog/plugins/test/mock.py b/posthog/plugins/test/mock.py
index 91c2a473e7b42..04c61b17cf063 100644
--- a/posthog/plugins/test/mock.py
+++ b/posthog/plugins/test/mock.py
@@ -45,19 +45,34 @@ def ok(self):
if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=":
return MockJSONResponse(
- [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}],
+ [
+ {
+ "sha": "MOCKLATESTCOMMIT",
+ "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT",
+ }
+ ],
200,
)
if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=":
return MockJSONResponse(
- [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}],
+ [
+ {
+ "sha": "MOCKLATESTCOMMIT",
+ "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT",
+ }
+ ],
200,
)
if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo":
return MockJSONResponse(
- [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}],
+ [
+ {
+ "sha": "MOCKLATESTCOMMIT",
+ "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT",
+ }
+ ],
200,
)
diff --git a/posthog/plugins/test/test_utils.py b/posthog/plugins/test/test_utils.py
index d597db91017c5..d2f971073d481 100644
--- a/posthog/plugins/test/test_utils.py
+++ b/posthog/plugins/test/test_utils.py
@@ -41,7 +41,10 @@ def test_parse_github_urls(self, mock_get):
self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT")
self.assertEqual(parsed_url.get("path", None), None)
self.assertEqual(mock_get.call_count, 1)
- mock_get.assert_called_with("https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", headers={})
+ mock_get.assert_called_with(
+ "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=",
+ headers={},
+ )
mock_get.reset_mock()
parsed_url = parse_url("https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e")
@@ -54,7 +57,8 @@ def test_parse_github_urls(self, mock_get):
mock_get.reset_mock()
parsed_url = parse_url(
- "https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e", get_latest_if_none=True
+ "https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "github")
self.assertEqual(parsed_url["user"], "PostHog")
@@ -83,11 +87,15 @@ def test_parse_github_urls(self, mock_get):
self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT")
self.assertEqual(parsed_url.get("path", None), None)
self.assertEqual(mock_get.call_count, 1)
- mock_get.assert_called_with("https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=", headers={})
+ mock_get.assert_called_with(
+ "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=",
+ headers={},
+ )
mock_get.reset_mock()
parsed_url = parse_url(
- "https://github.com/PostHog/posthog/tree/main/test/path/in/repo", get_latest_if_none=True
+ "https://github.com/PostHog/posthog/tree/main/test/path/in/repo",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "github")
self.assertEqual(parsed_url["user"], "PostHog")
@@ -96,7 +104,8 @@ def test_parse_github_urls(self, mock_get):
self.assertEqual(parsed_url["path"], "test/path/in/repo")
self.assertEqual(mock_get.call_count, 1)
mock_get.assert_called_with(
- "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo", headers={}
+ "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo",
+ headers={},
)
mock_get.reset_mock()
@@ -165,14 +174,18 @@ def test_parse_github_urls(self, mock_get):
self.assertEqual(mock_get.call_count, 0)
mock_get.reset_mock()
- parsed_url = parse_url("https://github.com/PostHog/posthog?private_token=TOKEN", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://github.com/PostHog/posthog?private_token=TOKEN",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "github")
self.assertEqual(parsed_url["user"], "PostHog")
self.assertEqual(parsed_url["repo"], "posthog")
self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT")
self.assertEqual(parsed_url.get("path", None), None)
mock_get.assert_called_with(
- "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", headers={"Authorization": "Bearer TOKEN"}
+ "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=",
+ headers={"Authorization": "Bearer TOKEN"},
)
self.assertEqual(mock_get.call_count, 1)
mock_get.reset_mock()
@@ -205,7 +218,10 @@ def test_parse_github_urls(self, mock_get):
self.assertEqual(mock_get.call_count, 1)
mock_get.reset_mock()
- parsed_url = parse_url("https://github.com/PostHog/posthog?private_token=TOKEN", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://github.com/PostHog/posthog?private_token=TOKEN",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "github")
self.assertEqual(parsed_url["user"], "PostHog")
self.assertEqual(parsed_url["repo"], "posthog")
@@ -233,34 +249,47 @@ def test_parse_gitlab_urls(self, mock_get):
self.assertEqual(parsed_url.get("private_token", None), None)
self.assertEqual(mock_get.call_count, 1)
mock_get.assert_called_with(
- "https://gitlab.com/api/v4/projects/mariusandra%2Fhelloworldplugin/repository/commits", headers={}
+ "https://gitlab.com/api/v4/projects/mariusandra%2Fhelloworldplugin/repository/commits",
+ headers={},
)
parsed_url = parse_url(
"https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/tree/master"
)
- self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline")
+ self.assertEqual(
+ parsed_url["project"],
+ "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline",
+ )
self.assertEqual(parsed_url["tag"], "master")
self.assertEqual(mock_get.call_count, 1)
parsed_url = parse_url(
"https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/tree/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1"
)
- self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline")
+ self.assertEqual(
+ parsed_url["project"],
+ "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline",
+ )
self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1")
self.assertEqual(mock_get.call_count, 1)
parsed_url = parse_url(
"https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/commit/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1"
)
- self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline")
+ self.assertEqual(
+ parsed_url["project"],
+ "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline",
+ )
self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1")
self.assertEqual(mock_get.call_count, 1)
parsed_url = parse_url(
"https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/archive/master/openshift-custom-pipeline-master.zip"
)
- self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline")
+ self.assertEqual(
+ parsed_url["project"],
+ "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline",
+ )
self.assertEqual(parsed_url["tag"], "master")
self.assertEqual(mock_get.call_count, 1)
@@ -273,7 +302,8 @@ def test_parse_gitlab_urls(self, mock_get):
self.assertEqual(mock_get.call_count, 1)
parsed_url = parse_url(
- "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", get_latest_if_none=True
+ "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "gitlab")
self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin")
@@ -288,7 +318,10 @@ def test_parse_gitlab_urls(self, mock_get):
parsed_url = parse_url(
"https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/commit/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1?private_token=PRIVATE"
)
- self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline")
+ self.assertEqual(
+ parsed_url["project"],
+ "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline",
+ )
self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1")
self.assertEqual(parsed_url["private_token"], "PRIVATE")
self.assertEqual(mock_get.call_count, 2)
@@ -296,7 +329,8 @@ def test_parse_gitlab_urls(self, mock_get):
# default global token
with self.settings(GITLAB_TOKEN="MY_GITLAB_TOKEN"):
parsed_url = parse_url(
- "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", get_latest_if_none=True
+ "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "gitlab")
self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin")
@@ -308,7 +342,10 @@ def test_parse_gitlab_urls(self, mock_get):
headers={"Authorization": "Bearer PRIVATE"},
)
- parsed_url = parse_url("https://gitlab.com/mariusandra/helloworldplugin", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://gitlab.com/mariusandra/helloworldplugin",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "gitlab")
self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin")
self.assertEqual(parsed_url["tag"], "ff78cbe1d70316055c610a962a8355a4616d874b")
@@ -332,14 +369,20 @@ def test_parse_npm_urls(self, mock_get):
self.assertEqual(parsed_url.get("tag", None), None)
self.assertEqual(mock_get.call_count, 0)
- parsed_url = parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://www.npmjs.com/package/posthog-helloworld-plugin",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin")
self.assertEqual(parsed_url["tag"], "MOCK")
self.assertEqual(mock_get.call_count, 1)
mock_get.assert_called_with("https://registry.npmjs.org/posthog-helloworld-plugin/latest", headers={})
- parsed_url = parse_url("https://www.npmjs.com/package/@posthog/helloworldplugin", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://www.npmjs.com/package/@posthog/helloworldplugin",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "@posthog/helloworldplugin")
self.assertEqual(parsed_url["tag"], "MOCK")
@@ -359,7 +402,8 @@ def test_parse_npm_urls(self, mock_get):
self.assertEqual(mock_get.call_count, 2)
parsed_url = parse_url(
- "https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0", get_latest_if_none=True
+ "https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin")
@@ -368,7 +412,8 @@ def test_parse_npm_urls(self, mock_get):
# private tokens
parsed_url = parse_url(
- "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", get_latest_if_none=True
+ "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin")
@@ -376,7 +421,8 @@ def test_parse_npm_urls(self, mock_get):
self.assertEqual(parsed_url["private_token"], "TOKEN")
self.assertEqual(mock_get.call_count, 3)
mock_get.assert_called_with(
- "https://registry.npmjs.org/posthog-helloworld-plugin/latest", headers={"Authorization": "Bearer TOKEN"}
+ "https://registry.npmjs.org/posthog-helloworld-plugin/latest",
+ headers={"Authorization": "Bearer TOKEN"},
)
parsed_url = parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0?private_token=TOKEN")
@@ -396,7 +442,8 @@ def test_parse_npm_urls(self, mock_get):
# default global token
with self.settings(NPM_TOKEN="MY_NPM_TOKEN"):
parsed_url = parse_url(
- "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", get_latest_if_none=True
+ "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN",
+ get_latest_if_none=True,
)
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin")
@@ -404,10 +451,14 @@ def test_parse_npm_urls(self, mock_get):
self.assertEqual(parsed_url["private_token"], "TOKEN")
self.assertEqual(mock_get.call_count, 4)
mock_get.assert_called_with(
- "https://registry.npmjs.org/posthog-helloworld-plugin/latest", headers={"Authorization": "Bearer TOKEN"}
+ "https://registry.npmjs.org/posthog-helloworld-plugin/latest",
+ headers={"Authorization": "Bearer TOKEN"},
)
- parsed_url = parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin", get_latest_if_none=True)
+ parsed_url = parse_url(
+ "https://www.npmjs.com/package/posthog-helloworld-plugin",
+ get_latest_if_none=True,
+ )
self.assertEqual(parsed_url["type"], "npm")
self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin")
self.assertEqual(parsed_url["tag"], "MOCK")
@@ -488,8 +539,14 @@ def test_download_plugin_archive_github(self, mock_get):
"https://github.com/PostHog/helloworldplugin/archive/f5a9ea85adaafe7c99014b7e8e0982c447631d54.zip",
headers={},
)
- self.assertEqual(zip_file.getinfo("helloworldplugin-imageless-version/index.js").CRC, 1913611967)
- self.assertEqual(zip_file.getinfo("helloworldplugin-imageless-version/plugin.json").CRC, 2713501883)
+ self.assertEqual(
+ zip_file.getinfo("helloworldplugin-imageless-version/index.js").CRC,
+ 1913611967,
+ )
+ self.assertEqual(
+ zip_file.getinfo("helloworldplugin-imageless-version/plugin.json").CRC,
+ 2713501883,
+ )
def test_download_plugin_archive_gitlab(self, mock_get):
plugin_gitlab = download_plugin_archive(
@@ -542,7 +599,8 @@ def test_download_plugin_archive_npm(self, mock_get):
self.assertEqual(plugin_npm_tgz, base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1]))
self.assertEqual(mock_get.call_count, 1)
mock_get.assert_called_with(
- "https://registry.npmjs.org/posthog-helloworld-plugin/-/posthog-helloworld-plugin-0.0.0.tgz", headers={}
+ "https://registry.npmjs.org/posthog-helloworld-plugin/-/posthog-helloworld-plugin-0.0.0.tgz",
+ headers={},
)
plugin_npm_tgz = download_plugin_archive(
@@ -576,21 +634,24 @@ def test_download_plugin_archive_npm(self, mock_get):
def test_get_file_from_archive(self, mock_get):
plugin_json_zip = cast(
- dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITHUB_ZIP[1]), "plugin.json")
+ dict,
+ get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITHUB_ZIP[1]), "plugin.json"),
)
self.assertEqual(plugin_json_zip["name"], "helloworldplugin")
self.assertEqual(plugin_json_zip["url"], "https://github.com/PostHog/helloworldplugin")
self.assertEqual(plugin_json_zip["description"], "Greet the World and Foo a Bar, JS edition!")
plugin_json_zip = cast(
- dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITLAB_ZIP[1]), "plugin.json")
+ dict,
+ get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITLAB_ZIP[1]), "plugin.json"),
)
self.assertEqual(plugin_json_zip["name"], "hellojsplugin")
self.assertEqual(plugin_json_zip["url"], "https://github.com/PosthHog/helloworldplugin")
self.assertEqual(plugin_json_zip["description"], "Greet the World and Foo a Bar, JS edition!")
plugin_json_tgz = cast(
- dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1]), "plugin.json")
+ dict,
+ get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1]), "plugin.json"),
)
self.assertEqual(plugin_json_tgz["name"], "helloworldplugin")
self.assertEqual(plugin_json_tgz["url"], "https://github.com/PostHog/helloworldplugin")
diff --git a/posthog/plugins/utils.py b/posthog/plugins/utils.py
index 9ce8d867acbc4..45eeb5ca94843 100644
--- a/posthog/plugins/utils.py
+++ b/posthog/plugins/utils.py
@@ -47,7 +47,10 @@ def parse_github_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O
parsed["tag"] = "refs/tags/{}".format(parsed["tag"])
elif not re.match(r"^[a-f0-9]{40}$", parsed["tag"] or ""):
commits_url = "https://api.github.com/repos/{}/{}/commits?sha={}&path={}".format(
- parsed["user"], parsed["repo"], parsed["tag"] or "", parsed["path"] or ""
+ parsed["user"],
+ parsed["repo"],
+ parsed["tag"] or "",
+ parsed["path"] or "",
)
commits = requests.get(commits_url, headers=headers).json()
@@ -95,7 +98,8 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O
parsed["tag"] = path.split("/")[1]
parsed["root_url"] = "https://gitlab.com/{}{}".format(
- parsed["project"], "?private_token={}".format(private_token) if private_token else ""
+ parsed["project"],
+ "?private_token={}".format(private_token) if private_token else "",
)
if get_latest_if_none and not parsed["tag"]:
@@ -115,7 +119,9 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O
if parsed["tag"]:
parsed["tagged_url"] = "https://gitlab.com/{}/-/tree/{}{}".format(
- parsed["project"], parsed["tag"], "?private_token={}".format(private_token) if private_token else ""
+ parsed["project"],
+ parsed["tag"],
+ "?private_token={}".format(private_token) if private_token else "",
)
return parsed
@@ -124,7 +130,8 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O
def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Optional[str]]]:
url, private_token = split_url_and_private_token(url)
match = re.search(
- r"^https?://(?:www\.)?npmjs\.com/package/([@a-z0-9_-]+(/[a-z0-9_-]+)?)?/?(v/([A-Za-z0-9_.-]+)/?|)$", url
+ r"^https?://(?:www\.)?npmjs\.com/package/([@a-z0-9_-]+(/[a-z0-9_-]+)?)?/?(v/([A-Za-z0-9_.-]+)/?|)$",
+ url,
)
if not match:
return None
@@ -136,19 +143,25 @@ def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Opti
}
parsed["root_url"] = "https://www.npmjs.com/package/{}{}".format(
- parsed["pkg"], "?private_token={}".format(private_token) if private_token else ""
+ parsed["pkg"],
+ "?private_token={}".format(private_token) if private_token else "",
)
if get_latest_if_none and not parsed["tag"]:
try:
token = private_token or settings.NPM_TOKEN
headers = {"Authorization": "Bearer {}".format(token)} if token else {}
- details = requests.get("https://registry.npmjs.org/{}/latest".format(parsed["pkg"]), headers=headers).json()
+ details = requests.get(
+ "https://registry.npmjs.org/{}/latest".format(parsed["pkg"]),
+ headers=headers,
+ ).json()
parsed["tag"] = details["version"]
except Exception:
raise Exception("Could not get latest version for: {}".format(url))
if parsed["tag"]:
parsed["tagged_url"] = "https://www.npmjs.com/package/{}/v/{}{}".format(
- parsed["pkg"], parsed["tag"], "?private_token={}".format(private_token) if private_token else ""
+ parsed["pkg"],
+ parsed["tag"],
+ "?private_token={}".format(private_token) if private_token else "",
)
return parsed
@@ -184,7 +197,9 @@ def download_plugin_archive(url: str, tag: Optional[str] = None) -> bytes:
if not (tag or parsed_url.get("tag", None)):
raise Exception("No GitHub tag given!")
url = "https://github.com/{user}/{repo}/archive/{tag}.zip".format(
- user=parsed_url["user"], repo=parsed_url["repo"], tag=tag or parsed_url["tag"]
+ user=parsed_url["user"],
+ repo=parsed_url["repo"],
+ tag=tag or parsed_url["tag"],
)
token = parsed_url["private_token"] or settings.GITHUB_TOKEN
if token:
@@ -259,9 +274,9 @@ def get_file_from_zip_archive(archive: bytes, filename: str, *, json_parse: bool
file_bytes = reader.read()
if json_parse:
return json.loads(file_bytes)
- if type(file_bytes) == bytes:
+ if isinstance(file_bytes, bytes):
return file_bytes.decode("utf-8")
- return str(file_bytes)
+ return str(file_bytes) # type: ignore
def get_file_from_tgz_archive(archive: bytes, filename, *, json_parse: bool) -> Any:
diff --git a/posthog/queries/actor_base_query.py b/posthog/queries/actor_base_query.py
index 706829dfba191..396c216f9c01d 100644
--- a/posthog/queries/actor_base_query.py
+++ b/posthog/queries/actor_base_query.py
@@ -96,7 +96,7 @@ def is_aggregating_by_groups(self) -> bool:
def get_actors(
self,
- ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int]:
+ ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]:
"""Get actors in data model and dict formats. Builds query and executes"""
self._filter.team = self._team
query, params = self.actor_query()
@@ -109,13 +109,20 @@ def get_actors(
)
actors, serialized_actors = self.get_actors_from_result(raw_result)
- if hasattr(self._filter, "include_recordings") and self._filter.include_recordings and self._filter.insight in [INSIGHT_PATHS, INSIGHT_TRENDS, INSIGHT_FUNNELS]: # type: ignore
+ if (
+ hasattr(self._filter, "include_recordings")
+ and self._filter.include_recordings # type: ignore
+ and self._filter.insight in [INSIGHT_PATHS, INSIGHT_TRENDS, INSIGHT_FUNNELS]
+ ):
serialized_actors = self.add_matched_recordings_to_serialized_actors(serialized_actors, raw_result)
return actors, serialized_actors, len(raw_result)
def query_for_session_ids_with_recordings(
- self, session_ids: Set[str], date_from: datetime | None, date_to: datetime | None
+ self,
+ session_ids: Set[str],
+ date_from: datetime | None,
+ date_to: datetime | None,
) -> Set[str]:
"""Filters a list of session_ids to those that actually have recordings"""
query = """
@@ -154,7 +161,9 @@ def query_for_session_ids_with_recordings(
return {row[0] for row in raw_result}
def add_matched_recordings_to_serialized_actors(
- self, serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]], raw_result
+ self,
+ serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]],
+ raw_result,
) -> Union[List[SerializedGroup], List[SerializedPerson]]:
all_session_ids = set()
@@ -172,7 +181,9 @@ def add_matched_recordings_to_serialized_actors(
# Prune out deleted recordings
session_ids_with_deleted_recordings = set(
SessionRecording.objects.filter(
- team=self._team, session_id__in=session_ids_with_all_recordings, deleted=True
+ team=self._team,
+ session_id__in=session_ids_with_all_recordings,
+ deleted=True,
).values_list("session_id", flat=True)
)
session_ids_with_recordings = session_ids_with_all_recordings.difference(session_ids_with_deleted_recordings)
@@ -206,7 +217,7 @@ def add_matched_recordings_to_serialized_actors(
def get_actors_from_result(
self, raw_result
- ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]]]:
+ ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]],]:
actors: Union[QuerySet[Person], QuerySet[Group]]
serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]]
@@ -215,7 +226,10 @@ def get_actors_from_result(
if self.is_aggregating_by_groups:
actors, serialized_actors = get_groups(
- self._team.pk, cast(int, self.aggregation_group_type_index), actor_ids, value_per_actor_id
+ self._team.pk,
+ cast(int, self.aggregation_group_type_index),
+ actor_ids,
+ value_per_actor_id,
)
else:
actors, serialized_actors = get_people(self._team, actor_ids, value_per_actor_id)
@@ -223,13 +237,19 @@ def get_actors_from_result(
if self.ACTOR_VALUES_INCLUDED:
# We fetched actors from Postgres in get_groups/get_people, so `ORDER BY actor_value DESC` no longer holds
# We need .sort() to restore this order
- serialized_actors.sort(key=lambda actor: cast(float, actor["value_at_data_point"]), reverse=True)
+ serialized_actors.sort(
+ key=lambda actor: cast(float, actor["value_at_data_point"]),
+ reverse=True,
+ )
return actors, serialized_actors
def get_groups(
- team_id: int, group_type_index: int, group_ids: List[Any], value_per_actor_id: Optional[Dict[str, float]] = None
+ team_id: int,
+ group_type_index: int,
+ group_ids: List[Any],
+ value_per_actor_id: Optional[Dict[str, float]] = None,
) -> Tuple[QuerySet[Group], List[SerializedGroup]]:
"""Get groups from raw SQL results in data model and dict formats"""
groups: QuerySet[Group] = Group.objects.filter(
@@ -239,7 +259,10 @@ def get_groups(
def get_people(
- team: Team, people_ids: List[Any], value_per_actor_id: Optional[Dict[str, float]] = None, distinct_id_limit=1000
+ team: Team,
+ people_ids: List[Any],
+ value_per_actor_id: Optional[Dict[str, float]] = None,
+ distinct_id_limit=1000,
) -> Tuple[QuerySet[Person], List[SerializedPerson]]:
"""Get people from raw SQL results in data model and dict formats"""
distinct_id_subquery = Subquery(
@@ -263,7 +286,9 @@ def get_people(
def serialize_people(
- team: Team, data: Union[QuerySet[Person], List[Person]], value_per_actor_id: Optional[Dict[str, float]] = None
+ team: Team,
+ data: Union[QuerySet[Person], List[Person]],
+ value_per_actor_id: Optional[Dict[str, float]] = None,
) -> List[SerializedPerson]:
from posthog.api.person import get_person_name
diff --git a/posthog/queries/app_metrics/app_metrics.py b/posthog/queries/app_metrics/app_metrics.py
index e6c36b799ff1e..26f91f626ec0b 100644
--- a/posthog/queries/app_metrics/app_metrics.py
+++ b/posthog/queries/app_metrics/app_metrics.py
@@ -13,7 +13,10 @@
from posthog.models.event.util import format_clickhouse_timestamp
from posthog.models.filters.mixins.base import IntervalType
from posthog.models.team.team import Team
-from posthog.queries.app_metrics.serializers import AppMetricsErrorsRequestSerializer, AppMetricsRequestSerializer
+from posthog.queries.app_metrics.serializers import (
+ AppMetricsErrorsRequestSerializer,
+ AppMetricsRequestSerializer,
+)
from posthog.queries.util import format_ch_timestamp, get_time_in_seconds_for_period
from posthog.utils import relative_date_parse
@@ -27,7 +30,10 @@ def __init__(self, team: Team):
def run(self):
results = sync_execute(
self.QUERY,
- {"team_id": self.team.pk, "from_date": format_clickhouse_timestamp(datetime.now() - timedelta(hours=24))},
+ {
+ "team_id": self.team.pk,
+ "from_date": format_clickhouse_timestamp(datetime.now() - timedelta(hours=24)),
+ },
)
return dict(results)
@@ -80,7 +86,9 @@ def query(self):
@property
def date_from(self):
return relative_date_parse(
- self.filter.validated_data.get("date_from"), self.team.timezone_info, always_truncate=True
+ self.filter.validated_data.get("date_from"),
+ self.team.timezone_info,
+ always_truncate=True,
)
@property
@@ -121,7 +129,12 @@ def run(self):
class AppMetricsErrorDetailsQuery:
QUERY = QUERY_APP_METRICS_ERROR_DETAILS
- def __init__(self, team: Team, plugin_config_id: int, filter: AppMetricsErrorsRequestSerializer):
+ def __init__(
+ self,
+ team: Team,
+ plugin_config_id: int,
+ filter: AppMetricsErrorsRequestSerializer,
+ ):
self.team = team
self.plugin_config_id = plugin_config_id
self.filter = filter
diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py
index 484f01546001b..cbf22d480156b 100644
--- a/posthog/queries/app_metrics/historical_exports.py
+++ b/posthog/queries/app_metrics/historical_exports.py
@@ -7,7 +7,10 @@
from posthog.models.activity_logging.activity_log import ActivityLog
from posthog.models.plugin import PluginStorage
from posthog.models.team.team import Team
-from posthog.queries.app_metrics.app_metrics import AppMetricsErrorsQuery, AppMetricsQuery
+from posthog.queries.app_metrics.app_metrics import (
+ AppMetricsErrorsQuery,
+ AppMetricsQuery,
+)
from posthog.queries.app_metrics.serializers import AppMetricsRequestSerializer
diff --git a/posthog/queries/app_metrics/test/test_app_metrics.py b/posthog/queries/app_metrics/test/test_app_metrics.py
index affe411457116..95eeb13bc09f4 100644
--- a/posthog/queries/app_metrics/test/test_app_metrics.py
+++ b/posthog/queries/app_metrics/test/test_app_metrics.py
@@ -15,7 +15,10 @@
AppMetricsQuery,
TeamPluginsDeliveryRateQuery,
)
-from posthog.queries.app_metrics.serializers import AppMetricsErrorsRequestSerializer, AppMetricsRequestSerializer
+from posthog.queries.app_metrics.serializers import (
+ AppMetricsErrorsRequestSerializer,
+ AppMetricsRequestSerializer,
+)
from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
from posthog.utils import cast_timestamp_or_now
@@ -100,7 +103,11 @@ def test_query_delivery_rate(self):
@freeze_time("2021-12-05T13:23:00Z")
def test_ignores_out_of_bound_metrics(self):
create_app_metric(
- team_id=-1, category="processEvent", plugin_config_id=3, timestamp="2021-12-05T00:10:00Z", successes=5
+ team_id=-1,
+ category="processEvent",
+ plugin_config_id=3,
+ timestamp="2021-12-05T00:10:00Z",
+ successes=5,
)
create_app_metric(
team_id=self.team.pk,
@@ -269,7 +276,11 @@ def test_ignores_unrelated_data(self):
# Negative examples
# Different team
create_app_metric(
- team_id=-1, category="processEvent", plugin_config_id=3, timestamp="2021-12-05T13:10:00Z", failures=1
+ team_id=-1,
+ category="processEvent",
+ plugin_config_id=3,
+ timestamp="2021-12-05T13:10:00Z",
+ failures=1,
)
# Different pluginConfigId
create_app_metric(
@@ -544,7 +555,9 @@ def test_error_details_query(self):
)
filter = make_filter(
- serializer_klass=AppMetricsErrorsRequestSerializer, category="processEvent", error_type="SomeError"
+ serializer_klass=AppMetricsErrorsRequestSerializer,
+ category="processEvent",
+ error_type="SomeError",
)
results = AppMetricsErrorDetailsQuery(self.team, 3, filter).run()
@@ -679,7 +692,9 @@ def test_ignores_unrelated_data(self):
)
filter = make_filter(
- serializer_klass=AppMetricsErrorsRequestSerializer, category="processEvent", error_type="SomeError"
+ serializer_klass=AppMetricsErrorsRequestSerializer,
+ category="processEvent",
+ error_type="SomeError",
)
results = AppMetricsErrorDetailsQuery(self.team, 3, filter).run()
diff --git a/posthog/queries/app_metrics/test/test_historical_exports.py b/posthog/queries/app_metrics/test/test_historical_exports.py
index 2e9ffcb41a7bb..6bed36981931c 100644
--- a/posthog/queries/app_metrics/test/test_historical_exports.py
+++ b/posthog/queries/app_metrics/test/test_historical_exports.py
@@ -8,9 +8,17 @@
from posthog.models.plugin import Plugin, PluginConfig, PluginStorage
from posthog.models.team.team import Team
from posthog.models.utils import UUIDT
-from posthog.queries.app_metrics.historical_exports import historical_export_metrics, historical_exports_activity
+from posthog.queries.app_metrics.historical_exports import (
+ historical_export_metrics,
+ historical_exports_activity,
+)
from posthog.queries.app_metrics.test.test_app_metrics import create_app_metric
-from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries, snapshot_postgres_queries
+from posthog.test.base import (
+ BaseTest,
+ ClickhouseTestMixin,
+ snapshot_clickhouse_queries,
+ snapshot_postgres_queries,
+)
SAMPLE_PAYLOAD = {"dateRange": ["2021-06-10", "2022-06-12"], "parallelism": 1}
@@ -32,12 +40,18 @@ def test_historical_exports_activity_for_not_finished_export(self):
activity="job_triggered",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload=SAMPLE_PAYLOAD,
+ ),
),
)
PluginStorage.objects.create(
- plugin_config_id=self.plugin_config.pk, key="EXPORT_COORDINATION", value=json.dumps({"progress": 0.33})
+ plugin_config_id=self.plugin_config.pk,
+ key="EXPORT_COORDINATION",
+ value=json.dumps({"progress": 0.33}),
)
activities = historical_exports_activity(self.team.pk, self.plugin_config.pk)
@@ -61,7 +75,11 @@ def test_historical_exports_activity_for_finished_export(self):
activity="job_triggered",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload=SAMPLE_PAYLOAD,
+ ),
),
)
with freeze_time("2021-08-25T13:00:00Z"):
@@ -69,7 +87,11 @@ def test_historical_exports_activity_for_finished_export(self):
activity="export_success",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload={}),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload={},
+ ),
),
)
@@ -95,7 +117,11 @@ def test_historical_exports_activity_for_failed_export(self):
activity="job_triggered",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload=SAMPLE_PAYLOAD,
+ ),
),
)
with freeze_time("2021-08-25T13:00:00Z"):
@@ -104,7 +130,9 @@ def test_historical_exports_activity_for_failed_export(self):
detail=Detail(
name="Some export plugin",
trigger=Trigger(
- job_type="Export historical events V2", job_id="1234", payload={"failure_reason": "foobar"}
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload={"failure_reason": "foobar"},
),
),
)
@@ -130,7 +158,11 @@ def test_historical_exports_activity_ignores_unrelated_entries(self):
activity="job_triggered",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload=SAMPLE_PAYLOAD,
+ ),
),
)
@@ -192,7 +224,9 @@ def test_historical_exports_orders_activity_by_created_at(self):
detail=Detail(
name="Some export plugin",
trigger=Trigger(
- job_type="Export historical events V2", job_id=str(hour), payload=SAMPLE_PAYLOAD
+ job_type="Export historical events V2",
+ job_id=str(hour),
+ payload=SAMPLE_PAYLOAD,
),
),
)
@@ -218,7 +252,11 @@ def test_historical_export_metrics(self):
activity="job_triggered",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload=SAMPLE_PAYLOAD,
+ ),
),
)
with freeze_time("2021-08-25T05:00:00Z"):
@@ -226,7 +264,11 @@ def test_historical_export_metrics(self):
activity="export_success",
detail=Detail(
name="Some export plugin",
- trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload={}),
+ trigger=Trigger(
+ job_type="Export historical events V2",
+ job_id="1234",
+ payload={},
+ ),
),
)
@@ -276,7 +318,11 @@ def test_historical_export_metrics(self):
"successes": [0, 102, 0, 10, 0, 0, 0],
"successes_on_retry": [0, 0, 0, 0, 0, 0, 0],
"failures": [0, 0, 2, 0, 0, 0, 0],
- "totals": {"successes": 112, "successes_on_retry": 0, "failures": 2},
+ "totals": {
+ "successes": 112,
+ "successes_on_retry": 0,
+ "failures": 2,
+ },
},
"summary": {
"duration": 4 * 60 * 60,
diff --git a/posthog/queries/base.py b/posthog/queries/base.py
index 57ff555c2dcc8..f03e6723ac72e 100644
--- a/posthog/queries/base.py
+++ b/posthog/queries/base.py
@@ -19,7 +19,11 @@
from posthog.models.cohort import Cohort, CohortPeople
from posthog.models.filters.filter import Filter
from posthog.models.filters.path_filter import PathFilter
-from posthog.models.property import CLICKHOUSE_ONLY_PROPERTY_TYPES, Property, PropertyGroup
+from posthog.models.property import (
+ CLICKHOUSE_ONLY_PROPERTY_TYPES,
+ Property,
+ PropertyGroup,
+)
from posthog.models.property.property import OperatorType, ValueT
from posthog.models.team import Team
from posthog.queries.util import convert_to_datetime_aware
@@ -181,9 +185,12 @@ def match_property(property: Property, override_property_values: Dict[str, Any])
def empty_or_null_with_value_q(
- column: str, key: str, operator: Optional[OperatorType], value: ValueT, negated: bool = False
+ column: str,
+ key: str,
+ operator: Optional[OperatorType],
+ value: ValueT,
+ negated: bool = False,
) -> Q:
-
if operator == "exact" or operator is None:
value_as_given = Property._parse_value(value)
value_as_coerced_to_number = Property._parse_value(value, convert_to_number=True)
@@ -220,13 +227,11 @@ def property_to_Q(
cohorts_cache: Optional[Dict[int, Cohort]] = None,
using_database: str = "default",
) -> Q:
-
if property.type in CLICKHOUSE_ONLY_PROPERTY_TYPES:
raise ValueError(f"property_to_Q: type is not supported: {repr(property.type)}")
value = property._parse_value(property.value)
if property.type == "cohort":
-
cohort_id = int(cast(Union[str, int], value))
if cohorts_cache is not None:
if cohorts_cache.get(cohort_id) is None:
@@ -239,14 +244,23 @@ def property_to_Q(
return Q(
Exists(
CohortPeople.objects.using(using_database)
- .filter(cohort_id=cohort_id, person_id=OuterRef("id"), cohort__id=cohort_id)
+ .filter(
+ cohort_id=cohort_id,
+ person_id=OuterRef("id"),
+ cohort__id=cohort_id,
+ )
.only("id")
)
)
else:
# :TRICKY: This has potential to create an infinite loop if the cohort is recursive.
# But, this shouldn't happen because we check for cyclic cohorts on creation.
- return property_group_to_Q(cohort.properties, override_property_values, cohorts_cache, using_database)
+ return property_group_to_Q(
+ cohort.properties,
+ override_property_values,
+ cohorts_cache,
+ using_database,
+ )
# short circuit query if key exists in override_property_values
if property.key in override_property_values and property.operator != "is_not_set":
@@ -277,7 +291,11 @@ def property_to_Q(
return Q(pk=-1)
if isinstance(property.operator, str) and property.operator.startswith("not_"):
return empty_or_null_with_value_q(
- column, property.key, cast(OperatorType, property.operator[4:]), value, negated=True
+ column,
+ property.key,
+ cast(OperatorType, property.operator[4:]),
+ value,
+ negated=True,
)
if property.operator in ("is_date_after", "is_date_before"):
@@ -294,7 +312,6 @@ def property_group_to_Q(
cohorts_cache: Optional[Dict[int, Cohort]] = None,
using_database: str = "default",
) -> Q:
-
filters = Q()
if not property_group or len(property_group.values) == 0:
@@ -303,7 +320,10 @@ def property_group_to_Q(
if isinstance(property_group.values[0], PropertyGroup):
for group in property_group.values:
group_filter = property_group_to_Q(
- cast(PropertyGroup, group), override_property_values, cohorts_cache, using_database
+ cast(PropertyGroup, group),
+ override_property_values,
+ cohorts_cache,
+ using_database,
)
if property_group.type == PropertyOperatorType.OR:
filters |= group_filter
diff --git a/posthog/queries/breakdown_props.py b/posthog/queries/breakdown_props.py
index 9d0ccf80db32e..a7a320e8e5dfa 100644
--- a/posthog/queries/breakdown_props.py
+++ b/posthog/queries/breakdown_props.py
@@ -2,7 +2,12 @@
from django.forms import ValidationError
-from posthog.constants import BREAKDOWN_TYPES, MONTHLY_ACTIVE, WEEKLY_ACTIVE, PropertyOperatorType
+from posthog.constants import (
+ BREAKDOWN_TYPES,
+ MONTHLY_ACTIVE,
+ WEEKLY_ACTIVE,
+ PropertyOperatorType,
+)
from posthog.hogql.hogql import HogQLContext
from posthog.models.cohort import Cohort
from posthog.models.cohort.util import format_filter_query
@@ -26,7 +31,10 @@
from posthog.queries.person_query import PersonQuery
from posthog.queries.query_date_range import QueryDateRange
from posthog.session_recordings.queries.session_query import SessionQuery
-from posthog.queries.trends.sql import HISTOGRAM_ELEMENTS_ARRAY_OF_KEY_SQL, TOP_ELEMENTS_ARRAY_OF_KEY_SQL
+from posthog.queries.trends.sql import (
+ HISTOGRAM_ELEMENTS_ARRAY_OF_KEY_SQL,
+ TOP_ELEMENTS_ARRAY_OF_KEY_SQL,
+)
from posthog.queries.util import PersonPropertiesMode
from posthog.utils import PersonOnEventsMode
@@ -98,7 +106,10 @@ def get_breakdown_prop_values(
)
person_query = PersonQuery(
- filter, team.pk, column_optimizer=column_optimizer, entity=entity if not use_all_funnel_entities else None
+ filter,
+ team.pk,
+ column_optimizer=column_optimizer,
+ entity=entity if not use_all_funnel_entities else None,
)
if person_properties_mode == PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2:
person_join_clauses = PERSON_OVERRIDES_JOIN_SQL.format(
@@ -160,7 +171,10 @@ def get_breakdown_prop_values(
filter.hogql_context,
filter.breakdown_normalize_url,
direct_on_events=person_properties_mode
- in [PersonPropertiesMode.DIRECT_ON_EVENTS, PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2],
+ in [
+ PersonPropertiesMode.DIRECT_ON_EVENTS,
+ PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2,
+ ],
cast_as_float=filter.using_histogram,
)
@@ -269,7 +283,11 @@ def _to_value_expression(
value_expression = translate_hogql(cast(str, breakdown), hogql_context)
else:
value_expression = get_single_or_multi_property_string_expr(
- breakdown, table="events", query_alias=None, column="properties", normalize_url=breakdown_normalize_url
+ breakdown,
+ table="events",
+ query_alias=None,
+ column="properties",
+ normalize_url=breakdown_normalize_url,
)
if cast_as_float:
diff --git a/posthog/queries/cohort_query.py b/posthog/queries/cohort_query.py
index ec1a1d7f9efff..1c1b697bfc222 100644
--- a/posthog/queries/cohort_query.py
+++ b/posthog/queries/cohort_query.py
@@ -1,6 +1,8 @@
from posthog.settings import EE_AVAILABLE
if EE_AVAILABLE:
- from ee.clickhouse.queries.enterprise_cohort_query import EnterpriseCohortQuery as CohortQuery
+ from ee.clickhouse.queries.enterprise_cohort_query import (
+ EnterpriseCohortQuery as CohortQuery,
+ )
else:
from posthog.queries.foss_cohort_query import FOSSCohortQuery as CohortQuery # type: ignore
diff --git a/posthog/queries/column_optimizer/column_optimizer.py b/posthog/queries/column_optimizer/column_optimizer.py
index 2d49afeeaf8dd..f1952e500c169 100644
--- a/posthog/queries/column_optimizer/column_optimizer.py
+++ b/posthog/queries/column_optimizer/column_optimizer.py
@@ -2,7 +2,9 @@
from posthog.settings import EE_AVAILABLE
if EE_AVAILABLE:
- from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer as ColumnOptimizer
+ from ee.clickhouse.queries.column_optimizer import (
+ EnterpriseColumnOptimizer as ColumnOptimizer,
+ )
else:
from posthog.queries.column_optimizer.foss_column_optimizer import ( # type: ignore
FOSSColumnOptimizer as ColumnOptimizer,
diff --git a/posthog/queries/column_optimizer/foss_column_optimizer.py b/posthog/queries/column_optimizer/foss_column_optimizer.py
index 104f3cd000137..19487a7cb5de4 100644
--- a/posthog/queries/column_optimizer/foss_column_optimizer.py
+++ b/posthog/queries/column_optimizer/foss_column_optimizer.py
@@ -4,7 +4,10 @@
from posthog.clickhouse.materialized_columns import ColumnName, get_materialized_columns
from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType
-from posthog.models.action.util import get_action_tables_and_properties, uses_elements_chain
+from posthog.models.action.util import (
+ get_action_tables_and_properties,
+ uses_elements_chain,
+)
from posthog.models.entity import Entity
from posthog.models.filters import Filter
from posthog.models.filters.mixins.utils import cached_property
@@ -13,7 +16,11 @@
from posthog.models.filters.retention_filter import RetentionFilter
from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.filters.utils import GroupTypeIndex
-from posthog.models.property import PropertyIdentifier, PropertyType, TableWithProperties
+from posthog.models.property import (
+ PropertyIdentifier,
+ PropertyType,
+ TableWithProperties,
+)
from posthog.models.property.util import box_value, extract_tables_and_properties
from posthog.queries.property_optimizer import PropertyOptimizer
@@ -27,7 +34,13 @@ class FOSSColumnOptimizer:
def __init__(
self,
- filter: Union[Filter, PathFilter, RetentionFilter, StickinessFilter, PropertiesTimelineFilter],
+ filter: Union[
+ Filter,
+ PathFilter,
+ RetentionFilter,
+ StickinessFilter,
+ PropertiesTimelineFilter,
+ ],
team_id: int,
):
self.filter = filter
@@ -53,7 +66,10 @@ def person_columns_to_query(self) -> Set[ColumnName]:
return self.columns_to_query("person", set(self.used_properties_with_type("person")))
def columns_to_query(
- self, table: TableWithProperties, used_properties: Set[PropertyIdentifier], table_column: str = "properties"
+ self,
+ table: TableWithProperties,
+ used_properties: Set[PropertyIdentifier],
+ table_column: str = "properties",
) -> Set[ColumnName]:
"Transforms a list of property names to what columns are needed for that query"
@@ -119,12 +135,24 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]:
boxed_breakdown = box_value(self.filter.breakdown)
for b in boxed_breakdown:
if isinstance(b, str):
- counter[(b, self.filter.breakdown_type, self.filter.breakdown_group_type_index)] += 1
+ counter[
+ (
+ b,
+ self.filter.breakdown_type,
+ self.filter.breakdown_group_type_index,
+ )
+ ] += 1
# If we have a breakdowns attribute then make sure we pull in everything we
# need to calculate it
for breakdown in self.filter.breakdowns or []:
- counter[(breakdown["property"], breakdown["type"], self.filter.breakdown_group_type_index)] += 1
+ counter[
+ (
+ breakdown["property"],
+ breakdown["type"],
+ self.filter.breakdown_group_type_index,
+ )
+ ] += 1
# Both entities and funnel exclusions can contain nested property filters
for entity in self.entities_used_in_filter():
@@ -147,7 +175,6 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]:
and self.filter.correlation_type == FunnelCorrelationType.PROPERTIES
and self.filter.correlation_property_names
):
-
for prop_value in self.filter.correlation_property_names:
counter[(prop_value, "person", None)] += 1
@@ -157,7 +184,11 @@ def used_properties_with_type(self, property_type: PropertyType) -> TCounter[Pro
return Counter(
{
(name, type, group_type_index): count
- for (name, type, group_type_index), count in self.properties_used_in_filter.items()
+ for (
+ name,
+ type,
+ group_type_index,
+ ), count in self.properties_used_in_filter.items()
if type == property_type
}
)
diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py
index 9be4dc1a2fbd2..5018892060873 100644
--- a/posthog/queries/event_query/event_query.py
+++ b/posthog/queries/event_query/event_query.py
@@ -47,7 +47,12 @@ class EventQuery(metaclass=ABCMeta):
def __init__(
self,
filter: Union[
- Filter, PathFilter, RetentionFilter, StickinessFilter, SessionRecordingsFilter, PropertiesTimelineFilter
+ Filter,
+ PathFilter,
+ RetentionFilter,
+ StickinessFilter,
+ SessionRecordingsFilter,
+ PropertiesTimelineFilter,
],
team: Team,
round_interval=False,
@@ -68,7 +73,10 @@ def __init__(
self._extra_event_properties = extra_event_properties
self._column_optimizer = ColumnOptimizer(self._filter, self._team_id)
self._extra_person_fields = extra_person_fields
- self.params: Dict[str, Any] = {"team_id": self._team_id, "timezone": team.timezone}
+ self.params: Dict[str, Any] = {
+ "team_id": self._team_id,
+ "timezone": team.timezone,
+ }
self._should_join_distinct_ids = should_join_distinct_ids
self._should_join_persons = should_join_persons
@@ -183,7 +191,12 @@ def _does_cohort_need_persons(self, prop: Property) -> bool:
def _person_query(self) -> PersonQuery:
if isinstance(self._filter, PropertiesTimelineFilter):
raise Exception("Properties Timeline never needs person query")
- return PersonQuery(self._filter, self._team_id, self._column_optimizer, extra_fields=self._extra_person_fields)
+ return PersonQuery(
+ self._filter,
+ self._team_id,
+ self._column_optimizer,
+ extra_fields=self._extra_person_fields,
+ )
def _get_person_query(self) -> Tuple[str, Dict]:
if self._should_join_persons:
@@ -205,7 +218,11 @@ def _get_groups_query(self) -> Tuple[str, Dict]:
def _sessions_query(self) -> SessionQuery:
if isinstance(self._filter, PropertiesTimelineFilter):
raise Exception("Properties Timeline never needs sessions query")
- return SessionQuery(filter=self._filter, team=self._team, session_id_alias=self._session_id_alias)
+ return SessionQuery(
+ filter=self._filter,
+ team=self._team,
+ session_id_alias=self._session_id_alias,
+ )
def _get_sessions_query(self) -> Tuple[str, Dict]:
if self._should_join_sessions:
diff --git a/posthog/queries/foss_cohort_query.py b/posthog/queries/foss_cohort_query.py
index b9fc8511b6301..e6005abab632a 100644
--- a/posthog/queries/foss_cohort_query.py
+++ b/posthog/queries/foss_cohort_query.py
@@ -5,9 +5,19 @@
from posthog.models import Filter, Team
from posthog.models.action import Action
from posthog.models.cohort import Cohort
-from posthog.models.cohort.util import format_static_cohort_query, get_count_operator, get_entity_query
+from posthog.models.cohort.util import (
+ format_static_cohort_query,
+ get_count_operator,
+ get_entity_query,
+)
from posthog.models.filters.mixins.utils import cached_property
-from posthog.models.property import BehavioralPropertyType, OperatorInterval, Property, PropertyGroup, PropertyName
+from posthog.models.property import (
+ BehavioralPropertyType,
+ OperatorInterval,
+ Property,
+ PropertyGroup,
+ PropertyName,
+)
from posthog.models.property.util import prop_filter_json_extract
from posthog.queries.event_query import EventQuery
from posthog.queries.util import PersonPropertiesMode
@@ -17,7 +27,14 @@
Event = Tuple[str, Union[str, int]]
-INTERVAL_TO_SECONDS = {"minute": 60, "hour": 3600, "day": 86400, "week": 604800, "month": 2592000, "year": 31536000}
+INTERVAL_TO_SECONDS = {
+ "minute": 60,
+ "hour": 3600,
+ "day": 86400,
+ "week": 604800,
+ "month": 2592000,
+ "year": 31536000,
+}
def relative_date_to_seconds(date: Tuple[Optional[int], Union[OperatorInterval, None]]):
@@ -101,7 +118,6 @@ def if_condition(condition: str, true_res: str, false_res: str) -> str:
class FOSSCohortQuery(EventQuery):
-
BEHAVIOR_QUERY_ALIAS = "behavior_query"
FUNNEL_QUERY_ALIAS = "funnel_query"
SEQUENCE_FIELD_ALIAS = "steps"
@@ -205,7 +221,13 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper
new_property_group_list.append(
PropertyGroup(
type=PropertyOperatorType.AND,
- values=[Property(key="fake_key_01r2ho", value=0, type="person")],
+ values=[
+ Property(
+ key="fake_key_01r2ho",
+ value=0,
+ type="person",
+ )
+ ],
)
)
else:
@@ -228,7 +250,6 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper
# Implemented in /ee
def get_query(self) -> Tuple[str, Dict[str, Any]]:
-
if not self._outer_property_groups:
# everything is pushed down, no behavioral stuff to do
# thus, use personQuery directly
@@ -240,7 +261,11 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
subq = []
- behavior_subquery, behavior_subquery_params, behavior_query_alias = self._get_behavior_subquery()
+ (
+ behavior_subquery,
+ behavior_subquery_params,
+ behavior_query_alias,
+ ) = self._get_behavior_subquery()
subq.append((behavior_subquery, behavior_query_alias))
self.params.update(behavior_subquery_params)
@@ -302,7 +327,6 @@ def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]:
query, params = "", {}
if self._should_join_behavioral_query:
-
_fields = [
f"{self.DISTINCT_ID_TABLE_ALIAS if self._person_on_events_mode == PersonOnEventsMode.DISABLED else self.EVENT_TABLE_ALIAS}.person_id AS person_id"
]
@@ -328,7 +352,12 @@ def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]:
query, params = (
query,
- {"team_id": self._team_id, event_param_name: self._events, **date_params, **person_prop_params},
+ {
+ "team_id": self._team_id,
+ event_param_name: self._events,
+ **date_params,
+ **person_prop_params,
+ },
)
return query, params, self.BEHAVIOR_QUERY_ALIAS
@@ -389,7 +418,6 @@ def build_conditions(prop: Optional[Union[PropertyGroup, Property]], prepend="le
# Implemented in /ee
def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]:
-
res: str = ""
params: Dict[str, Any] = {}
@@ -412,7 +440,12 @@ def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) ->
def get_person_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]:
if self._outer_property_groups and len(self._outer_property_groups.flat):
return prop_filter_json_extract(
- prop, idx, prepend, prop_var="person_props", allow_denormalized_props=True, property_operator=""
+ prop,
+ idx,
+ prepend,
+ prop_var="person_props",
+ allow_denormalized_props=True,
+ property_operator="",
)
else:
return "", {}
@@ -440,7 +473,10 @@ def get_performed_event_condition(self, prop: Property, prepend: str, idx: int)
self._fields.append(field)
# Negation is handled in the where clause to ensure the right result if a full join occurs where the joined person did not perform the event
- return f"{'NOT' if prop.negation else ''} {column_name}", {f"{date_param}": date_value, **entity_params}
+ return f"{'NOT' if prop.negation else ''} {column_name}", {
+ f"{date_param}": date_value,
+ **entity_params,
+ }
def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]:
event = (prop.event_type, prop.key)
@@ -461,7 +497,11 @@ def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) -
# Negation is handled in the where clause to ensure the right result if a full join occurs where the joined person did not perform the event
return (
f"{'NOT' if prop.negation else ''} {column_name}",
- {f"{operator_value_param}": count, f"{date_param}": date_value, **entity_params},
+ {
+ f"{operator_value_param}": count,
+ f"{date_param}": date_value,
+ **entity_params,
+ },
)
def _determine_should_join_distinct_ids(self) -> None:
@@ -497,7 +537,10 @@ def _validate_negations(self) -> None:
pass
def _get_entity(
- self, event: Tuple[Optional[str], Optional[Union[int, str]]], prepend: str, idx: int
+ self,
+ event: Tuple[Optional[str], Optional[Union[int, str]]],
+ prepend: str,
+ idx: int,
) -> Tuple[str, Dict[str, Any]]:
res: str = ""
params: Dict[str, Any] = {}
@@ -508,12 +551,20 @@ def _get_entity(
if event[0] == "actions":
self._add_action(int(event[1]))
res, params = get_entity_query(
- None, int(event[1]), self._team_id, f"{prepend}_entity_{idx}", self._filter.hogql_context
+ None,
+ int(event[1]),
+ self._team_id,
+ f"{prepend}_entity_{idx}",
+ self._filter.hogql_context,
)
elif event[0] == "events":
self._add_event(str(event[1]))
res, params = get_entity_query(
- str(event[1]), None, self._team_id, f"{prepend}_entity_{idx}", self._filter.hogql_context
+ str(event[1]),
+ None,
+ self._team_id,
+ f"{prepend}_entity_{idx}",
+ self._filter.hogql_context,
)
else:
raise ValueError(f"Event type must be 'events' or 'actions'")
diff --git a/posthog/queries/funnels/base.py b/posthog/queries/funnels/base.py
index 32cfadf4abd1e..8ac25880932a7 100644
--- a/posthog/queries/funnels/base.py
+++ b/posthog/queries/funnels/base.py
@@ -76,7 +76,10 @@ def __init__(
if self._filter.funnel_window_days:
self._filter = self._filter.shallow_clone(
- {FUNNEL_WINDOW_INTERVAL: self._filter.funnel_window_days, FUNNEL_WINDOW_INTERVAL_UNIT: "day"}
+ {
+ FUNNEL_WINDOW_INTERVAL: self._filter.funnel_window_days,
+ FUNNEL_WINDOW_INTERVAL_UNIT: "day",
+ }
)
if not self._filter.limit:
@@ -308,7 +311,6 @@ def _get_timestamp_selects(self) -> Tuple[str, str]:
target_step -= 1
if self._include_preceding_timestamp:
-
if target_step == 0:
raise ValueError("Cannot request preceding step timestamp if target funnel step is the first step")
@@ -391,7 +393,6 @@ def _get_exclusion_condition(self):
return ""
def _get_sorting_condition(self, curr_index: int, max_steps: int):
-
if curr_index == 1:
return "1"
@@ -414,7 +415,11 @@ def _get_sorting_condition(self, curr_index: int, max_steps: int):
return f"if({' AND '.join(conditions)}, {curr_index}, {self._get_sorting_condition(curr_index - 1, max_steps)})"
def _get_inner_event_query(
- self, entities=None, entity_name="events", skip_entity_filter=False, skip_step_filter=False
+ self,
+ entities=None,
+ entity_name="events",
+ skip_entity_filter=False,
+ skip_step_filter=False,
) -> str:
entities_to_use = entities or self._filter.entities
@@ -444,7 +449,12 @@ def _get_inner_event_query(
all_step_cols.extend(step_cols)
for exclusion_id, entity in enumerate(self._filter.exclusions):
- step_cols = self._get_step_col(entity, entity.funnel_from_step, entity_name, f"exclusion_{exclusion_id}_")
+ step_cols = self._get_step_col(
+ entity,
+ entity.funnel_from_step,
+ entity_name,
+ f"exclusion_{exclusion_id}_",
+ )
# every exclusion entity has the form: exclusion__step_i & timestamp exclusion__latest_i
# where i is the starting step for exclusion on that entity
all_step_cols.extend(step_cols)
@@ -715,7 +725,6 @@ def _get_breakdown_select_prop(self) -> str:
self.params.update({"breakdown": self._filter.breakdown})
if self._filter.breakdown_type == "person":
-
if self._team.person_on_events_mode != PersonOnEventsMode.DISABLED:
basic_prop_selector = get_single_or_multi_property_string_expr(
self._filter.breakdown,
@@ -727,7 +736,10 @@ def _get_breakdown_select_prop(self) -> str:
)
else:
basic_prop_selector = get_single_or_multi_property_string_expr(
- self._filter.breakdown, table="person", query_alias="prop_basic", column="person_props"
+ self._filter.breakdown,
+ table="person",
+ query_alias="prop_basic",
+ column="person_props",
)
elif self._filter.breakdown_type == "event":
basic_prop_selector = get_single_or_multi_property_string_expr(
@@ -756,7 +768,10 @@ def _get_breakdown_select_prop(self) -> str:
else:
properties_field = f"group_properties_{self._filter.breakdown_group_type_index}"
expression, _ = get_property_string_expr(
- table="groups", property_name=self._filter.breakdown, var="%(breakdown)s", column=properties_field
+ table="groups",
+ property_name=self._filter.breakdown,
+ var="%(breakdown)s",
+ column=properties_field,
)
basic_prop_selector = f"{expression} AS prop_basic"
elif self._filter.breakdown_type == "hogql":
@@ -789,7 +804,6 @@ def _get_breakdown_select_prop(self) -> str:
BreakdownAttributionType.FIRST_TOUCH,
BreakdownAttributionType.LAST_TOUCH,
]:
-
prop_conditional = (
"notEmpty(arrayFilter(x -> notEmpty(x), prop))"
if self._query_has_array_breakdown()
@@ -833,7 +847,10 @@ def _get_breakdown_conditions(self) -> Optional[str]:
if self._filter.breakdown:
use_all_funnel_entities = (
self._filter.breakdown_attribution_type
- in [BreakdownAttributionType.FIRST_TOUCH, BreakdownAttributionType.LAST_TOUCH]
+ in [
+ BreakdownAttributionType.FIRST_TOUCH,
+ BreakdownAttributionType.LAST_TOUCH,
+ ]
or self._filter.funnel_order_type == FunnelOrderType.UNORDERED
)
first_entity = self._filter.entities[0]
@@ -860,7 +877,11 @@ def _get_breakdown_conditions(self) -> Optional[str]:
def _get_breakdown_prop(self, group_remaining=False) -> str:
if self._filter.breakdown:
other_aggregation = "['Other']" if self._query_has_array_breakdown() else "'Other'"
- if group_remaining and self._filter.breakdown_type in ["person", "event", "group"]:
+ if group_remaining and self._filter.breakdown_type in [
+ "person",
+ "event",
+ "group",
+ ]:
return f", if(has(%(breakdown_values)s, prop), prop, {other_aggregation}) as prop"
else:
# Cohorts don't have "Other" aggregation
diff --git a/posthog/queries/funnels/funnel.py b/posthog/queries/funnels/funnel.py
index 79f0c69898214..e1ac23f00d637 100644
--- a/posthog/queries/funnels/funnel.py
+++ b/posthog/queries/funnels/funnel.py
@@ -111,7 +111,6 @@ def get_comparison_cols(self, level_index: int, max_steps: int):
return ", ".join(cols)
def build_step_subquery(self, level_index: int, max_steps: int, event_names_alias: str = "events"):
-
if level_index >= max_steps:
return f"""
SELECT
diff --git a/posthog/queries/funnels/funnel_event_query.py b/posthog/queries/funnels/funnel_event_query.py
index be41ec9116bf8..dad407abffa0d 100644
--- a/posthog/queries/funnels/funnel_event_query.py
+++ b/posthog/queries/funnels/funnel_event_query.py
@@ -21,7 +21,9 @@ def get_query(
# Aggregating by group
if self._filter.aggregation_group_type_index is not None:
aggregation_target = get_aggregation_target_field(
- self._filter.aggregation_group_type_index, self.EVENT_TABLE_ALIAS, self._person_id_alias
+ self._filter.aggregation_group_type_index,
+ self.EVENT_TABLE_ALIAS,
+ self._person_id_alias,
)
# Aggregating by HogQL
diff --git a/posthog/queries/funnels/funnel_persons.py b/posthog/queries/funnels/funnel_persons.py
index 5153473857eba..5cebef5fb7dcd 100644
--- a/posthog/queries/funnels/funnel_persons.py
+++ b/posthog/queries/funnels/funnel_persons.py
@@ -15,7 +15,11 @@ class ClickhouseFunnelActors(ClickhouseFunnel, ActorBaseQuery):
def aggregation_group_type_index(self):
return self._filter.aggregation_group_type_index
- def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None):
+ def actor_query(
+ self,
+ limit_actors: Optional[bool] = True,
+ extra_fields: Optional[List[str]] = None,
+ ):
extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or []))
return (
FUNNEL_PERSONS_BY_STEP_SQL.format(
diff --git a/posthog/queries/funnels/funnel_strict.py b/posthog/queries/funnels/funnel_strict.py
index dd5c4db883437..38b5d3a4c6a09 100644
--- a/posthog/queries/funnels/funnel_strict.py
+++ b/posthog/queries/funnels/funnel_strict.py
@@ -18,7 +18,6 @@ def get_query(self):
"""
def get_step_counts_query(self):
-
steps_per_person_query = self.get_step_counts_without_aggregation_query()
max_steps = len(self._filter.entities)
breakdown_clause = self._get_breakdown_prop()
diff --git a/posthog/queries/funnels/funnel_strict_persons.py b/posthog/queries/funnels/funnel_strict_persons.py
index 716c27608eb3b..cca6f8e598dc8 100644
--- a/posthog/queries/funnels/funnel_strict_persons.py
+++ b/posthog/queries/funnels/funnel_strict_persons.py
@@ -15,7 +15,11 @@ class ClickhouseFunnelStrictActors(ClickhouseFunnelStrict, ActorBaseQuery):
def aggregation_group_type_index(self):
return self._filter.aggregation_group_type_index
- def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None):
+ def actor_query(
+ self,
+ limit_actors: Optional[bool] = True,
+ extra_fields: Optional[List[str]] = None,
+ ):
extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or []))
return (
FUNNEL_PERSONS_BY_STEP_SQL.format(
diff --git a/posthog/queries/funnels/funnel_trends.py b/posthog/queries/funnels/funnel_trends.py
index 3c6bbca8b79a1..d67b24ae78bbc 100644
--- a/posthog/queries/funnels/funnel_trends.py
+++ b/posthog/queries/funnels/funnel_trends.py
@@ -55,13 +55,11 @@ class ClickhouseFunnelTrends(ClickhouseFunnelBase):
QUERY_TYPE = "funnel_trends"
def __init__(self, filter: Filter, team: Team) -> None:
-
super().__init__(filter, team)
self.funnel_order = get_funnel_order_class(filter)(filter, team)
def _exec_query(self):
-
return self._summarize_data(super()._exec_query())
def get_step_counts_without_aggregation_query(
@@ -97,7 +95,11 @@ def get_query(self) -> str:
# Expects multiple rows for same person, first event time, steps taken.
self.params.update(self.funnel_order.params)
- reached_from_step_count_condition, reached_to_step_count_condition, _ = self.get_steps_reached_conditions()
+ (
+ reached_from_step_count_condition,
+ reached_to_step_count_condition,
+ _,
+ ) = self.get_steps_reached_conditions()
interval_func = get_interval_func_ch(self._filter.interval)
if self._filter.date_from is None:
@@ -157,10 +159,13 @@ def get_steps_reached_conditions(self) -> Tuple[str, str, str]:
reached_to_step_count_condition = f"steps_completed >= {to_step+1}"
# Those who dropped off
did_not_reach_to_step_count_condition = f"{reached_from_step_count_condition} AND steps_completed < {to_step+1}"
- return reached_from_step_count_condition, reached_to_step_count_condition, did_not_reach_to_step_count_condition
+ return (
+ reached_from_step_count_condition,
+ reached_to_step_count_condition,
+ did_not_reach_to_step_count_condition,
+ )
def _summarize_data(self, results):
-
breakdown_clause = self._get_breakdown_prop()
summary = []
@@ -185,7 +190,6 @@ def _summarize_data(self, results):
return summary
def _format_results(self, summary):
-
if self._filter.breakdown:
grouper = lambda row: row["breakdown_value"]
sorted_data = sorted(summary, key=grouper)
diff --git a/posthog/queries/funnels/funnel_trends_persons.py b/posthog/queries/funnels/funnel_trends_persons.py
index 46f2a9f1bf7df..0f4391eb041e4 100644
--- a/posthog/queries/funnels/funnel_trends_persons.py
+++ b/posthog/queries/funnels/funnel_trends_persons.py
@@ -50,7 +50,11 @@ def actor_query(self, limit_actors: Optional[bool] = True):
# Expects multiple rows for same person, first event time, steps taken.
self.params.update(self.funnel_order.params)
- _, reached_to_step_count_condition, did_not_reach_to_step_count_condition = self.get_steps_reached_conditions()
+ (
+ _,
+ reached_to_step_count_condition,
+ did_not_reach_to_step_count_condition,
+ ) = self.get_steps_reached_conditions()
return (
FUNNEL_PERSONS_BY_STEP_SQL.format(
diff --git a/posthog/queries/funnels/funnel_unordered.py b/posthog/queries/funnels/funnel_unordered.py
index e72abdf40c220..ac3a6d939b09f 100644
--- a/posthog/queries/funnels/funnel_unordered.py
+++ b/posthog/queries/funnels/funnel_unordered.py
@@ -54,7 +54,6 @@ def _serialize_step(
}
def get_query(self):
-
max_steps = len(self._filter.entities)
for exclusion in self._filter.exclusions:
@@ -70,7 +69,6 @@ def get_query(self):
"""
def get_step_counts_query(self):
-
max_steps = len(self._filter.entities)
union_query = self.get_step_counts_without_aggregation_query()
@@ -140,7 +138,6 @@ def _get_step_times(self, max_steps: int):
return f", {formatted}" if formatted else ""
def get_sorting_condition(self, max_steps: int):
-
conditions = []
event_times_elements = []
diff --git a/posthog/queries/funnels/funnel_unordered_persons.py b/posthog/queries/funnels/funnel_unordered_persons.py
index 972a8b9ec7f7c..334798c990208 100644
--- a/posthog/queries/funnels/funnel_unordered_persons.py
+++ b/posthog/queries/funnels/funnel_unordered_persons.py
@@ -22,7 +22,11 @@ def _get_funnel_person_step_events(self):
return ", array() as matching_events"
return ""
- def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None):
+ def actor_query(
+ self,
+ limit_actors: Optional[bool] = True,
+ extra_fields: Optional[List[str]] = None,
+ ):
extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or []))
return (
FUNNEL_PERSONS_BY_STEP_SQL.format(
diff --git a/posthog/queries/funnels/test/breakdown_cases.py b/posthog/queries/funnels/test/breakdown_cases.py
index 541b8dad1bfd1..273ce3b201601 100644
--- a/posthog/queries/funnels/test/breakdown_cases.py
+++ b/posthog/queries/funnels/test/breakdown_cases.py
@@ -8,7 +8,11 @@
from posthog.models.filters import Filter
from posthog.queries.breakdown_props import ALL_USERS_COHORT_ID
from posthog.queries.funnels.funnel_unordered import ClickhouseFunnelUnordered
-from posthog.test.base import APIBaseTest, also_test_with_materialized_columns, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ also_test_with_materialized_columns,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
@@ -46,7 +50,10 @@ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]:
"breakdown": step.breakdown,
"breakdown_value": step.breakdown,
**(
- {"action_id": None, "name": f"Completed {order+1} step{'s' if order > 0 else ''}"}
+ {
+ "action_id": None,
+ "name": f"Completed {order+1} step{'s' if order > 0 else ''}",
+ }
if Funnel == ClickhouseFunnelUnordered
else {}
),
@@ -60,9 +67,12 @@ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]:
@also_test_with_materialized_columns(["$browser", "$browser_version"])
def test_funnel_step_multi_property_breakdown_event(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -79,36 +89,60 @@ def test_funnel_step_multi_property_breakdown_event(self):
{
"event": "sign up",
"timestamp": datetime(2020, 1, 1, 12),
- "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95},
+ "properties": {
+ "key": "val",
+ "$browser": "Chrome",
+ "$browser_version": 95,
+ },
},
{
"event": "play movie",
"timestamp": datetime(2020, 1, 1, 13),
- "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95},
+ "properties": {
+ "key": "val",
+ "$browser": "Chrome",
+ "$browser_version": 95,
+ },
},
{
"event": "buy",
"timestamp": datetime(2020, 1, 1, 15),
- "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95},
+ "properties": {
+ "key": "val",
+ "$browser": "Chrome",
+ "$browser_version": 95,
+ },
},
],
"person2": [
{
"event": "sign up",
"timestamp": datetime(2020, 1, 2, 14),
- "properties": {"key": "val", "$browser": "Safari", "$browser_version": 15},
+ "properties": {
+ "key": "val",
+ "$browser": "Safari",
+ "$browser_version": 15,
+ },
},
{
"event": "play movie",
"timestamp": datetime(2020, 1, 2, 16),
- "properties": {"key": "val", "$browser": "Safari", "$browser_version": 15},
+ "properties": {
+ "key": "val",
+ "$browser": "Safari",
+ "$browser_version": 15,
+ },
},
],
"person3": [
{
"event": "sign up",
"timestamp": datetime(2020, 1, 2, 14),
- "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14},
+ "properties": {
+ "key": "val",
+ "$browser": "Safari",
+ "$browser_version": 14,
+ },
}
],
}
@@ -126,7 +160,10 @@ def test_funnel_step_multi_property_breakdown_event(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "14"]), [people["person3"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Safari", "14"]),
+ [people["person3"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari", "14"]), [])
self._assert_funnel_breakdown_result_is_correct(
@@ -143,8 +180,14 @@ def test_funnel_step_multi_property_breakdown_event(self):
FunnelStepResult(name="buy", breakdown=["Safari", "15"], count=0),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "15"]), [people["person2"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari", "15"]), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Safari", "15"]),
+ [people["person2"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, ["Safari", "15"]),
+ [people["person2"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[2],
@@ -166,14 +209,23 @@ def test_funnel_step_multi_property_breakdown_event(self):
),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "95"]), [people["person1"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Chrome", "95"]), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Chrome", "95"]),
+ [people["person1"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, ["Chrome", "95"]),
+ [people["person1"].uuid],
+ )
@also_test_with_materialized_columns(["$browser"])
def test_funnel_step_breakdown_event_with_string_only_breakdown(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -248,8 +300,14 @@ def test_funnel_step_breakdown_event_with_string_only_breakdown(self):
),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[1],
[
@@ -266,15 +324,22 @@ def test_funnel_step_breakdown_event_with_string_only_breakdown(self):
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid, people["person3"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Safari"),
+ [people["person2"].uuid],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid])
@also_test_with_materialized_columns(["$browser"])
def test_funnel_step_breakdown_event(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -349,8 +414,14 @@ def test_funnel_step_breakdown_event(self):
),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[1],
@@ -368,15 +439,22 @@ def test_funnel_step_breakdown_event(self):
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid, people["person3"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Safari"),
+ [people["person2"].uuid],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid])
@also_test_with_materialized_columns(["$browser"])
def test_funnel_step_breakdown_event_with_other(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -391,16 +469,28 @@ def test_funnel_step_breakdown_event_with_other(self):
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 1, 13),
"properties": {"$browser": "Chrome"},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 1, 15), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 15),
+ "properties": {"$browser": "Chrome"},
+ },
],
"person2": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Safari"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 2, 16),
@@ -408,10 +498,18 @@ def test_funnel_step_breakdown_event_with_other(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}}
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Safari"},
+ }
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "random"}}
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "random"},
+ }
],
"person5": [
{
@@ -443,9 +541,13 @@ def test_funnel_step_breakdown_event_with_other(self):
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid, people["person3"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Safari"),
+ [people["person2"].uuid],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid])
self._assert_funnel_breakdown_result_is_correct(
result[0],
@@ -470,15 +572,25 @@ def test_funnel_step_breakdown_event_with_other(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1, "Other"),
- [people["person1"].uuid, people["person4"].uuid, people["person5"].uuid],
+ [
+ people["person1"].uuid,
+ people["person4"].uuid,
+ people["person5"].uuid,
+ ],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Other"),
+ [people["person1"].uuid],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Other"), [people["person1"].uuid])
@also_test_with_materialized_columns(["$browser"])
def test_funnel_step_breakdown_event_no_type(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -491,16 +603,28 @@ def test_funnel_step_breakdown_event_no_type(self):
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 1, 13),
"properties": {"$browser": "Chrome"},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 1, 15), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 15),
+ "properties": {"$browser": "Chrome"},
+ },
],
"person2": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Safari"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 2, 16),
@@ -508,7 +632,11 @@ def test_funnel_step_breakdown_event_no_type(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}}
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Safari"},
+ }
],
}
@@ -537,8 +665,14 @@ def test_funnel_step_breakdown_event_no_type(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[1],
@@ -556,15 +690,22 @@ def test_funnel_step_breakdown_event_no_type(self):
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid, people["person3"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Safari"),
+ [people["person2"].uuid],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid])
@also_test_with_materialized_columns(person_properties=["$browser"])
def test_funnel_step_breakdown_person(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -576,8 +717,16 @@ def test_funnel_step_breakdown_person(self):
filter = Filter(data=filters)
funnel = Funnel(filter, self.team)
- person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk, properties={"$browser": "Chrome"})
- person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk, properties={"$browser": "Safari"})
+ person1 = _create_person(
+ distinct_ids=["person1"],
+ team_id=self.team.pk,
+ properties={"$browser": "Chrome"},
+ )
+ person2 = _create_person(
+ distinct_ids=["person2"],
+ team_id=self.team.pk,
+ properties={"$browser": "Safari"},
+ )
peoples_journeys = {
"person1": [
@@ -638,9 +787,12 @@ def test_funnel_step_breakdown_person(self):
@also_test_with_materialized_columns(["some_breakdown_val"])
def test_funnel_step_breakdown_limit(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -684,9 +836,12 @@ def test_funnel_step_breakdown_limit(self):
@also_test_with_materialized_columns(["some_breakdown_val"])
def test_funnel_step_custom_breakdown_limit_with_nulls(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -738,9 +893,12 @@ def test_funnel_step_custom_breakdown_limit_with_nulls(self):
@also_test_with_materialized_columns(["some_breakdown_val"])
def test_funnel_step_custom_breakdown_limit_with_nulls_included(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -797,7 +955,6 @@ def test_funnel_step_custom_breakdown_limit_with_nulls_included(self):
@also_test_with_materialized_columns(["$browser"])
def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}],
"insight": INSIGHT_FUNNELS,
@@ -816,11 +973,27 @@ def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Safari"}},
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Safari"},
+ },
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# mixed property type!
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
]
}
people = journeys_for(events_by_person, self.team)
@@ -835,27 +1008,38 @@ def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "0"), [people["person1"].uuid])
self._assert_funnel_breakdown_result_is_correct(
- result[1], [FunnelStepResult(name="sign up", count=1, breakdown=["Chrome"])]
+ result[1],
+ [FunnelStepResult(name="sign up", count=1, breakdown=["Chrome"])],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
- result[2], [FunnelStepResult(name="sign up", count=1, breakdown=["Mac"])]
+ result[2],
+ [FunnelStepResult(name="sign up", count=1, breakdown=["Mac"])],
)
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Mac"), [people["person1"].uuid])
self._assert_funnel_breakdown_result_is_correct(
- result[3], [FunnelStepResult(name="sign up", count=1, breakdown=["Safari"])]
+ result[3],
+ [FunnelStepResult(name="sign up", count=1, breakdown=["Safari"])],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person1"].uuid],
+ )
def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self):
-
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -907,7 +1091,10 @@ def test_funnel_step_breakdown_event_single_person_events_with_multiple_properti
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [])
self._assert_funnel_breakdown_result_is_correct(
@@ -924,13 +1111,23 @@ def test_funnel_step_breakdown_event_single_person_events_with_multiple_properti
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person1"].uuid])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person1"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2, "Safari"),
+ [people["person1"].uuid],
+ )
@also_test_with_materialized_columns(person_properties=["key"], verify_no_jsonextract=False)
def test_funnel_cohort_breakdown(self):
# This caused some issues with SQL parsing
- _create_person(distinct_ids=[f"person1"], team_id=self.team.pk, properties={"key": "value"})
+ _create_person(
+ distinct_ids=[f"person1"],
+ team_id=self.team.pk,
+ properties={"key": "value"},
+ )
people = journeys_for(
{"person1": [{"event": "sign up", "timestamp": datetime(2020, 1, 2, 12)}]},
self.team,
@@ -943,7 +1140,11 @@ def test_funnel_cohort_breakdown(self):
groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}],
)
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -962,15 +1163,25 @@ def test_funnel_cohort_breakdown(self):
self.assertEqual(result[0][0]["breakdown"], "all users")
self.assertEqual(len(result[1]), 3)
self.assertEqual(result[1][0]["breakdown"], "test_cohort")
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, cohort.pk), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, cohort.pk),
+ [people["person1"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, cohort.pk), [])
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ALL_USERS_COHORT_ID), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ALL_USERS_COHORT_ID),
+ [people["person1"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ALL_USERS_COHORT_ID), [])
# non array
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -985,11 +1196,13 @@ def test_funnel_cohort_breakdown(self):
self.assertEqual(len(result[0]), 3)
self.assertEqual(result[0][0]["breakdown"], "test_cohort")
self.assertEqual(result[0][0]["breakdown_value"], cohort.pk)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, cohort.pk), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, cohort.pk),
+ [people["person1"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, cohort.pk), [])
def test_basic_funnel_default_funnel_days_breakdown_event(self):
-
events_by_person = {
"user_1": [
{
@@ -1051,7 +1264,11 @@ def test_basic_funnel_default_funnel_days_breakdown_event(self):
self._assert_funnel_breakdown_result_is_correct(
result[0],
[
- FunnelStepResult(name="user signed up", count=1, breakdown=["https://posthog.com/docs/x"]),
+ FunnelStepResult(
+ name="user signed up",
+ count=1,
+ breakdown=["https://posthog.com/docs/x"],
+ ),
FunnelStepResult(
name="paid",
count=1,
@@ -1139,7 +1356,6 @@ def test_basic_funnel_default_funnel_days_breakdown_action(self):
)
def test_funnel_step_breakdown_with_first_touch_attribution(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1157,21 +1373,41 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# first touch means alakazam is disregarded
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
# no properties dude, represented by ''
"person5": [
@@ -1191,7 +1427,11 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=[""], count=1),
FunnelStepResult(
- name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1203,7 +1443,11 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=["0"], count=1),
FunnelStepResult(
- name="buy", breakdown=["0"], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=["0"],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1224,7 +1468,10 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[3],
@@ -1256,10 +1503,12 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid],
+ )
def test_funnel_step_breakdown_with_last_touch_attribution(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1277,21 +1526,41 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# last touch means 0 is disregarded
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "Alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "Alakazam"},
+ },
],
# no properties dude, represented by ''
"person5": [
@@ -1311,7 +1580,11 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=[""], count=1),
FunnelStepResult(
- name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1332,7 +1605,10 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Alakazam"), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Alakazam"),
+ [people["person4"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[2],
@@ -1348,7 +1624,10 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[3],
@@ -1380,10 +1659,12 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid],
+ )
def test_funnel_step_breakdown_with_step_attribution(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1402,21 +1683,41 @@ def test_funnel_step_breakdown_with_step_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -1431,7 +1732,11 @@ def test_funnel_step_breakdown_with_step_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=[""], count=1),
FunnelStepResult(
- name="buy", breakdown=[""], count=1, average_conversion_time=86400, median_conversion_time=86400
+ name="buy",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=86400,
+ median_conversion_time=86400,
),
],
)
@@ -1443,7 +1748,11 @@ def test_funnel_step_breakdown_with_step_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=["0"], count=1),
FunnelStepResult(
- name="buy", breakdown=["0"], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=["0"],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1464,7 +1773,10 @@ def test_funnel_step_breakdown_with_step_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Chrome"),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[3],
@@ -1483,7 +1795,6 @@ def test_funnel_step_breakdown_with_step_attribution(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Mac"), [people["person3"].uuid])
def test_funnel_step_breakdown_with_step_one_attribution(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1502,21 +1813,41 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -1532,13 +1863,18 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
[
FunnelStepResult(name="sign up", breakdown=[""], count=2),
FunnelStepResult(
- name="buy", breakdown=[""], count=2, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=[""],
+ count=2,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 1, ""), [people["person1"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 1, ""),
+ [people["person1"].uuid, people["person3"].uuid],
)
self._assert_funnel_breakdown_result_is_correct(
@@ -1555,7 +1891,10 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "Safari"),
+ [people["person2"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[2],
@@ -1571,10 +1910,12 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "alakazam"),
+ [people["person4"].uuid],
+ )
def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1608,8 +1949,16 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": "no-mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$version": "no-mac"},
+ },
],
"person4": [
{
@@ -1617,7 +1966,11 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
"timestamp": datetime(2020, 1, 2, 15),
"properties": {"$browser": 0, "$version": 0},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
# no properties dude, represented by ''
"person5": [
@@ -1646,7 +1999,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["", ""]), [people["person5"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["", ""]),
+ [people["person5"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[1],
@@ -1661,7 +2017,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["0", "0"]), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["0", "0"]),
+ [people["person4"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[2],
@@ -1677,7 +2036,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[3],
@@ -1693,7 +2055,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), [people["person3"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Mac", ""]),
+ [people["person3"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[4],
@@ -1709,10 +2074,12 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]),
+ [people["person2"].uuid],
+ )
def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_funnel(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1746,7 +2113,11 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": "no-mac"}},
],
"person4": [
@@ -1784,7 +2155,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["", ""]), [people["person5"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["", ""]),
+ [people["person5"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[1],
@@ -1793,7 +2167,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
FunnelStepResult(name="buy", breakdown=["0", "0"], count=0),
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["0", "0"]), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["0", "0"]),
+ [people["person4"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[2],
@@ -1809,7 +2186,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), [people["person1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]),
+ [people["person1"].uuid],
+ )
self._assert_funnel_breakdown_result_is_correct(
result[3],
@@ -1819,7 +2199,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), [people["person3"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Mac", ""]),
+ [people["person3"].uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Mac", ""]), [])
self._assert_funnel_breakdown_result_is_correct(
@@ -1836,10 +2219,12 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]), [people["person2"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]),
+ [people["person2"].uuid],
+ )
def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1858,7 +2243,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
@@ -1866,13 +2255,25 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -1889,7 +2290,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
[
FunnelStepResult(name="sign up", breakdown=[""], count=1),
FunnelStepResult(
- name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1910,10 +2315,12 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "alakazam"),
+ [people["person4"].uuid],
+ )
def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_funnel(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -1932,7 +2339,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
@@ -1940,13 +2351,25 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -1963,7 +2386,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
[
FunnelStepResult(name="sign up", breakdown=[""], count=1),
FunnelStepResult(
- name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600
+ name="buy",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
),
],
)
@@ -1984,7 +2411,10 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1, "alakazam"),
+ [people["person4"].uuid],
+ )
@snapshot_clickhouse_queries
def test_funnel_step_multiple_breakdown_snapshot(self):
@@ -2023,8 +2453,16 @@ def test_funnel_step_multiple_breakdown_snapshot(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": "no-mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$version": "no-mac"},
+ },
],
"person4": [
{
@@ -2032,7 +2470,11 @@ def test_funnel_step_multiple_breakdown_snapshot(self):
"timestamp": datetime(2020, 1, 2, 15),
"properties": {"$browser": 0, "$version": 0},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
# no properties dude, represented by ''
"person5": [
@@ -2054,7 +2496,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self):
filters = {
"events": [
{"id": "sign up", "order": 0},
- {"id": "buy", "properties": [{"type": "event", "key": "$version", "value": "xyz"}], "order": 1},
+ {
+ "id": "buy",
+ "properties": [{"type": "event", "key": "$version", "value": "xyz"}],
+ "order": 1,
+ },
],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
@@ -2076,7 +2522,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self):
"timestamp": datetime(2020, 1, 1, 12),
"properties": {"$browser": "Chrome", "$version": "xyz"},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Chrome"},
+ },
# discarded at step 1 because doesn't meet criteria
],
"person2": [
@@ -2088,7 +2538,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{
"event": "buy",
"timestamp": datetime(2020, 1, 2, 15),
@@ -2108,7 +2562,10 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self):
self.assertEqual(len(result), 4)
- self.assertCountEqual([res[0]["breakdown"] for res in result], [["Mac"], ["Chrome"], ["Safari"], [""]])
+ self.assertCountEqual(
+ [res[0]["breakdown"] for res in result],
+ [["Mac"], ["Chrome"], ["Safari"], [""]],
+ )
@snapshot_clickhouse_queries
def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
@@ -2117,7 +2574,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
filters = {
"events": [
{"id": "sign up", "order": 0},
- {"id": "buy", "properties": [{"type": "event", "key": "$version", "value": "xyz"}], "order": 1},
+ {
+ "id": "buy",
+ "properties": [{"type": "event", "key": "$version", "value": "xyz"}],
+ "order": 1,
+ },
],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
@@ -2140,7 +2601,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
"timestamp": datetime(2020, 1, 1, 12),
"properties": {"$browser": "Chrome", "$version": "xyz"},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Chrome"},
+ },
# discarded because doesn't meet criteria
],
"person2": [
@@ -2152,7 +2617,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{
"event": "buy",
"timestamp": datetime(2020, 1, 2, 15),
@@ -2192,7 +2661,6 @@ def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str
"""
def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
-
return [{**step, "converted_people_url": None, "dropped_people_url": None} for step in steps]
assert len(left) == len(right)
@@ -2204,5 +2672,8 @@ def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
try:
assert item[key] == other[key]
except AssertionError as e:
- e.args += (f"failed comparing ${key}", f'Got "{item[key]}" and "{other[key]}"')
+ e.args += (
+ f"failed comparing ${key}",
+ f'Got "{item[key]}" and "{other[key]}"',
+ )
raise
diff --git a/posthog/queries/funnels/test/conversion_time_cases.py b/posthog/queries/funnels/test/conversion_time_cases.py
index 278dfd989a724..02e8167818373 100644
--- a/posthog/queries/funnels/test/conversion_time_cases.py
+++ b/posthog/queries/funnels/test/conversion_time_cases.py
@@ -34,14 +34,26 @@ def test_funnel_with_multiple_incomplete_tries(self):
{
"person1": [
# person1 completed funnel on 2021-05-01
- {"event": "user signed up", "timestamp": datetime(2021, 5, 1, 1)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2021, 5, 1, 1),
+ },
{"event": "$pageview", "timestamp": datetime(2021, 5, 1, 2)},
- {"event": "something else", "timestamp": datetime(2021, 5, 1, 3)},
+ {
+ "event": "something else",
+ "timestamp": datetime(2021, 5, 1, 3),
+ },
# person1 completed part of funnel on 2021-05-03 and took 2 hours to convert
- {"event": "user signed up", "timestamp": datetime(2021, 5, 3, 4)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2021, 5, 3, 4),
+ },
{"event": "$pageview", "timestamp": datetime(2021, 5, 3, 5)},
# person1 completed part of funnel on 2021-05-04 and took 3 hours to convert
- {"event": "user signed up", "timestamp": datetime(2021, 5, 4, 7)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2021, 5, 4, 7),
+ },
{"event": "$pageview", "timestamp": datetime(2021, 5, 4, 10)},
]
},
@@ -61,7 +73,11 @@ def test_funnel_with_multiple_incomplete_tries(self):
def test_funnel_step_conversion_times(self):
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
@@ -121,13 +137,27 @@ def test_funnel_times_with_different_conversion_windows(self):
people = journeys_for(
{
"stopped_after_signup1": [
- {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ },
{"event": "pageview", "timestamp": datetime(2020, 1, 2, 14, 5)},
],
- "stopped_after_signup2": [{"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14, 3)}],
+ "stopped_after_signup2": [
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 1, 2, 14, 3),
+ }
+ ],
"stopped_after_signup3": [
- {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 12)},
- {"event": "pageview", "timestamp": datetime(2020, 1, 2, 12, 15)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 1, 2, 12),
+ },
+ {
+ "event": "pageview",
+ "timestamp": datetime(2020, 1, 2, 12, 15),
+ },
],
},
self.team,
@@ -149,7 +179,10 @@ def test_funnel_times_with_different_conversion_windows(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 2),
- [people["stopped_after_signup1"].uuid, people["stopped_after_signup3"].uuid],
+ [
+ people["stopped_after_signup1"].uuid,
+ people["stopped_after_signup3"].uuid,
+ ],
)
filter = filter.shallow_clone({"funnel_window_interval": 5, "funnel_window_interval_unit": "minute"})
@@ -171,6 +204,9 @@ def test_funnel_times_with_different_conversion_windows(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [people["stopped_after_signup1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [people["stopped_after_signup1"].uuid],
+ )
return TestFunnelConversionTime
diff --git a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py
index 098d51ddecddd..bb6673387b64d 100644
--- a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py
+++ b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py
@@ -3,7 +3,11 @@
from posthog.models import Filter
from posthog.queries.funnels import ClickhouseFunnel
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
@@ -16,13 +20,19 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com", "$pathname": ""},
+ "properties": {
+ "$current_url": "https://example.com",
+ "$pathname": "",
+ },
},
# trailing question mark
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 2),
- "properties": {"$current_url": "https://example.com?", "$pathname": "?"},
+ "properties": {
+ "$current_url": "https://example.com?",
+ "$pathname": "?",
+ },
},
{
"event": "terminate funnel",
@@ -34,13 +44,19 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/", "$pathname": "/"},
+ "properties": {
+ "$current_url": "https://example.com/",
+ "$pathname": "/",
+ },
},
# trailing hash
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 2),
- "properties": {"$current_url": "https://example.com#", "$pathname": "#"},
+ "properties": {
+ "$current_url": "https://example.com#",
+ "$pathname": "#",
+ },
},
{
"event": "terminate funnel",
@@ -52,7 +68,10 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home", "$pathname": "/home"},
+ "properties": {
+ "$current_url": "https://example.com/home",
+ "$pathname": "/home",
+ },
},
{
"event": "terminate funnel",
@@ -64,19 +83,28 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home/", "$pathname": "/home/"},
+ "properties": {
+ "$current_url": "https://example.com/home/",
+ "$pathname": "/home/",
+ },
},
# trailing hash
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 2),
- "properties": {"$current_url": "https://example.com/home#", "$pathname": "/home#"},
+ "properties": {
+ "$current_url": "https://example.com/home#",
+ "$pathname": "/home#",
+ },
},
# all the things
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 3),
- "properties": {"$current_url": "https://example.com/home/?#", "$pathname": "/home/?#"},
+ "properties": {
+ "$current_url": "https://example.com/home/?#",
+ "$pathname": "/home/?#",
+ },
},
{
"event": "terminate funnel",
@@ -92,7 +120,13 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}):
Filter(
data={
"events": [
- {"id": "watched movie", "name": "watched movie", "type": "events", "order": 0, **events_extra},
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ **events_extra,
+ },
{
"id": "terminate funnel",
"name": "terminate funnel",
@@ -115,12 +149,24 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}):
@snapshot_clickhouse_queries
def test_breakdown_by_pathname(self) -> None:
- response = self._run({"breakdown": "$pathname", "breakdown_type": "event", "breakdown_normalize_url": True})
+ response = self._run(
+ {
+ "breakdown": "$pathname",
+ "breakdown_type": "event",
+ "breakdown_normalize_url": True,
+ }
+ )
actual = []
for breakdown_value in response:
for funnel_step in breakdown_value:
- actual.append((funnel_step["name"], funnel_step["count"], funnel_step["breakdown"]))
+ actual.append(
+ (
+ funnel_step["name"],
+ funnel_step["count"],
+ funnel_step["breakdown"],
+ )
+ )
assert actual == [
("watched movie", 2, ["/"]),
@@ -131,12 +177,24 @@ def test_breakdown_by_pathname(self) -> None:
@snapshot_clickhouse_queries
def test_breakdown_by_current_url(self) -> None:
- response = self._run({"breakdown": "$current_url", "breakdown_type": "event", "breakdown_normalize_url": True})
+ response = self._run(
+ {
+ "breakdown": "$current_url",
+ "breakdown_type": "event",
+ "breakdown_normalize_url": True,
+ }
+ )
actual = []
for breakdown_value in response:
for funnel_step in breakdown_value:
- actual.append((funnel_step["name"], funnel_step["count"], funnel_step["breakdown"]))
+ actual.append(
+ (
+ funnel_step["name"],
+ funnel_step["count"],
+ funnel_step["breakdown"],
+ )
+ )
assert actual == [
("watched movie", 2, ["https://example.com/home"]),
diff --git a/posthog/queries/funnels/test/test_funnel.py b/posthog/queries/funnels/test/test_funnel.py
index deddf642a4c50..334f0dc9c41c0 100644
--- a/posthog/queries/funnels/test/test_funnel.py
+++ b/posthog/queries/funnels/test/test_funnel.py
@@ -13,8 +13,13 @@
from posthog.models.filters import Filter
from posthog.models.instance_setting import get_instance_setting
from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelActors
-from posthog.queries.funnels.test.breakdown_cases import assert_funnel_results_equal, funnel_breakdown_test_factory
-from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory
+from posthog.queries.funnels.test.breakdown_cases import (
+ assert_funnel_results_equal,
+ funnel_breakdown_test_factory,
+)
+from posthog.queries.funnels.test.conversion_time_cases import (
+ funnel_conversion_time_test_factory,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -36,12 +41,26 @@ def _create_action(**kwargs):
return action
-class TestFunnelBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_action, _create_person)): # type: ignore
+class TestFunnelBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_test_factory( # type: ignore
+ ClickhouseFunnel,
+ ClickhouseFunnelActors,
+ _create_event,
+ _create_action,
+ _create_person,
+ ),
+):
maxDiff = None
pass
-class TestFunnelConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_person)): # type: ignore
+class TestFunnelConversionTime(
+ ClickhouseTestMixin,
+ funnel_conversion_time_test_factory( # type: ignore
+ ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_person
+ ),
+):
maxDiff = None
pass
@@ -96,10 +115,18 @@ def _single_step_funnel(self, properties=None, filters=None):
def _basic_funnel(self, properties=None, filters=None):
action_credit_card = Action.objects.create(team=self.team, name="paid")
ActionStep.objects.create(
- action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10"
+ action=action_credit_card,
+ event="$autocapture",
+ tag_name="button",
+ text="Pay $10",
)
action_play_movie = Action.objects.create(team=self.team, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
if filters is None:
filters = {
@@ -157,7 +184,10 @@ def test_funnel_events(self):
self._signup_event(distinct_id="stopped_after_pay")
self._pay_event(distinct_id="stopped_after_pay")
- person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk)
+ person_factory(
+ distinct_ids=["had_anonymous_id", "completed_movie"],
+ team_id=self.team.pk,
+ )
self._signup_event(distinct_id="had_anonymous_id")
self._pay_event(distinct_id="completed_movie")
self._movie_event(distinct_id="completed_movie")
@@ -193,18 +223,30 @@ def test_funnel_events_with_person_on_events_v2(self):
# events
stopped_after_signup_person_id = uuid.uuid4()
person_factory(distinct_ids=["stopped_after_signup"], team_id=self.team.pk)
- self._signup_event(distinct_id="stopped_after_signup", person_id=stopped_after_signup_person_id)
+ self._signup_event(
+ distinct_id="stopped_after_signup",
+ person_id=stopped_after_signup_person_id,
+ )
with freeze_time("2012-01-01T03:21:36.000Z"):
stopped_after_pay_person_id = uuid.uuid4()
person_factory(distinct_ids=["stopped_after_pay"], team_id=self.team.pk)
- self._signup_event(distinct_id="stopped_after_pay", person_id=stopped_after_pay_person_id)
+ self._signup_event(
+ distinct_id="stopped_after_pay",
+ person_id=stopped_after_pay_person_id,
+ )
with freeze_time("2012-01-01T03:21:37.000Z"):
- self._pay_event(distinct_id="stopped_after_pay", person_id=stopped_after_pay_person_id)
+ self._pay_event(
+ distinct_id="stopped_after_pay",
+ person_id=stopped_after_pay_person_id,
+ )
with freeze_time("2012-01-01T03:21:38.000Z"):
had_anonymous_id_person_id = uuid.uuid4()
- person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk)
+ person_factory(
+ distinct_ids=["had_anonymous_id", "completed_movie"],
+ team_id=self.team.pk,
+ )
self._signup_event(distinct_id="had_anonymous_id", person_id=had_anonymous_id_person_id)
with freeze_time("2012-01-01T03:21:39.000Z"):
self._pay_event(distinct_id="completed_movie", person_id=had_anonymous_id_person_id)
@@ -243,7 +285,12 @@ def test_funnel_events_with_person_on_events_v2(self):
def test_funnel_with_messed_up_order(self):
action_play_movie = Action.objects.create(team=self.team, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
funnel = self._basic_funnel(
filters={
@@ -261,7 +308,10 @@ def test_funnel_with_messed_up_order(self):
self._signup_event(distinct_id="stopped_after_pay")
self._movie_event(distinct_id="completed_movie")
- person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk)
+ person_factory(
+ distinct_ids=["had_anonymous_id", "completed_movie"],
+ team_id=self.team.pk,
+ )
self._signup_event(distinct_id="had_anonymous_id")
self._movie_event(distinct_id="completed_movie")
@@ -323,7 +373,12 @@ def test_funnel_with_any_event(self):
def test_funnel_with_new_entities_that_mess_up_order(self):
action_play_movie = Action.objects.create(team=self.team, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
funnel = self._basic_funnel(
filters={
@@ -345,7 +400,10 @@ def test_funnel_with_new_entities_that_mess_up_order(self):
self._signup_event(distinct_id="stopped_after_pay")
self._movie_event(distinct_id="completed_movie")
- person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk)
+ person_factory(
+ distinct_ids=["had_anonymous_id", "completed_movie"],
+ team_id=self.team.pk,
+ )
self._signup_event(distinct_id="had_anonymous_id")
self._movie_event(distinct_id="completed_movie")
@@ -405,10 +463,18 @@ def test_funnel_prop_filters(self):
def test_funnel_prop_filters_per_entity(self):
action_credit_card = Action.objects.create(team_id=self.team.pk, name="paid")
ActionStep.objects.create(
- action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10"
+ action=action_credit_card,
+ event="$autocapture",
+ tag_name="button",
+ text="Pay $10",
)
action_play_movie = Action.objects.create(team_id=self.team.pk, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
filters = {
"events": [
{
@@ -417,7 +483,11 @@ def test_funnel_prop_filters_per_entity(self):
"order": 0,
"properties": [
{"key": "$browser", "value": "Safari"},
- {"key": "$browser", "operator": "is_not", "value": "Chrome"},
+ {
+ "key": "$browser",
+ "operator": "is_not",
+ "value": "Chrome",
+ },
],
}
],
@@ -440,7 +510,11 @@ def test_funnel_prop_filters_per_entity(self):
funnel = self._basic_funnel(filters=filters)
# events
- person_factory(distinct_ids=["with_property"], team_id=self.team.pk, properties={"$browser": "Safari"})
+ person_factory(
+ distinct_ids=["with_property"],
+ team_id=self.team.pk,
+ properties={"$browser": "Safari"},
+ )
self._signup_event(distinct_id="with_property", properties={"$browser": "Safari"})
self._pay_event(distinct_id="with_property", properties={"$browser": "Safari"})
self._movie_event(distinct_id="with_property")
@@ -466,17 +540,31 @@ def test_funnel_prop_filters_per_entity(self):
def test_funnel_person_prop(self):
action_credit_card = Action.objects.create(team_id=self.team.pk, name="paid")
ActionStep.objects.create(
- action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10"
+ action=action_credit_card,
+ event="$autocapture",
+ tag_name="button",
+ text="Pay $10",
)
action_play_movie = Action.objects.create(team_id=self.team.pk, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
filters = {
"events": [
{
"id": "user signed up",
"type": "events",
"order": 0,
- "properties": [{"key": "email", "value": "hello@posthog.com", "type": "person"}],
+ "properties": [
+ {
+ "key": "email",
+ "value": "hello@posthog.com",
+ "type": "person",
+ }
+ ],
}
],
"actions": [
@@ -489,7 +577,9 @@ def test_funnel_person_prop(self):
# events
person_factory(
- distinct_ids=["with_property"], team_id=self.team.pk, properties={"email": "hello@posthog.com"}
+ distinct_ids=["with_property"],
+ team_id=self.team.pk,
+ properties={"email": "hello@posthog.com"},
)
self._signup_event(distinct_id="with_property")
self._pay_event(distinct_id="with_property")
@@ -507,18 +597,34 @@ def test_funnel_multiple_actions(self):
# This test prevents a regression
person_factory(distinct_ids=["person1"], team_id=self.team.pk)
event_factory(distinct_id="person1", event="event1", team=self.team)
- event_factory(distinct_id="person1", event="event2", properties={"test_propX": "a"}, team=self.team)
+ event_factory(
+ distinct_id="person1",
+ event="event2",
+ properties={"test_propX": "a"},
+ team=self.team,
+ )
action1 = Action.objects.create(team_id=self.team.pk, name="event2")
- ActionStep.objects.create(action=action1, event="event2", properties=[{"key": "test_propX", "value": "a"}])
+ ActionStep.objects.create(
+ action=action1,
+ event="event2",
+ properties=[{"key": "test_propX", "value": "a"}],
+ )
action2 = Action.objects.create(team_id=self.team.pk, name="event2")
- ActionStep.objects.create(action=action2, event="event2", properties=[{"key": "test_propX", "value": "c"}])
+ ActionStep.objects.create(
+ action=action2,
+ event="event2",
+ properties=[{"key": "test_propX", "value": "c"}],
+ )
result = Funnel(
filter=Filter(
data={
"events": [{"id": "event1", "order": 0}],
- "actions": [{"id": action1.pk, "order": 1}, {"id": action2.pk, "order": 2}],
+ "actions": [
+ {"id": action1.pk, "order": 1},
+ {"id": action2.pk, "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"funnel_window_days": 14,
}
@@ -531,7 +637,11 @@ def test_funnel_multiple_actions(self):
@also_test_with_materialized_columns(person_properties=["email"])
def test_funnel_filter_test_accounts(self):
- person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"})
+ person_factory(
+ distinct_ids=["person1"],
+ team_id=self.team.pk,
+ properties={"email": "test@posthog.com"},
+ )
person_factory(distinct_ids=["person2"], team_id=self.team.pk)
event_factory(distinct_id="person1", event="event1", team=self.team)
event_factory(distinct_id="person2", event="event1", team=self.team)
@@ -551,8 +661,16 @@ def test_funnel_filter_test_accounts(self):
@also_test_with_materialized_columns(person_properties=["email"])
def test_funnel_with_entity_person_property_filters(self):
- person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"})
- person_factory(distinct_ids=["person2"], team_id=self.team.pk, properties={"email": "another@example.com"})
+ person_factory(
+ distinct_ids=["person1"],
+ team_id=self.team.pk,
+ properties={"email": "test@posthog.com"},
+ )
+ person_factory(
+ distinct_ids=["person2"],
+ team_id=self.team.pk,
+ properties={"email": "another@example.com"},
+ )
person_factory(distinct_ids=["person3"], team_id=self.team.pk)
event_factory(distinct_id="person1", event="event1", team=self.team)
event_factory(distinct_id="person2", event="event1", team=self.team)
@@ -566,7 +684,12 @@ def test_funnel_with_entity_person_property_filters(self):
"id": "event1",
"order": 0,
"properties": [
- {"key": "email", "value": "is_set", "operator": "is_set", "type": "person"}
+ {
+ "key": "email",
+ "value": "is_set",
+ "operator": "is_set",
+ "type": "person",
+ }
],
}
],
@@ -580,8 +703,16 @@ def test_funnel_with_entity_person_property_filters(self):
@also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False)
def test_funnel_filter_by_action_with_person_properties(self):
- person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"})
- person_factory(distinct_ids=["person2"], team_id=self.team.pk, properties={"email": "another@example.com"})
+ person_factory(
+ distinct_ids=["person1"],
+ team_id=self.team.pk,
+ properties={"email": "test@posthog.com"},
+ )
+ person_factory(
+ distinct_ids=["person2"],
+ team_id=self.team.pk,
+ properties={"email": "another@example.com"},
+ )
person_factory(distinct_ids=["person3"], team_id=self.team.pk)
event_factory(distinct_id="person1", event="event1", team=self.team)
event_factory(distinct_id="person2", event="event1", team=self.team)
@@ -591,7 +722,14 @@ def test_funnel_filter_by_action_with_person_properties(self):
ActionStep.objects.create(
action=action,
event="event1",
- properties=[{"key": "email", "value": "is_set", "operator": "is_set", "type": "person"}],
+ properties=[
+ {
+ "key": "email",
+ "value": "is_set",
+ "operator": "is_set",
+ "type": "person",
+ }
+ ],
)
result = Funnel(
@@ -624,9 +762,17 @@ def test_basic_funnel_default_funnel_days(self):
# event
_create_person(distinct_ids=["user_1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_1",
+ timestamp="2020-01-02T14:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="user_1",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z")
result = funnel.run()
@@ -653,11 +799,23 @@ def test_basic_funnel_with_repeat_steps(self):
person1_stopped_after_two_signups = _create_person(
distinct_ids=["stopped_after_signup1"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ )
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ )
person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup2")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup2",
+ )
result = funnel.run()
self.assertEqual(result[0]["name"], "user signed up")
@@ -667,16 +825,27 @@ def test_basic_funnel_with_repeat_steps(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
@also_test_with_materialized_columns(["key"])
def test_basic_funnel_with_derivative_steps(self):
filters = {
"events": [
- {"id": "user signed up", "type": "events", "order": 0, "properties": {"key": "val"}},
+ {
+ "id": "user signed up",
+ "type": "events",
+ "order": 0,
+ "properties": {"key": "val"},
+ },
{"id": "user signed up", "type": "events", "order": 1},
],
"insight": INSIGHT_FUNNELS,
@@ -691,13 +860,23 @@ def test_basic_funnel_with_derivative_steps(self):
distinct_ids=["stopped_after_signup1"], team_id=self.team.pk
)
_create_event(
- team=self.team, event="user signed up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
+ )
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1")
person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="stopped_after_signup2", properties={"key": "val"}
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup2",
+ properties={"key": "val"},
)
result = funnel.run()
@@ -708,15 +887,24 @@ def test_basic_funnel_with_derivative_steps(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
def test_basic_funnel_with_repeat_step_updated_param(self):
people = journeys_for(
{
- "stopped_after_signup1": [{"event": "user signed up"}, {"event": "user signed up"}],
+ "stopped_after_signup1": [
+ {"event": "user signed up"},
+ {"event": "user signed up"},
+ ],
"stopped_after_signup2": [{"event": "user signed up"}],
},
self.team,
@@ -743,10 +931,16 @@ def test_basic_funnel_with_repeat_step_updated_param(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [people["stopped_after_signup1"].uuid, people["stopped_after_signup2"].uuid],
+ [
+ people["stopped_after_signup1"].uuid,
+ people["stopped_after_signup2"].uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [people["stopped_after_signup1"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [people["stopped_after_signup1"].uuid],
+ )
filters = {
"events": [
@@ -792,7 +986,12 @@ def test_funnel_exclusions_full_window(self):
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
"exclusions": [
- {"id": "x 1 name with numbers 2", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}
+ {
+ "id": "x 1 name with numbers 2",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
],
}
filter = Filter(data=filters)
@@ -801,26 +1000,53 @@ def test_funnel_exclusions_full_window(self):
# event 1
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00")
# event 2
_create_person(distinct_ids=["person2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x 1 name with numbers 2",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:30:00",
)
_create_event(
- team=self.team, event="x 1 name with numbers 2", distinct_id="person2", timestamp="2021-05-01 03:30:00"
+ team=self.team,
+ event="paid",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00")
# event 3
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00")
result = funnel.run()
self.assertEqual(len(result), 2)
@@ -845,7 +1071,14 @@ def test_advanced_funnel_exclusions_between_steps(self):
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
"insight": INSIGHT_FUNNELS,
- "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}],
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
+ ],
}
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
@@ -853,53 +1086,145 @@ def test_advanced_funnel_exclusions_between_steps(self):
# this dude is discarded when funnel_from_step = 2
# this dude is discarded when funnel_from_step = 3
_create_event(
- team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person1",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person1",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 03:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person1",
+ timestamp="2021-05-01 05:30:00",
)
- _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 04:30:00")
_create_event(
- team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person1",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00")
person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk)
# this dude is discarded when funnel_from_step = 2
# this dude is discarded when funnel_from_step = 3
_create_event(
- team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person2",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:30:00",
)
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 04:30:00")
_create_event(
- team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person2",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00")
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
# this dude is discarded when funnel_from_step = 0
# this dude is discarded when funnel_from_step = 3
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person3",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person3",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:30:00",
)
_create_event(
- team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00")
filter = Filter(data=filters)
funnel = Funnel(filter, self.team)
@@ -914,7 +1239,16 @@ def test_advanced_funnel_exclusions_between_steps(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person1.uuid, person2.uuid])
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 2}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 1,
+ "funnel_to_step": 2,
+ }
+ ]
+ }
)
funnel = Funnel(filter, self.team)
@@ -928,7 +1262,16 @@ def test_advanced_funnel_exclusions_between_steps(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person2.uuid, person3.uuid])
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 2, "funnel_to_step": 3}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 2,
+ "funnel_to_step": 3,
+ }
+ ]
+ }
)
funnel = Funnel(filter, self.team)
@@ -942,7 +1285,16 @@ def test_advanced_funnel_exclusions_between_steps(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person3.uuid])
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 3, "funnel_to_step": 4}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 3,
+ "funnel_to_step": 4,
+ }
+ ]
+ }
)
funnel = Funnel(filter, self.team)
@@ -957,7 +1309,16 @@ def test_advanced_funnel_exclusions_between_steps(self):
# bigger step window
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 3}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 1,
+ "funnel_to_step": 3,
+ }
+ ]
+ }
)
funnel = Funnel(filter, self.team)
@@ -988,7 +1349,10 @@ def test_advanced_funnel_with_repeat_steps(self):
people = journeys_for(
{
"stopped_after_signup1": [{"event": "user signed up"}],
- "stopped_after_pageview1": [{"event": "user signed up"}, {"event": "$pageview"}],
+ "stopped_after_pageview1": [
+ {"event": "user signed up"},
+ {"event": "$pageview"},
+ ],
"stopped_after_pageview2": [
{"event": "user signed up"},
{"event": "$pageview"},
@@ -1062,11 +1426,17 @@ def test_advanced_funnel_with_repeat_steps(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 4),
- [people["stopped_after_pageview3"].uuid, people["stopped_after_pageview4"].uuid],
- )
-
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), [people["stopped_after_pageview4"].uuid])
-
+ [
+ people["stopped_after_pageview3"].uuid,
+ people["stopped_after_pageview4"].uuid,
+ ],
+ )
+
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 5),
+ [people["stopped_after_pageview4"].uuid],
+ )
+
def test_advanced_funnel_with_repeat_steps_out_of_order_events(self):
filters = {
"events": [
@@ -1088,45 +1458,85 @@ def test_advanced_funnel_with_repeat_steps_out_of_order_events(self):
distinct_ids=["random", "stopped_after_signup1"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="random")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ )
person2_stopped_after_one_pageview = _create_person(
distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview1",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1")
person3_stopped_after_two_pageview = _create_person(
distinct_ids=["stopped_after_pageview2"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview2")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview2")
- _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview2")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview2",
+ )
+ _create_event(
+ team=self.team,
+ event="blaah blaa",
+ distinct_id="stopped_after_pageview2",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview2")
person4_stopped_after_three_pageview = _create_person(
distinct_ids=["stopped_after_pageview3"], team_id=self.team.pk
)
- _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview3")
+ _create_event(
+ team=self.team,
+ event="blaah blaa",
+ distinct_id="stopped_after_pageview3",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3")
- _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview3")
+ _create_event(
+ team=self.team,
+ event="blaah blaa",
+ distinct_id="stopped_after_pageview3",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview3")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview3",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3")
person5_stopped_after_many_pageview = _create_person(
distinct_ids=["stopped_after_pageview4"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview4")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview4",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4")
- _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview4")
+ _create_event(
+ team=self.team,
+ event="blaah blaa",
+ distinct_id="stopped_after_pageview4",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4")
_create_person(distinct_ids=["stopped_after_pageview5"], team_id=self.team.pk)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5")
- _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview5")
+ _create_event(
+ team=self.team,
+ event="blaah blaa",
+ distinct_id="stopped_after_pageview5",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5")
@@ -1168,19 +1578,34 @@ def test_advanced_funnel_with_repeat_steps_out_of_order_events(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person5_stopped_after_many_pageview.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 3),
+ [person5_stopped_after_many_pageview.uuid],
+ )
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person5_stopped_after_many_pageview.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 4),
+ [person5_stopped_after_many_pageview.uuid],
+ )
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), [person5_stopped_after_many_pageview.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 5),
+ [person5_stopped_after_many_pageview.uuid],
+ )
@also_test_with_materialized_columns(["key"])
def test_funnel_with_actions(self):
-
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}],
+ properties=[
+ {
+ "key": "key",
+ "type": "event",
+ "value": ["val"],
+ "operator": "exact",
+ }
+ ],
)
filters = {
@@ -1199,15 +1624,24 @@ def test_funnel_with_actions(self):
distinct_ids=["stopped_after_signup1"], team_id=self.team.pk
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup2",
+ properties={"key": "val"},
)
result = funnel.run()
@@ -1220,17 +1654,29 @@ def test_funnel_with_actions(self):
# check ordering of people in first step
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
def test_funnel_with_different_actions_at_same_time_count_as_converted(self):
-
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}],
+ properties=[
+ {
+ "key": "key",
+ "type": "event",
+ "value": ["val"],
+ "operator": "exact",
+ }
+ ],
)
filters = {
@@ -1250,17 +1696,26 @@ def test_funnel_with_different_actions_at_same_time_count_as_converted(self):
distinct_ids=["stopped_after_signup1"], team_id=self.team.pk
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
person2_stopped_after_signup = _create_person(
distinct_ids=["stopped_after_signup2"], team_id=self.team.pk
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup2",
+ properties={"key": "val"},
)
result = funnel.run()
@@ -1273,16 +1728,29 @@ def test_funnel_with_different_actions_at_same_time_count_as_converted(self):
# check ordering of people in first step
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
def test_funnel_with_actions_and_props(self):
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}],
+ properties=[
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".com",
+ "type": "person",
+ }
+ ],
)
filters = {
@@ -1298,20 +1766,33 @@ def test_funnel_with_actions_and_props(self):
# event
person1_stopped_after_two_signups = _create_person(
- distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"email": "fake@test.com"}
+ distinct_ids=["stopped_after_signup1"],
+ team_id=self.team.pk,
+ properties={"email": "fake@test.com"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
person2_stopped_after_signup = _create_person(
- distinct_ids=["stopped_after_signup2"], team_id=self.team.pk, properties={"email": "fake@test.com"}
+ distinct_ids=["stopped_after_signup2"],
+ team_id=self.team.pk,
+ properties={"email": "fake@test.com"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup2",
+ properties={"key": "val"},
)
result = funnel.run()
@@ -1324,13 +1805,18 @@ def test_funnel_with_actions_and_props(self):
# check ordering of people in first step
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
def test_funnel_with_actions_and_props_with_zero_person_ids(self):
-
# only a person-on-event test
if not get_instance_setting("PERSON_ON_EVENTS_ENABLED"):
return True
@@ -1338,7 +1824,14 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self):
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}],
+ properties=[
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".com",
+ "type": "person",
+ }
+ ],
)
filters = {
@@ -1354,20 +1847,33 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self):
# event
person1_stopped_after_two_signups = _create_person(
- distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"email": "fake@test.com"}
+ distinct_ids=["stopped_after_signup1"],
+ team_id=self.team.pk,
+ properties={"email": "fake@test.com"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup1",
+ properties={"key": "val"},
)
person2_stopped_after_signup = _create_person(
- distinct_ids=["stopped_after_signup2"], team_id=self.team.pk, properties={"email": "fake@test.com"}
+ distinct_ids=["stopped_after_signup2"],
+ team_id=self.team.pk,
+ properties={"email": "fake@test.com"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_signup2",
+ properties={"key": "val"},
)
_create_event(
@@ -1395,19 +1901,31 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self):
# check ordering of people in first step
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 2),
+ [person1_stopped_after_two_signups.uuid],
+ )
@also_test_with_materialized_columns(["key"])
@skip("Flaky funnel test")
def test_funnel_with_actions_and_events(self):
-
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}],
+ properties=[
+ {
+ "key": "key",
+ "type": "event",
+ "value": ["val"],
+ "operator": "exact",
+ }
+ ],
)
filters = {
@@ -1479,7 +1997,10 @@ def test_funnel_with_actions_and_events(self):
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 00:00:07"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 00:00:07",
)
_create_event(
team=self.team,
@@ -1489,7 +2010,10 @@ def test_funnel_with_actions_and_events(self):
timestamp="2021-05-01 00:00:08",
)
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 00:00:09"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 00:00:09",
)
_create_event(
team=self.team,
@@ -1501,7 +2025,10 @@ def test_funnel_with_actions_and_events(self):
person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 00:00:11"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person4",
+ timestamp="2021-05-01 00:00:11",
)
_create_event(
team=self.team,
@@ -1511,7 +2038,10 @@ def test_funnel_with_actions_and_events(self):
timestamp="2021-05-01 00:00:12",
)
_create_event(
- team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 00:00:13"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person4",
+ timestamp="2021-05-01 00:00:13",
)
_create_person(distinct_ids=["person5"], team_id=self.team.pk)
@@ -1535,33 +2065,58 @@ def test_funnel_with_actions_and_events(self):
# check ordering of people in steps
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 1),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid, person4.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ person3.uuid,
+ person4.uuid,
+ ],
)
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 2),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid, person4.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ person3.uuid,
+ person4.uuid,
+ ],
)
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 3),
- [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid],
+ [
+ person1_stopped_after_two_signups.uuid,
+ person2_stopped_after_signup.uuid,
+ person3.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person1_stopped_after_two_signups.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 4),
+ [person1_stopped_after_two_signups.uuid],
+ )
@also_test_with_materialized_columns(["$current_url"])
def test_funnel_with_matching_properties(self):
filters = {
"events": [
{"id": "user signed up", "order": 0},
- {"id": "$pageview", "order": 1, "properties": {"$current_url": "aloha.com"}},
+ {
+ "id": "$pageview",
+ "order": 1,
+ "properties": {"$current_url": "aloha.com"},
+ },
{
"id": "$pageview",
"order": 2,
"properties": {"$current_url": "aloha2.com"},
}, # different event to above
- {"id": "$pageview", "order": 3, "properties": {"$current_url": "aloha2.com"}},
+ {
+ "id": "$pageview",
+ "order": 3,
+ "properties": {"$current_url": "aloha2.com"},
+ },
{"id": "$pageview", "order": 4},
],
"insight": INSIGHT_FUNNELS,
@@ -1577,28 +2132,61 @@ def test_funnel_with_matching_properties(self):
"stopped_after_signup1": [{"event": "user signed up"}],
"stopped_after_pageview1": [
{"event": "user signed up"},
- {"event": "$pageview", "properties": {"$current_url": "aloha.com"}},
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha.com"},
+ },
],
"stopped_after_pageview2": [
{"event": "user signed up"},
- {"event": "$pageview", "properties": {"$current_url": "aloha.com"}},
- {"event": "blaah blaa", "properties": {"$current_url": "aloha.com"}},
- {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}},
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha.com"},
+ },
+ {
+ "event": "blaah blaa",
+ "properties": {"$current_url": "aloha.com"},
+ },
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha2.com"},
+ },
],
"stopped_after_pageview3": [
{"event": "user signed up"},
- {"event": "$pageview", "properties": {"$current_url": "aloha.com"}},
- {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}},
- {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}},
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha.com"},
+ },
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha2.com"},
+ },
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha2.com"},
+ },
{"event": "blaah blaa"},
],
"stopped_after_pageview4": [
{"event": "user signed up"},
- {"event": "$pageview", "properties": {"$current_url": "aloha.com"}},
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha.com"},
+ },
{"event": "blaah blaa"},
- {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}},
- {"event": "$pageview", "properties": {"$current_url": "aloha.com"}},
- {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}},
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha2.com"},
+ },
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha.com"},
+ },
+ {
+ "event": "$pageview",
+ "properties": {"$current_url": "aloha2.com"},
+ },
],
},
self.team,
@@ -1647,7 +2235,10 @@ def test_funnel_with_matching_properties(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 4),
- [people["stopped_after_pageview3"].uuid, people["stopped_after_pageview4"].uuid],
+ [
+ people["stopped_after_pageview3"].uuid,
+ people["stopped_after_pageview4"].uuid,
+ ],
)
self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), [])
@@ -1658,19 +2249,31 @@ def test_funnel_conversion_window(self):
person = _create_person(distinct_ids=[f"user_{i}"], team=self.team)
ids_to_compare.append(str(person.uuid))
_create_event(
- event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00"
+ event="step one",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:00",
)
_create_event(
- event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-02 00:00:00"
+ event="step two",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-02 00:00:00",
)
for i in range(10, 25):
_create_person(distinct_ids=[f"user_{i}"], team=self.team)
_create_event(
- event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00"
+ event="step one",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:00",
)
_create_event(
- event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-10 00:00:00"
+ event="step two",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-10 00:00:00",
)
data = {
@@ -1693,7 +2296,10 @@ def test_funnel_conversion_window(self):
self.assertEqual(results[1]["count"], 10)
self.assertEqual(results[2]["count"], 0)
- self.assertCountEqual([str(id) for id in self._get_actor_ids_at_step(filter, 2)], ids_to_compare)
+ self.assertCountEqual(
+ [str(id) for id in self._get_actor_ids_at_step(filter, 2)],
+ ids_to_compare,
+ )
@snapshot_clickhouse_queries
def test_funnel_conversion_window_seconds(self):
@@ -1702,19 +2308,31 @@ def test_funnel_conversion_window_seconds(self):
person = _create_person(distinct_ids=[f"user_{i}"], team=self.team)
ids_to_compare.append(str(person.uuid))
_create_event(
- event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00"
+ event="step one",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:00",
)
_create_event(
- event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:10"
+ event="step two",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:10",
)
for i in range(10, 25):
_create_person(distinct_ids=[f"user_{i}"], team=self.team)
_create_event(
- event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00"
+ event="step one",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:00",
)
_create_event(
- event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:20"
+ event="step two",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:20",
)
data = {
@@ -1737,7 +2355,10 @@ def test_funnel_conversion_window_seconds(self):
self.assertEqual(results[1]["count"], 10)
self.assertEqual(results[2]["count"], 0)
- self.assertCountEqual([str(id) for id in self._get_actor_ids_at_step(filter, 2)], ids_to_compare)
+ self.assertCountEqual(
+ [str(id) for id in self._get_actor_ids_at_step(filter, 2)],
+ ids_to_compare,
+ )
def test_funnel_exclusions_invalid_params(self):
filters = {
@@ -1749,23 +2370,57 @@ def test_funnel_exclusions_invalid_params(self):
"funnel_window_days": 14,
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
- "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 1}],
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 1,
+ "funnel_to_step": 1,
+ }
+ ],
}
filter = Filter(data=filters)
self.assertRaises(ValidationError, lambda: Funnel(filter, self.team))
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 2}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 1,
+ "funnel_to_step": 2,
+ }
+ ]
+ }
)
self.assertRaises(ValidationError, lambda: Funnel(filter, self.team))
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 2, "funnel_to_step": 1}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 2,
+ "funnel_to_step": 1,
+ }
+ ]
+ }
)
self.assertRaises(ValidationError, lambda: Funnel(filter, self.team))
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 2}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 2,
+ }
+ ]
+ }
)
self.assertRaises(ValidationError, lambda: Funnel(filter, self.team))
@@ -1779,7 +2434,14 @@ def test_funnel_exclusion_no_end_event(self):
"funnel_window_days": 1,
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
- "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}],
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
+ ],
}
filter = Filter(data=filters)
funnel = Funnel(filter, self.team)
@@ -1787,32 +2449,69 @@ def test_funnel_exclusion_no_end_event(self):
# event 1
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00")
# event 2
_create_person(distinct_ids=["person2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
+
+ # event 3
+ _create_person(distinct_ids=["person3"], team_id=self.team.pk)
+ # should be discarded, even if nothing happened after x, since within conversion window
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 03:30:00")
- _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00")
-
- # event 3
- _create_person(distinct_ids=["person3"], team_id=self.team.pk)
- # should be discarded, even if nothing happened after x, since within conversion window
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 06:00:00")
# event 4 - outside conversion window
person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 07:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person4",
+ timestamp="2021-05-01 07:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person4",
+ timestamp="2021-05-02 08:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person4", timestamp="2021-05-02 08:00:00")
result = funnel.run()
self.assertEqual(len(result), 2)
@@ -1827,11 +2526,17 @@ def test_funnel_exclusion_no_end_event(self):
@also_test_with_materialized_columns(["key"])
def test_funnel_exclusions_with_actions(self):
-
sign_up_action = _create_action(
name="sign up",
team=self.team,
- properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}],
+ properties=[
+ {
+ "key": "key",
+ "type": "event",
+ "value": ["val"],
+ "operator": "exact",
+ }
+ ],
)
filters = {
@@ -1844,7 +2549,12 @@ def test_funnel_exclusions_with_actions(self):
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
"exclusions": [
- {"id": sign_up_action.id, "type": "actions", "funnel_from_step": 0, "funnel_to_step": 1}
+ {
+ "id": sign_up_action.id,
+ "type": "actions",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
],
}
filter = Filter(data=filters)
@@ -1853,14 +2563,25 @@ def test_funnel_exclusions_with_actions(self):
# event 1
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00")
# event 2
_create_person(distinct_ids=["person2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:00:00",
)
_create_event(
team=self.team,
@@ -1869,14 +2590,27 @@ def test_funnel_exclusions_with_actions(self):
properties={"key": "val"},
timestamp="2021-05-01 03:30:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00")
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
# event 3
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00")
result = funnel.run()
self.assertEqual(len(result), 2)
@@ -1919,7 +2653,12 @@ def test_funnel_with_denormalised_properties(self):
timestamp="2020-01-02T14:00:00Z",
properties={"test_prop": "hi"},
)
- _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z")
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="user_1",
+ timestamp="2020-01-10T14:00:00Z",
+ )
result = funnel.run()
@@ -1939,69 +2678,190 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
"date_to": "2021-05-14 00:00:00",
"insight": INSIGHT_FUNNELS,
"exclusions": [
- {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1},
- {"id": "y", "type": "events", "funnel_from_step": 2, "funnel_to_step": 3},
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ },
+ {
+ "id": "y",
+ "type": "events",
+ "funnel_from_step": 2,
+ "funnel_to_step": 3,
+ },
],
}
_create_person(distinct_ids=["person1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person1",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 03:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="y",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person1",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="y", distinct_id="person1", timestamp="2021-05-01 04:30:00")
_create_event(
- team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person1",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00")
_create_person(distinct_ids=["person2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person2",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person2",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:30:00",
)
- _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 04:30:00")
_create_event(
- team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person2",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00")
_create_person(distinct_ids=["person3"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person3",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person3",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:30:00",
)
_create_event(
- team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00")
person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 01:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="person4",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person4",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person4",
+ timestamp="2021-05-01 04:00:00",
)
- _create_event(team=self.team, event="$pageview", distinct_id="person4", timestamp="2021-05-01 02:00:00")
_create_event(
- team=self.team, event="insight viewed", distinct_id="person4", timestamp="2021-05-01 04:00:00"
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person4",
+ timestamp="2021-05-01 05:00:00",
)
_create_event(
- team=self.team, event="invite teammate", distinct_id="person4", timestamp="2021-05-01 05:00:00"
+ team=self.team,
+ event="pageview2",
+ distinct_id="person4",
+ timestamp="2021-05-01 06:00:00",
)
- _create_event(team=self.team, event="pageview2", distinct_id="person4", timestamp="2021-05-01 06:00:00")
filter = Filter(data=filters)
funnel = Funnel(filter, self.team)
@@ -2018,8 +2878,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
filter = filter.shallow_clone(
{
"exclusions": [
- {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1},
- {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1},
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ },
+ {
+ "id": "y",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ },
]
}
)
@@ -2037,8 +2907,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
filter = filter.shallow_clone(
{
"exclusions": [
- {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1},
- {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1},
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ },
+ {
+ "id": "y",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ },
]
}
)
@@ -2056,8 +2936,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
filter = filter.shallow_clone(
{
"exclusions": [
- {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4},
- {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4},
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 4,
+ },
+ {
+ "id": "y",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 4,
+ },
]
}
)
@@ -2095,7 +2985,12 @@ def test_funnel_with_elements_chain(self):
"name": "$autocapture",
"order": 1,
"properties": [
- {"key": "tag_name", "value": [tag_name], "operator": "exact", "type": "element"}
+ {
+ "key": "tag_name",
+ "value": [tag_name],
+ "operator": "exact",
+ "type": "element",
+ }
],
"type": "events",
},
@@ -2133,22 +3028,52 @@ def test_breakdown_values_is_set_on_the_query_with_fewer_than_two_entities(self)
@snapshot_clickhouse_queries
def test_funnel_with_cohorts_step_filter(self):
-
- _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"})
+ _create_person(
+ distinct_ids=["user_1"],
+ team_id=self.team.pk,
+ properties={"email": "n@test.com"},
+ )
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_1",
+ timestamp="2020-01-02T14:00:00Z",
+ )
_create_event(
- team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="paid",
+ distinct_id="user_1",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z")
_create_person(distinct_ids=["user_2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_2",
+ timestamp="2020-01-02T14:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="user_2",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z")
cohort = Cohort.objects.create(
team=self.team,
- groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".com",
+ "type": "person",
+ }
+ ]
+ }
+ ],
)
filters = {
@@ -2176,22 +3101,52 @@ def test_funnel_with_cohorts_step_filter(self):
@snapshot_clickhouse_queries
def test_funnel_with_precalculated_cohort_step_filter(self):
-
- _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"})
+ _create_person(
+ distinct_ids=["user_1"],
+ team_id=self.team.pk,
+ properties={"email": "n@test.com"},
+ )
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_1",
+ timestamp="2020-01-02T14:00:00Z",
+ )
_create_event(
- team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="paid",
+ distinct_id="user_1",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z")
_create_person(distinct_ids=["user_2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_2",
+ timestamp="2020-01-02T14:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="user_2",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z")
cohort = Cohort.objects.create(
team=self.team,
- groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".com",
+ "type": "person",
+ }
+ ]
+ }
+ ],
)
filters = {
@@ -2200,7 +3155,13 @@ def test_funnel_with_precalculated_cohort_step_filter(self):
"id": "user signed up",
"type": "events",
"order": 0,
- "properties": [{"type": "precalculated-cohort", "key": "id", "value": cohort.pk}],
+ "properties": [
+ {
+ "type": "precalculated-cohort",
+ "key": "id",
+ "value": cohort.pk,
+ }
+ ],
},
{"id": "paid", "type": "events", "order": 1},
],
@@ -2222,18 +3183,37 @@ def test_funnel_with_precalculated_cohort_step_filter(self):
@snapshot_clickhouse_queries
def test_funnel_with_static_cohort_step_filter(self):
-
- _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"})
+ _create_person(
+ distinct_ids=["user_1"],
+ team_id=self.team.pk,
+ properties={"email": "n@test.com"},
+ )
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_1",
+ timestamp="2020-01-02T14:00:00Z",
+ )
_create_event(
- team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="paid",
+ distinct_id="user_1",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z")
_create_person(distinct_ids=["user_2"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z"
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_2",
+ timestamp="2020-01-02T14:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="user_2",
+ timestamp="2020-01-10T14:00:00Z",
)
- _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z")
cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True)
cohort.insert_users_by_list(["user_2", "rando"])
@@ -2269,7 +3249,11 @@ def test_funnel_with_property_groups(self):
"date_to": "2020-07-01 00:00:00",
"events": [
{"id": "user signed up", "order": 0},
- {"id": "$pageview", "order": 1, "properties": {"$current_url": "aloha.com"}},
+ {
+ "id": "$pageview",
+ "order": 1,
+ "properties": {"$current_url": "aloha.com"},
+ },
{
"id": "$pageview",
"order": 2,
@@ -2284,15 +3268,35 @@ def test_funnel_with_property_groups(self):
{
"type": "AND",
"values": [
- {"key": "email", "operator": "icontains", "value": ".com", "type": "person"},
- {"key": "age", "operator": "exact", "value": "20", "type": "person"},
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".com",
+ "type": "person",
+ },
+ {
+ "key": "age",
+ "operator": "exact",
+ "value": "20",
+ "type": "person",
+ },
],
},
{
"type": "OR",
"values": [
- {"key": "email", "operator": "icontains", "value": ".org", "type": "person"},
- {"key": "age", "operator": "exact", "value": "28", "type": "person"},
+ {
+ "key": "email",
+ "operator": "icontains",
+ "value": ".org",
+ "type": "person",
+ },
+ {
+ "key": "age",
+ "operator": "exact",
+ "value": "28",
+ "type": "person",
+ },
],
},
],
@@ -2332,10 +3336,23 @@ def test_funnel_with_property_groups(self):
# event
journeys_for(
{
- "stopped_after_signup1": [{"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}],
- "stopped_after_pageview1": [{"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}],
+ "stopped_after_signup1": [
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "stopped_after_pageview1": [
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
"stopped_after_pageview2": [
- {"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 5, 1, 0),
+ },
{
"event": "$pageview",
"properties": {"$current_url": "aloha.com"},
@@ -2343,7 +3360,10 @@ def test_funnel_with_property_groups(self):
},
],
"stopped_after_pageview3": [
- {"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)},
+ {
+ "event": "user signed up",
+ "timestamp": datetime(2020, 5, 1, 0),
+ },
{
"event": "$pageview",
"properties": {"$current_url": "aloha.com"},
@@ -2398,10 +3418,16 @@ def test_funnel_with_property_groups(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 2),
- [people["stopped_after_pageview2"].uuid, people["stopped_after_pageview3"].uuid],
+ [
+ people["stopped_after_pageview2"].uuid,
+ people["stopped_after_pageview3"].uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [people["stopped_after_pageview3"].uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 3),
+ [people["stopped_after_pageview3"].uuid],
+ )
@snapshot_clickhouse_queries
def test_timezones(self):
@@ -2425,7 +3451,10 @@ def test_timezones(self):
_create_person(distinct_ids=["user_1"], team_id=self.team.pk)
# this event shouldn't appear as in US/Pacific this would be the previous day
_create_event(
- team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-01T01:00:00Z"
+ team=self.team,
+ event="user signed up",
+ distinct_id="user_1",
+ timestamp="2020-01-01T01:00:00Z",
)
result = funnel.run()
@@ -2434,7 +3463,12 @@ def test_timezones(self):
def test_funnel_with_sampling(self):
action_play_movie = Action.objects.create(team=self.team, name="watched movie")
- ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie")
+ ActionStep.objects.create(
+ action=action_play_movie,
+ event="$autocapture",
+ tag_name="a",
+ href="/movie",
+ )
funnel = self._basic_funnel(
filters={
@@ -2453,7 +3487,10 @@ def test_funnel_with_sampling(self):
self._signup_event(distinct_id="stopped_after_pay")
self._movie_event(distinct_id="completed_movie")
- person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk)
+ person_factory(
+ distinct_ids=["had_anonymous_id", "completed_movie"],
+ team_id=self.team.pk,
+ )
self._signup_event(distinct_id="had_anonymous_id")
self._movie_event(distinct_id="completed_movie")
@@ -2511,7 +3548,10 @@ def test_hogql_aggregation(self):
# properties.$session_id
result = self._basic_funnel(
- filters={**basic_filters, "funnel_aggregate_by_hogql": "properties.$session_id"}
+ filters={
+ **basic_filters,
+ "funnel_aggregate_by_hogql": "properties.$session_id",
+ }
).run()
self.assertEqual(result[0]["count"], 3)
self.assertEqual(result[1]["count"], 2)
@@ -2544,14 +3584,27 @@ def test_funnel_all_events_with_properties(self):
filters = {
"events": [
- {"type": "events", "id": "user signed up", "order": 0, "name": "user signed up", "math": "total"},
+ {
+ "type": "events",
+ "id": "user signed up",
+ "order": 0,
+ "name": "user signed up",
+ "math": "total",
+ },
{
"type": "events",
"id": None,
"order": 1,
"name": "All events",
"math": "total",
- "properties": [{"key": "is_saved", "value": ["true"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "is_saved",
+ "value": ["true"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
},
],
"funnel_window_days": 14,
diff --git a/posthog/queries/funnels/test/test_funnel_persons.py b/posthog/queries/funnels/test/test_funnel_persons.py
index 87517e2bd1bad..46881af6d5b1d 100644
--- a/posthog/queries/funnels/test/test_funnel_persons.py
+++ b/posthog/queries/funnels/test/test_funnel_persons.py
@@ -9,7 +9,9 @@
from posthog.models.event.util import bulk_create_events
from posthog.models.person.util import bulk_create_persons
from posthog.queries.funnels.funnel_persons import ClickhouseFunnelActors
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -33,10 +35,20 @@ def _create_sample_data_multiple_dropoffs(self):
events = []
for i in range(5):
events.append(
- {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"}
+ {
+ "event": "step one",
+ "distinct_id": f"user_{i}",
+ "team": self.team,
+ "timestamp": "2021-05-01 00:00:00",
+ }
)
events.append(
- {"event": "step two", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-03 00:00:00"}
+ {
+ "event": "step two",
+ "distinct_id": f"user_{i}",
+ "team": self.team,
+ "timestamp": "2021-05-03 00:00:00",
+ }
)
events.append(
{
@@ -49,21 +61,44 @@ def _create_sample_data_multiple_dropoffs(self):
for i in range(5, 15):
events.append(
- {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"}
+ {
+ "event": "step one",
+ "distinct_id": f"user_{i}",
+ "team": self.team,
+ "timestamp": "2021-05-01 00:00:00",
+ }
)
events.append(
- {"event": "step two", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-03 00:00:00"}
+ {
+ "event": "step two",
+ "distinct_id": f"user_{i}",
+ "team": self.team,
+ "timestamp": "2021-05-03 00:00:00",
+ }
)
for i in range(15, 35):
events.append(
- {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"}
+ {
+ "event": "step one",
+ "distinct_id": f"user_{i}",
+ "team": self.team,
+ "timestamp": "2021-05-01 00:00:00",
+ }
)
bulk_create_events(events)
def _create_browser_breakdown_events(self):
- person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk, properties={"$country": "PL"})
- person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk, properties={"$country": "EE"})
+ person1 = _create_person(
+ distinct_ids=["person1"],
+ team_id=self.team.pk,
+ properties={"$country": "PL"},
+ )
+ person2 = _create_person(
+ distinct_ids=["person2"],
+ team_id=self.team.pk,
+ properties={"$country": "EE"},
+ )
journeys_for(
{
"person1": [
@@ -181,9 +216,24 @@ def test_last_step_dropoff(self):
def _create_sample_data(self):
for i in range(110):
_create_person(distinct_ids=[f"user_{i}"], team=self.team)
- _create_event(event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00")
- _create_event(event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-03 00:00:00")
- _create_event(event="step three", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-05 00:00:00")
+ _create_event(
+ event="step one",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-01 00:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-03 00:00:00",
+ )
+ _create_event(
+ event="step three",
+ distinct_id=f"user_{i}",
+ team=self.team,
+ timestamp="2021-05-05 00:00:00",
+ )
def test_basic_offset(self):
self._create_sample_data()
@@ -306,7 +356,11 @@ def test_first_step_breakdowns(self):
"interval": "day",
"funnel_window_days": 7,
"funnel_step": 1,
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"breakdown_type": "event",
"breakdown": "$browser",
}
@@ -337,7 +391,11 @@ def test_first_step_breakdowns_with_multi_property_breakdown(self):
"interval": "day",
"funnel_window_days": 7,
"funnel_step": 1,
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"breakdown_type": "event",
"breakdown": ["$browser", "$browser_version"],
}
@@ -368,7 +426,11 @@ def test_first_step_breakdown_person(self):
"interval": "day",
"funnel_window_days": 7,
"funnel_step": 1,
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"breakdown_type": "person",
"breakdown": "$country",
}
@@ -384,7 +446,8 @@ def test_first_step_breakdown_person(self):
# Check custom_steps give same answers for breakdowns
_, custom_step_results, _ = ClickhouseFunnelActors(
- filter.shallow_clone({"funnel_step_breakdown": "EE", "funnel_custom_steps": [1, 2, 3]}), self.team
+ filter.shallow_clone({"funnel_step_breakdown": "EE", "funnel_custom_steps": [1, 2, 3]}),
+ self.team,
).get_actors()
self.assertEqual(results, custom_step_results)
@@ -395,7 +458,8 @@ def test_first_step_breakdown_person(self):
# Check custom_steps give same answers for breakdowns
_, custom_step_results, _ = ClickhouseFunnelActors(
- filter.shallow_clone({"funnel_step_breakdown": "PL", "funnel_custom_steps": [1, 2, 3]}), self.team
+ filter.shallow_clone({"funnel_step_breakdown": "PL", "funnel_custom_steps": [1, 2, 3]}),
+ self.team,
).get_actors()
self.assertEqual(results, custom_step_results)
@@ -403,7 +467,11 @@ def test_first_step_breakdown_person(self):
def test_funnel_cohort_breakdown_persons(self):
person = _create_person(distinct_ids=[f"person1"], team_id=self.team.pk, properties={"key": "value"})
_create_event(
- team=self.team, event="sign up", distinct_id=f"person1", properties={}, timestamp="2020-01-02T12:00:00Z"
+ team=self.team,
+ event="sign up",
+ distinct_id=f"person1",
+ properties={},
+ timestamp="2020-01-02T12:00:00Z",
)
cohort = Cohort.objects.create(
team=self.team,
@@ -411,7 +479,11 @@ def test_funnel_cohort_breakdown_persons(self):
groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}],
)
filters = {
- "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}],
+ "events": [
+ {"id": "sign up", "order": 0},
+ {"id": "play movie", "order": 1},
+ {"id": "buy", "order": 2},
+ ],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
"date_to": "2020-01-08",
diff --git a/posthog/queries/funnels/test/test_funnel_strict.py b/posthog/queries/funnels/test/test_funnel_strict.py
index 8cc43e176a0e0..0f0d4b691ce21 100644
--- a/posthog/queries/funnels/test/test_funnel_strict.py
+++ b/posthog/queries/funnels/test/test_funnel_strict.py
@@ -7,9 +7,19 @@
from posthog.models.instance_setting import override_instance_config
from posthog.queries.funnels.funnel_strict import ClickhouseFunnelStrict
from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors
-from posthog.queries.funnels.test.breakdown_cases import assert_funnel_results_equal, funnel_breakdown_test_factory
-from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person
+from posthog.queries.funnels.test.breakdown_cases import (
+ assert_funnel_results_equal,
+ funnel_breakdown_test_factory,
+)
+from posthog.queries.funnels.test.conversion_time_cases import (
+ funnel_conversion_time_test_factory,
+)
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ _create_person,
+)
from posthog.test.test_journeys import journeys_for
FORMAT_TIME = "%Y-%m-%d 00:00:00"
@@ -24,8 +34,16 @@ def _create_action(**kwargs):
return action
-class TestFunnelStrictStepsBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnelStrict, ClickhouseFunnelStrictActors, _create_event, _create_action, _create_person)): # type: ignore
-
+class TestFunnelStrictStepsBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_test_factory( # type: ignore
+ ClickhouseFunnelStrict,
+ ClickhouseFunnelStrictActors,
+ _create_event,
+ _create_action,
+ _create_person,
+ ),
+):
maxDiff = None
def test_basic_funnel_default_funnel_days_breakdown_event(self):
@@ -39,7 +57,6 @@ def test_basic_funnel_default_funnel_days_breakdown_action_materialized(self):
pass
def test_strict_breakdown_events_with_multiple_properties(self):
-
filters = {
"events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}],
"insight": INSIGHT_FUNNELS,
@@ -56,8 +73,16 @@ def test_strict_breakdown_events_with_multiple_properties(self):
people = journeys_for(
{
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
- {"event": "blah", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "blah",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Chrome"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 1, 14),
@@ -65,7 +90,11 @@ def test_strict_breakdown_events_with_multiple_properties(self):
},
],
"person2": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
{
"event": "play movie",
"timestamp": datetime(2020, 1, 2, 14),
@@ -146,14 +175,20 @@ def test_strict_breakdown_events_with_multiple_properties(self):
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari"]), [people["person2"].uuid])
-class TestFunnelStrictStepsConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnelStrict, ClickhouseFunnelStrictActors, _create_event, _create_person)): # type: ignore
-
+class TestFunnelStrictStepsConversionTime(
+ ClickhouseTestMixin,
+ funnel_conversion_time_test_factory( # type: ignore
+ ClickhouseFunnelStrict,
+ ClickhouseFunnelStrictActors,
+ _create_event,
+ _create_person,
+ ),
+):
maxDiff = None
pass
class TestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest):
-
maxDiff = None
def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None):
@@ -177,7 +212,9 @@ def test_basic_strict_funnel(self):
funnel = ClickhouseFunnelStrict(filter, self.team)
person1_stopped_after_signup = _create_person(
- distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"test": "okay"}
+ distinct_ids=["stopped_after_signup1"],
+ team_id=self.team.pk,
+ properties={"test": "okay"},
)
_create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1")
@@ -185,32 +222,60 @@ def test_basic_strict_funnel(self):
distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview1",
+ )
person3_stopped_after_insight_view = _create_person(
distinct_ids=["stopped_after_insightview"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview",
+ )
person4_stopped_after_insight_view_not_strict_order = _create_person(
distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk
)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview2",
+ )
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview2")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview2")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview2",
+ )
person5_stopped_after_insight_view_random = _create_person(
distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview3",
+ )
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview3")
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview3",
+ )
person6 = _create_person(distinct_ids=["person6"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person6")
@@ -226,7 +291,11 @@ def test_basic_strict_funnel(self):
_create_event(team=self.team, event="blaah blaa", distinct_id="person7")
_create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview6",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6")
result = funnel.run()
@@ -250,7 +319,8 @@ def test_basic_strict_funnel(self):
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 2), [person3_stopped_after_insight_view.uuid, person7.uuid]
+ self._get_actor_ids_at_step(filter, 2),
+ [person3_stopped_after_insight_view.uuid, person7.uuid],
)
self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person7.uuid])
@@ -263,7 +333,6 @@ def test_basic_strict_funnel(self):
self.assertEqual(result[0]["count"], 7)
def test_advanced_strict_funnel(self):
-
sign_up_action = _create_action(
name="sign up",
team=self.team,
@@ -297,59 +366,122 @@ def test_advanced_strict_funnel(self):
person2_stopped_after_one_pageview = _create_person(
distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview1",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1")
person3_stopped_after_insight_view = _create_person(
distinct_ids=["stopped_after_insightview"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview")
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_insightview", properties={"key": "val"}
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview",
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_insightview",
+ properties={"key": "val"},
)
_create_event(
- team=self.team, event="sign up", distinct_id="stopped_after_insightview", properties={"key": "val2"}
+ team=self.team,
+ event="sign up",
+ distinct_id="stopped_after_insightview",
+ properties={"key": "val2"},
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview",
+ )
person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person4")
_create_event(team=self.team, event="user signed up", distinct_id="person4")
- _create_event(team=self.team, event="sign up", distinct_id="person4", properties={"key": "val"})
- _create_event(team=self.team, event="$pageview", distinct_id="person4", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person4",
+ properties={"key": "val"},
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person4",
+ properties={"key": "val"},
+ )
_create_event(team=self.team, event="blaah blaa", distinct_id="person4")
person5 = _create_person(distinct_ids=["person5"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person5")
_create_event(team=self.team, event="user signed up", distinct_id="person5")
- _create_event(team=self.team, event="sign up", distinct_id="person5", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person5",
+ properties={"key": "val"},
+ )
_create_event(team=self.team, event="$pageview", distinct_id="person5")
_create_event(team=self.team, event="blaah blaa", distinct_id="person5")
person6 = _create_person(distinct_ids=["person6"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person6")
_create_event(team=self.team, event="user signed up", distinct_id="person6")
- _create_event(team=self.team, event="sign up", distinct_id="person6", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person6",
+ properties={"key": "val"},
+ )
_create_event(team=self.team, event="$pageview", distinct_id="person6")
- _create_event(team=self.team, event="pageview", distinct_id="person6", properties={"key": "val1"})
+ _create_event(
+ team=self.team,
+ event="pageview",
+ distinct_id="person6",
+ properties={"key": "val1"},
+ )
person7 = _create_person(distinct_ids=["person7"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person7")
_create_event(team=self.team, event="user signed up", distinct_id="person7")
- _create_event(team=self.team, event="sign up", distinct_id="person7", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person7",
+ properties={"key": "val"},
+ )
_create_event(team=self.team, event="$pageview", distinct_id="person7")
_create_event(team=self.team, event="user signed up", distinct_id="person7")
- _create_event(team=self.team, event="pageview", distinct_id="person7", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="pageview",
+ distinct_id="person7",
+ properties={"key": "val"},
+ )
person8 = _create_person(distinct_ids=["person8"], team_id=self.team.pk)
_create_event(team=self.team, event="blaah blaa", distinct_id="person8")
_create_event(team=self.team, event="user signed up", distinct_id="person8")
_create_event(team=self.team, event="user signed up", distinct_id="person8")
- _create_event(team=self.team, event="sign up", distinct_id="person8", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person8",
+ properties={"key": "val"},
+ )
_create_event(team=self.team, event="$pageview", distinct_id="person8")
- _create_event(team=self.team, event="pageview", distinct_id="person8", properties={"key": "val"})
+ _create_event(
+ team=self.team,
+ event="pageview",
+ distinct_id="person8",
+ properties={"key": "val"},
+ )
result = funnel.run()
@@ -410,7 +542,10 @@ def test_basic_strict_funnel_conversion_times(self):
person1_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ timestamp="2021-05-02 00:00:00",
)
person2_stopped_after_one_pageview = _create_person(
@@ -423,7 +558,10 @@ def test_basic_strict_funnel_conversion_times(self):
timestamp="2021-05-02 00:00:00",
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 01:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_pageview1",
+ timestamp="2021-05-02 01:00:00",
)
person3_stopped_after_insight_view = _create_person(
@@ -436,7 +574,10 @@ def test_basic_strict_funnel_conversion_times(self):
timestamp="2021-05-02 00:00:00",
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-02 02:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_insightview",
+ timestamp="2021-05-02 02:00:00",
)
_create_event(
team=self.team,
@@ -469,7 +610,13 @@ def test_basic_strict_funnel_conversion_times(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 2),
- [person2_stopped_after_one_pageview.uuid, person3_stopped_after_insight_view.uuid],
+ [
+ person2_stopped_after_one_pageview.uuid,
+ person3_stopped_after_insight_view.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person3_stopped_after_insight_view.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 3),
+ [person3_stopped_after_insight_view.uuid],
+ )
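
Note on the pattern reformatted above: TestFunnelStrictStepsBreakdown and TestFunnelStrictStepsConversionTime are built by mixing ClickhouseTestMixin with a TestCase subclass returned from a test factory, so one shared suite runs against each funnel implementation. The following is a minimal sketch of that factory pattern, assuming only what is visible in the diff (the real funnel_breakdown_test_factory takes the funnel class, the actors class and the event/action/person helpers); FakeStrictFunnel and funnel_suite_factory are hypothetical names used only for illustration.

import unittest


def funnel_suite_factory(funnel_class):
    # Returns a TestCase subclass bound to one funnel implementation; mixing the
    # result into a concrete class reuses the whole suite for ordered, strict
    # and unordered funnels alike.
    class FunnelSuite(unittest.TestCase):
        def test_run_returns_steps(self):
            self.assertIsInstance(funnel_class().run(), list)

    return FunnelSuite


class FakeStrictFunnel:
    # Hypothetical stand-in for a funnel query class, used only for this sketch.
    def run(self):
        return [{"count": 0}]


class TestFakeStrictFunnel(funnel_suite_factory(FakeStrictFunnel)):
    pass
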
diff --git a/posthog/queries/funnels/test/test_funnel_strict_persons.py b/posthog/queries/funnels/test/test_funnel_strict_persons.py
index 9c9a304a59e8f..7b76faf42a54a 100644
--- a/posthog/queries/funnels/test/test_funnel_strict_persons.py
+++ b/posthog/queries/funnels/test/test_funnel_strict_persons.py
@@ -7,7 +7,9 @@
from posthog.constants import INSIGHT_FUNNELS
from posthog.models.filters import Filter
from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
diff --git a/posthog/queries/funnels/test/test_funnel_time_to_convert.py b/posthog/queries/funnels/test/test_funnel_time_to_convert.py
index 514bb5af66473..dba62ca133ae3 100644
--- a/posthog/queries/funnels/test/test_funnel_time_to_convert.py
+++ b/posthog/queries/funnels/test/test_funnel_time_to_convert.py
@@ -22,17 +22,52 @@ def test_auto_bin_count_single_step(self):
_create_person(distinct_ids=["user b"], team=self.team)
_create_person(distinct_ids=["user c"], team=self.team)
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
# Converted from 0 to 1 in 3600 s
- _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step three",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
# Converted from 0 to 1 in 2200 s
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
# Converted from 0 to 1 in 82_800 s
filter = Filter(
@@ -60,9 +95,18 @@ def test_auto_bin_count_single_step(self):
results,
{
"bins": [
- (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
- (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users
- (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
+ (
+ 2220.0,
+ 2,
+ ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
+ (
+ 42510.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 26_880 s) up - no users
+ (
+ 82800.0,
+ 1,
+ ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
],
"average_conversion_time": 29_540,
},
@@ -75,17 +119,52 @@ def test_auto_bin_count_single_step_duplicate_events(self):
_create_person(distinct_ids=["user b"], team=self.team)
_create_person(distinct_ids=["user c"], team=self.team)
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
# Converted from 0 to 1 in 3600 s
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
# Converted from 0 to 1 in 2200 s
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
# Converted from 0 to 1 in 82_800 s
filter = Filter(
@@ -113,9 +192,18 @@ def test_auto_bin_count_single_step_duplicate_events(self):
results,
{
"bins": [
- (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
- (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users
- (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
+ (
+ 2220.0,
+ 2,
+ ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
+ (
+ 42510.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 26_880 s) up - no users
+ (
+ 82800.0,
+ 1,
+ ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
],
"average_conversion_time": 29_540,
},
@@ -126,17 +214,52 @@ def test_custom_bin_count_single_step(self):
_create_person(distinct_ids=["user b"], team=self.team)
_create_person(distinct_ids=["user c"], team=self.team)
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
# Converted from 0 to 1 in 3600 s
- _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step three",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
# Converted from 0 to 1 in 2200 s
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
# Converted from 0 to 1 in 82_800 s
filter = Filter(
@@ -165,13 +288,22 @@ def test_custom_bin_count_single_step(self):
results,
{
"bins": [
- (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 13_732 s - users A and B
- (13732.0, 0), # Analogous to above, just an interval (in this case 13_732 s) up - no users
+ (
+ 2220.0,
+ 2,
+ ), # Reached step 1 from step 0 in at least 2200 s but less than 13_732 s - users A and B
+ (
+ 13732.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 13_732 s) up - no users
(25244.0, 0), # And so on
(36756.0, 0),
(48268.0, 0),
(59780.0, 0),
- (71292.0, 1), # Reached step 1 from step 0 in at least 71_292 s but less than 82_804 s - user C
+ (
+ 71292.0,
+ 1,
+ ), # Reached step 1 from step 0 in at least 71_292 s but less than 82_804 s - user C
(82804.0, 0),
],
"average_conversion_time": 29_540,
@@ -184,16 +316,51 @@ def test_auto_bin_count_total(self):
_create_person(distinct_ids=["user b"], team=self.team)
_create_person(distinct_ids=["user c"], team=self.team)
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
- _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
+ _create_event(
+ event="step three",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
# Converted from 0 to 2 in 10_800 s
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
filter = Filter(
data={
@@ -217,8 +384,14 @@ def test_auto_bin_count_total(self):
results,
{
"bins": [
- (10800.0, 1), # Reached step 2 from step 0 in at least 10_800 s but less than 10_860 s - user A
- (10860.0, 0), # Analogous to above, just an interval (in this case 60 s) up - no users
+ (
+ 10800.0,
+ 1,
+ ), # Reached step 2 from step 0 in at least 10_800 s but less than 10_860 s - user A
+ (
+ 10860.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 60 s) up - no users
],
"average_conversion_time": 10_800.0,
},
@@ -226,7 +399,8 @@ def test_auto_bin_count_total(self):
# Let's verify that behavior with steps unspecified is the same as when first and last steps specified
funnel_trends_steps_specified = ClickhouseFunnelTimeToConvert(
- Filter(data={**filter._data, "funnel_from_step": 0, "funnel_to_step": 2}), self.team
+ Filter(data={**filter._data, "funnel_from_step": 0, "funnel_to_step": 2}),
+ self.team,
)
results_steps_specified = funnel_trends_steps_specified.run()
@@ -238,17 +412,52 @@ def test_basic_unordered(self):
_create_person(distinct_ids=["user b"], team=self.team)
_create_person(distinct_ids=["user c"], team=self.team)
- _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
- _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step three",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
# Converted from 0 to 1 in 7200 s
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
# Converted from 0 to 1 in 2200 s
- _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step two",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
# Converted from 0 to 1 in 82_800 s
filter = Filter(
@@ -278,9 +487,18 @@ def test_basic_unordered(self):
results,
{
"bins": [
- (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
- (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users
- (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
+ (
+ 2220.0,
+ 2,
+ ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
+ (
+ 42510.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 26_880 s) up - no users
+ (
+ 82800.0,
+ 1,
+ ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
],
"average_conversion_time": 29540,
},
@@ -293,25 +511,85 @@ def test_basic_strict(self):
_create_person(distinct_ids=["user c"], team=self.team)
_create_person(distinct_ids=["user d"], team=self.team)
- _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00")
- _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 18:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 19:00:00",
+ )
# Converted from 0 to 1 in 3600 s
- _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00")
+ _create_event(
+ event="step three",
+ distinct_id="user a",
+ team=self.team,
+ timestamp="2021-06-08 21:00:00",
+ )
- _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00")
- _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00")
+ _create_event(
+ event="step one",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:37:00",
+ )
# Converted from 0 to 1 in 2200 s
- _create_event(event="blah", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:38:00")
- _create_event(event="step three", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:39:00")
+ _create_event(
+ event="blah",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:38:00",
+ )
+ _create_event(
+ event="step three",
+ distinct_id="user b",
+ team=self.team,
+ timestamp="2021-06-09 13:39:00",
+ )
- _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="step two",
+ distinct_id="user c",
+ team=self.team,
+ timestamp="2021-06-12 06:00:00",
+ )
# Converted from 0 to 1 in 82_800 s
- _create_event(event="step one", distinct_id="user d", team=self.team, timestamp="2021-06-11 07:00:00")
- _create_event(event="blah", distinct_id="user d", team=self.team, timestamp="2021-06-12 07:00:00")
+ _create_event(
+ event="step one",
+ distinct_id="user d",
+ team=self.team,
+ timestamp="2021-06-11 07:00:00",
+ )
+ _create_event(
+ event="blah",
+ distinct_id="user d",
+ team=self.team,
+ timestamp="2021-06-12 07:00:00",
+ )
# Blah cancels conversion
- _create_event(event="step two", distinct_id="user d", team=self.team, timestamp="2021-06-12 09:00:00")
+ _create_event(
+ event="step two",
+ distinct_id="user d",
+ team=self.team,
+ timestamp="2021-06-12 09:00:00",
+ )
filter = Filter(
data={
@@ -340,9 +618,18 @@ def test_basic_strict(self):
results,
{
"bins": [
- (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
- (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users
- (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
+ (
+ 2220.0,
+ 2,
+ ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B
+ (
+ 42510.0,
+ 0,
+ ), # Analogous to above, just an interval (in this case 26_880 s) up - no users
+ (
+ 82800.0,
+ 1,
+ ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C
],
"average_conversion_time": 29540,
},
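
As a sanity check on the expected results in the time-to-convert tests above, the bin edges and the average follow directly from the event timestamps in the fixtures: user a converts in 3600 s (18:00 to 19:00), user b in 2220 s (13:00 to 13:37) and user c in 82 800 s (07:00 to 06:00 the next day), so the mean is 29 540 s, matching "average_conversion_time": 29_540. A standalone check using only the timestamps quoted in the tests:

from datetime import datetime

# Per-user (first step, next step) timestamps taken from the test data above.
conversions = {
    "user a": (datetime(2021, 6, 8, 18), datetime(2021, 6, 8, 19)),      # 3600 s
    "user b": (datetime(2021, 6, 9, 13), datetime(2021, 6, 9, 13, 37)),  # 2220 s
    "user c": (datetime(2021, 6, 11, 7), datetime(2021, 6, 12, 6)),      # 82800 s
}
seconds = [(end - start).total_seconds() for start, end in conversions.values()]
print(seconds)                      # [3600.0, 2220.0, 82800.0]
print(sum(seconds) / len(seconds))  # 29540.0 -> "average_conversion_time": 29_540
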
diff --git a/posthog/queries/funnels/test/test_funnel_trends.py b/posthog/queries/funnels/test/test_funnel_trends.py
index 12e8b81af02a5..537333ce07476 100644
--- a/posthog/queries/funnels/test/test_funnel_trends.py
+++ b/posthog/queries/funnels/test/test_funnel_trends.py
@@ -8,7 +8,12 @@
from posthog.models.filters import Filter
from posthog.queries.funnels.funnel_trends import ClickhouseFunnelTrends
from posthog.queries.funnels.funnel_trends_persons import ClickhouseFunnelTrendsActors
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_person,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
FORMAT_TIME = "%Y-%m-%d %H:%M:%S"
@@ -60,7 +65,10 @@ def _create_sample_data(self):
)
def test_no_event_in_period(self):
- journeys_for({"user a": [{"event": "Step one", "timestamp": datetime(2021, 6, 6, 21)}]}, self.team)
+ journeys_for(
+ {"user a": [{"event": "Step one", "timestamp": datetime(2021, 6, 6, 21)}]},
+ self.team,
+ )
filter = Filter(
data={
@@ -86,7 +94,10 @@ def test_no_event_in_period(self):
self.assertEqual(formatted_results[0]["days"][0], "2021-06-07")
def test_only_one_user_reached_one_step(self):
- journeys_for({"user a": [{"event": "step one", "timestamp": datetime(2021, 6, 7, 19)}]}, self.team)
+ journeys_for(
+ {"user a": [{"event": "step one", "timestamp": datetime(2021, 6, 7, 19)}]},
+ self.team,
+ )
filter = Filter(
data={
@@ -161,7 +172,8 @@ def test_only_one_user_reached_one_step(self):
self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1)
self.assertEqual(
- [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user a"]]
+ [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results],
+ [["user a"]],
)
# No users converted 2021-06-07
@@ -691,7 +703,8 @@ def test_one_person_in_multiple_periods_and_windows(self):
self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1)
self.assertEqual(
- [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_two"]]
+ [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results],
+ [["user_two"]],
)
# 1 user who converted starting # 2021-05-04
@@ -701,7 +714,8 @@ def test_one_person_in_multiple_periods_and_windows(self):
self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1)
self.assertEqual(
- [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_one"]]
+ [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results],
+ [["user_one"]],
)
def test_from_second_step(self):
@@ -889,7 +903,8 @@ def test_one_person_in_multiple_periods_and_windows_in_unordered_funnel(self):
self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1)
self.assertEqual(
- [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_two"]]
+ [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results],
+ [["user_two"]],
)
# 1 user who converted starting # 2021-05-04
@@ -899,7 +914,8 @@ def test_one_person_in_multiple_periods_and_windows_in_unordered_funnel(self):
self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1)
self.assertEqual(
- [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_one"]]
+ [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results],
+ [["user_one"]],
)
def test_one_person_in_multiple_periods_and_windows_in_strict_funnel(self):
@@ -976,19 +992,55 @@ def test_funnel_step_breakdown_event(self):
journeys_for(
{
"user_one": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$browser": "Chrome"}},
- {"event": "step two", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Chrome"}},
- {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 3),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 5),
+ "properties": {"$browser": "Chrome"},
+ },
],
"user_two": [
- {"event": "step one", "timestamp": datetime(2021, 5, 2), "properties": {"$browser": "Chrome"}},
- {"event": "step two", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Chrome"}},
- {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 2),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 3),
+ "properties": {"$browser": "Chrome"},
+ },
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 5),
+ "properties": {"$browser": "Chrome"},
+ },
],
"user_three": [
- {"event": "step one", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Safari"}},
- {"event": "step two", "timestamp": datetime(2021, 5, 4), "properties": {"$browser": "Safari"}},
- {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Safari"}},
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 3),
+ "properties": {"$browser": "Safari"},
+ },
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 4),
+ "properties": {"$browser": "Safari"},
+ },
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 5),
+ "properties": {"$browser": "Safari"},
+ },
],
},
self.team,
@@ -1018,16 +1070,40 @@ def test_funnel_step_breakdown_event(self):
for res in result:
if res["breakdown_value"] == ["Chrome"]:
- self.assertEqual(res["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ res["data"],
+ [
+ 100.0,
+ 100.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ ],
+ )
elif res["breakdown_value"] == ["Safari"]:
- self.assertEqual(res["data"], [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ res["data"],
+ [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
else:
self.fail(msg="Invalid breakdown value")
def test_funnel_step_breakdown_person(self):
_create_person(distinct_ids=["user_one"], team=self.team, properties={"$browser": "Chrome"})
_create_person(distinct_ids=["user_two"], team=self.team, properties={"$browser": "Chrome"})
- _create_person(distinct_ids=["user_three"], team=self.team, properties={"$browser": "Safari"})
+ _create_person(
+ distinct_ids=["user_three"],
+ team=self.team,
+ properties={"$browser": "Safari"},
+ )
journeys_for(
{
"user_one": [
@@ -1073,16 +1149,40 @@ def test_funnel_step_breakdown_person(self):
for res in result:
if res["breakdown_value"] == ["Chrome"]:
- self.assertEqual(res["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ res["data"],
+ [
+ 100.0,
+ 100.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ ],
+ )
elif res["breakdown_value"] == ["Safari"]:
- self.assertEqual(res["data"], [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ res["data"],
+ [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
else:
self.fail(msg="Invalid breakdown value")
def test_funnel_trend_cohort_breakdown(self):
_create_person(distinct_ids=["user_one"], team=self.team, properties={"key": "value"})
_create_person(distinct_ids=["user_two"], team=self.team, properties={"key": "value"})
- _create_person(distinct_ids=["user_three"], team=self.team, properties={"$browser": "Safari"})
+ _create_person(
+ distinct_ids=["user_three"],
+ team=self.team,
+ properties={"$browser": "Safari"},
+ )
journeys_for(
{
@@ -1131,26 +1231,56 @@ def test_funnel_trend_cohort_breakdown(self):
result = funnel_trends.run()
self.assertEqual(len(result), 1)
- self.assertEqual(result[0]["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ result[0]["data"],
+ [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
@snapshot_clickhouse_queries
def test_timezones_trends(self):
journeys_for(
{
"user_one": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1, 10)}, # 04-30 in pacific
- {"event": "step two", "timestamp": datetime(2021, 5, 1, 11)}, # today in pacific
- {"event": "step three", "timestamp": datetime(2021, 5, 1, 12)}, # today in pacific
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1, 10),
+ }, # 04-30 in pacific
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 1, 11),
+ }, # today in pacific
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 1, 12),
+ }, # today in pacific
],
"user_two": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1, 1)}, # 04-30 in pacific
- {"event": "step two", "timestamp": datetime(2021, 5, 1, 2)}, # 04-30 in pacific
- {"event": "step three", "timestamp": datetime(2021, 5, 1, 3)}, # 04-30 in pacific
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1, 1),
+ }, # 04-30 in pacific
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 1, 2),
+ }, # 04-30 in pacific
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 1, 3),
+ }, # 04-30 in pacific
],
"user_three": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1, 1)}, # 04-30 in pacific
- {"event": "step two", "timestamp": datetime(2021, 5, 1, 10)}, # today in pacific
- {"event": "step three", "timestamp": datetime(2021, 5, 1, 11)}, # today in pacific
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1, 1),
+ }, # 04-30 in pacific
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 1, 10),
+ }, # today in pacific
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 1, 11),
+ }, # today in pacific
],
"user_eight": [],
},
diff --git a/posthog/queries/funnels/test/test_funnel_trends_persons.py b/posthog/queries/funnels/test/test_funnel_trends_persons.py
index ee75bfb025719..60ec3df37c3ff 100644
--- a/posthog/queries/funnels/test/test_funnel_trends_persons.py
+++ b/posthog/queries/funnels/test/test_funnel_trends_persons.py
@@ -3,8 +3,14 @@
from posthog.constants import INSIGHT_FUNNELS, FunnelVizType
from posthog.models.filters import Filter
from posthog.queries.funnels.funnel_trends_persons import ClickhouseFunnelTrendsActors
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
filter_data = {
@@ -17,7 +23,11 @@
"funnel_from_step": 0,
"entrance_period_start": "2021-05-01 00:00:00",
"drop_off": False,
- "events": [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}],
+ "events": [
+ {"id": "step one", "order": 0},
+ {"id": "step two", "order": 1},
+ {"id": "step three", "order": 2},
+ ],
"include_recordings": "true",
}
@@ -28,9 +38,21 @@ def test_funnel_trend_persons_returns_recordings(self):
persons = journeys_for(
{
"user_one": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}},
- {"event": "step two", "timestamp": datetime(2021, 5, 2), "properties": {"$session_id": "s1b"}},
- {"event": "step three", "timestamp": datetime(2021, 5, 3), "properties": {"$session_id": "s1c"}},
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1),
+ "properties": {"$session_id": "s1a"},
+ },
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 2),
+ "properties": {"$session_id": "s1b"},
+ },
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 3),
+ "properties": {"$session_id": "s1c"},
+ },
]
},
self.team,
@@ -47,16 +69,31 @@ def test_funnel_trend_persons_returns_recordings(self):
filter = Filter(data={"funnel_to_step": 1, **filter_data})
_, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors()
self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid])
- self.assertEqual([person["matched_recordings"][0]["session_id"] for person in results], ["s1b"])
+ self.assertEqual(
+ [person["matched_recordings"][0]["session_id"] for person in results],
+ ["s1b"],
+ )
@snapshot_clickhouse_queries
def test_funnel_trend_persons_with_no_to_step(self):
persons = journeys_for(
{
"user_one": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}},
- {"event": "step two", "timestamp": datetime(2021, 5, 2), "properties": {"$session_id": "s1b"}},
- {"event": "step three", "timestamp": datetime(2021, 5, 3), "properties": {"$session_id": "s1c"}},
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1),
+ "properties": {"$session_id": "s1a"},
+ },
+ {
+ "event": "step two",
+ "timestamp": datetime(2021, 5, 2),
+ "properties": {"$session_id": "s1b"},
+ },
+ {
+ "event": "step three",
+ "timestamp": datetime(2021, 5, 3),
+ "properties": {"$session_id": "s1c"},
+ },
]
},
self.team,
@@ -74,14 +111,21 @@ def test_funnel_trend_persons_with_no_to_step(self):
filter = Filter(data=filter_data)
_, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors()
self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid])
- self.assertEqual([person["matched_recordings"][0]["session_id"] for person in results], ["s1c"])
+ self.assertEqual(
+ [person["matched_recordings"][0]["session_id"] for person in results],
+ ["s1c"],
+ )
@snapshot_clickhouse_queries
def test_funnel_trend_persons_with_drop_off(self):
persons = journeys_for(
{
"user_one": [
- {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}}
+ {
+ "event": "step one",
+ "timestamp": datetime(2021, 5, 1),
+ "properties": {"$session_id": "s1a"},
+ }
]
},
self.team,
@@ -98,4 +142,7 @@ def test_funnel_trend_persons_with_drop_off(self):
filter = Filter(data={**filter_data, "drop_off": True})
_, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors()
self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid])
- self.assertEqual([person["matched_recordings"][0].get("session_id") for person in results], ["s1a"])
+ self.assertEqual(
+ [person["matched_recordings"][0].get("session_id") for person in results],
+ ["s1a"],
+ )
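
The journeys_for helper used throughout these trend-persons tests takes a mapping of distinct id to a list of event dicts (event name, timestamp, properties) plus the team, creates the persons and events, and returns the created persons keyed by distinct id, which is why the assertions can read persons["user_one"].uuid. Below is a minimal stand-in that mirrors only that return shape; it is a hypothetical sketch, not the real helper, which also writes every event for the team.

import uuid
from dataclasses import dataclass, field
from datetime import datetime


@dataclass
class StubPerson:
    distinct_id: str
    uuid: uuid.UUID = field(default_factory=uuid.uuid4)


def stub_journeys_for(events_by_person, team=None):
    # The real helper also inserts each event; this stub only reproduces the
    # {distinct_id: person-with-uuid} return value the tests rely on.
    return {distinct_id: StubPerson(distinct_id) for distinct_id in events_by_person}


persons = stub_journeys_for(
    {
        "user_one": [
            {
                "event": "step one",
                "timestamp": datetime(2021, 5, 1),
                "properties": {"$session_id": "s1a"},
            }
        ]
    },
    team=None,
)
print(persons["user_one"].uuid)
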
diff --git a/posthog/queries/funnels/test/test_funnel_unordered.py b/posthog/queries/funnels/test/test_funnel_unordered.py
index cb4eaba04776f..ce3643d007fc6 100644
--- a/posthog/queries/funnels/test/test_funnel_unordered.py
+++ b/posthog/queries/funnels/test/test_funnel_unordered.py
@@ -7,13 +7,17 @@
from posthog.models.action_step import ActionStep
from posthog.models.filters import Filter
from posthog.queries.funnels.funnel_unordered import ClickhouseFunnelUnordered
-from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors
+from posthog.queries.funnels.funnel_unordered_persons import (
+ ClickhouseFunnelUnorderedActors,
+)
from posthog.queries.funnels.test.breakdown_cases import (
FunnelStepResult,
assert_funnel_results_equal,
funnel_breakdown_test_factory,
)
-from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory
+from posthog.queries.funnels.test.conversion_time_cases import (
+ funnel_conversion_time_test_factory,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -35,7 +39,16 @@ def _create_action(**kwargs):
return action
-class TestFunnelUnorderedStepsBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnelUnordered, ClickhouseFunnelUnorderedActors, _create_event, _create_action, _create_person)): # type: ignore
+class TestFunnelUnorderedStepsBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_test_factory( # type: ignore
+ ClickhouseFunnelUnordered,
+ ClickhouseFunnelUnorderedActors,
+ _create_event,
+ _create_action,
+ _create_person,
+ ),
+):
maxDiff = None
def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self):
@@ -170,21 +183,41 @@ def test_funnel_step_breakdown_with_step_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -217,21 +250,41 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
{"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -261,7 +314,8 @@ def test_funnel_step_breakdown_with_step_one_attribution(self):
[people["person1"].uuid, people["person2"].uuid, people["person3"].uuid],
)
self.assertCountEqual(
- self._get_actor_ids_at_step(filter, 2, ""), [people["person1"].uuid, people["person3"].uuid]
+ self._get_actor_ids_at_step(filter, 2, ""),
+ [people["person1"].uuid, people["person3"].uuid],
)
self._assert_funnel_breakdown_result_is_correct(
@@ -296,7 +350,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
@@ -304,13 +362,25 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self)
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -396,7 +466,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
# event
events_by_person = {
"person1": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
{"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
],
"person2": [
@@ -404,13 +478,25 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
],
"person4": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
# step attribution means alakazam is valid when step = 1
- {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
],
}
people = journeys_for(events_by_person, self.team)
@@ -482,7 +568,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
filters = {
"events": [
{"id": "sign up", "order": 0},
- {"id": "buy", "properties": [{"type": "event", "key": "$version", "value": "xyz"}], "order": 1},
+ {
+ "id": "buy",
+ "properties": [{"type": "event", "key": "$version", "value": "xyz"}],
+ "order": 1,
+ },
],
"insight": INSIGHT_FUNNELS,
"date_from": "2020-01-01",
@@ -506,7 +596,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
"timestamp": datetime(2020, 1, 1, 12),
"properties": {"$browser": "Chrome", "$version": "xyz"},
},
- {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Chrome"},
+ },
# discarded because doesn't meet criteria
],
"person2": [
@@ -518,7 +612,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
},
],
"person3": [
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
{
"event": "buy",
"timestamp": datetime(2020, 1, 2, 15),
@@ -541,7 +639,15 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
self.assertCountEqual([res[0]["breakdown"] for res in result], [[""], ["Mac"], ["Safari"]])
-class TestFunnelUnorderedStepsConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnelUnordered, ClickhouseFunnelUnorderedActors, _create_event, _create_person)): # type: ignore
+class TestFunnelUnorderedStepsConversionTime(
+ ClickhouseTestMixin,
+ funnel_conversion_time_test_factory( # type: ignore
+ ClickhouseFunnelUnordered,
+ ClickhouseFunnelUnorderedActors,
+ _create_event,
+ _create_person,
+ ),
+):
maxDiff = None
pass
@@ -574,43 +680,79 @@ def test_basic_unordered_funnel(self):
distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_pageview1",
+ )
person3_stopped_after_insight_view = _create_person(
distinct_ids=["stopped_after_insightview"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview",
+ )
person4_stopped_after_insight_view_reverse_order = _create_person(
distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk
)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview2",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview2")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview2",
+ )
person5_stopped_after_insight_view_random = _create_person(
distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview3",
+ )
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview3",
+ )
person6_did_only_insight_view = _create_person(
distinct_ids=["stopped_after_insightview4"], team_id=self.team.pk
)
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview4")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview4")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview4",
+ )
person7_did_only_pageview = _create_person(distinct_ids=["stopped_after_insightview5"], team_id=self.team.pk)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview5")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview5")
person8_didnot_signup = _create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview6",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6")

        result = funnel.run()
@@ -649,7 +791,11 @@ def test_basic_unordered_funnel(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, -2),
- [person1_stopped_after_signup.uuid, person6_did_only_insight_view.uuid, person7_did_only_pageview.uuid],
+ [
+ person1_stopped_after_signup.uuid,
+ person6_did_only_insight_view.uuid,
+ person7_did_only_pageview.uuid,
+ ],
)
self.assertCountEqual(
@@ -691,38 +837,70 @@ def test_big_multi_step_unordered_funnel(self):
person3_stopped_after_insight_view = _create_person(
distinct_ids=["stopped_after_insightview"], team_id=self.team.pk
)
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview",
+ )
person4_stopped_after_insight_view_reverse_order = _create_person(
distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk
)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview2",
+ )
_create_event(team=self.team, event="crying", distinct_id="stopped_after_insightview2")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview2",
+ )
person5_stopped_after_insight_view_random = _create_person(
distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk
)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_insightview3",
+ )
_create_event(team=self.team, event="crying", distinct_id="stopped_after_insightview3")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview3")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview3",
+ )
person6_did_only_insight_view = _create_person(
distinct_ids=["stopped_after_insightview4"], team_id=self.team.pk
)
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview4")
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview4")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview4",
+ )
person7_did_only_pageview = _create_person(distinct_ids=["stopped_after_insightview5"], team_id=self.team.pk)
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview5")
_create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview5")
person8_didnot_signup = _create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk)
- _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6")
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="stopped_after_insightview6",
+ )
_create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6")
funnel = ClickhouseFunnelUnordered(filter, self.team)
@@ -771,7 +949,10 @@ def test_big_multi_step_unordered_funnel(self):
],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person5_stopped_after_insight_view_random.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 4),
+ [person5_stopped_after_insight_view_random.uuid],
+ )

    def test_basic_unordered_funnel_conversion_times(self):
filter = Filter(
@@ -792,14 +973,20 @@ def test_basic_unordered_funnel_conversion_times(self):
person1_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ timestamp="2021-05-02 00:00:00",
)
person2_stopped_after_one_pageview = _create_person(
distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 00:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_pageview1",
+ timestamp="2021-05-02 00:00:00",
)
_create_event(
team=self.team,
@@ -824,11 +1011,17 @@ def test_basic_unordered_funnel_conversion_times(self):
timestamp="2021-05-02 02:00:00",
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-02 04:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_insightview",
+ timestamp="2021-05-02 04:00:00",
)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-03 00:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_insightview",
+ timestamp="2021-05-03 00:00:00",
)
_create_event(
team=self.team,
@@ -870,10 +1063,16 @@ def test_basic_unordered_funnel_conversion_times(self):
self.assertCountEqual(
self._get_actor_ids_at_step(filter, 2),
- [person2_stopped_after_one_pageview.uuid, person3_stopped_after_insight_view.uuid],
+ [
+ person2_stopped_after_one_pageview.uuid,
+ person3_stopped_after_insight_view.uuid,
+ ],
)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person3_stopped_after_insight_view.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 3),
+ [person3_stopped_after_insight_view.uuid],
+ )

    def test_single_event_unordered_funnel(self):
filter = Filter(
@@ -889,12 +1088,18 @@ def test_single_event_unordered_funnel(self):
_create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00"
+ team=self.team,
+ event="user signed up",
+ distinct_id="stopped_after_signup1",
+ timestamp="2021-05-02 00:00:00",
)
_create_person(distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk)
_create_event(
- team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 00:00:00"
+ team=self.team,
+ event="$pageview",
+ distinct_id="stopped_after_pageview1",
+ timestamp="2021-05-02 00:00:00",
)
_create_event(
team=self.team,
@@ -917,14 +1122,30 @@ def test_funnel_exclusions_invalid_params(self):
],
"insight": INSIGHT_FUNNELS,
"funnel_window_days": 14,
- "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 1}],
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 1,
+ "funnel_to_step": 1,
+ }
+ ],
}
filter = Filter(data=filters)
self.assertRaises(ValidationError, lambda: ClickhouseFunnelUnordered(filter, self.team).run())
# partial windows not allowed for unordered
filter = filter.shallow_clone(
- {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}]}
+ {
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
+ ]
+ }
)
self.assertRaises(ValidationError, lambda: ClickhouseFunnelUnordered(filter, self.team).run())
@@ -938,26 +1159,68 @@ def test_funnel_exclusions_full_window(self):
"funnel_window_days": 14,
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-14 00:00:00",
- "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}],
+ "exclusions": [
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 1,
+ }
+ ],
}
filter = Filter(data=filters)
funnel = ClickhouseFunnelUnordered(filter, self.team)
# event 1
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
+ )
# event 2
person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00")
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 03:30:00")
- _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 03:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
# event 3
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="paid",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
+ )
result = funnel.run()
@@ -967,7 +1230,10 @@ def test_funnel_exclusions_full_window(self):
self.assertEqual(result[1]["name"], "Completed 2 steps")
self.assertEqual(result[1]["count"], 2)
- self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person1.uuid, person2.uuid, person3.uuid])
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filter, 1),
+ [person1.uuid, person2.uuid, person3.uuid],
+ )
self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1.uuid, person3.uuid])

    def test_advanced_funnel_multiple_exclusions_between_steps(self):
@@ -983,56 +1249,246 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
"date_to": "2021-05-14 00:00:00",
"insight": INSIGHT_FUNNELS,
"exclusions": [
- {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4},
- {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4},
+ {
+ "id": "x",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 4,
+ },
+ {
+ "id": "y",
+ "type": "events",
+ "funnel_from_step": 0,
+ "funnel_to_step": 4,
+ },
],
}
person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 03:00:00")
- _create_event(team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00")
- _create_event(team=self.team, event="y", distinct_id="person1", timestamp="2021-05-01 04:30:00")
- _create_event(team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person1",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person1",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person1",
+ timestamp="2021-05-01 03:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person1",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person1",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="pageview2",
+ distinct_id="person1",
+ timestamp="2021-05-01 06:00:00",
+ )
person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00")
- _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 04:30:00")
- _create_event(team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person2",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person2",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person2",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person2",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person2",
+ timestamp="2021-05-01 05:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="pageview2",
+ distinct_id="person2",
+ timestamp="2021-05-01 06:00:00",
+ )
person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00")
- _create_event(team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person3",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person3",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person3",
+ timestamp="2021-05-01 05:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="pageview2",
+ distinct_id="person3",
+ timestamp="2021-05-01 06:00:00",
+ )
person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person4", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="insight viewed", distinct_id="person4", timestamp="2021-05-01 04:00:00")
- _create_event(team=self.team, event="invite teammate", distinct_id="person4", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person4", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person4",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person4",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person4",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person4",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="pageview2",
+ distinct_id="person4",
+ timestamp="2021-05-01 06:00:00",
+ )
person5 = _create_person(distinct_ids=["person5"], team_id=self.team.pk)
- _create_event(team=self.team, event="user signed up", distinct_id="person5", timestamp="2021-05-01 01:00:00")
- _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 01:30:00")
- _create_event(team=self.team, event="$pageview", distinct_id="person5", timestamp="2021-05-01 02:00:00")
- _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 02:30:00")
- _create_event(team=self.team, event="insight viewed", distinct_id="person5", timestamp="2021-05-01 04:00:00")
- _create_event(team=self.team, event="y", distinct_id="person5", timestamp="2021-05-01 04:30:00")
- _create_event(team=self.team, event="invite teammate", distinct_id="person5", timestamp="2021-05-01 05:00:00")
- _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 05:30:00")
- _create_event(team=self.team, event="pageview2", distinct_id="person5", timestamp="2021-05-01 06:00:00")
+ _create_event(
+ team=self.team,
+ event="user signed up",
+ distinct_id="person5",
+ timestamp="2021-05-01 01:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person5",
+ timestamp="2021-05-01 01:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="person5",
+ timestamp="2021-05-01 02:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person5",
+ timestamp="2021-05-01 02:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="insight viewed",
+ distinct_id="person5",
+ timestamp="2021-05-01 04:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="y",
+ distinct_id="person5",
+ timestamp="2021-05-01 04:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="invite teammate",
+ distinct_id="person5",
+ timestamp="2021-05-01 05:00:00",
+ )
+ _create_event(
+ team=self.team,
+ event="x",
+ distinct_id="person5",
+ timestamp="2021-05-01 05:30:00",
+ )
+ _create_event(
+ team=self.team,
+ event="pageview2",
+ distinct_id="person5",
+ timestamp="2021-05-01 06:00:00",
+ )
filter = Filter(data=filters)
funnel = ClickhouseFunnelUnordered(filter, self.team)
@@ -1058,18 +1514,36 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self):
def test_funnel_unordered_all_events_with_properties(self):
_create_person(distinct_ids=["user"], team=self.team)
_create_event(event="user signed up", distinct_id="user", team=self.team)
- _create_event(event="added to card", distinct_id="user", properties={"is_saved": True}, team=self.team)
+ _create_event(
+ event="added to card",
+ distinct_id="user",
+ properties={"is_saved": True},
+ team=self.team,
+ )
filters = {
"events": [
- {"type": "events", "id": "user signed up", "order": 0, "name": "user signed up", "math": "total"},
+ {
+ "type": "events",
+ "id": "user signed up",
+ "order": 0,
+ "name": "user signed up",
+ "math": "total",
+ },
{
"type": "events",
"id": None,
"order": 1,
"name": "All events",
"math": "total",
- "properties": [{"key": "is_saved", "value": ["true"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "is_saved",
+ "value": ["true"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
},
],
"funnel_window_days": 14,
@@ -1084,9 +1558,17 @@ def test_funnel_unordered_all_events_with_properties(self):
def test_funnel_unordered_entity_filters(self):
_create_person(distinct_ids=["user"], team=self.team)
- _create_event(event="user signed up", distinct_id="user", properties={"prop_a": "some value"}, team=self.team)
_create_event(
- event="user signed up", distinct_id="user", properties={"prop_b": "another value"}, team=self.team
+ event="user signed up",
+ distinct_id="user",
+ properties={"prop_a": "some value"},
+ team=self.team,
+ )
+ _create_event(
+ event="user signed up",
+ distinct_id="user",
+ properties={"prop_b": "another value"},
+ team=self.team,
)
filters = {
@@ -1097,7 +1579,14 @@ def test_funnel_unordered_entity_filters(self):
"order": 0,
"name": "user signed up",
"math": "total",
- "properties": [{"key": "prop_a", "value": ["some value"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "prop_a",
+ "value": ["some value"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
},
{
"type": "events",
@@ -1105,7 +1594,14 @@ def test_funnel_unordered_entity_filters(self):
"order": 1,
"name": "user signed up",
"math": "total",
- "properties": [{"key": "prop_b", "value": "another", "operator": "icontains", "type": "event"}],
+ "properties": [
+ {
+ "key": "prop_b",
+ "value": "another",
+ "operator": "icontains",
+ "type": "event",
+ }
+ ],
},
],
}
diff --git a/posthog/queries/funnels/test/test_funnel_unordered_persons.py b/posthog/queries/funnels/test/test_funnel_unordered_persons.py
index 673dee6d30826..c00e6975f5044 100644
--- a/posthog/queries/funnels/test/test_funnel_unordered_persons.py
+++ b/posthog/queries/funnels/test/test_funnel_unordered_persons.py
@@ -6,8 +6,12 @@
from posthog.constants import INSIGHT_FUNNELS
from posthog.models.filters import Filter
-from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.queries.funnels.funnel_unordered_persons import (
+ ClickhouseFunnelUnorderedActors,
+)
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
diff --git a/posthog/queries/funnels/test/test_utils.py b/posthog/queries/funnels/test/test_utils.py
index 8a71c02e6f6e5..c45a4eddcb518 100644
--- a/posthog/queries/funnels/test/test_utils.py
+++ b/posthog/queries/funnels/test/test_utils.py
@@ -1,6 +1,10 @@
from posthog.constants import FunnelOrderType
from posthog.models.filters import Filter
-from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelStrict, ClickhouseFunnelUnordered
+from posthog.queries.funnels import (
+ ClickhouseFunnel,
+ ClickhouseFunnelStrict,
+ ClickhouseFunnelUnordered,
+)
from posthog.queries.funnels.utils import get_funnel_order_class
from posthog.test.base import BaseTest
diff --git a/posthog/queries/funnels/utils.py b/posthog/queries/funnels/utils.py
index 33ef56e271bd2..68f93c2d4542e 100644
--- a/posthog/queries/funnels/utils.py
+++ b/posthog/queries/funnels/utils.py
@@ -6,7 +6,11 @@
def get_funnel_order_class(filter: Filter) -> Type[ClickhouseFunnelBase]:
- from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelStrict, ClickhouseFunnelUnordered
+ from posthog.queries.funnels import (
+ ClickhouseFunnel,
+ ClickhouseFunnelStrict,
+ ClickhouseFunnelUnordered,
+ )
if filter.funnel_order_type == FunnelOrderType.UNORDERED:
return ClickhouseFunnelUnordered
diff --git a/posthog/queries/insight.py b/posthog/queries/insight.py
index 294eb012d60b0..5992d16ddf1e9 100644
--- a/posthog/queries/insight.py
+++ b/posthog/queries/insight.py
@@ -7,7 +7,13 @@
# Wrapper around sync_execute, adding query tags for insights performance
def insight_sync_execute(
- query, args=None, *, team_id: int, query_type: str, filter: Optional["FilterType"] = None, **kwargs
+ query,
+ args=None,
+ *,
+ team_id: int,
+ query_type: str,
+ filter: Optional["FilterType"] = None,
+ **kwargs,
):
tag_queries(team_id=team_id)
_tag_query(query, query_type, filter)
diff --git a/posthog/queries/paths/paths.py b/posthog/queries/paths/paths.py
index b829d8487dddf..6a98857e3927d 100644
--- a/posthog/queries/paths/paths.py
+++ b/posthog/queries/paths/paths.py
@@ -104,7 +104,6 @@ def _exec_query(self) -> List[Tuple]:
)
def get_query(self) -> str:
-
path_query = self.get_path_query()
funnel_cte = ""
@@ -198,7 +197,6 @@ def should_query_funnel(self) -> bool:
return False
def get_path_query(self) -> str:
-
paths_per_person_query = self.get_paths_per_person_query()
self.params["edge_limit"] = self._filter.edge_limit
@@ -243,7 +241,10 @@ def get_session_threshold_clause(self) -> str:
# Implemented in /ee
def get_target_clause(self) -> Tuple[str, Dict]:
- params: Dict[str, Union[str, None]] = {"target_point": None, "secondary_target_point": None}
+ params: Dict[str, Union[str, None]] = {
+ "target_point": None,
+ "secondary_target_point": None,
+ }
filtered_path_ordering_clause = self.get_filtered_path_ordering()
compacting_function = self.get_array_compacting_function()
diff --git a/posthog/queries/paths/paths_event_query.py b/posthog/queries/paths/paths_event_query.py
index 6cc96243cc034..913307b7fdc21 100644
--- a/posthog/queries/paths/paths_event_query.py
+++ b/posthog/queries/paths/paths_event_query.py
@@ -22,7 +22,6 @@ class PathEventQuery(EventQuery):
_filter: PathFilter
def get_query(self) -> Tuple[str, Dict[str, Any]]:
-
funnel_paths_timestamp = ""
funnel_paths_join = ""
funnel_paths_filter = ""
@@ -55,7 +54,13 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
]
_fields += [f"{self.EVENT_TABLE_ALIAS}.{field} AS {field}" for field in self._extra_fields]
_fields += [
- get_property_string_expr("events", field, f"'{field}'", "properties", table_alias=self.EVENT_TABLE_ALIAS)[0]
+ get_property_string_expr(
+ "events",
+ field,
+ f"'{field}'",
+ "properties",
+ table_alias=self.EVENT_TABLE_ALIAS,
+ )[0]
+ f" as {field}"
for field in self._extra_event_properties
]
diff --git a/posthog/queries/person_distinct_id_query.py b/posthog/queries/person_distinct_id_query.py
index 9e8d0606337bb..c04711fbe2370 100644
--- a/posthog/queries/person_distinct_id_query.py
+++ b/posthog/queries/person_distinct_id_query.py
@@ -2,7 +2,6 @@
def get_team_distinct_ids_query(team_id: int) -> str:
-
# ensure team_id is actually an int so we can safely interpolate into the query
assert isinstance(team_id, int)
diff --git a/posthog/queries/person_query.py b/posthog/queries/person_query.py
index 43b86d79ec256..73a779e5aca6a 100644
--- a/posthog/queries/person_query.py
+++ b/posthog/queries/person_query.py
@@ -5,8 +5,14 @@
from posthog.constants import PropertyOperatorType
from posthog.models import Filter
from posthog.models.cohort import Cohort
-from posthog.models.cohort.sql import GET_COHORTPEOPLE_BY_COHORT_ID, GET_STATIC_COHORTPEOPLE_BY_COHORT_ID
-from posthog.models.cohort.util import format_precalculated_cohort_query, format_static_cohort_query
+from posthog.models.cohort.sql import (
+ GET_COHORTPEOPLE_BY_COHORT_ID,
+ GET_STATIC_COHORTPEOPLE_BY_COHORT_ID,
+)
+from posthog.models.cohort.util import (
+ format_precalculated_cohort_query,
+ format_static_cohort_query,
+)
from posthog.models.entity import Entity
from posthog.models.filters.path_filter import PathFilter
from posthog.models.filters.retention_filter import RetentionFilter
@@ -71,7 +77,8 @@ def __init__(
self._extra_fields = self._extra_fields - {self.PERSON_PROPERTIES_ALIAS} | {"properties"}
properties = self._filter.property_groups.combine_property_group(
- PropertyOperatorType.AND, self._entity.property_groups if self._entity else None
+ PropertyOperatorType.AND,
+ self._entity.property_groups if self._entity else None,
)
self._inner_person_properties = self._column_optimizer.property_optimizer.parse_property_groups(
@@ -79,7 +86,10 @@ def __init__(
).inner
def get_query(
- self, prepend: Optional[Union[str, int]] = None, paginate: bool = False, filter_future_persons: bool = False
+ self,
+ prepend: Optional[Union[str, int]] = None,
+ paginate: bool = False,
+ filter_future_persons: bool = False,
) -> Tuple[str, Dict]:
prepend = str(prepend) if prepend is not None else ""
@@ -92,7 +102,10 @@ def get_query(
person_filters_finalization_condition,
person_filters_params,
) = self._get_person_filter_clauses(prepend=prepend)
- multiple_cohorts_condition, multiple_cohorts_params = self._get_multiple_cohorts_clause(prepend=prepend)
+ (
+ multiple_cohorts_condition,
+ multiple_cohorts_params,
+ ) = self._get_multiple_cohorts_clause(prepend=prepend)
single_cohort_join, single_cohort_params = self._get_fast_single_cohort_clause()
if paginate:
order = "ORDER BY argMax(person.created_at, version) DESC, id DESC" if paginate else ""
@@ -100,9 +113,11 @@ def get_query(
else:
order = ""
limit_offset, limit_params = "", {}
- search_prefiltering_condition, search_finalization_condition, search_params = self._get_search_clauses(
- prepend=prepend
- )
+ (
+ search_prefiltering_condition,
+ search_finalization_condition,
+ search_params,
+ ) = self._get_search_clauses(prepend=prepend)
distinct_id_condition, distinct_id_params = self._get_distinct_id_clause()
email_condition, email_params = self._get_email_clause()
filter_future_persons_condition = (
@@ -228,7 +243,11 @@ def _get_fast_single_cohort_clause(self) -> Tuple[str, Dict]:
) {self.COHORT_TABLE_ALIAS}
ON {self.COHORT_TABLE_ALIAS}.person_id = person.id
""",
- {"team_id": self._team_id, "cohort_id": self._cohort.pk, "version": self._cohort.version},
+ {
+ "team_id": self._team_id,
+ "cohort_id": self._cohort.pk,
+ "version": self._cohort.version,
+ },
)
else:
return "", {}
@@ -301,7 +320,14 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]:
prop_group = PropertyGroup(
type=PropertyOperatorType.AND,
- values=[Property(key="email", operator="icontains", value=self._filter.search, type="person")],
+ values=[
+ Property(
+ key="email",
+ operator="icontains",
+ value=self._filter.search,
+ type="person",
+ )
+ ],
)
finalization_conditions_sql, params = parse_prop_grouped_clauses(
team_id=self._team_id,
@@ -315,7 +341,7 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]:
)
finalization_sql = f"AND ({finalization_conditions_sql} OR {id_conditions_sql})"
- prefiltering_conditions_sql, prefiltering_params = parse_prop_grouped_clauses(
+ (prefiltering_conditions_sql, prefiltering_params,) = parse_prop_grouped_clauses(
team_id=self._team_id,
property_group=prop_group,
prepend=f"search_pre_{prepend}",
@@ -360,7 +386,8 @@ def _add_distinct_id_join_if_needed(self, query: str, params: Dict[Any, Any]) ->
GROUP BY person.*
ORDER BY created_at desc, id desc
""".format(
- person_query=query, distinct_id_query=get_team_distinct_ids_query(self._team_id)
+ person_query=query,
+ distinct_id_query=get_team_distinct_ids_query(self._team_id),
),
params,
)
@@ -371,7 +398,9 @@ def _get_email_clause(self) -> Tuple[str, Dict]:
if self._filter.email:
return prop_filter_json_extract(
- Property(key="email", value=self._filter.email, type="person"), 0, prepend="_email"
+ Property(key="email", value=self._filter.email, type="person"),
+ 0,
+ prepend="_email",
)
return "", {}
diff --git a/posthog/queries/properties_timeline/properties_timeline.py b/posthog/queries/properties_timeline/properties_timeline.py
index 328c0da8fa03c..578a9dee85620 100644
--- a/posthog/queries/properties_timeline/properties_timeline.py
+++ b/posthog/queries/properties_timeline/properties_timeline.py
@@ -5,7 +5,10 @@
from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter
from posthog.models.group.group import Group
from posthog.models.person.person import Person
-from posthog.models.property.util import extract_tables_and_properties, get_single_or_multi_property_string_expr
+from posthog.models.property.util import (
+ extract_tables_and_properties,
+ get_single_or_multi_property_string_expr,
+)
from posthog.models.team.team import Team
from posthog.queries.insight import insight_sync_execute
from posthog.queries.trends.util import offset_time_series_date_by_interval
@@ -87,7 +90,9 @@ def run(
filter = filter.shallow_clone(
{
"date_to": offset_time_series_date_by_interval(
- cast(datetime.datetime, filter.date_from), filter=filter, team=team
+ cast(datetime.datetime, filter.date_from),
+ filter=filter,
+ team=team,
)
}
)
@@ -120,7 +125,10 @@ def run(
actor_properties_column=actor_properties_column,
)
- params = {**event_query_params, "actor_id": actor.uuid if isinstance(actor, Person) else actor.group_key}
+ params = {
+ **event_query_params,
+ "actor_id": actor.uuid if isinstance(actor, Person) else actor.group_key,
+ }
raw_query_result = insight_sync_execute(
formatted_sql,
{**params, **filter.hogql_context.values},
diff --git a/posthog/queries/property_values.py b/posthog/queries/property_values.py
index d5d37a076e869..a8b943f25d1d2 100644
--- a/posthog/queries/property_values.py
+++ b/posthog/queries/property_values.py
@@ -3,7 +3,10 @@
from django.utils import timezone
from posthog.models.event.sql import SELECT_PROP_VALUES_SQL_WITH_FILTER
-from posthog.models.person.sql import SELECT_PERSON_PROP_VALUES_SQL, SELECT_PERSON_PROP_VALUES_SQL_WITH_FILTER
+from posthog.models.person.sql import (
+ SELECT_PERSON_PROP_VALUES_SQL,
+ SELECT_PERSON_PROP_VALUES_SQL_WITH_FILTER,
+)
from posthog.models.property.util import get_property_string_expr
from posthog.models.team import Team
from posthog.queries.insight import insight_sync_execute
@@ -11,7 +14,10 @@
def get_property_values_for_key(
- key: str, team: Team, event_names: Optional[List[str]] = None, value: Optional[str] = None
+ key: str,
+ team: Team,
+ event_names: Optional[List[str]] = None,
+ value: Optional[str] = None,
):
property_field, mat_column_exists = get_property_string_expr("events", key, "%(key)s", "properties")
parsed_date_from = "AND timestamp >= '{}'".format(
diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py
index 208bf0207843d..e0604c3b44b91 100644
--- a/posthog/queries/query_date_range.py
+++ b/posthog/queries/query_date_range.py
@@ -10,8 +10,16 @@
from posthog.models.filters.mixins.interval import IntervalMixin
from posthog.models.team import Team
-from posthog.queries.util import TIME_IN_SECONDS, get_earliest_timestamp, get_start_of_interval_sql
-from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
+from posthog.queries.util import (
+ TIME_IN_SECONDS,
+ get_earliest_timestamp,
+ get_start_of_interval_sql,
+)
+from posthog.utils import (
+ DEFAULT_DATE_FROM_DAYS,
+ relative_date_parse,
+ relative_date_parse_with_delta_mapping,
+)
class QueryDateRange:
@@ -28,7 +36,13 @@ class QueryDateRange:
_table: str
_should_round: Optional[bool]
- def __init__(self, filter: AnyFilter, team: Team, should_round: Optional[bool] = None, table="") -> None:
+ def __init__(
+ self,
+ filter: AnyFilter,
+ team: Team,
+ should_round: Optional[bool] = None,
+ table="",
+ ) -> None:
filter.team = team # This is a dirty - but the easiest - way to get the team into the filter
self._filter = filter
self._team = team
@@ -97,7 +111,10 @@ def date_to(self) -> Tuple[str, Dict]:
date_to_query = self.date_to_clause
date_to = self.date_to_param
- date_to_param = {"date_to": date_to.strftime("%Y-%m-%d %H:%M:%S"), "timezone": self._team.timezone}
+ date_to_param = {
+ "date_to": date_to.strftime("%Y-%m-%d %H:%M:%S"),
+ "timezone": self._team.timezone,
+ }
return date_to_query, date_to_param
@@ -106,7 +123,10 @@ def date_from(self) -> Tuple[str, Dict]:
date_from_query = self.date_from_clause
date_from = self.date_from_param
- date_from_param = {"date_from": date_from.strftime("%Y-%m-%d %H:%M:%S"), "timezone": self._team.timezone}
+ date_from_param = {
+ "date_from": date_from.strftime("%Y-%m-%d %H:%M:%S"),
+ "timezone": self._team.timezone,
+ }
return date_from_query, date_from_param
diff --git a/posthog/queries/retention/__init__.py b/posthog/queries/retention/__init__.py
index f0817c016bb5c..c3d1590058eea 100644
--- a/posthog/queries/retention/__init__.py
+++ b/posthog/queries/retention/__init__.py
@@ -1,7 +1,9 @@
from posthog.settings import EE_AVAILABLE
if EE_AVAILABLE:
- from ee.clickhouse.queries.retention.retention import ClickhouseRetention as Retention
+ from ee.clickhouse.queries.retention.retention import (
+ ClickhouseRetention as Retention,
+ )
else:
from posthog.queries.retention.retention import Retention # type: ignore
diff --git a/posthog/queries/retention/actors_query.py b/posthog/queries/retention/actors_query.py
index ef31f6fd8c473..5a49c510a3240 100644
--- a/posthog/queries/retention/actors_query.py
+++ b/posthog/queries/retention/actors_query.py
@@ -99,7 +99,10 @@ def build_actor_activity_query(
aggregate_users_by_distinct_id: Optional[bool] = None,
retention_events_query=RetentionEventsQuery,
) -> Tuple[str, Dict[str, Any]]:
- from posthog.queries.retention import build_returning_event_query, build_target_event_query
+ from posthog.queries.retention import (
+ build_returning_event_query,
+ build_target_event_query,
+ )
"""
The retention actor query is used to retrieve something of the form:
@@ -134,7 +137,8 @@ def build_actor_activity_query(
}
query = RETENTION_BREAKDOWN_ACTOR_SQL.format(
- returning_event_query=returning_event_query, target_event_query=target_event_query
+ returning_event_query=returning_event_query,
+ target_event_query=target_event_query,
)
return query, all_params
@@ -147,7 +151,6 @@ def _build_actor_query(
selected_interval: Optional[int] = None,
retention_events_query=RetentionEventsQuery,
) -> Tuple[str, Dict[str, Any]]:
-
actor_activity_query, actor_activity_query_params = build_actor_activity_query(
filter=filter,
team=team,
@@ -157,7 +160,11 @@ def _build_actor_query(
retention_events_query=retention_events_query,
)
- params = {"offset": filter.offset, "limit": filter.limit or 100, **actor_activity_query_params}
+ params = {
+ "offset": filter.offset,
+ "limit": filter.limit or 100,
+ **actor_activity_query_params,
+ }
actor_query_template = """
SELECT
actor_id,
diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py
index 145ee1404c37b..24cbe95376e93 100644
--- a/posthog/queries/retention/retention.py
+++ b/posthog/queries/retention/retention.py
@@ -6,7 +6,10 @@
from posthog.models.filters.retention_filter import RetentionFilter
from posthog.models.team import Team
from posthog.queries.insight import insight_sync_execute
-from posthog.queries.retention.actors_query import RetentionActorsByPeriod, build_actor_activity_query
+from posthog.queries.retention.actors_query import (
+ RetentionActorsByPeriod,
+ build_actor_activity_query,
+)
from posthog.queries.retention.retention_events_query import RetentionEventsQuery
from posthog.queries.retention.sql import RETENTION_BREAKDOWN_SQL
from posthog.queries.retention.types import BreakdownValues, CohortKey
@@ -49,7 +52,9 @@ def _get_retention_by_breakdown_values(
"count": correct_result_for_sampling(count, filter.sampling_factor),
"people": [],
"people_url": self._construct_people_url_for_trend_breakdown_interval(
- filter=filter, breakdown_values=breakdown_values, selected_interval=intervals_from_base
+ filter=filter,
+ breakdown_values=breakdown_values,
+ selected_interval=intervals_from_base,
),
}
for (breakdown_values, intervals_from_base, count) in result
@@ -58,10 +63,17 @@ def _get_retention_by_breakdown_values(
return result_dict
def _construct_people_url_for_trend_breakdown_interval(
- self, filter: RetentionFilter, selected_interval: int, breakdown_values: BreakdownValues
+ self,
+ filter: RetentionFilter,
+ selected_interval: int,
+ breakdown_values: BreakdownValues,
):
params = RetentionFilter(
- {**filter._data, "breakdown_values": breakdown_values, "selected_interval": selected_interval},
+ {
+ **filter._data,
+ "breakdown_values": breakdown_values,
+ "selected_interval": selected_interval,
+ },
).to_params()
return f"{self._base_uri}api/person/retention/?{urlencode(params)}"
@@ -69,7 +81,10 @@ def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, An
result = [
{
"values": [
- resultset.get(CohortKey(breakdown_values, interval), {"count": 0, "people": []})
+ resultset.get(
+ CohortKey(breakdown_values, interval),
+ {"count": 0, "people": []},
+ )
for interval in range(filter.total_intervals)
],
"label": "::".join(map(str, breakdown_values)),
@@ -84,7 +99,12 @@ def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, An
return result
- def process_table_result(self, resultset: Dict[CohortKey, Dict[str, Any]], filter: RetentionFilter, team: Team):
+ def process_table_result(
+ self,
+ resultset: Dict[CohortKey, Dict[str, Any]],
+ filter: RetentionFilter,
+ team: Team,
+ ):
"""
Constructs a response for the rest api when there is no breakdown specified
@@ -96,7 +116,11 @@ def process_table_result(self, resultset: Dict[CohortKey, Dict[str, Any]], filte
def construct_url(first_day):
params = RetentionFilter(
- {**filter._data, "display": "ActionsTable", "breakdown_values": [first_day]},
+ {
+ **filter._data,
+ "display": "ActionsTable",
+ "breakdown_values": [first_day],
+ },
).to_params()
return "/api/person/retention/?" f"{urlencode(params)}"
diff --git a/posthog/queries/retention/retention_events_query.py b/posthog/queries/retention/retention_events_query.py
index a3adba8b7bda6..609f66387e865 100644
--- a/posthog/queries/retention/retention_events_query.py
+++ b/posthog/queries/retention/retention_events_query.py
@@ -38,7 +38,6 @@ def __init__(
)
def get_query(self) -> Tuple[str, Dict[str, Any]]:
-
_fields = [
self.get_timestamp_field(),
self.target_field(),
@@ -176,7 +175,9 @@ def target_field(self) -> str:
def get_timestamp_field(self) -> str:
start_of_inteval_sql = get_start_of_interval_sql(
- self._filter.period, source=f"{self.EVENT_TABLE_ALIAS}.timestamp", team=self._team
+ self._filter.period,
+ source=f"{self.EVENT_TABLE_ALIAS}.timestamp",
+ team=self._team,
)
if self._event_query_type == RetentionQueryType.TARGET:
return f"DISTINCT {start_of_inteval_sql} AS event_date"
diff --git a/posthog/queries/stickiness/__init__.py b/posthog/queries/stickiness/__init__.py
index 6e3acf68fdaa8..421459fd7cfdd 100644
--- a/posthog/queries/stickiness/__init__.py
+++ b/posthog/queries/stickiness/__init__.py
@@ -2,7 +2,9 @@
if EE_AVAILABLE:
from ee.clickhouse.queries.stickiness import ClickhouseStickiness as Stickiness
- from ee.clickhouse.queries.stickiness import ClickhouseStickinessActors as StickinessActors
+ from ee.clickhouse.queries.stickiness import (
+ ClickhouseStickinessActors as StickinessActors,
+ )
else:
from posthog.queries.stickiness.stickiness import Stickiness # type: ignore
from posthog.queries.stickiness.stickiness_actors import StickinessActors # type: ignore
diff --git a/posthog/queries/stickiness/stickiness.py b/posthog/queries/stickiness/stickiness.py
index 2a43419be00e1..08bea51c8b042 100644
--- a/posthog/queries/stickiness/stickiness.py
+++ b/posthog/queries/stickiness/stickiness.py
@@ -20,7 +20,6 @@ class Stickiness:
actor_query_class = StickinessActors
def run(self, filter: StickinessFilter, team: Team, *args, **kwargs) -> List[Dict[str, Any]]:
-
response = []
for entity in filter.entities:
if entity.type == TREND_FILTER_TYPE_ACTIONS and entity.id is not None:
@@ -44,14 +43,26 @@ def stickiness(self, entity: Entity, filter: StickinessFilter, team: Team) -> Di
counts = insight_sync_execute(
query,
- {**event_params, **filter.hogql_context.values, "num_intervals": filter.total_intervals},
+ {
+ **event_params,
+ **filter.hogql_context.values,
+ "num_intervals": filter.total_intervals,
+ },
query_type="stickiness",
filter=filter,
team_id=team.pk,
)
return self.process_result(counts, filter, entity)
- def people(self, target_entity: Entity, filter: StickinessFilter, team: Team, request, *args, **kwargs):
+ def people(
+ self,
+ target_entity: Entity,
+ filter: StickinessFilter,
+ team: Team,
+ request,
+ *args,
+ **kwargs,
+ ):
_, serialized_actors, _ = self.actor_query_class(entity=target_entity, filter=filter, team=team).get_actors()
return serialized_actors
diff --git a/posthog/queries/stickiness/stickiness_event_query.py b/posthog/queries/stickiness/stickiness_event_query.py
index df7fb280b37b0..0e70af72bb997 100644
--- a/posthog/queries/stickiness/stickiness_event_query.py
+++ b/posthog/queries/stickiness/stickiness_event_query.py
@@ -20,7 +20,6 @@ def __init__(self, entity: Entity, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_query(self) -> Tuple[str, Dict[str, Any]]:
-
prop_query, prop_params = self._get_prop_groups(
self._filter.property_groups.combine_property_group(PropertyOperatorType.AND, self._entity.property_groups),
person_properties_mode=get_person_properties_mode(self._team),
diff --git a/posthog/queries/test/test_base.py b/posthog/queries/test/test_base.py
index 0710babe19525..bccc9ca60a53e 100644
--- a/posthog/queries/test/test_base.py
+++ b/posthog/queries/test/test_base.py
@@ -21,7 +21,10 @@ def test_determine_compared_filter(self):
self.assertIsInstance(compared_filter, PathFilter)
self.assertDictContainsSubset(
- {"date_from": "2020-05-16T00:00:00+00:00", "date_to": "2020-05-22T23:59:59.999999+00:00"},
+ {
+ "date_from": "2020-05-16T00:00:00+00:00",
+ "date_to": "2020-05-22T23:59:59.999999+00:00",
+ },
compared_filter.to_dict(),
)
@@ -186,7 +189,8 @@ def test_match_property_date_operators(self):
self.assertTrue(match_property(property_a, {"key": datetime.datetime(2022, 4, 30, 1, 2, 3)}))
self.assertTrue(
match_property(
- property_a, {"key": datetime.datetime(2022, 4, 30, 1, 2, 3, tzinfo=tz.gettz("Europe/Madrid"))}
+ property_a,
+ {"key": datetime.datetime(2022, 4, 30, 1, 2, 3, tzinfo=tz.gettz("Europe/Madrid"))},
)
)
self.assertTrue(match_property(property_a, {"key": parser.parse("2022-04-30")}))
diff --git a/posthog/queries/test/test_lifecycle.py b/posthog/queries/test/test_lifecycle.py
index bafbf6bbdff5e..6bb34bfd7d143 100644
--- a/posthog/queries/test/test_lifecycle.py
+++ b/posthog/queries/test/test_lifecycle.py
@@ -34,7 +34,10 @@ def _create_events(self, data, event="$pageview"):
_create_person(
team_id=self.team.pk,
distinct_ids=[id],
- properties={"name": id, **({"email": "test@posthog.com"} if id == "p1" else {})},
+ properties={
+ "name": id,
+ **({"email": "test@posthog.com"} if id == "p1" else {}),
+ },
)
)
for timestamp in timestamps:
@@ -241,14 +244,34 @@ def test_lifecycle_trend_prop_filtering(self):
)
_create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "p2"})
- _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T12:00:00Z")
- _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-12T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p2",
+ timestamp="2020-01-09T12:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p2",
+ timestamp="2020-01-12T12:00:00Z",
+ )
_create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "p3"})
- _create_event(team=self.team, event="$pageview", distinct_id="p3", timestamp="2020-01-12T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p3",
+ timestamp="2020-01-12T12:00:00Z",
+ )
_create_person(team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "p4"})
- _create_event(team=self.team, event="$pageview", distinct_id="p4", timestamp="2020-01-15T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p4",
+ timestamp="2020-01-15T12:00:00Z",
+ )
result = Trends().run(
Filter(
@@ -352,14 +375,34 @@ def test_lifecycle_trend_person_prop_filtering(self):
)
_create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "p2"})
- _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T12:00:00Z")
- _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-12T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p2",
+ timestamp="2020-01-09T12:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p2",
+ timestamp="2020-01-12T12:00:00Z",
+ )
_create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "p3"})
- _create_event(team=self.team, event="$pageview", distinct_id="p3", timestamp="2020-01-12T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p3",
+ timestamp="2020-01-12T12:00:00Z",
+ )
_create_person(team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "p4"})
- _create_event(team=self.team, event="$pageview", distinct_id="p4", timestamp="2020-01-15T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p4",
+ timestamp="2020-01-15T12:00:00Z",
+ )
result = Trends().run(
Filter(
@@ -392,15 +435,44 @@ def test_lifecycle_trend_person_prop_filtering(self):
def test_lifecycle_trends_distinct_id_repeat(self):
with freeze_time("2020-01-12T12:00:00Z"):
- _create_person(team_id=self.team.pk, distinct_ids=["p1", "another_p1"], properties={"name": "p1"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["p1", "another_p1"],
+ properties={"name": "p1"},
+ )
- _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-12T12:00:00Z")
- _create_event(team=self.team, event="$pageview", distinct_id="another_p1", timestamp="2020-01-14T12:00:00Z")
- _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-15T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p1",
+ timestamp="2020-01-12T12:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="another_p1",
+ timestamp="2020-01-14T12:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p1",
+ timestamp="2020-01-15T12:00:00Z",
+ )
- _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-17T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p1",
+ timestamp="2020-01-17T12:00:00Z",
+ )
- _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-19T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p1",
+ timestamp="2020-01-19T12:00:00Z",
+ )
result = Trends().run(
Filter(
@@ -495,7 +567,10 @@ def test_lifecycle_trend_people_paginated(self):
person_id = "person{}".format(i)
_create_person(team_id=self.team.pk, distinct_ids=[person_id])
_create_event(
- team=self.team, event="$pageview", distinct_id=person_id, timestamp="2020-01-15T12:00:00Z"
+ team=self.team,
+ event="$pageview",
+ distinct_id=person_id,
+ timestamp="2020-01-15T12:00:00Z",
)
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result = self.client.get(
@@ -635,7 +710,15 @@ def test_lifecycle_trend_weeks(self):
)
self.assertEqual(
- result[0]["days"], ["2020-02-03", "2020-02-10", "2020-02-17", "2020-02-24", "2020-03-02", "2020-03-09"]
+ result[0]["days"],
+ [
+ "2020-02-03",
+ "2020-02-10",
+ "2020-02-17",
+ "2020-02-24",
+ "2020-03-02",
+ "2020-03-09",
+ ],
)
assertLifecycleResults(
@@ -812,7 +895,10 @@ def test_timezones(self):
assertLifecycleResults(
result_pacific,
[
- {"status": "dormant", "data": [-1.0, -2.0, -1.0, 0.0, -2.0, 0.0, -1.0, 0.0]},
+ {
+ "status": "dormant",
+ "data": [-1.0, -2.0, -1.0, 0.0, -2.0, 0.0, -1.0, 0.0],
+ },
{"status": "new", "data": [1, 0, 0, 1, 0, 0, 0, 0]},
{"status": "resurrecting", "data": [1, 1, 0, 1, 0, 1, 0, 1]},
{"status": "returning", "data": [0, 0, 0, 0, 0, 0, 0, 0]},
diff --git a/posthog/queries/test/test_paths.py b/posthog/queries/test/test_paths.py
index c7e3df9ced0a9..45f09a9ca5787 100644
--- a/posthog/queries/test/test_paths.py
+++ b/posthog/queries/test/test_paths.py
@@ -53,7 +53,11 @@ def test_current_url_paths_and_logic(self):
]
)
- _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"email": "test@posthog.com"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person_1"],
+ properties={"email": "test@posthog.com"},
+ )
events.append(
_create_event(
properties={"$current_url": "/"},
@@ -191,7 +195,10 @@ def test_current_url_paths_and_logic(self):
date_from = now() - relativedelta(days=7)
date_to = now() + relativedelta(days=7)
- date_params = {"date_from": date_from.strftime("%Y-%m-%d"), "date_to": date_to.strftime("%Y-%m-%d")}
+ date_params = {
+ "date_from": date_from.strftime("%Y-%m-%d"),
+ "date_to": date_to.strftime("%Y-%m-%d"),
+ }
filter = PathFilter(team=self.team, data={**date_params})
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
@@ -204,7 +211,10 @@ def test_current_url_paths_and_logic(self):
date_from = now() + relativedelta(days=7)
date_to = now() - relativedelta(days=7)
- date_params = {"date_from": date_from.strftime("%Y-%m-%d"), "date_to": date_to.strftime("%Y-%m-%d")}
+ date_params = {
+ "date_from": date_from.strftime("%Y-%m-%d"),
+ "date_to": date_to.strftime("%Y-%m-%d"),
+ }
filter = PathFilter(team=self.team, data={**date_params})
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
self.assertEqual(len(response), 0)
@@ -215,18 +225,86 @@ def test_custom_event_paths(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_3"])
_create_person(team_id=self.team.pk, distinct_ids=["person_4"])
- _create_event(distinct_id="person_1", event="custom_event_1", team=self.team, properties={}),
- _create_event(distinct_id="person_1", event="custom_event_3", team=self.team, properties={}),
- _create_event(
- properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team
- ), # should be ignored,
- _create_event(distinct_id="person_2", event="custom_event_1", team=self.team, properties={}),
- _create_event(distinct_id="person_2", event="custom_event_2", team=self.team, properties={}),
- _create_event(distinct_id="person_2", event="custom_event_3", team=self.team, properties={}),
- _create_event(distinct_id="person_3", event="custom_event_2", team=self.team, properties={}),
- _create_event(distinct_id="person_3", event="custom_event_1", team=self.team, properties={}),
- _create_event(distinct_id="person_4", event="custom_event_1", team=self.team, properties={}),
- _create_event(distinct_id="person_4", event="custom_event_2", team=self.team, properties={}),
+ (
+ _create_event(
+ distinct_id="person_1",
+ event="custom_event_1",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_1",
+ event="custom_event_3",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ ) # should be ignored,
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_1",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_2",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_3",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_3",
+ event="custom_event_2",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_3",
+ event="custom_event_1",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_4",
+ event="custom_event_1",
+ team=self.team,
+ properties={},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_4",
+ event="custom_event_2",
+ team=self.team,
+ properties={},
+ ),
+ )
filter = PathFilter(team=self.team, data={"path_type": "custom_event"})
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
@@ -253,21 +331,93 @@ def test_custom_hogql_paths(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_3"])
_create_person(team_id=self.team.pk, distinct_ids=["person_4"])
- _create_event(distinct_id="person_1", event="custom_event_1", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_1", event="custom_event_3", team=self.team, properties={"a": "!"}),
- _create_event(
- properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team
- ), # should be ignored,
- _create_event(distinct_id="person_2", event="custom_event_1", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_2", event="custom_event_2", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_2", event="custom_event_3", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_3", event="custom_event_2", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_3", event="custom_event_1", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_4", event="custom_event_1", team=self.team, properties={"a": "!"}),
- _create_event(distinct_id="person_4", event="custom_event_2", team=self.team, properties={"a": "!"}),
+ (
+ _create_event(
+ distinct_id="person_1",
+ event="custom_event_1",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_1",
+ event="custom_event_3",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ ) # should be ignored,
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_1",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_2",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_2",
+ event="custom_event_3",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_3",
+ event="custom_event_2",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_3",
+ event="custom_event_1",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_4",
+ event="custom_event_1",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
+ (
+ _create_event(
+ distinct_id="person_4",
+ event="custom_event_2",
+ team=self.team,
+ properties={"a": "!"},
+ ),
+ )
filter = PathFilter(
- data={"path_type": "hogql", "paths_hogql_expression": "event || properties.a"}, team=self.team
+ data={
+ "path_type": "hogql",
+ "paths_hogql_expression": "event || properties.a",
+ },
+ team=self.team,
)
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
@@ -293,17 +443,78 @@ def test_screen_paths(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_3"])
_create_person(team_id=self.team.pk, distinct_ids=["person_4"])
- _create_event(properties={"$screen_name": "/"}, distinct_id="person_1", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/about"}, distinct_id="person_1", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/"}, distinct_id="person_2b", event="$screen", team=self.team),
- _create_event(
- properties={"$screen_name": "/pricing"}, distinct_id="person_2a", event="$screen", team=self.team
- ),
- _create_event(properties={"$screen_name": "/about"}, distinct_id="person_2b", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/pricing"}, distinct_id="person_3", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/"}, distinct_id="person_3", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/"}, distinct_id="person_4", event="$screen", team=self.team),
- _create_event(properties={"$screen_name": "/pricing"}, distinct_id="person_4", event="$screen", team=self.team),
+ (
+ _create_event(
+ properties={"$screen_name": "/"},
+ distinct_id="person_1",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/about"},
+ distinct_id="person_1",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/"},
+ distinct_id="person_2b",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/pricing"},
+ distinct_id="person_2a",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/about"},
+ distinct_id="person_2b",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/pricing"},
+ distinct_id="person_3",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/"},
+ distinct_id="person_3",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/"},
+ distinct_id="person_4",
+ event="$screen",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$screen_name": "/pricing"},
+ distinct_id="person_4",
+ event="$screen",
+ team=self.team,
+ ),
+ )
filter = PathFilter(team=self.team, data={"path_type": "$screen"})
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
@@ -329,47 +540,82 @@ def test_paths_properties_filter(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_3"])
_create_person(team_id=self.team.pk, distinct_ids=["person_4"])
- _create_event(
- properties={"$current_url": "/", "$browser": "Chrome"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- ),
- _create_event(
- properties={"$current_url": "/about", "$browser": "Chrome"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- ),
- _create_event(
- properties={"$current_url": "/", "$browser": "Chrome"},
- distinct_id="person_2",
- event="$pageview",
- team=self.team,
- ),
- _create_event(
- properties={"$current_url": "/pricing", "$browser": "Chrome"},
- distinct_id="person_2",
- event="$pageview",
- team=self.team,
- ),
- _create_event(
- properties={"$current_url": "/about", "$browser": "Chrome"},
- distinct_id="person_2",
- event="$pageview",
- team=self.team,
- ),
- _create_event(
- properties={"$current_url": "/pricing"}, distinct_id="person_3", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/"}, distinct_id="person_3", event="$pageview", team=self.team),
- _create_event(properties={"$current_url": "/"}, distinct_id="person_4", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/pricing"}, distinct_id="person_4", event="$pageview", team=self.team
- ),
+ (
+ _create_event(
+ properties={"$current_url": "/", "$browser": "Chrome"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about", "$browser": "Chrome"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/", "$browser": "Chrome"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing", "$browser": "Chrome"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about", "$browser": "Chrome"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing"},
+ distinct_id="person_3",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_3",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_4",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing"},
+ distinct_id="person_4",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
filter = PathFilter(
- team=self.team, data={"properties": [{"key": "$browser", "value": "Chrome", "type": "event"}]}
+ team=self.team,
+ data={"properties": [{"key": "$browser", "value": "Chrome", "type": "event"}]},
)
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
@@ -393,36 +639,118 @@ def test_paths_start(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_4"])
_create_person(team_id=self.team.pk, distinct_ids=["person_5a", "person_5b"])
- _create_event(properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/about/"}, distinct_id="person_1", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/"}, distinct_id="person_2", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/pricing/"}, distinct_id="person_2", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/about"}, distinct_id="person_2", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/pricing"}, distinct_id="person_3", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/"}, distinct_id="person_3", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/about/"}, distinct_id="person_3", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/"}, distinct_id="person_4", event="$pageview", team=self.team),
- _create_event(
- properties={"$current_url": "/pricing/"}, distinct_id="person_4", event="$pageview", team=self.team
- ),
- _create_event(
- properties={"$current_url": "/pricing"}, distinct_id="person_5a", event="$pageview", team=self.team
- ),
- _create_event(
- properties={"$current_url": "/about"}, distinct_id="person_5b", event="$pageview", team=self.team
- ),
- _create_event(
- properties={"$current_url": "/pricing/"}, distinct_id="person_5a", event="$pageview", team=self.team
- ),
- _create_event(properties={"$current_url": "/help"}, distinct_id="person_5b", event="$pageview", team=self.team),
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing/"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about"},
+ distinct_id="person_2",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing"},
+ distinct_id="person_3",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_3",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about/"},
+ distinct_id="person_3",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_4",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing/"},
+ distinct_id="person_4",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing"},
+ distinct_id="person_5a",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about"},
+ distinct_id="person_5b",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/pricing/"},
+ distinct_id="person_5a",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/help"},
+ distinct_id="person_5b",
+ event="$pageview",
+ team=self.team,
+ ),
+ )
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path/?type=%24pageview&start=%2Fpricing"
@@ -463,34 +791,42 @@ def test_paths_start(self):
def test_paths_in_window(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_1"])
- _create_event(
- properties={"$current_url": "/"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- timestamp="2020-04-14 03:25:34",
- ),
- _create_event(
- properties={"$current_url": "/about"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- timestamp="2020-04-14 03:30:34",
- ),
- _create_event(
- properties={"$current_url": "/"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- timestamp="2020-04-15 03:25:34",
- ),
- _create_event(
- properties={"$current_url": "/about"},
- distinct_id="person_1",
- event="$pageview",
- team=self.team,
- timestamp="2020-04-15 03:30:34",
- ),
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ timestamp="2020-04-14 03:25:34",
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ timestamp="2020-04-14 03:30:34",
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ timestamp="2020-04-15 03:25:34",
+ ),
+ )
+ (
+ _create_event(
+ properties={"$current_url": "/about"},
+ distinct_id="person_1",
+ event="$pageview",
+ team=self.team,
+ timestamp="2020-04-15 03:30:34",
+ ),
+ )
filter = PathFilter(team=self.team, data={"date_from": "2020-04-13"})
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter)
diff --git a/posthog/queries/test/test_query_date_range.py b/posthog/queries/test/test_query_date_range.py
index 15e7944502ba6..ebb21ad90e5ac 100644
--- a/posthog/queries/test/test_query_date_range.py
+++ b/posthog/queries/test/test_query_date_range.py
@@ -7,7 +7,6 @@
class TestQueryDateRange(APIBaseTest):
def test_parsed_date(self):
-
with freeze_time("2021-08-25T00:00:00.000Z"):
filter = Filter(
data={
@@ -31,7 +30,6 @@ def test_parsed_date(self):
)
def test_parsed_date_hour(self):
-
with freeze_time("2021-08-25T00:00:00.000Z"):
filter = Filter(
data={
@@ -55,7 +53,6 @@ def test_parsed_date_hour(self):
) # ensure last hour is included
def test_parsed_date_middle_of_hour(self):
-
with freeze_time("2021-08-25T00:00:00.000Z"):
filter = Filter(
data={
@@ -80,7 +77,6 @@ def test_parsed_date_middle_of_hour(self):
) # ensure last hour is included
def test_parsed_date_week_rounded(self):
-
with freeze_time("2021-08-25T00:00:00.000Z"):
filter = Filter(
data={
@@ -104,7 +100,6 @@ def test_parsed_date_week_rounded(self):
)
def test_is_hourly(self):
-
with freeze_time("2021-08-25T00:00:00.000Z"):
filter = Filter(
data={
diff --git a/posthog/queries/test/test_retention.py b/posthog/queries/test/test_retention.py
index 42b7c596b14a9..5b823d462d917 100644
--- a/posthog/queries/test/test_retention.py
+++ b/posthog/queries/test/test_retention.py
@@ -57,7 +57,13 @@ def _create_events(team, user_and_timestamps, event="$pageview"):
if len(properties_args) == 1:
properties.update(properties_args[0])
- _create_event(team=team, event=event, distinct_id=distinct_id, timestamp=timestamp, properties=properties)
+ _create_event(
+ team=team,
+ event=event,
+ distinct_id=distinct_id,
+ timestamp=timestamp,
+ properties=properties,
+ )
i += 1
@@ -126,7 +132,19 @@ def test_day_interval(self):
self.assertEqual(len(result), 11)
self.assertEqual(
pluck(result, "label"),
- ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
+ [
+ "Day 0",
+ "Day 1",
+ "Day 2",
+ "Day 3",
+ "Day 4",
+ "Day 5",
+ "Day 6",
+ "Day 7",
+ "Day 8",
+ "Day 9",
+ "Day 10",
+ ],
)
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
@@ -148,8 +166,16 @@ def test_day_interval(self):
)
def test_month_interval(self):
- _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"})
- _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team=self.team,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
+ )
_create_events(
self.team,
@@ -227,8 +253,16 @@ def test_month_interval(self):
@override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=True)
@snapshot_clickhouse_queries
def test_month_interval_with_person_on_events_v2(self):
- _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"})
- _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team=self.team,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
+ )
person_id1 = str(uuid.uuid4())
person_id2 = str(uuid.uuid4())
@@ -386,8 +420,16 @@ def test_month_interval_with_person_on_events_v2(self):
)
def test_week_interval(self):
- _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"})
- _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team=self.team,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
+ )
_create_events(
self.team,
@@ -408,17 +450,32 @@ def test_week_interval(self):
)
result = retention().run(
- RetentionFilter(data={"date_to": _date(10, month=1, hour=0), "period": "Week", "total_intervals": 7}),
+ RetentionFilter(
+ data={
+ "date_to": _date(10, month=1, hour=0),
+ "period": "Week",
+ "total_intervals": 7,
+ }
+ ),
self.team,
)
self.assertEqual(
- pluck(result, "label"), ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"]
+ pluck(result, "label"),
+ ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"],
)
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]],
+ [
+ [2, 2, 1, 2, 2, 0, 1],
+ [2, 1, 2, 2, 0, 1],
+ [1, 1, 1, 0, 0],
+ [2, 2, 0, 1],
+ [2, 0, 1],
+ [0, 0],
+ [1],
+ ],
)
self.assertEqual(
@@ -435,8 +492,16 @@ def test_week_interval(self):
)
def test_hour_interval(self):
- _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"})
- _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team=self.team,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
+ )
_create_events(
self.team,
@@ -513,8 +578,16 @@ def test_hour_interval(self):
 # ensure that the first interval is properly rounded according to the specified period
def test_interval_rounding(self):
- _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"})
- _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"})
+ _create_person(
+ team=self.team,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team=self.team,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
+ )
_create_events(
self.team,
@@ -535,17 +608,32 @@ def test_interval_rounding(self):
)
result = retention().run(
- RetentionFilter(data={"date_to": _date(14, month=1, hour=0), "period": "Week", "total_intervals": 7}),
+ RetentionFilter(
+ data={
+ "date_to": _date(14, month=1, hour=0),
+ "period": "Week",
+ "total_intervals": 7,
+ }
+ ),
self.team,
)
self.assertEqual(
- pluck(result, "label"), ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"]
+ pluck(result, "label"),
+ ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"],
)
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]],
+ [
+ [2, 2, 1, 2, 2, 0, 1],
+ [2, 1, 2, 2, 0, 1],
+ [1, 1, 1, 0, 0],
+ [2, 2, 0, 1],
+ [2, 0, 1],
+ [0, 0],
+ [1],
+ ],
)
self.assertEqual(
@@ -583,7 +671,11 @@ def test_retention_people_basic(self):
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result, _ = retention().actors_in_period(
- RetentionFilter(data={"date_to": _date(10, hour=6), "selected_interval": 0}, team=self.team), self.team
+ RetentionFilter(
+ data={"date_to": _date(10, hour=6), "selected_interval": 0},
+ team=self.team,
+ ),
+ self.team,
)
self.assertEqual(len(result), 1)
self.assertTrue(result[0]["person"]["id"] == person1.uuid, person1.uuid)
@@ -632,12 +724,18 @@ def test_retention_people_paginated(self):
_create_person(team_id=self.team.pk, distinct_ids=[person_id])
_create_events(
self.team,
- [(person_id, _date(0)), (person_id, _date(1)), (person_id, _date(2)), (person_id, _date(5))],
+ [
+ (person_id, _date(0)),
+ (person_id, _date(1)),
+ (person_id, _date(2)),
+ (person_id, _date(5)),
+ ],
)
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result = self.client.get(
- "/api/person/retention", data={"date_to": _date(10, hour=6), "selected_interval": 2}
+ "/api/person/retention",
+ data={"date_to": _date(10, hour=6), "selected_interval": 2},
).json()
self.assertEqual(len(result["result"]), 100)
@@ -650,7 +748,8 @@ def test_retention_invalid_properties(self):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictEqual(
- response.json(), self.validation_error_response("Properties are unparsable!", "invalid_input")
+ response.json(),
+ self.validation_error_response("Properties are unparsable!", "invalid_input"),
)
def test_retention_people_in_period(self):
@@ -676,7 +775,11 @@ def test_retention_people_in_period(self):
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result, _ = retention().actors_in_period(
- RetentionFilter(data={"date_to": _date(10, hour=6), "selected_interval": 2}, team=self.team), self.team
+ RetentionFilter(
+ data={"date_to": _date(10, hour=6), "selected_interval": 2},
+ team=self.team,
+ ),
+ self.team,
)
# should be descending order on number of appearances
@@ -732,7 +835,9 @@ def test_retention_multiple_events(self):
)
_create_events(
- self.team, [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], "$pageview"
+ self.team,
+ [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))],
+ "$pageview",
)
target_entity = json.dumps({"id": first_event, "type": TREND_FILTER_TYPE_EVENTS})
@@ -748,11 +853,22 @@ def test_retention_multiple_events(self):
self.team,
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 0, 0, 0, 0, 2, 1], [2, 0, 0, 0, 2, 1], [2, 0, 0, 2, 1], [2, 0, 2, 1], [0, 0, 0], [1, 0], [0]],
+ [
+ [2, 0, 0, 0, 0, 2, 1],
+ [2, 0, 0, 0, 2, 1],
+ [2, 0, 0, 2, 1],
+ [2, 0, 2, 1],
+ [0, 0, 0],
+ [1, 0],
+ [0],
+ ],
)
def test_retention_any_event(self):
@@ -778,7 +894,9 @@ def test_retention_any_event(self):
)
_create_events(
- self.team, [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], "$pageview"
+ self.team,
+ [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))],
+ "$pageview",
)
result = retention().run(
@@ -793,11 +911,22 @@ def test_retention_any_event(self):
self.team,
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 2, 2, 2, 0, 2, 1], [2, 2, 2, 0, 2, 1], [2, 2, 0, 2, 1], [2, 0, 2, 1], [0, 0, 0], [3, 1], [1]],
+ [
+ [2, 2, 2, 2, 0, 2, 1],
+ [2, 2, 2, 0, 2, 1],
+ [2, 2, 0, 2, 1],
+ [2, 0, 2, 1],
+ [0, 0, 0],
+ [3, 1],
+ [1],
+ ],
)
@snapshot_clickhouse_queries
@@ -828,7 +957,10 @@ def test_retention_event_action(self):
data={
"date_to": _date(6, hour=0),
"target_entity": start_entity,
- "returning_entity": {"id": some_event, "type": TREND_FILTER_TYPE_EVENTS},
+ "returning_entity": {
+ "id": some_event,
+ "type": TREND_FILTER_TYPE_EVENTS,
+ },
"total_intervals": 7,
}
),
@@ -836,12 +968,23 @@ def test_retention_event_action(self):
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 0, 0, 1, 0, 1, 0], [2, 0, 1, 0, 1, 0], [2, 1, 0, 1, 0], [2, 0, 1, 0], [0, 0, 0], [0, 0], [0]],
+ [
+ [2, 0, 0, 1, 0, 1, 0],
+ [2, 0, 1, 0, 1, 0],
+ [2, 1, 0, 1, 0],
+ [2, 0, 1, 0],
+ [0, 0, 0],
+ [0, 0],
+ [0],
+ ],
)
def test_first_time_retention(self):
@@ -862,11 +1005,22 @@ def test_first_time_retention(self):
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(
pluck(result, "values", "count"),
- [[2, 1, 2, 2, 1, 0, 1], [1, 1, 0, 1, 1, 1], [0, 0, 0, 0, 0], [1, 1, 0, 1], [0, 0, 0], [0, 0], [0]],
+ [
+ [2, 1, 2, 2, 1, 0, 1],
+ [1, 1, 0, 1, 1, 1],
+ [0, 0, 0, 0, 0],
+ [1, 1, 0, 1],
+ [0, 0, 0],
+ [0, 0],
+ [0],
+ ],
)
def test_retention_with_properties(self):
@@ -891,14 +1045,29 @@ def test_retention_with_properties(self):
result = retention().run(
RetentionFilter(
- data={"properties": [{"key": "$some_property", "value": "value"}], "date_to": _date(10, hour=0)}
+ data={
+ "properties": [{"key": "$some_property", "value": "value"}],
+ "date_to": _date(10, hour=0),
+ }
),
self.team,
)
self.assertEqual(len(result), 11)
self.assertEqual(
pluck(result, "label"),
- ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
+ [
+ "Day 0",
+ "Day 1",
+ "Day 2",
+ "Day 3",
+ "Day 4",
+ "Day 5",
+ "Day 6",
+ "Day 7",
+ "Day 8",
+ "Day 9",
+ "Day 10",
+ ],
)
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
@@ -921,9 +1090,15 @@ def test_retention_with_properties(self):
def test_retention_with_user_properties(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}
+ team_id=self.team.pk,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "person2@test.com"})
_create_events(
self.team,
@@ -944,7 +1119,13 @@ def test_retention_with_user_properties(self):
result = retention().run(
RetentionFilter(
data={
- "properties": [{"key": "email", "value": "person1@test.com", "type": "person"}],
+ "properties": [
+ {
+ "key": "email",
+ "value": "person1@test.com",
+ "type": "person",
+ }
+ ],
"date_to": _date(6, hour=0),
"total_intervals": 7,
}
@@ -953,11 +1134,22 @@ def test_retention_with_user_properties(self):
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
- [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
+ [
+ [1, 1, 1, 0, 0, 1, 1],
+ [1, 1, 0, 0, 1, 1],
+ [1, 0, 0, 1, 1],
+ [0, 0, 0, 0],
+ [0, 0, 0],
+ [1, 1],
+ [1],
+ ],
)
@snapshot_clickhouse_queries
@@ -970,9 +1162,15 @@ def test_retention_with_user_properties_via_action(self):
)
_create_person(
- team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}
+ team_id=self.team.pk,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "person1@test.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "person2@test.com"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "person2@test.com"})
_create_events(
self.team,
@@ -1003,11 +1201,22 @@ def test_retention_with_user_properties_via_action(self):
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
- [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
+ [
+ [1, 1, 1, 0, 0, 1, 1],
+ [1, 1, 0, 0, 1, 1],
+ [1, 0, 0, 1, 1],
+ [0, 0, 0, 0],
+ [0, 0, 0],
+ [1, 1],
+ [1],
+ ],
)
def test_retention_action_start_point(self):
@@ -1044,17 +1253,30 @@ def test_retention_action_start_point(self):
)
self.assertEqual(len(result), 7)
- self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
+ self.assertEqual(
+ pluck(result, "label"),
+ ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"],
+ )
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
- [[1, 1, 1, 0, 0, 1, 1], [2, 2, 1, 0, 1, 2], [2, 1, 0, 1, 2], [1, 0, 0, 1], [0, 0, 0], [1, 1], [2]],
+ [
+ [1, 1, 1, 0, 0, 1, 1],
+ [2, 2, 1, 0, 1, 2],
+ [2, 1, 0, 1, 2],
+ [1, 0, 0, 1],
+ [0, 0, 0],
+ [1, 1],
+ [2],
+ ],
)
def test_filter_test_accounts(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "test@posthog.com"}
+ team_id=self.team.pk,
+ distinct_ids=["person1", "alias1"],
+ properties={"email": "test@posthog.com"},
)
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
@@ -1076,13 +1298,28 @@ def test_filter_test_accounts(self):
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result = retention().run(
- RetentionFilter(data={"date_to": _date(10, hour=6), FILTER_TEST_ACCOUNTS: True}, team=self.team),
+ RetentionFilter(
+ data={"date_to": _date(10, hour=6), FILTER_TEST_ACCOUNTS: True},
+ team=self.team,
+ ),
self.team,
)
self.assertEqual(len(result), 11)
self.assertEqual(
pluck(result, "label"),
- ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
+ [
+ "Day 0",
+ "Day 1",
+ "Day 2",
+ "Day 3",
+ "Day 4",
+ "Day 5",
+ "Day 6",
+ "Day 7",
+ "Day 8",
+ "Day 9",
+ "Day 10",
+ ],
)
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
@@ -1144,7 +1381,13 @@ def _create_first_time_retention_events(self):
_create_events(self.team, [("person3", _date(0))], "$user_signed_up")
_create_events(
- self.team, [("person3", _date(1)), ("person3", _date(3)), ("person3", _date(4)), ("person3", _date(5))]
+ self.team,
+ [
+ ("person3", _date(1)),
+ ("person3", _date(3)),
+ ("person3", _date(4)),
+ ("person3", _date(5)),
+ ],
)
_create_events(self.team, [("person4", _date(2))], "$user_signed_up")
@@ -1154,7 +1397,11 @@ def _create_first_time_retention_events(self):
return p1, p2, p3, p4
def test_retention_aggregate_by_distinct_id(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"test": "ok"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1", "alias1"],
+ properties={"test": "ok"},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
_create_events(
@@ -1203,7 +1450,14 @@ def test_retention_aggregate_by_distinct_id(self):
[2, 1, 0, 1, 2, 0, 0, 0, 0],
[1, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
- [2, 1, 0, 0, 0, 0], # this first day is different b/c of the distinct_id aggregation
+ [
+ 2,
+ 1,
+ 0,
+ 0,
+ 0,
+ 0,
+ ], # this first day is different b/c of the distinct_id aggregation
[2, 0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0],
@@ -1229,7 +1483,14 @@ def test_retention_aggregate_by_distinct_id(self):
[1, 0, 0, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
- [2, 1, 0, 0, 0, 0], # this first day is different b/c of the distinct_id aggregation
+ [
+ 2,
+ 1,
+ 0,
+ 0,
+ 0,
+ 0,
+ ], # this first day is different b/c of the distinct_id aggregation
[1, 0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0],
@@ -1248,26 +1509,48 @@ def test_timezones(self):
[
("person1", _date(-1, 1)),
("person1", _date(0, 1)),
- ("person1", _date(1, 1)), # this is the only event in US Pacific on the first day
+ (
+ "person1",
+ _date(1, 1),
+ ), # this is the only event in US Pacific on the first day
("person2", _date(6, 1)),
("person2", _date(6, 9)),
],
)
- result = retention().run(RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), self.team)
+ result = retention().run(
+ RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team),
+ self.team,
+ )
self.team.timezone = "US/Pacific"
self.team.save()
result_pacific = retention().run(
- RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), self.team
+ RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team),
+ self.team,
)
self.assertEqual(
pluck(result_pacific, "label"),
- ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
+ [
+ "Day 0",
+ "Day 1",
+ "Day 2",
+ "Day 3",
+ "Day 4",
+ "Day 5",
+ "Day 6",
+ "Day 7",
+ "Day 8",
+ "Day 9",
+ "Day 10",
+ ],
)
- self.assertEqual(result_pacific[0]["date"], datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific")))
+ self.assertEqual(
+ result_pacific[0]["date"],
+ datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific")),
+ )
self.assertEqual(result_pacific[0]["date"].isoformat(), "2020-06-10T00:00:00-07:00")
self.assertEqual(
@@ -1327,12 +1610,25 @@ def test_day_interval_sampled(self):
# even if set to hour 6 it should default to beginning of day and include all pageviews above
result = retention().run(
- RetentionFilter(data={"date_to": _date(10, hour=6), "sampling_factor": 1}), self.team
+ RetentionFilter(data={"date_to": _date(10, hour=6), "sampling_factor": 1}),
+ self.team,
)
self.assertEqual(len(result), 11)
self.assertEqual(
pluck(result, "label"),
- ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
+ [
+ "Day 0",
+ "Day 1",
+ "Day 2",
+ "Day 3",
+ "Day 4",
+ "Day 5",
+ "Day 6",
+ "Day 7",
+ "Day 8",
+ "Day 9",
+ "Day 10",
+ ],
)
self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py
index 3cce0cfd1907a..63b7024d3d6bf 100644
--- a/posthog/queries/test/test_trends.py
+++ b/posthog/queries/test/test_trends.py
@@ -32,7 +32,11 @@
Person,
)
from posthog.models.group.util import create_group
-from posthog.models.instance_setting import get_instance_setting, override_instance_config, set_instance_setting
+from posthog.models.instance_setting import (
+ get_instance_setting,
+ override_instance_config,
+ set_instance_setting,
+)
from posthog.models.person.util import create_person_distinct_id
from posthog.queries.trends.trends import Trends
from posthog.test.base import (
@@ -54,7 +58,7 @@
def breakdown_label(entity: Entity, value: Union[str, int]) -> Dict[str, Optional[Union[str, int]]]:
ret_dict: Dict[str, Optional[Union[str, int]]] = {}
if not value or not isinstance(value, str) or "cohort_" not in value:
- label = value if (value or type(value) == bool) and value != "None" and value != "nan" else "Other"
+ label = value if (value or isinstance(value, bool)) and value != "None" and value != "nan" else "Other"
ret_dict["label"] = f"{entity.name} - {label}"
ret_dict["breakdown_value"] = label
else:
@@ -104,14 +108,19 @@ def _get_trend_people(self, filter: Filter, entity: Entity):
return response["results"][0]["people"]
def _create_events(self, use_time=False) -> Tuple[Action, Person]:
-
person = _create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
_, _, secondTeam = Organization.objects.bootstrap(None, team_fields={"api_token": "token456"})
freeze_without_time = ["2019-12-24", "2020-01-01", "2020-01-02"]
- freeze_with_time = ["2019-12-24 03:45:34", "2020-01-01 00:06:34", "2020-01-02 16:34:34"]
+ freeze_with_time = [
+ "2019-12-24 03:45:34",
+ "2020-01-01 00:06:34",
+ "2020-01-02 16:34:34",
+ ]
freeze_args = freeze_without_time
if use_time:
@@ -132,20 +141,31 @@ def _create_events(self, use_time=False) -> Tuple[Action, Person]:
distinct_id="blabla",
properties={"$some_property": "value", "$bool_prop": False},
)
- _create_event(team=self.team, event="sign up", distinct_id="anonymous_id", properties={"$bool_prop": False})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="anonymous_id",
+ properties={"$bool_prop": False},
+ )
_create_event(team=self.team, event="sign up", distinct_id="blabla")
with freeze_time(freeze_args[2]):
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$some_property": "other_value", "$some_numerical_prop": 80},
+ properties={
+ "$some_property": "other_value",
+ "$some_numerical_prop": 80,
+ },
)
_create_event(team=self.team, event="no events", distinct_id="blabla")
# second team should have no effect
_create_event(
- team=secondTeam, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"}
+ team=secondTeam,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$some_property": "other_value"},
)
_create_action(team=self.team, name="no events")
@@ -160,14 +180,27 @@ def _create_breakdown_events(self):
with freeze_time(freeze_without_time[0]):
for i in range(25):
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": i})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$some_property": i},
+ )
_create_action(team=self.team, name="sign up")
def _create_event_count_per_actor_events(self):
- _create_person(team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"fruit": "mango"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"fruit": "mango"},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["tintin"], properties={"fruit": "mango"})
_create_person(team_id=self.team.pk, distinct_ids=["murmur"], properties={}) # No fruit here
- _create_person(team_id=self.team.pk, distinct_ids=["reeree"], properties={"fruit": "tomato"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["reeree"],
+ properties={"fruit": "tomato"},
+ )
with freeze_time("2020-01-01 00:06:02"):
_create_event(
@@ -177,7 +210,10 @@ def _create_event_count_per_actor_events(self):
properties={"color": "red", "$group_0": "bouba"},
)
_create_event(
- team=self.team, event="viewed video", distinct_id="blabla", properties={"$group_0": "bouba"}
+ team=self.team,
+ event="viewed video",
+ distinct_id="blabla",
+ properties={"$group_0": "bouba"},
) # No color here
_create_event(
team=self.team,
@@ -185,10 +221,20 @@ def _create_event_count_per_actor_events(self):
distinct_id="reeree",
properties={"color": "blue", "$group_0": "bouba"},
)
- _create_event(team=self.team, event="sign up", distinct_id="tintin", properties={"$group_0": "kiki"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="tintin",
+ properties={"$group_0": "kiki"},
+ )
with freeze_time("2020-01-03 19:06:34"):
- _create_event(team=self.team, event="sign up", distinct_id="murmur", properties={"$group_0": "kiki"})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="murmur",
+ properties={"$group_0": "kiki"},
+ )
with freeze_time("2020-01-04 23:17:00"):
_create_event(
@@ -206,7 +252,10 @@ def _create_event_count_per_actor_events(self):
properties={"color": "blue", "$group_0": "bouba"},
)
_create_event(
- team=self.team, event="viewed video", distinct_id="tintin", properties={"color": "red"}
+ team=self.team,
+ event="viewed video",
+ distinct_id="tintin",
+ properties={"color": "red"},
) # No group here
_create_event(
team=self.team,
@@ -226,7 +275,13 @@ def test_trends_per_day(self):
with freeze_time("2020-01-04T13:00:01Z"):
# with self.assertNumQueries(16):
response = Trends().run(
- Filter(team=self.team, data={"date_from": "-7d", "events": [{"id": "sign up"}, {"id": "no events"}]}),
+ Filter(
+ team=self.team,
+ data={
+ "date_from": "-7d",
+ "events": [{"id": "sign up"}, {"id": "no events"}],
+ },
+ ),
self.team,
)
self.assertEqual(response[0]["label"], "sign up")
@@ -272,7 +327,11 @@ def test_trend_actors_person_on_events_pagination_with_alias_inconsistencies(sel
)
flush_persons_and_events()
- data = {"date_from": "-7d", "events": [{"id": "sign up", "math": "dau"}], "limit": 5}
+ data = {
+ "date_from": "-7d",
+ "events": [{"id": "sign up", "math": "dau"}],
+ "limit": 5,
+ }
with override_instance_config("PERSON_ON_EVENTS_ENABLED", True):
from posthog.models.team import util
@@ -318,7 +377,12 @@ def test_no_props(self):
"date_from": "-14d",
"breakdown": "$some_property",
"events": [
- {"id": "sign up", "name": "sign up", "type": "events", "order": 0},
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ },
{"id": "no events"},
],
},
@@ -348,7 +412,6 @@ def test_trends_per_day_48hours(self):
def test_trends_per_day_cumulative(self):
self._create_events()
with freeze_time("2020-01-04T13:00:01Z"):
-
response = Trends().run(
Filter(
team=self.team,
@@ -371,7 +434,6 @@ def test_trends_per_day_cumulative(self):
def test_trends_groups_per_day_cumulative(self):
self._create_event_count_per_actor_events()
with freeze_time("2020-01-06T13:00:01Z"):
-
response = Trends().run(
Filter(
team=self.team,
@@ -399,7 +461,6 @@ def test_trends_groups_per_day_cumulative(self):
def test_trends_breakdown_cumulative(self):
self._create_events()
with freeze_time("2020-01-04T13:00:01Z"):
-
response = Trends().run(
Filter(
team=self.team,
@@ -429,7 +490,11 @@ def test_trends_single_aggregate_dau(self):
daily_response = Trends().run(
Filter(
team=self.team,
- data={"display": TRENDS_TABLE, "interval": "week", "events": [{"id": "sign up", "math": "dau"}]},
+ data={
+ "display": TRENDS_TABLE,
+ "interval": "week",
+ "events": [{"id": "sign up", "math": "dau"}],
+ },
),
self.team,
)
@@ -438,29 +503,73 @@ def test_trends_single_aggregate_dau(self):
weekly_response = Trends().run(
Filter(
team=self.team,
- data={"display": TRENDS_TABLE, "interval": "day", "events": [{"id": "sign up", "math": "dau"}]},
+ data={
+ "display": TRENDS_TABLE,
+ "interval": "day",
+ "events": [{"id": "sign up", "math": "dau"}],
+ },
),
self.team,
)
self.assertEqual(daily_response[0]["aggregated_value"], 1)
- self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"])
+ self.assertEqual(
+ daily_response[0]["aggregated_value"],
+ weekly_response[0]["aggregated_value"],
+ )
@also_test_with_materialized_columns(["$math_prop"])
def test_trends_single_aggregate_math(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
with freeze_time("2020-01-01 00:06:34"):
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1})
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1})
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1})
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 2})
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 3})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 1},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 1},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 1},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 2},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 3},
+ )
with freeze_time("2020-01-02 00:06:34"):
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 4})
- _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 4})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 4},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$math_prop": 4},
+ )
with freeze_time("2020-01-04T13:00:01Z"):
daily_response = Trends().run(
@@ -469,7 +578,13 @@ def test_trends_single_aggregate_math(self):
data={
"display": TRENDS_TABLE,
"interval": "week",
- "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$math_prop",
+ }
+ ],
},
),
self.team,
@@ -482,21 +597,36 @@ def test_trends_single_aggregate_math(self):
data={
"display": TRENDS_TABLE,
"interval": "day",
- "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$math_prop",
+ }
+ ],
},
),
self.team,
)
self.assertEqual(daily_response[0]["aggregated_value"], 2.0)
- self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"])
+ self.assertEqual(
+ daily_response[0]["aggregated_value"],
+ weekly_response[0]["aggregated_value"],
+ )
@snapshot_clickhouse_queries
def test_trends_with_session_property_single_aggregate_math(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -568,7 +698,13 @@ def test_trends_with_session_property_single_aggregate_math(self):
data={
"display": TRENDS_TABLE,
"interval": "week",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -581,20 +717,35 @@ def test_trends_with_session_property_single_aggregate_math(self):
data={
"display": TRENDS_TABLE,
"interval": "day",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
)
self.assertEqual(daily_response[0]["aggregated_value"], 7.5)
- self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"])
+ self.assertEqual(
+ daily_response[0]["aggregated_value"],
+ weekly_response[0]["aggregated_value"],
+ )
def test_unique_session_with_session_breakdown(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -779,7 +930,9 @@ def test_trends_breakdown_single_aggregate_cohorts(self):
def test_trends_breakdown_single_aggregate(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
with freeze_time("2020-01-01 00:06:34"):
_create_event(
@@ -831,7 +984,11 @@ def test_trends_breakdown_single_aggregate(self):
daily_response = Trends().run(
Filter(
team=self.team,
- data={"display": TRENDS_TABLE, "breakdown": "$browser", "events": [{"id": "sign up"}]},
+ data={
+ "display": TRENDS_TABLE,
+ "breakdown": "$browser",
+ "events": [{"id": "sign up"}],
+ },
),
self.team,
)
@@ -848,7 +1005,9 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self):
return True
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
with freeze_time("2020-01-01 00:06:34"):
_create_event(
@@ -935,7 +1094,11 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self):
daily_response = Trends().run(
Filter(
team=self.team,
- data={"display": TRENDS_TABLE, "breakdown": "$browser", "events": [{"id": "sign up"}]},
+ data={
+ "display": TRENDS_TABLE,
+ "breakdown": "$browser",
+ "events": [{"id": "sign up"}],
+ },
),
self.team,
)
@@ -948,7 +1111,9 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self):
def test_trends_breakdown_single_aggregate_math(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
with freeze_time("2020-01-01 00:06:34"):
_create_event(
@@ -1004,7 +1169,13 @@ def test_trends_breakdown_single_aggregate_math(self):
"display": TRENDS_TABLE,
"interval": "day",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$math_prop",
+ }
+ ],
},
),
self.team,
@@ -1018,21 +1189,36 @@ def test_trends_breakdown_single_aggregate_math(self):
"display": TRENDS_TABLE,
"interval": "week",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$math_prop",
+ }
+ ],
},
),
self.team,
)
self.assertEqual(daily_response[0]["aggregated_value"], 2.0)
- self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"])
+ self.assertEqual(
+ daily_response[0]["aggregated_value"],
+ weekly_response[0]["aggregated_value"],
+ )
@snapshot_clickhouse_queries
def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -1119,7 +1305,13 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd
"display": TRENDS_TABLE,
"interval": "week",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -1128,7 +1320,10 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd
# value1 has: 5 seconds, 10 seconds, 15 seconds
# value2 has: 10 seconds, 15 seconds (aggregated by session, so 15 is not double counted)
# empty has: 1 seconds
- self.assertEqual([resp["breakdown_value"] for resp in daily_response], ["value2", "value1", ""])
+ self.assertEqual(
+ [resp["breakdown_value"] for resp in daily_response],
+ ["value2", "value1", ""],
+ )
self.assertEqual([resp["aggregated_value"] for resp in daily_response], [12.5, 10, 1])
with freeze_time("2020-01-04T13:00:01Z"):
@@ -1139,7 +1334,13 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd
"display": TRENDS_TABLE,
"interval": "day",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -1157,9 +1358,15 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd
@snapshot_clickhouse_queries
def test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "another_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "another_val"})
_create_event(
team=self.team,
@@ -1247,7 +1454,13 @@ def test_trends_person_breakdown_with_session_property_single_aggregate_math_and
"interval": "week",
"breakdown": "$some_prop",
"breakdown_type": "person",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -1255,7 +1468,10 @@ def test_trends_person_breakdown_with_session_property_single_aggregate_math_and
# another_val has: 10 seconds
# some_val has: 1, 5 seconds, 15 seconds
- self.assertEqual([resp["breakdown_value"] for resp in daily_response], ["another_val", "some_val"])
+ self.assertEqual(
+ [resp["breakdown_value"] for resp in daily_response],
+ ["another_val", "some_val"],
+ )
self.assertEqual([resp["aggregated_value"] for resp in daily_response], [10.0, 5.0])
@snapshot_clickhouse_queries
@@ -1289,7 +1505,6 @@ def test_trends_any_event_total_count(self):
@also_test_with_materialized_columns(["$math_prop", "$some_property"])
def test_trends_breakdown_with_math_func(self):
-
with freeze_time("2020-01-01 00:06:34"):
for i in range(20):
_create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"])
@@ -1322,7 +1537,13 @@ def test_trends_breakdown_with_math_func(self):
"display": TRENDS_TABLE,
"interval": "day",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "math": "p90", "math_property": "$math_prop"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "p90",
+ "math_property": "$math_prop",
+ }
+ ],
},
),
self.team,
@@ -1336,7 +1557,14 @@ def test_trends_compare_day_interval_relative_range(self):
self._create_events()
with freeze_time("2020-01-04T13:00:01Z"):
response = Trends().run(
- Filter(team=self.team, data={"compare": "true", "date_from": "-7d", "events": [{"id": "sign up"}]}),
+ Filter(
+ team=self.team,
+ data={
+ "compare": "true",
+ "date_from": "-7d",
+ "events": [{"id": "sign up"}],
+ },
+ ),
self.team,
)
@@ -1380,7 +1608,11 @@ def test_trends_compare_day_interval_relative_range(self):
with freeze_time("2020-01-04T13:00:01Z"):
no_compare_response = Trends().run(
- Filter(team=self.team, data={"compare": "false", "events": [{"id": "sign up"}]}), self.team
+ Filter(
+ team=self.team,
+ data={"compare": "false", "events": [{"id": "sign up"}]},
+ ),
+ self.team,
)
self.assertEqual(no_compare_response[0]["label"], "sign up")
@@ -1555,17 +1787,28 @@ def _test_events_with_dates(self, dates: List[str], result, query_time=None, **f
for time in dates:
with freeze_time(time):
_create_event(
- event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"}
+ event="event_name",
+ team=self.team,
+ distinct_id="person_1",
+ properties={"$browser": "Safari"},
)
if query_time:
with freeze_time(query_time):
response = Trends().run(
- Filter(team=self.team, data={**filter_params, "events": [{"id": "event_name"}]}), self.team
+ Filter(
+ team=self.team,
+ data={**filter_params, "events": [{"id": "event_name"}]},
+ ),
+ self.team,
)
else:
response = Trends().run(
- Filter(team=self.team, data={**filter_params, "events": [{"id": "event_name"}]}), self.team
+ Filter(
+ team=self.team,
+ data={**filter_params, "events": [{"id": "event_name"}]},
+ ),
+ self.team,
)
self.assertEqual(result[0]["count"], response[0]["count"])
@@ -1720,8 +1963,20 @@ def test_week_interval(self):
"label": "event_name",
"count": 4.0,
"data": [0.0, 1.0, 2.0, 1.0, 0.0],
- "labels": ["25-Oct-2020", "1-Nov-2020", "8-Nov-2020", "15-Nov-2020", "22-Nov-2020"],
- "days": ["2020-10-25", "2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22"],
+ "labels": [
+ "25-Oct-2020",
+ "1-Nov-2020",
+ "8-Nov-2020",
+ "15-Nov-2020",
+ "22-Nov-2020",
+ ],
+ "days": [
+ "2020-10-25",
+ "2020-11-01",
+ "2020-11-08",
+ "2020-11-15",
+ "2020-11-22",
+ ],
}
],
)
@@ -1749,8 +2004,22 @@ def test_month_interval(self):
"label": "event_name",
"count": 3.0,
"data": [0.0, 2.0, 0.0, 0.0, 1.0, 0.0],
- "labels": ["1-Jun-2020", "1-Jul-2020", "1-Aug-2020", "1-Sep-2020", "1-Oct-2020", "1-Nov-2020"],
- "days": ["2020-06-01", "2020-07-01", "2020-08-01", "2020-09-01", "2020-10-01", "2020-11-01"],
+ "labels": [
+ "1-Jun-2020",
+ "1-Jul-2020",
+ "1-Aug-2020",
+ "1-Sep-2020",
+ "1-Oct-2020",
+ "1-Nov-2020",
+ ],
+ "days": [
+ "2020-06-01",
+ "2020-07-01",
+ "2020-08-01",
+ "2020-09-01",
+ "2020-10-01",
+ "2020-11-01",
+ ],
}
],
)
@@ -1778,7 +2047,12 @@ def test_interval_rounding(self):
"label": "event_name",
"count": 4.0,
"data": [1.0, 2.0, 1.0, 0.0],
- "labels": ["1-Nov-2020", "8-Nov-2020", "15-Nov-2020", "22-Nov-2020"],
+ "labels": [
+ "1-Nov-2020",
+ "8-Nov-2020",
+ "15-Nov-2020",
+ "22-Nov-2020",
+ ],
"days": ["2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22"],
}
],
@@ -1872,7 +2146,12 @@ def test_yesterday_timerange(self):
def test_last24hours_timerange(self):
self._test_events_with_dates(
- dates=["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00", "2020-11-02 08:25:00"],
+ dates=[
+ "2020-11-01 05:20:00",
+ "2020-11-01 10:22:00",
+ "2020-11-01 10:25:00",
+ "2020-11-02 08:25:00",
+ ],
date_from="-24h",
query_time="2020-11-02 10:20:00",
result=[
@@ -1900,9 +2179,14 @@ def test_last24hours_timerange(self):
def test_last48hours_timerange(self):
self._test_events_with_dates(
- dates=["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00", "2020-11-02 08:25:00"],
-             date_from="-48h",
-             query_time="2020-11-02 10:20:00",
+ dates=[
+ "2020-11-01 05:20:00",
+ "2020-11-01 10:22:00",
+ "2020-11-01 10:25:00",
+ "2020-11-02 08:25:00",
+ ],
+ date_from="-48h",
+ query_time="2020-11-02 10:20:00",
result=[
{
"action": {
@@ -1928,7 +2212,12 @@ def test_last48hours_timerange(self):
def test_last7days_timerange(self):
self._test_events_with_dates(
- dates=["2020-11-01 05:20:00", "2020-11-02 10:22:00", "2020-11-04 10:25:00", "2020-11-05 08:25:00"],
+ dates=[
+ "2020-11-01 05:20:00",
+ "2020-11-02 10:22:00",
+ "2020-11-04 10:25:00",
+ "2020-11-05 08:25:00",
+ ],
date_from="-7d",
query_time="2020-11-07 10:20:00",
result=[
@@ -2000,7 +2289,23 @@ def test_last14days_timerange(self):
},
"label": "event_name",
"count": 6.0,
- "data": [0.0, 1.0, 1.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ "data": [
+ 0.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 1.0,
+ 2.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ ],
"labels": [
"31-Oct-2020",
"1-Nov-2020",
@@ -2077,7 +2382,14 @@ def test_last30days_timerange(self):
"22-Nov-2020",
"29-Nov-2020",
],
- "days": ["2020-10-25", "2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22", "2020-11-29"],
+ "days": [
+ "2020-10-25",
+ "2020-11-01",
+ "2020-11-08",
+ "2020-11-15",
+ "2020-11-22",
+ "2020-11-29",
+ ],
}
],
)
@@ -2293,8 +2605,22 @@ def test_custom_range_timerange(self):
"label": "event_name",
"count": 3.0,
"data": [2.0, 0.0, 0.0, 0.0, 1.0, 0.0],
- "labels": ["5-Jan-2020", "6-Jan-2020", "7-Jan-2020", "8-Jan-2020", "9-Jan-2020", "10-Jan-2020"],
- "days": ["2020-01-05", "2020-01-06", "2020-01-07", "2020-01-08", "2020-01-09", "2020-01-10"],
+ "labels": [
+ "5-Jan-2020",
+ "6-Jan-2020",
+ "7-Jan-2020",
+ "8-Jan-2020",
+ "9-Jan-2020",
+ "10-Jan-2020",
+ ],
+ "days": [
+ "2020-01-05",
+ "2020-01-06",
+ "2020-01-07",
+ "2020-01-08",
+ "2020-01-09",
+ "2020-01-10",
+ ],
}
],
)
@@ -2363,9 +2689,15 @@ def test_trends_with_hogql_math(self):
@snapshot_clickhouse_queries
def test_trends_with_session_property_total_volume_math(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -2452,7 +2784,13 @@ def test_trends_with_session_property_total_volume_math(self):
team=self.team,
data={
"interval": "week",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -2464,7 +2802,13 @@ def test_trends_with_session_property_total_volume_math(self):
team=self.team,
data={
"interval": "day",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -2491,9 +2835,15 @@ def test_trends_with_session_property_total_volume_math(self):
@snapshot_clickhouse_queries
def test_trends_with_session_property_total_volume_math_with_breakdowns(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -2581,7 +2931,13 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self):
data={
"breakdown": "$some_property",
"interval": "week",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -2594,7 +2950,13 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self):
data={
"breakdown": "$some_property",
"interval": "day",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -2626,9 +2988,15 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self):
def test_trends_with_session_property_total_volume_math_with_sessions_spanning_multiple_intervals(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["blabla2"],
+ properties={"$some_prop": "some_val"},
)
- _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"})
_create_event(
team=self.team,
@@ -2674,7 +3042,13 @@ def test_trends_with_session_property_total_volume_math_with_sessions_spanning_m
team=self.team,
data={
"interval": "day",
- "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}],
+ "events": [
+ {
+ "id": "sign up",
+ "math": "median",
+ "math_property": "$session_duration",
+ }
+ ],
},
),
self.team,
@@ -2717,9 +3091,24 @@ def test_filter_events_by_cohort(self):
_create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"})
_create_person(team_id=self.team.pk, distinct_ids=["person_2"], properties={"name": "Jane"})
- _create_event(event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"})
- _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Chrome"})
- _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Safari"})
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_1",
+ properties={"$browser": "Safari"},
+ )
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_2",
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_2",
+ properties={"$browser": "Safari"},
+ )
cohort = _create_cohort(
team=self.team,
@@ -2745,12 +3134,35 @@ def test_filter_events_by_cohort(self):
@snapshot_clickhouse_queries
def test_filter_events_by_precalculated_cohort(self):
with freeze_time("2020-01-02"):
- _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"})
- _create_person(team_id=self.team.pk, distinct_ids=["person_2"], properties={"name": "Jane"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person_1"],
+ properties={"name": "John"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person_2"],
+ properties={"name": "Jane"},
+ )
-            _create_event(event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"})
-            _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Chrome"})
-            _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Safari"})
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_1",
+ properties={"$browser": "Safari"},
+ )
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_2",
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="event_name",
+ team=self.team,
+ distinct_id="person_2",
+ properties={"$browser": "Safari"},
+ )
cohort = _create_cohort(
team=self.team,
@@ -2785,7 +3197,13 @@ def test_interval_filtering_hour(self):
with freeze_time("2020-01-02"):
response = Trends().run(
- Filter(data={"date_from": "2019-12-24", "interval": "hour", "events": [{"id": "sign up"}]}),
+ Filter(
+ data={
+ "date_from": "2019-12-24",
+ "interval": "hour",
+ "events": [{"id": "sign up"}],
+ }
+ ),
self.team,
)
self.assertEqual(response[0]["labels"][3], "24-Dec-2019 03:00")
@@ -2810,7 +3228,8 @@ def test_interval_filtering_week(self):
self.team,
)
self.assertEqual(
- response[0]["labels"][:5], ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"]
+ response[0]["labels"][:5],
+ ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"],
)
self.assertEqual(response[0]["data"][:5], [0.0, 0.0, 0.0, 0.0, 1.0])
@@ -2820,7 +3239,12 @@ def test_interval_filtering_month(self):
with freeze_time("2020-01-02"):
response = Trends().run(
Filter(
- team=self.team, data={"date_from": "2019-9-24", "interval": "month", "events": [{"id": "sign up"}]}
+ team=self.team,
+ data={
+ "date_from": "2019-9-24",
+ "interval": "month",
+ "events": [{"id": "sign up"}],
+ },
),
self.team,
)
@@ -2839,7 +3263,14 @@ def test_interval_filtering_today_hourly(self):
with freeze_time("2020-01-02T23:31:00Z"):
response = Trends().run(
- Filter(team=self.team, data={"date_from": "dStart", "interval": "hour", "events": [{"id": "sign up"}]}),
+ Filter(
+ team=self.team,
+ data={
+ "date_from": "dStart",
+ "interval": "hour",
+ "events": [{"id": "sign up"}],
+ },
+ ),
self.team,
)
self.assertEqual(response[0]["labels"][23], "2-Jan-2020 23:00")
@@ -2860,25 +3291,56 @@ def test_breakdown_label(self):
self.assertEqual(none_label, {"label": "$pageview - Other", "breakdown_value": "Other"})
cohort_all_label = breakdown_label(entity, "cohort_all")
- self.assertEqual(cohort_all_label, {"label": "$pageview - all users", "breakdown_value": "all"})
+ self.assertEqual(
+ cohort_all_label,
+ {"label": "$pageview - all users", "breakdown_value": "all"},
+ )
cohort = _create_cohort(team=self.team, name="cohort1", groups=[{"properties": {"name": "Jane"}}])
cohort_label = breakdown_label(entity, f"cohort_{cohort.pk}")
- self.assertEqual(cohort_label, {"label": f"$pageview - {cohort.name}", "breakdown_value": cohort.pk})
+ self.assertEqual(
+ cohort_label,
+ {"label": f"$pageview - {cohort.name}", "breakdown_value": cohort.pk},
+ )
@also_test_with_materialized_columns(["key"])
def test_breakdown_with_filter(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"})
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"})
- _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "oh"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"email": "test@posthog.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "test@gmail.com"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person1",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person2",
+ team=self.team,
+ properties={"key": "oh"},
+ )
response = Trends().run(
Filter(
team=self.team,
data={
"date_from": "-14d",
"breakdown": "key",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"properties": [{"key": "key", "value": "oh", "operator": "not_icontains"}],
},
),
@@ -2889,7 +3351,10 @@ def test_breakdown_with_filter(self):
def test_action_filtering(self):
sign_up_action, person = self._create_events()
- action_response = Trends().run(Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}]}), self.team)
+ action_response = Trends().run(
+ Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}]}),
+ self.team,
+ )
event_response = Trends().run(Filter(team=self.team, data={"events": [{"id": "sign up"}]}), self.team)
self.assertEqual(len(action_response), 1)
@@ -2931,7 +3396,9 @@ def test_action_filtering_with_cohort(self):
)
sign_up_action = _create_action(
- team=self.team, name="sign up", properties=[{"key": "id", "type": "cohort", "value": cohort.id}]
+ team=self.team,
+ name="sign up",
+ properties=[{"key": "id", "type": "cohort", "value": cohort.id}],
)
cohort.calculate_people_ch(pending_version=2)
@@ -2963,9 +3430,21 @@ def test_trends_for_non_existing_action(self):
@also_test_with_materialized_columns(person_properties=["email", "bar"])
def test_trends_regression_filtering_by_action_with_person_properties(self):
- _create_person(team_id=self.team.pk, properties={"email": "foo@example.com", "bar": "aa"}, distinct_ids=["d1"])
- _create_person(team_id=self.team.pk, properties={"email": "bar@example.com", "bar": "bb"}, distinct_ids=["d2"])
- _create_person(team_id=self.team.pk, properties={"email": "efg@example.com", "bar": "ab"}, distinct_ids=["d3"])
+ _create_person(
+ team_id=self.team.pk,
+ properties={"email": "foo@example.com", "bar": "aa"},
+ distinct_ids=["d1"],
+ )
+ _create_person(
+ team_id=self.team.pk,
+ properties={"email": "bar@example.com", "bar": "bb"},
+ distinct_ids=["d2"],
+ )
+ _create_person(
+ team_id=self.team.pk,
+ properties={"email": "efg@example.com", "bar": "ab"},
+ distinct_ids=["d3"],
+ )
_create_person(team_id=self.team.pk, properties={"bar": "aa"}, distinct_ids=["d4"])
with freeze_time("2020-01-02 16:34:34"):
@@ -2983,7 +3462,11 @@ def test_trends_regression_filtering_by_action_with_person_properties(self):
with freeze_time("2020-01-04T13:01:01Z"):
response = Trends().run(
- Filter(team=self.team, data={"actions": [{"id": event_filtering_action.id}]}), self.team
+ Filter(
+ team=self.team,
+ data={"actions": [{"id": event_filtering_action.id}]},
+ ),
+ self.team,
)
self.assertEqual(len(response), 1)
self.assertEqual(response[0]["count"], 3)
@@ -2994,7 +3477,14 @@ def test_trends_regression_filtering_by_action_with_person_properties(self):
team=self.team,
data={
"actions": [{"id": event_filtering_action.id}],
- "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}],
+ "properties": [
+ {
+ "key": "email",
+ "type": "person",
+ "value": "is_set",
+ "operator": "is_set",
+ }
+ ],
},
),
self.team,
@@ -3011,7 +3501,11 @@ def test_dau_filtering(self):
with freeze_time("2020-01-04"):
action_response = Trends().run(
- Filter(team=self.team, data={"actions": [{"id": sign_up_action.id, "math": "dau"}]}), self.team
+ Filter(
+ team=self.team,
+ data={"actions": [{"id": sign_up_action.id, "math": "dau"}]},
+ ),
+ self.team,
)
response = Trends().run(Filter(data={"events": [{"id": "sign up", "math": "dau"}]}), self.team)
@@ -3024,9 +3518,17 @@ def _create_maths_events(self, values):
_create_person(team_id=self.team.pk, distinct_ids=["someone_else"])
for value in values:
_create_event(
- team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": value}
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": value},
)
- _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": None})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": None},
+ )
return sign_up_action
def _test_math_property_aggregation(self, math_property, values, expected_value):
@@ -3035,12 +3537,30 @@ def _test_math_property_aggregation(self, math_property, values, expected_value)
action_response = Trends().run(
Filter(
team=self.team,
- data={"actions": [{"id": sign_up_action.id, "math": math_property, "math_property": "some_number"}]},
+ data={
+ "actions": [
+ {
+ "id": sign_up_action.id,
+ "math": math_property,
+ "math_property": "some_number",
+ }
+ ]
+ },
),
self.team,
)
event_response = Trends().run(
- Filter(data={"events": [{"id": "sign up", "math": math_property, "math_property": "some_number"}]}),
+ Filter(
+ data={
+ "events": [
+ {
+ "id": "sign up",
+ "math": math_property,
+ "math_property": "some_number",
+ }
+ ]
+ }
+ ),
self.team,
)
# :TRICKY: Work around clickhouse functions not being 100%
@@ -3083,16 +3603,47 @@ def test_p99_filtering(self):
def test_avg_filtering_non_number_resiliency(self):
sign_up_action, person = self._create_events()
_create_person(team_id=self.team.pk, distinct_ids=["someone_else"])
- _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": 2})
- _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": "x"})
- _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": None})
- _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": 8})
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": 2},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": "x"},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": None},
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="someone_else",
+ properties={"some_number": 8},
+ )
action_response = Trends().run(
- Filter(data={"actions": [{"id": sign_up_action.id, "math": "avg", "math_property": "some_number"}]}),
+ Filter(
+ data={
+ "actions": [
+ {
+ "id": sign_up_action.id,
+ "math": "avg",
+ "math_property": "some_number",
+ }
+ ]
+ }
+ ),
self.team,
)
event_response = Trends().run(
- Filter(data={"events": [{"id": "sign up", "math": "avg", "math_property": "some_number"}]}), self.team
+ Filter(data={"events": [{"id": "sign up", "math": "avg", "math_property": "some_number"}]}),
+ self.team,
)
self.assertEqual(action_response[0]["data"][-1], 5)
self.assertEntityResponseEqual(action_response, event_response)
@@ -3107,8 +3658,14 @@ def test_per_entity_filtering(self):
data={
"date_from": "-7d",
"events": [
- {"id": "sign up", "properties": [{"key": "$some_property", "value": "value"}]},
- {"id": "sign up", "properties": [{"key": "$some_property", "value": "other_value"}]},
+ {
+ "id": "sign up",
+ "properties": [{"key": "$some_property", "value": "value"}],
+ },
+ {
+ "id": "sign up",
+ "properties": [{"key": "$some_property", "value": "other_value"}],
+ },
],
},
),
@@ -3123,10 +3680,26 @@ def test_per_entity_filtering(self):
self.assertEqual(response[1]["count"], 1)
def _create_multiple_people(self):
- person1 = _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"name": "person1"})
- person2 = _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"name": "person2"})
- person3 = _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"name": "person3"})
- person4 = _create_person(team_id=self.team.pk, distinct_ids=["person4"], properties={"name": "person4"})
+ person1 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"name": "person1"},
+ )
+ person2 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"name": "person2"},
+ )
+ person3 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"name": "person3"},
+ )
+ person4 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person4"],
+ properties={"name": "person4"},
+ )
journey = {
"person1": [
@@ -3255,7 +3828,13 @@ def test_entity_person_property_filtering(self):
"events": [
{
"id": "watched movie",
- "properties": [{"key": "name", "value": "person1", "type": "person"}],
+ "properties": [
+ {
+ "key": "name",
+ "value": "person1",
+ "type": "person",
+ }
+ ],
}
]
},
@@ -3269,7 +3848,12 @@ def test_entity_person_property_filtering(self):
def test_breakdown_by_empty_cohort(self):
_create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "p1"})
- _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-04T12:00:00Z")
+ _create_event(
+ team=self.team,
+ event="$pageview",
+ distinct_id="p1",
+ timestamp="2020-01-04T12:00:00Z",
+ )
with freeze_time("2020-01-04T13:01:01Z"):
event_response = Trends().run(
@@ -3332,7 +3916,14 @@ def test_breakdown_by_cohort(self):
"date_from": "-14d",
"breakdown": json.dumps([cohort.pk, cohort2.pk, cohort3.pk, "all"]),
"breakdown_type": "cohort",
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
@@ -3403,7 +3994,8 @@ def test_interval_filtering_breakdown(self):
)
self.assertEqual(
- response[0]["labels"][:5], ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"]
+ response[0]["labels"][:5],
+ ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"],
)
self.assertEqual(response[0]["data"][:5], [0.0, 0.0, 0.0, 0.0, 1.0])
@@ -3472,14 +4064,22 @@ def test_breakdown_by_person_property(self):
"date_from": "-14d",
"breakdown": "name",
"breakdown_type": "person",
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
)
self.assertListEqual(
- sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"]
+ sorted(res["breakdown_value"] for res in event_response),
+ ["person1", "person2", "person3"],
)
for response in event_response:
@@ -3505,14 +4105,22 @@ def test_breakdown_by_person_property_for_person_on_events(self):
"date_from": "-14d",
"breakdown": "name",
"breakdown_type": "person",
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
)
self.assertListEqual(
- sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"]
+ sorted(res["breakdown_value"] for res in event_response),
+ ["person1", "person2", "person3"],
)
for response in event_response:
@@ -3564,14 +4172,22 @@ def test_breakdown_by_person_property_for_person_on_events_with_zero_person_ids(
"date_from": "-14d",
"breakdown": "name",
"breakdown_type": "person",
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
)
self.assertListEqual(
- sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"]
+ sorted(res["breakdown_value"] for res in event_response),
+ ["person1", "person2", "person3"],
)
for response in event_response:
@@ -3643,7 +4259,13 @@ def test_breakdown_by_property_pie(self):
"breakdown_type": "event",
"display": "ActionsPie",
"events": [
- {"id": "watched movie", "name": "watched movie", "type": "events", "order": 0, "math": "dau"}
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ "math": "dau",
+ }
],
}
event_response = Trends().run(Filter(team=self.team, data=data), self.team)
@@ -3652,7 +4274,8 @@ def test_breakdown_by_property_pie(self):
entity = Entity({"id": "watched movie", "type": "events", "math": "dau"})
people_value_1 = self._get_trend_people(
- Filter(team=self.team, data={**data, "breakdown_value": "value_1"}), entity
+ Filter(team=self.team, data={**data, "breakdown_value": "value_1"}),
+ entity,
)
assert people_value_1 == [
# Persons with higher value come first
@@ -3695,7 +4318,8 @@ def test_breakdown_by_property_pie(self):
]
people_value_2 = self._get_trend_people(
- Filter(team=self.team, data={**data, "breakdown_value": "value_2"}), entity
+ Filter(team=self.team, data={**data, "breakdown_value": "value_2"}),
+ entity,
)
assert people_value_2 == [
{
@@ -3763,7 +4387,12 @@ def test_breakdown_by_person_property_pie_with_event_dau_filter(self):
"order": 0,
"math": "dau",
"properties": [
- {"key": "name", "operator": "not_icontains", "value": "person3", "type": "person"}
+ {
+ "key": "name",
+ "operator": "not_icontains",
+ "value": "person3",
+ "type": "person",
+ }
],
}
],
@@ -3864,7 +4493,11 @@ def test_filter_test_accounts_cohorts(self):
self.team.save()
response = Trends().run(
- Filter(data={"events": [{"id": "event_name"}], "filter_test_accounts": True}, team=self.team), self.team
+ Filter(
+ data={"events": [{"id": "event_name"}], "filter_test_accounts": True},
+ team=self.team,
+ ),
+ self.team,
)
self.assertEqual(response[0]["count"], 2)
@@ -3969,7 +4602,9 @@ def test_trends_aggregate_by_distinct_id(self):
# Stopgap until https://github.com/PostHog/meta/pull/39 is implemented
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
_create_person(team_id=self.team.pk, distinct_ids=["third"])
@@ -3984,7 +4619,13 @@ def test_trends_aggregate_by_distinct_id(self):
with override_instance_config("AGGREGATE_BY_DISTINCT_IDS_TEAMS", f"{self.team.pk},4"):
with freeze_time("2019-12-31T13:00:01Z"):
daily_response = Trends().run(
- Filter(team=self.team, data={"interval": "day", "events": [{"id": "sign up", "math": "dau"}]}),
+ Filter(
+ team=self.team,
+ data={
+ "interval": "day",
+ "events": [{"id": "sign up", "math": "dau"}],
+ },
+ ),
self.team,
)
@@ -3997,7 +4638,13 @@ def test_trends_aggregate_by_distinct_id(self):
data={
"interval": "day",
"events": [{"id": "sign up", "math": "dau"}],
- "properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}],
+ "properties": [
+ {
+ "key": "$some_prop",
+ "value": "some_val",
+ "type": "person",
+ }
+ ],
},
),
self.team,
@@ -4028,7 +4675,10 @@ def test_trends_aggregate_by_distinct_id(self):
monthly_response = Trends().run(
Filter(
team=self.team,
- data={"interval": "day", "events": [{"id": "sign up", "math": "monthly_active"}]},
+ data={
+ "interval": "day",
+ "events": [{"id": "sign up", "math": "monthly_active"}],
+ },
),
self.team,
)
@@ -4037,7 +4687,11 @@ def test_trends_aggregate_by_distinct_id(self):
with freeze_time("2019-12-31T13:00:01Z"):
weekly_response = Trends().run(
Filter(
- team=self.team, data={"interval": "day", "events": [{"id": "sign up", "math": "weekly_active"}]}
+ team=self.team,
+ data={
+ "interval": "day",
+ "events": [{"id": "sign up", "math": "weekly_active"}],
+ },
),
self.team,
)
@@ -4067,7 +4721,14 @@ def test_breakdown_filtering_limit(self):
data={
"date_from": "-14d",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
@@ -4104,7 +4765,13 @@ def test_breakdown_with_person_property_filter(self):
"name": "watched movie",
"type": "events",
"order": 0,
- "properties": [{"key": "name", "value": "person2", "type": "person"}],
+ "properties": [
+ {
+ "key": "name",
+ "value": "person2",
+ "type": "person",
+ }
+ ],
}
],
},
@@ -4128,7 +4795,12 @@ def test_breakdown_filtering(self):
"date_from": "-14d",
"breakdown": "$some_property",
"events": [
- {"id": "sign up", "name": "sign up", "type": "events", "order": 0},
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ },
{"id": "no events"},
],
},
@@ -4148,13 +4820,36 @@ def test_breakdown_filtering(self):
@also_test_with_materialized_columns(person_properties=["email"])
def test_breakdown_filtering_persons(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"})
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"email": "test@posthog.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "test@gmail.com"},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={})
- _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person3", team=self.team, properties={"key": "val"})
+ _create_event(
+ event="sign up",
+ distinct_id="person1",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person2",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person3",
+ team=self.team,
+ properties={"key": "val"},
+ )
response = Trends().run(
Filter(
team=self.team,
@@ -4162,7 +4857,14 @@ def test_breakdown_filtering_persons(self):
"date_from": "-14d",
"breakdown": "email",
"breakdown_type": "person",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
},
),
self.team,
@@ -4178,13 +4880,36 @@ def test_breakdown_filtering_persons(self):
# ensure that column names are properly handled when subqueries and person subquery share properties column
@also_test_with_materialized_columns(event_properties=["key"], person_properties=["email"])
def test_breakdown_filtering_persons_with_action_props(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"})
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"email": "test@posthog.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "test@gmail.com"},
+ )
_create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={})
- _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person3", team=self.team, properties={"key": "val"})
+ _create_event(
+ event="sign up",
+ distinct_id="person1",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person2",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person3",
+ team=self.team,
+ properties={"key": "val"},
+ )
action = _create_action(
name="sign up",
team=self.team,
@@ -4217,26 +4942,42 @@ def test_breakdown_filtering_with_properties(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Chrome", "$os": "Windows"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Chrome",
+ "$os": "Windows",
+ },
)
with freeze_time("2020-01-04T13:01:01Z"):
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Chrome", "$os": "Windows"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Chrome",
+ "$os": "Windows",
+ },
)
with freeze_time("2020-01-05T13:01:01Z"):
@@ -4278,26 +5019,42 @@ def test_breakdown_filtering_with_properties_in_new_format(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Windows"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Windows",
+ },
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Chrome", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Chrome",
+ "$os": "Mac",
+ },
)
with freeze_time("2020-01-04T13:01:01Z"):
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla1",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla2",
- properties={"$current_url": "second url", "$browser": "Chrome", "$os": "Windows"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Chrome",
+ "$os": "Windows",
+ },
)
with freeze_time("2020-01-05T13:01:01Z"):
@@ -4318,7 +5075,10 @@ def test_breakdown_filtering_with_properties_in_new_format(self):
],
"properties": {
"type": "OR",
- "values": [{"key": "$browser", "value": "Firefox"}, {"key": "$os", "value": "Windows"}],
+ "values": [
+ {"key": "$browser", "value": "Firefox"},
+ {"key": "$os", "value": "Windows"},
+ ],
},
},
),
@@ -4350,7 +5110,10 @@ def test_breakdown_filtering_with_properties_in_new_format(self):
],
"properties": {
"type": "AND",
- "values": [{"key": "$browser", "value": "Firefox"}, {"key": "$os", "value": "Windows"}],
+ "values": [
+ {"key": "$browser", "value": "Firefox"},
+ {"key": "$os", "value": "Windows"},
+ ],
},
},
),
@@ -4394,7 +5157,13 @@ def test_mau_with_breakdown_filtering_and_prop_filter(self):
"breakdown": "$some_prop",
"breakdown_type": "person",
"events": [{"id": "sign up", "math": "monthly_active"}],
- "properties": [{"key": "filter_prop", "value": "filter_val", "type": "person"}],
+ "properties": [
+ {
+ "key": "filter_prop",
+ "value": "filter_val",
+ "type": "person",
+ }
+ ],
"display": "ActionsLineGraph",
},
),
@@ -4415,19 +5184,29 @@ def test_dau_with_breakdown_filtering(self):
sign_up_action, _ = self._create_events()
with freeze_time("2020-01-02T13:01:01Z"):
_create_event(
- team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"}
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$some_property": "other_value"},
)
with freeze_time("2020-01-04T13:01:01Z"):
action_response = Trends().run(
Filter(
team=self.team,
- data={"breakdown": "$some_property", "actions": [{"id": sign_up_action.id, "math": "dau"}]},
+ data={
+ "breakdown": "$some_property",
+ "actions": [{"id": sign_up_action.id, "math": "dau"}],
+ },
),
self.team,
)
event_response = Trends().run(
Filter(
- team=self.team, data={"breakdown": "$some_property", "events": [{"id": "sign up", "math": "dau"}]}
+ team=self.team,
+ data={
+ "breakdown": "$some_property",
+ "events": [{"id": "sign up", "math": "dau"}],
+ },
),
self.team,
)
@@ -4448,7 +5227,10 @@ def test_dau_with_breakdown_filtering_with_sampling(self):
sign_up_action, _ = self._create_events()
with freeze_time("2020-01-02T13:01:01Z"):
_create_event(
- team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"}
+ team=self.team,
+ event="sign up",
+ distinct_id="blabla",
+ properties={"$some_property": "other_value"},
)
with freeze_time("2020-01-04T13:01:01Z"):
action_response = Trends().run(
@@ -4530,7 +5312,9 @@ def test_dau_with_breakdown_filtering_with_prop_filter(self):
def test_against_clashing_entity_and_property_filter_naming(self):
# Regression test for https://github.com/PostHog/posthog/issues/5814
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
_create_event(
team=self.team,
@@ -4548,10 +5332,21 @@ def test_against_clashing_entity_and_property_filter_naming(self):
"events": [
{
"id": "$pageview",
- "properties": [{"key": "$host", "operator": "icontains", "value": ".com"}],
+ "properties": [
+ {
+ "key": "$host",
+ "operator": "icontains",
+ "value": ".com",
+ }
+ ],
                     }
                 ],
-                "properties": [{"key": "$host", "value": ["app.example.com", "another.com"]}],
+                "properties": [
+                    {
+                        "key": "$host",
+                        "value": ["app.example.com", "another.com"],
+                    }
+                ],
"breakdown": "$some_prop",
"breakdown_type": "person",
},
@@ -4565,7 +5360,9 @@ def test_against_clashing_entity_and_property_filter_naming(self):
@also_test_with_materialized_columns(["$current_url"])
def test_action_with_prop(self):
_create_person(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
sign_up_action = Action.objects.create(team=self.team, name="sign up")
ActionStep.objects.create(
@@ -4609,7 +5406,9 @@ def test_combine_all_cohort_and_icontains(self):
# This caused some issues with SQL parsing
sign_up_action, _ = self._create_events()
cohort = Cohort.objects.create(
- team=self.team, name="a", groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}]
+ team=self.team,
+ name="a",
+ groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}],
)
action_response = Trends().run(
Filter(
@@ -4642,20 +5441,48 @@ def test_person_filtering_in_cohort_in_action(self):
step.save()
with freeze_time("2020-01-04T13:01:01Z"):
action_response = Trends().run(
- Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}], "breakdown": "$some_property"}),
+ Filter(
+ team=self.team,
+ data={
+ "actions": [{"id": sign_up_action.id}],
+ "breakdown": "$some_property",
+ },
+ ),
self.team,
)
self.assertEqual(action_response[0]["count"], 2)
@also_test_with_materialized_columns(event_properties=["key"], person_properties=["email"])
def test_breakdown_user_props_with_filter(self):
- _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"})
- _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"})
- person = _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"email": "test@gmail.com"})
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"email": "test@posthog.com"},
+ )
+ _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"email": "test@gmail.com"},
+ )
+ person = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person3"],
+ properties={"email": "test@gmail.com"},
+ )
create_person_distinct_id(self.team.pk, "person1", str(person.uuid))
- _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"})
- _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "val"})
+ _create_event(
+ event="sign up",
+ distinct_id="person1",
+ team=self.team,
+ properties={"key": "val"},
+ )
+ _create_event(
+ event="sign up",
+ distinct_id="person2",
+ team=self.team,
+ properties={"key": "val"},
+ )
response = Trends().run(
Filter(
team=self.team,
@@ -4663,9 +5490,21 @@ def test_breakdown_user_props_with_filter(self):
"date_from": "-14d",
"breakdown": "email",
"breakdown_type": "person",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"properties": [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"},
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ },
{"key": "key", "value": "val"},
],
},
@@ -4682,7 +5521,11 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns(
_create_person(
team_id=self.team.pk,
distinct_ids=["person1"],
- properties={"email": "test@posthog.com", "$os": "ios", "$browser": "chrome"},
+ properties={
+ "email": "test@posthog.com",
+ "$os": "ios",
+ "$browser": "chrome",
+ },
)
_create_person(
team_id=self.team.pk,
@@ -4692,41 +5535,103 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns(
_create_person(
team_id=self.team.pk,
distinct_ids=["person3"],
- properties={"email": "test2@posthog.com", "$os": "android", "$browser": "chrome"},
+ properties={
+ "email": "test2@posthog.com",
+ "$os": "android",
+ "$browser": "chrome",
+ },
)
# a second person with same properties, just so snapshot passes on different CH versions (indeterminate sorting currently)
_create_person(
team_id=self.team.pk,
distinct_ids=["person32"],
- properties={"email": "test2@posthog.com", "$os": "android", "$browser": "chrome"},
+ properties={
+ "email": "test2@posthog.com",
+ "$os": "android",
+ "$browser": "chrome",
+ },
)
_create_person(
team_id=self.team.pk,
distinct_ids=["person4"],
- properties={"email": "test3@posthog.com", "$os": "android", "$browser": "safari"},
+ properties={
+ "email": "test3@posthog.com",
+ "$os": "android",
+ "$browser": "safari",
+ },
)
_create_person(
team_id=self.team.pk,
distinct_ids=["person5"],
- properties={"email": "test4@posthog.com", "$os": "android", "$browser": "safari"},
+ properties={
+ "email": "test4@posthog.com",
+ "$os": "android",
+ "$browser": "safari",
+ },
)
_create_person(
team_id=self.team.pk,
distinct_ids=["person6"],
- properties={"email": "test5@posthog.com", "$os": "android", "$browser": "safari"},
+ properties={
+ "email": "test5@posthog.com",
+ "$os": "android",
+ "$browser": "safari",
+ },
)
journeys_for(
team=self.team,
create_people=False,
events_by_person={
- "person1": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person2": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person3": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person32": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person4": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person5": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
- "person6": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}],
+ "person1": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person2": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person32": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person4": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person5": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
+ "person6": [
+ {
+ "event": "sign up",
+ "properties": {"key": "val"},
+ "timestamp": datetime(2020, 5, 1, 0),
+ }
+ ],
},
)
@@ -4738,7 +5643,14 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns(
"date_to": "2020-07-01 00:00:00",
"breakdown": "email",
"breakdown_type": "person",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"properties": {
"type": "AND",
"values": [
@@ -4757,8 +5669,18 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns(
{
"type": "OR",
"values": [
- {"key": "$os", "value": "android", "operator": "exact", "type": "person"},
- {"key": "$browser", "value": "safari", "operator": "exact", "type": "person"},
+ {
+ "key": "$os",
+ "value": "android",
+ "operator": "exact",
+ "type": "person",
+ },
+ {
+ "key": "$browser",
+ "value": "safari",
+ "operator": "exact",
+ "type": "person",
+ },
],
},
],
@@ -4811,8 +5733,18 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns(
{
"type": "AND",
"values": [
- {"key": "$os", "value": "android", "operator": "exact", "type": "person"},
- {"key": "$browser", "value": "chrome", "operator": "exact", "type": "person"},
+ {
+ "key": "$os",
+ "value": "android",
+ "operator": "exact",
+ "type": "person",
+ },
+ {
+ "key": "$browser",
+ "value": "chrome",
+ "operator": "exact",
+ "type": "person",
+ },
],
}
],
@@ -4898,7 +5830,14 @@ def test_weekly_active_users_aggregated_range_wider_than_week(self):
"date_from": "2020-01-01",
"date_to": "2020-01-08",
"display": TRENDS_TABLE,
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -4915,7 +5854,14 @@ def test_weekly_active_users_aggregated_range_wider_than_week_with_sampling(self
"date_from": "2020-01-01",
"date_to": "2020-01-08",
"display": TRENDS_TABLE,
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -4931,7 +5877,14 @@ def test_weekly_active_users_aggregated_range_narrower_than_week(self):
"date_from": "2020-01-11",
"date_to": "2020-01-12",
"display": TRENDS_TABLE,
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -4948,7 +5901,14 @@ def test_weekly_active_users_monthly(self):
"date_from": "2019-12-01",
"date_to": "2020-02-29", # T'was a leap year
"interval": "month",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -4965,7 +5925,14 @@ def test_weekly_active_users_daily(self):
data = {
"date_from": "2020-01-08",
"date_to": "2020-01-19",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -5013,7 +5980,14 @@ def test_weekly_active_users_daily_based_on_action(self):
data = {
"date_from": "2020-01-08",
"date_to": "2020-01-19",
- "actions": [{"id": action.id, "type": "actions", "order": 0, "math": "weekly_active"}],
+ "actions": [
+ {
+ "id": action.id,
+ "type": "actions",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -5036,7 +6010,10 @@ def test_weekly_active_users_daily_based_on_action(self):
],
)
# Same as test_weekly_active_users_daily
- self.assertEqual(result[0]["data"], [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0])
+ self.assertEqual(
+ result[0]["data"],
+ [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0],
+ )
@also_test_with_different_timezones
@snapshot_clickhouse_queries
@@ -5047,7 +6024,14 @@ def test_weekly_active_users_weekly(self):
"date_from": "2019-12-29",
"date_to": "2020-01-18",
"interval": "week",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -5063,7 +6047,14 @@ def test_weekly_active_users_hourly(self):
"date_from": "2020-01-09T06:00:00Z",
"date_to": "2020-01-09T17:00:00Z",
"interval": "hour",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
@@ -5091,7 +6082,10 @@ def test_weekly_active_users_hourly(self):
# necessary, because there's a presentation issue: in monthly/weekly graphs data points are formatted as
# D-MMM-YYYY, so if a user sees e.g. 1-Jan-2077, they'll likely expect the active users count to be for
# the first day of the month, and not the last. If they saw just Jan-2077, the more general case would work.
- self.assertEqual(result[0]["data"], [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0])
+ self.assertEqual(
+ result[0]["data"],
+ [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0],
+ )
def test_weekly_active_users_daily_based_on_action_with_zero_person_ids(self):
        # only a person-on-events test
@@ -5121,13 +6115,23 @@ def test_weekly_active_users_daily_based_on_action_with_zero_person_ids(self):
data = {
"date_from": "2020-01-08",
"date_to": "2020-01-19",
- "actions": [{"id": action.id, "type": "actions", "order": 0, "math": "weekly_active"}],
+ "actions": [
+ {
+ "id": action.id,
+ "type": "actions",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
result = Trends().run(filter, self.team)
# Zero person IDs shouldn't be counted
- self.assertEqual(result[0]["data"], [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0])
+ self.assertEqual(
+ result[0]["data"],
+ [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0],
+ )
@also_test_with_materialized_columns(["key"])
def test_breakdown_weekly_active_users_daily(self):
@@ -5174,12 +6178,22 @@ def test_breakdown_weekly_active_users_daily(self):
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
"breakdown": "key",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
result = Trends().run(filter, self.team)
- self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0])
+ self.assertEqual(
+ result[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0],
+ )
@also_test_with_materialized_columns(person_properties=["name"])
@snapshot_clickhouse_queries
@@ -5212,15 +6226,30 @@ def test_weekly_active_users_filtering(self):
data={
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
"properties": [
- {"key": "name", "operator": "exact", "value": ["person-1", "person-2"], "type": "person"}
+ {
+ "key": "name",
+ "operator": "exact",
+ "value": ["person-1", "person-2"],
+ "type": "person",
+ }
],
},
)
result = Trends().run(filter, self.team)
- self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 2.0, 2.0])
+ self.assertEqual(
+ result[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 2.0, 2.0],
+ )
@snapshot_clickhouse_queries
def test_breakdown_weekly_active_users_daily_based_on_action(self):
@@ -5281,14 +6310,30 @@ def test_breakdown_weekly_active_users_daily_based_on_action(self):
cohort = Cohort.objects.create(
team=self.team,
- groups=[{"properties": [{"key": "name", "operator": "exact", "value": ["p1", "p2"], "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "name",
+ "operator": "exact",
+ "value": ["p1", "p2"],
+ "type": "person",
+ }
+ ]
+ }
+ ],
)
pageview_action = _create_action(
name="$pageview",
team=self.team,
properties=[
- {"key": "name", "operator": "exact", "value": ["p1", "p2", "p3"], "type": "person"},
+ {
+ "key": "name",
+ "operator": "exact",
+ "value": ["p1", "p2", "p3"],
+ "type": "person",
+ },
{"type": "cohort", "key": "id", "value": cohort.pk},
],
)
@@ -5297,12 +6342,22 @@ def test_breakdown_weekly_active_users_daily_based_on_action(self):
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
"breakdown": "key",
- "actions": [{"id": pageview_action.id, "type": "actions", "order": 0, "math": "weekly_active"}],
+ "actions": [
+ {
+ "id": pageview_action.id,
+ "type": "actions",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
}
filter = Filter(team=self.team, data=data)
result = Trends().run(filter, self.team)
- self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0])
+ self.assertEqual(
+ result[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0],
+ )
@also_test_with_materialized_columns(["key"])
@snapshot_clickhouse_queries
@@ -5313,7 +6368,14 @@ def test_breakdown_weekly_active_users_aggregated(self):
"date_from": "2020-01-11",
"date_to": "2020-01-11",
"display": TRENDS_TABLE,
- "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "math": "weekly_active",
+ }
+ ],
"breakdown": "key",
}
@@ -5383,7 +6445,14 @@ def test_breakdown_filtering_bar_chart_by_value(self):
data={
"date_from": "-7d",
"breakdown": "$some_property",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"display": TRENDS_BAR_VALUE,
},
),
@@ -5555,7 +6624,12 @@ def test_filtering_with_action_props(self):
)
response = Trends().run(
- Filter(data={"date_from": "-14d", "actions": [{"id": action.pk, "type": "actions", "order": 0}]}),
+ Filter(
+ data={
+ "date_from": "-14d",
+ "actions": [{"id": action.pk, "type": "actions", "order": 0}],
+ }
+ ),
self.team,
)
@@ -5573,7 +6647,18 @@ def test_should_throw_exception(self, patch_sync_execute):
with self.assertRaises(Exception):
with self.settings(TEST=False, DEBUG=False):
Trends().run(
- Filter(data={"events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}]}),
+ Filter(
+ data={
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ]
+ }
+ ),
self.team,
)
@@ -5585,21 +6670,33 @@ def test_timezones_hourly_relative_from(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-04T22:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-05T07:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-05T08:01:01",
)
@@ -5693,21 +6790,33 @@ def test_timezones_hourly_absolute_from(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-02T17:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T17:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere
)
@@ -5779,27 +6888,45 @@ def test_timezones_daily(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-02T17:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T17:01:01",
)
_create_event(
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere
)
with freeze_time(datetime(2020, 1, 5, 5, 0, tzinfo=ZoneInfo(self.team.timezone))):
response = Trends().run(
- Filter(data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up"}]}, team=self.team),
+ Filter(
+ data={
+ "date_from": "-7d",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ },
+ team=self.team,
+ ),
self.team,
)
@@ -5823,12 +6950,16 @@ def test_timezones_daily(self):
response = Trends().run(
Filter(
team=self.team,
- data={"date_from": "-14d", "events": [{"id": "sign up", "name": "sign up", "math": "dau"}]},
+ data={
+ "date_from": "-14d",
+ "events": [{"id": "sign up", "name": "sign up", "math": "dau"}],
+ },
),
self.team,
)
self.assertEqual(
- response[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0]
+ response[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0],
)
self.assertEqual(
response[0]["labels"],
@@ -5857,7 +6988,13 @@ def test_timezones_daily(self):
team=self.team,
data={
"date_from": "-7d",
- "events": [{"id": "sign up", "name": "sign up", "math": "weekly_active"}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "math": "weekly_active",
+ }
+ ],
},
),
self.team,
@@ -5882,7 +7019,10 @@ def test_timezones_daily(self):
response = Trends().run(
Filter(
team=self.team,
- data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up", "breakdown": "$os"}]},
+ data={
+ "date_from": "-7d",
+ "events": [{"id": "sign up", "name": "sign up", "breakdown": "$os"}],
+ },
),
self.team,
)
@@ -5932,7 +7072,11 @@ def test_non_deterministic_timezones(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
with freeze_time("2022-11-10T01:01:01Z"):
@@ -5940,7 +7084,11 @@ def test_non_deterministic_timezones(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
with freeze_time("2022-11-17T08:30:01Z"):
@@ -5948,7 +7096,11 @@ def test_non_deterministic_timezones(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
with freeze_time("2022-11-24T08:30:01Z"):
@@ -5956,7 +7108,11 @@ def test_non_deterministic_timezones(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
with freeze_time("2022-11-30T08:30:01Z"):
@@ -5964,7 +7120,11 @@ def test_non_deterministic_timezones(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
)
with freeze_time("2022-11-30T13:01:01Z"):
@@ -5991,21 +7151,33 @@ def test_timezones_weekly(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-11T19:01:01", # Saturday; TRICKY: This is the next UTC day in America/Phoenix
)
_create_event( # This event should count towards week of 2020-01-12 (or 2020-01-06 in Monday mode)
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-12T02:01:01", # Sunday; TRICKY: This is the previous UTC day in Asia/Tokyo
)
_create_event( # This event should count towards week of 2020-01-19 (or 2020-01-20 in Monday mode)
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "second url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-21T18:01:01", # Tuesday; TRICKY: This is the next UTC day in America/Phoenix
)
@@ -6057,7 +7229,11 @@ def test_same_day(self):
team=self.team,
event="sign up",
distinct_id="blabla",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
)
response = Trends().run(
@@ -6086,7 +7262,11 @@ def test_same_day_with_person_on_events_v2(self):
team=self.team,
event="sign up",
distinct_id="distinctid1",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
person_id=person_id1,
)
@@ -6095,7 +7275,11 @@ def test_same_day_with_person_on_events_v2(self):
team=self.team,
event="sign up",
distinct_id="distinctid2",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
person_id=person_id2,
)
@@ -6148,7 +7332,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self):
team=self.team,
event="sign up",
distinct_id="distinctid1",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
person_id=person_id1,
)
@@ -6157,7 +7345,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self):
team=self.team,
event="some other event",
distinct_id="distinctid2",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
person_id=person_id2,
)
@@ -6166,7 +7358,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self):
team=self.team,
event="sign up",
distinct_id="distinctid3",
- properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"},
+ properties={
+ "$current_url": "first url",
+ "$browser": "Firefox",
+ "$os": "Mac",
+ },
timestamp="2020-01-03T01:01:01Z",
person_id=person_id3,
)
@@ -6232,11 +7428,33 @@ def test_ilike_regression_with_current_clickhouse_version(self):
team=self.team,
data={
"date_from": "-14d",
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ "order": 0,
+ }
+ ],
"properties": [
- {"key": "email", "type": "event", "value": "posthog.com", "operator": "not_icontains"},
- {"key": "name", "type": "event", "value": "posthog.com", "operator": "not_icontains"},
- {"key": "name", "type": "person", "value": "posthog.com", "operator": "not_icontains"},
+ {
+ "key": "email",
+ "type": "event",
+ "value": "posthog.com",
+ "operator": "not_icontains",
+ },
+ {
+ "key": "name",
+ "type": "event",
+ "value": "posthog.com",
+ "operator": "not_icontains",
+ },
+ {
+ "key": "name",
+ "type": "person",
+ "value": "posthog.com",
+ "operator": "not_icontains",
+ },
],
},
),
@@ -6477,7 +7695,13 @@ def test_trends_count_per_group_average_daily(self):
team=self.team,
data={
"display": TRENDS_LINEAR,
- "events": [{"id": "viewed video", "math": "avg_count_per_actor", "math_group_type_index": 0}],
+ "events": [
+ {
+ "id": "viewed video",
+ "math": "avg_count_per_actor",
+ "math_group_type_index": 0,
+ }
+ ],
"date_from": "2020-01-01",
"date_to": "2020-01-07",
},
@@ -6518,7 +7742,13 @@ def test_trends_count_per_group_average_aggregated(self):
team=self.team,
data={
"display": TRENDS_TABLE,
- "events": [{"id": "viewed video", "math": "avg_count_per_actor", "math_group_type_index": 0}],
+ "events": [
+ {
+ "id": "viewed video",
+ "math": "avg_count_per_actor",
+ "math_group_type_index": 0,
+ }
+ ],
"date_from": "2020-01-01",
"date_to": "2020-01-07",
},
@@ -6537,7 +7767,10 @@ def test_trends_breakdown_timezone(self):
with freeze_time("2020-01-03 19:06:34"):
_create_person(team_id=self.team.pk, distinct_ids=["another_user"])
_create_event(
- team=self.team, event="viewed video", distinct_id="another_user", properties={"color": "orange"}
+ team=self.team,
+ event="viewed video",
+ distinct_id="another_user",
+ properties={"color": "orange"},
)
daily_response = Trends().run(
@@ -6564,11 +7797,29 @@ def _create_groups(self):
GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1)
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"})
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"})
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:7", properties={"industry": "finance"})
create_group(
- team_id=self.team.pk, group_type_index=1, group_key="company:10", properties={"industry": "finance"}
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={"industry": "finance"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:6",
+ properties={"industry": "technology"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:7",
+ properties={"industry": "finance"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=1,
+ group_key="company:10",
+ properties={"industry": "finance"},
)
# TODO: Delete this test when moved to person-on-events
@@ -6604,8 +7855,22 @@ def test_breakdown_with_filter_groups(self):
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
"breakdown": "key",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
- "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
+ "properties": [
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ }
+ ],
},
),
self.team,
@@ -6618,7 +7883,9 @@ def test_breakdown_with_filter_groups(self):
self.assertEqual(response[1]["count"], 1)
@also_test_with_materialized_columns(
- event_properties=["key"], group_properties=[(0, "industry")], materialize_only_with_person_on_events=True
+ event_properties=["key"],
+ group_properties=[(0, "industry")],
+ materialize_only_with_person_on_events=True,
)
@snapshot_clickhouse_queries
def test_breakdown_with_filter_groups_person_on_events(self):
@@ -6653,8 +7920,22 @@ def test_breakdown_with_filter_groups_person_on_events(self):
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
"breakdown": "key",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
- "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ }
+ ],
+ "properties": [
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ }
+ ],
},
),
self.team,
@@ -6714,8 +7995,23 @@ def test_breakdown_with_filter_groups_person_on_events_v2(self):
"date_from": "2020-01-01T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
"breakdown": "key",
- "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0, "math": "dau"}],
- "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}],
+ "events": [
+ {
+ "id": "sign up",
+ "name": "sign up",
+ "type": "events",
+ "order": 0,
+ "math": "dau",
+ }
+ ],
+ "properties": [
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ }
+ ],
},
),
self.team,
@@ -6777,7 +8073,11 @@ def test_breakdown_by_group_props(self):
self.assertEqual(response[1]["count"], 1)
filter = filter.shallow_clone(
- {"breakdown_value": "technology", "date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-03"}
+ {
+ "breakdown_value": "technology",
+ "date_from": "2020-01-02T00:00:00Z",
+ "date_to": "2020-01-03",
+ }
)
entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0})
res = self._get_trend_people(filter, entity)
@@ -6839,7 +8139,11 @@ def test_breakdown_by_group_props_person_on_events(self):
self.assertEqual(response[1]["count"], 1)
filter = filter.shallow_clone(
- {"breakdown_value": "technology", "date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-02"}
+ {
+ "breakdown_value": "technology",
+ "date_from": "2020-01-02T00:00:00Z",
+ "date_to": "2020-01-02",
+ }
)
entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0})
res = self._get_trend_people(filter, entity)
@@ -6895,7 +8199,12 @@ def test_filtering_with_group_props(self):
self._create_groups()
Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"})
- _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z")
+ _create_event(
+ event="$pageview",
+ distinct_id="person1",
+ team=self.team,
+ timestamp="2020-01-02T12:00:00Z",
+ )
_create_event(
event="$pageview",
distinct_id="person1",
@@ -6925,7 +8234,12 @@ def test_filtering_with_group_props(self):
"date_to": "2020-01-12T00:00:00Z",
"events": [{"id": "$pageview", "type": "events", "order": 0}],
"properties": [
- {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0},
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ },
{"key": "key", "value": "value", "type": "person"},
],
},
@@ -6938,7 +8252,12 @@ def test_filtering_with_group_props_event_with_no_group_data(self):
self._create_groups()
Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"})
- _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z")
+ _create_event(
+ event="$pageview",
+ distinct_id="person1",
+ team=self.team,
+ timestamp="2020-01-02T12:00:00Z",
+ )
_create_event(
event="$pageview",
distinct_id="person1",
@@ -6985,7 +8304,9 @@ def test_filtering_with_group_props_event_with_no_group_data(self):
self.assertEqual(response[0]["count"], 4)
@also_test_with_materialized_columns(
- person_properties=["key"], group_properties=[(0, "industry")], materialize_only_with_person_on_events=True
+ person_properties=["key"],
+ group_properties=[(0, "industry")],
+ materialize_only_with_person_on_events=True,
)
@snapshot_clickhouse_queries
def test_breakdown_by_group_props_with_person_filter_person_on_events(self):
@@ -7033,14 +8354,21 @@ def test_breakdown_by_group_props_with_person_filter_person_on_events(self):
self.assertEqual(response[0]["count"], 1)
@also_test_with_materialized_columns(
- person_properties=["key"], group_properties=[(0, "industry")], materialize_only_with_person_on_events=True
+ person_properties=["key"],
+ group_properties=[(0, "industry")],
+ materialize_only_with_person_on_events=True,
)
@snapshot_clickhouse_queries
def test_filtering_with_group_props_person_on_events(self):
self._create_groups()
Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"})
- _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z")
+ _create_event(
+ event="$pageview",
+ distinct_id="person1",
+ team=self.team,
+ timestamp="2020-01-02T12:00:00Z",
+ )
_create_event(
event="$pageview",
distinct_id="person1",
@@ -7070,7 +8398,12 @@ def test_filtering_with_group_props_person_on_events(self):
"date_to": "2020-01-12T00:00:00Z",
"events": [{"id": "$pageview", "type": "events", "order": 0}],
"properties": [
- {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0},
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ },
{"key": "key", "value": "value", "type": "person"},
],
},
@@ -7081,17 +8414,38 @@ def test_filtering_with_group_props_person_on_events(self):
self.assertEqual(response[0]["count"], 1)
@also_test_with_materialized_columns(
- group_properties=[(0, "industry"), (2, "name")], materialize_only_with_person_on_events=True
+ group_properties=[(0, "industry"), (2, "name")],
+ materialize_only_with_person_on_events=True,
)
@snapshot_clickhouse_queries
def test_filtering_by_multiple_groups_person_on_events(self):
GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=2)
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"})
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"})
- create_group(team_id=self.team.pk, group_type_index=2, group_key="company:5", properties={"name": "five"})
- create_group(team_id=self.team.pk, group_type_index=2, group_key="company:6", properties={"name": "six"})
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={"industry": "finance"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:6",
+ properties={"industry": "technology"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=2,
+ group_key="company:5",
+ properties={"name": "five"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=2,
+ group_key="company:6",
+ properties={"name": "six"},
+ )
journey = {
"person1": [
@@ -7105,8 +8459,16 @@ def test_filtering_by_multiple_groups_person_on_events(self):
"timestamp": datetime(2020, 1, 2, 12, 30),
"properties": {"$group_2": "company:6"},
},
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$group_0": "org:6"}},
- {"event": "sign up", "timestamp": datetime(2020, 1, 3, 15), "properties": {"$group_2": "company:5"}},
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$group_0": "org:6"},
+ },
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 3, 15),
+ "properties": {"$group_2": "company:5"},
+ },
]
}
@@ -7119,8 +8481,18 @@ def test_filtering_by_multiple_groups_person_on_events(self):
"date_to": "2020-01-12",
"events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}],
"properties": [
- {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0},
- {"key": "name", "value": "six", "type": "group", "group_type_index": 2},
+ {
+ "key": "industry",
+ "value": "finance",
+ "type": "group",
+ "group_type_index": 0,
+ },
+ {
+ "key": "name",
+ "value": "six",
+ "type": "group",
+ "group_type_index": 2,
+ },
],
},
)
@@ -7130,7 +8502,10 @@ def test_filtering_by_multiple_groups_person_on_events(self):
self.assertEqual(len(response), 1)
self.assertEqual(response[0]["count"], 1)
- self.assertEqual(response[0]["data"], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ response[0]["data"],
+ [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
filter = filter.shallow_clone({"date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-02T00:00:00Z"})
entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0})
@@ -7161,7 +8536,16 @@ def test_get_cached_result_bad_cache(self):
set_instance_setting("STRICT_CACHING_TEAMS", "all")
fake_cached = {
- "result": [{"days": ["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00"], "data": []}]
+ "result": [
+ {
+ "days": [
+ "2020-11-01 05:20:00",
+ "2020-11-01 10:22:00",
+ "2020-11-01 10:25:00",
+ ],
+ "data": [],
+ }
+ ]
}
filter = Filter(
@@ -7183,7 +8567,14 @@ def test_get_cached_result_hour(self):
fake_cached = {
"result": [
- {"days": ["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00"], "data": [0.0, 0.0, 0.0]}
+ {
+ "days": [
+ "2020-11-01 05:20:00",
+ "2020-11-01 10:22:00",
+ "2020-11-01 10:25:00",
+ ],
+ "data": [0.0, 0.0, 0.0],
+ }
]
}
@@ -7215,10 +8606,21 @@ def test_get_cached_result_hour(self):
def test_get_cached_result_day(self):
set_instance_setting("STRICT_CACHING_TEAMS", "all")
- fake_cached = {"result": [{"days": ["2020-01-02", "2020-01-03", "2020-01-04"], "data": [0.0, 0.0, 0.0]}]}
+ fake_cached = {
+ "result": [
+ {
+ "days": ["2020-01-02", "2020-01-03", "2020-01-04"],
+ "data": [0.0, 0.0, 0.0],
+ }
+ ]
+ }
filter = Filter(
team=self.team,
- data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]},
+ data={
+ "date_from": "2020-01-02",
+ "date_to": "2020-01-04",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ },
)
cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}")
cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL)
@@ -7226,7 +8628,14 @@ def test_get_cached_result_day(self):
res = Trends().get_cached_result(filter, self.team)
self.assertTrue(res)
- fake_cached = {"result": [{"days": ["2020-01-01", "2020-01-02", "2020-01-03"], "data": [0.0, 0.0, 0.0]}]}
+ fake_cached = {
+ "result": [
+ {
+ "days": ["2020-01-01", "2020-01-02", "2020-01-03"],
+ "data": [0.0, 0.0, 0.0],
+ }
+ ]
+ }
cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL)
@@ -7236,11 +8645,22 @@ def test_get_cached_result_day(self):
def test_get_cached_result_week(self):
set_instance_setting("STRICT_CACHING_TEAMS", "all")
- fake_cached = {"result": [{"days": ["2020-11-01", "2020-11-08", "2020-11-15"], "data": [0.0, 0.0, 0.0]}]}
+ fake_cached = {
+ "result": [
+ {
+ "days": ["2020-11-01", "2020-11-08", "2020-11-15"],
+ "data": [0.0, 0.0, 0.0],
+ }
+ ]
+ }
filter = Filter(
team=self.team,
- data={"date_to": "2020-11-16", "events": [{"id": "sign up", "name": "sign up"}], "interval": "week"},
+ data={
+ "date_to": "2020-11-16",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ "interval": "week",
+ },
)
cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}")
cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL)
@@ -7250,7 +8670,11 @@ def test_get_cached_result_week(self):
filter = Filter(
team=self.team,
- data={"date_to": "2020-11-23", "events": [{"id": "sign up", "name": "sign up"}], "interval": "week"},
+ data={
+ "date_to": "2020-11-23",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ "interval": "week",
+ },
)
res = Trends().get_cached_result(filter, self.team)
@@ -7259,11 +8683,22 @@ def test_get_cached_result_week(self):
def test_get_cached_result_month(self):
set_instance_setting("STRICT_CACHING_TEAMS", "all")
- fake_cached = {"result": [{"days": ["2020-09-01", "2020-10-01", "2020-11-01"], "data": [0.0, 0.0, 0.0]}]}
+ fake_cached = {
+ "result": [
+ {
+ "days": ["2020-09-01", "2020-10-01", "2020-11-01"],
+ "data": [0.0, 0.0, 0.0],
+ }
+ ]
+ }
filter = Filter(
team=self.team,
- data={"date_to": "2020-11-16", "events": [{"id": "sign up", "name": "sign up"}], "interval": "month"},
+ data={
+ "date_to": "2020-11-16",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ "interval": "month",
+ },
)
cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}")
cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL)
@@ -7273,7 +8708,11 @@ def test_get_cached_result_month(self):
filter = Filter(
team=self.team,
- data={"date_to": "2020-12-01", "events": [{"id": "sign up", "name": "sign up"}], "interval": "week"},
+ data={
+ "date_to": "2020-12-01",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ "interval": "week",
+ },
)
res = Trends().get_cached_result(filter, self.team)
@@ -7290,7 +8729,11 @@ def test_merge_result(self):
}
filter = Filter(
team=self.team,
- data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]},
+ data={
+ "date_from": "2020-01-02",
+ "date_to": "2020-01-04",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ },
)
result = [{"label": "sign up - Chrome", "data": [15.0, 12.0]}]
@@ -7298,10 +8741,13 @@ def test_merge_result(self):
self.assertEqual(merged_result[0]["data"], [23.0, 15.0, 12.0])
def test_merge_result_no_cache(self):
-
filter = Filter(
team=self.team,
- data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]},
+ data={
+ "date_from": "2020-01-02",
+ "date_to": "2020-01-04",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ },
)
result = [{"label": "sign up - Chrome", "data": [15.0, 12.0]}]
@@ -7326,7 +8772,11 @@ def test_merge_result_multiple(self):
}
filter = Filter(
team=self.team,
- data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]},
+ data={
+ "date_from": "2020-01-02",
+ "date_to": "2020-01-04",
+ "events": [{"id": "sign up", "name": "sign up"}],
+ },
)
result = [
diff --git a/posthog/queries/time_to_see_data/sessions.py b/posthog/queries/time_to_see_data/sessions.py
index b7c3274bf9241..8ebeeb8db36a6 100644
--- a/posthog/queries/time_to_see_data/sessions.py
+++ b/posthog/queries/time_to_see_data/sessions.py
@@ -68,7 +68,10 @@ def get_session_events(query: SessionEventsQuerySerializer) -> Optional[Dict]:
events = query_with_columns(GET_SESSION_EVENTS, params)
queries = query_with_columns(GET_SESSION_QUERIES, params)
session_query = SessionsQuerySerializer(
- data={"team_id": query.validated_data["team_id"], "session_id": query.validated_data["session_id"]}
+ data={
+ "team_id": query.validated_data["team_id"],
+ "session_id": query.validated_data["session_id"],
+ }
)
session_query.is_valid(raise_exception=True)
sessions = get_sessions(session_query).data
diff --git a/posthog/queries/time_to_see_data/test/test_hierarchy.py b/posthog/queries/time_to_see_data/test/test_hierarchy.py
index c449609e26905..609300868ffe1 100644
--- a/posthog/queries/time_to_see_data/test/test_hierarchy.py
+++ b/posthog/queries/time_to_see_data/test/test_hierarchy.py
@@ -1,16 +1,37 @@
import pytest
-from posthog.queries.time_to_see_data.hierarchy import Node, NodeType, construct_hierarchy, is_child
+from posthog.queries.time_to_see_data.hierarchy import (
+ Node,
+ NodeType,
+ construct_hierarchy,
+ is_child,
+)
@pytest.mark.parametrize(
"potential_parent,potential_child,expected_result",
[
# Sessions
- (Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.INTERACTION, {"session_id": 1}), True),
- (Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.QUERY, {"session_id": 1}), True),
- (Node(NodeType.SESSION, {"session_id": 2}), Node(NodeType.QUERY, {"session_id": 1}), False),
- (Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.SESSION, {"session_id": 1}), False),
+ (
+ Node(NodeType.SESSION, {"session_id": 1}),
+ Node(NodeType.INTERACTION, {"session_id": 1}),
+ True,
+ ),
+ (
+ Node(NodeType.SESSION, {"session_id": 1}),
+ Node(NodeType.QUERY, {"session_id": 1}),
+ True,
+ ),
+ (
+ Node(NodeType.SESSION, {"session_id": 2}),
+ Node(NodeType.QUERY, {"session_id": 1}),
+ False,
+ ),
+ (
+ Node(NodeType.SESSION, {"session_id": 1}),
+ Node(NodeType.SESSION, {"session_id": 1}),
+ False,
+ ),
# Interactions
(
Node(NodeType.INTERACTION, {"primary_interaction_id": "1"}),
@@ -37,7 +58,11 @@
Node(NodeType.SUBQUERY, {"client_query_id": "123::2543245"}),
False,
),
- (Node(NodeType.INTERACTION, {"session_id": 1}), Node(NodeType.SESSION, {}), False),
+ (
+ Node(NodeType.INTERACTION, {"session_id": 1}),
+ Node(NodeType.SESSION, {}),
+ False,
+ ),
(Node(NodeType.INTERACTION, {}), Node(NodeType.INTERACTION, {}), False),
# Events
(
@@ -75,17 +100,44 @@ def test_is_child(potential_parent, potential_child, expected_result):
def test_construct_hierarchy():
session = {"session_id": 1}
- interaction_1 = {**session, "is_primary_interaction": True, "primary_interaction_id": "123"}
- event_11 = {**session, "is_primary_interaction": False, "primary_interaction_id": "123", "query_id": "456"}
+ interaction_1 = {
+ **session,
+ "is_primary_interaction": True,
+ "primary_interaction_id": "123",
+ }
+ event_11 = {
+ **session,
+ "is_primary_interaction": False,
+ "primary_interaction_id": "123",
+ "query_id": "456",
+ }
query_111 = {**session, "client_query_id": "123::456", "is_initial_query": True}
- subquery_1111 = {**session, "client_query_id": "123::456", "is_initial_query": False}
- event_12 = {**session, "is_primary_interaction": False, "primary_interaction_id": "123", "query_id": "789"}
+ subquery_1111 = {
+ **session,
+ "client_query_id": "123::456",
+ "is_initial_query": False,
+ }
+ event_12 = {
+ **session,
+ "is_primary_interaction": False,
+ "primary_interaction_id": "123",
+ "query_id": "789",
+ }
query_121 = {**session, "client_query_id": "123::789", "is_initial_query": True}
query_13 = {**session, "client_query_id": "123::1111", "is_initial_query": True}
- interaction_2 = {**session, "is_primary_interaction": True, "primary_interaction_id": "8888"}
+ interaction_2 = {
+ **session,
+ "is_primary_interaction": True,
+ "primary_interaction_id": "8888",
+ }
- stray_event = {**session, "is_primary_interaction": False, "primary_interaction_id": "efg", "query_id": "9999"}
+ stray_event = {
+ **session,
+ "is_primary_interaction": False,
+ "primary_interaction_id": "efg",
+ "query_id": "9999",
+ }
stray_query = {**session, "client_query_id": "foobar", "is_initial_query": True}
result = construct_hierarchy(
diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py
index 7e1d8c0b6198b..e891190f6e310 100644
--- a/posthog/queries/trends/breakdown.py
+++ b/posthog/queries/trends/breakdown.py
@@ -23,7 +23,11 @@
from posthog.models.filters import Filter
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.property import PropertyGroup
-from posthog.models.property.util import get_property_string_expr, normalize_url_breakdown, parse_prop_grouped_clauses
+from posthog.models.property.util import (
+ get_property_string_expr,
+ normalize_url_breakdown,
+ parse_prop_grouped_clauses,
+)
from posthog.models.team import Team
from posthog.models.team.team import groups_on_events_querying_enabled
from posthog.queries.breakdown_props import (
@@ -65,8 +69,16 @@
parse_response,
process_math,
)
-from posthog.queries.util import get_interval_func_ch, get_person_properties_mode, get_start_of_interval_sql
-from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id
+from posthog.queries.util import (
+ get_interval_func_ch,
+ get_person_properties_mode,
+ get_start_of_interval_sql,
+)
+from posthog.utils import (
+ PersonOnEventsMode,
+ encode_get_request_params,
+ generate_short_id,
+)
from posthog.queries.person_on_events_v2_sql import PERSON_OVERRIDES_JOIN_SQL
@@ -186,23 +198,35 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
_params, _breakdown_filter_params = {}, {}
if self.filter.breakdown_type == "cohort":
- _params, breakdown_filter, _breakdown_filter_params, breakdown_value = self._breakdown_cohort_params()
+ (
+ _params,
+ breakdown_filter,
+ _breakdown_filter_params,
+ breakdown_value,
+ ) = self._breakdown_cohort_params()
else:
aggregate_operation_for_breakdown_init = (
"count(*)"
if self.entity.math == "dau" or self.entity.math in COUNT_PER_ACTOR_MATH_FUNCTIONS
else aggregate_operation
)
- _params, breakdown_filter, _breakdown_filter_params, breakdown_value = self._breakdown_prop_params(
- aggregate_operation_for_breakdown_init, math_params
- )
+ (
+ _params,
+ breakdown_filter,
+ _breakdown_filter_params,
+ breakdown_value,
+ ) = self._breakdown_prop_params(aggregate_operation_for_breakdown_init, math_params)
if len(_params["values"]) == 0:
# If there are no breakdown values, we are sure that there's no relevant events, so instead of adjusting
# a "real" SELECT for this, we only include the below dummy SELECT.
# It's a drop-in replacement for a "real" one, simply always returning 0 rows.
# See https://github.com/PostHog/posthog/pull/5674 for context.
- return ("SELECT [now()] AS date, [0] AS total, '' AS breakdown_value LIMIT 0", {}, lambda _: [])
+ return (
+ "SELECT [now()] AS date, [0] AS total, '' AS breakdown_value LIMIT 0",
+ {},
+ lambda _: [],
+ )
person_join_condition, person_join_params = self._person_join_condition()
groups_join_condition, groups_join_params = self._groups_join_condition()
@@ -219,16 +243,20 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
**sessions_join_params,
**sampling_params,
}
- breakdown_filter_params = {**breakdown_filter_params, **_breakdown_filter_params}
+ breakdown_filter_params = {
+ **breakdown_filter_params,
+ **_breakdown_filter_params,
+ }
if self.filter.display in NON_TIME_SERIES_DISPLAY_TYPES:
breakdown_filter = breakdown_filter.format(**breakdown_filter_params)
if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
interval_func = get_interval_func_ch(self.filter.interval)
- active_user_format_params, active_user_query_params = get_active_user_params(
- self.filter, self.entity, self.team_id
- )
+ (
+ active_user_format_params,
+ active_user_query_params,
+ ) = get_active_user_params(self.filter, self.entity, self.team_id)
self.params.update(active_user_query_params)
conditions = BREAKDOWN_ACTIVE_USER_CONDITIONS_SQL.format(
**breakdown_filter_params, **active_user_format_params
@@ -297,9 +325,10 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
breakdown_filter = breakdown_filter.format(**breakdown_filter_params)
if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
- active_user_format_params, active_user_query_params = get_active_user_params(
- self.filter, self.entity, self.team_id
- )
+ (
+ active_user_format_params,
+ active_user_query_params,
+ ) = get_active_user_params(self.filter, self.entity, self.team_id)
self.params.update(active_user_query_params)
conditions = BREAKDOWN_ACTIVE_USER_CONDITIONS_SQL.format(
**breakdown_filter_params, **active_user_format_params
@@ -386,7 +415,12 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
date_to_truncated=get_start_of_interval_sql(self.filter.interval, team=self.team, source="%(date_to)s"),
interval_func=get_interval_func_ch(self.filter.interval),
)
- self.params.update({"seconds_in_interval": seconds_in_interval, "num_intervals": num_intervals})
+ self.params.update(
+ {
+ "seconds_in_interval": seconds_in_interval,
+ "num_intervals": num_intervals,
+ }
+ )
return breakdown_query, self.params, self._parse_trend_result(self.filter, self.entity)
def _breakdown_cohort_params(self):
@@ -422,7 +456,10 @@ def _breakdown_prop_params(self, aggregate_operation: str, math_params: Dict):
return (
{"values": values_arr},
BREAKDOWN_PROP_JOIN_SQL if not self.filter.using_histogram else BREAKDOWN_HISTOGRAM_PROP_JOIN_SQL,
- {"breakdown_value_expr": breakdown_value, "numeric_property_filter": numeric_property_filter},
+ {
+ "breakdown_value_expr": breakdown_value,
+ "numeric_property_filter": numeric_property_filter,
+ },
breakdown_value,
)
@@ -447,12 +484,20 @@ def _get_breakdown_value(self, breakdown: str) -> str:
):
properties_field = f"group{self.filter.breakdown_group_type_index}_properties"
breakdown_value, _ = get_property_string_expr(
- "events", breakdown, "%(key)s", properties_field, materialised_table_column=properties_field
+ "events",
+ breakdown,
+ "%(key)s",
+ properties_field,
+ materialised_table_column=properties_field,
)
elif self.person_on_events_mode != PersonOnEventsMode.DISABLED and self.filter.breakdown_type != "group":
if self.filter.breakdown_type == "person":
breakdown_value, _ = get_property_string_expr(
- "events", breakdown, "%(key)s", "person_properties", materialised_table_column="person_properties"
+ "events",
+ breakdown,
+ "%(key)s",
+ "person_properties",
+ materialised_table_column="person_properties",
)
else:
breakdown_value, _ = get_property_string_expr("events", breakdown, "%(key)s", "properties")
@@ -462,7 +507,11 @@ def _get_breakdown_value(self, breakdown: str) -> str:
elif self.filter.breakdown_type == "group":
properties_field = f"group_properties_{self.filter.breakdown_group_type_index}"
breakdown_value, _ = get_property_string_expr(
- "groups", breakdown, "%(key)s", properties_field, materialised_table_column="group_properties"
+ "groups",
+ breakdown,
+ "%(key)s",
+ properties_field,
+ materialised_table_column="group_properties",
)
else:
breakdown_value, _ = get_property_string_expr("events", breakdown, "%(key)s", "properties")
@@ -565,7 +614,11 @@ def _parse(result: List) -> List:
parsed_result.update(
{
"persons_urls": self._get_persons_url(
- filter, entity, self.team, stats[0], result_descriptors["breakdown_value"]
+ filter,
+ entity,
+ self.team,
+ stats[0],
+ result_descriptors["breakdown_value"],
)
}
)
@@ -683,7 +736,10 @@ def _person_join_condition(self) -> Tuple[str, Dict]:
def _groups_join_condition(self) -> Tuple[str, Dict]:
return GroupsJoinQuery(
- self.filter, self.team_id, self.column_optimizer, person_on_events_mode=self.person_on_events_mode
+ self.filter,
+ self.team_id,
+ self.column_optimizer,
+ person_on_events_mode=self.person_on_events_mode,
).get_join_query()
def _sessions_join_condition(self) -> Tuple[str, Dict]:
diff --git a/posthog/queries/trends/lifecycle.py b/posthog/queries/trends/lifecycle.py
index 8e659a5368970..4821d5295a363 100644
--- a/posthog/queries/trends/lifecycle.py
+++ b/posthog/queries/trends/lifecycle.py
@@ -13,7 +13,11 @@
from posthog.queries.trends.sql import LIFECYCLE_EVENTS_QUERY, LIFECYCLE_SQL
from posthog.queries.trends.util import parse_response
from posthog.queries.util import get_person_properties_mode
-from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id
+from posthog.utils import (
+ PersonOnEventsMode,
+ encode_get_request_params,
+ generate_short_id,
+)
# Lifecycle takes an event/action, time range, interval and for every period, splits the users who did the action into 4:
#
diff --git a/posthog/queries/trends/test/test_breakdowns.py b/posthog/queries/trends/test/test_breakdowns.py
index df24ea8c14237..29c917e4dd843 100644
--- a/posthog/queries/trends/test/test_breakdowns.py
+++ b/posthog/queries/trends/test/test_breakdowns.py
@@ -4,7 +4,11 @@
from posthog.constants import TRENDS_TABLE
from posthog.models import Filter
from posthog.queries.trends.trends import Trends
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
@@ -17,7 +21,11 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$session_id": "1", "movie_length": 100, "$current_url": "https://example.com"},
+ "properties": {
+ "$session_id": "1",
+ "movie_length": 100,
+ "$current_url": "https://example.com",
+ },
}
],
# Duration 60 seconds, with 2 events in 1 session
@@ -25,12 +33,20 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$session_id": "2", "movie_length": 50, "$current_url": "https://example.com"},
+ "properties": {
+ "$session_id": "2",
+ "movie_length": 50,
+ "$current_url": "https://example.com",
+ },
},
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 2),
- "properties": {"$session_id": "2", "movie_length": 75, "$current_url": "https://example.com"},
+ "properties": {
+ "$session_id": "2",
+ "movie_length": 75,
+ "$current_url": "https://example.com",
+ },
},
],
# Duration 90 seconds, but session spans query boundary, so only a single event is counted
@@ -90,7 +106,14 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}):
response = Trends().run(
Filter(
data={
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", **events_extra}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ **events_extra,
+ }
+ ],
"date_from": "2020-01-02T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
**extra,
@@ -106,7 +129,13 @@ def test_breakdown_by_session_duration_of_events(self):
{
"breakdown": "$session_duration",
"breakdown_type": "session",
- "properties": [{"key": "$current_url", "operator": "is_not", "value": ["https://test.com"]}],
+ "properties": [
+ {
+ "key": "$current_url",
+ "operator": "is_not",
+ "value": ["https://test.com"],
+ }
+ ],
}
)
@@ -128,16 +157,34 @@ def test_breakdown_by_session_duration_of_events_with_bucketing(self):
"breakdown": "$session_duration",
"breakdown_type": "session",
"breakdown_histogram_bin_count": 3,
- "properties": [{"key": "$current_url", "operator": "is_not", "value": ["https://test.com"]}],
+ "properties": [
+ {
+ "key": "$current_url",
+ "operator": "is_not",
+ "value": ["https://test.com"],
+ }
+ ],
}
)
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
- ("[0.0,69.92]", 3.0, [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[69.92,110.72]", 1.0, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[110.72,180.01]", 5.0, [0.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "[0.0,69.92]",
+ 3.0,
+ [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[69.92,110.72]",
+ 1.0,
+ [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[110.72,180.01]",
+ 5.0,
+ [0.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
@@ -160,7 +207,8 @@ def test_breakdown_by_session_duration_of_events_single_aggregate(self):
@snapshot_clickhouse_queries
def test_breakdown_by_session_duration_of_unique_sessions(self):
response = self._run(
- {"breakdown": "$session_duration", "breakdown_type": "session"}, events_extra={"math": "unique_session"}
+ {"breakdown": "$session_duration", "breakdown_type": "session"},
+ events_extra={"math": "unique_session"},
)
self.assertEqual(
@@ -177,62 +225,126 @@ def test_breakdown_by_session_duration_of_unique_sessions(self):
@snapshot_clickhouse_queries
def test_breakdown_by_session_duration_of_unique_sessions_with_bucketing(self):
response = self._run(
- {"breakdown": "$session_duration", "breakdown_type": "session", "breakdown_histogram_bin_count": 3},
+ {
+ "breakdown": "$session_duration",
+ "breakdown_type": "session",
+ "breakdown_histogram_bin_count": 3,
+ },
events_extra={"math": "unique_session"},
)
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
- ("[0.0,69.92]", 2.0, [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[69.92,110.72]", 1.0, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[110.72,180.01]", 3.0, [0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "[0.0,69.92]",
+ 2.0,
+ [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[69.92,110.72]",
+ 1.0,
+ [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[110.72,180.01]",
+ 3.0,
+ [0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
@snapshot_clickhouse_queries
def test_breakdown_by_event_property_with_bucketing(self):
response = self._run(
- {"breakdown": "movie_length", "breakdown_type": "event", "breakdown_histogram_bin_count": 3}
+ {
+ "breakdown": "movie_length",
+ "breakdown_type": "event",
+ "breakdown_histogram_bin_count": 3,
+ }
)
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
- ("[25.0,66.25]", 4.0, [2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[66.25,98.37]", 2.0, [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[98.37,1000.01]", 2.0, [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "[25.0,66.25]",
+ 4.0,
+ [2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[66.25,98.37]",
+ 2.0,
+ [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[98.37,1000.01]",
+ 2.0,
+ [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
@snapshot_clickhouse_queries
def test_breakdown_by_event_property_of_unique_sessions_with_bucketing(self):
response = self._run(
- {"breakdown": "movie_length", "breakdown_type": "event", "breakdown_histogram_bin_count": 3},
+ {
+ "breakdown": "movie_length",
+ "breakdown_type": "event",
+ "breakdown_histogram_bin_count": 3,
+ },
events_extra={"math": "unique_session"},
)
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
- ("[25.0,66.25]", 3.0, [2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[66.25,98.37]", 2.0, [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("[98.37,1000.01]", 2.0, [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "[25.0,66.25]",
+ 3.0,
+ [2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[66.25,98.37]",
+ 2.0,
+ [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "[98.37,1000.01]",
+ 2.0,
+ [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
def test_breakdown_by_event_property_with_bucketing_and_duplicate_buckets(self):
journey = {
"person1": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 2, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 2, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
"person2": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 4, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 4, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
"person3": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 6, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 6, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
"person4": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 8, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 8, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
}
@@ -256,22 +368,44 @@ def test_breakdown_by_event_property_with_bucketing_and_duplicate_buckets(self):
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
- [("[300.0,300.01]", 4.0, [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0])],
+ [
+ (
+ "[300.0,300.01]",
+ 4.0,
+ [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ )
+ ],
)
def test_breakdown_by_event_property_with_bucketing_and_single_bucket(self):
journey = {
"person1": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 2, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 2, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
"person2": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 4, 12, 1), "properties": {"episode_length": 300}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 4, 12, 1),
+ "properties": {"episode_length": 300},
+ }
],
"person3": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 5, 12, 1), "properties": {"episode_length": 320}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 5, 12, 1),
+ "properties": {"episode_length": 320},
+ }
],
"person4": [
- {"event": "watched tv", "timestamp": datetime(2020, 1, 6, 12, 1), "properties": {"episode_length": 305}}
+ {
+ "event": "watched tv",
+ "timestamp": datetime(2020, 1, 6, 12, 1),
+ "properties": {"episode_length": 305},
+ }
],
}
@@ -293,7 +427,13 @@ def test_breakdown_by_event_property_with_bucketing_and_single_bucket(self):
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
- [("[300.0,320.01]", 4.0, [1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])],
+ [
+ (
+ "[300.0,320.01]",
+ 4.0,
+ [1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
+ ],
)
@snapshot_clickhouse_queries
@@ -301,7 +441,14 @@ def test_breakdown_by_event_property_with_entity_session_filter(self):
response = self._run(
{"breakdown": "$current_url", "breakdown_type": "event"},
events_extra={
- "properties": [{"key": "$session_duration", "type": "session", "operator": "gt", "value": 30}]
+ "properties": [
+ {
+ "key": "$session_duration",
+ "type": "session",
+ "operator": "gt",
+ "value": 30,
+ }
+ ]
},
)
@@ -309,7 +456,11 @@ def test_breakdown_by_event_property_with_entity_session_filter(self):
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
("", 6.0, [1.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("https://example.com", 2.0, [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "https://example.com",
+ 2.0,
+ [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
@@ -326,6 +477,10 @@ def test_breakdown_histogram_by_missing_property_regression(self):
self.assertEqual(
[(item["breakdown_value"], item["count"], item["data"]) for item in response],
[
- ("[nan,nan]", 0.0, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "[nan,nan]",
+ 0.0,
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
],
)
diff --git a/posthog/queries/trends/test/test_breakdowns_by_current_url.py b/posthog/queries/trends/test/test_breakdowns_by_current_url.py
index 76b7ad94902ea..bc7a81595843b 100644
--- a/posthog/queries/trends/test/test_breakdowns_by_current_url.py
+++ b/posthog/queries/trends/test/test_breakdowns_by_current_url.py
@@ -3,7 +3,11 @@
from posthog.models import Filter
from posthog.queries.trends.trends import Trends
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ snapshot_clickhouse_queries,
+)
from posthog.test.test_journeys import journeys_for
@@ -16,13 +20,19 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com", "$pathname": ""},
+ "properties": {
+ "$current_url": "https://example.com",
+ "$pathname": "",
+ },
},
# trailing question mark
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com?", "$pathname": "?"},
+ "properties": {
+ "$current_url": "https://example.com?",
+ "$pathname": "?",
+ },
},
],
"person2": [
@@ -30,13 +40,19 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/", "$pathname": "/"},
+ "properties": {
+ "$current_url": "https://example.com/",
+ "$pathname": "/",
+ },
},
# trailing hash
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com#", "$pathname": "#"},
+ "properties": {
+ "$current_url": "https://example.com#",
+ "$pathname": "#",
+ },
},
],
"person3": [
@@ -44,7 +60,10 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home", "$pathname": "/home"},
+ "properties": {
+ "$current_url": "https://example.com/home",
+ "$pathname": "/home",
+ },
},
],
"person4": [
@@ -52,19 +71,28 @@ def setUp(self):
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home/", "$pathname": "/home/"},
+ "properties": {
+ "$current_url": "https://example.com/home/",
+ "$pathname": "/home/",
+ },
},
# trailing hash
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home#", "$pathname": "/home#"},
+ "properties": {
+ "$current_url": "https://example.com/home#",
+ "$pathname": "/home#",
+ },
},
# all the things
{
"event": "watched movie",
"timestamp": datetime(2020, 1, 2, 12, 1),
- "properties": {"$current_url": "https://example.com/home/?#", "$pathname": "/home/?#"},
+ "properties": {
+ "$current_url": "https://example.com/home/?#",
+ "$pathname": "/home/?#",
+ },
},
],
}
@@ -75,7 +103,14 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}):
response = Trends().run(
Filter(
data={
- "events": [{"id": "watched movie", "name": "watched movie", "type": "events", **events_extra}],
+ "events": [
+ {
+ "id": "watched movie",
+ "name": "watched movie",
+ "type": "events",
+ **events_extra,
+ }
+ ],
"date_from": "2020-01-02T00:00:00Z",
"date_to": "2020-01-12T00:00:00Z",
**extra,
@@ -87,7 +122,13 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}):
@snapshot_clickhouse_queries
def test_breakdown_by_pathname(self) -> None:
- response = self._run({"breakdown": "$pathname", "breakdown_type": "event", "breakdown_normalize_url": True})
+ response = self._run(
+ {
+ "breakdown": "$pathname",
+ "breakdown_type": "event",
+ "breakdown_normalize_url": True,
+ }
+ )
assert [(item["breakdown_value"], item["count"], item["data"]) for item in response] == [
("/", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
@@ -96,9 +137,23 @@ def test_breakdown_by_pathname(self) -> None:
@snapshot_clickhouse_queries
def test_breakdown_by_current_url(self) -> None:
- response = self._run({"breakdown": "$current_url", "breakdown_type": "event", "breakdown_normalize_url": True})
+ response = self._run(
+ {
+ "breakdown": "$current_url",
+ "breakdown_type": "event",
+ "breakdown_normalize_url": True,
+ }
+ )
assert [(item["breakdown_value"], item["count"], item["data"]) for item in response] == [
- ("https://example.com", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
- ("https://example.com/home", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
+ (
+ "https://example.com",
+ 4.0,
+ [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
+ (
+ "https://example.com/home",
+ 4.0,
+ [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ ),
]
diff --git a/posthog/queries/trends/test/test_formula.py b/posthog/queries/trends/test/test_formula.py
index 5dffeac08959c..d6dc332fbf4f4 100644
--- a/posthog/queries/trends/test/test_formula.py
+++ b/posthog/queries/trends/test/test_formula.py
@@ -7,7 +7,12 @@
from posthog.models.filters.filter import Filter
from posthog.models.group.util import create_group
from posthog.queries.trends.trends import Trends
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries
+from posthog.test.base import (
+ APIBaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ snapshot_clickhouse_queries,
+)
class TestFormula(ClickhouseTestMixin, APIBaseTest):
@@ -17,10 +22,17 @@ def setUp(self):
super().setUp()
Person.objects.create(
- team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"}
+ team_id=self.team.pk,
+ distinct_ids=["blabla", "anonymous_id"],
+ properties={"$some_prop": "some_val"},
)
- create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"})
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={"industry": "finance"},
+ )
with freeze_time("2020-01-02T13:01:01Z"):
_create_event(
@@ -39,33 +51,58 @@ def setUp(self):
team=self.team,
event="session start",
distinct_id="blabla",
- properties={"session duration": 300, "location": "Paris", "$session_id": "1", "$group_0": "org:5"},
+ properties={
+ "session duration": 300,
+ "location": "Paris",
+ "$session_id": "1",
+ "$group_0": "org:5",
+ },
)
_create_event(
team=self.team,
event="session start",
distinct_id="blabla",
- properties={"session duration": 400, "location": "London", "$session_id": "1", "$group_0": "org:5"},
+ properties={
+ "session duration": 400,
+ "location": "London",
+ "$session_id": "1",
+ "$group_0": "org:5",
+ },
)
with freeze_time("2020-01-03T13:01:01Z"):
_create_event(
team=self.team,
event="session start",
distinct_id="blabla",
- properties={"session duration": 400, "location": "London", "$session_id": "1", "$group_0": "org:5"},
+ properties={
+ "session duration": 400,
+ "location": "London",
+ "$session_id": "1",
+ "$group_0": "org:5",
+ },
)
with freeze_time("2020-01-03T13:04:01Z"):
_create_event(
team=self.team,
event="session start",
distinct_id="blabla",
- properties={"session duration": 500, "location": "London", "$session_id": "1", "$group_0": "org:5"},
+ properties={
+ "session duration": 500,
+ "location": "London",
+ "$session_id": "1",
+ "$group_0": "org:5",
+ },
)
_create_event(
team=self.team,
event="session end",
distinct_id="blabla",
- properties={"session duration": 500, "location": "London", "$session_id": "1", "$group_0": "org:5"},
+ properties={
+ "session duration": 500,
+ "location": "London",
+ "$session_id": "1",
+ "$group_0": "org:5",
+ },
)
_create_event(
@@ -98,8 +135,16 @@ def _run(self, extra: Dict = {}, run_at: Optional[str] = None):
Filter(
data={
"events": [
- {"id": "session start", "math": "sum", "math_property": "session duration"},
- {"id": "session start", "math": "avg", "math_property": "session duration"},
+ {
+ "id": "session start",
+ "math": "sum",
+ "math_property": "session duration",
+ },
+ {
+ "id": "session start",
+ "math": "avg",
+ "math_property": "session duration",
+ },
],
"formula": "A + B",
**extra,
@@ -191,13 +236,28 @@ def test_month_interval(self):
self.assertEqual(data, [0.0, 0.0, 2160.0])
def test_formula(self):
- self.assertEqual(self._run({"formula": "A - B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 600.0, 450.0, 0.0])
- self.assertEqual(self._run({"formula": "A * B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 270000.0, 405000.0, 0.0])
- self.assertEqual(self._run({"formula": "A / B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 2.0, 0.0])
- self.assertEqual(self._run({"formula": "(A/3600)/B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ self._run({"formula": "A - B"})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 600.0, 450.0, 0.0],
+ )
+ self.assertEqual(
+ self._run({"formula": "A * B"})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 270000.0, 405000.0, 0.0],
+ )
+ self.assertEqual(
+ self._run({"formula": "A / B"})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 2.0, 0.0],
+ )
+ self.assertEqual(
+ self._run({"formula": "(A/3600)/B"})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
self.assertEqual(self._run({"formula": "(A/3600)/B"})[0]["count"], 0)
- self.assertEqual(self._run({"formula": "A/0"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(
+ self._run({"formula": "A/0"})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+ )
self.assertEqual(self._run({"formula": "A/0"})[0]["count"], 0)
@snapshot_clickhouse_queries
@@ -228,7 +288,12 @@ def test_regression_formula_with_unique_sessions_2x_and_duration_filter(self):
"id": "session start",
"math": "unique_session",
"properties": [
- {"key": "$session_duration", "value": 12, "operator": "gt", "type": "session"}
+ {
+ "key": "$session_duration",
+ "value": 12,
+ "operator": "gt",
+ "type": "session",
+ }
],
},
{"id": "session start", "math": "unique_session"},
@@ -252,14 +317,24 @@ def test_regression_formula_with_unique_sessions_2x_and_duration_filter_2x(self)
"id": "$autocapture",
"math": "unique_session",
"properties": [
- {"key": "$session_duration", "type": "session", "value": 30, "operator": "lt"}
+ {
+ "key": "$session_duration",
+ "type": "session",
+ "value": 30,
+ "operator": "lt",
+ }
],
},
{
"id": "session start",
"math": "unique_session",
"properties": [
- {"key": "$session_duration", "type": "session", "value": 500, "operator": "gt"}
+ {
+ "key": "$session_duration",
+ "type": "session",
+ "value": 500,
+ "operator": "gt",
+ }
],
},
],
@@ -286,7 +361,13 @@ def test_regression_formula_with_session_duration_aggregation(self):
"math": "avg",
"math_property": "$session_duration",
},
- {"type": "events", "id": "session end", "order": 1, "name": "$pageview", "math": "total"},
+ {
+ "type": "events",
+ "id": "session end",
+ "order": 1,
+ "name": "$pageview",
+ "math": "total",
+ },
],
"formula": "A / B",
}
@@ -349,12 +430,19 @@ def test_breakdown_aggregated(self):
@snapshot_clickhouse_queries
def test_breakdown_with_different_breakdown_values_per_series(self):
-
response = self._run(
{
"events": [
- {"id": "session start", "math": "sum", "math_property": "session duration"},
- {"id": "session end", "math": "sum", "math_property": "session duration"},
+ {
+ "id": "session start",
+ "math": "sum",
+ "math_property": "session duration",
+ },
+ {
+ "id": "session end",
+ "math": "sum",
+ "math_property": "session duration",
+ },
],
"formula": "A + B",
"breakdown": "location",
@@ -387,8 +475,18 @@ def test_breakdown_counts_of_different_events_one_without_events(self):
"breakdown": "location",
"breakdown_type": "event",
"events": [
- {"id": "session start", "name": "session start", "type": "events", "order": 0},
- {"id": "session error", "name": "session error", "type": "events", "order": 1},
+ {
+ "id": "session start",
+ "name": "session start",
+ "type": "events",
+ "order": 0,
+ },
+ {
+ "id": "session error",
+ "name": "session error",
+ "type": "events",
+ "order": 1,
+ },
],
}
),
@@ -468,9 +566,15 @@ def test_breakdown_cohort(self):
@snapshot_clickhouse_queries
def test_breakdown_hogql(self):
response = self._run(
- {"breakdown": "concat(person.properties.$some_prop, ' : ', properties.location)", "breakdown_type": "hogql"}
+ {
+ "breakdown": "concat(person.properties.$some_prop, ' : ', properties.location)",
+ "breakdown_type": "hogql",
+ }
+ )
+ self.assertEqual(
+ [series["label"] for series in response],
+ ["some_val : London", "some_val : Paris"],
)
- self.assertEqual([series["label"] for series in response], ["some_val : London", "some_val : Paris"])
self.assertEqual(
[
[0.0, 0.0, 0.0, 0.0, 0.0, 800.0, 1350.0, 0.0],
@@ -481,7 +585,11 @@ def test_breakdown_hogql(self):
def test_breakdown_mismatching_sizes(self):
response = self._run(
- {"events": [{"id": "session start"}, {"id": "session end"}], "breakdown": "location", "formula": "A + B"}
+ {
+ "events": [{"id": "session start"}, {"id": "session end"}],
+ "breakdown": "location",
+ "formula": "A + B",
+ }
)
self.assertEqual(response[0]["label"], "London")
@@ -522,7 +630,11 @@ def test_event_properties(self):
"math_property": "session duration",
"properties": [{"key": "$current_url", "value": "http://example.org"}],
},
- {"id": "session start", "math": "avg", "math_property": "session duration"},
+ {
+ "id": "session start",
+ "math": "avg",
+ "math_property": "session duration",
+ },
]
}
)[0]["data"],
@@ -541,7 +653,8 @@ def test_aggregated(self):
def test_cumulative(self):
self.assertEqual(
- self._run({"display": TRENDS_CUMULATIVE})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 1200.0, 2550.0, 2550.0]
+ self._run({"display": TRENDS_CUMULATIVE})[0]["data"],
+ [0.0, 0.0, 0.0, 0.0, 0.0, 1200.0, 2550.0, 2550.0],
)
def test_multiple_events(self):
@@ -550,9 +663,21 @@ def test_multiple_events(self):
self._run(
{
"events": [
- {"id": "session start", "math": "sum", "math_property": "session duration"},
- {"id": "session start", "math": "avg", "math_property": "session duration"},
- {"id": "session start", "math": "avg", "math_property": "session duration"},
+ {
+ "id": "session start",
+ "math": "sum",
+ "math_property": "session duration",
+ },
+ {
+ "id": "session start",
+ "math": "avg",
+ "math_property": "session duration",
+ },
+ {
+ "id": "session start",
+ "math": "avg",
+ "math_property": "session duration",
+ },
]
}
)[0]["data"],
@@ -573,13 +698,20 @@ def test_session_formulas(self):
)
def test_group_formulas(self):
-
self.assertEqual(
self._run(
{
"events": [
- {"id": "session start", "math": "unique_group", "math_group_type_index": 0},
- {"id": "session start", "math": "unique_group", "math_group_type_index": 0},
+ {
+ "id": "session start",
+ "math": "unique_group",
+ "math_group_type_index": 0,
+ },
+ {
+ "id": "session start",
+ "math": "unique_group",
+ "math_group_type_index": 0,
+ },
]
}
)[0]["data"],
diff --git a/posthog/queries/trends/test/test_paging_breakdowns.py b/posthog/queries/trends/test/test_paging_breakdowns.py
index 573d036e4b82e..e15175cd92d76 100644
--- a/posthog/queries/trends/test/test_paging_breakdowns.py
+++ b/posthog/queries/trends/test/test_paging_breakdowns.py
@@ -41,7 +41,17 @@ def _run(self, extra: Dict = {}, run_at: Optional[str] = None):
with freeze_time(run_at or "2020-01-04T13:01:01Z"):
action_response = Trends().run(
Filter(
- data={"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], **extra}
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "name": "$pageview",
+ "type": "events",
+ "order": 0,
+ }
+ ],
+ **extra,
+ }
),
self.team,
)
diff --git a/posthog/queries/trends/test/test_person.py b/posthog/queries/trends/test/test_person.py
index bfd18b6ed8de8..1cd04cfd3d206 100644
--- a/posthog/queries/trends/test/test_person.py
+++ b/posthog/queries/trends/test/test_person.py
@@ -12,7 +12,9 @@
from posthog.models.group.util import create_group
from posthog.models.group_type_mapping import GroupTypeMapping
from posthog.queries.trends.trends_actors import TrendsActors
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -103,7 +105,11 @@ def test_person_query_does_not_include_recording_events_if_flag_not_set(self):
event = {"id": "pageview", "name": "pageview", "type": "events", "order": 0}
filter = Filter(
- data={"date_from": "2021-01-21T00:00:00Z", "date_to": "2021-01-21T23:59:59Z", "events": [event]}
+ data={
+ "date_from": "2021-01-21T00:00:00Z",
+ "date_to": "2021-01-21T23:59:59Z",
+ "events": [event],
+ }
)
entity = Entity(event)
_, serialized_actors, _ = TrendsActors(self.team, entity, filter).get_actors()
@@ -125,7 +131,11 @@ def test_group_query_includes_recording_events(self):
)
_create_event(
- event="pageview", distinct_id="u1", team=self.team, timestamp=timezone.now(), properties={"$group_0": "bla"}
+ event="pageview",
+ distinct_id="u1",
+ team=self.team,
+ timestamp=timezone.now(),
+ properties={"$group_0": "bla"},
)
_create_event(
event="pageview",
@@ -204,7 +214,10 @@ def test_weekly_active_users(self):
data = response.json()
self.assertEqual(data.get("results")[0].get("count"), 2)
- self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_17", "u_16"])
+ self.assertEqual(
+ [item["name"] for item in data.get("results")[0].get("people")],
+ ["u_17", "u_16"],
+ )
def test_weekly_active_users_grouped_by_week(self):
for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep
@@ -271,7 +284,10 @@ def test_weekly_active_users_cumulative(self):
data = response.json()
self.assertEqual(data.get("results")[0].get("count"), 2)
- self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_11", "u_10"])
+ self.assertEqual(
+ [item["name"] for item in data.get("results")[0].get("people")],
+ ["u_11", "u_10"],
+ )
@skip("see PR 17356")
def test_weekly_active_users_breakdown(self):
@@ -313,4 +329,7 @@ def test_weekly_active_users_breakdown(self):
data = response.json()
# self.assertEqual(data.get("results")[0].get("count"), 2)
- self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["a_17", "a_16"])
+ self.assertEqual(
+ [item["name"] for item in data.get("results")[0].get("people")],
+ ["a_17", "a_16"],
+ )
diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py
index 154e105e77f92..31f5d83b4c15c 100644
--- a/posthog/queries/trends/total_volume.py
+++ b/posthog/queries/trends/total_volume.py
@@ -38,8 +38,16 @@
parse_response,
process_math,
)
-from posthog.queries.util import TIME_IN_SECONDS, get_interval_func_ch, get_start_of_interval_sql
-from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id
+from posthog.queries.util import (
+ TIME_IN_SECONDS,
+ get_interval_func_ch,
+ get_start_of_interval_sql,
+)
+from posthog.utils import (
+ PersonOnEventsMode,
+ encode_get_request_params,
+ generate_short_id,
+)
class TrendsTotalVolume:
@@ -115,7 +123,11 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup
tag_queries(trend_volume_type="volume_aggregate")
content_sql = VOLUME_AGGREGATE_SQL.format(event_query_base=event_query_base, **content_sql_params)
- return (content_sql, params, self._parse_aggregate_volume_result(filter, entity, team.id))
+ return (
+ content_sql,
+ params,
+ self._parse_aggregate_volume_result(filter, entity, team.id),
+ )
else:
tag_queries(trend_volume_display="time_series")
null_sql = NULL_SQL.format(
@@ -133,12 +145,17 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup
aggregator=determine_aggregator(entity, team), # TODO: Support groups officially and with tests
date_to_truncated=get_start_of_interval_sql(filter.interval, team=team, source="%(date_to)s"),
date_from_active_users_adjusted_truncated=get_start_of_interval_sql(
- filter.interval, team=team, source="%(date_from_active_users_adjusted)s"
+ filter.interval,
+ team=team,
+ source="%(date_from_active_users_adjusted)s",
),
**content_sql_params,
**trend_event_query.active_user_params,
)
- elif filter.display == TRENDS_CUMULATIVE and entity.math in (UNIQUE_USERS, UNIQUE_GROUPS):
+ elif filter.display == TRENDS_CUMULATIVE and entity.math in (
+ UNIQUE_USERS,
+ UNIQUE_GROUPS,
+ ):
# :TODO: Consider using bitmap-per-date to speed this up
tag_queries(trend_volume_type="cumulative_actors")
cumulative_sql = CUMULATIVE_SQL.format(
@@ -272,7 +289,11 @@ def _offset_date_to(self, point_datetime: datetime, filter: Filter, entity: Enti
return offset_time_series_date_by_interval(point_datetime, filter=filter, team=team)
def _get_persons_url(
- self, filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime]
+ self,
+ filter: Filter,
+ entity: Entity,
+ team: Team,
+ point_datetimes: List[datetime],
) -> List[Dict[str, Any]]:
persons_url = []
cache_invalidation_key = generate_short_id()
diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py
index 049417799bb8b..479d64f3259b1 100644
--- a/posthog/queries/trends/trends.py
+++ b/posthog/queries/trends/trends.py
@@ -101,7 +101,12 @@ def adjusted_filter(self, filter: Filter, team: Team) -> Tuple[Filter, Optional[
return new_filter, label_to_payload
def merge_results(
- self, result, cached_result: Optional[Dict[str, Any]], entity_order: int, filter: Filter, team: Team
+ self,
+ result,
+ cached_result: Optional[Dict[str, Any]],
+ entity_order: int,
+ filter: Filter,
+ team: Team,
):
if cached_result and filter.display != TRENDS_CUMULATIVE:
new_res = []
@@ -139,7 +144,11 @@ def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[st
result = parse_function(result)
serialized_data = self._format_serialized(entity, result)
merged_results, cached_result = self.merge_results(
- serialized_data, cached_result, entity.order or entity.index, filter, team
+ serialized_data,
+ cached_result,
+ entity.order or entity.index,
+ filter,
+ team,
)
if cached_result:
@@ -149,7 +158,15 @@ def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[st
return merged_results
def _run_query_for_threading(
- self, result: List, index: int, query_type, sql, params, query_tags: Dict, filter: Filter, team_id: int
+ self,
+ result: List,
+ index: int,
+ query_type,
+ sql,
+ params,
+ query_tags: Dict,
+ filter: Filter,
+ team_id: int,
):
tag_queries(**query_tags)
with push_scope() as scope:
@@ -171,7 +188,16 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]:
sql_statements_with_params[entity.index] = (sql, query_params)
thread = threading.Thread(
target=self._run_query_for_threading,
- args=(result, entity.index, query_type, sql, query_params, get_query_tags(), adjusted_filter, team.pk),
+ args=(
+ result,
+ entity.index,
+ query_type,
+ sql,
+ query_params,
+ get_query_tags(),
+ adjusted_filter,
+ team.pk,
+ ),
)
jobs.append(thread)
@@ -189,12 +215,20 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]:
scope.set_tag("team", team)
for i, entity in enumerate(filter.entities):
scope.set_context(
- "query", {"sql": sql_statements_with_params[i][0], "params": sql_statements_with_params[i][1]}
+ "query",
+ {
+ "sql": sql_statements_with_params[i][0],
+ "params": sql_statements_with_params[i][1],
+ },
)
serialized_data = cast(List[Callable], parse_functions)[entity.index](result[entity.index])
serialized_data = self._format_serialized(entity, serialized_data)
merged_results, cached_result = self.merge_results(
- serialized_data, cached_result, entity.order or entity.index, filter, team
+ serialized_data,
+ cached_result,
+ entity.order or entity.index,
+ filter,
+ team,
)
result[entity.index] = merged_results
diff --git a/posthog/queries/trends/trends_actors.py b/posthog/queries/trends/trends_actors.py
index ed484968af7dd..228eac4f799e3 100644
--- a/posthog/queries/trends/trends_actors.py
+++ b/posthog/queries/trends/trends_actors.py
@@ -43,7 +43,8 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]:
self._filter = self._filter.shallow_clone(
{
"properties": self._filter.property_groups.combine_properties(
- PropertyOperatorType.AND, [Property(key="id", value=cohort.pk, type="cohort")]
+ PropertyOperatorType.AND,
+ [Property(key="id", value=cohort.pk, type="cohort")],
).to_dict()
}
)
@@ -115,7 +116,10 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]:
else ""
)
- actor_value_expression, actor_value_params = self._aggregation_actor_value_expression_with_params
+ (
+ actor_value_expression,
+ actor_value_params,
+ ) = self._aggregation_actor_value_expression_with_params
return (
GET_ACTORS_FROM_EVENT_QUERY.format(
@@ -126,7 +130,12 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]:
limit="LIMIT %(limit)s" if limit_actors else "",
offset="OFFSET %(offset)s" if limit_actors else "",
),
- {**params, **actor_value_params, "offset": self._filter.offset, "limit": self._filter.limit or 100},
+ {
+ **params,
+ **actor_value_params,
+ "offset": self._filter.offset,
+ "limit": self._filter.limit or 100,
+ },
)
@cached_property
diff --git a/posthog/queries/trends/trends_event_query.py b/posthog/queries/trends/trends_event_query.py
index c5504ce388486..6ef5cf009dafc 100644
--- a/posthog/queries/trends/trends_event_query.py
+++ b/posthog/queries/trends/trends_event_query.py
@@ -24,7 +24,13 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
+ " ".join(
[
", "
- + get_property_string_expr("events", property, f"'{property}'", "properties", table_alias="e")[0]
+ + get_property_string_expr(
+ "events",
+ property,
+ f"'{property}'",
+ "properties",
+ table_alias="e",
+ )[0]
+ f" as {property}"
for property in self._extra_event_properties
]
diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py
index 93dd843349046..4eaaa46d75a52 100644
--- a/posthog/queries/trends/trends_event_query_base.py
+++ b/posthog/queries/trends/trends_event_query_base.py
@@ -1,6 +1,11 @@
from typing import Any, Dict, Tuple
-from posthog.constants import MONTHLY_ACTIVE, UNIQUE_USERS, WEEKLY_ACTIVE, PropertyOperatorType
+from posthog.constants import (
+ MONTHLY_ACTIVE,
+ UNIQUE_USERS,
+ WEEKLY_ACTIVE,
+ PropertyOperatorType,
+)
from posthog.models import Entity
from posthog.models.entity.util import get_entity_filtering_params
from posthog.models.filters.filter import Filter
@@ -8,7 +13,10 @@
from posthog.queries.event_query import EventQuery
from posthog.queries.person_query import PersonQuery
from posthog.queries.query_date_range import QueryDateRange
-from posthog.queries.trends.util import COUNT_PER_ACTOR_MATH_FUNCTIONS, get_active_user_params
+from posthog.queries.trends.util import (
+ COUNT_PER_ACTOR_MATH_FUNCTIONS,
+ get_active_user_params,
+)
from posthog.queries.util import get_person_properties_mode
from posthog.utils import PersonOnEventsMode
@@ -117,9 +125,10 @@ def _get_date_filter(self) -> Tuple[str, Dict]:
self.parsed_date_to = parsed_date_to
if self._entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
- active_user_format_params, active_user_query_params = get_active_user_params(
- self._filter, self._entity, self._team_id
- )
+ (
+ active_user_format_params,
+ active_user_query_params,
+ ) = get_active_user_params(self._filter, self._entity, self._team_id)
self.active_user_params = active_user_format_params
date_params.update(active_user_query_params)
diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py
index 46cd2a8041f32..382201a9e0203 100644
--- a/posthog/queries/trends/util.py
+++ b/posthog/queries/trends/util.py
@@ -7,7 +7,13 @@
from dateutil.relativedelta import relativedelta
from rest_framework.exceptions import ValidationError
-from posthog.constants import MONTHLY_ACTIVE, NON_TIME_SERIES_DISPLAY_TYPES, UNIQUE_GROUPS, UNIQUE_USERS, WEEKLY_ACTIVE
+from posthog.constants import (
+ MONTHLY_ACTIVE,
+ NON_TIME_SERIES_DISPLAY_TYPES,
+ UNIQUE_GROUPS,
+ UNIQUE_USERS,
+ WEEKLY_ACTIVE,
+)
from posthog.hogql.hogql import translate_hogql
from posthog.models.entity import Entity
from posthog.models.event.sql import EVENT_JOIN_PERSON_SQL
@@ -42,7 +48,10 @@
"p99_count_per_actor": "quantile(0.99)",
}
-ALL_SUPPORTED_MATH_FUNCTIONS = [*list(PROPERTY_MATH_FUNCTIONS.keys()), *list(COUNT_PER_ACTOR_MATH_FUNCTIONS.keys())]
+ALL_SUPPORTED_MATH_FUNCTIONS = [
+ *list(PROPERTY_MATH_FUNCTIONS.keys()),
+ *list(COUNT_PER_ACTOR_MATH_FUNCTIONS.keys()),
+]
def process_math(
@@ -72,7 +81,8 @@ def process_math(
elif entity.math in PROPERTY_MATH_FUNCTIONS:
if entity.math_property is None:
raise ValidationError(
- {"math_property": "This field is required when `math` is set to a function."}, code="required"
+ {"math_property": "This field is required when `math` is set to a function."},
+ code="required",
)
if entity.math_property == "$session_duration":
aggregate_operation = f"{PROPERTY_MATH_FUNCTIONS[entity.math]}(session_duration)"
@@ -90,7 +100,10 @@ def process_math(
def parse_response(
- stats: Dict, filter: Filter, additional_values: Dict = {}, entity: Optional[Entity] = None
+ stats: Dict,
+ filter: Filter,
+ additional_values: Dict = {},
+ entity: Optional[Entity] = None,
) -> Dict[str, Any]:
counts = stats[1]
labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if filter.interval == "hour" else "")) for item in stats[0]]
diff --git a/posthog/queries/util.py b/posthog/queries/util.py
index ec218785b1dc9..8fdda3799dbd1 100644
--- a/posthog/queries/util.py
+++ b/posthog/queries/util.py
@@ -155,7 +155,9 @@ def convert_to_datetime_aware(date_obj):
def correct_result_for_sampling(
- value: Union[int, float], sampling_factor: Optional[float], entity_math: Optional[str] = None
+ value: Union[int, float],
+ sampling_factor: Optional[float],
+ entity_math: Optional[str] = None,
) -> Union[int, float]:
from posthog.queries.trends.util import ALL_SUPPORTED_MATH_FUNCTIONS
diff --git a/posthog/rate_limit.py b/posthog/rate_limit.py
index 587eb742ee4f1..dbaa478d9f462 100644
--- a/posthog/rate_limit.py
+++ b/posthog/rate_limit.py
@@ -82,7 +82,6 @@ def safely_get_team_id_from_view(view):
return None
def allow_request(self, request, view):
-
if not is_rate_limit_enabled(round(time.time() / 60)):
return True
@@ -114,7 +113,12 @@ def allow_request(self, request, view):
statsd.incr(
"rate_limit_exceeded",
- tags={"team_id": team_id, "scope": scope, "rate": rate, "path": path},
+ tags={
+ "team_id": team_id,
+ "scope": scope,
+ "rate": rate,
+ "path": path,
+ },
)
RATE_LIMIT_EXCEEDED_COUNTER.labels(team_id=team_id, scope=scope, path=path).inc()
@@ -188,7 +192,6 @@ def safely_get_token_from_request(request: Request) -> Optional[str]:
return None
def allow_request(self, request, view):
-
if not is_decide_rate_limit_enabled():
return True
diff --git a/posthog/session_recordings/models/session_recording.py b/posthog/session_recordings/models/session_recording.py
index b3b09a03d0b74..5ef51b34c2f1b 100644
--- a/posthog/session_recordings/models/session_recording.py
+++ b/posthog/session_recordings/models/session_recording.py
@@ -12,7 +12,9 @@
RecordingMatchingEvents,
RecordingMetadata,
)
-from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
+from posthog.session_recordings.models.session_recording_event import (
+ SessionRecordingViewed,
+)
from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
diff --git a/posthog/session_recordings/models/session_recording_playlist.py b/posthog/session_recordings/models/session_recording_playlist.py
index a0f11b5718335..9c198dbd83e3c 100644
--- a/posthog/session_recordings/models/session_recording_playlist.py
+++ b/posthog/session_recordings/models/session_recording_playlist.py
@@ -5,7 +5,6 @@
class SessionRecordingPlaylist(models.Model):
-
short_id: models.CharField = models.CharField(max_length=12, blank=True, default=generate_short_id)
name: models.CharField = models.CharField(max_length=400, null=True, blank=True)
derived_name: models.CharField = models.CharField(max_length=400, null=True, blank=True)
@@ -18,7 +17,11 @@ class SessionRecordingPlaylist(models.Model):
created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True)
last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now)
last_modified_by: models.ForeignKey = models.ForeignKey(
- "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_playlists"
+ "User",
+ on_delete=models.SET_NULL,
+ null=True,
+ blank=True,
+ related_name="modified_playlists",
)
# DEPRECATED
diff --git a/posthog/session_recordings/models/session_recording_playlist_item.py b/posthog/session_recordings/models/session_recording_playlist_item.py
index f0caf9721aa7b..73d38815fdef8 100644
--- a/posthog/session_recordings/models/session_recording_playlist_item.py
+++ b/posthog/session_recordings/models/session_recording_playlist_item.py
@@ -14,7 +14,9 @@ class Meta:
blank=True,
)
playlist: models.ForeignKey = models.ForeignKey(
- "SessionRecordingPlaylist", related_name="playlist_items", on_delete=models.CASCADE
+ "SessionRecordingPlaylist",
+ related_name="playlist_items",
+ on_delete=models.CASCADE,
)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True)
diff --git a/posthog/session_recordings/models/system_status_queries.py b/posthog/session_recordings/models/system_status_queries.py
index f14396bf06270..9728c0695c1fa 100644
--- a/posthog/session_recordings/models/system_status_queries.py
+++ b/posthog/session_recordings/models/system_status_queries.py
@@ -19,5 +19,7 @@ def get_recording_status_month_to_date() -> RecordingsSystemStatus:
"""
)[0]
return RecordingsSystemStatus(
- count=result[0], events=f"{result[1]:,} rrweb events in {result[2]:,} messages", size=result[3]
+ count=result[0],
+ events=f"{result[1]:,} rrweb events in {result[2]:,} messages",
+ size=result[3],
)
diff --git a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
index b725c95cb658e..c39eee18bf79c 100644
--- a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
+++ b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
@@ -117,7 +117,10 @@ def _get_console_log_clause(
console_logs_filter: List[Literal["error", "warn", "log"]]
) -> Tuple[str, Dict[str, Any]]:
return (
- (f"AND level in %(console_logs_levels)s", {"console_logs_levels": console_logs_filter})
+ (
+ f"AND level in %(console_logs_levels)s",
+ {"console_logs_levels": console_logs_filter},
+ )
if console_logs_filter
else ("", {})
)
@@ -126,7 +129,10 @@ def get_query(self) -> Tuple[str, Dict]:
if not self._filter.console_search_query:
return "", {}
- events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause
+ (
+ events_timestamp_clause,
+ events_timestamp_params,
+ ) = self._get_events_timestamp_clause
console_log_clause, console_log_params = self._get_console_log_clause(self._filter.console_logs_filter)
return self._rawQuery.format(
@@ -319,9 +325,10 @@ def build_event_filters(self) -> SummaryEventFiltersSQL:
if entity.id and entity.id not in event_names_to_filter:
event_names_to_filter.append(entity.id)
- this_entity_condition_sql, this_entity_filter_params = self.format_event_filter(
- entity, prepend=f"event_matcher_{index}", team_id=self._team_id
- )
+ (
+ this_entity_condition_sql,
+ this_entity_filter_params,
+ ) = self.format_event_filter(entity, prepend=f"event_matcher_{index}", team_id=self._team_id)
joining = "OR" if index > 0 else ""
condition_sql += f"{joining} {this_entity_condition_sql}"
# wrap in parentheses to constrain the scope of the OR
@@ -355,7 +362,10 @@ def _get_groups_query(self) -> Tuple[str, Dict]:
from posthog.queries.groups_join_query import GroupsJoinQuery
return GroupsJoinQuery(
- self._filter, self._team_id, self._column_optimizer, person_on_events_mode=self._person_on_events_mode
+ self._filter,
+ self._team_id,
+ self._column_optimizer,
+ person_on_events_mode=self._person_on_events_mode,
).get_join_query()
# We want to select events beyond the range of the recording to handle the case where
@@ -382,13 +392,17 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any]
}
_, recording_start_time_params = _get_recording_start_time_clause(self._filter)
- provided_session_ids_clause, provided_session_ids_params = _get_filter_by_provided_session_ids_clause(
- recording_filters=self._filter, column_name="$session_id"
- )
+ (
+ provided_session_ids_clause,
+ provided_session_ids_params,
+ ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter, column_name="$session_id")
event_filters = self.build_event_filters
event_filters_params = event_filters.params
- events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause
+ (
+ events_timestamp_clause,
+ events_timestamp_params,
+ ) = self._get_events_timestamp_clause
groups_query, groups_params = self._get_groups_query()
@@ -407,9 +421,11 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any]
person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id",
)
- persons_join, persons_select_params, persons_sub_query = self._persons_join_or_subquery(
- event_filters, prop_query
- )
+ (
+ persons_join,
+ persons_select_params,
+ persons_sub_query,
+ ) = self._persons_join_or_subquery(event_filters, prop_query)
return (
self._raw_events_query.format(
@@ -590,9 +606,10 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
}
_, recording_start_time_params = _get_recording_start_time_clause(self._filter)
- provided_session_ids_clause, provided_session_ids_params = _get_filter_by_provided_session_ids_clause(
- recording_filters=self._filter
- )
+ (
+ provided_session_ids_clause,
+ provided_session_ids_params,
+ ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter)
(
log_matching_session_ids_clause,
@@ -636,7 +653,8 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
)
def duration_clause(
- self, duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"]
+ self,
+ duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"],
) -> Tuple[str, Dict[str, Any]]:
duration_clause = ""
duration_params = {}
diff --git a/posthog/session_recordings/queries/session_recording_properties.py b/posthog/session_recordings/queries/session_recording_properties.py
index 22d54e9799b4d..e7c5544f14fe7 100644
--- a/posthog/session_recordings/queries/session_recording_properties.py
+++ b/posthog/session_recordings/queries/session_recording_properties.py
@@ -73,12 +73,19 @@ def format_session_recording_id_filters(self) -> Tuple[str, Dict]:
def get_query(self) -> Tuple[str, Dict[str, Any]]:
base_params = {"team_id": self._team_id}
- events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause()
- session_ids_clause, session_ids_params = self.format_session_recording_id_filters()
+ (
+ events_timestamp_clause,
+ events_timestamp_params,
+ ) = self._get_events_timestamp_clause()
+ (
+ session_ids_clause,
+ session_ids_params,
+ ) = self.format_session_recording_id_filters()
return (
self._core_single_pageview_event_query.format(
- events_timestamp_clause=events_timestamp_clause, session_ids_clause=session_ids_clause
+ events_timestamp_clause=events_timestamp_clause,
+ session_ids_clause=session_ids_clause,
),
{**base_params, **events_timestamp_params, **session_ids_params},
)
diff --git a/posthog/session_recordings/queries/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py
index 02c2a26519c21..0d60559c7a047 100644
--- a/posthog/session_recordings/queries/session_replay_events.py
+++ b/posthog/session_recordings/queries/session_replay_events.py
@@ -25,12 +25,19 @@ def exists(self, session_id: str, team: Team) -> bool:
AND session_id = %(session_id)s
AND min_first_timestamp >= now() - INTERVAL %(recording_ttl_days)s DAY
""",
- {"team_id": team.pk, "session_id": session_id, "recording_ttl_days": ttl_days(team)},
+ {
+ "team_id": team.pk,
+ "session_id": session_id,
+ "recording_ttl_days": ttl_days(team),
+ },
)
return result[0][0] > 0
def get_metadata(
- self, session_id: str, team: Team, recording_start_time: Optional[datetime] = None
+ self,
+ session_id: str,
+ team: Team,
+ recording_start_time: Optional[datetime] = None,
) -> Optional[RecordingMetadata]:
query = """
SELECT
@@ -63,7 +70,11 @@ def get_metadata(
replay_response: List[Tuple] = sync_execute(
query,
- {"team_id": team.pk, "session_id": session_id, "recording_start_time": recording_start_time},
+ {
+ "team_id": team.pk,
+ "session_id": session_id,
+ "recording_start_time": recording_start_time,
+ },
)
if len(replay_response) == 0:
diff --git a/posthog/session_recordings/queries/test/session_replay_sql.py b/posthog/session_recordings/queries/test/session_replay_sql.py
index fcc3eee03a44e..3b094a5a75c8c 100644
--- a/posthog/session_recordings/queries/test/session_replay_sql.py
+++ b/posthog/session_recordings/queries/test/session_replay_sql.py
@@ -7,7 +7,10 @@
from posthog.clickhouse.log_entries import INSERT_LOG_ENTRY_SQL
from posthog.kafka_client.client import ClickhouseProducer
-from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, KAFKA_LOG_ENTRIES
+from posthog.kafka_client.topics import (
+ KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS,
+ KAFKA_LOG_ENTRIES,
+)
from posthog.models.event.util import format_clickhouse_timestamp
from posthog.utils import cast_timestamp_or_now
@@ -134,7 +137,11 @@ def produce_replay_summary(
}
p = ClickhouseProducer()
# because this is in a test it will write directly using SQL rather than via Kafka
- p.produce(topic=KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, sql=INSERT_SINGLE_SESSION_REPLAY, data=data)
+ p.produce(
+ topic=KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS,
+ sql=INSERT_SINGLE_SESSION_REPLAY,
+ data=data,
+ )
for level, messages in log_messages.items():
for message in messages:
diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
index 9424a9df2a51c..f70f86fdba3cf 100644
--- a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
@@ -14,13 +14,17 @@
from posthog.models.action_step import ActionStep
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
from posthog.models.group.util import create_group
-from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL
+from posthog.session_recordings.sql.session_replay_event_sql import (
+ TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
from posthog.models.team import Team
from posthog.session_recordings.queries.session_recording_list_from_replay_summary import (
SessionRecordingListFromReplaySummary,
)
from posthog.session_recordings.queries.session_replay_events import ttl_days
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -64,7 +68,11 @@ def create_event(
if properties is None:
properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"}
return _create_event(
- team=team, event=event_name, timestamp=timestamp, distinct_id=distinct_id, properties=properties
+ team=team,
+ event=event_name,
+ timestamp=timestamp,
+ distinct_id=distinct_id,
+ properties=properties,
)
@property
@@ -123,7 +131,10 @@ def test_basic_query(self):
filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None})
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert session_recordings == [
{
@@ -225,7 +236,10 @@ def test_basic_query_active_sessions(
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert sorted(
[(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings],
@@ -243,7 +257,10 @@ def test_basic_query_active_sessions(
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [
(session_id_active_is_61, 59, 61.0)
@@ -257,7 +274,10 @@ def test_basic_query_active_sessions(
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert [(s["session_id"], s["duration"], s["inactive_seconds"]) for s in session_recordings] == [
(session_id_inactive_is_61, 61, 61.0)
@@ -315,7 +335,10 @@ def test_basic_query_with_paging(self):
filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 0})
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert session_recordings == [
{
@@ -341,7 +364,10 @@ def test_basic_query_with_paging(self):
filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 1})
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert session_recordings == [
{
@@ -367,7 +393,10 @@ def test_basic_query_with_paging(self):
filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 2})
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert session_recordings == []
@@ -479,7 +508,10 @@ def test_first_url_selection(self):
filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None})
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert sorted(
[{"session_id": r["session_id"], "first_url": r["first_url"]} for r in session_recordings],
@@ -561,7 +593,11 @@ def test_event_filter(self):
first_timestamp=self.base_time,
team_id=self.team.id,
)
- self.create_event(user, self.base_time, properties={"$session_id": session_id_one, "$window_id": str(uuid4())})
+ self.create_event(
+ user,
+ self.base_time,
+ properties={"$session_id": session_id_one, "$window_id": str(uuid4())},
+ )
produce_replay_summary(
distinct_id=user,
session_id=session_id_one,
@@ -571,7 +607,16 @@ def test_event_filter(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]},
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -580,7 +625,16 @@ def test_event_filter(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$autocapture", "type": "events", "order": 0, "name": "$autocapture"}]},
+ data={
+ "events": [
+ {
+ "id": "$autocapture",
+ "type": "events",
+ "order": 0,
+ "name": "$autocapture",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -609,7 +663,16 @@ def test_event_filter_has_ttl_applied_too(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]},
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -654,7 +717,12 @@ def test_event_filter_with_active_sessions(
session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}"
self.create_event(
- user, self.base_time, properties={"$session_id": session_id_total_is_61, "$window_id": str(uuid4())}
+ user,
+ self.base_time,
+ properties={
+ "$session_id": session_id_total_is_61,
+ "$window_id": str(uuid4()),
+ },
)
produce_replay_summary(
session_id=session_id_total_is_61,
@@ -671,7 +739,12 @@ def test_event_filter_with_active_sessions(
)
self.create_event(
- user, self.base_time, properties={"$session_id": session_id_active_is_61, "$window_id": str(uuid4())}
+ user,
+ self.base_time,
+ properties={
+ "$session_id": session_id_active_is_61,
+ "$window_id": str(uuid4()),
+ },
)
produce_replay_summary(
session_id=session_id_active_is_61,
@@ -691,12 +764,22 @@ def test_event_filter_with_active_sessions(
team=self.team,
data={
"duration_type_filter": "duration",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}',
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [
(session_id_total_is_61, 61, 59.0)
@@ -706,12 +789,22 @@ def test_event_filter_with_active_sessions(
team=self.team,
data={
"duration_type_filter": "active_seconds",
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}',
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
- (session_recordings, more_recordings_available) = session_recording_list_instance.run()
+ (
+ session_recordings,
+ more_recordings_available,
+ ) = session_recording_list_instance.run()
assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [
(session_id_active_is_61, 59, 61.0)
@@ -732,7 +825,11 @@ def test_event_filter_with_properties(self):
self.create_event(
user,
self.base_time,
- properties={"$browser": "Chrome", "$session_id": session_id_one, "$window_id": str(uuid4())},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": session_id_one,
+ "$window_id": str(uuid4()),
+ },
)
produce_replay_summary(
distinct_id=user,
@@ -749,7 +846,14 @@ def test_event_filter_with_properties(self):
"type": "events",
"order": 0,
"name": "$pageview",
- "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Chrome"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -768,7 +872,14 @@ def test_event_filter_with_properties(self):
"type": "events",
"order": 0,
"name": "$pageview",
- "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Firefox"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -783,12 +894,22 @@ def test_multiple_event_filters(self):
user = "test_multiple_event_filters-user"
Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"})
produce_replay_summary(
- distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user,
+ session_id=session_id,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
- self.create_event(user, self.base_time, properties={"$session_id": session_id, "$window_id": "1"})
self.create_event(
- user, self.base_time, properties={"$session_id": session_id, "$window_id": "1"}, event_name="new-event"
+ user,
+ self.base_time,
+ properties={"$session_id": session_id, "$window_id": "1"},
+ )
+ self.create_event(
+ user,
+ self.base_time,
+ properties={"$session_id": session_id, "$window_id": "1"},
+ event_name="new-event",
)
produce_replay_summary(
distinct_id=user,
@@ -801,8 +922,18 @@ def test_multiple_event_filters(self):
team=self.team,
data={
"events": [
- {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"},
- {"id": "new-event", "type": "events", "order": 0, "name": "new-event"},
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ },
+ {
+ "id": "new-event",
+ "type": "events",
+ "order": 0,
+ "name": "new-event",
+ },
]
},
)
@@ -817,8 +948,18 @@ def test_multiple_event_filters(self):
team=self.team,
data={
"events": [
- {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"},
- {"id": "new-event2", "type": "events", "order": 0, "name": "new-event2"},
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ },
+ {
+ "id": "new-event2",
+ "type": "events",
+ "order": 0,
+ "name": "new-event2",
+ },
]
},
)
@@ -844,7 +985,10 @@ def test_action_filter(self):
)
action_without_properties = self.create_action(
name="custom-event",
- properties=[{"key": "$session_id", "value": session_id_one}, {"key": "$window_id", "value": window_id}],
+ properties=[
+ {"key": "$session_id", "value": session_id_one},
+ {"key": "$window_id", "value": window_id},
+ ],
)
produce_replay_summary(
@@ -857,7 +1001,11 @@ def test_action_filter(self):
user,
self.base_time,
event_name="custom-event",
- properties={"$browser": "Chrome", "$session_id": session_id_one, "$window_id": window_id},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": session_id_one,
+ "$window_id": window_id,
+ },
)
produce_replay_summary(
distinct_id=user,
@@ -869,7 +1017,14 @@ def test_action_filter(self):
filter = SessionRecordingsFilter(
team=self.team,
data={
- "actions": [{"id": action_with_properties.id, "type": "actions", "order": 1, "name": "custom-event"}]
+ "actions": [
+ {
+ "id": action_with_properties.id,
+ "type": "actions",
+ "order": 1,
+ "name": "custom-event",
+ }
+ ]
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -879,7 +1034,14 @@ def test_action_filter(self):
filter = SessionRecordingsFilter(
team=self.team,
data={
- "actions": [{"id": action_without_properties.id, "type": "actions", "order": 1, "name": "custom-event"}]
+ "actions": [
+ {
+ "id": action_without_properties.id,
+ "type": "actions",
+ "order": 1,
+ "name": "custom-event",
+ }
+ ]
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -898,7 +1060,14 @@ def test_action_filter(self):
"type": "actions",
"order": 1,
"name": "custom-event",
- "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Firefox"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -917,7 +1086,14 @@ def test_action_filter(self):
"type": "actions",
"order": 1,
"name": "custom-event",
- "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Chrome"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -941,7 +1117,11 @@ def test_all_sessions_recording_object_keys_with_entity_filter(self):
last_timestamp=(self.base_time + relativedelta(seconds=60)),
team_id=self.team.id,
)
- self.create_event(user, self.base_time, properties={"$session_id": session_id, "$window_id": window_id})
+ self.create_event(
+ user,
+ self.base_time,
+ properties={"$session_id": session_id, "$window_id": window_id},
+ )
produce_replay_summary(
distinct_id=user,
session_id=session_id,
@@ -951,7 +1131,16 @@ def test_all_sessions_recording_object_keys_with_entity_filter(self):
)
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]},
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1056,7 +1245,8 @@ def test_date_from_filter(self):
assert session_recordings == []
filter = SessionRecordingsFilter(
- team=self.team, data={"date_from": (self.base_time - relativedelta(days=2)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_from": (self.base_time - relativedelta(days=2)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1086,7 +1276,8 @@ def test_date_from_filter_cannot_search_before_ttl(self):
)
filter = SessionRecordingsFilter(
- team=self.team, data={"date_from": (self.base_time - relativedelta(days=20)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_from": (self.base_time - relativedelta(days=20)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1094,7 +1285,8 @@ def test_date_from_filter_cannot_search_before_ttl(self):
assert session_recordings[0]["session_id"] == "storage is not past ttl"
filter = SessionRecordingsFilter(
- team=self.team, data={"date_from": (self.base_time - relativedelta(days=21)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_from": (self.base_time - relativedelta(days=21)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1102,7 +1294,8 @@ def test_date_from_filter_cannot_search_before_ttl(self):
assert session_recordings[0]["session_id"] == "storage is not past ttl"
filter = SessionRecordingsFilter(
- team=self.team, data={"date_from": (self.base_time - relativedelta(days=22)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_from": (self.base_time - relativedelta(days=22)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1129,14 +1322,16 @@ def test_date_to_filter(self):
)
filter = SessionRecordingsFilter(
- team=self.team, data={"date_to": (self.base_time - relativedelta(days=4)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_to": (self.base_time - relativedelta(days=4)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
assert session_recordings == []
filter = SessionRecordingsFilter(
- team=self.team, data={"date_to": (self.base_time - relativedelta(days=3)).strftime("%Y-%m-%d")}
+ team=self.team,
+ data={"date_to": (self.base_time - relativedelta(days=3)).strftime("%Y-%m-%d")},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1176,9 +1371,15 @@ def test_person_id_filter(self):
session_id_one = f"test_person_id_filter-{str(uuid4())}"
session_id_two = f"test_person_id_filter-{str(uuid4())}"
p = Person.objects.create(
- team=self.team, distinct_ids=[three_user_ids[0], three_user_ids[1]], properties={"email": "bla"}
+ team=self.team,
+ distinct_ids=[three_user_ids[0], three_user_ids[1]],
+ properties={"email": "bla"},
+ )
+ produce_replay_summary(
+ distinct_id=three_user_ids[0],
+ session_id=session_id_one,
+ team_id=self.team.id,
)
- produce_replay_summary(distinct_id=three_user_ids[0], session_id=session_id_one, team_id=self.team.id)
produce_replay_summary(
distinct_id=three_user_ids[1],
session_id=session_id_two,
@@ -1201,7 +1402,9 @@ def test_all_filters_at_once(self):
target_session_id = f"test_all_filters_at_once-{str(uuid4())}"
p = Person.objects.create(
- team=self.team, distinct_ids=[three_user_ids[0], three_user_ids[1]], properties={"email": "bla"}
+ team=self.team,
+ distinct_ids=[three_user_ids[0], three_user_ids[1]],
+ properties={"email": "bla"},
)
custom_event_action = self.create_action(name="custom-event")
@@ -1219,7 +1422,9 @@ def test_all_filters_at_once(self):
team_id=self.team.id,
)
self.create_event(
- three_user_ids[0], self.base_time - relativedelta(days=3), properties={"$session_id": target_session_id}
+ three_user_ids[0],
+ self.base_time - relativedelta(days=3),
+ properties={"$session_id": target_session_id},
)
self.create_event(
three_user_ids[0],
@@ -1250,8 +1455,22 @@ def test_all_filters_at_once(self):
"date_to": (self.base_time + relativedelta(days=3)).strftime("%Y-%m-%d"),
"date_from": (self.base_time - relativedelta(days=10)).strftime("%Y-%m-%d"),
"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}',
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
- "actions": [{"id": custom_event_action.id, "type": "actions", "order": 1, "name": "custom-event"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
+ "actions": [
+ {
+ "id": custom_event_action.id,
+ "type": "actions",
+ "order": 1,
+ "name": "custom-event",
+ }
+ ],
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -1264,7 +1483,10 @@ def test_teams_dont_leak_event_filter(self):
session_id = f"test_teams_dont_leak_event_filter-{str(uuid4())}"
produce_replay_summary(
- distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user,
+ session_id=session_id,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
self.create_event(1, self.base_time + relativedelta(seconds=15), team=another_team)
produce_replay_summary(
@@ -1276,7 +1498,16 @@ def test_teams_dont_leak_event_filter(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]},
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -1295,7 +1526,10 @@ def test_event_filter_with_person_properties(self):
Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"})
produce_replay_summary(
- distinct_id=user_one, session_id=session_id_one, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user_one,
+ session_id=session_id_one,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
produce_replay_summary(
distinct_id=user_one,
@@ -1304,7 +1538,10 @@ def test_event_filter_with_person_properties(self):
team_id=self.team.id,
)
produce_replay_summary(
- distinct_id=user_two, session_id=session_id_two, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user_two,
+ session_id=session_id_two,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
produce_replay_summary(
distinct_id=user_two,
@@ -1315,7 +1552,16 @@ def test_event_filter_with_person_properties(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"properties": [{"key": "email", "value": ["bla"], "operator": "exact", "type": "person"}]},
+ data={
+ "properties": [
+ {
+ "key": "email",
+ "value": ["bla"],
+ "operator": "exact",
+ "type": "person",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -1336,12 +1582,24 @@ def test_filter_with_cohort_properties(self):
Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"})
Person.objects.create(
- team=self.team, distinct_ids=[user_two], properties={"email": "bla2", "$some_prop": "some_val"}
+ team=self.team,
+ distinct_ids=[user_two],
+ properties={"email": "bla2", "$some_prop": "some_val"},
)
cohort = Cohort.objects.create(
team=self.team,
name="cohort1",
- groups=[{"properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "$some_prop",
+ "value": "some_val",
+ "type": "person",
+ }
+ ]
+ }
+ ],
)
cohort.calculate_people_ch(pending_version=0)
@@ -1373,7 +1631,16 @@ def test_filter_with_cohort_properties(self):
)
filter = SessionRecordingsFilter(
team=self.team,
- data={"properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}]},
+ data={
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort.pk,
+ "operator": None,
+ "type": "cohort",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1393,12 +1660,24 @@ def test_filter_with_events_and_cohorts(self):
Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"})
Person.objects.create(
- team=self.team, distinct_ids=[user_two], properties={"email": "bla2", "$some_prop": "some_val"}
+ team=self.team,
+ distinct_ids=[user_two],
+ properties={"email": "bla2", "$some_prop": "some_val"},
)
cohort = Cohort.objects.create(
team=self.team,
name="cohort1",
- groups=[{"properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}]}],
+ groups=[
+ {
+ "properties": [
+ {
+ "key": "$some_prop",
+ "value": "some_val",
+ "type": "person",
+ }
+ ]
+ }
+ ],
)
cohort.calculate_people_ch(pending_version=0)
@@ -1446,8 +1725,22 @@ def test_filter_with_events_and_cohorts(self):
data={
# has to be in the cohort and pageview has to be in the events
# test data has one user in the cohort but no pageviews
- "properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}],
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort.pk,
+ "operator": None,
+ "type": "cohort",
+ }
+ ],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -1458,8 +1751,22 @@ def test_filter_with_events_and_cohorts(self):
filter = SessionRecordingsFilter(
team=self.team,
data={
- "properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}],
- "events": [{"id": "custom_event", "type": "events", "order": 0, "name": "custom_event"}],
+ "properties": [
+ {
+ "key": "id",
+ "value": cohort.pk,
+ "operator": None,
+ "type": "cohort",
+ }
+ ],
+ "events": [
+ {
+ "id": "custom_event",
+ "type": "events",
+ "order": 0,
+ "name": "custom_event",
+ }
+ ],
},
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -1476,14 +1783,23 @@ def test_event_filter_with_matching_on_session_id(self):
session_id = f"test_event_filter_with_matching_on_session_id-1-{str(uuid4())}"
self.create_event(
- user_distinct_id, self.base_time, event_name="$pageview", properties={"$session_id": session_id}
+ user_distinct_id,
+ self.base_time,
+ event_name="$pageview",
+ properties={"$session_id": session_id},
)
self.create_event(
- user_distinct_id, self.base_time, event_name="$autocapture", properties={"$session_id": str(uuid4())}
+ user_distinct_id,
+ self.base_time,
+ event_name="$autocapture",
+ properties={"$session_id": str(uuid4())},
)
produce_replay_summary(
- distinct_id=user_distinct_id, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user_distinct_id,
+ session_id=session_id,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
produce_replay_summary(
distinct_id=user_distinct_id,
@@ -1494,7 +1810,16 @@ def test_event_filter_with_matching_on_session_id(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]},
+ data={
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1504,7 +1829,16 @@ def test_event_filter_with_matching_on_session_id(self):
filter = SessionRecordingsFilter(
team=self.team,
- data={"events": [{"id": "$autocapture", "type": "events", "order": 0, "name": "$autocapture"}]},
+ data={
+ "events": [
+ {
+ "id": "$autocapture",
+ "type": "events",
+ "order": 0,
+ "name": "$autocapture",
+ }
+ ]
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
(session_recordings, _) = session_recording_list_instance.run()
@@ -1521,11 +1855,18 @@ def test_event_filter_with_hogql_properties(self):
self.create_event(
user,
self.base_time,
- properties={"$browser": "Chrome", "$session_id": session_id, "$window_id": str(uuid4())},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": session_id,
+ "$window_id": str(uuid4()),
+ },
)
produce_replay_summary(
- distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user,
+ session_id=session_id,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
produce_replay_summary(
distinct_id=user,
@@ -1586,11 +1927,18 @@ def test_event_filter_with_hogql_person_properties(self):
self.create_event(
user,
self.base_time,
- properties={"$browser": "Chrome", "$session_id": session_id, "$window_id": str(uuid4())},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": session_id,
+ "$window_id": str(uuid4()),
+ },
)
produce_replay_summary(
- distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id
+ distinct_id=user,
+ session_id=session_id,
+ first_timestamp=self.base_time,
+ team_id=self.team.id,
)
produce_replay_summary(
distinct_id=user,
@@ -1609,7 +1957,10 @@ def test_event_filter_with_hogql_person_properties(self):
"order": 0,
"name": "$pageview",
"properties": [
- {"key": "person.properties.email == 'bla'", "type": "hogql"},
+ {
+ "key": "person.properties.email == 'bla'",
+ "type": "hogql",
+ },
],
}
]
@@ -1631,7 +1982,10 @@ def test_event_filter_with_hogql_person_properties(self):
"order": 0,
"name": "$pageview",
"properties": [
- {"key": "person.properties.email == 'something else'", "type": "hogql"},
+ {
+ "key": "person.properties.email == 'something else'",
+ "type": "hogql",
+ },
],
}
]
@@ -1656,21 +2010,33 @@ def test_any_event_filter_with_properties(self):
self.create_event(
"user",
self.base_time,
- properties={"$browser": "Chrome", "$session_id": page_view_session_id, "$window_id": "1"},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": page_view_session_id,
+ "$window_id": "1",
+ },
event_name="$pageview",
)
self.create_event(
"user",
self.base_time,
- properties={"$browser": "Chrome", "$session_id": my_custom_event_session_id, "$window_id": "1"},
+ properties={
+ "$browser": "Chrome",
+ "$session_id": my_custom_event_session_id,
+ "$window_id": "1",
+ },
event_name="my-custom-event",
)
self.create_event(
"user",
self.base_time,
- properties={"$browser": "Safari", "$session_id": non_matching__event_session_id, "$window_id": "1"},
+ properties={
+ "$browser": "Safari",
+ "$session_id": non_matching__event_session_id,
+ "$window_id": "1",
+ },
event_name="my-non-matching-event",
)
@@ -1727,7 +2093,14 @@ def test_any_event_filter_with_properties(self):
"type": "events",
"order": 0,
"name": "All events",
- "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Chrome"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -1749,7 +2122,14 @@ def test_any_event_filter_with_properties(self):
"type": "events",
"order": 0,
"name": "All events",
- "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}],
+ "properties": [
+ {
+ "key": "$browser",
+ "value": ["Firefox"],
+ "operator": "exact",
+ "type": "event",
+ }
+ ],
}
]
},
@@ -1990,7 +2370,14 @@ def test_filter_for_recordings_by_console_text(self):
first_timestamp=self.base_time,
team_id=self.team.id,
console_log_count=4,
- log_messages={"log": ["log message 1", "log message 2", "log message 3", "log message 4"]},
+ log_messages={
+ "log": [
+ "log message 1",
+ "log message 2",
+ "log message 3",
+ "log message 4",
+ ]
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -1999,7 +2386,13 @@ def test_filter_for_recordings_by_console_text(self):
team_id=self.team.id,
console_warn_count=5,
log_messages={
- "warn": ["warn message 1", "warn message 2", "warn message 3", "warn message 4", "warn message 5"]
+ "warn": [
+ "warn message 1",
+ "warn message 2",
+ "warn message 3",
+ "warn message 4",
+ "warn message 5",
+ ]
},
)
produce_replay_summary(
@@ -2008,7 +2401,14 @@ def test_filter_for_recordings_by_console_text(self):
first_timestamp=self.base_time,
team_id=self.team.id,
console_error_count=4,
- log_messages={"error": ["error message 1", "error message 2", "error message 3", "error message 4"]},
+ log_messages={
+ "error": [
+ "error message 1",
+ "error message 2",
+ "error message 3",
+ "error message 4",
+ ]
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2018,7 +2418,12 @@ def test_filter_for_recordings_by_console_text(self):
console_error_count=4,
console_log_count=3,
log_messages={
- "error": ["error message 1", "error message 2", "error message 3", "error message 4"],
+ "error": [
+ "error message 1",
+ "error message 2",
+ "error message 3",
+ "error message 4",
+ ],
"log": ["log message 1", "log message 2", "log message 3"],
},
)
@@ -2026,7 +2431,10 @@ def test_filter_for_recordings_by_console_text(self):
filter = SessionRecordingsFilter(
team=self.team,
# there are 5 warn and 4 error logs, message 4 matches in both
- data={"console_logs": ["warn", "error"], "console_search_query": "message 4"},
+ data={
+ "console_logs": ["warn", "error"],
+ "console_search_query": "message 4",
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -2043,7 +2451,10 @@ def test_filter_for_recordings_by_console_text(self):
filter = SessionRecordingsFilter(
team=self.team,
        # there are 5 warn and 4 error logs, message 5 only matches in warn
- data={"console_logs": ["warn", "error"], "console_search_query": "message 5"},
+ data={
+ "console_logs": ["warn", "error"],
+ "console_search_query": "message 5",
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -2058,7 +2469,10 @@ def test_filter_for_recordings_by_console_text(self):
filter = SessionRecordingsFilter(
team=self.team,
# match is case-insensitive
- data={"console_logs": ["warn", "error"], "console_search_query": "MESSAGE 5"},
+ data={
+ "console_logs": ["warn", "error"],
+ "console_search_query": "MESSAGE 5",
+ },
)
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
@@ -2083,14 +2497,26 @@ def test_filter_for_recordings_by_console_text(self):
assert sorted([sr["session_id"] for sr in session_recordings]) == sorted([])
@also_test_with_materialized_columns(
- event_properties=["is_internal_user"], person_properties=["email"], verify_no_jsonextract=False
+ event_properties=["is_internal_user"],
+ person_properties=["email"],
+ verify_no_jsonextract=False,
)
@freeze_time("2021-01-21T20:00:00.000Z")
@snapshot_clickhouse_queries
def test_event_filter_with_test_accounts_excluded(self):
self.team.test_account_filters = [
- {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"},
- {"key": "is_internal_user", "value": ["false"], "operator": "exact", "type": "event"},
+ {
+ "key": "email",
+ "value": "@posthog.com",
+ "operator": "not_icontains",
+ "type": "person",
+ },
+ {
+ "key": "is_internal_user",
+ "value": ["false"],
+ "operator": "exact",
+ "type": "event",
+ },
{"key": "properties.$browser == 'Chrome'", "type": "hogql"},
]
self.team.save()
@@ -2106,7 +2532,11 @@ def test_event_filter_with_test_accounts_excluded(self):
self.create_event(
"user",
self.base_time,
- properties={"$session_id": "1", "$window_id": "1", "is_internal_user": "true"},
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "is_internal_user": "true",
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2118,7 +2548,14 @@ def test_event_filter_with_test_accounts_excluded(self):
filter = SessionRecordingsFilter(
team=self.team,
data={
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": True,
},
)
@@ -2129,7 +2566,14 @@ def test_event_filter_with_test_accounts_excluded(self):
filter = SessionRecordingsFilter(
team=self.team,
data={
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2138,7 +2582,9 @@ def test_event_filter_with_test_accounts_excluded(self):
self.assertEqual(len(session_recordings), 1)
@also_test_with_materialized_columns(
- event_properties=["$browser"], person_properties=["email"], verify_no_jsonextract=False
+ event_properties=["$browser"],
+ person_properties=["email"],
+ verify_no_jsonextract=False,
)
@freeze_time("2021-01-21T20:00:00.000Z")
@snapshot_clickhouse_queries
@@ -2149,7 +2595,11 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self):
self.team.save()
Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
- Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"email": "not-the-other-one"},
+ )
produce_replay_summary(
distinct_id="user",
@@ -2186,7 +2636,14 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self):
team=self.team,
data={
# pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2203,7 +2660,14 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self):
team=self.team,
data={
# only 1 pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": True,
},
)
@@ -2239,12 +2703,21 @@ def test_top_level_event_property_test_account_filter(self):
The filter wasn't triggering the "should join events check", and so we didn't apply the filter at all
"""
self.team.test_account_filters = [
- {"key": "is_internal_user", "value": ["false"], "operator": "exact", "type": "event"},
+ {
+ "key": "is_internal_user",
+ "value": ["false"],
+ "operator": "exact",
+ "type": "event",
+ },
]
self.team.save()
Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
- Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"email": "not-the-other-one"},
+ )
produce_replay_summary(
distinct_id="user",
@@ -2255,7 +2728,11 @@ def test_top_level_event_property_test_account_filter(self):
self.create_event(
"user",
self.base_time,
- properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False},
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "is_internal_user": False,
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2273,7 +2750,11 @@ def test_top_level_event_property_test_account_filter(self):
self.create_event(
"user2",
self.base_time,
- properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True},
+ properties={
+ "$session_id": "2",
+ "$window_id": "1",
+ "is_internal_user": True,
+ },
)
# there are 2 pageviews
@@ -2281,7 +2762,14 @@ def test_top_level_event_property_test_account_filter(self):
team=self.team,
data={
# pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2316,7 +2804,11 @@ def test_top_level_hogql_event_property_test_account_filter(self):
self.team.save()
Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
- Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"email": "not-the-other-one"},
+ )
produce_replay_summary(
distinct_id="user",
@@ -2327,7 +2819,11 @@ def test_top_level_hogql_event_property_test_account_filter(self):
self.create_event(
"user",
self.base_time,
- properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False},
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "is_internal_user": False,
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2345,7 +2841,11 @@ def test_top_level_hogql_event_property_test_account_filter(self):
self.create_event(
"user2",
self.base_time,
- properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True},
+ properties={
+ "$session_id": "2",
+ "$window_id": "1",
+ "is_internal_user": True,
+ },
)
# there are 2 pageviews
@@ -2353,7 +2853,14 @@ def test_top_level_hogql_event_property_test_account_filter(self):
team=self.team,
data={
# pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2388,7 +2895,11 @@ def test_top_level_hogql_person_property_test_account_filter(self):
self.team.save()
Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
- Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"email": "not-the-other-one"},
+ )
produce_replay_summary(
distinct_id="user",
@@ -2399,7 +2910,11 @@ def test_top_level_hogql_person_property_test_account_filter(self):
self.create_event(
"user",
self.base_time,
- properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False},
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "is_internal_user": False,
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2417,7 +2932,11 @@ def test_top_level_hogql_person_property_test_account_filter(self):
self.create_event(
"user2",
self.base_time,
- properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True},
+ properties={
+ "$session_id": "2",
+ "$window_id": "1",
+ "is_internal_user": True,
+ },
)
# there are 2 pageviews
@@ -2425,7 +2944,14 @@ def test_top_level_hogql_person_property_test_account_filter(self):
team=self.team,
data={
# pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2458,7 +2984,11 @@ def test_top_level_person_property_test_account_filter(self):
self.team.save()
Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"})
- Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"})
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"email": "not-the-other-one"},
+ )
produce_replay_summary(
distinct_id="user",
@@ -2469,7 +2999,11 @@ def test_top_level_person_property_test_account_filter(self):
self.create_event(
"user",
self.base_time,
- properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False},
+ properties={
+ "$session_id": "1",
+ "$window_id": "1",
+ "is_internal_user": False,
+ },
)
produce_replay_summary(
distinct_id="user",
@@ -2487,7 +3021,11 @@ def test_top_level_person_property_test_account_filter(self):
self.create_event(
"user2",
self.base_time,
- properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True},
+ properties={
+ "$session_id": "2",
+ "$window_id": "1",
+ "is_internal_user": True,
+ },
)
# there are 2 pageviews
@@ -2495,7 +3033,14 @@ def test_top_level_person_property_test_account_filter(self):
team=self.team,
data={
# pageview that matches the hogql test_accounts filter
- "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}],
+ "events": [
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ }
+ ],
"filter_test_accounts": False,
},
)
@@ -2531,8 +3076,18 @@ def test_event_filter_with_two_events_and_multiple_teams(self):
team=self.team,
data={
"events": [
- {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"},
- {"id": "$pageleave", "type": "events", "order": 0, "name": "$pageleave"},
+ {
+ "id": "$pageview",
+ "type": "events",
+ "order": 0,
+ "name": "$pageview",
+ },
+ {
+ "id": "$pageleave",
+ "type": "events",
+ "order": 0,
+ "name": "$pageleave",
+ },
],
},
)
@@ -2585,11 +3140,19 @@ def test_event_filter_with_group_filter(self):
GroupTypeMapping.objects.create(team=self.team, group_type="project", group_type_index=0)
create_group(
- team_id=self.team.pk, group_type_index=0, group_key="project:1", properties={"name": "project one"}
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="project:1",
+ properties={"name": "project one"},
)
GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=1)
- create_group(team_id=self.team.pk, group_type_index=1, group_key="org:1", properties={"name": "org one"})
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=1,
+ group_key="org:1",
+ properties={"name": "org one"},
+ )
self.create_event(
"user",
diff --git a/posthog/session_recordings/queries/test/test_session_recording_properties.py b/posthog/session_recordings/queries/test/test_session_recording_properties.py
index 9844d77006721..aa152b0b2fa16 100644
--- a/posthog/session_recordings/queries/test/test_session_recording_properties.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_properties.py
@@ -4,9 +4,18 @@
from posthog.models import Person
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
-from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
-from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries
+from posthog.session_recordings.queries.session_recording_properties import (
+ SessionRecordingProperties,
+)
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
+from posthog.test.base import (
+ BaseTest,
+ ClickhouseTestMixin,
+ _create_event,
+ snapshot_clickhouse_queries,
+)
class TestSessionRecordingProperties(BaseTest, ClickhouseTestMixin):
@@ -20,7 +29,13 @@ def create_event(
):
if team is None:
team = self.team
- _create_event(team=team, event=event_name, timestamp=timestamp, distinct_id=distinct_id, properties=properties)
+ _create_event(
+ team=team,
+ event=event_name,
+ timestamp=timestamp,
+ distinct_id=distinct_id,
+ properties=properties,
+ )
@property
def base_time(self):
@@ -77,7 +92,10 @@ def test_properties_list(self):
self.assertEqual(session_recordings_properties[0]["properties"]["$browser"], "Chrome")
self.assertEqual(session_recordings_properties[0]["properties"]["$os"], "Mac OS X")
self.assertEqual(session_recordings_properties[0]["properties"]["$device_type"], "Desktop")
- self.assertEqual(session_recordings_properties[0]["properties"]["$current_url"], "https://blah.com/blah")
+ self.assertEqual(
+ session_recordings_properties[0]["properties"]["$current_url"],
+ "https://blah.com/blah",
+ )
self.assertEqual(session_recordings_properties[0]["properties"]["$host"], "blah.com")
self.assertEqual(session_recordings_properties[0]["properties"]["$pathname"], "/blah")
self.assertEqual(session_recordings_properties[0]["properties"]["$geoip_country_code"], "KR")
diff --git a/posthog/session_recordings/queries/test/test_session_replay_events.py b/posthog/session_recordings/queries/test/test_session_replay_events.py
index bbdec4ea0cc3e..04393f8500c07 100644
--- a/posthog/session_recordings/queries/test/test_session_replay_events.py
+++ b/posthog/session_recordings/queries/test/test_session_replay_events.py
@@ -1,6 +1,8 @@
from posthog.models import Team
from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import ClickhouseTestMixin, APIBaseTest
from dateutil.relativedelta import relativedelta
from django.utils.timezone import now
@@ -63,6 +65,8 @@ def test_get_metadata_does_not_leak_between_teams(self) -> None:
def test_get_metadata_filters_by_date(self) -> None:
metadata = SessionReplayEvents().get_metadata(
- session_id="1", team=self.team, recording_start_time=self.base_time + relativedelta(days=2)
+ session_id="1",
+ team=self.team,
+ recording_start_time=self.base_time + relativedelta(days=2),
)
assert metadata is None
diff --git a/posthog/session_recordings/queries/test/test_session_replay_summaries.py b/posthog/session_recordings/queries/test/test_session_replay_summaries.py
index 5a1e9b94db842..6d3376d467ae6 100644
--- a/posthog/session_recordings/queries/test/test_session_replay_summaries.py
+++ b/posthog/session_recordings/queries/test/test_session_replay_summaries.py
@@ -9,7 +9,9 @@
from posthog.models import Team
from posthog.models.event.util import format_clickhouse_timestamp
from posthog.queries.app_metrics.serializers import AppMetricsRequestSerializer
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py
index 20e8a0846440c..e1191c4ddb37e 100644
--- a/posthog/session_recordings/realtime_snapshots.py
+++ b/posthog/session_recordings/realtime_snapshots.py
@@ -40,7 +40,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
    # We always publish as it could be that a rebalance has occurred and the consumer doesn't know it should be
    # sending data to redis
- redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id}))
+ redis.publish(
+ SUBSCRIPTION_CHANNEL,
+ json.dumps({"team_id": team_id, "session_id": session_id}),
+ )
if not encoded_snapshots and attempt_count < ATTEMPT_MAX:
logger.info(
@@ -50,7 +53,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
attempt_count=attempt_count,
)
# If we don't have it we could be in the process of getting it and syncing it
- redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id}))
+ redis.publish(
+ SUBSCRIPTION_CHANNEL,
+ json.dumps({"team_id": team_id, "session_id": session_id}),
+ )
PUBLISHED_REALTIME_SUBSCRIPTIONS_COUNTER.labels(
team_id=team_id, session_id=session_id, attempt_count=attempt_count
).inc()
@@ -73,7 +79,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
# very broad capture to see if there are any unexpected errors
capture_exception(
e,
- extras={"attempt_count": attempt_count, "operation": "get_realtime_snapshots"},
+ extras={
+ "attempt_count": attempt_count,
+ "operation": "get_realtime_snapshots",
+ },
tags={"team_id": team_id, "session_id": session_id},
)
raise e
diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py
index 827703006340c..6996d2c990460 100644
--- a/posthog/session_recordings/session_recording_api.py
+++ b/posthog/session_recordings/session_recording_api.py
@@ -29,17 +29,26 @@
SharingTokenPermission,
TeamMemberAccessPermission,
)
-from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
+from posthog.session_recordings.models.session_recording_event import (
+ SessionRecordingViewed,
+)
from posthog.session_recordings.queries.session_recording_list_from_replay_summary import (
SessionRecordingListFromReplaySummary,
SessionIdEventsQuery,
)
-from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties
-from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle
+from posthog.session_recordings.queries.session_recording_properties import (
+ SessionRecordingProperties,
+)
+from posthog.rate_limit import (
+ ClickHouseBurstRateThrottle,
+ ClickHouseSustainedRateThrottle,
+)
from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots
-from posthog.session_recordings.snapshots.convert_legacy_snapshots import convert_original_version_lts_recording
+from posthog.session_recordings.snapshots.convert_legacy_snapshots import (
+ convert_original_version_lts_recording,
+)
from posthog.storage import object_storage
from prometheus_client import Counter
@@ -130,7 +139,11 @@ class SessionRecordingSnapshotsSerializer(serializers.Serializer):
class SessionRecordingViewSet(StructuredViewSetMixin, viewsets.GenericViewSet):
- permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission]
+ permission_classes = [
+ IsAuthenticated,
+ ProjectMembershipNecessaryPermissions,
+ TeamMemberAccessPermission,
+ ]
throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle]
serializer_class = SessionRecordingSerializer
# We don't use this
@@ -269,7 +282,9 @@ def snapshots(self, request: request.Request, **kwargs):
event_properties["$session_id"] = request.headers["X-POSTHOG-SESSION-ID"]
posthoganalytics.capture(
- self._distinct_id_from_request(request), "v2 session recording snapshots viewed", event_properties
+ self._distinct_id_from_request(request),
+ "v2 session recording snapshots viewed",
+ event_properties,
)
if source:
@@ -338,7 +353,9 @@ def snapshots(self, request: request.Request, **kwargs):
event_properties["source"] = "realtime"
event_properties["snapshots_length"] = len(snapshots)
posthoganalytics.capture(
- self._distinct_id_from_request(request), "session recording snapshots v2 loaded", event_properties
+ self._distinct_id_from_request(request),
+ "session recording snapshots v2 loaded",
+ event_properties,
)
response_data["snapshots"] = snapshots
@@ -366,7 +383,9 @@ def snapshots(self, request: request.Request, **kwargs):
event_properties["source"] = "blob"
event_properties["blob_key"] = blob_key
posthoganalytics.capture(
- self._distinct_id_from_request(request), "session recording snapshots v2 loaded", event_properties
+ self._distinct_id_from_request(request),
+ "session recording snapshots v2 loaded",
+ event_properties,
)
with requests.get(url=url, stream=True) as r:
@@ -451,9 +470,10 @@ def list_recordings(filter: SessionRecordingsFilter, request: request.Request, c
if (all_session_ids and filter.session_ids) or not all_session_ids:
# Only go to clickhouse if we still have remaining specified IDs, or we are not specifying IDs
- (ch_session_recordings, more_recordings_available) = SessionRecordingListFromReplaySummary(
- filter=filter, team=team
- ).run()
+ (
+ ch_session_recordings,
+ more_recordings_available,
+ ) = SessionRecordingListFromReplaySummary(filter=filter, team=team).run()
recordings_from_clickhouse = SessionRecording.get_or_build_from_clickhouse(team, ch_session_recordings)
recordings = recordings + recordings_from_clickhouse
@@ -462,7 +482,10 @@ def list_recordings(filter: SessionRecordingsFilter, request: request.Request, c
# If we have specified session_ids we need to sort them by the order they were specified
if all_session_ids:
- recordings = sorted(recordings, key=lambda x: cast(List[str], all_session_ids).index(x.session_id))
+ recordings = sorted(
+ recordings,
+ key=lambda x: cast(List[str], all_session_ids).index(x.session_id),
+ )
if not request.user.is_authenticated: # for mypy
raise exceptions.NotAuthenticated()
diff --git a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py
index a60e1b74717e0..963016d0e869a 100644
--- a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py
+++ b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py
@@ -18,12 +18,17 @@
def _save_converted_content_back_to_storage(converted_content: str, recording: SessionRecording) -> str:
try:
- from ee.session_recordings.session_recording_extensions import save_recording_with_new_content
+ from ee.session_recordings.session_recording_extensions import (
+ save_recording_with_new_content,
+ )
return save_recording_with_new_content(recording, converted_content)
except ImportError:
# not running in EE context... shouldn't get here
- logger.error("attempted_to_save_converted_content_back_to_storage_in_non_ee_context", recording_id=recording.id)
+ logger.error(
+ "attempted_to_save_converted_content_back_to_storage_in_non_ee_context",
+ recording_id=recording.id,
+ )
return ""
diff --git a/posthog/session_recordings/sql/session_recording_event_sql.py b/posthog/session_recordings/sql/session_recording_event_sql.py
index 908b4b4034ddd..fc52f27fbdae8 100644
--- a/posthog/session_recordings/sql/session_recording_event_sql.py
+++ b/posthog/session_recordings/sql/session_recording_event_sql.py
@@ -2,7 +2,11 @@
from posthog.clickhouse.indexes import index_by_kafka_timestamp
from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS, kafka_engine, ttl_period
-from posthog.clickhouse.table_engines import Distributed, ReplacingMergeTree, ReplicationScheme
+from posthog.clickhouse.table_engines import (
+ Distributed,
+ ReplacingMergeTree,
+ ReplicationScheme,
+)
from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_RECORDING_EVENTS
SESSION_RECORDING_EVENTS_DATA_TABLE = lambda: "sharded_session_recording_events"
@@ -72,7 +76,9 @@
SESSION_RECORDING_EVENTS_DATA_TABLE_ENGINE = lambda: ReplacingMergeTree(
- "session_recording_events", ver="_timestamp", replication_scheme=ReplicationScheme.SHARDED
+ "session_recording_events",
+ ver="_timestamp",
+ replication_scheme=ReplicationScheme.SHARDED,
)
SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: (
SESSION_RECORDING_EVENTS_TABLE_BASE_SQL
@@ -129,7 +135,10 @@
WRITABLE_SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: SESSION_RECORDING_EVENTS_TABLE_BASE_SQL.format(
table_name="writable_session_recording_events",
cluster=settings.CLICKHOUSE_CLUSTER,
- engine=Distributed(data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"),
+ engine=Distributed(
+ data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(),
+ sharding_key="sipHash64(distinct_id)",
+ ),
extra_fields=KAFKA_COLUMNS,
materialized_columns="",
)
@@ -138,7 +147,10 @@
DISTRIBUTED_SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: SESSION_RECORDING_EVENTS_TABLE_BASE_SQL.format(
table_name="session_recording_events",
cluster=settings.CLICKHOUSE_CLUSTER,
- engine=Distributed(data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"),
+ engine=Distributed(
+ data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(),
+ sharding_key="sipHash64(distinct_id)",
+ ),
extra_fields=KAFKA_COLUMNS,
materialized_columns=SESSION_RECORDING_EVENTS_PROXY_MATERIALIZED_COLUMNS,
)
diff --git a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
index ac897fccc1d08..dcf8e5abd809d 100644
--- a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
@@ -1,6 +1,8 @@
from django.conf import settings
-from posthog.session_recordings.sql.session_replay_event_sql import SESSION_REPLAY_EVENTS_DATA_TABLE
+from posthog.session_recordings.sql.session_replay_event_sql import (
+ SESSION_REPLAY_EVENTS_DATA_TABLE,
+)
DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL = (
lambda: "DROP TABLE IF EXISTS session_replay_events_mv ON CLUSTER {cluster}".format(
diff --git a/posthog/session_recordings/sql/session_replay_event_sql.py b/posthog/session_recordings/sql/session_replay_event_sql.py
index dfe839843979f..e7c2576e93f66 100644
--- a/posthog/session_recordings/sql/session_replay_event_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_sql.py
@@ -1,7 +1,11 @@
from django.conf import settings
from posthog.clickhouse.kafka_engine import kafka_engine
-from posthog.clickhouse.table_engines import Distributed, ReplicationScheme, AggregatingMergeTree
+from posthog.clickhouse.table_engines import (
+ Distributed,
+ ReplicationScheme,
+ AggregatingMergeTree,
+)
from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS
SESSION_REPLAY_EVENTS_DATA_TABLE = lambda: "sharded_session_replay_events"
@@ -147,7 +151,10 @@
WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: SESSION_REPLAY_EVENTS_TABLE_BASE_SQL.format(
table_name="writable_session_replay_events",
cluster=settings.CLICKHOUSE_CLUSTER,
- engine=Distributed(data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"),
+ engine=Distributed(
+ data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ sharding_key="sipHash64(distinct_id)",
+ ),
)
@@ -155,7 +162,10 @@
DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: SESSION_REPLAY_EVENTS_TABLE_BASE_SQL.format(
table_name="session_replay_events",
cluster=settings.CLICKHOUSE_CLUSTER,
- engine=Distributed(data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"),
+ engine=Distributed(
+ data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ sharding_key="sipHash64(distinct_id)",
+ ),
)
diff --git a/posthog/session_recordings/test/test_lts_session_recordings.py b/posthog/session_recordings/test/test_lts_session_recordings.py
index b16d873b93d7b..e7de94464c18f 100644
--- a/posthog/session_recordings/test/test_lts_session_recordings.py
+++ b/posthog/session_recordings/test/test_lts_session_recordings.py
@@ -19,7 +19,10 @@ def setUp(self):
# Create a new team each time to ensure no clashing between tests
self.team = Team.objects.create(organization=self.organization, name="New Team")
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_2023_08_01_version_stored_snapshots_can_be_gathered(
self, mock_list_objects: MagicMock, _mock_exists: MagicMock
@@ -72,7 +75,10 @@ def list_objects_func(path: str) -> List[str]:
],
}
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_original_version_stored_snapshots_can_be_gathered(
self, mock_list_objects: MagicMock, _mock_exists: MagicMock
@@ -112,7 +118,10 @@ def list_objects_func(path: str) -> List[str]:
],
}
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.requests.get")
@patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url")
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
@@ -173,7 +182,10 @@ def list_objects_func(path: str) -> List[str]:
assert response_data == "the file contents"
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.requests.get")
@patch("posthog.session_recordings.session_recording_api.object_storage.tag")
@patch("posthog.session_recordings.session_recording_api.object_storage.write")
diff --git a/posthog/session_recordings/test/test_session_recording_helpers.py b/posthog/session_recordings/test/test_session_recording_helpers.py
index 6c64d84efaf78..d59cf816dbf83 100644
--- a/posthog/session_recordings/test/test_session_recording_helpers.py
+++ b/posthog/session_recordings/test/test_session_recording_helpers.py
@@ -244,14 +244,20 @@ def test_new_ingestion_large_full_snapshot_is_separated(raw_snapshot_events, moc
"distinct_id": "abc123",
"$session_id": "1234",
"$window_id": "1",
- "$snapshot_items": [{"type": 3, "timestamp": 1546300800000}, {"type": 3, "timestamp": 1546300800000}],
+ "$snapshot_items": [
+ {"type": 3, "timestamp": 1546300800000},
+ {"type": 3, "timestamp": 1546300800000},
+ ],
},
},
]
def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_events, mocker: MockerFixture):
- mocker.patch("posthog.models.utils.UUIDT", return_value="0178495e-8521-0000-8e1c-2652fa57099b")
+ mocker.patch(
+ "posthog.models.utils.UUIDT",
+ return_value="0178495e-8521-0000-8e1c-2652fa57099b",
+ )
mocker.patch("time.time", return_value=0)
almost_too_big_payloads = [
@@ -265,7 +271,11 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event
"properties": {
"$session_id": "1234",
"$window_id": "1",
- "$snapshot_data": {"type": 7, "timestamp": 234, "something": almost_too_big_payloads[0]},
+ "$snapshot_data": {
+ "type": 7,
+ "timestamp": 234,
+ "something": almost_too_big_payloads[0],
+ },
"distinct_id": "abc123",
},
},
@@ -274,7 +284,11 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event
"properties": {
"$session_id": "1234",
"$window_id": "1",
- "$snapshot_data": {"type": 8, "timestamp": 123, "something": almost_too_big_payloads[1]},
+ "$snapshot_data": {
+ "type": 8,
+ "timestamp": 123,
+ "something": almost_too_big_payloads[1],
+ },
"distinct_id": "abc123",
},
},
@@ -285,7 +299,13 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event
"properties": {
"$session_id": "1234",
"$window_id": "1",
- "$snapshot_items": [{"type": 7, "timestamp": 234, "something": almost_too_big_payloads[0]}],
+ "$snapshot_items": [
+ {
+ "type": 7,
+ "timestamp": 234,
+ "something": almost_too_big_payloads[0],
+ }
+ ],
"distinct_id": "abc123",
},
},
@@ -294,7 +314,13 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event
"properties": {
"$session_id": "1234",
"$window_id": "1",
- "$snapshot_items": [{"type": 8, "timestamp": 123, "something": almost_too_big_payloads[1]}],
+ "$snapshot_items": [
+ {
+ "type": 8,
+ "timestamp": 123,
+ "something": almost_too_big_payloads[1],
+ }
+ ],
"distinct_id": "abc123",
},
},
@@ -302,7 +328,10 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event
def test_new_ingestion_groups_using_snapshot_bytes_if_possible(raw_snapshot_events, mocker: MockerFixture):
- mocker.patch("posthog.models.utils.UUIDT", return_value="0178495e-8521-0000-8e1c-2652fa57099b")
+ mocker.patch(
+ "posthog.models.utils.UUIDT",
+ return_value="0178495e-8521-0000-8e1c-2652fa57099b",
+ )
mocker.patch("time.time", return_value=0)
almost_too_big_event = {
@@ -350,7 +379,11 @@ def test_new_ingestion_groups_using_snapshot_bytes_if_possible(raw_snapshot_even
},
]
- assert [event["properties"]["$snapshot_bytes"] for event in events] == [106, 1072, 159]
+ assert [event["properties"]["$snapshot_bytes"] for event in events] == [
+ 106,
+ 1072,
+ 159,
+ ]
space_with_headroom = math.ceil((106 + 1072 + 50) * 1.05)
assert list(mock_capture_flow(events, max_size_bytes=space_with_headroom)[1]) == [
diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py
index 61c05d993ee4a..3dca9b46b9fb2 100644
--- a/posthog/session_recordings/test/test_session_recordings.py
+++ b/posthog/session_recordings/test/test_session_recordings.py
@@ -12,13 +12,17 @@
from freezegun import freeze_time
from rest_framework import status
-from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
+from posthog.session_recordings.models.session_recording_event import (
+ SessionRecordingViewed,
+)
from posthog.api.test.test_team import create_team
from posthog.constants import SESSION_RECORDINGS_FILTER_IDS
from posthog.models import Organization, Person, SessionRecording
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
from posthog.models.team import Team
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
@@ -122,10 +126,14 @@ def create_snapshots(
def test_get_session_recordings(self):
user = Person.objects.create(
- team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["user"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
user2 = Person.objects.create(
- team=self.team, distinct_ids=["user2"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["user2"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
base_time = (now() - relativedelta(days=1)).replace(microsecond=0)
session_id_one = f"test_get_session_recordings-1-{uuid.uuid4()}"
@@ -160,7 +168,15 @@ def test_get_session_recordings(self):
False,
user2.pk,
),
- (session_id_one, "user", base_time, base_time + relativedelta(seconds=30), 30, False, user.pk),
+ (
+ session_id_one,
+ "user",
+ base_time,
+ base_time + relativedelta(seconds=30),
+ 30,
+ False,
+ user.pk,
+ ),
]
@patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromReplaySummary")
@@ -170,7 +186,7 @@ def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_
self.client.get(f'/api/projects/{self.team.id}/session_recordings?console_logs=["warn", "error"]')
assert len(mock_summary_lister.call_args_list) == 1
- filter_passed_to_mock: SessionRecordingsFilter = mock_summary_lister.call_args_list[0].kwargs["filter"]
+ filter_passed_to_mock: (SessionRecordingsFilter) = mock_summary_lister.call_args_list[0].kwargs["filter"]
assert filter_passed_to_mock.console_logs_filter == ["warn", "error"]
@snapshot_postgres_queries
@@ -194,7 +210,9 @@ def test_listing_recordings_is_not_nplus1_for_persons(self):
def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user", session_id: str = "1") -> None:
Person.objects.create(
- team=self.team, distinct_ids=[distinct_id], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=[distinct_id],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
self.create_snapshot(distinct_id, session_id, base_time)
self.create_snapshot(distinct_id, session_id, base_time + relativedelta(seconds=10))
@@ -203,10 +221,14 @@ def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user",
def test_session_recordings_dont_leak_teams(self) -> None:
another_team = Team.objects.create(organization=self.organization)
Person.objects.create(
- team=another_team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=another_team,
+ distinct_ids=["user"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
Person.objects.create(
- team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["user"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
base_time = (now() - relativedelta(days=1)).replace(microsecond=0)
@@ -236,7 +258,9 @@ def test_session_recording_for_user_with_multiple_distinct_ids(self) -> None:
def test_viewed_state_of_session_recording_version_1(self):
Person.objects.create(
- team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["u1"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
base_time = (now() - timedelta(days=1)).replace(microsecond=0)
SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id="1")
@@ -252,7 +276,9 @@ def test_viewed_state_of_session_recording_version_1(self):
def test_viewed_state_of_session_recording_version_3(self):
Person.objects.create(
- team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["u1"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
base_time = (now() - timedelta(days=1)).replace(microsecond=0)
session_id_one = "1"
@@ -272,7 +298,9 @@ def test_viewed_state_of_session_recording_version_3(self):
def test_setting_viewed_state_of_session_recording(self):
Person.objects.create(
- team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["u1"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
base_time = (now() - relativedelta(days=1)).replace(microsecond=0)
@@ -326,7 +354,9 @@ def test_setting_viewed_state_of_session_recording(self):
def test_get_single_session_recording_metadata(self):
with freeze_time("2023-01-01T12:00:00.000Z"):
p = Person.objects.create(
- team=self.team, distinct_ids=["d1"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["d1"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
session_recording_id = "session_1"
base_time = (now() - relativedelta(days=1)).replace(microsecond=0)
@@ -370,7 +400,12 @@ def test_get_single_session_recording_metadata(self):
def test_single_session_recording_doesnt_leak_teams(self):
another_team = Team.objects.create(organization=self.organization)
- self.create_snapshot("user", "id_no_team_leaking", now() - relativedelta(days=1), team_id=another_team.pk)
+ self.create_snapshot(
+ "user",
+ "id_no_team_leaking",
+ now() - relativedelta(days=1),
+ team_id=another_team.pk,
+ )
response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/id_no_team_leaking")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@@ -400,7 +435,12 @@ def test_session_recording_doesnt_exist(self):
def test_request_to_another_teams_endpoint_returns_401(self):
org = Organization.objects.create(name="Separate Org")
another_team = Team.objects.create(organization=org)
- self.create_snapshot("user", "id_no_team_leaking", now() - relativedelta(days=1), team_id=another_team.pk)
+ self.create_snapshot(
+ "user",
+ "id_no_team_leaking",
+ now() - relativedelta(days=1),
+ team_id=another_team.pk,
+ )
response = self.client.get(f"/api/projects/{another_team.pk}/session_recordings/id_no_team_leaking")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@@ -413,11 +453,28 @@ def test_request_to_another_teams_endpoint_returns_401(self):
def test_session_ids_filter(self, use_recording_events: bool, api_version: int):
with freeze_time("2020-09-13T12:26:40.000Z"):
Person.objects.create(
- team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["user"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
+ )
+ self.create_snapshot(
+ "user",
+ "1",
+ now() - relativedelta(days=1),
+ use_recording_table=use_recording_events,
+ )
+ self.create_snapshot(
+ "user",
+ "2",
+ now() - relativedelta(days=2),
+ use_recording_table=use_recording_events,
+ )
+ self.create_snapshot(
+ "user",
+ "3",
+ now() - relativedelta(days=3),
+ use_recording_table=use_recording_events,
)
- self.create_snapshot("user", "1", now() - relativedelta(days=1), use_recording_table=use_recording_events)
- self.create_snapshot("user", "2", now() - relativedelta(days=2), use_recording_table=use_recording_events)
- self.create_snapshot("user", "3", now() - relativedelta(days=3), use_recording_table=use_recording_events)
# Fetch playlist
params_string = urlencode({"session_ids": '["1", "2", "3"]', "version": api_version})
@@ -433,7 +490,9 @@ def test_session_ids_filter(self, use_recording_events: bool, api_version: int):
def test_empty_list_session_ids_filter_returns_no_recordings(self):
with freeze_time("2020-09-13T12:26:40.000Z"):
Person.objects.create(
- team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}
+ team=self.team,
+ distinct_ids=["user"],
+ properties={"$some_prop": "something", "email": "bob@bob.com"},
)
self.create_snapshot("user", "1", now() - relativedelta(days=1))
self.create_snapshot("user", "2", now() - relativedelta(days=2))
@@ -455,7 +514,10 @@ def test_delete_session_recording(self):
response = self.client.delete(f"/api/projects/{self.team.id}/session_recordings/1")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
- @patch("ee.session_recordings.session_recording_extensions.object_storage.copy_objects", return_value=2)
+ @patch(
+ "ee.session_recordings.session_recording_extensions.object_storage.copy_objects",
+ return_value=2,
+ )
def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None:
self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk)
@@ -473,7 +535,10 @@ def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None:
# New snapshot loading method
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_default_response(self, mock_list_objects: MagicMock, _mock_exists: MagicMock) -> None:
session_id = str(uuid.uuid4())
@@ -510,7 +575,10 @@ def test_get_snapshots_v2_default_response(self, mock_list_objects: MagicMock, _
mock_list_objects.assert_called_with(f"session_recordings/team_id/{self.team.pk}/session_id/{session_id}/data")
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_from_lts(self, mock_list_objects: MagicMock, _mock_exists: MagicMock) -> None:
session_id = str(uuid.uuid4())
@@ -568,7 +636,10 @@ def list_objects_func(path: str) -> List[str]:
]
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_objects, _mock_exists) -> None:
session_id = str(uuid.uuid4())
@@ -591,12 +662,19 @@ def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_ob
]
}
- @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True)
+ @patch(
+ "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists",
+ return_value=True,
+ )
@patch("posthog.session_recordings.session_recording_api.SessionRecording.get_or_build")
@patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url")
@patch("posthog.session_recordings.session_recording_api.requests")
def test_can_get_session_recording_blob(
- self, _mock_requests, mock_presigned_url, mock_get_session_recording, _mock_exists
+ self,
+ _mock_requests,
+ mock_presigned_url,
+ mock_get_session_recording,
+ _mock_exists,
) -> None:
session_id = str(uuid.uuid4())
"""API will add session_recordings/team_id/{self.team.pk}/session_id/{session_id}"""
@@ -662,7 +740,8 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None:
)
token = self.client.patch(
- f"/api/projects/{self.team.id}/session_recordings/{session_id}/sharing", {"enabled": True}
+ f"/api/projects/{self.team.id}/session_recordings/{session_id}/sharing",
+ {"enabled": True},
).json()["access_token"]
self.client.logout()
@@ -764,7 +843,10 @@ def test_get_matching_events(self) -> None:
session_id = f"test_get_matching_events-1-{uuid.uuid4()}"
self.create_snapshot("user", session_id, base_time)
event_id = _create_event(
- event="$pageview", properties={"$session_id": session_id}, team=self.team, distinct_id=uuid.uuid4()
+ event="$pageview",
+ properties={"$session_id": session_id},
+ team=self.team,
+ distinct_id=uuid.uuid4(),
)
# a non-matching session
diff --git a/posthog/settings/__init__.py b/posthog/settings/__init__.py
index 32b3d87d322ae..099e1812e5311 100644
--- a/posthog/settings/__init__.py
+++ b/posthog/settings/__init__.py
@@ -46,7 +46,10 @@
# https://posthog.com/docs/self-host/configure/environment-variables
debug_queries = get_from_env("DEBUG_QUERIES", False, type_cast=str_to_bool)
disable_paid_fs = get_from_env("DISABLE_PAID_FEATURE_SHOWCASING", False, type_cast=str_to_bool)
-INSTANCE_PREFERENCES = {"debug_queries": debug_queries, "disable_paid_fs": disable_paid_fs}
+INSTANCE_PREFERENCES = {
+ "debug_queries": debug_queries,
+ "disable_paid_fs": disable_paid_fs,
+}
SITE_URL: str = os.getenv("SITE_URL", "http://localhost:8000").rstrip("/")
INSTANCE_TAG: str = os.getenv("INSTANCE_TAG", "none")
@@ -60,7 +63,10 @@
"DISABLE_MMDB", TEST, type_cast=str_to_bool
) # plugin server setting disabling GeoIP feature
PLUGINS_PREINSTALLED_URLS: List[str] = (
- os.getenv("PLUGINS_PREINSTALLED_URLS", "https://www.npmjs.com/package/@posthog/geoip-plugin").split(",")
+ os.getenv(
+ "PLUGINS_PREINSTALLED_URLS",
+ "https://www.npmjs.com/package/@posthog/geoip-plugin",
+ ).split(",")
if not DISABLE_MMDB
else []
)
@@ -79,6 +85,9 @@
# Whether kea should be act in verbose mode
KEA_VERBOSE_LOGGING = get_from_env("KEA_VERBOSE_LOGGING", False, type_cast=str_to_bool)
+# MapLibre Style URL to configure map tile source
+MAPLIBRE_STYLE_URL = get_from_env("MAPLIBRE_STYLE_URL", optional=True)
+
# Only written in specific scripts - do not use outside of them.
PERSON_ON_EVENTS_OVERRIDE = get_from_env("PERSON_ON_EVENTS_OVERRIDE", optional=True, type_cast=str_to_bool)
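A brief aside on the posthog/settings/__init__.py hunks above: the reformatting leaves PLUGINS_PREINSTALLED_URLS semantically unchanged, and the new MAPLIBRE_STYLE_URL setting is declared with optional=True, so a missing environment variable is tolerated rather than treated as a configuration error. Below is a minimal, purely illustrative sketch of that behaviour, assuming the optional branch of get_from_env returns None as the utils.py hunk later in this patch implies (the values are hypothetical, not shipped defaults beyond what the diff shows):

    import os
    from posthog.settings.utils import get_from_env

    # The default PLUGINS_PREINSTALLED_URLS value contains no comma,
    # so split(",") yields a one-element list.
    urls = os.getenv(
        "PLUGINS_PREINSTALLED_URLS",
        "https://www.npmjs.com/package/@posthog/geoip-plugin",
    ).split(",")
    assert urls == ["https://www.npmjs.com/package/@posthog/geoip-plugin"]

    # Optional setting: when the env var is unset, the setting resolves to None
    # instead of failing startup (assuming the optional branch returns None).
    os.environ.pop("MAPLIBRE_STYLE_URL", None)
    assert get_from_env("MAPLIBRE_STYLE_URL", optional=True) is None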
diff --git a/posthog/settings/data_stores.py b/posthog/settings/data_stores.py
index 49fa73f3bd030..9f6f9ca74cab8 100644
--- a/posthog/settings/data_stores.py
+++ b/posthog/settings/data_stores.py
@@ -62,7 +62,10 @@ def postgres_config(host: str) -> dict:
PG_PASSWORD = os.getenv("PGPASSWORD", "posthog")
PG_PORT = os.getenv("PGPORT", "5432")
PG_DATABASE = os.getenv("PGDATABASE", "posthog")
- DATABASE_URL = os.getenv("DATABASE_URL", f"postgres://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}")
+ DATABASE_URL = os.getenv(
+ "DATABASE_URL",
+ f"postgres://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}",
+ )
else:
DATABASE_URL = os.getenv("DATABASE_URL", "")
diff --git a/posthog/settings/dynamic_settings.py b/posthog/settings/dynamic_settings.py
index 94d774b0200f0..b7eb65967fc65 100644
--- a/posthog/settings/dynamic_settings.py
+++ b/posthog/settings/dynamic_settings.py
@@ -223,4 +223,9 @@
# SECRET_SETTINGS can only be updated but will never be exposed through the API (we do store them plain text in the DB)
# On the frontend UI will clearly show which configuration elements are secret and whether they have a set value or not.
-SECRET_SETTINGS = ["EMAIL_HOST_PASSWORD", "SLACK_APP_CLIENT_SECRET", "SLACK_APP_SIGNING_SECRET", "SENTRY_AUTH_TOKEN"]
+SECRET_SETTINGS = [
+ "EMAIL_HOST_PASSWORD",
+ "SLACK_APP_CLIENT_SECRET",
+ "SLACK_APP_SIGNING_SECRET",
+ "SENTRY_AUTH_TOKEN",
+]
diff --git a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py
index bd9edbc6fb03c..b60206d101ae9 100644
--- a/posthog/settings/ingestion.py
+++ b/posthog/settings/ingestion.py
@@ -1,7 +1,8 @@
import os
+
import structlog
-from posthog.settings.utils import get_from_env, get_list
+from posthog.settings.utils import get_from_env, get_list, get_set
from posthog.utils import str_to_bool
logger = structlog.get_logger(__name__)
@@ -32,3 +33,6 @@
REPLAY_RETENTION_DAYS_MIN = get_from_env("REPLAY_RETENTION_DAYS_MIN", type_cast=int, default=30)
REPLAY_RETENTION_DAYS_MAX = get_from_env("REPLAY_RETENTION_DAYS_MAX", type_cast=int, default=90)
+
+NEW_ANALYTICS_CAPTURE_ENDPOINT = os.getenv("NEW_CAPTURE_ENDPOINT", "/i/v0/e/")
+NEW_ANALYTICS_CAPTURE_TEAM_IDS = get_set(os.getenv("NEW_ANALYTICS_CAPTURE_TEAM_IDS", ""))
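The two settings added above drive the gradual rollout of the new capture endpoint: NEW_ANALYTICS_CAPTURE_ENDPOINT defaults to "/i/v0/e/", and NEW_ANALYTICS_CAPTURE_TEAM_IDS is parsed from a comma-separated environment variable into a set of team id strings via the get_set helper introduced later in this patch. A quick sketch of the expected parsing, with hypothetical values:

    from posthog.settings.utils import get_set

    # Whitespace around items is stripped and duplicates collapse into one entry.
    assert get_set("2, 5,5 , 42") == {"2", "5", "42"}

    # An empty or unset variable yields an empty set.
    assert get_set("") == set()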
diff --git a/posthog/settings/sentry.py b/posthog/settings/sentry.py
index 208f3bfd81e2c..f2c36695b62cb 100644
--- a/posthog/settings/sentry.py
+++ b/posthog/settings/sentry.py
@@ -136,7 +136,12 @@ def sentry_init() -> None:
send_default_pii=send_pii,
dsn=os.environ["SENTRY_DSN"],
release=release,
- integrations=[DjangoIntegration(), CeleryIntegration(), RedisIntegration(), sentry_logging],
+ integrations=[
+ DjangoIntegration(),
+ CeleryIntegration(),
+ RedisIntegration(),
+ sentry_logging,
+ ],
request_bodies="always" if send_pii else "never",
sample_rate=1.0,
# Configures the sample rate for error events, in the range of 0.0 to 1.0 (default).
diff --git a/posthog/settings/service_requirements.py b/posthog/settings/service_requirements.py
index 2592d73a1be60..79cdc55d51c67 100644
--- a/posthog/settings/service_requirements.py
+++ b/posthog/settings/service_requirements.py
@@ -8,7 +8,9 @@
SKIP_SERVICE_VERSION_REQUIREMENTS = get_from_env(
- "SKIP_SERVICE_VERSION_REQUIREMENTS", TEST or IS_COLLECT_STATIC or DEBUG, type_cast=str_to_bool
+ "SKIP_SERVICE_VERSION_REQUIREMENTS",
+ TEST or IS_COLLECT_STATIC or DEBUG,
+ type_cast=str_to_bool,
)
if SKIP_SERVICE_VERSION_REQUIREMENTS and not (TEST or DEBUG):
diff --git a/posthog/settings/utils.py b/posthog/settings/utils.py
index 9181b04b1fbbd..6dd22dbf97cf8 100644
--- a/posthog/settings/utils.py
+++ b/posthog/settings/utils.py
@@ -1,5 +1,5 @@
import os
-from typing import Any, Callable, List, Optional
+from typing import Any, Callable, List, Optional, Set
from django.core.exceptions import ImproperlyConfigured
@@ -8,7 +8,13 @@
__all__ = ["get_from_env", "get_list", "str_to_bool"]
-def get_from_env(key: str, default: Any = None, *, optional: bool = False, type_cast: Optional[Callable] = None) -> Any:
+def get_from_env(
+ key: str,
+ default: Any = None,
+ *,
+ optional: bool = False,
+ type_cast: Optional[Callable] = None,
+) -> Any:
value = os.getenv(key)
if value is None or value == "":
if optional:
@@ -26,3 +32,9 @@ def get_list(text: str) -> List[str]:
if not text:
return []
return [item.strip() for item in text.split(",")]
+
+
+def get_set(text: str) -> Set[str]:
+ if not text:
+ return set()
+ return {item.strip() for item in text.split(",")}
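The new get_set helper mirrors the existing get_list directly above it: both split a comma-separated string and strip whitespace around each item, but get_list keeps order and duplicates while get_set deduplicates. A small illustrative comparison:

    from posthog.settings.utils import get_list, get_set

    raw = "a, b,a , c"
    assert get_list(raw) == ["a", "b", "a", "c"]  # order and duplicates preserved
    assert get_set(raw) == {"a", "b", "c"}        # deduplicated, unordered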
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index b062ce632a71a..b846a2486c5df 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -112,6 +112,10 @@
MIDDLEWARE.insert(0, "django_statsd.middleware.StatsdMiddleware")
MIDDLEWARE.append("django_statsd.middleware.StatsdMiddlewareTimer")
+if DEBUG:
+ # Used on local devenv to reverse-proxy all of /i/* to capture-rs on port 3000
+ INSTALLED_APPS.append("revproxy")
+
# Append Enterprise Edition as an app if available
try:
from ee.apps import EnterpriseConfig # noqa: F401
@@ -179,7 +183,12 @@
SOCIAL_AUTH_STRATEGY = "social_django.strategy.DjangoStrategy"
SOCIAL_AUTH_STORAGE = "social_django.models.DjangoStorage"
-SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = ["invite_id", "user_name", "email_opt_in", "organization_name"]
+SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = [
+ "invite_id",
+ "user_name",
+ "email_opt_in",
+ "organization_name",
+]
SOCIAL_AUTH_GITHUB_SCOPE = ["user:email"]
SOCIAL_AUTH_GITHUB_KEY = os.getenv("SOCIAL_AUTH_GITHUB_KEY")
SOCIAL_AUTH_GITHUB_SECRET = os.getenv("SOCIAL_AUTH_GITHUB_SECRET")
@@ -218,7 +227,10 @@
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATIC_URL = "/static/"
-STATICFILES_DIRS = [os.path.join(BASE_DIR, "frontend/dist"), os.path.join(BASE_DIR, "posthog/year_in_posthog/images")]
+STATICFILES_DIRS = [
+ os.path.join(BASE_DIR, "frontend/dist"),
+ os.path.join(BASE_DIR, "posthog/year_in_posthog/images"),
+]
STATICFILES_STORAGE = "whitenoise.storage.ManifestStaticFilesStorage"
AUTH_USER_MODEL = "posthog.User"
diff --git a/posthog/storage/object_storage.py b/posthog/storage/object_storage.py
index 79ea0c90ceb19..a1ff639b1c293 100644
--- a/posthog/storage/object_storage.py
+++ b/posthog/storage/object_storage.py
@@ -111,7 +111,12 @@ def list_objects(self, bucket: str, prefix: str) -> Optional[List[str]]:
else:
return None
except Exception as e:
- logger.error("object_storage.list_objects_failed", bucket=bucket, prefix=prefix, error=e)
+ logger.error(
+ "object_storage.list_objects_failed",
+ bucket=bucket,
+ prefix=prefix,
+ error=e,
+ )
capture_exception(e)
return None
@@ -128,7 +133,13 @@ def read_bytes(self, bucket: str, key: str) -> Optional[bytes]:
s3_response = self.aws_client.get_object(Bucket=bucket, Key=key)
return s3_response["Body"].read()
except Exception as e:
- logger.error("object_storage.read_failed", bucket=bucket, file_name=key, error=e, s3_response=s3_response)
+ logger.error(
+ "object_storage.read_failed",
+ bucket=bucket,
+ file_name=key,
+ error=e,
+ s3_response=s3_response,
+ )
capture_exception(e)
raise ObjectStorageError("read failed") from e
@@ -149,7 +160,13 @@ def write(self, bucket: str, key: str, content: Union[str, bytes], extras: Dict
try:
s3_response = self.aws_client.put_object(Bucket=bucket, Body=content, Key=key, **(extras or {}))
except Exception as e:
- logger.error("object_storage.write_failed", bucket=bucket, file_name=key, error=e, s3_response=s3_response)
+ logger.error(
+ "object_storage.write_failed",
+ bucket=bucket,
+ file_name=key,
+ error=e,
+ s3_response=s3_response,
+ )
capture_exception(e)
raise ObjectStorageError("write failed") from e
@@ -165,7 +182,10 @@ def copy_objects(self, bucket: str, source_prefix: str, target_prefix: str) -> i
return len(source_objects)
except Exception as e:
logger.error(
- "object_storage.copy_objects_failed", source_prefix=source_prefix, target_prefix=target_prefix, error=e
+ "object_storage.copy_objects_failed",
+ source_prefix=source_prefix,
+ target_prefix=target_prefix,
+ error=e,
)
capture_exception(e)
return None
@@ -186,7 +206,11 @@ def object_storage_client() -> ObjectStorageClient:
endpoint_url=settings.OBJECT_STORAGE_ENDPOINT,
aws_access_key_id=settings.OBJECT_STORAGE_ACCESS_KEY_ID,
aws_secret_access_key=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY,
- config=Config(signature_version="s3v4", connect_timeout=1, retries={"max_attempts": 1}),
+ config=Config(
+ signature_version="s3v4",
+ connect_timeout=1,
+ retries={"max_attempts": 1},
+ ),
region_name=settings.OBJECT_STORAGE_REGION,
)
)
@@ -196,7 +220,10 @@ def object_storage_client() -> ObjectStorageClient:
def write(file_name: str, content: Union[str, bytes], extras: Dict | None = None) -> None:
return object_storage_client().write(
- bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name, content=content, extras=extras
+ bucket=settings.OBJECT_STORAGE_BUCKET,
+ key=file_name,
+ content=content,
+ extras=extras,
)
@@ -219,7 +246,9 @@ def list_objects(prefix: str) -> Optional[List[str]]:
def copy_objects(source_prefix: str, target_prefix: str) -> int:
return (
object_storage_client().copy_objects(
- bucket=settings.OBJECT_STORAGE_BUCKET, source_prefix=source_prefix, target_prefix=target_prefix
+ bucket=settings.OBJECT_STORAGE_BUCKET,
+ source_prefix=source_prefix,
+ target_prefix=target_prefix,
)
or 0
)
diff --git a/posthog/storage/test/test_object_storage.py b/posthog/storage/test/test_object_storage.py
index 3544df570d4c2..f24114911ba9e 100644
--- a/posthog/storage/test/test_object_storage.py
+++ b/posthog/storage/test/test_object_storage.py
@@ -10,7 +10,14 @@
OBJECT_STORAGE_ENDPOINT,
OBJECT_STORAGE_SECRET_ACCESS_KEY,
)
-from posthog.storage.object_storage import health_check, read, write, get_presigned_url, list_objects, copy_objects
+from posthog.storage.object_storage import (
+ health_check,
+ read,
+ write,
+ get_presigned_url,
+ list_objects,
+ copy_objects,
+)
from posthog.test.base import APIBaseTest
TEST_BUCKET = "test_storage_bucket"
@@ -113,7 +120,8 @@ def test_can_copy_objects_between_prefixes(self) -> None:
write(file_name, "my content".encode("utf-8"))
copied_count = copy_objects(
- source_prefix=f"{TEST_BUCKET}/{shared_prefix}", target_prefix=f"{TEST_BUCKET}/the_destination/folder"
+ source_prefix=f"{TEST_BUCKET}/{shared_prefix}",
+ target_prefix=f"{TEST_BUCKET}/the_destination/folder",
)
assert copied_count == 3
@@ -137,7 +145,8 @@ def test_can_safely_copy_objects_from_unknown_prefix(self) -> None:
write(file_name, "my content".encode("utf-8"))
copied_count = copy_objects(
- source_prefix=f"nothing_here", target_prefix=f"{TEST_BUCKET}/the_destination/folder"
+ source_prefix=f"nothing_here",
+ target_prefix=f"{TEST_BUCKET}/the_destination/folder",
)
assert copied_count == 0
diff --git a/posthog/tasks/async_migrations.py b/posthog/tasks/async_migrations.py
index 608fead3e07a3..ae505b44131e5 100644
--- a/posthog/tasks/async_migrations.py
+++ b/posthog/tasks/async_migrations.py
@@ -7,7 +7,11 @@
start_async_migration,
update_migration_progress,
)
-from posthog.async_migrations.utils import force_stop_migration, process_error, trigger_migration
+from posthog.async_migrations.utils import (
+ force_stop_migration,
+ process_error,
+ trigger_migration,
+)
from posthog.celery import app
from posthog.models.instance_setting import get_instance_setting
@@ -44,7 +48,11 @@ def check_async_migration_health() -> None:
# failures and successes are handled elsewhere
# pending means we haven't picked up the task yet
# retry is not possible as max_retries == 0
- if migration_task_celery_state not in (states.STARTED, states.PENDING, states.FAILURE):
+ if migration_task_celery_state not in (
+ states.STARTED,
+ states.PENDING,
+ states.FAILURE,
+ ):
return
inspector = app.control.inspect()
diff --git a/posthog/tasks/calculate_cohort.py b/posthog/tasks/calculate_cohort.py
index f3c09f65119e4..1c4492071c78a 100644
--- a/posthog/tasks/calculate_cohort.py
+++ b/posthog/tasks/calculate_cohort.py
@@ -62,7 +62,10 @@ def calculate_cohort_from_list(cohort_id: int, items: List[str]) -> None:
@shared_task(ignore_result=True, max_retries=1)
def insert_cohort_from_insight_filter(cohort_id: int, filter_data: Dict[str, Any]) -> None:
- from posthog.api.cohort import insert_cohort_actors_into_ch, insert_cohort_people_into_pg
+ from posthog.api.cohort import (
+ insert_cohort_actors_into_ch,
+ insert_cohort_people_into_pg,
+ )
cohort = Cohort.objects.get(pk=cohort_id)
diff --git a/posthog/tasks/check_clickhouse_schema_drift.py b/posthog/tasks/check_clickhouse_schema_drift.py
index d4ed1347f4419..bea00530b7eba 100644
--- a/posthog/tasks/check_clickhouse_schema_drift.py
+++ b/posthog/tasks/check_clickhouse_schema_drift.py
@@ -91,7 +91,8 @@ def get_clickhouse_schema_drift(
def check_clickhouse_schema_drift(
- clickhouse_nodes: List[Tuple[str]] = [], clickhouse_schema: List[Tuple[str, str, str]] = []
+ clickhouse_nodes: List[Tuple[str]] = [],
+ clickhouse_schema: List[Tuple[str, str, str]] = [],
) -> None:
try:
if not clickhouse_nodes:
diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py
index 44bd8eae03087..bd7f60188b166 100644
--- a/posthog/tasks/email.py
+++ b/posthog/tasks/email.py
@@ -9,7 +9,15 @@
from posthog.celery import app
from posthog.cloud_utils import is_cloud
from posthog.email import EmailMessage, is_email_available
-from posthog.models import Organization, OrganizationInvite, OrganizationMembership, Plugin, PluginConfig, Team, User
+from posthog.models import (
+ Organization,
+ OrganizationInvite,
+ OrganizationMembership,
+ Plugin,
+ PluginConfig,
+ Team,
+ User,
+)
from posthog.user_permissions import UserPermissions
logger = structlog.get_logger(__name__)
@@ -119,7 +127,10 @@ def send_email_verification(user_id: int, token: str) -> None:
retry_backoff=True,
)
def send_fatal_plugin_error(
- plugin_config_id: int, plugin_config_updated_at: Optional[str], error: str, is_system_error: bool
+ plugin_config_id: int,
+ plugin_config_updated_at: Optional[str],
+ error: str,
+ is_system_error: bool,
) -> None:
if not is_email_available(with_absolute_urls=True):
return
@@ -131,7 +142,12 @@ def send_fatal_plugin_error(
campaign_key=campaign_key,
subject=f"[Alert] {plugin} has been disabled in project {team} due to a fatal error",
template_name="fatal_plugin_error",
- template_context={"plugin": plugin, "team": team, "error": error, "is_system_error": is_system_error},
+ template_context={
+ "plugin": plugin,
+ "team": team,
+ "error": error,
+ "is_system_error": is_system_error,
+ },
)
memberships_to_email = []
memberships = OrganizationMembership.objects.prefetch_related("user", "organization").filter(
@@ -181,13 +197,21 @@ def send_email_change_emails(now_iso: str, user_name: str, old_address: str, new
campaign_key=f"email_change_old_address_{now_iso}",
subject="This is no longer your PostHog account email",
template_name="email_change_old_address",
- template_context={"user_name": user_name, "old_address": old_address, "new_address": new_address},
+ template_context={
+ "user_name": user_name,
+ "old_address": old_address,
+ "new_address": new_address,
+ },
)
message_new_address = EmailMessage(
campaign_key=f"email_change_new_address_{now_iso}",
subject="This is your new PostHog account email",
template_name="email_change_new_address",
- template_context={"user_name": user_name, "old_address": old_address, "new_address": new_address},
+ template_context={
+ "user_name": user_name,
+ "old_address": old_address,
+ "new_address": new_address,
+ },
)
message_old_address.add_recipient(email=old_address)
message_new_address.add_recipient(email=new_address)
diff --git a/posthog/tasks/exporter.py b/posthog/tasks/exporter.py
index 01c85537602f0..ed41d9d5412d0 100644
--- a/posthog/tasks/exporter.py
+++ b/posthog/tasks/exporter.py
@@ -34,14 +34,20 @@
# export_asset is used in chords/groups and so must not ignore its results
-@app.task(autoretry_for=(Exception,), max_retries=5, retry_backoff=True, acks_late=True, ignore_result=False)
+@app.task(
+ autoretry_for=(Exception,),
+ max_retries=5,
+ retry_backoff=True,
+ acks_late=True,
+ ignore_result=False,
+)
def export_asset(exported_asset_id: int, limit: Optional[int] = None) -> None:
from posthog.tasks.exports import csv_exporter, image_exporter
# if Celery is lagging then you can end up with an exported asset that has had a TTL added
# and that TTL has passed, in the exporter we don't care about that.
# the TTL is for later cleanup.
- exported_asset: ExportedAsset = ExportedAsset.objects_including_ttl_deleted.select_related(
+ exported_asset: (ExportedAsset) = ExportedAsset.objects_including_ttl_deleted.select_related(
"insight", "dashboard"
).get(pk=exported_asset_id)
diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py
index 9643244119668..622798774ec1d 100644
--- a/posthog/tasks/exports/csv_exporter.py
+++ b/posthog/tasks/exports/csv_exporter.py
@@ -12,7 +12,12 @@
from posthog.models.exported_asset import ExportedAsset, save_content
from posthog.utils import absolute_uri
from .ordered_csv_renderer import OrderedCsvRenderer
-from ..exporter import EXPORT_FAILED_COUNTER, EXPORT_ASSET_UNKNOWN_COUNTER, EXPORT_SUCCEEDED_COUNTER, EXPORT_TIMER
+from ..exporter import (
+ EXPORT_FAILED_COUNTER,
+ EXPORT_ASSET_UNKNOWN_COUNTER,
+ EXPORT_SUCCEEDED_COUNTER,
+ EXPORT_TIMER,
+)
from ...constants import CSV_EXPORT_LIMIT
logger = structlog.get_logger(__name__)
@@ -128,12 +133,18 @@ def _convert_response_to_csv_data(data: Any) -> List[Any]:
for item in items:
if item.get("date"):
# Dated means we create a grid
- line = {"cohort": item["date"], "cohort size": item["values"][0]["count"]}
+ line = {
+ "cohort": item["date"],
+ "cohort size": item["values"][0]["count"],
+ }
for index, data in enumerate(item["values"]):
line[items[index]["label"]] = data["count"]
else:
# Otherwise we just specify "Period" for titles
- line = {"cohort": item["label"], "cohort size": item["values"][0]["count"]}
+ line = {
+ "cohort": item["label"],
+ "cohort size": item["values"][0]["count"],
+ }
for index, data in enumerate(item["values"]):
line[f"Period {index}"] = data["count"]
@@ -182,7 +193,9 @@ def _export_to_csv(exported_asset: ExportedAsset, limit: int = 1000) -> None:
body = resource.get("body", None)
next_url = None
access_token = encode_jwt(
- {"id": exported_asset.created_by_id}, datetime.timedelta(minutes=15), PosthogJwtAudience.IMPERSONATED_USER
+ {"id": exported_asset.created_by_id},
+ datetime.timedelta(minutes=15),
+ PosthogJwtAudience.IMPERSONATED_USER,
)
while len(all_csv_rows) < CSV_EXPORT_LIMIT:
@@ -243,13 +256,24 @@ def get_limit_param_key(path: str) -> str:
def make_api_call(
- access_token: str, body: Any, limit: int, method: str, next_url: Optional[str], path: str
+ access_token: str,
+ body: Any,
+ limit: int,
+ method: str,
+ next_url: Optional[str],
+ path: str,
) -> requests.models.Response:
request_url: str = absolute_uri(next_url or path)
try:
- url = add_query_params(request_url, {get_limit_param_key(request_url): str(limit), "is_csv_export": "1"})
+ url = add_query_params(
+ request_url,
+ {get_limit_param_key(request_url): str(limit), "is_csv_export": "1"},
+ )
response = requests.request(
- method=method.lower(), url=url, json=body, headers={"Authorization": f"Bearer {access_token}"}
+ method=method.lower(),
+ url=url,
+ json=body,
+ headers={"Authorization": f"Bearer {access_token}"},
)
return response
except Exception as ex:
diff --git a/posthog/tasks/exports/exporter_utils.py b/posthog/tasks/exports/exporter_utils.py
index 38b8979f8f467..a47f43aa41710 100644
--- a/posthog/tasks/exports/exporter_utils.py
+++ b/posthog/tasks/exports/exporter_utils.py
@@ -50,4 +50,8 @@ def log_error_if_site_url_not_reachable() -> None:
if not settings.SITE_URL:
logger.error("site_url_not_set")
elif not is_site_url_reachable():
- logger.error("site_url_not_reachable", site_url=settings.SITE_URL, exception=_site_reachable_exception)
+ logger.error(
+ "site_url_not_reachable",
+ site_url=settings.SITE_URL,
+ exception=_site_reachable_exception,
+ )
diff --git a/posthog/tasks/exports/image_exporter.py b/posthog/tasks/exports/image_exporter.py
index 057239a929f50..1961d9a456053 100644
--- a/posthog/tasks/exports/image_exporter.py
+++ b/posthog/tasks/exports/image_exporter.py
@@ -16,8 +16,16 @@
from webdriver_manager.core.os_manager import ChromeType
from posthog.caching.fetch_from_cache import synchronously_update_cache
-from posthog.models.exported_asset import ExportedAsset, get_public_access_token, save_content
-from posthog.tasks.exporter import EXPORT_SUCCEEDED_COUNTER, EXPORT_FAILED_COUNTER, EXPORT_TIMER
+from posthog.models.exported_asset import (
+ ExportedAsset,
+ get_public_access_token,
+ save_content,
+)
+from posthog.tasks.exporter import (
+ EXPORT_SUCCEEDED_COUNTER,
+ EXPORT_FAILED_COUNTER,
+ EXPORT_TIMER,
+)
from posthog.tasks.exports.exporter_utils import log_error_if_site_url_not_reachable
from posthog.utils import absolute_uri
@@ -111,7 +119,10 @@ def _export_to_png(exported_asset: ExportedAsset) -> None:
def _screenshot_asset(
- image_path: str, url_to_render: str, screenshot_width: ScreenWidth, wait_for_css_selector: CSSSelector
+ image_path: str,
+ url_to_render: str,
+ screenshot_width: ScreenWidth,
+ wait_for_css_selector: CSSSelector,
) -> None:
driver: Optional[webdriver.Chrome] = None
try:
diff --git a/posthog/tasks/exports/ordered_csv_renderer.py b/posthog/tasks/exports/ordered_csv_renderer.py
index c969772e9d815..1b7a16dd83c3e 100644
--- a/posthog/tasks/exports/ordered_csv_renderer.py
+++ b/posthog/tasks/exports/ordered_csv_renderer.py
@@ -16,7 +16,6 @@ def tablize(self, data: Any, header: Any = None, labels: Any = None) -> Generato
header = data.header
if data:
-
# First, flatten the data (i.e., convert it to a list of
# dictionaries that are each exactly one level deep). The key for
# each item designates the name of the column that the item will
diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py
index 62ca713517f0e..65fda3baa0dd4 100644
--- a/posthog/tasks/exports/test/test_csv_exporter.py
+++ b/posthog/tasks/exports/test/test_csv_exporter.py
@@ -19,7 +19,10 @@
from posthog.storage import object_storage
from posthog.storage.object_storage import ObjectStorageError
from posthog.tasks.exports import csv_exporter
-from posthog.tasks.exports.csv_exporter import UnexpectedEmptyJsonResponse, add_query_params
+from posthog.tasks.exports.csv_exporter import (
+ UnexpectedEmptyJsonResponse,
+ add_query_params,
+)
from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events
from posthog.utils import absolute_uri
@@ -257,7 +260,10 @@ def test_limiting_query_as_expected(self) -> None:
with self.settings(SITE_URL="https://app.posthog.com"):
modified_url = add_query_params(absolute_uri(regression_11204), {"limit": "3500"})
actual_bits = self._split_to_dict(modified_url)
- expected_bits = {**self._split_to_dict(regression_11204), **{"limit": "3500"}}
+ expected_bits = {
+ **self._split_to_dict(regression_11204),
+ **{"limit": "3500"},
+ }
assert expected_bits == actual_bits
def test_limiting_existing_limit_query_as_expected(self) -> None:
@@ -265,7 +271,10 @@ def test_limiting_existing_limit_query_as_expected(self) -> None:
url_with_existing_limit = regression_11204 + "&limit=100000"
modified_url = add_query_params(absolute_uri(url_with_existing_limit), {"limit": "3500"})
actual_bits = self._split_to_dict(modified_url)
- expected_bits = {**self._split_to_dict(regression_11204), **{"limit": "3500"}}
+ expected_bits = {
+ **self._split_to_dict(regression_11204),
+ **{"limit": "3500"},
+ }
assert expected_bits == actual_bits
@patch("posthog.tasks.exports.csv_exporter.make_api_call")
@@ -341,7 +350,11 @@ def test_csv_exporter_events_query(self, mocked_uuidt, MAX_SELECT_RETURNED_ROWS=
team=self.team,
export_format=ExportedAsset.ExportFormat.CSV,
export_context={
- "source": {"kind": "EventsQuery", "select": ["event", "*"], "where": [f"distinct_id = '{random_uuid}'"]}
+ "source": {
+ "kind": "EventsQuery",
+ "select": ["event", "*"],
+ "where": [f"distinct_id = '{random_uuid}'"],
+ }
},
)
exported_asset.save()
diff --git a/posthog/tasks/exports/test/test_csv_exporter_renders.py b/posthog/tasks/exports/test/test_csv_exporter_renders.py
index 26cb67c08885a..f17e64635370b 100644
--- a/posthog/tasks/exports/test/test_csv_exporter_renders.py
+++ b/posthog/tasks/exports/test/test_csv_exporter_renders.py
@@ -33,7 +33,9 @@ def test_csv_rendering(mock_settings, mock_request, filename):
fixture = json.load(f)
asset = ExportedAsset(
- team=team, export_format=ExportedAsset.ExportFormat.CSV, export_context={"path": "/api/literally/anything"}
+ team=team,
+ export_format=ExportedAsset.ExportFormat.CSV,
+ export_context={"path": "/api/literally/anything"},
)
asset.save()
diff --git a/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py b/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py
index 70161c42b6244..b78a870e626c4 100644
--- a/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py
+++ b/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py
@@ -16,10 +16,26 @@ def test_sanitize_url_when_provided_path_and_site_url_has_a_port(self) -> None:
assert sanitised == "https://localhost:8000/some/location"
error_test_cases = [
- ("changing scheme", "https://localhost:8000", "http://localhost:8000/some/location"),
- ("changing port", "https://localhost:8000", "https://localhost:8123/some/location"),
- ("changing port and url", "https://something.posthog.com:8000", "https://localhost:8123/some/location"),
- ("changing domain", "https://app.posthog.com", "https://google.com/some/location"),
+ (
+ "changing scheme",
+ "https://localhost:8000",
+ "http://localhost:8000/some/location",
+ ),
+ (
+ "changing port",
+ "https://localhost:8000",
+ "https://localhost:8123/some/location",
+ ),
+ (
+ "changing port and url",
+ "https://something.posthog.com:8000",
+ "https://localhost:8123/some/location",
+ ),
+ (
+ "changing domain",
+ "https://app.posthog.com",
+ "https://google.com/some/location",
+ ),
]
@parameterized.expand(error_test_cases)
diff --git a/posthog/tasks/exports/test/test_image_exporter.py b/posthog/tasks/exports/test/test_image_exporter.py
index 948500a9d77b8..3c3a84133a1b1 100644
--- a/posthog/tasks/exports/test/test_image_exporter.py
+++ b/posthog/tasks/exports/test/test_image_exporter.py
@@ -20,7 +20,11 @@
@patch("posthog.tasks.exports.image_exporter.synchronously_update_cache")
@patch("posthog.tasks.exports.image_exporter._screenshot_asset")
-@patch("posthog.tasks.exports.image_exporter.open", new_callable=mock_open, read_data=b"image_data")
+@patch(
+ "posthog.tasks.exports.image_exporter.open",
+ new_callable=mock_open,
+ read_data=b"image_data",
+)
@patch("os.remove")
class TestImageExporter(APIBaseTest):
exported_asset: ExportedAsset
@@ -28,7 +32,9 @@ class TestImageExporter(APIBaseTest):
def setup_method(self, method):
insight = Insight.objects.create(team=self.team)
asset = ExportedAsset.objects.create(
- team=self.team, export_format=ExportedAsset.ExportFormat.PNG, insight=insight
+ team=self.team,
+ export_format=ExportedAsset.ExportFormat.PNG,
+ insight=insight,
)
self.exported_asset = asset
diff --git a/posthog/tasks/test/test_async_migrations.py b/posthog/tasks/test/test_async_migrations.py
index bb7cfce0797e7..27bb8fc991b8a 100644
--- a/posthog/tasks/test/test_async_migrations.py
+++ b/posthog/tasks/test/test_async_migrations.py
@@ -6,7 +6,10 @@
from celery.result import AsyncResult
from posthog.async_migrations.examples.test_migration import Migration
-from posthog.async_migrations.runner import run_async_migration_next_op, run_async_migration_operations
+from posthog.async_migrations.runner import (
+ run_async_migration_next_op,
+ run_async_migration_operations,
+)
from posthog.async_migrations.test.util import create_async_migration
from posthog.models.async_migration import AsyncMigration, MigrationStatus
from posthog.models.instance_setting import set_instance_setting
@@ -45,7 +48,10 @@ def setUp(self) -> None:
@pytest.mark.ee
@patch.object(AsyncResult, "state", states.STARTED)
@patch("posthog.celery.app.control.inspect", side_effect=inspect_mock)
- @patch("posthog.tasks.async_migrations.run_async_migration.delay", side_effect=run_async_migration_mock)
+ @patch(
+ "posthog.tasks.async_migrations.run_async_migration.delay",
+ side_effect=run_async_migration_mock,
+ )
def test_check_async_migration_health_during_resumable_op(self, _: Any, __: Any) -> None:
"""
Mocks celery tasks and tests that `check_async_migration_health` works as expected
@@ -76,7 +82,10 @@ def test_check_async_migration_health_during_resumable_op(self, _: Any, __: Any)
@pytest.mark.ee
@patch.object(AsyncResult, "state", states.STARTED)
@patch("posthog.celery.app.control.inspect", side_effect=inspect_mock)
- @patch("posthog.tasks.async_migrations.run_async_migration.delay", side_effect=run_async_migration_mock)
+ @patch(
+ "posthog.tasks.async_migrations.run_async_migration.delay",
+ side_effect=run_async_migration_mock,
+ )
def test_check_async_migration_health_during_non_resumable_op(self, _: Any, __: Any) -> None:
"""
        Same as above, but now we find a non-resumable op.
diff --git a/posthog/tasks/test/test_calculate_cohort.py b/posthog/tasks/test/test_calculate_cohort.py
index 749387c2a6344..0c81076c8fa81 100644
--- a/posthog/tasks/test/test_calculate_cohort.py
+++ b/posthog/tasks/test/test_calculate_cohort.py
@@ -71,7 +71,10 @@ def test_calculate_cohorts(self) -> None:
team=self.team,
filters={
"groups": [
- {"properties": [{"key": "id", "type": "cohort", "value": 267}], "rollout_percentage": None}
+ {
+ "properties": [{"key": "id", "type": "cohort", "value": 267}],
+ "rollout_percentage": None,
+ }
]
},
key="default-flag-1",
diff --git a/posthog/tasks/test/test_check_clickhouse_schema_drift.py b/posthog/tasks/test/test_check_clickhouse_schema_drift.py
index 831e4ffbc1c3d..8d38d134cac40 100644
--- a/posthog/tasks/test/test_check_clickhouse_schema_drift.py
+++ b/posthog/tasks/test/test_check_clickhouse_schema_drift.py
@@ -2,7 +2,10 @@
from clickhouse_driver.errors import Error as ClickhouseError
-from posthog.tasks.check_clickhouse_schema_drift import check_clickhouse_schema_drift, get_clickhouse_schema_drift
+from posthog.tasks.check_clickhouse_schema_drift import (
+ check_clickhouse_schema_drift,
+ get_clickhouse_schema_drift,
+)
def test_get_clickhouse_schema_drift() -> None:
diff --git a/posthog/tasks/test/test_email.py b/posthog/tasks/test/test_email.py
index 9ef1f27907908..a728879586aad 100644
--- a/posthog/tasks/test/test_email.py
+++ b/posthog/tasks/test/test_email.py
@@ -28,7 +28,10 @@ def create_org_team_and_user(creation_date: str, email: str, ingested_event: boo
org = Organization.objects.create(name="too_late_org")
Team.objects.create(organization=org, name="Default Project", ingested_event=ingested_event)
user = User.objects.create_and_join(
- organization=org, email=email, password=None, level=OrganizationMembership.Level.OWNER
+ organization=org,
+ email=email,
+ password=None,
+ level=OrganizationMembership.Level.OWNER,
)
return org, user
@@ -47,7 +50,11 @@ def setUpTestData(cls) -> None:
set_instance_setting("EMAIL_HOST", "fake_host")
set_instance_setting("EMAIL_ENABLED", True)
create_org_team_and_user("2022-01-01 00:00:00", "too_late_user@posthog.com")
- create_org_team_and_user("2022-01-02 00:00:00", "ingested_event_in_range_user@posthog.com", ingested_event=True)
+ create_org_team_and_user(
+ "2022-01-02 00:00:00",
+ "ingested_event_in_range_user@posthog.com",
+ ingested_event=True,
+ )
create_org_team_and_user("2022-01-03 00:00:00", "too_early_user@posthog.com")
def test_send_invite(self, MockEmailMessage: MagicMock) -> None:
@@ -68,7 +75,10 @@ def test_send_member_join(self, MockEmailMessage: MagicMock) -> None:
org, user = create_org_team_and_user("2022-01-02 00:00:00", "admin@posthog.com")
user = User.objects.create_and_join(
- organization=org, email="new-user@posthog.com", password=None, level=OrganizationMembership.Level.MEMBER
+ organization=org,
+ email="new-user@posthog.com",
+ password=None,
+ level=OrganizationMembership.Level.MEMBER,
)
send_member_join(user.uuid, org.id)
diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py
index ec758a24fd548..715c3829855d2 100644
--- a/posthog/tasks/test/test_usage_report.py
+++ b/posthog/tasks/test/test_usage_report.py
@@ -28,7 +28,9 @@
from posthog.models.plugin import PluginConfig
from posthog.models.sharing_configuration import SharingConfiguration
from posthog.schema import EventsQuery
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import (
+ produce_replay_summary,
+)
from posthog.tasks.usage_report import (
_get_all_org_reports,
_get_all_usage_data_as_team_rows,
@@ -109,7 +111,10 @@ def _create_sample_usage_data(self) -> None:
created_by=self.user,
)
SharingConfiguration.objects.create(
- team=self.org_1_team_1, dashboard=dashboard, access_token="testtoken", enabled=True
+ team=self.org_1_team_1,
+ dashboard=dashboard,
+ access_token="testtoken",
+ enabled=True,
)
FeatureFlag.objects.create(
@@ -184,7 +189,10 @@ def _create_sample_usage_data(self) -> None:
GroupTypeMapping.objects.create(team=self.org_1_team_1, group_type="organization", group_type_index=0)
GroupTypeMapping.objects.create(team=self.org_1_team_1, group_type="company", group_type_index=1)
create_group(
- team_id=self.org_1_team_1.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}
+ team_id=self.org_1_team_1.pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={"industry": "finance"},
)
create_group(
team_id=self.org_1_team_1.pk,
@@ -323,7 +331,10 @@ def _test_usage_report(self) -> List[dict]:
period_start, period_end = period
all_reports = _get_all_org_reports(period_start, period_end)
report = _get_full_org_usage_report_as_dict(
- _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ _get_full_org_usage_report(
+ all_reports[str(self.organization.id)],
+ get_instance_metadata(period),
+ )
)
assert report["table_sizes"]
@@ -349,7 +360,10 @@ def _test_usage_report(self) -> List[dict]:
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
"table_sizes": report["table_sizes"],
- "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
+ "plugins_installed": {
+ "Installed and enabled": 1,
+ "Installed but not enabled": 1,
+ },
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
"event_count_lifetime": 55,
@@ -480,7 +494,10 @@ def _test_usage_report(self) -> List[dict]:
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
"table_sizes": report["table_sizes"],
- "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
+ "plugins_installed": {
+ "Installed and enabled": 1,
+ "Installed but not enabled": 1,
+ },
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
"event_count_lifetime": 11,
@@ -571,7 +588,8 @@ def _test_usage_report(self) -> List[dict]:
for expectation in expectations:
report = _get_full_org_usage_report_as_dict(
_get_full_org_usage_report(
- all_reports[expectation["organization_id"]], get_instance_metadata(period)
+ all_reports[expectation["organization_id"]],
+ get_instance_metadata(period),
)
)
assert report == expectation
@@ -634,7 +652,11 @@ def test_usage_report_hogql_queries(self) -> None:
sync_execute("SYSTEM FLUSH LOGS")
sync_execute("TRUNCATE TABLE system.query_log")
- execute_hogql_query(query="select * from events limit 200", team=self.team, query_type="HogQLQuery")
+ execute_hogql_query(
+ query="select * from events limit 200",
+ team=self.team,
+ query_type="HogQLQuery",
+ )
EventsQueryRunner(query=EventsQuery(select=["event"], limit=50), team=self.team).calculate()
sync_execute("SYSTEM FLUSH LOGS")
@@ -881,7 +903,10 @@ def test_usage_report_survey_responses(self, billing_task_mock: MagicMock, posth
_create_event(
distinct_id="3",
event="survey sent",
- properties={"$survey_id": "seeeep-o12-as124", "$survey_response": "correct"},
+ properties={
+ "$survey_id": "seeeep-o12-as124",
+ "$survey_response": "correct",
+ },
timestamp=now() - relativedelta(hours=i),
team=self.analytics_team,
)
@@ -890,7 +915,10 @@ def test_usage_report_survey_responses(self, billing_task_mock: MagicMock, posth
_create_event(
distinct_id="4",
event="survey sent",
- properties={"$survey_id": "see22eep-o12-as124", "$survey_response": "correct"},
+ properties={
+ "$survey_id": "see22eep-o12-as124",
+ "$survey_response": "correct",
+ },
timestamp=now() - relativedelta(hours=i),
team=self.org_1_team_1,
)
@@ -958,17 +986,42 @@ def setUp(self) -> None:
self.team2 = Team.objects.create(organization=self.organization)
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z")
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-08T14:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-09T12:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-09T13:01:01Z",
+ )
_create_event(
event="$$internal_metrics_shouldnt_be_billed",
team=self.team,
distinct_id=1,
timestamp="2021-10-09T13:01:01Z",
)
- _create_event(event="$pageview", team=self.team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z")
+ _create_event(
+ event="$pageview",
+ team=self.team2,
+ distinct_id=1,
+ timestamp="2021-10-09T14:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-10T14:01:01Z",
+ )
flush_persons_and_events()
TEST_clear_instance_license_cache()
@@ -1041,7 +1094,10 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) ->
period_start, period_end = period
all_reports = _get_all_org_reports(period_start, period_end)
full_report_as_dict = _get_full_org_usage_report_as_dict(
- _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ _get_full_org_usage_report(
+ all_reports[str(self.organization.id)],
+ get_instance_metadata(period),
+ )
)
send_all_org_usage_reports(dry_run=False)
license = License.objects.first()
@@ -1057,7 +1113,10 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) ->
self.user.distinct_id,
"organization usage report",
{**full_report_as_dict, "scope": "user"},
- groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)},
+ groups={
+ "instance": "http://localhost:8000",
+ "organization": str(self.organization.id),
+ },
timestamp=None,
)
@@ -1134,7 +1193,13 @@ def test_capture_event_called_with_string_timestamp(self, mock_client: MagicMock
organization = Organization.objects.create()
mock_posthog = MagicMock()
mock_client.return_value = mock_posthog
- capture_event(mock_client, "test event", organization.id, {"prop1": "val1"}, "2021-10-10T23:01:00.00Z")
+ capture_event(
+ mock_client,
+ "test event",
+ organization.id,
+ {"prop1": "val1"},
+ "2021-10-10T23:01:00.00Z",
+ )
assert mock_client.capture.call_args[1]["timestamp"] == datetime(2021, 10, 10, 23, 1, tzinfo=tzutc())
@@ -1158,11 +1223,36 @@ class SendUsageNoLicenseTest(APIBaseTest):
def test_no_license(self, mock_post: MagicMock, mock_client: MagicMock) -> None:
TEST_clear_instance_license_cache()
# Same test, we just don't include the LicensedTestMixin so no license
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T14:01:01Z")
- _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z")
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-08T14:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-09T12:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-09T13:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-09T14:01:01Z",
+ )
+ _create_event(
+ event="$pageview",
+ team=self.team,
+ distinct_id=1,
+ timestamp="2021-10-10T14:01:01Z",
+ )
flush_persons_and_events()
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index b150a75f88f12..a9a06ecbff7c5 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -41,7 +41,13 @@
from posthog.models.team.team import Team
from posthog.models.utils import namedtuplefetchall
from posthog.settings import CLICKHOUSE_CLUSTER, INSTANCE_TAG
-from posthog.utils import get_helm_info_env, get_instance_realm, get_instance_region, get_machine_id, get_previous_day
+from posthog.utils import (
+ get_helm_info_env,
+ get_instance_realm,
+ get_instance_region,
+ get_machine_id,
+ get_previous_day,
+)
logger = structlog.get_logger(__name__)
@@ -174,7 +180,10 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata
metadata = InstanceMetadata(
deployment_infrastructure=os.getenv("DEPLOYMENT", "unknown"),
realm=realm,
- period={"start_inclusive": period_start.isoformat(), "end_inclusive": period_end.isoformat()},
+ period={
+ "start_inclusive": period_start.isoformat(),
+ "end_inclusive": period_end.isoformat(),
+ },
site_url=settings.SITE_URL,
product=get_product_name(realm, has_license),
# Non-cloud vars
@@ -197,7 +206,12 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata
metadata.users_who_logged_in = [
{"id": user.id, "distinct_id": user.distinct_id}
if user.anonymize_data
- else {"id": user.id, "distinct_id": user.distinct_id, "first_name": user.first_name, "email": user.email}
+ else {
+ "id": user.id,
+ "distinct_id": user.distinct_id,
+ "first_name": user.first_name,
+ "email": user.email,
+ }
for user in User.objects.filter(is_active=True, last_login__gte=period_start, last_login__lte=period_end)
]
metadata.users_who_logged_in_count = len(metadata.users_who_logged_in)
@@ -205,8 +219,17 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata
metadata.users_who_signed_up = [
{"id": user.id, "distinct_id": user.distinct_id}
if user.anonymize_data
- else {"id": user.id, "distinct_id": user.distinct_id, "first_name": user.first_name, "email": user.email}
- for user in User.objects.filter(is_active=True, date_joined__gte=period_start, date_joined__lte=period_end)
+ else {
+ "id": user.id,
+ "distinct_id": user.distinct_id,
+ "first_name": user.first_name,
+ "email": user.email,
+ }
+ for user in User.objects.filter(
+ is_active=True,
+ date_joined__gte=period_start,
+ date_joined__lte=period_end,
+ )
]
metadata.users_who_signed_up_count = len(metadata.users_who_signed_up)
@@ -243,7 +266,8 @@ def get_org_owner_or_first_user(organization_id: str) -> Optional[User]:
user = membership.user
else:
capture_exception(
- Exception("No user found for org while generating report"), {"org": {"organization_id": organization_id}}
+ Exception("No user found for org while generating report"),
+ {"org": {"organization_id": organization_id}},
)
return user
@@ -288,7 +312,12 @@ def send_report_to_billing_service(org_id: str, report: Dict[str, Any]) -> None:
logger.error(f"UsageReport failed sending to Billing for organization: {organization.id}: {err}")
capture_exception(err)
pha_client = Client("sTMFPsFhdP1Ssg")
- capture_event(pha_client, f"organization usage report to billing service failure", org_id, {"err": str(err)})
+ capture_event(
+ pha_client,
+ f"organization usage report to billing service failure",
+ org_id,
+ {"err": str(err)},
+ )
raise err
@@ -496,7 +525,12 @@ def get_teams_with_hogql_metric(
AND access_method = %(access_method)s
GROUP BY team_id
""",
- {"begin": begin, "end": end, "query_types": query_types, "access_method": access_method},
+ {
+ "begin": begin,
+ "end": end,
+ "query_types": query_types,
+ "access_method": access_method,
+ },
workload=Workload.OFFLINE,
settings=CH_BILLING_SETTINGS,
)
@@ -559,7 +593,10 @@ def get_teams_with_survey_responses_count_in_period(
@app.task(ignore_result=True, max_retries=0)
def capture_report(
- capture_event_name: str, org_id: str, full_report_dict: Dict[str, Any], at_date: Optional[datetime] = None
+ capture_event_name: str,
+ org_id: str,
+ full_report_dict: Dict[str, Any],
+ at_date: Optional[datetime] = None,
) -> None:
pha_client = Client("sTMFPsFhdP1Ssg")
try:
@@ -821,7 +858,10 @@ def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounter
def _add_team_report_to_org_reports(
- org_reports: Dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime
+ org_reports: Dict[str, OrgReport],
+ team: Team,
+ team_report: UsageReportCounters,
+ period_start: datetime,
) -> None:
org_id = str(team.organization.id)
if org_id not in org_reports:
diff --git a/posthog/tasks/user_identify.py b/posthog/tasks/user_identify.py
index 9235410582eca..93dd0c851dbe8 100644
--- a/posthog/tasks/user_identify.py
+++ b/posthog/tasks/user_identify.py
@@ -6,6 +6,9 @@
@app.task(ignore_result=True)
def identify_task(user_id: int) -> None:
-
user = User.objects.get(id=user_id)
- posthoganalytics.capture(user.distinct_id, "update user properties", {"$set": user.get_analytics_metadata()})
+ posthoganalytics.capture(
+ user.distinct_id,
+ "update user properties",
+ {"$set": user.get_analytics_metadata()},
+ )
diff --git a/posthog/tasks/verify_persons_data_in_sync.py b/posthog/tasks/verify_persons_data_in_sync.py
index d5cf24d9ad220..8aea487d96279 100644
--- a/posthog/tasks/verify_persons_data_in_sync.py
+++ b/posthog/tasks/verify_persons_data_in_sync.py
@@ -53,7 +53,9 @@ def verify_persons_data_in_sync(
max_pk = Person.objects.filter(created_at__lte=now() - period_start).latest("id").id
person_data = list(
Person.objects.filter(
- pk__lte=max_pk, pk__gte=max_pk - LIMIT * 5, created_at__gte=now() - period_end
+ pk__lte=max_pk,
+ pk__gte=max_pk - LIMIT * 5,
+ created_at__gte=now() - period_end,
).values_list("id", "uuid", "team_id")[:limit]
)
person_data.sort(key=lambda row: row[2]) # keep persons from same team together
@@ -94,11 +96,15 @@ def _team_integrity_statistics(person_data: List[Any]) -> Counter:
)
ch_persons = _index_by(
- sync_execute(GET_PERSON_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}), lambda row: row[0]
+ sync_execute(GET_PERSON_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}),
+ lambda row: row[0],
)
ch_distinct_ids_mapping = _index_by(
- sync_execute(GET_DISTINCT_IDS_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}),
+ sync_execute(
+ GET_DISTINCT_IDS_CH_QUERY,
+ {"person_ids": person_uuids, "team_ids": team_ids},
+ ),
lambda row: row[1],
flat=False,
)
diff --git a/posthog/templates/head.html b/posthog/templates/head.html
index ed0d359faa014..7ca827ae15914 100644
--- a/posthog/templates/head.html
+++ b/posthog/templates/head.html
@@ -36,6 +36,11 @@
window.SENTRY_ENVIRONMENT = '{{ sentry_environment | escapejs }}';
{% endif %}
+{% if js_maplibre_style_url %}
+ <script>
+ window.JS_MAPLIBRE_STYLE_URL = '{{ js_maplibre_style_url | escapejs }}';
+ </script>
+{% endif %}