diff --git a/.storybook/app-context.ts b/.storybook/app-context.ts index 6f52182cec58c..a85f06aa80e79 100644 --- a/.storybook/app-context.ts +++ b/.storybook/app-context.ts @@ -1,4 +1,4 @@ -import { MOCK_DEFAULT_TEAM } from 'lib/api.mock' +import { MOCK_DEFAULT_TEAM, MOCK_DEFAULT_PROJECT } from 'lib/api.mock' import { AppContext } from '~/types' export const getStorybookAppContext = (): AppContext => ({ @@ -6,6 +6,7 @@ export const getStorybookAppContext = (): AppContext => ({ // Ideally we wouldn't set `current_team` here, the same way we don't set `current_user`, but unfortunately // as of March 2024, a bunch of logics make the assumption that this is set, via `AppConfig` current_team: MOCK_DEFAULT_TEAM, + current_project: MOCK_DEFAULT_PROJECT, current_user: undefined as any, // `undefined` triggers a fetch and lets us mock the data default_event_name: '$pageview', persisted_feature_flags: [], diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 6062516c4ccef..a053dd1ac9c1c 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -777,6 +777,7 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma # Create a demo project self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() + self.assertEqual(Team.objects.count(), 1) response = self.client.post("/api/projects/", {"name": "Test", "is_demo": True}) self.assertEqual(response.status_code, 201) self.assertEqual(Team.objects.count(), 3) diff --git a/ee/api/test/test_organization.py b/ee/api/test/test_organization.py index ca0d68413cf4c..ed47558d1efc4 100644 --- a/ee/api/test/test_organization.py +++ b/ee/api/test/test_organization.py @@ -28,7 +28,7 @@ def test_create_organization(self): OrganizationMembership.Level.OWNER, ) - @patch("secrets.choice", return_value="Y") + @patch("posthog.models.utils.generate_random_short_suffix", return_value="YYYY") def 
test_create_two_similarly_named_organizations(self, mock_choice): response = self.client.post( "/api/organizations/", diff --git a/ee/api/test/test_project.py b/ee/api/test/test_project.py new file mode 100644 index 0000000000000..5061bad71d32e --- /dev/null +++ b/ee/api/test/test_project.py @@ -0,0 +1,48 @@ +from ee.api.test.test_team import team_enterprise_api_test_factory +from posthog.api.test.test_team import EnvironmentToProjectRewriteClient +from posthog.models.organization import Organization, OrganizationMembership +from posthog.models.project import Project +from posthog.models.team.team import Team + + +class TestProjectEnterpriseAPI(team_enterprise_api_test_factory()): + """ + We inherit from TestTeamEnterpriseAPI, as previously /api/projects/ referred to the Team model, which used to mean "project". + Now as Team means "environment" and Project is separate, we must ensure backward compatibility of /api/projects/. + At the same time, this class is where we can continue adding `Project`-specific API tests. 
+ """ + + client_class = EnvironmentToProjectRewriteClient + + def test_user_create_project_for_org_via_url(self): + # Set both current and new org to high enough membership level + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + current_org, _, _ = Organization.objects.bootstrap(self.user, name="other_org") + other_org = self.organization # Bootstrapping above sets it to the current org + assert Team.objects.count() == 2 + assert Project.objects.count() == 2 + + assert current_org.id == self.user.current_organization_id + response = self.client.post(f"/api/organizations/{current_org.id}/projects/", {"name": "Via current org"}) + self.assertEqual(response.status_code, 201) + assert response.json()["organization"] == str(current_org.id) + assert Team.objects.count() == 3 + assert Project.objects.count() == 3 + + assert other_org.id != self.user.current_organization_id + response = self.client.post(f"/api/organizations/{other_org.id}/projects/", {"name": "Via path org"}) + self.assertEqual(response.status_code, 201, msg=response.json()) + assert response.json()["organization"] == str(other_org.id) + assert Team.objects.count() == 4 + assert Project.objects.count() == 4 + + def test_user_cannot_create_project_in_org_without_access(self): + _, _, _ = Organization.objects.bootstrap(self.user, name="other_org") + other_org = self.organization # Bootstrapping above sets it to the current org + + assert other_org.id != self.user.current_organization_id + response = self.client.post(f"/api/organizations/{other_org.id}/projects/", {"name": "Via path org"}) + self.assertEqual(response.status_code, 403, msg=response.json()) + assert response.json() == self.permission_denied_response("Your organization access level is insufficient.") diff --git a/ee/api/test/test_team.py b/ee/api/test/test_team.py index db9fb7efdbf37..d90f699fcd5c6 100644 --- a/ee/api/test/test_team.py +++ b/ee/api/test/test_team.py @@ -8,519 
+8,503 @@ from ee.api.test.base import APILicensedTest from ee.models.explicit_team_membership import ExplicitTeamMembership from posthog.models.organization import Organization, OrganizationMembership +from posthog.models.project import Project from posthog.models.team import Team from posthog.models.user import User from posthog.test.base import FuzzyInt -class TestProjectEnterpriseAPI(APILicensedTest): - CLASS_DATA_LEVEL_SETUP = False - - # Creating projects - - def test_create_project(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - response = self.client.post("/api/projects/", {"name": "Test"}) - self.assertEqual(response.status_code, 201) - self.assertEqual(Team.objects.count(), 2) - response_data = response.json() - self.assertDictContainsSubset( - { - "name": "Test", - "access_control": False, - "effective_membership_level": OrganizationMembership.Level.ADMIN, - }, - response_data, - ) - self.assertEqual(self.organization.teams.count(), 2) - - def test_non_admin_cannot_create_project(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - count = Team.objects.count() - response = self.client.post("/api/projects/", {"name": "Test"}) - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertEqual(Team.objects.count(), count) - self.assertEqual( - response.json(), - self.permission_denied_response("Your organization access level is insufficient."), - ) - - def test_create_demo_project(self, *args): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": True}) - self.assertEqual(Team.objects.count(), 3) - self.assertEqual(response.status_code, 201) - response_data = response.json() - self.assertDictContainsSubset( - { - "name": "Hedgebox", - "access_control": False, - 
"effective_membership_level": OrganizationMembership.Level.ADMIN, - }, - response_data, - ) - self.assertEqual(self.organization.teams.count(), 2) - - def test_create_two_demo_projects(self, *args): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": True}) - self.assertEqual(Team.objects.count(), 3) - self.assertEqual(response.status_code, 201) - response_data = response.json() - self.assertDictContainsSubset( - { - "name": "Hedgebox", - "access_control": False, - "effective_membership_level": OrganizationMembership.Level.ADMIN, - }, - response_data, - ) - response_2 = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": True}) - self.assertEqual(Team.objects.count(), 3) - response_2_data = response_2.json() - self.assertDictContainsSubset( - { - "type": "authentication_error", - "code": "permission_denied", - "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects.", - }, - response_2_data, - ) - self.assertEqual(self.organization.teams.count(), 2) - - def test_user_that_does_not_belong_to_an_org_cannot_create_a_project(self): - user = User.objects.create(email="no_org@posthog.com") - self.client.force_login(user) - - response = self.client.post("/api/projects/", {"name": "Test"}) - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND, response.content) - self.assertEqual( - response.json(), - { - "type": "invalid_request", - "code": "not_found", - "detail": "You need to belong to an organization.", - "attr": None, - }, - ) - - def test_user_create_project_for_org_via_url(self): - # Set both current and new org to high enough membership level - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - current_org, _, _ = Organization.objects.bootstrap(self.user, name="other_org") - other_org = self.organization # 
Bootstrapping above sets it to the current org - - assert current_org.id == self.user.current_organization_id - response = self.client.post(f"/api/organizations/{current_org.id}/projects/", {"name": "Via current org"}) - self.assertEqual(response.status_code, 201) - assert response.json()["organization"] == str(current_org.id) - - assert other_org.id != self.user.current_organization_id - response = self.client.post(f"/api/organizations/{other_org.id}/projects/", {"name": "Via path org"}) - self.assertEqual(response.status_code, 201, msg=response.json()) - assert response.json()["organization"] == str(other_org.id) - - def test_user_cannot_create_project_in_org_without_access(self): - _, _, _ = Organization.objects.bootstrap(self.user, name="other_org") - other_org = self.organization # Bootstrapping above sets it to the current org - - assert other_org.id != self.user.current_organization_id - response = self.client.post(f"/api/organizations/{other_org.id}/projects/", {"name": "Via path org"}) - self.assertEqual(response.status_code, 403, msg=response.json()) - assert response.json() == self.permission_denied_response("Your organization access level is insufficient.") - - # Deleting projects - - def test_delete_team_as_org_admin_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - response = self.client.delete(f"/api/projects/{self.team.id}") - self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 0) - - def test_delete_team_as_org_member_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - response = self.client.delete(f"/api/projects/{self.team.id}") - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) - - def 
test_delete_open_team_as_org_member_but_project_admin_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.ADMIN, - ) - response = self.client.delete(f"/api/projects/{self.team.id}") - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) - - def test_delete_private_team_as_org_member_but_project_admin_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.ADMIN, - ) - response = self.client.delete(f"/api/projects/{self.team.id}") - self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 0) - - def test_delete_second_team_as_org_admin_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - team = Team.objects.create(organization=self.organization) - response = self.client.delete(f"/api/projects/{team.id}") - self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) - - def test_no_delete_team_not_administrating_organization(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - team = Team.objects.create(organization=self.organization) - response = self.client.delete(f"/api/projects/{team.id}") - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - 
self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) - - def test_no_delete_team_not_belonging_to_organization(self): - team_1 = Organization.objects.bootstrap(None)[2] - response = self.client.delete(f"/api/projects/{team_1.id}") - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - self.assertTrue(Team.objects.filter(id=team_1.id).exists()) - organization, _, _ = User.objects.bootstrap("X", "someone@x.com", "qwerty", "Someone") - team_2 = Team.objects.create(organization=organization) - response = self.client.delete(f"/api/projects/{team_2.id}") - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - self.assertEqual(Team.objects.filter(organization=organization).count(), 2) - - # Updating projects - - def test_rename_project_as_org_member_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - - response = self.client.patch(f"/api/projects/@current/", {"name": "Erinaceus europaeus"}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(self.team.name, "Erinaceus europaeus") - - def test_rename_private_project_as_org_member_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - - response = self.client.patch(f"/api/projects/@current/", {"name": "Acherontia atropos"}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertEqual(self.team.name, "Default project") - - def test_rename_private_project_current_as_org_outsider_forbidden(self): - self.organization_membership.delete() - - response = self.client.patch(f"/api/projects/@current/", {"name": "Acherontia atropos"}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - - def 
test_rename_private_project_id_as_org_outsider_forbidden(self): - self.organization_membership.delete() - - response = self.client.patch(f"/api/projects/{self.team.id}/", {"name": "Acherontia atropos"}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - - def test_rename_private_project_as_org_member_and_project_member_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.MEMBER, - ) - - response = self.client.patch(f"/api/projects/@current/", {"name": "Acherontia atropos"}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(self.team.name, "Acherontia atropos") - - def test_enable_access_control_as_org_member_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - - response = self.client.patch(f"/api/projects/@current/", {"access_control": True}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertFalse(self.team.access_control) - - def test_enable_access_control_as_org_admin_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - response = self.client.patch(f"/api/projects/@current/", {"access_control": True}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertTrue(self.team.access_control) - - def test_enable_access_control_as_org_member_and_project_admin_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - ExplicitTeamMembership.objects.create( - team=self.team, - 
parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.ADMIN, - ) - - response = self.client.patch(f"/api/projects/@current/", {"access_control": True}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertFalse(self.team.access_control) - - def test_disable_access_control_as_org_member_forbidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - - response = self.client.patch(f"/api/projects/@current/", {"access_control": False}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertTrue(self.team.access_control) - - def test_disable_access_control_as_org_member_and_project_admin_forbidden(self): - # Only org-wide admins+ should be allowed to make the project open, - # because if a project-specific admin who is only an org member did it, they wouldn't be able to reenable it - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.ADMIN, - ) - - response = self.client.patch(f"/api/projects/@current/", {"access_control": False}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertTrue(self.team.access_control) - - def test_disable_access_control_as_org_admin_allowed(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - self.team.access_control = True - self.team.save() - - response = self.client.patch(f"/api/projects/@current/", {"access_control": False}) - self.team.refresh_from_db() - - self.assertEqual(response.status_code, HTTP_200_OK) - 
self.assertFalse(self.team.access_control) - - def test_can_update_and_retrieve_person_property_names_excluded_from_correlation(self): - response = self.client.patch( - f"/api/projects/@current/", - {"correlation_config": {"excluded_person_property_names": ["$os"]}}, - ) - self.assertEqual(response.status_code, HTTP_200_OK) - - response = self.client.get(f"/api/projects/@current/") - self.assertEqual(response.status_code, HTTP_200_OK) - - response_data = response.json() - - self.assertDictContainsSubset( - {"correlation_config": {"excluded_person_property_names": ["$os"]}}, - response_data, - ) - - # Fetching projects - - def test_fetch_team_as_org_admin_works(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertDictContainsSubset( - { - "name": "Default project", - "access_control": False, - "effective_membership_level": OrganizationMembership.Level.ADMIN, - }, - response_data, - ) - - def test_fetch_team_as_org_member_works(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertDictContainsSubset( - { - "name": "Default project", - "access_control": False, - "effective_membership_level": OrganizationMembership.Level.MEMBER, - }, - response_data, - ) - - def test_fetch_private_team_as_org_member(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) 
- self.assertEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), - response_data, - ) - - def test_fetch_private_team_as_org_member_and_project_member(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.MEMBER, - ) - - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertDictContainsSubset( - { - "name": "Default project", - "access_control": True, - "effective_membership_level": OrganizationMembership.Level.MEMBER, - }, - response_data, - ) - - def test_fetch_private_team_as_org_member_and_project_admin(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - self.team.access_control = True - self.team.save() - ExplicitTeamMembership.objects.create( - team=self.team, - parent_membership=self.organization_membership, - level=ExplicitTeamMembership.Level.ADMIN, - ) - - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_200_OK) - self.assertDictContainsSubset( - { - "name": "Default project", - "access_control": True, - "effective_membership_level": OrganizationMembership.Level.ADMIN, - }, - response_data, - ) - - def test_fetch_team_as_org_outsider(self): - self.organization_membership.delete() - response = self.client.get(f"/api/projects/@current/") - response_data = response.json() - - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - self.assertEqual(self.not_found_response(), response_data) - - def test_fetch_nonexistent_team(self): - response = self.client.get(f"/api/projects/234444/") - 
response_data = response.json() - - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - self.assertEqual(self.not_found_response(), response_data) - - def test_list_teams_restricted_ones_hidden(self): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - Team.objects.create( - organization=self.organization, - name="Other", - access_control=True, - ) - - # The other team should not be returned as it's restricted for the logged-in user - projects_response = self.client.get(f"/api/projects/") - - # 9 (above): - with self.assertNumQueries(FuzzyInt(9, 10)): - current_org_response = self.client.get(f"/api/organizations/{self.organization.id}/") - - self.assertEqual(projects_response.status_code, HTTP_200_OK) - self.assertEqual( - projects_response.json().get("results"), - [ +def team_enterprise_api_test_factory(): # type: ignore + class TestTeamEnterpriseAPI(APILicensedTest): + CLASS_DATA_LEVEL_SETUP = False + + # Creating projects + + def test_create_team(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + self.assertEqual(Team.objects.count(), 1) + self.assertEqual(Project.objects.count(), 1) + response = self.client.post("/api/environments/", {"name": "Test"}) + self.assertEqual(response.status_code, 201) + self.assertEqual(Team.objects.count(), 2) + self.assertEqual(Project.objects.count(), 2) + response_data = response.json() + self.assertDictContainsSubset( + { + "name": "Test", + "access_control": False, + "effective_membership_level": OrganizationMembership.Level.ADMIN, + }, + response_data, + ) + self.assertEqual(self.organization.teams.count(), 2) + + def test_non_admin_cannot_create_team(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + count = Team.objects.count() + response = self.client.post("/api/environments/", {"name": "Test"}) + 
self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertEqual(Team.objects.count(), count) + self.assertEqual( + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), + ) + + def test_create_demo_team(self, *args): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + response = self.client.post("/api/environments/", {"name": "Hedgebox", "is_demo": True}) + self.assertEqual(Team.objects.count(), 3) + self.assertEqual(response.status_code, 201) + response_data = response.json() + self.assertDictContainsSubset( { - "id": self.team.id, - "uuid": str(self.team.uuid), - "organization": str(self.organization.id), - "api_token": self.team.api_token, - "name": self.team.name, - "completed_snippet_onboarding": False, - "has_completed_onboarding_for": {"product_analytics": True}, - "ingested_event": False, - "is_demo": False, - "timezone": "UTC", + "name": "Hedgebox", "access_control": False, - } - ], - ) - self.assertEqual(current_org_response.status_code, HTTP_200_OK) - self.assertEqual( - current_org_response.json().get("teams"), - [ + "effective_membership_level": OrganizationMembership.Level.ADMIN, + }, + response_data, + ) + self.assertEqual(self.organization.teams.count(), 2) + + def test_create_two_demo_teams(self, *args): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + response = self.client.post("/api/environments/", {"name": "Hedgebox", "is_demo": True}) + self.assertEqual(Team.objects.count(), 3) + self.assertEqual(response.status_code, 201) + response_data = response.json() + self.assertDictContainsSubset( { - "id": self.team.id, - "uuid": str(self.team.uuid), - "organization": str(self.organization.id), - "api_token": self.team.api_token, - "name": self.team.name, - "completed_snippet_onboarding": False, - "has_completed_onboarding_for": {"product_analytics": True}, - 
"ingested_event": False, - "is_demo": False, - "timezone": "UTC", + "name": "Hedgebox", "access_control": False, - } - ], - ) + "effective_membership_level": OrganizationMembership.Level.ADMIN, + }, + response_data, + ) + response_2 = self.client.post("/api/environments/", {"name": "Hedgebox", "is_demo": True}) + self.assertEqual(Team.objects.count(), 3) + response_2_data = response_2.json() + self.assertDictContainsSubset( + { + "type": "authentication_error", + "code": "permission_denied", + "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects or environments.", + }, + response_2_data, + ) + self.assertEqual(self.organization.teams.count(), 2) + + def test_user_that_does_not_belong_to_an_org_cannot_create_a_team(self): + user = User.objects.create(email="no_org@posthog.com") + self.client.force_login(user) + + response = self.client.post("/api/environments/", {"name": "Test"}) + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND, response.content) + self.assertEqual( + response.json(), + { + "type": "invalid_request", + "code": "not_found", + "detail": "You need to belong to an organization.", + "attr": None, + }, + ) + + # Deleting projects + + def test_delete_team_as_org_admin_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + response = self.client.delete(f"/api/environments/{self.team.id}") + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 0) + + def test_delete_team_as_org_member_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + response = self.client.delete(f"/api/environments/{self.team.id}") + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) + + def 
test_delete_open_team_as_org_member_but_team_admin_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, + ) + response = self.client.delete(f"/api/environments/{self.team.id}") + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) + + def test_delete_private_team_as_org_member_but_team_admin_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, + ) + response = self.client.delete(f"/api/environments/{self.team.id}") + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 0) + + def test_delete_second_team_as_org_admin_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + team = Team.objects.create(organization=self.organization) + response = self.client.delete(f"/api/environments/{team.id}") + self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) + + def test_no_delete_team_not_administrating_organization(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + team = Team.objects.create(organization=self.organization) + response = self.client.delete(f"/api/environments/{team.id}") + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + 
self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) + + def test_no_delete_team_not_belonging_to_organization(self): + team_1 = Organization.objects.bootstrap(None)[2] + response = self.client.delete(f"/api/environments/{team_1.id}") + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertTrue(Team.objects.filter(id=team_1.id).exists()) + organization, _, _ = User.objects.bootstrap("X", "someone@x.com", "qwerty", "Someone") + team_2 = Team.objects.create(organization=organization) + response = self.client.delete(f"/api/environments/{team_2.id}") + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertEqual(Team.objects.filter(organization=organization).count(), 2) + + # Updating projects + + def test_rename_team_as_org_member_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + + response = self.client.patch(f"/api/environments/@current/", {"name": "Erinaceus europaeus"}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(self.team.name, "Erinaceus europaeus") + + def test_rename_private_team_as_org_member_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + + response = self.client.patch(f"/api/environments/@current/", {"name": "Acherontia atropos"}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertEqual(self.team.name, "Default project") + + def test_rename_private_team_current_as_org_outsider_forbidden(self): + self.organization_membership.delete() + + response = self.client.patch(f"/api/environments/@current/", {"name": "Acherontia atropos"}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def 
test_rename_private_team_id_as_org_outsider_forbidden(self): + self.organization_membership.delete() + + response = self.client.patch(f"/api/environments/{self.team.id}/", {"name": "Acherontia atropos"}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + def test_rename_private_team_as_org_member_and_team_member_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, + ) + + response = self.client.patch(f"/api/environments/@current/", {"name": "Acherontia atropos"}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(self.team.name, "Acherontia atropos") + + def test_enable_access_control_as_org_member_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + + response = self.client.patch(f"/api/environments/@current/", {"access_control": True}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertFalse(self.team.access_control) + + def test_enable_access_control_as_org_admin_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + response = self.client.patch(f"/api/environments/@current/", {"access_control": True}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertTrue(self.team.access_control) + + def test_enable_access_control_as_org_member_and_team_admin_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + ExplicitTeamMembership.objects.create( + team=self.team, + 
parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, + ) + + response = self.client.patch(f"/api/environments/@current/", {"access_control": True}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertFalse(self.team.access_control) + + def test_disable_access_control_as_org_member_forbidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + + response = self.client.patch(f"/api/environments/@current/", {"access_control": False}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertTrue(self.team.access_control) + + def test_disable_access_control_as_org_member_and_team_admin_forbidden(self): + # Only org-wide admins+ should be allowed to make the project open, + # because if a project-specific admin who is only an org member did it, they wouldn't be able to reenable it + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, + ) + + response = self.client.patch(f"/api/environments/@current/", {"access_control": False}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertTrue(self.team.access_control) + + def test_disable_access_control_as_org_admin_allowed(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + self.team.access_control = True + self.team.save() + + response = self.client.patch(f"/api/environments/@current/", {"access_control": False}) + self.team.refresh_from_db() + + self.assertEqual(response.status_code, 
HTTP_200_OK) + self.assertFalse(self.team.access_control) + + def test_can_update_and_retrieve_person_property_names_excluded_from_correlation(self): + response = self.client.patch( + f"/api/environments/@current/", + {"correlation_config": {"excluded_person_property_names": ["$os"]}}, + ) + self.assertEqual(response.status_code, HTTP_200_OK) + + response = self.client.get(f"/api/environments/@current/") + self.assertEqual(response.status_code, HTTP_200_OK) + + response_data = response.json() + + self.assertDictContainsSubset( + {"correlation_config": {"excluded_person_property_names": ["$os"]}}, + response_data, + ) + + # Fetching projects + + def test_fetch_team_as_org_admin_works(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertDictContainsSubset( + { + "name": "Default project", + "access_control": False, + "effective_membership_level": OrganizationMembership.Level.ADMIN, + }, + response_data, + ) + + def test_fetch_team_as_org_member_works(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertDictContainsSubset( + { + "name": "Default project", + "access_control": False, + "effective_membership_level": OrganizationMembership.Level.MEMBER, + }, + response_data, + ) + + def test_fetch_private_team_as_org_member(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + 
self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + self.assertEqual( + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, + ) + + def test_fetch_private_team_as_org_member_and_team_member(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, + ) + + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertDictContainsSubset( + { + "name": "Default project", + "access_control": True, + "effective_membership_level": OrganizationMembership.Level.MEMBER, + }, + response_data, + ) + + def test_fetch_private_team_as_org_member_and_team_admin(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + self.team.access_control = True + self.team.save() + ExplicitTeamMembership.objects.create( + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, + ) + + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_200_OK) + self.assertDictContainsSubset( + { + "name": "Default project", + "access_control": True, + "effective_membership_level": OrganizationMembership.Level.ADMIN, + }, + response_data, + ) + + def test_fetch_team_as_org_outsider(self): + self.organization_membership.delete() + response = self.client.get(f"/api/environments/@current/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertEqual(self.not_found_response(), response_data) + + def 
test_fetch_nonexistent_team(self): + response = self.client.get(f"/api/environments/234444/") + response_data = response.json() + + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertEqual(self.not_found_response(), response_data) + + def test_list_teams_restricted_ones_hidden(self): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + Team.objects.create( + organization=self.organization, + name="Other", + access_control=True, + ) + + # The other team should not be returned as it's restricted for the logged-in user + projects_response = self.client.get(f"/api/environments/") + + # 9 (above): + with self.assertNumQueries(FuzzyInt(9, 10)): + current_org_response = self.client.get(f"/api/organizations/{self.organization.id}/") + + self.assertEqual(projects_response.status_code, HTTP_200_OK) + self.assertEqual( + projects_response.json().get("results"), + [ + { + "id": self.team.id, + "uuid": str(self.team.uuid), + "organization": str(self.organization.id), + "api_token": self.team.api_token, + "name": self.team.name, + "completed_snippet_onboarding": False, + "has_completed_onboarding_for": {"product_analytics": True}, + "ingested_event": False, + "is_demo": False, + "timezone": "UTC", + "access_control": False, + } + ], + ) + self.assertEqual(current_org_response.status_code, HTTP_200_OK) + self.assertEqual( + current_org_response.json().get("teams"), + [ + { + "id": self.team.id, + "uuid": str(self.team.uuid), + "organization": str(self.organization.id), + "api_token": self.team.api_token, + "name": self.team.name, + "completed_snippet_onboarding": False, + "has_completed_onboarding_for": {"product_analytics": True}, + "ingested_event": False, + "is_demo": False, + "timezone": "UTC", + "access_control": False, + } + ], + ) + + return TestTeamEnterpriseAPI + + +class TestTeamEnterpriseAPI(team_enterprise_api_test_factory()): + pass diff --git a/ee/clickhouse/views/experiments.py 
b/ee/clickhouse/views/experiments.py index ffdbfcc16e428..7aed519d29ee6 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -333,7 +333,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg return super().update(instance, validated_data) -class ClickhouseExperimentsViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): +class EnterpriseExperimentsViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): scope_object = "experiment" serializer_class = ExperimentSerializer queryset = Experiment.objects.prefetch_related("feature_flag", "created_by").all() diff --git a/ee/clickhouse/views/groups.py b/ee/clickhouse/views/groups.py index 3c20275b2de7b..bfbb375e70990 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -25,7 +25,7 @@ class Meta: read_only_fields = ["group_type", "group_type_index"] -class ClickhouseGroupsTypesView(TeamAndOrgViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): +class GroupsTypesViewSet(TeamAndOrgViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): scope_object = "group" serializer_class = GroupTypeSerializer queryset = GroupTypeMapping.objects.all().order_by("group_type_index") @@ -54,7 +54,7 @@ class Meta: fields = ["group_type_index", "group_key", "group_properties", "created_at"] -class ClickhouseGroupsView(TeamAndOrgViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): +class GroupsViewSet(TeamAndOrgViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): scope_object = "group" serializer_class = GroupSerializer queryset = Group.objects.all() diff --git a/ee/clickhouse/views/insights.py b/ee/clickhouse/views/insights.py index 6072ab2957bb1..933928428ab50 100644 --- a/ee/clickhouse/views/insights.py +++ b/ee/clickhouse/views/insights.py @@ -26,7 +26,7 @@ def has_object_permission(self, request: Request, view, insight: Insight) -> boo return view.user_permissions.insight(insight).effective_privilege_level 
== Dashboard.PrivilegeLevel.CAN_EDIT -class ClickhouseInsightsViewSet(InsightViewSet): +class EnterpriseInsightsViewSet(InsightViewSet): permission_classes = [CanEditInsight] retention_query_class = ClickhouseRetention stickiness_query_class = ClickhouseStickiness diff --git a/ee/urls.py b/ee/urls.py index 68ede5f20126c..633766add1439 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -4,10 +4,8 @@ from django.contrib import admin from django.urls import include from django.urls.conf import path -from rest_framework_extensions.routers import NestedRegistryItem from ee.api import integration -from posthog.api.routing import DefaultRouterPlusPlus from .api import ( authentication, @@ -25,14 +23,16 @@ from .session_recordings import session_recording_playlist -def extend_api_router( - root_router: DefaultRouterPlusPlus, - *, - projects_router: NestedRegistryItem, - organizations_router: NestedRegistryItem, - project_dashboards_router: NestedRegistryItem, - project_feature_flags_router: NestedRegistryItem, -) -> None: +def extend_api_router() -> None: + from posthog.api import ( + router as root_router, + register_grandfathered_environment_nested_viewset, + projects_router, + organizations_router, + project_feature_flags_router, + project_dashboards_router, + ) + root_router.register(r"billing", billing.BillingViewset, "billing") root_router.register(r"license", license.LicenseViewSet) root_router.register(r"integrations", integration.PublicIntegrationViewSet) @@ -60,8 +60,8 @@ def extend_api_router( "organization_resource_access", ["organization_id"], ) - projects_router.register(r"hooks", hooks.HookViewSet, "environment_hooks", ["team_id"]) - projects_router.register( + register_grandfathered_environment_nested_viewset(r"hooks", hooks.HookViewSet, "environment_hooks", ["team_id"]) + register_grandfathered_environment_nested_viewset( r"explicit_members", explicit_team_member.ExplicitTeamMemberViewSet, "environment_explicit_members", @@ -74,7 +74,7 @@ def extend_api_router( 
["project_id", "dashboard_id"], ) - projects_router.register( + register_grandfathered_environment_nested_viewset( r"subscriptions", subscription.SubscriptionViewSet, "environment_subscriptions", ["team_id"] ) projects_router.register( diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png index 667f5c839eea3..24cfa930cc9e1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png differ diff --git a/frontend/src/lib/api.mock.ts b/frontend/src/lib/api.mock.ts index c6ca0a28c2cda..e10d348595cdd 100644 --- a/frontend/src/lib/api.mock.ts +++ b/frontend/src/lib/api.mock.ts @@ -12,6 +12,7 @@ import { PluginConfigWithPluginInfo, PluginInstallationType, PluginType, + ProjectType, PropertyFilterType, PropertyOperator, TeamType, @@ -90,6 +91,13 @@ export const MOCK_DEFAULT_TEAM: TeamType = { live_events_token: '123', } +export const MOCK_DEFAULT_PROJECT: ProjectType = { + id: MOCK_TEAM_ID, + name: 'MockHog App + Marketing', + organization_id: MOCK_ORGANIZATION_ID, + created_at: '2020-06-30T09:53:35.932534Z', +} + export const MOCK_DEFAULT_ORGANIZATION: OrganizationType = { customer_id: null, id: MOCK_ORGANIZATION_ID, diff --git a/frontend/src/types.ts b/frontend/src/types.ts index f5c24f30f0508..1000b66e54f06 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -469,6 +469,12 @@ export interface SessionRecordingAIConfig { important_user_properties: string[] } +export interface ProjectType { + id: number + name: string + organization_id: string + created_at: string +} export interface TeamType extends TeamBasicType { created_at: string updated_at: string @@ -3273,6 +3279,7 @@ export type EventOrPropType = EventDefinition & PropertyDefinition export interface AppContext { current_user: UserType | null + 
current_project: ProjectType | null current_team: TeamType | TeamPublicType | null preflight: PreflightStatus default_event_name: string diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py index c6fd6851da9c7..3419e09b50209 100644 --- a/posthog/api/__init__.py +++ b/posthog/api/__init__.py @@ -1,5 +1,8 @@ -from rest_framework import decorators, exceptions +from rest_framework import decorators, exceptions, viewsets +from rest_framework_extensions.routers import NestedRegistryItem + +from posthog.api import project from posthog.api.routing import DefaultRouterPlusPlus from posthog.batch_exports import http as batch_exports from posthog.settings import EE_AVAILABLE @@ -81,35 +84,57 @@ def api_not_found(request): router.register(r"feature_flag", feature_flag.LegacyFeatureFlagViewSet) # Used for library side feature flag evaluation # Nested endpoints shared -projects_router = router.register(r"projects", team.RootTeamViewSet, "projects") -project_plugins_configs_router = projects_router.register( +projects_router = router.register(r"projects", project.RootProjectViewSet, "projects") +environments_router = router.register(r"environments", team.RootTeamViewSet, "environments") + + +def register_grandfathered_environment_nested_viewset( + prefix: str, viewset: type[viewsets.GenericViewSet], basename: str, parents_query_lookups: list[str] +) -> tuple[NestedRegistryItem, NestedRegistryItem]: + """ + Register the environment-specific viewset under both /environments/:team_id/ (correct endpoint) + and /projects/:team_id/ (legacy, but supported for backward compatibility endpoint). + DO NOT USE ON ANY NEW ENDPOINT YOU'RE ADDING! 
+ """ + if parents_query_lookups[0] != "team_id": + raise ValueError("Only endpoints with team_id as the first parent query lookup can be environment-nested") + if not basename.startswith("environment_"): + raise ValueError("Only endpoints with a basename starting with `environment_` can be environment-nested") + environment_nested = environments_router.register(prefix, viewset, basename, parents_query_lookups) + legacy_project_nested = projects_router.register( + prefix, viewset, basename.replace("environment_", "project_"), parents_query_lookups + ) + return environment_nested, legacy_project_nested + + +register_grandfathered_environment_nested_viewset( r"plugin_configs", plugin.PluginConfigViewSet, "environment_plugin_configs", ["team_id"] ) -project_plugins_configs_router.register( +register_grandfathered_environment_nested_viewset( r"logs", plugin_log_entry.PluginLogEntryViewSet, "environment_plugin_config_logs", ["team_id", "plugin_config_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"pipeline_transformation_configs", plugin.PipelineTransformationsConfigsViewSet, "environment_pipeline_transformation_configs", ["team_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"pipeline_destination_configs", plugin.PipelineDestinationsConfigsViewSet, "environment_pipeline_destination_configs", ["team_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"pipeline_frontend_apps_configs", plugin.PipelineFrontendAppsConfigsViewSet, "environment_pipeline_frontend_apps_configs", ["team_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"pipeline_import_apps_configs", plugin.PipelineImportAppsConfigsViewSet, "environment_pipeline_import_apps_configs", @@ -147,9 +172,13 @@ def api_not_found(request): r"dashboards", dashboard.DashboardsViewSet, "project_dashboards", ["project_id"] ) -projects_router.register(r"exports", 
exports.ExportedAssetViewSet, "environment_exports", ["team_id"]) -projects_router.register(r"integrations", integration.IntegrationViewSet, "environment_integrations", ["team_id"]) -projects_router.register( +register_grandfathered_environment_nested_viewset( + r"exports", exports.ExportedAssetViewSet, "environment_exports", ["team_id"] +) +register_grandfathered_environment_nested_viewset( + r"integrations", integration.IntegrationViewSet, "environment_integrations", ["team_id"] +) +register_grandfathered_environment_nested_viewset( r"ingestion_warnings", ingestion_warnings.IngestionWarningsViewSet, "environment_ingestion_warnings", @@ -170,37 +199,50 @@ def api_not_found(request): ["project_id"], ) -app_metrics_router = projects_router.register( +environment_app_metrics_router, legacy_project_app_metrics_router = register_grandfathered_environment_nested_viewset( r"app_metrics", app_metrics.AppMetricsViewSet, "environment_app_metrics", ["team_id"] ) -app_metrics_router.register( +environment_app_metrics_router.register( r"historical_exports", app_metrics.HistoricalExportsAppMetricsViewSet, "environment_app_metrics_historical_exports", ["team_id", "plugin_config_id"], ) +legacy_project_app_metrics_router.register( + r"historical_exports", + app_metrics.HistoricalExportsAppMetricsViewSet, + "project_app_metrics_historical_exports", + ["team_id", "plugin_config_id"], +) -batch_exports_router = projects_router.register( - r"batch_exports", batch_exports.BatchExportViewSet, "environment_batch_exports", ["team_id"] +environment_batch_exports_router, legacy_project_batch_exports_router = ( + register_grandfathered_environment_nested_viewset( + r"batch_exports", batch_exports.BatchExportViewSet, "environment_batch_exports", ["team_id"] + ) ) -batch_export_runs_router = batch_exports_router.register( +environment_batch_exports_router.register( r"runs", batch_exports.BatchExportRunViewSet, "environment_batch_export_runs", ["team_id", "batch_export_id"] ) 
+legacy_project_batch_exports_router.register( + r"runs", batch_exports.BatchExportRunViewSet, "project_batch_export_runs", ["team_id", "batch_export_id"] +) -projects_router.register(r"warehouse_tables", table.TableViewSet, "environment_warehouse_tables", ["team_id"]) -projects_router.register( +register_grandfathered_environment_nested_viewset( + r"warehouse_tables", table.TableViewSet, "environment_warehouse_tables", ["team_id"] +) +register_grandfathered_environment_nested_viewset( r"warehouse_saved_queries", saved_query.DataWarehouseSavedQueryViewSet, "environment_warehouse_saved_queries", ["team_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"warehouse_view_links", view_link.ViewLinkViewSet, "environment_warehouse_view_links", ["team_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"warehouse_view_link", view_link.ViewLinkViewSet, "environment_warehouse_view_link", ["team_id"] ) @@ -220,10 +262,10 @@ def api_not_found(request): projects_router.register(r"uploaded_media", uploaded_media.MediaViewSet, "project_media", ["project_id"]) projects_router.register(r"tags", tagged_item.TaggedItemViewSet, "project_tags", ["project_id"]) -projects_router.register(r"query", query.QueryViewSet, "environment_query", ["team_id"]) +register_grandfathered_environment_nested_viewset(r"query", query.QueryViewSet, "environment_query", ["team_id"]) # External data resources -projects_router.register( +register_grandfathered_environment_nested_viewset( r"external_data_sources", external_data_source.ExternalDataSourceViewSet, "environment_external_data_sources", @@ -243,16 +285,16 @@ def api_not_found(request): ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"external_data_schemas", external_data_schema.ExternalDataSchemaViewset, - "project_external_data_schemas", + "environment_external_data_schemas", ["team_id"], ) # Organizations nested endpoints organizations_router 
= router.register(r"organizations", organization.OrganizationViewSet, "organizations") -organizations_router.register(r"projects", team.TeamViewSet, "projects", ["organization_id"]) +organizations_router.register(r"projects", project.ProjectViewSet, "organization_projects", ["organization_id"]) organizations_router.register( r"batch_exports", batch_exports.BatchExportOrganizationViewSet, "batch_exports", ["organization_id"] ) @@ -316,10 +358,10 @@ def api_not_found(request): # General endpoints (shared across CH & PG) router.register(r"login", authentication.LoginViewSet, "login") -router.register(r"login/token", authentication.TwoFactorViewSet) -router.register(r"login/precheck", authentication.LoginPrecheckViewSet) +router.register(r"login/token", authentication.TwoFactorViewSet, "login_token") +router.register(r"login/precheck", authentication.LoginPrecheckViewSet, "login_precheck") router.register(r"reset", authentication.PasswordResetViewSet, "password_reset") -router.register(r"users", user.UserViewSet) +router.register(r"users", user.UserViewSet, "users") router.register(r"personal_api_keys", personal_api_key.PersonalAPIKeyViewSet, "personal_api_keys") router.register(r"instance_status", instance_status.InstanceStatusViewSet, "instance_status") router.register(r"dead_letter_queue", dead_letter_queue.DeadLetterQueueViewSet, "dead_letter_queue") @@ -344,44 +386,46 @@ def api_not_found(request): router.register(r"event", LegacyEventViewSet, basename="event") # Nested endpoints CH -projects_router.register(r"events", EventViewSet, "environment_events", ["team_id"]) +register_grandfathered_environment_nested_viewset(r"events", EventViewSet, "environment_events", ["team_id"]) projects_router.register(r"actions", ActionViewSet, "project_actions", ["project_id"]) projects_router.register(r"cohorts", CohortViewSet, "project_cohorts", ["project_id"]) -projects_router.register(r"persons", PersonViewSet, "environment_persons", ["team_id"]) 
-projects_router.register(r"elements", ElementViewSet, "environment_elements", ["team_id"]) # TODO: Can be removed? -project_session_recordings_router = projects_router.register( - r"session_recordings", - SessionRecordingViewSet, - "environment_session_recordings", - ["team_id"], +register_grandfathered_environment_nested_viewset( + r"elements", + ElementViewSet, + "environment_elements", + ["team_id"], # TODO: Can be removed? +) +environment_sessions_recordings_router, legacy_project_session_recordings_router = ( + register_grandfathered_environment_nested_viewset( + r"session_recordings", + SessionRecordingViewSet, + "environment_session_recordings", + ["team_id"], + ) ) -projects_router.register(r"heatmaps", HeatmapViewSet, "environment_heatmaps", ["team_id"]) -projects_router.register(r"sessions", SessionViewSet, "environment_sessions", ["team_id"]) +register_grandfathered_environment_nested_viewset(r"heatmaps", HeatmapViewSet, "environment_heatmaps", ["team_id"]) +register_grandfathered_environment_nested_viewset(r"sessions", SessionViewSet, "environment_sessions", ["team_id"]) if EE_AVAILABLE: - from ee.clickhouse.views.experiments import ClickhouseExperimentsViewSet - from ee.clickhouse.views.groups import ( - ClickhouseGroupsTypesView, - ClickhouseGroupsView, - ) - from ee.clickhouse.views.insights import ClickhouseInsightsViewSet - from ee.clickhouse.views.person import ( - EnterprisePersonViewSet, - LegacyEnterprisePersonViewSet, - ) - - projects_router.register(r"experiments", ClickhouseExperimentsViewSet, "project_experiments", ["project_id"]) - projects_router.register(r"groups", ClickhouseGroupsView, "environment_groups", ["team_id"]) - projects_router.register(r"groups_types", ClickhouseGroupsTypesView, "project_groups_types", ["project_id"]) + from ee.clickhouse.views.experiments import EnterpriseExperimentsViewSet + from ee.clickhouse.views.groups import GroupsTypesViewSet, GroupsViewSet + from ee.clickhouse.views.insights import 
EnterpriseInsightsViewSet + from ee.clickhouse.views.person import EnterprisePersonViewSet, LegacyEnterprisePersonViewSet + + projects_router.register(r"experiments", EnterpriseExperimentsViewSet, "project_experiments", ["project_id"]) + register_grandfathered_environment_nested_viewset(r"groups", GroupsViewSet, "environment_groups", ["team_id"]) + projects_router.register(r"groups_types", GroupsTypesViewSet, "project_groups_types", ["project_id"]) project_insights_router = projects_router.register( - r"insights", ClickhouseInsightsViewSet, "project_insights", ["project_id"] + r"insights", EnterpriseInsightsViewSet, "project_insights", ["project_id"] + ) + register_grandfathered_environment_nested_viewset( + r"persons", EnterprisePersonViewSet, "environment_persons", ["team_id"] ) - projects_router.register(r"persons", EnterprisePersonViewSet, "environment_persons", ["team_id"]) - router.register(r"person", LegacyEnterprisePersonViewSet, basename="person") + router.register(r"person", LegacyEnterprisePersonViewSet, "persons") else: project_insights_router = projects_router.register(r"insights", InsightViewSet, "project_insights", ["project_id"]) - projects_router.register(r"persons", PersonViewSet, "environment_persons", ["team_id"]) - router.register(r"person", LegacyPersonViewSet, basename="person") + register_grandfathered_environment_nested_viewset(r"persons", PersonViewSet, "environment_persons", ["team_id"]) + router.register(r"person", LegacyPersonViewSet, "persons") project_dashboards_router.register( @@ -412,12 +456,18 @@ def api_not_found(request): ["team_id", "insight_id"], ) -project_session_recordings_router.register( +environment_sessions_recordings_router.register( r"sharing", sharing.SharingConfigurationViewSet, "environment_recording_sharing", ["team_id", "recording_id"], ) +legacy_project_session_recordings_router.register( + r"sharing", + sharing.SharingConfigurationViewSet, + "project_recording_sharing", + ["team_id", "recording_id"], +) 
projects_router.register( r"notebooks", @@ -440,7 +490,7 @@ def api_not_found(request): ["project_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"hog_functions", hog_function.HogFunctionViewSet, "environment_hog_functions", @@ -454,7 +504,7 @@ def api_not_found(request): ["project_id"], ) -projects_router.register( +register_grandfathered_environment_nested_viewset( r"alerts", alert.AlertViewSet, "environment_alerts", diff --git a/posthog/api/project.py b/posthog/api/project.py new file mode 100644 index 0000000000000..d2e3e341228b0 --- /dev/null +++ b/posthog/api/project.py @@ -0,0 +1,508 @@ +from datetime import timedelta +from functools import cached_property +from typing import Any, Optional, cast + +from django.shortcuts import get_object_or_404 +from loginas.utils import is_impersonated_session +from rest_framework import exceptions, request, response, serializers, viewsets +from rest_framework.decorators import action +from rest_framework.permissions import IsAuthenticated + +from posthog.api.geoip import get_geoip_properties +from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.api.shared import ProjectBasicSerializer +from posthog.api.team import PremiumMultiProjectPermissions, TeamSerializer, validate_team_attrs +from posthog.event_usage import report_user_action +from posthog.jwt import PosthogJwtAudience, encode_jwt +from posthog.models import User +from posthog.models.activity_logging.activity_log import ( + Detail, + dict_changes_between, + load_activity, + log_activity, +) +from posthog.models.activity_logging.activity_page import activity_page_response +from posthog.models.async_deletion import AsyncDeletion, DeletionType +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.models.organization import OrganizationMembership +from posthog.models.personal_api_key import APIScopeObjectOrNotSupported +from posthog.models.project import Project +from posthog.models.signals 
import mute_selected_signals +from posthog.models.team.team import Team +from posthog.models.team.util import delete_batch_exports, delete_bulky_postgres_data +from posthog.models.utils import UUIDT +from posthog.permissions import ( + APIScopePermission, + OrganizationAdminWritePermissions, + OrganizationMemberPermissions, + TeamMemberLightManagementPermission, + TeamMemberStrictManagementPermission, +) +from posthog.user_permissions import UserPermissions, UserPermissionsSerializerMixin +from posthog.utils import get_ip_address, get_week_start_for_country_code + + +class ProjectSerializer(ProjectBasicSerializer, UserPermissionsSerializerMixin): + effective_membership_level = serializers.SerializerMethodField() # Compat with TeamSerializer + has_group_types = serializers.SerializerMethodField() # Compat with TeamSerializer + live_events_token = serializers.SerializerMethodField() # Compat with TeamSerializer + + class Meta: + model = Project + fields = ( + "id", + "organization", + "name", + "created_at", + "effective_membership_level", # Compat with TeamSerializer + "has_group_types", # Compat with TeamSerializer + "live_events_token", # Compat with TeamSerializer + "updated_at", + "uuid", # Compat with TeamSerializer + "api_token", # Compat with TeamSerializer + "app_urls", # Compat with TeamSerializer + "slack_incoming_webhook", # Compat with TeamSerializer + "anonymize_ips", # Compat with TeamSerializer + "completed_snippet_onboarding", # Compat with TeamSerializer + "ingested_event", # Compat with TeamSerializer + "test_account_filters", # Compat with TeamSerializer + "test_account_filters_default_checked", # Compat with TeamSerializer + "path_cleaning_filters", # Compat with TeamSerializer + "is_demo", # Compat with TeamSerializer + "timezone", # Compat with TeamSerializer + "data_attributes", # Compat with TeamSerializer + "person_display_name_properties", # Compat with TeamSerializer + "correlation_config", # Compat with TeamSerializer + 
"autocapture_opt_out", # Compat with TeamSerializer + "autocapture_exceptions_opt_in", # Compat with TeamSerializer + "autocapture_web_vitals_opt_in", # Compat with TeamSerializer + "autocapture_web_vitals_allowed_metrics", # Compat with TeamSerializer + "autocapture_exceptions_errors_to_ignore", # Compat with TeamSerializer + "capture_console_log_opt_in", # Compat with TeamSerializer + "capture_performance_opt_in", # Compat with TeamSerializer + "session_recording_opt_in", # Compat with TeamSerializer + "session_recording_sample_rate", # Compat with TeamSerializer + "session_recording_minimum_duration_milliseconds", # Compat with TeamSerializer + "session_recording_linked_flag", # Compat with TeamSerializer + "session_recording_network_payload_capture_config", # Compat with TeamSerializer + "session_replay_config", # Compat with TeamSerializer + "access_control", # Compat with TeamSerializer + "week_start_day", # Compat with TeamSerializer + "primary_dashboard", # Compat with TeamSerializer + "live_events_columns", # Compat with TeamSerializer + "recording_domains", # Compat with TeamSerializer + "person_on_events_querying_enabled", # Compat with TeamSerializer + "inject_web_apps", # Compat with TeamSerializer + "extra_settings", # Compat with TeamSerializer + "modifiers", # Compat with TeamSerializer + "default_modifiers", # Compat with TeamSerializer + "has_completed_onboarding_for", # Compat with TeamSerializer + "surveys_opt_in", # Compat with TeamSerializer + "heatmaps_opt_in", # Compat with TeamSerializer + ) + read_only_fields = ( + "id", + "uuid", + "organization", + "effective_membership_level", + "has_group_types", + "live_events_token", + "created_at", + "api_token", + "updated_at", + "ingested_event", + "default_modifiers", + "person_on_events_querying_enabled", + ) + + team_passthrough_fields = { + "updated_at", + "uuid", + "api_token", + "app_urls", + "slack_incoming_webhook", + "anonymize_ips", + "completed_snippet_onboarding", + "ingested_event", + 
"test_account_filters", + "test_account_filters_default_checked", + "path_cleaning_filters", + "is_demo", + "timezone", + "data_attributes", + "person_display_name_properties", + "correlation_config", + "autocapture_opt_out", + "autocapture_exceptions_opt_in", + "autocapture_web_vitals_opt_in", + "autocapture_web_vitals_allowed_metrics", + "autocapture_exceptions_errors_to_ignore", + "capture_console_log_opt_in", + "capture_performance_opt_in", + "session_recording_opt_in", + "session_recording_sample_rate", + "session_recording_minimum_duration_milliseconds", + "session_recording_linked_flag", + "session_recording_network_payload_capture_config", + "session_replay_config", + "access_control", + "week_start_day", + "primary_dashboard", + "live_events_columns", + "recording_domains", + "person_on_events_querying_enabled", + "inject_web_apps", + "extra_settings", + "modifiers", + "default_modifiers", + "has_completed_onboarding_for", + "surveys_opt_in", + "heatmaps_opt_in", + } + + def get_effective_membership_level(self, project: Project) -> Optional[OrganizationMembership.Level]: + team = project.teams.get(pk=project.pk) + return self.user_permissions.team(team).effective_membership_level + + def get_has_group_types(self, project: Project) -> bool: + return GroupTypeMapping.objects.filter(team_id=project.id).exists() + + def get_live_events_token(self, project: Project) -> Optional[str]: + team = project.teams.get(pk=project.pk) + return encode_jwt( + {"team_id": team.id, "api_token": team.api_token}, + timedelta(days=7), + PosthogJwtAudience.LIVESTREAM, + ) + + @staticmethod + def validate_session_recording_linked_flag(value) -> dict | None: + return TeamSerializer.validate_session_recording_linked_flag(value) + + @staticmethod + def validate_session_recording_network_payload_capture_config(value) -> dict | None: + return TeamSerializer.validate_session_recording_network_payload_capture_config(value) + + @staticmethod + def validate_session_replay_config(value) -> 
dict | None: + return TeamSerializer.validate_session_replay_config(value) + + @staticmethod + def validate_session_replay_ai_summary_config(value: dict | None) -> dict | None: + return TeamSerializer.validate_session_replay_ai_summary_config(value) + + def validate(self, attrs: Any) -> Any: + attrs = validate_team_attrs(attrs, self.context["view"], self.context["request"], self.instance) + return super().validate(attrs) + + def create(self, validated_data: dict[str, Any], **kwargs) -> Project: + serializers.raise_errors_on_nested_writes("create", self, validated_data) + request = self.context["request"] + + if "week_start_day" not in validated_data: + country_code = get_geoip_properties(get_ip_address(request)).get("$geoip_country_code", None) + if country_code: + week_start_day_for_user_ip_location = get_week_start_for_country_code(country_code) + # get_week_start_for_country_code() also returns 6 for countries where the week starts on Saturday, + # but ClickHouse doesn't support Saturday as the first day of the week, so we fall back to Sunday + validated_data["week_start_day"] = 1 if week_start_day_for_user_ip_location == 1 else 0 + + team_fields: dict[str, Any] = {} + for field_name in validated_data.copy(): # Copy to avoid iterating over a changing dict + if field_name in self.Meta.team_passthrough_fields: + team_fields[field_name] = validated_data.pop(field_name) + project, team = Project.objects.create_with_team( + organization_id=self.context["view"].organization_id, + initiating_user=self.context["request"].user, + **validated_data, + team_fields=team_fields, + ) + + request.user.current_team = team + request.user.team = request.user.current_team # Update cached property + request.user.save() + + log_activity( + organization_id=project.organization_id, + team_id=project.pk, + user=request.user, + was_impersonated=is_impersonated_session(request), + scope="Project", + item_id=project.pk, + activity="created", + detail=Detail(name=str(project.name)), + ) + 
log_activity( + organization_id=project.organization_id, + team_id=team.pk, + user=request.user, + was_impersonated=is_impersonated_session(request), + scope="Team", + item_id=team.pk, + activity="created", + detail=Detail(name=str(team.name)), + ) + + return project + + def update(self, instance: Project, validated_data: dict[str, Any]) -> Project: + team = instance.passthrough_team + team_before_update = team.__dict__.copy() + project_before_update = instance.__dict__.copy() + + if ( + "session_replay_config" in validated_data + and validated_data["session_replay_config"] is not None + and team.session_replay_config is not None + ): + # for session_replay_config and its top level keys we merge existing settings with new settings + # this way we don't always have to receive the entire settings object to change one setting + # so for each key in validated_data["session_replay_config"] we merge it with the existing settings + # and then merge any top level keys that weren't provided + + for key, value in validated_data["session_replay_config"].items(): + if key in team.session_replay_config: + # if they're both dicts then we merge them, otherwise, the new value overwrites the old + if isinstance(team.session_replay_config[key], dict) and isinstance( + validated_data["session_replay_config"][key], dict + ): + validated_data["session_replay_config"][key] = { + **team.session_replay_config[key], # existing values + **value, # and new values on top + } + + # then also add back in any keys that exist but are not in the provided data + validated_data["session_replay_config"] = { + **team.session_replay_config, + **validated_data["session_replay_config"], + } + + should_team_be_saved_too = False + for attr, value in validated_data.items(): + if attr in self.Meta.team_passthrough_fields: + should_team_be_saved_too = True + setattr(team, attr, value) + else: + if attr == "name": # `name` should be updated on _both_ the Project and Team + should_team_be_saved_too = True + 
setattr(team, attr, value) + setattr(instance, attr, value) + + instance.save() + if should_team_be_saved_too: + team.save() + + team_after_update = team.__dict__.copy() + project_after_update = instance.__dict__.copy() + team_changes = dict_changes_between("Team", team_before_update, team_after_update, use_field_exclusions=True) + project_changes = dict_changes_between( + "Project", project_before_update, project_after_update, use_field_exclusions=True + ) + + if team_changes: + log_activity( + organization_id=cast(UUIDT, instance.organization_id), + team_id=instance.pk, + user=cast(User, self.context["request"].user), + was_impersonated=is_impersonated_session(self.context["request"]), + scope="Team", + item_id=instance.pk, + activity="updated", + detail=Detail( + name=str(team.name), + changes=team_changes, + ), + ) + if project_changes: + log_activity( + organization_id=cast(UUIDT, instance.organization_id), + team_id=instance.pk, + user=cast(User, self.context["request"].user), + was_impersonated=is_impersonated_session(self.context["request"]), + scope="Project", + item_id=instance.pk, + activity="updated", + detail=Detail( + name=str(instance.name), + changes=project_changes, + ), + ) + + return instance + + +class ProjectViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): + """ + Projects for the current organization.
+ """ + + scope_object: APIScopeObjectOrNotSupported = "project" + serializer_class = ProjectSerializer + queryset = Project.objects.all().select_related("organization").prefetch_related("teams") + lookup_field = "id" + ordering = "-created_by" + + def safely_get_queryset(self, queryset): + # IMPORTANT: This is actually what ensures that a user cannot read/update a project for which they don't have permission + visible_teams_ids = UserPermissions(cast(User, self.request.user)).team_ids_visible_for_user + return queryset.filter(id__in=visible_teams_ids) + + def get_serializer_class(self) -> type[serializers.BaseSerializer]: + if self.action == "list": + return ProjectBasicSerializer + return super().get_serializer_class() + + # NOTE: Team permissions are somewhat complex so we override the underlying viewset's get_permissions method + def dangerously_get_permissions(self) -> list: + """ + Special permissions handling for create requests as the organization is inferred from the current user. + """ + + permissions: list = [ + IsAuthenticated, + APIScopePermission, + PremiumMultiProjectPermissions, + *self.permission_classes, + ] + + # Return early for non-actions (e.g. 
OPTIONS) + if self.action: + if self.action == "create": + if "is_demo" not in self.request.data or not self.request.data["is_demo"]: + permissions.append(OrganizationAdminWritePermissions) + else: + permissions.append(OrganizationMemberPermissions) + elif self.action != "list": + # Skip TeamMemberAccessPermission for list action, as list is serialized with limited TeamBasicSerializer + permissions.append(TeamMemberLightManagementPermission) + + return [permission() for permission in permissions] + + def safely_get_object(self, queryset): + lookup_value = self.kwargs[self.lookup_field] + if lookup_value == "@current": + team = getattr(self.request.user, "team", None) + if team is None: + raise exceptions.NotFound() + return team.project + + filter_kwargs = {self.lookup_field: lookup_value} + try: + project = get_object_or_404(queryset, **filter_kwargs) + except ValueError as error: + raise exceptions.ValidationError(str(error)) + return project + + # :KLUDGE: Exposed for compatibility reasons for permission classes. 
+ @property + def team(self): + project = self.get_object() + return project.teams.get(id=project.id) + + def perform_destroy(self, project: Project): + project_id = project.pk + organization_id = project.organization_id + project_name = project.name + + user = cast(User, self.request.user) + + teams: list[Team] = list(project.teams.all()) + delete_bulky_postgres_data(team_ids=[team.id for team in teams]) + delete_batch_exports(team_ids=[team.pk for team in teams]) + + with mute_selected_signals(): + super().perform_destroy(project) + + # Once the project is deleted, queue deletion of associated data + AsyncDeletion.objects.bulk_create( + [ + AsyncDeletion( + deletion_type=DeletionType.Team, + team_id=team.id, + key=str(team.id), + created_by=user, + ) + for team in teams + ], + ignore_conflicts=True, + ) + + for team in teams: + log_activity( + organization_id=cast(UUIDT, organization_id), + team_id=team.pk, + user=user, + was_impersonated=is_impersonated_session(self.request), + scope="Team", + item_id=team.pk, + activity="deleted", + detail=Detail(name=str(team.name)), + ) + report_user_action(user, f"team deleted", team=team) + log_activity( + organization_id=cast(UUIDT, organization_id), + team_id=project_id, + user=user, + was_impersonated=is_impersonated_session(self.request), + scope="Project", + item_id=project_id, + activity="deleted", + detail=Detail(name=str(project_name)), + ) + report_user_action( + user, + f"project deleted", + {"project_name": project_name}, + team=teams[0], + ) + + @action( + methods=["PATCH"], + detail=True, + # Only ADMIN or higher users are allowed to access this project + permission_classes=[TeamMemberStrictManagementPermission], + ) + def reset_token(self, request: request.Request, id: str, **kwargs) -> response.Response: + project = self.get_object() + project.passthrough_team.reset_token_and_save( + user=request.user, is_impersonated_session=is_impersonated_session(request) + ) + return 
response.Response(ProjectSerializer(project, context=self.get_serializer_context()).data) + + @action( + methods=["GET"], + detail=True, + permission_classes=[IsAuthenticated], + ) + def is_generating_demo_data(self, request: request.Request, id: str, **kwargs) -> response.Response: + project = self.get_object() + return response.Response({"is_generating_demo_data": project.passthrough_team.get_is_generating_demo_data()}) + + @action(methods=["GET"], detail=True) + def activity(self, request: request.Request, **kwargs): + # TODO: This is currently the same as in TeamViewSet - we should rework for the Project scope + limit = int(request.query_params.get("limit", "10")) + page = int(request.query_params.get("page", "1")) + + project = self.get_object() + + activity_page = load_activity( + scope="Team", + team_id=project.pk, + item_ids=[str(project.pk)], + limit=limit, + page=page, + ) + return activity_page_response(activity_page, limit, page, request) + + @cached_property + def user_permissions(self): + project = self.get_object() if self.action == "reset_token" else None + team = project.passthrough_team if project else None + return UserPermissions(cast(User, self.request.user), team) + + +class RootProjectViewSet(ProjectViewSet): + # NOTE: We don't want people creating projects via the "current_organization" concept, but rather specify the org ID + # in the URL - hence this is hidden from the API docs, but used in the app + hide_api_docs = True diff --git a/posthog/api/routing.py b/posthog/api/routing.py index d2f902e30b5b4..0f1784b3ac4a2 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -38,32 +38,6 @@ class DefaultRouterPlusPlus(ExtendedDefaultRouter): """DefaultRouter with optional trailing slash and drf-extensions nesting.""" - # This is an override because of changes in djangorestframework 3.15, which is required for python 3.11 - # changes taken from and explained here: 
https://github.com/nautobot/nautobot/pull/5546/files#diff-81850a2ccad5814aab4f477d447f85cc0a82e9c10fd88fd72327cda51a750471R30 - def _register(self, prefix, viewset, basename=None): - """ - Override DRF's BaseRouter.register() to bypass an unnecessary restriction added in version 3.15.0. - (Reference: https://github.com/encode/django-rest-framework/pull/8438) - """ - if basename is None: - basename = self.get_default_basename(viewset) - - # DRF: - # if self.is_already_registered(basename): - # msg = (f'Router with basename "{basename}" is already registered. ' - # f'Please provide a unique basename for viewset "{viewset}"') - # raise ImproperlyConfigured(msg) - # - # We bypass this because we have at least one use case (/api/extras/jobs/) where we are *intentionally* - # registering two viewsets with the same basename, but have carefully defined them so as not to conflict. - - # resuming standard DRF code... - self.registry.append((prefix, viewset, basename)) - - # invalidate the urls cache - if hasattr(self, "_urls"): - del self._urls - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.trailing_slash = r"/?" @@ -72,7 +46,7 @@ def __init__(self, *args, **kwargs): # NOTE: Previously known as the StructuredViewSetMixin # IMPORTANT: Almost all viewsets should inherit from this mixin. 
It should be the first thing it inherits from to ensure # that typing works as expected -class TeamAndOrgViewSetMixin(_GenericViewSet): +class TeamAndOrgViewSetMixin(_GenericViewSet): # TODO: Rename to include "Env" in name # This flag disables nested routing handling, reverting to the old request.user.team behavior # Allows for a smoother transition from the old flat API structure to the newer nested one param_derived_from_user_current_team: Optional[Literal["team_id", "project_id"]] = None diff --git a/posthog/api/shared.py b/posthog/api/shared.py index e37fe9de29297..fc849eb2e2b42 100644 --- a/posthog/api/shared.py +++ b/posthog/api/shared.py @@ -2,12 +2,17 @@ This module contains serializers that are used across other serializers for nested representations. -from typing import Optional +import copy +from typing import Any, Optional from rest_framework import serializers from posthog.models import Organization, Team, User from posthog.models.organization import OrganizationMembership +from posthog.models.project import Project +from rest_framework.fields import SkipField +from rest_framework.relations import PKOnlyObject +from rest_framework.utils import model_meta class UserBasicSerializer(serializers.ModelSerializer): @@ -36,6 +41,112 @@ def get_hedgehog_config(self, user: User) -> Optional[dict]: return None +class ProjectBasicSerializer(serializers.ModelSerializer): + """ + Serializer for `Project` model with minimal attributes to speed up loading and transfer times. + Also used for nested serializers.
+ """ + + class Meta: + model = Project + fields = ( + "id", + "uuid", # Compat with TeamSerializer + "organization", + "api_token", # Compat with TeamSerializer + "name", + "completed_snippet_onboarding", # Compat with TeamSerializer + "has_completed_onboarding_for", # Compat with TeamSerializer + "ingested_event", # Compat with TeamSerializer + "is_demo", # Compat with TeamSerializer + "timezone", # Compat with TeamSerializer + "access_control", # Compat with TeamSerializer + ) + read_only_fields = fields + team_passthrough_fields = { + "uuid", + "api_token", + "completed_snippet_onboarding", + "has_completed_onboarding_for", + "ingested_event", + "is_demo", + "timezone", + "access_control", + } + + def get_fields(self): + declared_fields = copy.deepcopy(self._declared_fields) + + info = model_meta.get_field_info(Project) + team_info = model_meta.get_field_info(Team) + for field_name, field in team_info.fields.items(): + if field_name in info.fields: + continue + info.fields[field_name] = field + info.fields_and_pk[field_name] = field + for field_name, relation in team_info.forward_relations.items(): + if field_name in info.forward_relations: + continue + info.forward_relations[field_name] = relation + info.relations[field_name] = relation + for accessor_name, relation in team_info.reverse_relations.items(): + if accessor_name in info.reverse_relations: + continue + info.reverse_relations[accessor_name] = relation + info.relations[accessor_name] = relation + + field_names = self.get_field_names(declared_fields, info) + + extra_kwargs = self.get_extra_kwargs() + extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(field_names, declared_fields, extra_kwargs) + + fields = {} + for field_name in field_names: + if field_name in declared_fields: + fields[field_name] = declared_fields[field_name] + continue + extra_field_kwargs = extra_kwargs.get(field_name, {}) + source = extra_field_kwargs.get("source", "*") + if source == "*": + source = field_name + 
field_class, field_kwargs = self.build_field(source, info, model_class=Project, nested_depth=0) + field_kwargs = self.include_extra_kwargs(field_kwargs, extra_field_kwargs) + fields[field_name] = field_class(**field_kwargs) + fields.update(hidden_fields) + return fields + + def build_field(self, field_name, info, model_class, nested_depth): + if field_name in self.Meta.team_passthrough_fields: + model_class = Team + return super().build_field(field_name, info, model_class, nested_depth) + + def to_representation(self, instance): + """ + Object instance -> Dict of primitive datatypes. Basically copied from Serializer.to_representation + """ + ret: dict[str, Any] = {} + fields = self._readable_fields + + for field in fields: + assert field.field_name is not None + try: + attribute_source = instance + if field.field_name in self.Meta.team_passthrough_fields: + # This branch is the only material change from the original method + attribute_source = instance.passthrough_team + attribute = field.get_attribute(attribute_source) + except SkipField: + continue + + check_for_none = attribute.pk if isinstance(attribute, PKOnlyObject) else attribute + if check_for_none is None: + ret[field.field_name] = None + else: + ret[field.field_name] = field.to_representation(attribute) + + return ret + + class TeamBasicSerializer(serializers.ModelSerializer): """ Serializer for `Team` model with minimal attributes to speeed up loading and transfer times. 
diff --git a/posthog/api/signup.py b/posthog/api/signup.py index 7cda79d66195d..3847999ec551d 100644 --- a/posthog/api/signup.py +++ b/posthog/api/signup.py @@ -161,7 +161,7 @@ def enter_demo(self, validated_data) -> User: return self._user def create_team(self, organization: Organization, user: User) -> Team: - return Team.objects.create_with_data(user=user, organization=organization) + return Team.objects.create_with_data(initiating_user=user, organization=organization) def to_representation(self, instance) -> dict: data = UserBasicSerializer(instance=instance).data diff --git a/posthog/api/team.py b/posthog/api/team.py index 00584574186ec..34349958a6e88 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -3,22 +3,20 @@ from typing import Any, Optional, cast from datetime import timedelta -from django.core.cache import cache from django.shortcuts import get_object_or_404 from loginas.utils import is_impersonated_session from posthog.jwt import PosthogJwtAudience, encode_jwt from rest_framework.permissions import BasePermission, IsAuthenticated -from rest_framework import exceptions, request, response, serializers, viewsets from posthog.api.utils import action +from rest_framework import exceptions, request, response, serializers, viewsets from posthog.api.geoip import get_geoip_properties from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import TeamBasicSerializer from posthog.constants import AvailableFeature from posthog.event_usage import report_user_action -from posthog.models import InsightCachingState, Team, User +from posthog.models import Team, User from posthog.models.activity_logging.activity_log import ( - Change, Detail, dict_changes_between, load_activity, @@ -30,9 +28,8 @@ from posthog.models.organization import OrganizationMembership from posthog.models.personal_api_key import APIScopeObjectOrNotSupported from posthog.models.signals import mute_selected_signals -from posthog.models.team.team import 
set_team_in_cache from posthog.models.team.util import delete_batch_exports, delete_bulky_postgres_data -from posthog.models.utils import UUIDT, generate_random_token_project +from posthog.models.utils import UUIDT from posthog.permissions import ( CREATE_METHODS, APIScopePermission, @@ -42,15 +39,14 @@ TeamMemberStrictManagementPermission, get_organization_from_view, ) -from posthog.tasks.demo_create_data import create_data_for_demo_team from posthog.user_permissions import UserPermissions, UserPermissionsSerializerMixin from posthog.utils import get_ip_address, get_week_start_for_country_code -class PremiumMultiProjectPermissions(BasePermission): +class PremiumMultiProjectPermissions(BasePermission): # TODO: Rename to include "Env" in name """Require user to have all necessary premium features on their plan for create access to the endpoint.""" - message = "You must upgrade your PostHog plan to be able to create and manage multiple projects." + message = "You must upgrade your PostHog plan to be able to create and manage multiple projects or environments." 
def has_permission(self, request: request.Request, view) -> bool: if request.method in CREATE_METHODS: @@ -190,7 +186,7 @@ def get_effective_membership_level(self, team: Team) -> Optional[OrganizationMem return self.user_permissions.team(team).effective_membership_level def get_has_group_types(self, team: Team) -> bool: - return GroupTypeMapping.objects.filter(team=team).exists() + return GroupTypeMapping.objects.filter(team_id=team.id).exists() def get_live_events_token(self, team: Team) -> Optional[str]: return encode_jwt( @@ -199,7 +195,8 @@ def get_live_events_token(self, team: Team) -> Optional[str]: PosthogJwtAudience.LIVESTREAM, ) - def validate_session_recording_linked_flag(self, value) -> dict | None: + @staticmethod + def validate_session_recording_linked_flag(value) -> dict | None: if value is None: return None @@ -217,7 +214,8 @@ def validate_session_recording_linked_flag(self, value) -> dict | None: return value - def validate_session_recording_network_payload_capture_config(self, value) -> dict | None: + @staticmethod + def validate_session_recording_network_payload_capture_config(value) -> dict | None: if value is None: return None @@ -231,7 +229,8 @@ def validate_session_recording_network_payload_capture_config(self, value) -> di return value - def validate_session_replay_config(self, value) -> dict | None: + @staticmethod + def validate_session_replay_config(value) -> dict | None: if value is None: return None @@ -245,11 +244,12 @@ def validate_session_replay_config(self, value) -> dict | None: ) if "ai_config" in value: - self.validate_session_replay_ai_summary_config(value["ai_config"]) + TeamSerializer.validate_session_replay_ai_summary_config(value["ai_config"]) return value - def validate_session_replay_ai_summary_config(self, value: dict | None) -> dict | None: + @staticmethod + def validate_session_replay_ai_summary_config(value: dict | None) -> dict | None: if value is not None: if not isinstance(value, dict): raise 
exceptions.ValidationError("Must provide a dictionary or None.") @@ -269,44 +269,12 @@ def validate_session_replay_ai_summary_config(self, value: dict | None) -> dict return value def validate(self, attrs: Any) -> Any: - if "primary_dashboard" in attrs and attrs["primary_dashboard"].team != self.instance: - raise exceptions.PermissionDenied("Dashboard does not belong to this team.") - - if "access_control" in attrs: - # Only organization-wide admins and above should be allowed to switch the project between open and private - # If a project-only admin who is only an org member disabled this it, they wouldn't be able to reenable it - request = self.context["request"] - if isinstance(self.instance, Team): - organization_id = self.instance.organization_id - else: - organization_id = self.context["view"].organization - org_membership: OrganizationMembership = OrganizationMembership.objects.only("level").get( - organization_id=organization_id, user=request.user - ) - if org_membership.level < OrganizationMembership.Level.ADMIN: - raise exceptions.PermissionDenied("Your organization access level is insufficient.") - - if "autocapture_exceptions_errors_to_ignore" in attrs: - if not isinstance(attrs["autocapture_exceptions_errors_to_ignore"], list): - raise exceptions.ValidationError( - "Must provide a list for field: autocapture_exceptions_errors_to_ignore." - ) - for error in attrs["autocapture_exceptions_errors_to_ignore"]: - if not isinstance(error, str): - raise exceptions.ValidationError( - "Must provide a list of strings to field: autocapture_exceptions_errors_to_ignore." - ) - - if len(json.dumps(attrs["autocapture_exceptions_errors_to_ignore"])) > 300: - raise exceptions.ValidationError( - "Field autocapture_exceptions_errors_to_ignore must be less than 300 characters. Complex config should be provided in posthog-js initialization." 
- ) + attrs = validate_team_attrs(attrs, self.context["view"], self.context["request"], self.instance) return super().validate(attrs) def create(self, validated_data: dict[str, Any], **kwargs) -> Team: serializers.raise_errors_on_nested_writes("create", self, validated_data) request = self.context["request"] - organization = self.context["view"].organization # Use the org we used to validate permissions if "week_start_day" not in validated_data: country_code = get_geoip_properties(get_ip_address(request)).get("$geoip_country_code", None) @@ -316,20 +284,18 @@ def create(self, validated_data: dict[str, Any], **kwargs) -> Team: # but ClickHouse doesn't support Saturday as the first day of the week, so we fall back to Sunday validated_data["week_start_day"] = 1 if week_start_day_for_user_ip_location == 1 else 0 - if validated_data.get("is_demo", False): - team = Team.objects.create(**validated_data, organization=organization) - cache_key = f"is_generating_demo_data_{team.pk}" - cache.set(cache_key, "True") # create an item in the cache that we can use to see if the demo data is ready - create_data_for_demo_team.delay(team.pk, request.user.pk, cache_key) - else: - team = Team.objects.create_with_data(**validated_data, organization=organization) + team = Team.objects.create_with_data( + initiating_user=self.context["request"].user, + organization=self.context["view"].organization, + **validated_data, + ) request.user.current_team = team request.user.team = request.user.current_team # Update cached property request.user.save() log_activity( - organization_id=organization.id, + organization_id=team.organization_id, team_id=team.pk, user=request.user, was_impersonated=is_impersonated_session(request), @@ -341,22 +307,9 @@ def create(self, validated_data: dict[str, Any], **kwargs) -> Team: return team - def _clear_team_insight_caching_states(self, team: Team) -> None: - # TODO: Remove this method: - # 1. 
It only clear the cache for saved insights, queries not linked to one are being ignored here - # 2. We should anyway 100% be relying on cache keys being different for materially different queries, instead of - # on remembering to call this method when project settings change. We probably already are in the clear here! - hashes = InsightCachingState.objects.filter(team=team).values_list("cache_key", flat=True) - cache.delete_many(hashes) - def update(self, instance: Team, validated_data: dict[str, Any]) -> Team: before_update = instance.__dict__.copy() - if ("timezone" in validated_data and validated_data["timezone"] != instance.timezone) or ( - "modifiers" in validated_data and validated_data["modifiers"] != instance.modifiers - ): - self._clear_team_insight_caching_states(instance) - if ( "session_replay_config" in validated_data and validated_data["session_replay_config"] is not None @@ -409,7 +362,7 @@ class TeamViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): Projects for the current organization. 
""" - scope_object: APIScopeObjectOrNotSupported = "project" + scope_object: APIScopeObjectOrNotSupported = "project" # TODO: Change to `environment` on environments rollout serializer_class = TeamSerializer queryset = Team.objects.all().select_related("organization") lookup_field = "id" @@ -507,7 +460,7 @@ def perform_destroy(self, team: Team): activity="deleted", detail=Detail(name=str(team_name)), ) - # TRICKY: We pass in Team here as otherwise the access to "current_team" can fail if it was deleted + # TRICKY: We pass in `team` here as access to `user.current_team` can fail if it was deleted report_user_action(user, f"team deleted", team=team) @action( @@ -518,31 +471,7 @@ def perform_destroy(self, team: Team): ) def reset_token(self, request: request.Request, id: str, **kwargs) -> response.Response: team = self.get_object() - old_token = team.api_token - team.api_token = generate_random_token_project() - team.save() - - log_activity( - organization_id=team.organization_id, - team_id=team.pk, - user=cast(User, request.user), - was_impersonated=is_impersonated_session(request), - scope="Team", - item_id=team.pk, - activity="updated", - detail=Detail( - name=str(team.name), - changes=[ - Change( - type="Team", - action="changed", - field="api_token", - ) - ], - ), - ) - - set_team_in_cache(old_token, None) + team.reset_token_and_save(user=request.user, is_impersonated_session=is_impersonated_session(request)) return response.Response(TeamSerializer(team, context=self.get_serializer_context()).data) @action( @@ -552,8 +481,7 @@ def reset_token(self, request: request.Request, id: str, **kwargs) -> response.R ) def is_generating_demo_data(self, request: request.Request, id: str, **kwargs) -> response.Response: team = self.get_object() - cache_key = f"is_generating_demo_data_{team.pk}" - return response.Response({"is_generating_demo_data": cache.get(cache_key) == "True"}) + return response.Response({"is_generating_demo_data": team.get_is_generating_demo_data()}) 
@action(methods=["GET"], detail=True) def activity(self, request: request.Request, **kwargs): @@ -578,7 +506,38 @@ def user_permissions(self): class RootTeamViewSet(TeamViewSet): - # NOTE: We don't want people managing projects via the "current_organization" concept. - # Rather specifying the org ID at the top level - we still support it for backwards compat but don't document it anymore. - + # NOTE: We don't want people creating environments via the "current_organization"/"current_project" concept, but + # rather specify the org ID and project ID in the URL - hence this is hidden from the API docs, but used in the app hide_api_docs = True + + +def validate_team_attrs( + attrs: dict[str, Any], view: TeamAndOrgViewSetMixin, request: request.Request, instance +) -> dict[str, Any]: + if "primary_dashboard" in attrs and attrs["primary_dashboard"].team_id != instance.id: + raise exceptions.PermissionDenied("Dashboard does not belong to this team.") + + if "access_control" in attrs: + assert isinstance(request.user, User) + # Only organization-wide admins and above should be allowed to switch the project between open and private + # If a project-only admin who is only an org member disabled this it, they wouldn't be able to reenable it + org_membership: OrganizationMembership = OrganizationMembership.objects.only("level").get( + organization_id=instance.organization_id, user=request.user + ) + if org_membership.level < OrganizationMembership.Level.ADMIN: + raise exceptions.PermissionDenied("Your organization access level is insufficient.") + + if "autocapture_exceptions_errors_to_ignore" in attrs: + if not isinstance(attrs["autocapture_exceptions_errors_to_ignore"], list): + raise exceptions.ValidationError("Must provide a list for field: autocapture_exceptions_errors_to_ignore.") + for error in attrs["autocapture_exceptions_errors_to_ignore"]: + if not isinstance(error, str): + raise exceptions.ValidationError( + "Must provide a list of strings to field: 
autocapture_exceptions_errors_to_ignore." + ) + + if len(json.dumps(attrs["autocapture_exceptions_errors_to_ignore"])) > 300: + raise exceptions.ValidationError( + "Field autocapture_exceptions_errors_to_ignore must be less than 300 characters. Complex config should be provided in posthog-js initialization." + ) + return attrs diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index 9b470bb936f43..fb629e787c4f6 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -1,19 +1,7 @@ # serializer version: 1 # name: TestAPIDocsSchema.test_api_docs_generation_warnings_snapshot list([ - '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_metadata". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_member_count". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet]: could not derive type of path parameter "organization_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"', - '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsViewSet > PluginSerializer]: unable to resolve type hint for function "get_hog_function_migration_available". 
Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "organization_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_members". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_associated_flags". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleMembershipViewSet]: could not derive type of path parameter "organization_id" because model "ee.models.role.RoleMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.action.action.Action" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/activity_log.py: Warning [ActivityLogViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.activity_logging.activity_log.ActivityLog" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/annotation.py: Warning [AnnotationsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.annotation.Annotation" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/plugin_log_entry.py: Warning [PluginLogEntryViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', "/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Error [AppMetricsViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AppMetricsViewSet' should either include a `serializer_class` attribute, or override the `get_serializer_class()` method.)", '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [AppMetricsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Error [HistoricalExportsAppMetricsViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. 
Either way you may want to add a serializer_class (or method). Ignoring view for now.', @@ -21,15 +9,8 @@ '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportRunViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExportRun" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/cohort.py: Warning [CohortViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.cohort.cohort.Cohort" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard_templates.py: Warning [DashboardTemplateViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard_templates.DashboardTemplate" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard.py: Warning [DashboardsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard.Dashboard" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/early_access_feature.py: Warning [EarlyAccessFeatureViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.early_access_feature.EarlyAccessFeature" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - "/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Error [EventDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. 
(Exception: 'AnonymousUser' object has no attribute 'organization')", - '/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Warning [EventDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.event_definition.EventDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_id". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_distinct_id". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -40,29 +21,11 @@ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements_chain". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. 
) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiments.py: Warning [ClickhouseExperimentsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.Experiment" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/explicit_team_member.py: Warning [ExplicitTeamMemberViewSet]: could not derive type of path parameter "project_id" because model "ee.models.explicit_team_membership.ExplicitTeamMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.exported_asset.ExportedAsset" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "has_content". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "filename". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/feature_flag.py: Warning [FeatureFlagViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feature_flag.feature_flag.FeatureFlag" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/api/feature_flag_role_access.py: Warning [FeatureFlagRoleAccessViewSet]: could not derive type of path parameter "project_id" because model "ee.models.feature_flag_role_access.FeatureFlagRoleAccess" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [ClickhouseGroupsView]: could not derive type of path parameter "project_id" because model "posthog.models.group.group.Group" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [ClickhouseGroupsTypesView]: could not derive type of path parameter "project_id" because model "posthog.models.group_type_mapping.GroupTypeMapping" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [ClickhouseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_cache_target_age". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_columns". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_is_cached". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_query_status". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". 
Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [ClickhouseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_types". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/notebook.py: Warning [NotebookViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.notebook.notebook.Notebook" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [PersonViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.person.person.Person" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group.group.Group" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/clickhouse/views/person.py: Warning [EnterprisePersonViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.person.person.Person" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_config". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -73,28 +36,67 @@ '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineImportAppsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineTransformationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PluginConfigViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/plugin_log_entry.py: Warning [PluginLogEntryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/plugin_log_entry.py: Warning [PluginLogEntryViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. 
Defaulting to "string".', - "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", - '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). 
Ignoring view for now.', - '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: could not resolve field on model with path "viewed". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: SessionRecording has no field named \'viewed\')', '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_metadata". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_member_count". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet]: could not derive type of path parameter "organization_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsViewSet > PluginSerializer]: unable to resolve type hint for function "get_hog_function_migration_available". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectSerializer]: could not resolve field on model with path "person_on_events_querying_enabled". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: Project has no field named \'person_on_events_querying_enabled\')', + '/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectSerializer]: could not resolve field on model with path "default_modifiers". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: Project has no field named \'default_modifiers\')', + '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "organization_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. 
) or annotating the parameter type with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_members". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_associated_flags". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleMembershipViewSet]: could not derive type of path parameter "organization_id" because model "ee.models.role.RoleMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.action.action.Action" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/activity_log.py: Warning [ActivityLogViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.activity_logging.activity_log.ActivityLog" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/annotation.py: Warning [AnnotationsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.annotation.Annotation" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/cohort.py: Warning [CohortViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.cohort.cohort.Cohort" contained no such field. 
Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard_templates.py: Warning [DashboardTemplateViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard_templates.DashboardTemplate" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard.py: Warning [DashboardsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard.Dashboard" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/early_access_feature.py: Warning [EarlyAccessFeatureViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.early_access_feature.EarlyAccessFeature" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + "/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Error [EventDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", + '/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Warning [EventDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.event_definition.EventDefinition" contained no such field. 
Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiments.py: Warning [EnterpriseExperimentsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.Experiment" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/feature_flag.py: Warning [FeatureFlagViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feature_flag.feature_flag.FeatureFlag" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/api/feature_flag_role_access.py: Warning [FeatureFlagRoleAccessViewSet]: could not derive type of path parameter "project_id" because model "ee.models.feature_flag_role_access.FeatureFlagRoleAccess" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsTypesViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group_type_mapping.GroupTypeMapping" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [EnterpriseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_cache_target_age". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_columns". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_is_cached". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_query_status". 
Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_types". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/notebook.py: Warning [NotebookViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.notebook.notebook.Notebook" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", + '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feedback.survey.Survey" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet > SurveySerializer]: unable to resolve type hint for function "get_conditions". Consider using a type hint or @extend_schema_field. Defaulting to string.', 'Warning: encountered multiple names for the same choice set (HrefMatchingEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.', 'Warning: encountered multiple names for the same choice set (EffectivePrivilegeLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.', 'Warning: encountered multiple names for the same choice set (MembershipLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.', + 'Warning: operationId "environments_app_metrics_historical_exports_retrieve" has collisions [(\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. 
resolving with numeral suffixes.', + 'Warning: operationId "environments_persons_activity_retrieve" has collisions [(\'/api/environments/{project_id}/persons/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/persons/activity/\', \'get\')]. resolving with numeral suffixes.', 'Warning: operationId "list" has collisions [(\'/api/organizations/\', \'get\'), (\'/api/organizations/{organization_id}/projects/\', \'get\')]. resolving with numeral suffixes.', 'Warning: operationId "create" has collisions [(\'/api/organizations/\', \'post\'), (\'/api/organizations/{organization_id}/projects/\', \'post\')]. resolving with numeral suffixes.', 'Warning: operationId "retrieve" has collisions [(\'/api/organizations/{id}/\', \'get\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'get\')]. resolving with numeral suffixes.', diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index d375d41ab6314..2d1f760a286fb 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -64,6 +64,244 @@ ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.10 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + 
"posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + 
"posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."team_id" = 2 + AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.11 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + 
"posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE ("posthog_team"."project_id" = 2 + AND "posthog_team"."id" = 2) + LIMIT 21 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.12 + ''' + SELECT 1 AS "a" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + LIMIT 1 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.13 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + 
"posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE ("posthog_team"."project_id" = 2 + AND "posthog_team"."id" = 2) + LIMIT 21 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.14 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."is_active", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."theme_mode", 
+ "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 2 + LIMIT 21 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.15 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -86,7 +324,7 @@ AND "posthog_featureflag"."team_id" = 2) ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.11 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.16 ''' SELECT "posthog_pluginconfig"."id", "posthog_pluginconfig"."web_token", @@ -176,6 +414,75 @@ ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.4 + ''' + SELECT "posthog_project"."id", + "posthog_project"."organization_id", + "posthog_project"."name", + "posthog_project"."created_at" + FROM "posthog_project" + WHERE "posthog_project"."id" = 2 + LIMIT 21 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.5 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + 
"posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE ("posthog_team"."project_id" = 2 + AND "posthog_team"."id" = 2) + LIMIT 21 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.6 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -207,7 +514,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.5 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.7 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -239,7 +546,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: 
TestDecide.test_decide_doesnt_error_out_when_database_is_down.6 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.8 ''' SELECT "posthog_team"."id", "posthog_team"."organization_id", @@ -248,26 +555,9 @@ WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.7 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.9 ''' - SELECT "posthog_hogfunction"."id", - "posthog_hogfunction"."team_id", - "posthog_hogfunction"."name", - "posthog_hogfunction"."description", - "posthog_hogfunction"."created_at", - "posthog_hogfunction"."created_by_id", - "posthog_hogfunction"."deleted", - "posthog_hogfunction"."updated_at", - "posthog_hogfunction"."enabled", - "posthog_hogfunction"."icon_url", - "posthog_hogfunction"."hog", - "posthog_hogfunction"."bytecode", - "posthog_hogfunction"."inputs_schema", - "posthog_hogfunction"."inputs", - "posthog_hogfunction"."filters", - "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id", - "posthog_team"."id", + SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", "posthog_team"."project_id", @@ -315,58 +605,11 @@ "posthog_team"."modifiers", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_hogfunction" - INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."team_id" = 2 - AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) - 
''' -# --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.8 - ''' - SELECT 1 AS "a" - FROM "posthog_grouptypemapping" - WHERE "posthog_grouptypemapping"."team_id" = 2 - LIMIT 1 - ''' -# --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.9 - ''' - SELECT "posthog_user"."id", - "posthog_user"."password", - "posthog_user"."last_login", - "posthog_user"."first_name", - "posthog_user"."last_name", - "posthog_user"."is_staff", - "posthog_user"."is_active", - "posthog_user"."date_joined", - "posthog_user"."uuid", - "posthog_user"."current_organization_id", - "posthog_user"."current_team_id", - "posthog_user"."email", - "posthog_user"."pending_email", - "posthog_user"."temporary_token", - "posthog_user"."distinct_id", - "posthog_user"."is_email_verified", - "posthog_user"."has_seen_product_intro_for", - "posthog_user"."strapi_id", - "posthog_user"."theme_mode", - "posthog_user"."partial_notification_settings", - "posthog_user"."anonymize_data", - "posthog_user"."toolbar_mode", - "posthog_user"."hedgehog_config", - "posthog_user"."events_column_config", - "posthog_user"."email_opt_in" - FROM "posthog_user" - WHERE "posthog_user"."id" = 2 + FROM "posthog_team" + WHERE ("posthog_team"."project_id" = 2 + AND "posthog_team"."id" = 2) LIMIT 21 ''' # --- diff --git a/posthog/api/test/__snapshots__/test_event.ambr b/posthog/api/test/__snapshots__/test_event.ambr index a8aaa175a427e..cae4ce7b8b045 100644 --- a/posthog/api/test/__snapshots__/test_event.ambr +++ b/posthog/api/test/__snapshots__/test_event.ambr @@ -1,5 +1,17 @@ # serializer version: 1 # name: TestEvents.test_event_property_values + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestEvents.test_event_property_values.1 ''' /* user_id:0 
request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -11,7 +23,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.1 +# name: TestEvents.test_event_property_values.2 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -25,7 +37,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.2 +# name: TestEvents.test_event_property_values.3 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -39,7 +51,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.3 +# name: TestEvents.test_event_property_values.4 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -53,7 +65,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.4 +# name: TestEvents.test_event_property_values.5 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -68,7 +80,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.5 +# name: TestEvents.test_event_property_values.6 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -84,7 +96,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values.6 +# name: TestEvents.test_event_property_values.7 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') @@ -100,6 +112,18 @@ ''' # --- # name: TestEvents.test_event_property_values_materialized + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() 
+ GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestEvents.test_event_property_values_materialized.1 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -111,7 +135,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.1 +# name: TestEvents.test_event_property_values_materialized.2 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -125,7 +149,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.2 +# name: TestEvents.test_event_property_values_materialized.3 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -139,7 +163,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.3 +# name: TestEvents.test_event_property_values_materialized.4 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -153,7 +177,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.4 +# name: TestEvents.test_event_property_values_materialized.5 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -168,7 +192,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.5 +# name: TestEvents.test_event_property_values_materialized.6 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" @@ -184,7 +208,7 @@ LIMIT 10 ''' # --- -# name: TestEvents.test_event_property_values_materialized.6 +# name: TestEvents.test_event_property_values_materialized.7 ''' /* user_id:0 request:_snapshot_ */ SELECT DISTINCT "mat_random_prop" diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 95fdd27717c41..2bd47011c93c8 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -2665,6 +2665,7 @@ def test_short_circuited_team(self, *args): ], "has_completed_onboarding_for": {"product_analytics": True}, }, + initiating_user=self.user, ) with 
self.settings(DECIDE_SHORT_CIRCUITED_TEAM_IDS=[short_circuited_team.id]): response = self._post_decide( diff --git a/posthog/api/test/test_project.py b/posthog/api/test/test_project.py new file mode 100644 index 0000000000000..a3da3c81f9ce0 --- /dev/null +++ b/posthog/api/test/test_project.py @@ -0,0 +1,11 @@ +from posthog.api.test.test_team import EnvironmentToProjectRewriteClient, team_api_test_factory + + +class TestProjectAPI(team_api_test_factory()): # type: ignore + """ + We inherit from TestTeamAPI, as previously /api/projects/ referred to the Team model, which used to mean "project". + Now as Team means "environment" and Project is separate, we must ensure backward compatibility of /api/projects/. + At the same time, this class is where we can continue adding `Project`-specific API tests. + """ + + client_class = EnvironmentToProjectRewriteClient diff --git a/posthog/api/test/test_routing.py b/posthog/api/test/test_routing.py index 76abb2693cf92..24063d1a655e5 100644 --- a/posthog/api/test/test_routing.py +++ b/posthog/api/test/test_routing.py @@ -2,9 +2,74 @@ from posthog.api.routing import TeamAndOrgViewSetMixin +from django.test import override_settings +from django.urls import include, path +from rest_framework import viewsets +from posthog.api.annotation import AnnotationSerializer +from posthog.api.routing import DefaultRouterPlusPlus +from posthog.models.annotation import Annotation +from posthog.models.organization import Organization +from posthog.models.project import Project +from posthog.models.team.team import Team +from posthog.test.base import APIBaseTest -class TestRouting: +class FooViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): + scope_object = "INTERNAL" + queryset = Annotation.objects.all() + serializer_class = AnnotationSerializer + + +test_router = DefaultRouterPlusPlus() + +test_environments_router = test_router.register(r"environments", FooViewSet, "environments") +test_environments_router.register(r"foos", FooViewSet, 
"environment_foos", ["team_id"]) + +test_projects_router = test_router.register(r"projects", FooViewSet, "projects") +test_projects_router.register(r"foos", FooViewSet, "project_foos", ["project_id"]) + +test_organizations_router = test_router.register(r"organizations", FooViewSet, "organizations") +test_organizations_router.register(r"foos", FooViewSet, "organization_foos", ["organization_id"]) + + +urlpatterns = [ + path("api/", include(test_router.urls)), +] + + +@override_settings(ROOT_URLCONF=__name__) # Use `urlpatterns` from this file and not from `posthog.urls` +class TestTeamAndOrgViewSetMixin(APIBaseTest): + test_annotation: Annotation + + def setUp(self): + super().setUp() + other_org, _, other_org_team = Organization.objects.bootstrap(user=self.user) + self.other_org_annotation = Annotation.objects.create(team=other_org_team, organization=other_org) + _, other_project_team = Project.objects.create_with_team( + initiating_user=self.user, organization=self.organization + ) + self.other_project_annotation = Annotation.objects.create( + team=other_project_team, organization=self.organization + ) + other_team = Team.objects.create(organization=self.organization, project=self.project) + self.other_team_annotation = Annotation.objects.create(team=other_team, organization=self.organization) + self.current_team_annotation = Annotation.objects.create(team=self.team, organization=self.organization) + + def test_environment_nested_filtering(self): + response = self.client.get(f"/api/environments/{self.team.id}/foos/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["count"], 1) # Just current_team_annotation + + def test_project_nested_filtering(self): + response = self.client.get(f"/api/projects/{self.team.id}/foos/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["count"], 2) # Both current_team_annotation and other_team_annotation + + def test_organization_nested_filtering(self): + response = 
self.client.get(f"/api/organizations/{self.organization.id}/foos/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["count"], 3) # All except other_org_annotation + def test_cannot_override_special_methods(self): with pytest.raises(Exception) as e: diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py index 04c3787ad25a1..3e4c3e48d2dad 100644 --- a/posthog/api/test/test_team.py +++ b/posthog/api/test/test_team.py @@ -4,12 +4,11 @@ from unittest import mock from unittest.mock import ANY, MagicMock, call, patch -from asgiref.sync import sync_to_async from django.core.cache import cache from django.http import HttpResponse from freezegun import freeze_time from parameterized import parameterized -from rest_framework import status +from rest_framework import status, test from temporalio.service import RPCError from posthog.api.test.batch_exports.conftest import start_test_worker @@ -26,297 +25,341 @@ from posthog.test.base import APIBaseTest -class TestTeamAPI(APIBaseTest): - def _assert_activity_log(self, expected: list[dict], team_id: Optional[int] = None) -> None: - if not team_id: - team_id = self.team.pk - - starting_log_response = self.client.get(f"/api/projects/{team_id}/activity") - assert starting_log_response.status_code == 200, starting_log_response.json() - assert starting_log_response.json()["results"] == expected - - def _assert_organization_activity_log(self, expected: list[dict]) -> None: - starting_log_response = self.client.get(f"/api/organizations/{self.organization.pk}/activity") - assert starting_log_response.status_code == 200, starting_log_response.json() - assert starting_log_response.json()["results"] == expected - - def _assert_activity_log_is_empty(self) -> None: - self._assert_activity_log([]) - - def test_list_projects(self): - response = self.client.get("/api/projects/") - self.assertEqual(response.status_code, status.HTTP_200_OK) - - # Listing endpoint always uses the simplified 
serializer - response_data = response.json() - self.assertEqual(len(response_data["results"]), 1) - self.assertEqual(response_data["results"][0]["name"], self.team.name) - self.assertNotIn("test_account_filters", response_data["results"][0]) - self.assertNotIn("data_attributes", response_data["results"][0]) - - # TODO: These assertions will no longer make sense when we fully remove these attributes from the model - self.assertNotIn("event_names", response_data["results"][0]) - self.assertNotIn("event_properties", response_data["results"][0]) - self.assertNotIn("event_properties_numerical", response_data["results"][0]) - - def test_retrieve_project(self): - response = self.client.get("/api/projects/@current/") - self.assertEqual(response.status_code, status.HTTP_200_OK) - - response_data = response.json() - self.assertEqual(response_data["name"], self.team.name) - self.assertEqual(response_data["timezone"], "UTC") - self.assertEqual(response_data["is_demo"], False) - self.assertEqual(response_data["slack_incoming_webhook"], self.team.slack_incoming_webhook) - self.assertEqual(response_data["has_group_types"], False) - self.assertEqual( - response_data["person_on_events_querying_enabled"], - get_instance_setting("PERSON_ON_EVENTS_ENABLED") or get_instance_setting("PERSON_ON_EVENTS_V2_ENABLED"), - ) +def team_api_test_factory(): + class TestTeamAPI(APIBaseTest): + """Tests for /api/environments/.""" + + def _assert_activity_log(self, expected: list[dict], team_id: Optional[int] = None) -> None: + if not team_id: + team_id = self.team.pk + + starting_log_response = self.client.get(f"/api/environments/{team_id}/activity") + assert starting_log_response.status_code == 200, starting_log_response.json() + assert starting_log_response.json()["results"] == expected + + def _assert_organization_activity_log(self, expected: list[dict]) -> None: + starting_log_response = self.client.get(f"/api/organizations/{self.organization.pk}/activity") + assert 
starting_log_response.status_code == 200, starting_log_response.json() + assert starting_log_response.json()["results"] == expected + + def _assert_activity_log_is_empty(self) -> None: + self._assert_activity_log([]) + + def test_list_teams(self): + response = self.client.get("/api/environments/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # Listing endpoint always uses the simplified serializer + response_data = response.json() + self.assertEqual(len(response_data["results"]), 1) + self.assertEqual(response_data["results"][0]["name"], self.team.name) + self.assertNotIn("test_account_filters", response_data["results"][0]) + self.assertNotIn("data_attributes", response_data["results"][0]) + + # TODO: These assertions will no longer make sense when we fully remove these attributes from the model + self.assertNotIn("event_names", response_data["results"][0]) + self.assertNotIn("event_properties", response_data["results"][0]) + self.assertNotIn("event_properties_numerical", response_data["results"][0]) + + def test_retrieve_team(self): + response = self.client.get("/api/environments/@current/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.json() + self.assertEqual(response_data["name"], self.team.name) + self.assertEqual(response_data["timezone"], "UTC") + self.assertEqual(response_data["is_demo"], False) + self.assertEqual(response_data["slack_incoming_webhook"], self.team.slack_incoming_webhook) + self.assertEqual(response_data["has_group_types"], False) + self.assertEqual( + response_data["person_on_events_querying_enabled"], + get_instance_setting("PERSON_ON_EVENTS_ENABLED") or get_instance_setting("PERSON_ON_EVENTS_V2_ENABLED"), + ) - # TODO: These assertions will no longer make sense when we fully remove these attributes from the model - self.assertNotIn("event_names", response_data) - self.assertNotIn("event_properties", response_data) - self.assertNotIn("event_properties_numerical", 
response_data) - self.assertNotIn("event_names_with_usage", response_data) - self.assertNotIn("event_properties_with_usage", response_data) - - def test_cant_retrieve_project_from_another_org(self): - org = Organization.objects.create(name="New Org") - team = Team.objects.create(organization=org, name="Default project") - - response = self.client.get(f"/api/projects/{team.pk}/") - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), self.not_found_response()) - - @patch("posthog.api.team.get_geoip_properties") - def test_ip_location_is_used_for_new_project_week_day_start(self, get_geoip_properties_mock: MagicMock): - self.organization.available_product_features = [ - {"key": AvailableFeature.ORGANIZATIONS_PROJECTS, "name": AvailableFeature.ORGANIZATIONS_PROJECTS} - ] - self.organization.save() - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - get_geoip_properties_mock.return_value = {} - response = self.client.post("/api/projects/", {"name": "Test World"}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) - self.assertDictContainsSubset({"name": "Test World", "week_start_day": None}, response.json()) - - get_geoip_properties_mock.return_value = {"$geoip_country_code": "US"} - response = self.client.post("/api/projects/", {"name": "Test US"}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) - self.assertDictContainsSubset({"name": "Test US", "week_start_day": 0}, response.json()) - - get_geoip_properties_mock.return_value = {"$geoip_country_code": "PL"} - response = self.client.post("/api/projects/", {"name": "Test PL"}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) - self.assertDictContainsSubset({"name": "Test PL", "week_start_day": 1}, response.json()) - - get_geoip_properties_mock.return_value = {"$geoip_country_code": "IR"} - response = 
self.client.post("/api/projects/", {"name": "Test IR"}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) - self.assertDictContainsSubset({"name": "Test IR", "week_start_day": 0}, response.json()) - - def test_cant_create_team_without_license_on_selfhosted(self): - with self.is_cloud(False): - response = self.client.post("/api/projects/", {"name": "Test"}) - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + # TODO: These assertions will no longer make sense when we fully remove these attributes from the model + self.assertNotIn("event_names", response_data) + self.assertNotIn("event_properties", response_data) + self.assertNotIn("event_properties_numerical", response_data) + self.assertNotIn("event_names_with_usage", response_data) + self.assertNotIn("event_properties_with_usage", response_data) + + def test_cant_retrieve_team_from_another_org(self): + org = Organization.objects.create(name="New Org") + team = Team.objects.create(organization=org, name="Default project") + + response = self.client.get(f"/api/environments/{team.pk}/") + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.json(), self.not_found_response()) + + @patch("posthog.api.project.get_geoip_properties") + @patch("posthog.api.team.get_geoip_properties") + def test_ip_location_is_used_for_new_team_week_day_start( + self, get_geoip_properties_mock: MagicMock, get_geoip_properties_legacy_endpoint: MagicMock + ): + if self.client_class is EnvironmentToProjectRewriteClient: + get_geoip_properties_mock = get_geoip_properties_legacy_endpoint + + self.organization.available_product_features = [ + {"key": AvailableFeature.ORGANIZATIONS_PROJECTS, "name": AvailableFeature.ORGANIZATIONS_PROJECTS} + ] + self.organization.save() + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + get_geoip_properties_mock.return_value = {} + response = 
self.client.post("/api/environments/", {"name": "Test World"}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) + self.assertDictContainsSubset({"name": "Test World", "week_start_day": None}, response.json()) + + get_geoip_properties_mock.return_value = {"$geoip_country_code": "US"} + response = self.client.post("/api/environments/", {"name": "Test US"}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) + self.assertDictContainsSubset({"name": "Test US", "week_start_day": 0}, response.json()) + + get_geoip_properties_mock.return_value = {"$geoip_country_code": "PL"} + response = self.client.post("/api/environments/", {"name": "Test PL"}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) + self.assertDictContainsSubset({"name": "Test PL", "week_start_day": 1}, response.json()) + + get_geoip_properties_mock.return_value = {"$geoip_country_code": "IR"} + response = self.client.post("/api/environments/", {"name": "Test IR"}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) + self.assertDictContainsSubset({"name": "Test IR", "week_start_day": 0}, response.json()) + + def test_cant_create_team_without_license_on_selfhosted(self): + with self.is_cloud(False): + response = self.client.post("/api/environments/", {"name": "Test"}) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + self.assertEqual(Team.objects.count(), 1) + response = self.client.post("/api/environments/", {"name": "Test"}) + self.assertEqual(Team.objects.count(), 1) + + def test_cant_create_a_second_team_without_license(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() self.assertEqual(Team.objects.count(), 1) - response = self.client.post("/api/projects/", {"name": "Test"}) + + response = self.client.post("/api/environments/", {"name": "Hedgebox", "is_demo": False}) + 
self.assertEqual(response.status_code, 403) + response_data = response.json() + self.assertDictContainsSubset( + { + "type": "authentication_error", + "code": "permission_denied", + "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects or environments.", + }, + response_data, + ) self.assertEqual(Team.objects.count(), 1) - def test_cant_create_a_second_project_without_license(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - self.assertEqual(Team.objects.count(), 1) - - response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": False}) - self.assertEqual(response.status_code, 403) - response_data = response.json() - self.assertDictContainsSubset( - { - "type": "authentication_error", - "code": "permission_denied", - "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects.", - }, - response_data, - ) - self.assertEqual(Team.objects.count(), 1) - - # another request without the is_demo parameter - response = self.client.post("/api/projects/", {"name": "Hedgebox"}) - self.assertEqual(response.status_code, 403) - response_data = response.json() - self.assertDictContainsSubset( - { - "type": "authentication_error", - "code": "permission_denied", - "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects.", - }, - response_data, - ) - self.assertEqual(Team.objects.count(), 1) + # another request without the is_demo parameter + response = self.client.post("/api/environments/", {"name": "Hedgebox"}) + self.assertEqual(response.status_code, 403) + response_data = response.json() + self.assertDictContainsSubset( + { + "type": "authentication_error", + "code": "permission_denied", + "detail": "You must upgrade your PostHog plan to be able to create and manage multiple projects or environments.", + }, + response_data, + ) + self.assertEqual(Team.objects.count(), 1) - 
@freeze_time("2022-02-08") - def test_update_project_timezone(self): - self._assert_activity_log_is_empty() + @freeze_time("2022-02-08") + def test_update_team_timezone(self): + self._assert_activity_log_is_empty() + + response = self.client.patch("/api/environments/@current/", {"timezone": "Europe/Lisbon"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.json() + self.assertEqual(response_data["name"], self.team.name) + self.assertEqual(response_data["timezone"], "Europe/Lisbon") + + self.team.refresh_from_db() + self.assertEqual(self.team.timezone, "Europe/Lisbon") + + self._assert_activity_log( + [ + { + "activity": "updated", + "created_at": "2022-02-08T00:00:00Z", + "detail": { + "changes": [ + { + "action": "changed", + "after": "Europe/Lisbon", + "before": "UTC", + "field": "timezone", + "type": "Team", + }, + ], + "name": "Default project", + "short_id": None, + "trigger": None, + "type": None, + }, + "item_id": str(self.team.pk), + "scope": "Team", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + ) - response = self.client.patch("/api/projects/@current/", {"timezone": "Europe/Lisbon"}) - self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_update_test_filter_default_checked(self): + response = self.client.patch( + "/api/environments/@current/", {"test_account_filters_default_checked": "true"} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) - response_data = response.json() - self.assertEqual(response_data["name"], self.team.name) - self.assertEqual(response_data["timezone"], "Europe/Lisbon") + response_data = response.json() + self.assertEqual(response_data["test_account_filters_default_checked"], True) - self.team.refresh_from_db() - self.assertEqual(self.team.timezone, "Europe/Lisbon") + self.team.refresh_from_db() + self.assertEqual(self.team.test_account_filters_default_checked, True) - self._assert_activity_log( - [ + def 
test_cannot_set_invalid_timezone_for_team(self): + response = self.client.patch("/api/environments/@current/", {"timezone": "America/I_Dont_Exist"}) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_choice", + "detail": '"America/I_Dont_Exist" is not a valid choice.', + "attr": "timezone", + }, + ) + + self.team.refresh_from_db() + self.assertNotEqual(self.team.timezone, "America/I_Dont_Exist") + + def test_cant_update_team_from_another_org(self): + org = Organization.objects.create(name="New Org") + team = Team.objects.create(organization=org, name="Default project") + + response = self.client.patch(f"/api/environments/{team.pk}/", {"timezone": "Africa/Accra"}) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.json(), self.not_found_response()) + + team.refresh_from_db() + self.assertEqual(team.timezone, "UTC") + + def test_filter_permission(self): + response = self.client.patch( + f"/api/environments/{self.team.id}/", + {"test_account_filters": [{"key": "$current_url", "value": "test"}]}, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response_data = response.json() + self.assertEqual(response_data["name"], self.team.name) + self.assertEqual( + response_data["test_account_filters"], + [{"key": "$current_url", "value": "test"}], + ) + + @freeze_time("2022-02-08") + def test_delete_team_activity_log(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + team: Team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) + + response = self.client.delete(f"/api/environments/{team.id}") + assert response.status_code == 204 + + # activity log is queried in the context of the team + # and the team was deleted, so we can't (for now) view a deleted team activity via the API + # even though the 
activity log is recorded + + deleted_team_activity_response = self.client.get(f"/api/environments/{team.id}/activity") + assert deleted_team_activity_response.status_code == status.HTTP_404_NOT_FOUND + + # we can't query by API but can prove the log was recorded + activity = [a.__dict__ for a in ActivityLog.objects.filter(team_id=team.pk).all()] + expected_activity = [ { - "activity": "updated", - "created_at": "2022-02-08T00:00:00Z", + "_state": ANY, + "activity": "deleted", + "created_at": ANY, "detail": { - "changes": [ - { - "action": "changed", - "after": "Europe/Lisbon", - "before": "UTC", - "field": "timezone", - "type": "Team", - }, - ], + "changes": None, "name": "Default project", "short_id": None, "trigger": None, "type": None, }, - "item_id": str(self.team.pk), + "id": ANY, + "is_system": False, + "organization_id": ANY, + "team_id": team.pk, + "item_id": str(team.pk), "scope": "Team", - "user": { - "email": "user1@posthog.com", - "first_name": "", - }, + "user_id": self.user.pk, + "was_impersonated": False, }, ] - ) - - def test_update_test_filter_default_checked(self): - response = self.client.patch("/api/projects/@current/", {"test_account_filters_default_checked": "true"}) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - response_data = response.json() - self.assertEqual(response_data["test_account_filters_default_checked"], True) - - self.team.refresh_from_db() - self.assertEqual(self.team.test_account_filters_default_checked, True) - - def test_cannot_set_invalid_timezone_for_project(self): - response = self.client.patch("/api/projects/@current/", {"timezone": "America/I_Dont_Exist"}) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_choice", - "detail": '"America/I_Dont_Exist" is not a valid choice.', - "attr": "timezone", - }, - ) - - self.team.refresh_from_db() - self.assertNotEqual(self.team.timezone, 
"America/I_Dont_Exist") + if self.client_class is EnvironmentToProjectRewriteClient: + expected_activity.insert( + 0, + { + "_state": ANY, + "activity": "deleted", + "created_at": ANY, + "detail": { + "changes": None, + "name": "Default project", + "short_id": None, + "trigger": None, + "type": None, + }, + "id": ANY, + "is_system": False, + "organization_id": ANY, + "team_id": team.pk, + "item_id": str(team.project_id), + "scope": "Project", + "user_id": self.user.pk, + "was_impersonated": False, + }, + ) + assert activity == expected_activity - def test_cant_update_project_from_another_org(self): - org = Organization.objects.create(name="New Org") - team = Team.objects.create(organization=org, name="Default project") + @patch("posthog.api.project.delete_bulky_postgres_data") + @patch("posthog.api.team.delete_bulky_postgres_data") + @patch("posthoganalytics.capture") + def test_delete_team_own_second( + self, + mock_capture: MagicMock, + mock_delete_bulky_postgres_data: MagicMock, + mock_delete_bulky_postgres_data_legacy_endpoint: MagicMock, + ): + if self.client_class is EnvironmentToProjectRewriteClient: + mock_delete_bulky_postgres_data = mock_delete_bulky_postgres_data_legacy_endpoint - response = self.client.patch(f"/api/projects/{team.pk}/", {"timezone": "Africa/Accra"}) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), self.not_found_response()) + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() - team.refresh_from_db() - self.assertEqual(team.timezone, "UTC") + team: Team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) - def test_filter_permission(self): - response = self.client.patch( - f"/api/projects/{self.team.id}/", - {"test_account_filters": [{"key": "$current_url", "value": "test"}]}, - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) + 
self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) - response_data = response.json() - self.assertEqual(response_data["name"], self.team.name) - self.assertEqual( - response_data["test_account_filters"], - [{"key": "$current_url", "value": "test"}], - ) + response = self.client.delete(f"/api/environments/{team.id}") - @freeze_time("2022-02-08") - @patch("posthog.api.team.delete_bulky_postgres_data") - @patch("posthoganalytics.capture") - def test_delete_team_activity_log(self, mock_capture: MagicMock, mock_delete_bulky_postgres_data: MagicMock): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - team: Team = Team.objects.create_with_data(organization=self.organization) - - response = self.client.delete(f"/api/projects/{team.id}") - assert response.status_code == 204 - - # activity log is queried in the context of the team - # and the team was deleted, so we can't (for now) view a deleted team activity via the API - # even though the activity log is recorded - - deleted_team_activity_response = self.client.get(f"/api/projects/{team.id}/activity") - assert deleted_team_activity_response.status_code == status.HTTP_404_NOT_FOUND - - # we can't query by API but can prove the log was recorded - activity = [a.__dict__ for a in ActivityLog.objects.filter(team_id=team.pk).all()] - assert activity == [ - { - "_state": ANY, - "activity": "deleted", - "created_at": ANY, - "detail": { - "changes": None, - "name": "Default project", - "short_id": None, - "trigger": None, - "type": None, - }, - "id": ANY, - "is_system": False, - "organization_id": ANY, - "team_id": team.pk, - "item_id": str(team.pk), - "scope": "Team", - "user_id": self.user.pk, - "was_impersonated": False, - }, - ] - - @patch("posthog.api.team.delete_bulky_postgres_data") - @patch("posthoganalytics.capture") - def test_delete_team_own_second(self, mock_capture: MagicMock, mock_delete_bulky_postgres_data: MagicMock): - 
self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - team: Team = Team.objects.create_with_data(organization=self.organization) - - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) - - response = self.client.delete(f"/api/projects/{team.id}") - - self.assertEqual(response.status_code, 204) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) - self.assertEqual( - AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id)).count(), - 1, - ) - mock_capture.assert_has_calls( - calls=[ + self.assertEqual(response.status_code, 204) + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) + self.assertEqual( + AsyncDeletion.objects.filter( + team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id) + ).count(), + 1, + ) + expected_capture_calls = [ call( self.user.distinct_id, "membership level changed", @@ -325,722 +368,768 @@ def test_delete_team_own_second(self, mock_capture: MagicMock, mock_delete_bulky ), call(self.user.distinct_id, "team deleted", properties={}, groups=mock.ANY), ] - ) - mock_delete_bulky_postgres_data.assert_called_once_with(team_ids=[team.pk]) + if self.client_class is EnvironmentToProjectRewriteClient: + expected_capture_calls.append( + call( + self.user.distinct_id, + "project deleted", + properties={"project_name": "Default project"}, + groups=mock.ANY, + ) + ) + assert mock_capture.call_args_list == expected_capture_calls + mock_delete_bulky_postgres_data.assert_called_once_with(team_ids=[team.pk]) + + def test_delete_bulky_postgres_data(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + team: Team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) + + self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) + + from 
posthog.models.cohort import Cohort, CohortPeople + from posthog.models.feature_flag.feature_flag import ( + FeatureFlag, + FeatureFlagHashKeyOverride, + ) - def test_delete_bulky_postgres_data(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() + # from posthog.models.insight_caching_state import InsightCachingState + from posthog.models.person import Person - team: Team = Team.objects.create_with_data(organization=self.organization) + cohort = Cohort.objects.create(team=team, created_by=self.user, name="test") + person = Person.objects.create( + team=team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com", "team": "posthog"}, + ) + person.add_distinct_id("test") + flag = FeatureFlag.objects.create( + team=team, + name="test", + key="test", + rollout_percentage=50, + created_by=self.user, + ) + FeatureFlagHashKeyOverride.objects.create( + team_id=team.pk, + person_id=person.id, + feature_flag_key=flag.key, + hash_key="test", + ) + CohortPeople.objects.create(cohort_id=cohort.pk, person_id=person.pk) + EarlyAccessFeature.objects.create( + team=team, + name="Test flag", + description="A fancy new flag.", + stage="beta", + feature_flag=flag, + ) - self.assertEqual(Team.objects.filter(organization=self.organization).count(), 2) + # if something is missing then teardown fails + response = self.client.delete(f"/api/environments/{team.id}") + self.assertEqual(response.status_code, 204) - from posthog.models.cohort import Cohort, CohortPeople - from posthog.models.feature_flag.feature_flag import ( - FeatureFlag, - FeatureFlagHashKeyOverride, - ) + def test_delete_batch_exports(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() - # from posthog.models.insight_caching_state import InsightCachingState - from posthog.models.person import Person + team: Team = Team.objects.create_with_data(initiating_user=self.user, 
organization=self.organization) - cohort = Cohort.objects.create(team=team, created_by=self.user, name="test") - person = Person.objects.create( - team=team, - distinct_ids=["example_id"], - properties={"email": "tim@posthog.com", "team": "posthog"}, - ) - person.add_distinct_id("test") - flag = FeatureFlag.objects.create( - team=team, - name="test", - key="test", - rollout_percentage=50, - created_by=self.user, - ) - FeatureFlagHashKeyOverride.objects.create( - team_id=team.pk, - person_id=person.id, - feature_flag_key=flag.key, - hash_key="test", - ) - CohortPeople.objects.create(cohort_id=cohort.pk, person_id=person.pk) - EarlyAccessFeature.objects.create( - team=team, - name="Test flag", - description="A fancy new flag.", - stage="beta", - feature_flag=flag, - ) + destination_data = { + "type": "S3", + "config": { + "bucket_name": "my-production-s3-bucket", + "region": "us-east-1", + "prefix": "posthog-events/", + "aws_access_key_id": "abc123", + "aws_secret_access_key": "secret", + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + temporal = sync_connect() + + with start_test_worker(temporal): + response = self.client.post( + f"/api/environments/{team.id}/batch_exports", + json.dumps(batch_export_data), + content_type="application/json", + ) + self.assertEqual(response.status_code, 201) + + batch_export = response.json() + batch_export_id = batch_export["id"] + + response = self.client.delete(f"/api/environments/{team.id}") + self.assertEqual(response.status_code, 204) + + response = self.client.get(f"/api/environments/{team.id}/batch_exports/{batch_export_id}") + self.assertEqual(response.status_code, 404) + + with self.assertRaises(RPCError): + describe_schedule(temporal, batch_export_id) + + @freeze_time("2022-02-08") + def test_reset_token(self): + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + 
self._assert_activity_log_is_empty() + + self.team.api_token = "xyz" + self.team.save() + + response = self.client.patch(f"/api/environments/{self.team.id}/reset_token/") + response_data = response.json() + + self.team.refresh_from_db() + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertNotEqual(response_data["api_token"], "xyz") + self.assertEqual(response_data["api_token"], self.team.api_token) + self.assertTrue(response_data["api_token"].startswith("phc_")) + + self._assert_activity_log( + [ + { + "activity": "updated", + "created_at": "2022-02-08T00:00:00Z", + "detail": { + "changes": [ + { + "action": "changed", + "after": None, + "before": None, + "field": "api_token", + "type": "Team", + }, + ], + "name": "Default project", + "short_id": None, + "trigger": None, + "type": None, + }, + "item_id": str(self.team.pk), + "scope": "Team", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + ) + + def test_reset_token_insufficient_priviledges(self): + self.team.api_token = "xyz" + self.team.save() + + response = self.client.patch(f"/api/environments/{self.team.id}/reset_token/") + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_update_primary_dashboard(self): + d = Dashboard.objects.create(name="Test", team=self.team) + + # Can set it + response = self.client.patch("/api/environments/@current/", {"primary_dashboard": d.id}) + response_data = response.json() + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response_data["name"], self.team.name) + self.assertEqual(response_data["primary_dashboard"], d.id) + + def test_cant_set_primary_dashboard_to_another_teams_dashboard(self): + self.team.primary_dashboard_id = None # Remove the default primary dashboard from the picture + self.team.save() + + team_2 = Team.objects.create(organization=self.organization, name="Default project") + d = Dashboard.objects.create(name="Test", team=team_2) - # if something is 
missing then teardown fails - response = self.client.delete(f"/api/projects/{team.id}") - self.assertEqual(response.status_code, 204) - - def test_delete_batch_exports(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - team: Team = Team.objects.create_with_data(organization=self.organization) - - destination_data = { - "type": "S3", - "config": { - "bucket_name": "my-production-s3-bucket", - "region": "us-east-1", - "prefix": "posthog-events/", - "aws_access_key_id": "abc123", - "aws_secret_access_key": "secret", - }, - } - - batch_export_data = { - "name": "my-production-s3-bucket-destination", - "destination": destination_data, - "interval": "hour", - } - - temporal = sync_connect() - - with start_test_worker(temporal): - response = self.client.post( - f"/api/projects/{team.id}/batch_exports", - json.dumps(batch_export_data), - content_type="application/json", + response = self.client.patch("/api/environments/@current/", {"primary_dashboard": d.id}) + self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + response = self.client.get("/api/environments/@current/") + response_data = response.json() + self.assertEqual(response_data["primary_dashboard"], None) + + def test_is_generating_demo_data(self): + cache_key = f"is_generating_demo_data_{self.team.pk}" + cache.set(cache_key, "True") + response = self.client.get(f"/api/environments/{self.team.id}/is_generating_demo_data/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json(), {"is_generating_demo_data": True}) + cache.delete(cache_key) + response = self.client.get(f"/api/environments/{self.team.id}/is_generating_demo_data/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json(), {"is_generating_demo_data": False}) + + @patch("posthog.tasks.demo_create_data.create_data_for_demo_team.delay") + def test_org_member_can_create_demo_project(self, 
mock_create_data_for_demo_team: MagicMock): + self.organization_membership.level = OrganizationMembership.Level.MEMBER + self.organization_membership.save() + response = self.client.post("/api/environments/", {"name": "Hedgebox", "is_demo": True}) + mock_create_data_for_demo_team.assert_called_once() + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + @freeze_time("2022-02-08") + def test_team_float_config_can_be_serialized_to_activity_log(self): + # regression test since this isn't true by default + response = self.client.patch(f"/api/environments/@current/", {"session_recording_sample_rate": 0.4}) + assert response.status_code == status.HTTP_200_OK + self._assert_activity_log( + [ + { + "activity": "updated", + "created_at": "2022-02-08T00:00:00Z", + "detail": { + "changes": [ + { + "action": "created", + "after": "0.4", + "before": None, + "field": "session_recording_sample_rate", + "type": "Team", + }, + ], + "name": "Default project", + "short_id": None, + "trigger": None, + "type": None, + }, + "item_id": str(self.team.pk), + "scope": "Team", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] ) - self.assertEqual(response.status_code, 201) - batch_export = response.json() - batch_export_id = batch_export["id"] + @freeze_time("2022-02-08") + def test_team_creation_is_in_activity_log(self): + Team.objects.all().delete() + + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + team_name = str(uuid.uuid4()) + response = self.client.post("/api/environments/", {"name": team_name, "is_demo": False}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + team_id = response.json()["id"] + self._assert_activity_log( + [ + { + "activity": "created", + "created_at": "2022-02-08T00:00:00Z", + "detail": { + "changes": None, + "name": team_name, + "short_id": None, + "trigger": None, + "type": None, + }, + "item_id": str(team_id), + "scope": "Team", + 
"user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ], + team_id=team_id, + ) - response = self.client.delete(f"/api/projects/{team.id}") - self.assertEqual(response.status_code, 204) + def test_team_is_cached_on_create_and_update(self): + Team.objects.all().delete() + self.organization_membership.level = OrganizationMembership.Level.ADMIN + self.organization_membership.save() + + response = self.client.post("/api/environments/", {"name": "Test", "is_demo": False}) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.json()["name"], "Test") + + token = response.json()["api_token"] + team_id = response.json()["id"] + + cached_team = get_team_in_cache(token) + + assert cached_team is not None + self.assertEqual(cached_team.name, "Test") + self.assertEqual(cached_team.uuid, response.json()["uuid"]) + self.assertEqual(cached_team.id, response.json()["id"]) + + response = self.client.patch( + f"/api/environments/{team_id}/", + {"timezone": "Europe/Istanbul", "session_recording_opt_in": True}, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + cached_team = get_team_in_cache(token) + assert cached_team is not None - response = self.client.get(f"/api/projects/{team.id}/batch_exports/{batch_export_id}") - self.assertEqual(response.status_code, 404) + self.assertEqual(cached_team.name, "Test") + self.assertEqual(cached_team.uuid, response.json()["uuid"]) + self.assertEqual(cached_team.session_recording_opt_in, True) - with self.assertRaises(RPCError): - describe_schedule(temporal, batch_export_id) + # only things in CachedTeamSerializer are cached! 
+ self.assertEqual(cached_team.timezone, "UTC") - @freeze_time("2022-02-08") - def test_reset_token(self): - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() + # reset token should update cache as well + response = self.client.patch(f"/api/environments/{team_id}/reset_token/") + response_data = response.json() - self._assert_activity_log_is_empty() + cached_team = get_team_in_cache(token) + assert cached_team is None - self.team.api_token = "xyz" - self.team.save() + cached_team = get_team_in_cache(response_data["api_token"]) + assert cached_team is not None + self.assertEqual(cached_team.name, "Test") + self.assertEqual(cached_team.uuid, response.json()["uuid"]) + self.assertEqual(cached_team.session_recording_opt_in, True) - response = self.client.patch(f"/api/projects/{self.team.id}/reset_token/") - response_data = response.json() + def test_turn_on_exception_autocapture(self): + response = self.client.get("/api/environments/@current/") + assert response.json()["autocapture_exceptions_opt_in"] is None + + response = self.client.patch( + "/api/environments/@current/", + {"autocapture_exceptions_opt_in": "Welwyn Garden City"}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json()["detail"] == "Must be a valid boolean." 
+ + response = self.client.patch("/api/environments/@current/", {"autocapture_exceptions_opt_in": True}) + assert response.status_code == status.HTTP_200_OK + response = self.client.get("/api/environments/@current/") + assert response.json()["autocapture_exceptions_opt_in"] is True + + def test_configure_exception_autocapture_event_dropping(self): + response = self.client.get("/api/environments/@current/") + assert response.json()["autocapture_exceptions_errors_to_ignore"] is None + + response = self.client.patch( + "/api/environments/@current/", + {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + response.json()["detail"] == "Must provide a list for field: autocapture_exceptions_errors_to_ignore." + ) + + response = self.client.patch( + "/api/environments/@current/", + {"autocapture_exceptions_errors_to_ignore": [1, False]}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + response.json()["detail"] + == "Must provide a list of strings to field: autocapture_exceptions_errors_to_ignore." + ) + + response = self.client.patch( + "/api/environments/@current/", + {"autocapture_exceptions_errors_to_ignore": ["wat am i"]}, + ) + assert response.status_code == status.HTTP_200_OK + response = self.client.get("/api/environments/@current/") + assert response.json()["autocapture_exceptions_errors_to_ignore"] == ["wat am i"] + + def test_configure_exception_autocapture_event_dropping_only_allows_simple_config(self): + response = self.client.patch( + "/api/environments/@current/", + {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + response.json()["detail"] + == "Field autocapture_exceptions_errors_to_ignore must be less than 300 characters. Complex config should be provided in posthog-js initialization." 
+ ) - self.team.refresh_from_db() - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertNotEqual(response_data["api_token"], "xyz") - self.assertEqual(response_data["api_token"], self.team.api_token) - self.assertTrue(response_data["api_token"].startswith("phc_")) + @parameterized.expand( + [ + [ + "non numeric string", + "Welwyn Garden City", + "invalid_input", + "A valid number is required.", + ], + [ + "negative number", + "-1", + "min_value", + "Ensure this value is greater than or equal to 0.", + ], + [ + "greater than one", + "1.5", + "max_value", + "Ensure this value is less than or equal to 1.", + ], + [ + "too many digits", + "0.534", + "max_decimal_places", + "Ensure that there are no more than 2 decimal places.", + ], + ] + ) + def test_invalid_session_recording_sample_rates( + self, _name: str, provided_value: str, expected_code: str, expected_error: str + ) -> None: + response = self.client.patch( + "/api/environments/@current/", {"session_recording_sample_rate": provided_value} + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == { + "attr": "session_recording_sample_rate", + "code": expected_code, + "detail": expected_error, + "type": "validation_error", + } - self._assert_activity_log( + @parameterized.expand( [ - { - "activity": "updated", - "created_at": "2022-02-08T00:00:00Z", - "detail": { - "changes": [ - { - "action": "changed", - "after": None, - "before": None, - "field": "api_token", - "type": "Team", - }, - ], - "name": "Default project", - "short_id": None, - "trigger": None, - "type": None, - }, - "item_id": str(self.team.pk), - "scope": "Team", - "user": { - "email": "user1@posthog.com", - "first_name": "", - }, - }, + [ + "non numeric string", + "Trentham monkey forest", + "invalid_input", + "A valid integer is required.", + ], + [ + "negative number", + "-1", + "min_value", + "Ensure this value is greater than or equal to 0.", + ], + [ + "greater than 15000", + "15001", + 
"max_value", + "Ensure this value is less than or equal to 15000.", + ], + ["too many digits", "0.5", "invalid_input", "A valid integer is required."], ] ) + def test_invalid_session_recording_minimum_duration( + self, _name: str, provided_value: str, expected_code: str, expected_error: str + ) -> None: + response = self.client.patch( + "/api/environments/@current/", + {"session_recording_minimum_duration_milliseconds": provided_value}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == { + "attr": "session_recording_minimum_duration_milliseconds", + "code": expected_code, + "detail": expected_error, + "type": "validation_error", + } - def test_reset_token_insufficient_priviledges(self): - self.team.api_token = "xyz" - self.team.save() - - response = self.client.patch(f"/api/projects/{self.team.id}/reset_token/") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_update_primary_dashboard(self): - d = Dashboard.objects.create(name="Test", team=self.team) - - # Can set it - response = self.client.patch("/api/projects/@current/", {"primary_dashboard": d.id}) - response_data = response.json() - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response_data["name"], self.team.name) - self.assertEqual(response_data["primary_dashboard"], d.id) - - def test_cant_set_primary_dashboard_to_another_teams_dashboard(self): - team_2 = Team.objects.create(organization=self.organization, name="Default project") - d = Dashboard.objects.create(name="Test", team=team_2) - - response = self.client.patch("/api/projects/@current/", {"primary_dashboard": d.id}) - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - response = self.client.get("/api/projects/@current/") - response_data = response.json() - self.assertEqual(response_data["primary_dashboard"], None) - - def test_is_generating_demo_data(self): - cache_key = f"is_generating_demo_data_{self.team.pk}" - 
cache.set(cache_key, "True") - response = self.client.get(f"/api/projects/{self.team.id}/is_generating_demo_data/") - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {"is_generating_demo_data": True}) - cache.delete(cache_key) - response = self.client.get(f"/api/projects/{self.team.id}/is_generating_demo_data/") - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {"is_generating_demo_data": False}) - - @patch("posthog.api.team.create_data_for_demo_team.delay") - def test_org_member_can_create_demo_project(self, mock_create_data_for_demo_team: MagicMock): - self.organization_membership.level = OrganizationMembership.Level.MEMBER - self.organization_membership.save() - response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": True}) - mock_create_data_for_demo_team.assert_called_once() - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - - @freeze_time("2022-02-08") - def test_team_float_config_can_be_serialized_to_activity_log(self): - # regression test since this isn't true by default - response = self.client.patch(f"/api/projects/@current/", {"session_recording_sample_rate": 0.4}) - assert response.status_code == status.HTTP_200_OK - self._assert_activity_log( + @parameterized.expand( [ - { - "activity": "updated", - "created_at": "2022-02-08T00:00:00Z", - "detail": { - "changes": [ - { - "action": "created", - "after": "0.4", - "before": None, - "field": "session_recording_sample_rate", - "type": "Team", - }, - ], - "name": "Default project", - "short_id": None, - "trigger": None, - "type": None, - }, - "item_id": str(self.team.pk), - "scope": "Team", - "user": { - "email": "user1@posthog.com", - "first_name": "", - }, - }, + [ + "string", + "Marple bridge", + "invalid_input", + "Must provide a dictionary or None.", + ], + ["numeric string", "-1", "invalid_input", "Must provide a dictionary or None."], + ["numeric", 1, "invalid_input", 
"Must provide a dictionary or None."], + ["numeric positive string", "1", "invalid_input", "Must provide a dictionary or None."], + [ + "unexpected json - no id", + {"key": "something"}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - no key", + {"id": 1}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - only variant", + {"variant": "1"}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - variant must be string", + {"variant": 1}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - missing id", + {"key": "one", "variant": "1"}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - missing key", + {"id": "one", "variant": "1"}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", + ], + [ + "unexpected json - neither", + {"wat": "wat"}, + "invalid_input", + "Must provide a dictionary with only 'id' and 'key' keys. 
_or_ only 'id', 'key', and 'variant' keys.", + ], ] ) + def test_invalid_session_recording_linked_flag( + self, _name: str, provided_value: Any, expected_code: str, expected_error: str + ) -> None: + response = self._patch_linked_flag_config(provided_value, expected_status=status.HTTP_400_BAD_REQUEST) + + assert response.json() == { + "attr": "session_recording_linked_flag", + "code": expected_code, + "detail": expected_error, + "type": "validation_error", + } + + def test_can_set_and_unset_session_recording_linked_flag(self) -> None: + self._patch_linked_flag_config({"id": 1, "key": "provided_value"}) + self._assert_linked_flag_config({"id": 1, "key": "provided_value"}) - @freeze_time("2022-02-08") - def test_team_creation_is_in_activity_log(self): - Team.objects.all().delete() + self._patch_linked_flag_config(None) + self._assert_linked_flag_config(None) - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() + def test_can_set_and_unset_session_recording_linked_flag_variant(self) -> None: + self._patch_linked_flag_config({"id": 1, "key": "provided_value", "variant": "test"}) + self._assert_linked_flag_config({"id": 1, "key": "provided_value", "variant": "test"}) - team_name = str(uuid.uuid4()) - response = self.client.post("/api/projects/", {"name": team_name, "is_demo": False}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self._patch_linked_flag_config(None) + self._assert_linked_flag_config(None) - team_id = response.json()["id"] - self._assert_activity_log( + @parameterized.expand( [ - { - "activity": "created", - "created_at": "2022-02-08T00:00:00Z", - "detail": { - "changes": None, - "name": team_name, - "short_id": None, - "trigger": None, - "type": None, - }, - "item_id": str(team_id), - "scope": "Team", - "user": { - "email": "user1@posthog.com", - "first_name": "", - }, - }, - ], - team_id=team_id, + [ + "string", + "Marple bridge", + "invalid_input", + "Must provide a 
dictionary or None.", + ], + ["numeric", "-1", "invalid_input", "Must provide a dictionary or None."], + [ + "unexpected json - no recordX", + {"key": "something"}, + "invalid_input", + "Must provide a dictionary with only 'recordHeaders' and/or 'recordBody' keys.", + ], + ] ) + def test_invalid_session_recording_network_payload_capture_config( + self, _name: str, provided_value: str, expected_code: str, expected_error: str + ) -> None: + response = self.client.patch( + "/api/environments/@current/", {"session_recording_network_payload_capture_config": provided_value} + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == { + "attr": "session_recording_network_payload_capture_config", + "code": expected_code, + "detail": expected_error, + "type": "validation_error", + } - def test_team_is_cached_on_create_and_update(self): - Team.objects.all().delete() - self.organization_membership.level = OrganizationMembership.Level.ADMIN - self.organization_membership.save() - - response = self.client.post("/api/projects/", {"name": "Test", "is_demo": False}) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(response.json()["name"], "Test") + def test_can_set_and_unset_session_recording_network_payload_capture_config(self) -> None: + # can set just one + first_patch_response = self.client.patch( + "/api/environments/@current/", + {"session_recording_network_payload_capture_config": {"recordHeaders": True}}, + ) + assert first_patch_response.status_code == status.HTTP_200_OK + get_response = self.client.get("/api/environments/@current/") + assert get_response.json()["session_recording_network_payload_capture_config"] == {"recordHeaders": True} + + # can set the other + first_patch_response = self.client.patch( + "/api/environments/@current/", + {"session_recording_network_payload_capture_config": {"recordBody": False}}, + ) + assert first_patch_response.status_code == status.HTTP_200_OK + get_response = 
self.client.get("/api/environments/@current/") + assert get_response.json()["session_recording_network_payload_capture_config"] == {"recordBody": False} - token = response.json()["api_token"] - team_id = response.json()["id"] + # can unset both + response = self.client.patch( + "/api/environments/@current/", {"session_recording_network_payload_capture_config": None} + ) + assert response.status_code == status.HTTP_200_OK + second_get_response = self.client.get("/api/environments/@current/") + assert second_get_response.json()["session_recording_network_payload_capture_config"] is None - cached_team = get_team_in_cache(token) + def test_can_set_and_unset_session_replay_config(self) -> None: + # can set + self._patch_session_replay_config({"record_canvas": True}) + self._assert_replay_config_is({"record_canvas": True}) - assert cached_team is not None - self.assertEqual(cached_team.name, "Test") - self.assertEqual(cached_team.uuid, response.json()["uuid"]) - self.assertEqual(cached_team.id, response.json()["id"]) + # can unset + self._patch_session_replay_config(None) + self._assert_replay_config_is(None) - response = self.client.patch( - f"/api/projects/{team_id}/", - {"timezone": "Europe/Istanbul", "session_recording_opt_in": True}, + @parameterized.expand( + [ + [ + "string", + "Marple bridge", + "invalid_input", + "Must provide a dictionary or None.", + ], + ["numeric", "-1", "invalid_input", "Must provide a dictionary or None."], + [ + "unexpected json - no record", + {"key": "something"}, + "invalid_input", + "Must provide a dictionary with only allowed keys: included_event_properties, opt_in, preferred_events, excluded_events, important_user_properties.", + ], + ] ) - self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_invalid_session_replay_config_ai_config( + self, _name: str, provided_value: str, expected_code: str, expected_error: str + ) -> None: + response = self._patch_session_replay_config( + {"ai_config": provided_value}, 
expected_status=status.HTTP_400_BAD_REQUEST + ) + assert response.json() == { + "attr": "session_replay_config", + "code": expected_code, + "detail": expected_error, + "type": "validation_error", + } - cached_team = get_team_in_cache(token) - assert cached_team is not None + def test_can_set_and_unset_session_replay_config_ai_config(self) -> None: + # can set just the opt-in + self._patch_session_replay_config({"ai_config": {"opt_in": True}}) + self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - self.assertEqual(cached_team.name, "Test") - self.assertEqual(cached_team.uuid, response.json()["uuid"]) - self.assertEqual(cached_team.session_recording_opt_in, True) + # can set some preferences + self._patch_session_replay_config( + {"ai_config": {"opt_in": False, "included_event_properties": ["something"]}} + ) + self._assert_replay_config_is({"ai_config": {"opt_in": False, "included_event_properties": ["something"]}}) - # only things in CachedTeamSerializer are cached! - self.assertEqual(cached_team.timezone, "UTC") + self._patch_session_replay_config({"ai_config": None}) + self._assert_replay_config_is({"ai_config": None}) - # reset token should update cache as well - response = self.client.patch(f"/api/projects/{team_id}/reset_token/") - response_data = response.json() + def test_can_set_replay_configs_without_providing_them_all(self) -> None: + # can set just the opt-in + self._patch_session_replay_config({"ai_config": {"opt_in": True}}) + self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - cached_team = get_team_in_cache(token) - assert cached_team is None + self._patch_session_replay_config({"record_canvas": True}) + self._assert_replay_config_is({"record_canvas": True, "ai_config": {"opt_in": True}}) - cached_team = get_team_in_cache(response_data["api_token"]) - assert cached_team is not None - self.assertEqual(cached_team.name, "Test") - self.assertEqual(cached_team.uuid, response.json()["uuid"]) - 
self.assertEqual(cached_team.session_recording_opt_in, True) + def test_can_set_replay_configs_without_providing_them_all_even_when_either_side_is_none(self) -> None: + # because we do some dictionary copying we need a regression test to ensure we can always set and unset keys + self._patch_session_replay_config({"record_canvas": True, "ai_config": {"opt_in": True}}) + self._assert_replay_config_is({"record_canvas": True, "ai_config": {"opt_in": True}}) - def test_turn_on_exception_autocapture(self): - response = self.client.get("/api/projects/@current/") - assert response.json()["autocapture_exceptions_opt_in"] is None + self._patch_session_replay_config({"record_canvas": None}) + self._assert_replay_config_is({"record_canvas": None, "ai_config": {"opt_in": True}}) - response = self.client.patch( - "/api/projects/@current/", - {"autocapture_exceptions_opt_in": "Welwyn Garden City"}, - ) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert response.json()["detail"] == "Must be a valid boolean." 
+ # top-level from having a value to None + self._patch_session_replay_config(None) + self._assert_replay_config_is(None) - response = self.client.patch("/api/projects/@current/", {"autocapture_exceptions_opt_in": True}) - assert response.status_code == status.HTTP_200_OK - response = self.client.get("/api/projects/@current/") - assert response.json()["autocapture_exceptions_opt_in"] is True + # top-level from None to having a value + self._patch_session_replay_config({"ai_config": None}) + self._assert_replay_config_is({"ai_config": None}) - def test_configure_exception_autocapture_event_dropping(self): - response = self.client.get("/api/projects/@current/") - assert response.json()["autocapture_exceptions_errors_to_ignore"] is None + # next-level from None to having a value + self._patch_session_replay_config({"ai_config": {"opt_in": True}}) + self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - response = self.client.patch( - "/api/projects/@current/", - {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}}, - ) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert response.json()["detail"] == "Must provide a list for field: autocapture_exceptions_errors_to_ignore." + # next-level from having a value to None + self._patch_session_replay_config({"ai_config": None}) + self._assert_replay_config_is({"ai_config": None}) - response = self.client.patch( - "/api/projects/@current/", - {"autocapture_exceptions_errors_to_ignore": [1, False]}, - ) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert ( - response.json()["detail"] - == "Must provide a list of strings to field: autocapture_exceptions_errors_to_ignore." 
- ) + def test_can_set_replay_configs_patch_session_replay_config_one_level_deep(self) -> None: + # can set just the opt-in + self._patch_session_replay_config({"ai_config": {"opt_in": True}}) + self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - response = self.client.patch( - "/api/projects/@current/", - {"autocapture_exceptions_errors_to_ignore": ["wat am i"]}, - ) - assert response.status_code == status.HTTP_200_OK - response = self.client.get("/api/projects/@current/") - assert response.json()["autocapture_exceptions_errors_to_ignore"] == ["wat am i"] - - def test_configure_exception_autocapture_event_dropping_only_allows_simple_config(self): - response = self.client.patch( - "/api/projects/@current/", - {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]}, - ) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert ( - response.json()["detail"] - == "Field autocapture_exceptions_errors_to_ignore must be less than 300 characters. Complex config should be provided in posthog-js initialization." 
- ) + self._patch_session_replay_config({"ai_config": {"included_event_properties": ["something"]}}) + # even though opt_in was not provided in the patch it should be preserved + self._assert_replay_config_is({"ai_config": {"opt_in": True, "included_event_properties": ["something"]}}) - @parameterized.expand( - [ - [ - "non numeric string", - "Welwyn Garden City", - "invalid_input", - "A valid number is required.", - ], - [ - "negative number", - "-1", - "min_value", - "Ensure this value is greater than or equal to 0.", - ], - [ - "greater than one", - "1.5", - "max_value", - "Ensure this value is less than or equal to 1.", - ], - [ - "too many digits", - "0.534", - "max_decimal_places", - "Ensure that there are no more than 2 decimal places.", - ], - ] - ) - def test_invalid_session_recording_sample_rates( - self, _name: str, provided_value: str, expected_code: str, expected_error: str - ) -> None: - response = self.client.patch("/api/projects/@current/", {"session_recording_sample_rate": provided_value}) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert response.json() == { - "attr": "session_recording_sample_rate", - "code": expected_code, - "detail": expected_error, - "type": "validation_error", - } - - @parameterized.expand( - [ - [ - "non numeric string", - "Trentham monkey forest", - "invalid_input", - "A valid integer is required.", - ], - [ - "negative number", - "-1", - "min_value", - "Ensure this value is greater than or equal to 0.", - ], - [ - "greater than 15000", - "15001", - "max_value", - "Ensure this value is less than or equal to 15000.", - ], - ["too many digits", "0.5", "invalid_input", "A valid integer is required."], - ] - ) - def test_invalid_session_recording_minimum_duration( - self, _name: str, provided_value: str, expected_code: str, expected_error: str - ) -> None: - response = self.client.patch( - "/api/projects/@current/", - {"session_recording_minimum_duration_milliseconds": provided_value}, - ) - assert 
response.status_code == status.HTTP_400_BAD_REQUEST - assert response.json() == { - "attr": "session_recording_minimum_duration_milliseconds", - "code": expected_code, - "detail": expected_error, - "type": "validation_error", - } - - @parameterized.expand( - [ - [ - "string", - "Marple bridge", - "invalid_input", - "Must provide a dictionary or None.", - ], - ["numeric string", "-1", "invalid_input", "Must provide a dictionary or None."], - ["numeric", 1, "invalid_input", "Must provide a dictionary or None."], - ["numeric positive string", "1", "invalid_input", "Must provide a dictionary or None."], - [ - "unexpected json - no id", - {"key": "something"}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - no key", - {"id": 1}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - only variant", - {"variant": "1"}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - variant must be string", - {"variant": 1}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - missing id", - {"key": "one", "variant": "1"}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - missing key", - {"id": "one", "variant": "1"}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. _or_ only 'id', 'key', and 'variant' keys.", - ], - [ - "unexpected json - neither", - {"wat": "wat"}, - "invalid_input", - "Must provide a dictionary with only 'id' and 'key' keys. 
_or_ only 'id', 'key', and 'variant' keys.", - ], - ] - ) - def test_invalid_session_recording_linked_flag( - self, _name: str, provided_value: Any, expected_code: str, expected_error: str - ) -> None: - response = self._patch_linked_flag_config(provided_value, expected_status=status.HTTP_400_BAD_REQUEST) - - assert response.json() == { - "attr": "session_recording_linked_flag", - "code": expected_code, - "detail": expected_error, - "type": "validation_error", - } - - def test_can_set_and_unset_session_recording_linked_flag(self) -> None: - self._patch_linked_flag_config({"id": 1, "key": "provided_value"}) - self._assert_linked_flag_config({"id": 1, "key": "provided_value"}) - - self._patch_linked_flag_config(None) - self._assert_linked_flag_config(None) - - def test_can_set_and_unset_session_recording_linked_flag_variant(self) -> None: - self._patch_linked_flag_config({"id": 1, "key": "provided_value", "variant": "test"}) - self._assert_linked_flag_config({"id": 1, "key": "provided_value", "variant": "test"}) - - self._patch_linked_flag_config(None) - self._assert_linked_flag_config(None) - - @parameterized.expand( - [ - [ - "string", - "Marple bridge", - "invalid_input", - "Must provide a dictionary or None.", - ], - ["numeric", "-1", "invalid_input", "Must provide a dictionary or None."], - [ - "unexpected json - no recordX", - {"key": "something"}, - "invalid_input", - "Must provide a dictionary with only 'recordHeaders' and/or 'recordBody' keys.", - ], - ] - ) - def test_invalid_session_recording_network_payload_capture_config( - self, _name: str, provided_value: str, expected_code: str, expected_error: str - ) -> None: - response = self.client.patch( - "/api/projects/@current/", {"session_recording_network_payload_capture_config": provided_value} - ) - assert response.status_code == status.HTTP_400_BAD_REQUEST - assert response.json() == { - "attr": "session_recording_network_payload_capture_config", - "code": expected_code, - "detail": expected_error, - 
"type": "validation_error", - } - - def test_can_set_and_unset_session_recording_network_payload_capture_config(self) -> None: - # can set just one - first_patch_response = self.client.patch( - "/api/projects/@current/", - {"session_recording_network_payload_capture_config": {"recordHeaders": True}}, - ) - assert first_patch_response.status_code == status.HTTP_200_OK - get_response = self.client.get("/api/projects/@current/") - assert get_response.json()["session_recording_network_payload_capture_config"] == {"recordHeaders": True} - - # can set the other - first_patch_response = self.client.patch( - "/api/projects/@current/", - {"session_recording_network_payload_capture_config": {"recordBody": False}}, - ) - assert first_patch_response.status_code == status.HTTP_200_OK - get_response = self.client.get("/api/projects/@current/") - assert get_response.json()["session_recording_network_payload_capture_config"] == {"recordBody": False} + self._patch_session_replay_config( + {"ai_config": {"opt_in": None, "included_event_properties": ["something"]}} + ) + # even though opt_in was not provided in the patch it should be preserved + self._assert_replay_config_is({"ai_config": {"opt_in": None, "included_event_properties": ["something"]}}) + + # but we don't go into the next nested level and patch that data + # sending a new value without the original + self._patch_session_replay_config({"ai_config": {"included_event_properties": ["and another"]}}) + # and the existing second level nesting is not preserved + self._assert_replay_config_is({"ai_config": {"opt_in": None, "included_event_properties": ["and another"]}}) + + def _assert_replay_config_is(self, expected: dict[str, Any] | None) -> HttpResponse: + get_response = self.client.get("/api/environments/@current/") + assert get_response.status_code == status.HTTP_200_OK, get_response.json() + assert get_response.json()["session_replay_config"] == expected + + return get_response + + def _patch_session_replay_config( + 
self, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK + ) -> HttpResponse: + patch_response = self.client.patch( + "/api/environments/@current/", + {"session_replay_config": config}, + ) + assert patch_response.status_code == expected_status, patch_response.json() - # can unset both - response = self.client.patch( - "/api/projects/@current/", {"session_recording_network_payload_capture_config": None} - ) - assert response.status_code == status.HTTP_200_OK - second_get_response = self.client.get("/api/projects/@current/") - assert second_get_response.json()["session_recording_network_payload_capture_config"] is None + return patch_response - def test_can_set_and_unset_session_replay_config(self) -> None: - # can set - self._patch_session_replay_config({"record_canvas": True}) - self._assert_replay_config_is({"record_canvas": True}) + def _assert_linked_flag_config(self, expected_config: dict | None) -> HttpResponse: + response = self.client.get("/api/environments/@current/") + assert response.status_code == status.HTTP_200_OK + assert response.json()["session_recording_linked_flag"] == expected_config + return response - # can unset - self._patch_session_replay_config(None) - self._assert_replay_config_is(None) + def _patch_linked_flag_config( + self, config: dict | None, expected_status: int = status.HTTP_200_OK + ) -> HttpResponse: + response = self.client.patch("/api/environments/@current/", {"session_recording_linked_flag": config}) + assert response.status_code == expected_status, response.json() + return response - @parameterized.expand( - [ - [ - "string", - "Marple bridge", - "invalid_input", - "Must provide a dictionary or None.", - ], - ["numeric", "-1", "invalid_input", "Must provide a dictionary or None."], - [ - "unexpected json - no record", - {"key": "something"}, - "invalid_input", - "Must provide a dictionary with only allowed keys: included_event_properties, opt_in, preferred_events, excluded_events, 
important_user_properties.", - ], - ] - ) - def test_invalid_session_replay_config_ai_config( - self, _name: str, provided_value: str, expected_code: str, expected_error: str - ) -> None: - response = self._patch_session_replay_config( - {"ai_config": provided_value}, expected_status=status.HTTP_400_BAD_REQUEST - ) - assert response.json() == { - "attr": "session_replay_config", - "code": expected_code, - "detail": expected_error, - "type": "validation_error", - } - - def test_can_set_and_unset_session_replay_config_ai_config(self) -> None: - # can set just the opt-in - self._patch_session_replay_config({"ai_config": {"opt_in": True}}) - self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - - # can set some preferences - self._patch_session_replay_config({"ai_config": {"opt_in": False, "included_event_properties": ["something"]}}) - self._assert_replay_config_is({"ai_config": {"opt_in": False, "included_event_properties": ["something"]}}) - - self._patch_session_replay_config({"ai_config": None}) - self._assert_replay_config_is({"ai_config": None}) - - def test_can_set_replay_configs_without_providing_them_all(self) -> None: - # can set just the opt-in - self._patch_session_replay_config({"ai_config": {"opt_in": True}}) - self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - - self._patch_session_replay_config({"record_canvas": True}) - self._assert_replay_config_is({"record_canvas": True, "ai_config": {"opt_in": True}}) - - def test_can_set_replay_configs_without_providing_them_all_even_when_either_side_is_none(self) -> None: - # because we do some dictionary copying we need a regression test to ensure we can always set and unset keys - self._patch_session_replay_config({"record_canvas": True, "ai_config": {"opt_in": True}}) - self._assert_replay_config_is({"record_canvas": True, "ai_config": {"opt_in": True}}) - - self._patch_session_replay_config({"record_canvas": None}) - self._assert_replay_config_is({"record_canvas": None, "ai_config": 
{"opt_in": True}}) - - # top-level from having a value to None - self._patch_session_replay_config(None) - self._assert_replay_config_is(None) - - # top-level from None to having a value - self._patch_session_replay_config({"ai_config": None}) - self._assert_replay_config_is({"ai_config": None}) - - # next-level from None to having a value - self._patch_session_replay_config({"ai_config": {"opt_in": True}}) - self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - - # next-level from having a value to None - self._patch_session_replay_config({"ai_config": None}) - self._assert_replay_config_is({"ai_config": None}) - - def test_can_set_replay_configs_patch_session_replay_config_one_level_deep(self) -> None: - # can set just the opt-in - self._patch_session_replay_config({"ai_config": {"opt_in": True}}) - self._assert_replay_config_is({"ai_config": {"opt_in": True}}) - - self._patch_session_replay_config({"ai_config": {"included_event_properties": ["something"]}}) - # even though opt_in was not provided in the patch it should be preserved - self._assert_replay_config_is({"ai_config": {"opt_in": True, "included_event_properties": ["something"]}}) - - self._patch_session_replay_config({"ai_config": {"opt_in": None, "included_event_properties": ["something"]}}) - # even though opt_in was not provided in the patch it should be preserved - self._assert_replay_config_is({"ai_config": {"opt_in": None, "included_event_properties": ["something"]}}) - - # but we don't go into the next nested level and patch that data - # sending a new value without the original - self._patch_session_replay_config({"ai_config": {"included_event_properties": ["and another"]}}) - # and the existing second level nesting is not preserved - self._assert_replay_config_is({"ai_config": {"opt_in": None, "included_event_properties": ["and another"]}}) - - def _assert_replay_config_is(self, expected: dict[str, Any] | None) -> HttpResponse: - get_response = 
self.client.get("/api/projects/@current/") - assert get_response.status_code == status.HTTP_200_OK, get_response.json() - assert get_response.json()["session_replay_config"] == expected - - return get_response - - def _patch_session_replay_config( - self, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK - ) -> HttpResponse: - patch_response = self.client.patch( - "/api/projects/@current/", - {"session_replay_config": config}, - ) - assert patch_response.status_code == expected_status, patch_response.json() + return TestTeamAPI - return patch_response - def _assert_linked_flag_config(self, expected_config: dict | None) -> HttpResponse: - response = self.client.get("/api/projects/@current/") - assert response.status_code == status.HTTP_200_OK - assert response.json()["session_recording_linked_flag"] == expected_config - return response +class EnvironmentToProjectRewriteClient(test.APIClient): + """ + This client rewrites all requests to the /api/environments/ endpoint ("proper" environments endpoint) + to /api/projects/ (previously known as the "team" endpoint). Allows us to test for backwards compatibility of + the /api/projects/ endpoint - for use in `test_project.py`. 
+ """ - def _patch_linked_flag_config(self, config: dict | None, expected_status: int = status.HTTP_200_OK) -> HttpResponse: - response = self.client.patch("/api/projects/@current/", {"session_recording_linked_flag": config}) - assert response.status_code == expected_status, response.json() - return response + def generic( + self, + method, + path, + data="", + content_type="application/octet-stream", + secure=False, + *, + headers=None, + **extra, + ): + path = path.replace("/api/environments/", "/api/projects/") + return super().generic(method, path, data, content_type, secure, headers=headers, **extra) def create_team(organization: Organization, name: str = "Test team", timezone: str = "UTC") -> Team: @@ -1059,10 +1148,5 @@ def create_team(organization: Organization, name: str = "Test team", timezone: s ) -async def acreate_team(organization: Organization, name: str = "Test team", timezone: str = "UTC") -> Team: - """ - This is a helper that just creates a team. It currently uses the orm, but we - could use either the api, or django admin to create, to get better parity - with real world scenarios. 
- """ - return await sync_to_async(create_team)(organization, name=name, timezone=timezone) +class TestTeamAPI(team_api_test_factory()): # type: ignore + pass diff --git a/posthog/demo/legacy/__init__.py b/posthog/demo/legacy/__init__.py index a62ca3f350e23..194eb3fcc0542 100644 --- a/posthog/demo/legacy/__init__.py +++ b/posthog/demo/legacy/__init__.py @@ -19,21 +19,6 @@ def demo_route(request: HttpRequest): return render_template("demo.html", request=request, context={"api_token": project_api_token}) -def create_demo_team(organization: Organization, *args) -> Team: - team = Team.objects.create_with_data( - default_dashboards=False, - organization=organization, - name=TEAM_NAME, - ingested_event=True, - completed_snippet_onboarding=True, - session_recording_opt_in=True, - is_demo=True, - ) - create_demo_data(team) - EventDefinition.objects.get_or_create(team=team, name="$pageview") - return team - - def create_demo_data(team: Team, dashboards=True): WebDataGenerator(team, n_people=40).create(dashboards=dashboards) AppDataGenerator(team, n_people=100).create(dashboards=dashboards) diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index 69d6e856f5540..2fe83c79dc067 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -840,6 +840,18 @@ ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), 
range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -886,7 +898,31 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.10 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.11 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.12 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -933,7 +969,54 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.13 + ''' + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', 
toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT e__pdi.person_id) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1.0 + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY 
if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.14 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -980,6 +1063,100 @@ max_bytes_before_external_group_by=0 ''' # --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.15 + ''' + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), 
indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value + FROM + (SELECT count(DISTINCT e__pdi.person_id) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 + FROM events AS e SAMPLE 1.0 + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value_1) + GROUP BY day_start, + breakdown_value_1 + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE arrayExists(x -> isNotNull(x), breakdown_value) + 
GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 + ''' + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT e__pdi.person_id) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), 
'^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1.0 + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 ''' SELECT groupArray(1)(date)[1] AS date, @@ -1027,6 +1204,113 @@ max_bytes_before_external_group_by=0 ''' # --- +# name: 
TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 + ''' + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value + FROM + (SELECT count(DISTINCT e__pdi.person_id) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 + FROM events AS e SAMPLE 1.0 + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value_1) + GROUP BY day_start, + breakdown_value_1 + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE arrayExists(x -> isNotNull(x), breakdown_value) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.5 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.6 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < 
now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.7 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.8 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.9 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- # name: TestTrends.test_filter_events_by_precalculated_cohort ''' @@ -1642,6 +1926,18 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_person_filtering_in_cohort_in_action.1 + ''' SELECT count(DISTINCT person_id) FROM cohortpeople @@ -1650,7 +1946,7 @@ AND version = NULL ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action.2 ''' /* cohort_calculation: */ SELECT count(DISTINCT person_id) @@ -1660,7 +1956,7 @@ AND version = 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.2 +# name: 
TestTrends.test_person_filtering_in_cohort_in_action.3 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -1712,6 +2008,18 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 + ''' SELECT count(DISTINCT person_id) FROM cohortpeople @@ -1720,7 +2028,7 @@ AND version = NULL ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 ''' /* cohort_calculation: */ SELECT count(DISTINCT person_id) @@ -1730,7 +2038,7 @@ AND version = 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.3 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -3494,6 +3802,18 @@ ''' # --- # name: TestTrends.test_trends_any_event_total_count + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_any_event_total_count.1 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), 
range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -3519,7 +3839,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_trends_any_event_total_count.1 +# name: TestTrends.test_trends_any_event_total_count.2 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -3546,6 +3866,18 @@ ''' # --- # name: TestTrends.test_trends_breakdown_cumulative + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_breakdown_cumulative.1 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -3599,6 +3931,18 @@ ''' # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 + ''' + /* 
celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -3891,6 +4235,18 @@ ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_compare_day_interval_relative_range.1 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -3916,7 +4272,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.1 +# name: TestTrends.test_trends_compare_day_interval_relative_range.2 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', 
toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -3942,7 +4298,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.2 +# name: TestTrends.test_trends_compare_day_interval_relative_range.3 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -4209,6 +4565,18 @@ ''' # --- # name: TestTrends.test_trends_per_day_cumulative + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_per_day_cumulative.1 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 
23:59:59', 6, 'UTC'))))), 1))) AS date, arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) @@ -4240,6 +4608,18 @@ ''' # --- # name: TestTrends.test_trends_per_day_dau_cumulative + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_per_day_dau_cumulative.1 ''' SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index 82c13105f1ebb..3b8489d897645 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -3,6 +3,7 @@ from datetime import datetime from decimal import Decimal from typing import Any, Literal, Optional, Union +from uuid import UUID import structlog from django.core.paginator import Paginator @@ -40,6 +41,7 @@ "SessionRecordingPlaylist", "Comment", "Team", + "Project", ] ChangeAction = Literal["changed", "created", "deleted", "merged", "split", "exported"] @@ -219,6 
+221,7 @@ class Meta: "property_type_format", ], "Team": ["uuid", "updated_at", "api_token", "created_at", "id"], + "Project": ["id", "created_at"], } @@ -368,10 +371,10 @@ def dict_changes_between( def log_activity( *, - organization_id: Optional[UUIDT], + organization_id: Optional[UUID], team_id: int, user: Optional[User], - item_id: Optional[Union[int, str, UUIDT]], + item_id: Optional[Union[int, str, UUID]], scope: str, activity: str, detail: Detail, diff --git a/posthog/models/organization.py b/posthog/models/organization.py index e64f45ff8abc4..cf1cd5c26986a 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -72,7 +72,9 @@ def bootstrap( with transaction.atomic(using=self.db): organization = Organization.objects.create(**kwargs) - _, team = Project.objects.create_with_team(organization=organization, team_fields=team_fields) + _, team = Project.objects.create_with_team( + initiating_user=user, organization=organization, team_fields=team_fields + ) organization_membership: Optional[OrganizationMembership] = None if user is not None: organization_membership = OrganizationMembership.objects.create( diff --git a/posthog/models/project.py b/posthog/models/project.py index 5bf82245db590..edd1fdff4edcc 100644 --- a/posthog/models/project.py +++ b/posthog/models/project.py @@ -1,4 +1,5 @@ from typing import TYPE_CHECKING, Optional, cast +from functools import cached_property from django.db import models from django.db import transaction from django.core.validators import MinLengthValidator @@ -6,18 +7,29 @@ from posthog.models.utils import sane_repr if TYPE_CHECKING: - from .team import Team + from posthog.models import Team, User class ProjectManager(models.Manager): - def create_with_team(self, team_fields: Optional[dict] = None, **kwargs) -> tuple["Project", "Team"]: + def create_with_team( + self, *, team_fields: Optional[dict] = None, initiating_user: Optional["User"], **kwargs + ) -> tuple["Project", "Team"]: from .team import 
Team + if team_fields is None: + team_fields = {} + if "name" in kwargs and "name" not in team_fields: + team_fields["name"] = kwargs["name"] + with transaction.atomic(using=self.db): common_id = Team.objects.increment_id_sequence() project = cast("Project", self.create(id=common_id, **kwargs)) - team = Team.objects.create( - id=common_id, organization=project.organization, project=project, **(team_fields or {}) + team = Team.objects.create_with_data( + id=common_id, + organization_id=project.organization_id, + project=project, + initiating_user=initiating_user, + **team_fields, ) return project, team @@ -25,10 +37,10 @@ def create_with_team(self, team_fields: Optional[dict] = None, **kwargs) -> tupl class Project(models.Model): """DO NOT USE YET - you probably mean the `Team` model instead. - `Project` is part of the environemnts feature, which is a work in progress. + `Project` is part of the environments feature, which is a work in progress. """ - id = models.BigIntegerField(primary_key=True, verbose_name="ID") + id = models.BigIntegerField(primary_key=True, verbose_name="ID") # Same as Team.id field organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, @@ -50,3 +62,7 @@ def __str__(self): return str(self.pk) __repr__ = sane_repr("id", "name") + + @cached_property + def passthrough_team(self) -> "Team": + return self.teams.get(pk=self.pk) diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 3aaedbcd5a6fa..3bce7301ea23d 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -1,9 +1,10 @@ import re from decimal import Decimal from functools import lru_cache -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import TYPE_CHECKING, Optional, cast +from uuid import UUID from zoneinfo import ZoneInfo - +from django.core.cache import cache import posthoganalytics import pydantic import pytz @@ -26,6 +27,7 @@ from posthog.models.filters.mixins.utils import cached_property from 
posthog.models.filters.utils import GroupTypeIndex from posthog.models.instance_setting import get_instance_setting +from posthog.models.organization import OrganizationMembership from posthog.models.signals import mutable_receiver from posthog.models.utils import ( UUIDClassicModel, @@ -65,7 +67,7 @@ class TeamManager(models.Manager): def get_queryset(self): return super().get_queryset().defer(*DEPRECATED_ATTRS) - def set_test_account_filters(self, organization: Optional[Any]) -> list: + def set_test_account_filters(self, organization_id: Optional[UUID]) -> list: filters = [ { "key": "$host", @@ -74,10 +76,12 @@ def set_test_account_filters(self, organization: Optional[Any]) -> list: "type": "event", } ] - if organization: - example_emails = organization.members.only("email") + if organization_id: + example_emails_raw = OrganizationMembership.objects.filter(organization_id=organization_id).values_list( + "user__email", flat=True + ) generic_emails = GenericEmails() - example_emails = [email.email for email in example_emails if not generic_emails.is_generic(email.email)] + example_emails = [email for email in example_emails_raw if not generic_emails.is_generic(email)] if len(example_emails) > 0: example_email = re.search(r"@[\w.]+", example_emails[0]) if example_email: @@ -87,16 +91,25 @@ def set_test_account_filters(self, organization: Optional[Any]) -> list: ] return filters - def create_with_data(self, user: Any = None, default_dashboards: bool = True, **kwargs) -> "Team": - kwargs["test_account_filters"] = self.set_test_account_filters(kwargs.get("organization")) + def create_with_data(self, *, initiating_user: Optional["User"], **kwargs) -> "Team": team = cast("Team", self.create(**kwargs)) - # Create default dashboards (skipped for demo projects) - if default_dashboards: - dashboard = Dashboard.objects.db_manager(self.db).create(name="My App Dashboard", pinned=True, team=team) - create_dashboard_from_template("DEFAULT_APP", dashboard) - team.primary_dashboard 
= dashboard - team.save() + if kwargs.get("is_demo"): + if initiating_user is None: + raise ValueError("initiating_user must be provided when creating a demo team") + team.kick_off_demo_data_generation(initiating_user) + return team # Return quickly, as the demo data and setup will be created asynchronously + + team.test_account_filters = self.set_test_account_filters( + kwargs.get("organization_id") or kwargs["organization"].id + ) + + # Create default dashboards + dashboard = Dashboard.objects.db_manager(self.db).create(name="My App Dashboard", pinned=True, team=team) + create_dashboard_from_template("DEFAULT_APP", dashboard) + team.primary_dashboard = dashboard + + team.save() return team def create(self, **kwargs): @@ -447,6 +460,44 @@ def path_cleaning_filter_models(self) -> list[PathCleaningFilter]: continue return filters + def reset_token_and_save(self, *, user: "User", is_impersonated_session: bool): + from posthog.models.activity_logging.activity_log import Change, Detail, log_activity + + old_token = self.api_token + self.api_token = generate_random_token_project() + self.save() + set_team_in_cache(old_token, None) + log_activity( + organization_id=self.organization_id, + team_id=self.pk, + user=cast("User", user), + was_impersonated=is_impersonated_session, + scope="Team", + item_id=self.pk, + activity="updated", + detail=Detail( + name=str(self.name), + changes=[ + Change( + type="Team", + action="changed", + field="api_token", + ) + ], + ), + ) + + def get_is_generating_demo_data(self) -> bool: + cache_key = f"is_generating_demo_data_{self.id}" + return cache.get(cache_key) == "True" + + def kick_off_demo_data_generation(self, initiating_user: "User") -> None: + from posthog.tasks.demo_create_data import create_data_for_demo_team + + cache_key = f"is_generating_demo_data_{self.id}" + cache.set(cache_key, "True") # Create an item in the cache that we can use to see if the demo data is ready + create_data_for_demo_team.delay(self.id, initiating_user.id, 
cache_key) + def all_users_with_access(self) -> QuerySet["User"]: from ee.models.explicit_team_membership import ExplicitTeamMembership from posthog.models.organization import OrganizationMembership diff --git a/posthog/models/test/test_project.py b/posthog/models/test/test_project.py index d6bfe0ed3a36a..1fd7434f90da3 100644 --- a/posthog/models/test/test_project.py +++ b/posthog/models/test/test_project.py @@ -7,6 +7,7 @@ class TestProject(BaseTest): def test_create_project_with_team_no_team_fields(self): project, team = Project.objects.create_with_team( + initiating_user=self.user, organization=self.organization, name="Test project", ) @@ -17,13 +18,14 @@ def test_create_project_with_team_no_team_fields(self): self.assertEqual( team.name, - "Default project", # TODO: When Environments are rolled out, ensure this says "Default environment" + "Test project", # TODO: When Environments are rolled out, ensure this says "Default environment" ) self.assertEqual(team.organization, self.organization) self.assertEqual(team.project, project) def test_create_project_with_team_with_team_fields(self): project, team = Project.objects.create_with_team( + initiating_user=self.user, organization=self.organization, name="Test project", team_fields={"name": "Test team", "access_control": True}, @@ -42,6 +44,7 @@ def test_create_project_with_team_uses_team_id_sequence(self): expected_common_id = Team.objects.increment_id_sequence() + 1 project, team = Project.objects.create_with_team( + initiating_user=self.user, organization=self.organization, name="Test project", team_fields={"name": "Test team", "access_control": True}, @@ -64,6 +67,7 @@ def test_create_project_with_team_does_not_create_if_team_fails(self, mock_creat with self.assertRaises(Exception): Project.objects.create_with_team( + initiating_user=self.user, organization=self.organization, name="Test project", team_fields={"name": "Test team", "access_control": True}, diff --git a/posthog/models/user.py 
b/posthog/models/user.py index 621c1d36429a7..748533be437cd 100644 --- a/posthog/models/user.py +++ b/posthog/models/user.py @@ -80,7 +80,9 @@ def bootstrap( if create_team: team = create_team(organization, user) else: - team = Team.objects.create_with_data(user=user, organization=organization, **(team_fields or {})) + team = Team.objects.create_with_data( + initiating_user=user, organization=organization, **(team_fields or {}) + ) user.join(organization=organization, level=OrganizationMembership.Level.OWNER) return organization, team, user diff --git a/posthog/permissions.py b/posthog/permissions.py index d7c6bd4cf81d9..6a2a3c14cb490 100644 --- a/posthog/permissions.py +++ b/posthog/permissions.py @@ -173,11 +173,7 @@ class TeamMemberLightManagementPermission(BasePermission): def has_permission(self, request, view) -> bool: try: - if request.resolver_match.url_name.startswith("team-"): - # /projects/ endpoint handling - team = view.get_object() - else: - team = view.team + team = view.team except Team.DoesNotExist: return True # This will be handled as a 404 in the viewset requesting_level = view.user_permissions.team(team).effective_membership_level diff --git a/posthog/queries/test/__snapshots__/test_trends.ambr b/posthog/queries/test/__snapshots__/test_trends.ambr index 6e60ae26b943e..81808cef8269b 100644 --- a/posthog/queries/test/__snapshots__/test_trends.ambr +++ b/posthog/queries/test/__snapshots__/test_trends.ambr @@ -853,6 +853,18 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ 
-867,7 +879,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 ''' SELECT groupArray(day_start) as date, @@ -919,7 +931,7 @@ ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, @@ -935,7 +947,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 ''' SELECT groupArray(day_start) as date, @@ -1480,6 +1492,18 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_person_filtering_in_cohort_in_action.1 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -1513,7 +1537,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action.2 ''' SELECT groupArray(day_start) as date, @@ -1579,6 +1603,18 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', 
'') AS value, count(*) as count @@ -1613,7 +1649,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 ''' SELECT groupArray(day_start) as date, @@ -3970,6 +4006,18 @@ # --- # name: TestTrends.test_trends_any_event_total_count ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_any_event_total_count.1 + ''' SELECT groupArray(day_start) as date, groupArray(count) AS total @@ -3994,7 +4042,7 @@ ORDER BY day_start) ''' # --- -# name: TestTrends.test_trends_any_event_total_count.1 +# name: TestTrends.test_trends_any_event_total_count.2 ''' SELECT groupArray(day_start) as date, @@ -4022,6 +4070,18 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_breakdown_cumulative.1 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -4036,7 +4096,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_trends_breakdown_cumulative.1 +# name: TestTrends.test_trends_breakdown_cumulative.2 ''' SELECT groupArray(day_start) as date, @@ -4098,6 +4158,18 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# 
--- +# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -4120,7 +4192,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 +# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.2 ''' SELECT groupArray(day_start) as date, @@ -4298,6 +4370,18 @@ # --- # name: TestTrends.test_trends_compare_day_interval_relative_range ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_compare_day_interval_relative_range.1 + ''' SELECT groupArray(day_start) as date, groupArray(count) AS total @@ -4322,7 +4406,7 @@ ORDER BY day_start) ''' # --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.1 +# name: TestTrends.test_trends_compare_day_interval_relative_range.2 ''' SELECT groupArray(day_start) as date, @@ -4348,7 +4432,7 @@ ORDER BY day_start) ''' # --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.2 +# name: TestTrends.test_trends_compare_day_interval_relative_range.3 ''' SELECT groupArray(day_start) as date, @@ -4657,6 +4741,18 @@ # --- # name: TestTrends.test_trends_per_day_cumulative ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_trends_per_day_cumulative.1 + ''' SELECT groupArray(day_start) as date, groupArray(count) AS total diff --git a/posthog/test/base.py b/posthog/test/base.py index 451264bfc205b..0b7ccd78a85dc 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -107,20 
+107,21 @@ def _setup_test_data(klass): klass.organization = Organization.objects.create(name=klass.CONFIG_ORGANIZATION_NAME) - klass.project, klass.team = Project.objects.create_with_team( + klass.project = Project.objects.create(id=Team.objects.increment_id_sequence(), organization=klass.organization) + klass.team = Team.objects.create( + id=klass.project.id, + project=klass.project, organization=klass.organization, - team_fields={ - "api_token": klass.CONFIG_API_TOKEN, - "test_account_filters": [ - { - "key": "email", - "value": "@posthog.com", - "operator": "not_icontains", - "type": "person", - } - ], - "has_completed_onboarding_for": {"product_analytics": True}, - }, + api_token=klass.CONFIG_API_TOKEN, + test_account_filters=[ + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } + ], + has_completed_onboarding_for={"product_analytics": True}, ) if klass.CONFIG_EMAIL: klass.user = User.objects.create_and_join(klass.organization, klass.CONFIG_EMAIL, klass.CONFIG_PASSWORD) diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index ce8bfeb71b7bb..f5b4190ef4293 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -124,7 +124,7 @@ class TestAutoProjectMiddleware(APIBaseTest): @classmethod def setUpTestData(cls): super().setUpTestData() - cls.base_app_num_queries = 40 + cls.base_app_num_queries = 45 # Create another team that the user does have access to cls.second_team = create_team(organization=cls.organization, name="Second Life") diff --git a/posthog/test/test_team.py b/posthog/test/test_team.py index 076fc21e5fe34..6894fb6134642 100644 --- a/posthog/test/test_team.py +++ b/posthog/test/test_team.py @@ -76,7 +76,7 @@ def test_team_has_expected_defaults(self): self.assertEqual(team.autocapture_exceptions_errors_to_ignore, None) def test_create_team_with_test_account_filters(self): - team = Team.objects.create_with_data(organization=self.organization) + 
team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) self.assertEqual( team.test_account_filters, [ @@ -99,7 +99,7 @@ def test_create_team_with_test_account_filters(self): user = User.objects.create(email="test@gmail.com") organization = Organization.objects.create() organization.members.set([user]) - team = Team.objects.create_with_data(organization=organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=organization) self.assertEqual( team.test_account_filters, [ @@ -113,7 +113,7 @@ def test_create_team_with_test_account_filters(self): ) def test_create_team_sets_primary_dashboard(self): - team = Team.objects.create_with_data(organization=self.organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) self.assertIsInstance(team.primary_dashboard, Dashboard) # Ensure insights are created and linked @@ -139,7 +139,7 @@ def test_preinstalled_are_autoenabled(self, mock_get): def test_team_on_cloud_uses_feature_flag_to_determine_person_on_events(self, mock_feature_enabled): with self.is_cloud(True): with override_instance_config("PERSON_ON_EVENTS_ENABLED", False): - team = Team.objects.create_with_data(organization=self.organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) self.assertEqual( team.person_on_events_mode, PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS ) @@ -162,7 +162,7 @@ def test_team_on_cloud_uses_feature_flag_to_determine_person_on_events(self, moc def test_team_on_self_hosted_uses_instance_setting_to_determine_person_on_events(self, mock_feature_enabled): with self.is_cloud(False): with override_instance_config("PERSON_ON_EVENTS_V2_ENABLED", True): - team = Team.objects.create_with_data(organization=self.organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) self.assertEqual( 
team.person_on_events_mode, PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS ) @@ -171,7 +171,7 @@ def test_team_on_self_hosted_uses_instance_setting_to_determine_person_on_events assert args_list[0][0] != "persons-on-events-v2-reads-enabled" with override_instance_config("PERSON_ON_EVENTS_V2_ENABLED", False): - team = Team.objects.create_with_data(organization=self.organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) self.assertEqual(team.person_on_events_mode, PersonsOnEventsMode.DISABLED) for args_list in mock_feature_enabled.call_args_list: # It is ok if we check other feature flags, just not `persons-on-events-v2-reads-enabled` @@ -179,7 +179,7 @@ def test_team_on_self_hosted_uses_instance_setting_to_determine_person_on_events def test_each_team_gets_project_with_default_name_and_same_id(self): # Can be removed once environments are fully rolled out - team = Team.objects.create_with_data(organization=self.organization) + team = Team.objects.create_with_data(initiating_user=self.user, organization=self.organization) project = Project.objects.filter(id=team.id).first() @@ -188,7 +188,7 @@ def test_each_team_gets_project_with_default_name_and_same_id(self): def test_each_team_gets_project_with_custom_name_and_same_id(self): # Can be removed once environments are fully rolled out - team = Team.objects.create_with_data(organization=self.organization, name="Hogflix") + team = Team.objects.create_with_data(organization=self.organization, initiating_user=self.user, name="Hogflix") project = Project.objects.filter(id=team.id).first() @@ -203,7 +203,7 @@ def test_team_not_created_if_project_creation_fails(self, mock_create): initial_project_count = Project.objects.count() with self.assertRaises(Exception): - Team.objects.create_with_data(organization=self.organization, name="Hogflix") + Team.objects.create_with_data(organization=self.organization, initiating_user=self.user, name="Hogflix") 
self.assertEqual(Team.objects.count(), initial_team_count) self.assertEqual(Project.objects.count(), initial_project_count) diff --git a/posthog/urls.py b/posthog/urls.py index c34956e8131b3..69eaddf1c9717 100644 --- a/posthog/urls.py +++ b/posthog/urls.py @@ -28,10 +28,6 @@ authentication, capture, decide, - organizations_router, - project_dashboards_router, - project_feature_flags_router, - projects_router, router, sharing, signup, @@ -71,13 +67,7 @@ logger.warn(f"Could not import ee.urls", exc_info=True) pass else: - extend_api_router( - router, - projects_router=projects_router, - organizations_router=organizations_router, - project_dashboards_router=project_dashboards_router, - project_feature_flags_router=project_feature_flags_router, - ) + extend_api_router() @requires_csrf_token diff --git a/posthog/utils.py b/posthog/utils.py index aaf02658b42d1..0e2f5a6b30f38 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -349,12 +349,14 @@ def render_template( if not request.GET.get("no-preloaded-app-context"): from posthog.api.shared import TeamPublicSerializer from posthog.api.team import TeamSerializer + from posthog.api.project import ProjectSerializer from posthog.api.user import UserSerializer from posthog.user_permissions import UserPermissions from posthog.views import preflight_check posthog_app_context = { "current_user": None, + "current_project": None, "current_team": None, "preflight": json.loads(preflight_check(request).getvalue()), "default_event_name": "$pageview", @@ -386,6 +388,12 @@ def render_template( many=False, ) posthog_app_context["current_team"] = team_serialized.data + project_serialized = ProjectSerializer( + user.team.project, + context={"request": request, "user_permissions": user_permissions}, + many=False, + ) + posthog_app_context["current_project"] = project_serialized.data posthog_app_context["frontend_apps"] = get_frontend_apps(user.team.pk) posthog_app_context["default_event_name"] = get_default_event_name(user.team) diff 
--git a/posthog/warehouse/api/test/test_external_data_source.py b/posthog/warehouse/api/test/test_external_data_source.py index 8e295ac2e925c..8a455a7b89883 100644 --- a/posthog/warehouse/api/test/test_external_data_source.py +++ b/posthog/warehouse/api/test/test_external_data_source.py @@ -621,7 +621,7 @@ def test_internal_postgres(self, patch_get_sql_schemas_for_source_type): } ] - new_team = Team.objects.create(name="new_team", organization=self.team.organization) + new_team = Team.objects.create(id=984961485, name="new_team", organization=self.team.organization) response = self.client.post( f"/api/projects/{new_team.pk}/external_data_sources/database_schema/", @@ -665,7 +665,7 @@ def test_internal_postgres(self, patch_get_sql_schemas_for_source_type): } ] - new_team = Team.objects.create(name="new_team", organization=self.team.organization) + new_team = Team.objects.create(id=984961486, name="new_team", organization=self.team.organization) response = self.client.post( f"/api/projects/{new_team.pk}/external_data_sources/database_schema/",