diff --git a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py
index 5c131470075b3..66248cf2a2489 100644
--- a/posthog/tasks/test/test_warehouse.py
+++ b/posthog/tasks/test/test_warehouse.py
@@ -15,6 +15,41 @@ class TestWarehouse(APIBaseTest):
     @patch("posthog.tasks.warehouse.MONTHLY_LIMIT", 100)
+    @patch("posthog.tasks.warehouse.cancel_external_data_workflow")
+    @patch("posthog.tasks.warehouse.pause_external_data_schedule")
+    @patch("ee.billing.quota_limiting.list_limited_team_attributes")
+    def test_check_synced_row_limits_of_team_monthly_limit(
+        self,
+        list_limited_team_attributes_mock: MagicMock,
+        pause_schedule_mock: MagicMock,
+        cancel_workflow_mock: MagicMock,
+    ) -> None:
+        list_limited_team_attributes_mock.return_value = []
+
+        source = ExternalDataSource.objects.create(
+            source_id="test_id",
+            connection_id="fake connectino_id",
+            destination_id="fake destination_id",
+            team=self.team,
+            status="Running",
+            source_type="Stripe",
+        )
+
+        job = ExternalDataJob.objects.create(
+            pipeline=source, workflow_id="fake_workflow_id", team=self.team, status="Running", rows_synced=100000
+        )
+
+        check_synced_row_limits_of_team(self.team.pk)
+
+        source.refresh_from_db()
+        self.assertEqual(source.status, ExternalDataSource.Status.PAUSED)
+
+        job.refresh_from_db()
+        self.assertEqual(job.status, ExternalDataJob.Status.CANCELLED)
+
+        self.assertEqual(pause_schedule_mock.call_count, 1)
+        self.assertEqual(cancel_workflow_mock.call_count, 1)
+
     @patch("posthog.tasks.warehouse.cancel_external_data_workflow")
     @patch("posthog.tasks.warehouse.pause_external_data_schedule")
     @patch("ee.billing.quota_limiting.list_limited_team_attributes")
@@ -24,7 +59,7 @@ def test_check_synced_row_limits_of_team(
         self,
         pause_schedule_mock: MagicMock,
         cancel_workflow_mock: MagicMock,
     ) -> None:
-        list_limited_team_attributes_mock.return_value = [self.team.pk]
+        list_limited_team_attributes_mock.return_value = [self.team.api_token]
 
         source = ExternalDataSource.objects.create(
             source_id="test_id",
diff --git a/posthog/tasks/warehouse.py b/posthog/tasks/warehouse.py
index 45672afa3506f..0426aade2991f 100644
--- a/posthog/tasks/warehouse.py
+++ b/posthog/tasks/warehouse.py
@@ -41,10 +41,12 @@ def check_synced_row_limits() -> None:
 @shared_task(ignore_result=True)
 def check_synced_row_limits_of_team(team_id: int) -> None:
     logger.info("Checking synced row limits of team", team_id=team_id)
+    team_model = Team.objects.get(pk=team_id)
 
     from ee.billing.quota_limiting import list_limited_team_attributes, QuotaResource, QuotaLimitingCaches
 
-    limited_teams_rows_synced = list_limited_team_attributes(
+    # TODO: temp workaround. Should use team ids directly instead of tokens
+    limited_team_tokens_rows_synced = list_limited_team_attributes(
         QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY
     )
 
@@ -59,7 +61,7 @@ def check_synced_row_limits_of_team(team_id: int) -> None:
     ]
     total_rows_synced = sum(rows_synced_list)
 
-    if team_id in limited_teams_rows_synced or total_rows_synced > MONTHLY_LIMIT:
+    if team_model.api_token in limited_team_tokens_rows_synced or total_rows_synced > MONTHLY_LIMIT:
         running_jobs = ExternalDataJob.objects.filter(team_id=team_id, status=ExternalDataJob.Status.RUNNING)
         for job in running_jobs:
             try:
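
Note on the change above (explanatory, not part of the patch): list_limited_team_attributes(QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY) returns the quota-limited teams keyed by API token, so the old membership check against team_id could never match; the patch resolves the Team row first and compares its api_token, and the test now asserts with self.team.api_token instead of self.team.pk. Below is a minimal sketch of just that corrected comparison, assuming the PostHog models and quota-limiting helpers shown in the diff; the helper name is_rows_synced_limited is illustrative only, and the real task additionally enforces MONTHLY_LIMIT and cancels running jobs.

    from posthog.models import Team


    def is_rows_synced_limited(team_id: int) -> bool:
        # Imported lazily, mirroring the patched check_synced_row_limits_of_team.
        from ee.billing.quota_limiting import (
            QuotaLimitingCaches,
            QuotaResource,
            list_limited_team_attributes,
        )

        # The limiter returns team API tokens, not primary keys, so the team has
        # to be looked up and compared by its token (the temp workaround flagged
        # by the TODO in the patch).
        limited_team_tokens = list_limited_team_attributes(
            QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY
        )
        team = Team.objects.get(pk=team_id)
        return team.api_token in limited_team_tokens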