diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py
index 7107f76a443c5..3afdda18ef853 100644
--- a/posthog/api/app_metrics.py
+++ b/posthog/api/app_metrics.py
@@ -118,8 +118,12 @@ def get_batch_export_runs_app_metrics_queryset(self, batch_export_id: str):
             .annotate(dates=TruncDay("last_updated_at"))
             .values("dates")
             .annotate(
-                successes=Sum(Coalesce("records_total_count", 0), filter=Q(status=BatchExportRun.Status.COMPLETED)),
-                failures=Sum(Coalesce("records_total_count", 0), filter=~Q(status=BatchExportRun.Status.COMPLETED)),
+                successes=Sum(
+                    Coalesce("records_total_count", 0), filter=Q(status=BatchExportRun.Status.COMPLETED), default=0
+                ),
+                failures=Sum(
+                    Coalesce("records_total_count", 0), filter=~Q(status=BatchExportRun.Status.COMPLETED), default=0
+                ),
             )
             .order_by("dates")
             .all()
diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py
index 4659d1c023a31..dd9c01ba023b6 100644
--- a/posthog/api/test/test_app_metrics.py
+++ b/posthog/api/test/test_app_metrics.py
@@ -156,6 +156,75 @@ def test_retrieve_batch_export_runs_app_metrics(self):
                 },
             )
 
+    def test_retrieve_batch_export_runs_app_metrics_defaults_to_zero(self):
+        """Test batch export metrics returned by app metrics endpoint."""
+        destination_data = {
+            "type": "S3",
+            "config": {
+                "bucket_name": "my-production-s3-bucket",
+                "region": "us-east-1",
+                "prefix": "posthog-events/",
+                "aws_access_key_id": "abc123",
+                "aws_secret_access_key": "secret",
+            },
+        }
+
+        batch_export_data = {
+            "name": "my-production-s3-bucket-destination",
+            "destination": destination_data,
+            "interval": "hour",
+        }
+
+        temporal = sync_connect()
+        now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc)
+
+        with start_test_worker(temporal):
+            response = create_batch_export_ok(
+                self.client,
+                self.team.pk,
+                json.dumps(batch_export_data),
+            )
+            batch_export_id = response["id"]
+
+            for days_ago in range(0, 7):
+                last_updated_at = now - dt.timedelta(days=days_ago)
+
+                with freeze_time(last_updated_at):
+                    # Since 'last_updated_at' uses 'auto_now', passing the argument is ignored.
+                    # We have to re-freeze time to get each run created on a single date.
+                    BatchExportRun.objects.create(
+                        batch_export_id=batch_export_id,
+                        data_interval_end=last_updated_at,
+                        data_interval_start=last_updated_at - dt.timedelta(hours=1),
+                        status=BatchExportRun.Status.COMPLETED,
+                        records_completed=1,
+                        records_total_count=1,
+                    )
+
+            response = self.client.get(f"/api/projects/@current/app_metrics/{batch_export_id}?date_from=-7d")
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(
+                response.json(),
+                {
+                    "metrics": {
+                        "dates": [
+                            "2021-11-29",
+                            "2021-11-30",
+                            "2021-12-01",
+                            "2021-12-02",
+                            "2021-12-03",
+                            "2021-12-04",
+                            "2021-12-05",
+                        ],
+                        "successes": [1, 1, 1, 1, 1, 1, 1],
+                        "successes_on_retry": [0, 0, 0, 0, 0, 0, 0],
+                        "failures": [0, 0, 0, 0, 0, 0, 0],
+                        "totals": {"successes": 7, "successes_on_retry": 0, "failures": 0},
+                    },
+                    "errors": None,
+                },
+            )
+
     def test_list_historical_exports(self):
         self._create_activity_log(
             activity="job_triggered",