diff --git a/ee/tasks/subscriptions/__init__.py b/ee/tasks/subscriptions/__init__.py
index 416e0a3337533..17a1455fb2dfb 100644
--- a/ee/tasks/subscriptions/__init__.py
+++ b/ee/tasks/subscriptions/__init__.py
@@ -3,7 +3,7 @@
 import structlog
 from prometheus_client import Counter
-from sentry_sdk import capture_exception
+from sentry_sdk import capture_exception, capture_message
 
 from ee.tasks.subscriptions.email_subscriptions import send_email_subscription_report
 from ee.tasks.subscriptions.slack_subscriptions import send_slack_subscription_report
@@ -41,11 +41,15 @@ def _deliver_subscription_report(
         # Same value as before so nothing to do
         return
 
+    insights, assets = generate_assets(subscription)
+
+    if not assets:
+        capture_message("No assets are in this subscription", tags={"subscription_id": subscription.id})
+        return
+
     if subscription.target_type == "email":
         SUBSCRIPTION_QUEUED.labels(destination="email").inc()
 
-        insights, assets = generate_assets(subscription)
-
         # Send emails
         emails = subscription.target_value.split(",")
         if is_new_subscription_target:
@@ -77,7 +81,6 @@ def _deliver_subscription_report(
     elif subscription.target_type == "slack":
         SUBSCRIPTION_QUEUED.labels(destination="slack").inc()
 
-        insights, assets = generate_assets(subscription)
         try:
             send_slack_subscription_report(
                 subscription, assets, total_asset_count=len(insights), is_new_subscription=is_new_subscription_target
diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py
index ccd517b356cd9..265e0385a5bcf 100644
--- a/ee/tasks/subscriptions/subscription_utils.py
+++ b/ee/tasks/subscriptions/subscription_utils.py
@@ -44,9 +44,12 @@ def generate_assets(
     ]
     ExportedAsset.objects.bulk_create(assets)
 
+    if not assets:
+        return insights, assets
+
     # Wait for all assets to be exported
     tasks = [exporter.export_asset.si(asset.id) for asset in assets]
-    # run them one after the other so we don't exhaust celery workers
+    # run them one after the other, so we don't exhaust celery workers
     parallel_job = chain(*tasks).apply_async()
     wait_for_parallel_celery_group(