diff --git a/posthog/api/alert.py b/posthog/api/alert.py index a177c61b0322d..3275ae53d7933 100644 --- a/posthog/api/alert.py +++ b/posthog/api/alert.py @@ -215,6 +215,14 @@ def update(self, instance, validated_data): # If anything changed we set to NOT_FIRING, so it's firing and notifying with the new settings instance.state = AlertState.NOT_FIRING + calculation_interval_changed = ( + "calculation_interval" in validated_data + and validated_data["calculation_interval"] != instance.calculation_interval + ) + if conditions_or_threshold_changed or calculation_interval_changed: + # calculate alert right now, don't wait until preset time + instance.next_check_at = None + return super().update(instance, validated_data) def validate_snoozed_until(self, value): diff --git a/posthog/tasks/alerts/checks.py b/posthog/tasks/alerts/checks.py index d6f8c020f1d7e..539c66bf348ad 100644 --- a/posthog/tasks/alerts/checks.py +++ b/posthog/tasks/alerts/checks.py @@ -11,7 +11,7 @@ from django.conf import settings from django.db import transaction import structlog -from sentry_sdk import capture_exception +from sentry_sdk import capture_exception, set_tag from posthog.errors import CHQueryErrorTooManySimultaneousQueries from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import ( @@ -289,10 +289,10 @@ def check_alert_and_notify_atomically(alert: AlertConfiguration) -> None: error_message = f"AlertCheckError: error sending notifications for alert_id = {alert.id}" logger.exception(error_message) - capture_exception( - Exception(error_message), - {"alert_id": alert.id, "message": str(err)}, - ) + set_tag("alert_config_id", alert.id) + set_tag("evaluation_error_message", str(err)) + + capture_exception(Exception(error_message)) # don't want alert state to be updated (so that it's retried as next_check_at won't be updated) # so we raise again as @transaction.atomic decorator won't commit db updates diff --git a/posthog/tasks/alerts/utils.py b/posthog/tasks/alerts/utils.py
index 06b94cc938089..0084cc6efa0e4 100644 --- a/posthog/tasks/alerts/utils.py +++ b/posthog/tasks/alerts/utils.py @@ -59,8 +59,8 @@ def alert_calculation_interval_to_relativedelta(alert_calculation_interval: Aler def send_notifications_for_breaches(alert: AlertConfiguration, breaches: list[str]) -> None: subject = f"PostHog alert {alert.name} is firing" campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" - insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" - alert_url = f"{insight_url}/alerts/{alert.id}" + insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}" + alert_url = f"{insight_url}?alert_id={alert.id}" message = EmailMessage( campaign_key=campaign_key, subject=subject, @@ -86,14 +86,14 @@ def send_notifications_for_breaches(alert: AlertConfiguration, breaches: list[st def send_notifications_for_errors(alert: AlertConfiguration, error: dict) -> None: subject = f"PostHog alert {alert.name} check failed to evaluate" campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" - insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" - alert_url = f"{insight_url}/alerts/{alert.id}" + insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}" + alert_url = f"{insight_url}?alert_id={alert.id}" message = EmailMessage( campaign_key=campaign_key, subject=subject, - template_name="alert_check_firing", + template_name="alert_check_failed_to_evaluate", template_context={ - "match_descriptions": error, + "alert_error": error, "insight_url": insight_url, "insight_name": alert.insight.name, "alert_url": alert_url, diff --git a/posthog/templates/alert_check_failed_to_evaluate.html b/posthog/templates/alert_check_failed_to_evaluate.html new file mode 100644 index 0000000000000..4cf8a5f5d54e0 --- /dev/null +++ b/posthog/templates/alert_check_failed_to_evaluate.html @@ -0,0 +1,10 @@ +{% extends 
"email/base.html" %} {% load posthog_assets %} {% block section %} +
+ The {{ alert_name }} alert failed to evaluate for insight {{ insight_name }} with the following error: +
+{{ alert_error }}
+