diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 5a6f653aeef6e..9deccea94b408 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0378_alter_user_theme_mode +posthog: 0379_alter_scheduledchange sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/posthog/api/test/test_scheduled_change.py b/posthog/api/test/test_scheduled_change.py index f7ec88f204c82..415e987cb0216 100644 --- a/posthog/api/test/test_scheduled_change.py +++ b/posthog/api/test/test_scheduled_change.py @@ -13,7 +13,7 @@ def test_can_create_flag_change(self): f"/api/projects/{self.team.id}/scheduled_changes/", data={ "id": 6, - "record_id": 119, + "record_id": "119", "model_name": "FeatureFlag", "payload": payload, "scheduled_at": "2023-12-08T12:00:00Z", @@ -27,6 +27,6 @@ def test_can_create_flag_change(self): assert response.status_code == status.HTTP_201_CREATED, response_data assert ScheduledChange.objects.filter(id=response_data["id"]).exists() assert response_data["model_name"] == "FeatureFlag" - assert response_data["record_id"] == 119 + assert response_data["record_id"] == "119" assert response_data["payload"] == payload assert response_data["created_by"]["id"] == self.user.id diff --git a/posthog/celery.py b/posthog/celery.py index d1804524760ac..7980df823600b 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -271,6 +271,13 @@ def setup_periodic_tasks(sender: Celery, **kwargs): name="recalculate cohorts", ) + add_periodic_task_with_expiry( + sender, + 120, + process_scheduled_changes.s(), + name="process scheduled changes", + ) + if clear_clickhouse_crontab := get_crontab(settings.CLEAR_CLICKHOUSE_REMOVED_DATA_SCHEDULE_CRON): sender.add_periodic_task( clear_clickhouse_crontab, @@ -871,6 +878,13 @@ def calculate_cohort(): calculate_cohorts() +@app.task(ignore_result=True) +def process_scheduled_changes(): + from posthog.tasks.process_scheduled_changes import process_scheduled_changes + + process_scheduled_changes() + + @app.task(ignore_result=True) def sync_insight_cache_states_task(): from posthog.caching.insight_caching_state import sync_insight_cache_states diff --git a/posthog/migrations/0379_alter_scheduledchange.py b/posthog/migrations/0379_alter_scheduledchange.py new file mode 100644 index 0000000000000..0e0025324151a --- /dev/null +++ b/posthog/migrations/0379_alter_scheduledchange.py @@ -0,0 +1,29 @@ +# Generated by Django 3.2.19 on 2023-12-21 14:01 + +from django.db import migrations, models +from django.contrib.postgres.operations import AddIndexConcurrently # type: ignore + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ("posthog", "0378_alter_user_theme_mode"), + ] + + operations = [ + migrations.AlterField( + model_name="scheduledchange", + name="record_id", + field=models.CharField(max_length=200), + ), + migrations.AlterField( + model_name="scheduledchange", + name="scheduled_at", + field=models.DateTimeField(), + ), + AddIndexConcurrently( + model_name="scheduledchange", + index=models.Index(fields=["scheduled_at", "executed_at"], name="posthog_sch_schedul_c3687e_idx"), + ), + ] diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index c339abe44d0ed..80c92452d6d74 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ 
b/posthog/models/feature_flag/feature_flag.py @@ -297,6 +297,31 @@ def get_cohort_ids( return list(cohort_ids) + def scheduled_changes_dispatcher(self, payload): + from posthog.api.feature_flag import FeatureFlagSerializer + + if "operation" not in payload or "value" not in payload: + raise Exception("Invalid payload") + + context = { + "request": {"user": self.created_by}, + "team_id": self.team_id, + } + serializer_data = {} + + if payload["operation"] == "add_release_condition": + existing_groups = self.get_filters().get("groups", []) + new_groups = payload["value"].get("groups", []) + serializer_data["filters"] = {"groups": existing_groups + new_groups} + elif payload["operation"] == "update_status": + serializer_data["active"] = payload["value"] + else: + raise Exception(f"Unrecognized operation: {payload['operation']}") + + serializer = FeatureFlagSerializer(self, data=serializer_data, context=context, partial=True) + if serializer.is_valid(raise_exception=True): + serializer.save() + @property def uses_cohorts(self) -> bool: for condition in self.conditions: diff --git a/posthog/models/scheduled_change.py b/posthog/models/scheduled_change.py index 2fea198fd3ba0..ee92cc59c506e 100644 --- a/posthog/models/scheduled_change.py +++ b/posthog/models/scheduled_change.py @@ -1,5 +1,4 @@ from django.db import models -from django.utils import timezone class ScheduledChange(models.Model): @@ -7,10 +6,10 @@ class AllowedModels(models.TextChoices): FEATURE_FLAG = "FeatureFlag", "feature flag" id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID") - record_id = models.IntegerField() + record_id: models.CharField = models.CharField(max_length=200) model_name: models.CharField = models.CharField(max_length=100, choices=AllowedModels.choices) payload: models.JSONField = models.JSONField(default=dict) - scheduled_at: models.DateTimeField = models.DateTimeField(default=timezone.now) + scheduled_at: models.DateTimeField = models.DateTimeField() executed_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) failure_reason = models.CharField(max_length=400, null=True, blank=True) @@ -18,3 +17,8 @@ class AllowedModels(models.TextChoices): created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + + class Meta: + indexes = [ + models.Index(fields=["scheduled_at", "executed_at"]), + ] diff --git a/posthog/tasks/__init__.py b/posthog/tasks/__init__.py index 261a4c33ef1a5..80d3661259f16 100644 --- a/posthog/tasks/__init__.py +++ b/posthog/tasks/__init__.py @@ -7,6 +7,7 @@ demo_create_data, email, exporter, + process_scheduled_changes, split_person, sync_all_organization_available_features, usage_report, @@ -20,6 +21,7 @@ "demo_create_data", "email", "exporter", + "process_scheduled_changes", "split_person", "sync_all_organization_available_features", "user_identify", diff --git a/posthog/tasks/process_scheduled_changes.py b/posthog/tasks/process_scheduled_changes.py new file mode 100644 index 0000000000000..22d09e9948d35 --- /dev/null +++ b/posthog/tasks/process_scheduled_changes.py @@ -0,0 +1,39 @@ +from posthog.models import ScheduledChange +from django.utils import timezone +from posthog.models import FeatureFlag +from django.db import transaction, OperationalError + +models = {"FeatureFlag": FeatureFlag} + + +def process_scheduled_changes() -> None: + try: + 
with transaction.atomic(): + scheduled_changes = ( + ScheduledChange.objects.select_for_update(nowait=True) + .filter( + executed_at__isnull=True, + scheduled_at__lte=timezone.now(), + ) + .order_by("scheduled_at")[:10000] + ) + + for scheduled_change in scheduled_changes: + try: + # Execute the change on the model instance + model = models[scheduled_change.model_name] + instance = model.objects.get(id=scheduled_change.record_id) + instance.scheduled_changes_dispatcher(scheduled_change.payload) + + # Mark scheduled change completed + scheduled_change.executed_at = timezone.now() + scheduled_change.save() + + except Exception as e: + # Store the failure reason + scheduled_change.failure_reason = str(e) + scheduled_change.executed_at = timezone.now() + scheduled_change.save() + except OperationalError: + # Failed to obtain the lock + pass diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr new file mode 100644 index 0000000000000..87019fd274336 --- /dev/null +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -0,0 +1,291 @@ +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 2) + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.1 + ' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE ("posthog_scheduledchange"."executed_at" IS NULL + AND "posthog_scheduledchange"."scheduled_at" <= '2023-12-21T09:00:00+00:00'::timestamptz) + ORDER BY "posthog_scheduledchange"."scheduled_at" ASC + LIMIT 10000 + FOR + UPDATE NOWAIT + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.10 + ' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 2 + LIMIT 21 + ' +--- +# name: 
TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.11 + ' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.12 + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE "posthog_featureflag"."key" = 'flag-1' + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.2 + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE "posthog_featureflag"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.3 + ' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."is_active", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."requested_password_reset_at", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."email_opt_in", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."events_column_config" + FROM "posthog_user" + WHERE "posthog_user"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.4 + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + 
"posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 2) + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.5 + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE "posthog_featureflag"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.6 + ' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."is_active", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."requested_password_reset_at", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."email_opt_in", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."events_column_config" + FROM "posthog_user" + WHERE "posthog_user"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.7 + ' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 2) + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.8 + ' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + 
"posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 2 + LIMIT 21 + ' +--- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.9 + ' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 2 + LIMIT 21 + ' +--- diff --git a/posthog/tasks/test/test_process_scheduled_changes.py b/posthog/tasks/test/test_process_scheduled_changes.py new file mode 100644 index 0000000000000..866f3847c5d34 --- /dev/null +++ b/posthog/tasks/test/test_process_scheduled_changes.py @@ -0,0 +1,179 @@ +from datetime import datetime, timedelta, timezone +from posthog.models import ScheduledChange, FeatureFlag +from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries +from posthog.tasks.process_scheduled_changes import process_scheduled_changes +from freezegun import freeze_time + + +class TestProcessScheduledChanges(APIBaseTest, QueryMatchingTest): + def test_schedule_feature_flag_set_active(self) -> None: + feature_flag = FeatureFlag.objects.create( + name="Flag 1", + key="flag-1", + active=False, + filters={"groups": []}, + team=self.team, + created_by=self.user, + ) + + ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload={"operation": "update_status", "value": True}, + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)).isoformat(), + ) + + process_scheduled_changes() + + updated_flag = FeatureFlag.objects.get(key="flag-1") + self.assertEqual(updated_flag.active, True) + + def test_schedule_feature_flag_add_release_condition(self) -> None: + feature_flag = FeatureFlag.objects.create( + name="Flag 1", + key="flag-1", + active=False, + filters={"groups": []}, + team=self.team, + created_by=self.user, + ) + + new_release_condition = { + "variant": None, + "properties": [{"key": "$browser", "type": "person", "value": ["Chrome"], "operator": "exact"}], + "rollout_percentage": 30, + } + + payload = { + "operation": "add_release_condition", + "value": {"groups": [new_release_condition], "payloads": {}, "multivariate": None}, + } + + ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload=payload, + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), + ) + + process_scheduled_changes() + + updated_flag = FeatureFlag.objects.get(key="flag-1") + self.assertEqual(updated_flag.filters["groups"][0], new_release_condition) + + def test_schedule_feature_flag_invalid_payload(self) -> None: + feature_flag = FeatureFlag.objects.create( + name="Flag 1", + key="flag-1", + active=False, + filters={"groups": []}, + team=self.team, + created_by=self.user, + ) + + payload = {"foo": "bar"} + + scheduled_change = ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload=payload, + 
scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), + ) + + process_scheduled_changes() + + updated_flag = FeatureFlag.objects.get(key="flag-1") + self.assertEqual(updated_flag.filters["groups"], []) + + updated_scheduled_change = ScheduledChange.objects.get(id=scheduled_change.id) + self.assertEqual(updated_scheduled_change.failure_reason, "Invalid payload") + + @snapshot_postgres_queries + @freeze_time("2023-12-21T09:00:00Z") + def test_schedule_feature_flag_multiple_changes(self) -> None: + feature_flag = FeatureFlag.objects.create( + name="Flag", + key="flag-1", + active=True, + filters={"groups": []}, + team=self.team, + created_by=self.user, + ) + + # Create 4 scheduled changes + # 1. Due in the past + change_past_condition = { + "properties": [{"key": "$geoip_city_name", "value": ["Sydney"], "operator": "exact", "type": "person"}], + "rollout_percentage": 50, + "variant": None, + } + change_past = ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload={ + "operation": "add_release_condition", + "value": {"groups": [change_past_condition], "multivariate": None, "payloads": {}}, + }, + scheduled_at=(datetime.now(timezone.utc) - timedelta(hours=1)), + ) + + # 2. Due in the past and already executed + change_past_executed_at = datetime.now(timezone.utc) - timedelta(hours=5) + change_past_executed = ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload={"operation": "update_status", "value": False}, + scheduled_at=change_past_executed_at, + executed_at=change_past_executed_at, + ) + + # 3. Due exactly now + change_due_now_condition = { + "properties": [{"key": "$geoip_city_name", "value": ["New York"], "operator": "exact", "type": "person"}], + "rollout_percentage": 75, + "variant": None, + } + change_due_now = ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload={ + "operation": "add_release_condition", + "value": {"groups": [change_due_now_condition], "multivariate": None, "payloads": {}}, + }, + scheduled_at=datetime.now(timezone.utc), + ) + + # 4. Due in the future + change_due_future = ScheduledChange.objects.create( + team=self.team, + record_id=feature_flag.id, + model_name="FeatureFlag", + payload={"operation": "update_status", "value": False}, + scheduled_at=(datetime.now(timezone.utc) + timedelta(hours=1)), + ) + + process_scheduled_changes() + + # Refresh change records + change_past = ScheduledChange.objects.get(id=change_past.id) + change_past_executed = ScheduledChange.objects.get(id=change_past_executed.id) + change_due_now = ScheduledChange.objects.get(id=change_due_now.id) + change_due_future = ScheduledChange.objects.get(id=change_due_future.id) + + # Changes due have been marked executed + self.assertIsNotNone(change_past.executed_at) + self.assertIsNotNone(change_due_now.executed_at) + + # Other changes have not been executed + self.assertEqual(change_past_executed.executed_at, change_past_executed_at) + self.assertIsNone(change_due_future.executed_at) + + # The changes due have been propagated in the correct order (oldest scheduled_at first) + updated_flag = FeatureFlag.objects.get(key="flag-1") + self.assertEqual(updated_flag.filters["groups"], [change_past_condition, change_due_now_condition])
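
For reference, a minimal sketch (not part of the diff) of how the pieces above fit together, e.g. from a Django shell. It assumes an existing `team` and `user`; otherwise it uses only the models and task introduced or touched in this change.

    from datetime import datetime, timedelta, timezone

    from posthog.models import FeatureFlag, ScheduledChange
    from posthog.tasks.process_scheduled_changes import process_scheduled_changes

    # team and user are assumed to exist already (like self.team / self.user in the tests above)
    flag = FeatureFlag.objects.create(
        name="My flag",
        key="my-flag",
        active=False,
        filters={"groups": []},
        team=team,
        created_by=user,
    )

    # record_id is a CharField after migration 0379, so the flag id is stored as a string
    ScheduledChange.objects.create(
        team=team,
        record_id=str(flag.id),
        model_name="FeatureFlag",
        payload={"operation": "update_status", "value": True},
        scheduled_at=datetime.now(timezone.utc) + timedelta(hours=1),
    )

    # The Celery beat entry added in posthog/celery.py invokes this every 120 seconds;
    # once scheduled_at has passed, the change is dispatched and executed_at is set
    # (or failure_reason is recorded if the payload is invalid).
    process_scheduled_changes()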