diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml index d7c9689f55901..3c5a4d1986027 100644 --- a/.github/actions/run-backend-tests/action.yml +++ b/.github/actions/run-backend-tests/action.yml @@ -6,7 +6,7 @@ name: Run Django tests inputs: python-version: required: true - description: Python version, e.g. 3.11.9 + description: Python version, e.g. 3.10.10 clickhouse-server-image: required: true description: ClickHouse server image tag, e.g. clickhouse/clickhouse-server:latest diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 9478b7d2f8c80..bd50811fae662 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -54,7 +54,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/build-hogql-parser.yml b/.github/workflows/build-hogql-parser.yml index 4b950b281f065..50653eaa02e17 100644 --- a/.github/workflows/build-hogql-parser.yml +++ b/.github/workflows/build-hogql-parser.yml @@ -73,7 +73,7 @@ jobs: - if: ${{ !endsWith(matrix.os, '-arm') }} uses: actions/setup-python@v4 with: - python-version: '3.11' + python-version: '3.10' # Compiling Python 3.11 from source on ARM. We tried using the "deadsnakes" ARM repo, but it was flakey. - if: ${{ endsWith(matrix.os, '-arm') }} diff --git a/.github/workflows/ci-backend-update-test-timing.yml b/.github/workflows/ci-backend-update-test-timing.yml index 01ad7d33ce305..a2082f6b98955 100644 --- a/.github/workflows/ci-backend-update-test-timing.yml +++ b/.github/workflows/ci-backend-update-test-timing.yml @@ -28,7 +28,7 @@ jobs: concurrency: 1 group: 1 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} - python-version: '3.11.9' + python-version: '3.10.10' clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' segment: 'FOSS' person-on-events: false diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index b757f69c8f804..14d2c1045c2c4 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -108,7 +108,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -163,7 +163,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -232,7 +232,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.11.9'] + python-version: ['3.10.10'] clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.5.81-alpine'] segment: ['Core'] person-on-events: [false, true] @@ -243,7 +243,7 @@ jobs: - segment: 'Temporal' person-on-events: false clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' - python-version: '3.11.9' + python-version: '3.10.10' concurrency: 1 group: 1 @@ -331,7 +331,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml index 2a2ee8ecb8684..860f0b6e47be8 100644 
--- a/.github/workflows/ci-hog.yml +++ b/.github/workflows/ci-hog.yml @@ -70,7 +70,7 @@ jobs: if: needs.changes.outputs.hog == 'true' uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml index b4d6cb0a17f36..dac67b705b6a5 100644 --- a/.github/workflows/ci-plugin-server.yml +++ b/.github/workflows/ci-plugin-server.yml @@ -115,7 +115,7 @@ jobs: if: needs.changes.outputs.plugin-server == 'true' uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} @@ -207,7 +207,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.11.9 + python-version: 3.10.10 cache: 'pip' cache-dependency-path: '**/requirements*.txt' token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/bin/build-schema-python.sh b/bin/build-schema-python.sh index efd65bb091b78..d033c5f4f1e21 100755 --- a/bin/build-schema-python.sh +++ b/bin/build-schema-python.sh @@ -4,27 +4,25 @@ set -e # Generate schema.py from schema.json datamodel-codegen \ - --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.11 --disable-timestamp \ + --class-name='SchemaRoot' --collapse-root-models --target-python-version 3.10 --disable-timestamp \ --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum \ --input frontend/src/queries/schema.json --input-file-type jsonschema \ --output posthog/schema.py --output-model-type pydantic_v2.BaseModel \ --custom-file-header "# mypy: disable-error-code=\"assignment\"" \ --set-default-enum-member --capitalise-enum-members \ --wrap-string-literal - # Format schema.py ruff format posthog/schema.py - # Check schema.py and autofix ruff check --fix posthog/schema.py - -# Replace class Foo(str, Enum) with class Foo(StrEnum) for proper handling in format strings in python 3.11 -# Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1313 is resolved +# HACK: Datamodel-codegen output for enum-type fields with a default is invalid – the default value is a plain string, +# and not the expected enum member. We fix this using sed, which is pretty hacky, but does the job. +# Specifically, we need to replace `Optional[PropertyOperator] = "exact"` +# with `Optional[PropertyOperator] = PropertyOperator("exact")` to make the default value valid. +# Remove this when https://github.com/koxudaxi/datamodel-code-generator/issues/1929 is resolved. 
if [[ "$OSTYPE" == "darwin"* ]]; then # sed needs `-i` to be followed by `''` on macOS - sed -i '' -e 's/str, Enum/StrEnum/g' posthog/schema.py - sed -i '' 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py + sed -i '' -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py else - sed -i -e 's/str, Enum/StrEnum/g' posthog/schema.py - sed -i 's/from enum import Enum/from enum import Enum, StrEnum/g' posthog/schema.py -fi + sed -i -e 's/Optional\[PropertyOperator\] = \("[A-Za-z_]*"\)/Optional[PropertyOperator] = PropertyOperator(\1)/g' posthog/schema.py +fi \ No newline at end of file diff --git a/ee/api/test/__snapshots__/test_time_to_see_data.ambr b/ee/api/test/__snapshots__/test_time_to_see_data.ambr index beda2bc14bdef..2d93af68cee82 100644 --- a/ee/api/test/__snapshots__/test_time_to_see_data.ambr +++ b/ee/api/test/__snapshots__/test_time_to_see_data.ambr @@ -20,7 +20,7 @@ "first_name": "", "last_name": "", "email": "", - "is_email_verified": null + "is_email_verified": false } }, "children": [ diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index a67d6523858f2..1a5735473ffa0 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from django.core.cache import cache from flaky import flaky from rest_framework import status @@ -1601,8 +1601,8 @@ def test_create_exposure_cohort_for_experiment_with_custom_action_filters_exposu explicit_datetime = parser.isoparse(target_filter["explicit_datetime"]) self.assertTrue( - explicit_datetime <= datetime.now(UTC) - timedelta(days=5) - and explicit_datetime >= datetime.now(UTC) - timedelta(days=5, hours=1) + explicit_datetime <= datetime.now(timezone.utc) - timedelta(days=5) + and explicit_datetime >= datetime.now(timezone.utc) - timedelta(days=5, hours=1) ) cohort_id = cohort["id"] diff --git a/ee/session_recordings/session_summary/test/test_summarize_session.py b/ee/session_recordings/session_summary/test/test_summarize_session.py index 3cc69df02b18b..69412608dd3b9 100644 --- a/ee/session_recordings/session_summary/test/test_summarize_session.py +++ b/ee/session_recordings/session_summary/test/test_summarize_session.py @@ -1,4 +1,4 @@ -from datetime import datetime, UTC +from datetime import timezone, datetime from dateutil.parser import isoparse @@ -23,7 +23,7 @@ def test_format_dates_as_millis_since_start(self) -> None: ["$pageview", isoparse("2021-01-01T00:00:02Z")], ], ), - datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC), + datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc), ) assert processed.columns == ["event", "milliseconds_since_start"] assert processed.results == [["$pageview", 0], ["$pageview", 1000], ["$pageview", 2000]] diff --git a/ee/session_recordings/test/test_session_recording_extensions.py b/ee/session_recordings/test/test_session_recording_extensions.py index e425213f747a2..ad545e5cec33f 100644 --- a/ee/session_recordings/test/test_session_recording_extensions.py +++ b/ee/session_recordings/test/test_session_recording_extensions.py @@ -1,5 +1,5 @@ import gzip -from datetime import timedelta, datetime, UTC +from datetime import timedelta, datetime, timezone from secrets import token_urlsafe from unittest.mock import patch, MagicMock from uuid import uuid4 @@ -84,7 +84,7 @@ def 
test_can_build_different_object_storage_paths(self) -> None: def test_persists_recording_from_blob_ingested_storage(self): with self.settings(OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER=TEST_BUCKET): - two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=UTC) + two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=timezone.utc) with freeze_time(two_minutes_ago): session_id = f"test_persists_recording_from_blob_ingested_storage-s1-{uuid4()}" diff --git a/ee/session_recordings/test/test_session_recording_playlist.py b/ee/session_recordings/test/test_session_recording_playlist.py index 0ec14e0decb15..6fb6a730a7aae 100644 --- a/ee/session_recordings/test/test_session_recording_playlist.py +++ b/ee/session_recordings/test/test_session_recording_playlist.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from unittest import mock from unittest.mock import MagicMock, patch from uuid import uuid4 @@ -187,7 +187,7 @@ def test_get_pinned_recordings_for_playlist(self, mock_copy_objects: MagicMock) session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}" session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}" - three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC) + three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc) produce_replay_summary( team_id=self.team.id, @@ -242,7 +242,7 @@ def test_fetch_playlist_recordings(self, mock_copy_objects: MagicMock, mock_list session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}" session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}" - three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC) + three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc) for session_id in [session_one, session_two]: produce_replay_summary( diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py index eb8afed13cbaf..6fa4b63960fc2 100644 --- a/ee/tasks/subscriptions/subscription_utils.py +++ b/ee/tasks/subscriptions/subscription_utils.py @@ -56,7 +56,7 @@ def generate_assets( # Wait for all assets to be exported tasks = [exporter.export_asset.si(asset.id) for asset in assets] # run them one after the other, so we don't exhaust celery workers - exports_expire = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta( + exports_expire = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( minutes=settings.PARALLEL_ASSET_GENERATION_MAX_TIMEOUT_MINUTES ) parallel_job = chain(*tasks).apply_async(expires=exports_expire, retry=False) diff --git a/mypy.ini b/mypy.ini index 438b5f47ef60c..414b1d252174b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.11 +python_version = 3.10 plugins = mypy_django_plugin.main, mypy_drf_plugin.main, diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py index 12d5483873768..6fe56947b42c7 100644 --- a/posthog/api/app_metrics.py +++ b/posthog/api/app_metrics.py @@ -90,7 +90,9 @@ def get_batch_export_runs_app_metrics_queryset(self, batch_export_id: str): after = self.request.GET.get("date_from", "-30d") before = self.request.GET.get("date_to", None) after_datetime = relative_date_parse(after, self.team.timezone_info) - before_datetime = relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.UTC) + before_datetime = ( + relative_date_parse(before, self.team.timezone_info) if 
before else dt.datetime.now(dt.timezone.utc) + ) date_range = (after_datetime, before_datetime) runs = ( BatchExportRun.objects.select_related("batch_export__destination") diff --git a/posthog/api/authentication.py b/posthog/api/authentication.py index b04707ca559de..d82d958f27428 100644 --- a/posthog/api/authentication.py +++ b/posthog/api/authentication.py @@ -290,7 +290,7 @@ def create(self, validated_data): user = None if user: - user.requested_password_reset_at = datetime.datetime.now(datetime.UTC) + user.requested_password_reset_at = datetime.datetime.now(datetime.timezone.utc) user.save() token = password_reset_token_generator.make_token(user) send_password_reset(user.id, token) diff --git a/posthog/api/comments.py b/posthog/api/comments.py index 06443f92b2fcc..20961be0e3cbb 100644 --- a/posthog/api/comments.py +++ b/posthog/api/comments.py @@ -11,13 +11,11 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer -from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.models.comment import Comment class CommentSerializer(serializers.ModelSerializer): created_by = UserBasicSerializer(read_only=True) - deleted = ClassicBehaviorBooleanFieldSerializer() class Meta: model = Comment diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index 029a3186d4365..6887b85dcf53b 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -23,7 +23,6 @@ from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin from posthog.api.dashboards.dashboard import Dashboard -from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.auth import PersonalAPIKeyAuthentication, TemporaryTokenAuthentication from posthog.constants import FlagRequestType from posthog.event_usage import report_user_action @@ -90,9 +89,6 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo is_simple_flag = serializers.SerializerMethodField() rollout_percentage = serializers.SerializerMethodField() - ensure_experience_continuity = ClassicBehaviorBooleanFieldSerializer() - has_enriched_analytics = ClassicBehaviorBooleanFieldSerializer() - experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True) surveys: serializers.SerializerMethodField = serializers.SerializerMethodField() features: serializers.SerializerMethodField = serializers.SerializerMethodField() diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index 481b63476f10e..47a5ab5b3bb80 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -22,7 +22,6 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import FiltersSerializer -from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer from posthog.models import Plugin, PluginAttachment, PluginConfig, User from posthog.models.activity_logging.activity_log import ( ActivityPage, @@ -587,8 +586,6 @@ class PluginConfigSerializer(serializers.ModelSerializer): delivery_rate_24h = serializers.SerializerMethodField() error = serializers.SerializerMethodField() - deleted = ClassicBehaviorBooleanFieldSerializer() - class Meta: model = PluginConfig fields = [ diff --git a/posthog/api/routing.py b/posthog/api/routing.py index f2816f9a2b131..c4e67d1826274 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -36,32 +36,6 @@ class DefaultRouterPlusPlus(ExtendedDefaultRouter): 
"""DefaultRouter with optional trailing slash and drf-extensions nesting.""" - # This is an override because of changes in djangorestframework 3.15, which is required for python 3.11 - # changes taken from and explained here: https://github.com/nautobot/nautobot/pull/5546/files#diff-81850a2ccad5814aab4f477d447f85cc0a82e9c10fd88fd72327cda51a750471R30 - def _register(self, prefix, viewset, basename=None): - """ - Override DRF's BaseRouter.register() to bypass an unnecessary restriction added in version 3.15.0. - (Reference: https://github.com/encode/django-rest-framework/pull/8438) - """ - if basename is None: - basename = self.get_default_basename(viewset) - - # DRF: - # if self.is_already_registered(basename): - # msg = (f'Router with basename "{basename}" is already registered. ' - # f'Please provide a unique basename for viewset "{viewset}"') - # raise ImproperlyConfigured(msg) - # - # We bypass this because we have at least one use case (/api/extras/jobs/) where we are *intentionally* - # registering two viewsets with the same basename, but have carefully defined them so as not to conflict. - - # resuming standard DRF code... - self.registry.append((prefix, viewset, basename)) - - # invalidate the urls cache - if hasattr(self, "_urls"): - del self._urls - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.trailing_slash = r"/?" diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index 2ded9229008c7..8793984c350a5 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -77,7 +77,7 @@ "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', + '/opt/hostedtoolcache/Python/3.10.10/x64/lib/python3.10/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. 
) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', diff --git a/posthog/api/test/batch_exports/test_log_entry.py b/posthog/api/test/batch_exports/test_log_entry.py index 06dcb6ce4a8d2..b166583ee0b87 100644 --- a/posthog/api/test/batch_exports/test_log_entry.py +++ b/posthog/api/test/batch_exports/test_log_entry.py @@ -38,7 +38,7 @@ def create_batch_export_log_entry( "log_source": "batch_exports", "log_source_id": batch_export_id, "instance_id": run_id, - "timestamp": dt.datetime.now(dt.UTC).strftime("%Y-%m-%d %H:%M:%S.%f"), + "timestamp": dt.datetime.now(dt.timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f"), "level": level, "message": message, }, @@ -147,7 +147,7 @@ def test_log_level_filter(batch_export, team, level): results = [] timeout = 10 - start = dt.datetime.now(dt.UTC) + start = dt.datetime.now(dt.timezone.utc) while not results: results = fetch_batch_export_log_entries( @@ -157,7 +157,7 @@ def test_log_level_filter(batch_export, team, level): after=dt.datetime(2023, 9, 22, 0, 59, 59), before=dt.datetime(2023, 9, 22, 1, 0, 1), ) - if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout): + if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout): break results.sort(key=lambda record: record.message) @@ -195,7 +195,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level): results = [] timeout = 10 - start = dt.datetime.now(dt.UTC) + start = dt.datetime.now(dt.timezone.utc) while not results: results = fetch_batch_export_log_entries( @@ -205,7 +205,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level): after=dt.datetime(2023, 9, 22, 0, 59, 59), before=dt.datetime(2023, 9, 22, 1, 0, 1), ) - if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout): + if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout): break results.sort(key=lambda record: record.message) diff --git a/posthog/api/test/batch_exports/test_pause.py b/posthog/api/test/batch_exports/test_pause.py index 33c32f1a200bc..7db786347e09c 100644 --- a/posthog/api/test/batch_exports/test_pause.py +++ b/posthog/api/test/batch_exports/test_pause.py @@ -397,8 +397,8 @@ def test_unpause_can_trigger_a_backfill(client: HttpClient): data = get_batch_export_ok(client, team.pk, batch_export_id) assert batch_export["last_updated_at"] < data["last_updated_at"] - start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC) - end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC) + start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc) + end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc) 
mock_backfill.assert_called_once_with( ANY, batch_export["id"], diff --git a/posthog/api/test/batch_exports/test_update.py b/posthog/api/test/batch_exports/test_update.py index 2a7f3241fd037..7b749c62dc24f 100644 --- a/posthog/api/test/batch_exports/test_update.py +++ b/posthog/api/test/batch_exports/test_update.py @@ -94,8 +94,8 @@ def test_can_put_config(client: HttpClient): new_schedule = describe_schedule(temporal, batch_export["id"]) assert old_schedule.schedule.spec.intervals[0].every != new_schedule.schedule.spec.intervals[0].every assert new_schedule.schedule.spec.intervals[0].every == dt.timedelta(days=1) - assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.UTC) - assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.UTC) + assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.timezone.utc) + assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.timezone.utc) decoded_payload = async_to_sync(codec.decode)(new_schedule.schedule.action.args) args = json.loads(decoded_payload[0].data) diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py index 67b9a0a42eaa5..c639b37aee68d 100644 --- a/posthog/api/test/test_app_metrics.py +++ b/posthog/api/test/test_app_metrics.py @@ -100,7 +100,7 @@ def test_retrieve_batch_export_runs_app_metrics(self): temporal = sync_connect() - now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC) + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) with start_test_worker(temporal): response = create_batch_export_ok( self.client, @@ -191,7 +191,7 @@ def test_retrieve_batch_export_runs_app_metrics_defaults_to_zero(self): } temporal = sync_connect() - now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC) + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) with start_test_worker(temporal): response = create_batch_export_ok( diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index 756d1638a3f5b..a7d605d9a3ae8 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -13,7 +13,7 @@ import structlog import zlib from datetime import datetime, timedelta -from datetime import UTC +from datetime import timezone as tz from django.http import HttpResponse from django.test.client import MULTIPART_CONTENT, Client from django.utils import timezone @@ -1415,7 +1415,7 @@ def test_js_library_underscore_sent_at(self, kafka_produce): # right time sent as sent_at to process_event sent_at = datetime.fromisoformat(arguments["sent_at"]) - self.assertEqual(sent_at.tzinfo, UTC) + self.assertEqual(sent_at.tzinfo, tz.utc) timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp() self.assertLess(abs(timediff), 1) diff --git a/posthog/api/user.py b/posthog/api/user.py index ee2b66c47eb1c..ad5ef32de0e8c 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -38,7 +38,6 @@ from posthog.api.utils import ( PublicIPOnlyHttpAdapter, raise_if_user_provided_url_unsafe, - ClassicBehaviorBooleanFieldSerializer, ) from posthog.auth import ( PersonalAPIKeyAuthentication, @@ -88,7 +87,6 @@ class UserSerializer(serializers.ModelSerializer): current_password = serializers.CharField(write_only=True, required=False) notification_settings = serializers.DictField(required=False) scene_personalisation = ScenePersonalisationBasicSerializer(many=True, read_only=True) - anonymize_data = ClassicBehaviorBooleanFieldSerializer() class 
Meta: model = User diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 2f1bd5c087bab..65cff4897ebb9 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -7,7 +7,6 @@ from requests.adapters import HTTPAdapter from typing import Literal, Optional, Union -from rest_framework.fields import Field from urllib3 import HTTPSConnectionPool, HTTPConnectionPool, PoolManager from uuid import UUID @@ -15,7 +14,7 @@ from django.core.exceptions import RequestDataTooBig from django.db.models import QuerySet from prometheus_client import Counter -from rest_framework import request, status, serializers +from rest_framework import request, status from rest_framework.exceptions import ValidationError from statshog.defaults.django import statsd @@ -36,14 +35,6 @@ class PaginationMode(Enum): previous = auto() -# This overrides a change in DRF 3.15 that alters our behavior. If the user passes an empty argument, -# the new version keeps it as null vs coalescing it to the default. -# Don't add this to new classes -class ClassicBehaviorBooleanFieldSerializer(serializers.BooleanField): - def __init__(self, **kwargs): - Field.__init__(self, allow_null=True, required=False, **kwargs) - - def get_target_entity(filter: Union[Filter, StickinessFilter]) -> Entity: # Except for "events", we require an entity id and type to be provided if not filter.target_entity_id and filter.target_entity_type != "events": diff --git a/posthog/async_migrations/test/test_utils.py b/posthog/async_migrations/test/test_utils.py index 15d88019e785f..da01ec9dda54d 100644 --- a/posthog/async_migrations/test/test_utils.py +++ b/posthog/async_migrations/test/test_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from unittest.mock import patch import pytest @@ -49,7 +49,7 @@ def test_process_error(self, _): sm.refresh_from_db() self.assertEqual(sm.status, MigrationStatus.Errored) - self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1)) + self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1)) errors = AsyncMigrationError.objects.filter(async_migration=sm).order_by("created_at") self.assertEqual(errors.count(), 2) self.assertEqual(errors[0].description, "some error") @@ -81,7 +81,7 @@ def test_complete_migration(self): sm.refresh_from_db() self.assertEqual(sm.status, MigrationStatus.CompletedSuccessfully) - self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1)) + self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1)) self.assertEqual(sm.progress, 100) errors = AsyncMigrationError.objects.filter(async_migration=sm) diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index ec812db0b47e8..98a97a74b3f4a 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -76,11 +76,11 @@ def validate_date_input(date_input: Any, team: Team | None = None) -> dt.datetim if parsed.tzinfo is None: if team: - parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.UTC) + parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.timezone.utc) else: - parsed = parsed.replace(tzinfo=dt.UTC) + parsed = parsed.replace(tzinfo=dt.timezone.utc) else: - parsed = parsed.astimezone(dt.UTC) + parsed = parsed.astimezone(dt.timezone.utc) return parsed diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index 7c1b3b7b0a4a3..f891089e7a327 100644 --- a/posthog/batch_exports/models.py +++ 
b/posthog/batch_exports/models.py @@ -1,7 +1,7 @@ import collections.abc import dataclasses import datetime as dt -import enum +from enum import Enum import typing from datetime import timedelta @@ -254,7 +254,7 @@ def interval_time_delta(self) -> timedelta: raise ValueError(f"Invalid interval: '{self.interval}'") -class BatchExportLogEntryLevel(enum.StrEnum): +class BatchExportLogEntryLevel(str, Enum): """Enumeration of batch export log levels.""" DEBUG = "DEBUG" diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 9ac836e261c84..2483738cefbc0 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -269,7 +269,7 @@ def pause_batch_export(temporal: Client, batch_export_id: str, note: str | None raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc batch_export.paused = True - batch_export.last_paused_at = dt.datetime.now(dt.UTC) + batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc) batch_export.save() return True @@ -297,7 +297,7 @@ async def apause_batch_export(temporal: Client, batch_export_id: str, note: str raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc batch_export.paused = True - batch_export.last_paused_at = dt.datetime.now(dt.UTC) + batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc) await batch_export.asave() return True diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 42b82d38867cf..91e33d79c2d45 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -156,7 +156,7 @@ def execute_process_query( query_status.error = True # Assume error in case nothing below ends up working - pickup_time = datetime.datetime.now(datetime.UTC) + pickup_time = datetime.datetime.now(datetime.timezone.utc) if query_status.start_time: wait_duration = (pickup_time - query_status.start_time) / datetime.timedelta(seconds=1) QUERY_WAIT_TIME.labels( @@ -177,7 +177,7 @@ def execute_process_query( query_status.complete = True query_status.error = False query_status.results = results - query_status.end_time = datetime.datetime.now(datetime.UTC) + query_status.end_time = datetime.datetime.now(datetime.timezone.utc) query_status.expiration_time = query_status.end_time + datetime.timedelta(seconds=manager.STATUS_TTL_SECONDS) process_duration = (query_status.end_time - pickup_time) / datetime.timedelta(seconds=1) QUERY_PROCESS_TIME.labels(team=team_id).observe(process_duration) @@ -218,7 +218,7 @@ def enqueue_process_query_task( return manager.get_query_status() # Immediately set status, so we don't have race with celery - query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.UTC)) + query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.timezone.utc)) manager.store_query_status(query_status) task_signature = process_query_task.si( diff --git a/posthog/clickhouse/table_engines.py b/posthog/clickhouse/table_engines.py index b67ef9be5bc10..e2b83d3f29006 100644 --- a/posthog/clickhouse/table_engines.py +++ b/posthog/clickhouse/table_engines.py @@ -1,11 +1,11 @@ import uuid -from enum import StrEnum +from enum import Enum from typing import Optional from django.conf import settings -class ReplicationScheme(StrEnum): +class ReplicationScheme(str, Enum): NOT_SHARDED = "NOT_SHARDED" SHARDED = "SHARDED" REPLICATED = "REPLICATED" diff --git 
a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py index 4dbf0900b5387..ec632eebe7774 100644 --- a/posthog/clickhouse/test/test_person_overrides.py +++ b/posthog/clickhouse/test/test_person_overrides.py @@ -1,5 +1,5 @@ import json -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from time import sleep from typing import TypedDict from uuid import UUID, uuid4 @@ -124,7 +124,7 @@ def test_person_overrides_dict(): "override_person_id": uuid4(), "merged_at": datetime.fromisoformat("2020-01-02T00:00:00+00:00"), "oldest_event": datetime.fromisoformat("2020-01-01T00:00:00+00:00"), - "created_at": datetime.now(UTC), + "created_at": datetime.now(timezone.utc), "version": 1, } diff --git a/posthog/constants.py b/posthog/constants.py index af1e627bc7160..fc8f7a9142195 100644 --- a/posthog/constants.py +++ b/posthog/constants.py @@ -1,4 +1,4 @@ -from enum import StrEnum +from enum import Enum from typing import Literal from semantic_version import Version @@ -9,7 +9,7 @@ # N.B. Keep this in sync with frontend enum (types.ts) # AND ensure it is added to the Billing Service -class AvailableFeature(StrEnum): +class AvailableFeature(str, Enum): ZAPIER = "zapier" ORGANIZATIONS_PROJECTS = "organizations_projects" PROJECT_BASED_PERMISSIONING = "project_based_permissioning" @@ -215,19 +215,19 @@ class AvailableFeature(StrEnum): BREAKDOWN_TYPES = Literal["event", "person", "cohort", "group", "session", "hogql"] -class FunnelOrderType(StrEnum): +class FunnelOrderType(str, Enum): STRICT = "strict" UNORDERED = "unordered" ORDERED = "ordered" -class FunnelVizType(StrEnum): +class FunnelVizType(str, Enum): TRENDS = "trends" TIME_TO_CONVERT = "time_to_convert" STEPS = "steps" -class FunnelCorrelationType(StrEnum): +class FunnelCorrelationType(str, Enum): EVENTS = "events" PROPERTIES = "properties" EVENT_WITH_PROPERTIES = "event_with_properties" @@ -240,7 +240,7 @@ class FunnelCorrelationType(StrEnum): PERSON_UUID_FILTER = "person_uuid" -class AnalyticsDBMS(StrEnum): +class AnalyticsDBMS(str, Enum): POSTGRES = "postgres" CLICKHOUSE = "clickhouse" @@ -251,13 +251,13 @@ class AnalyticsDBMS(StrEnum): MONTHLY_ACTIVE = "monthly_active" -class RetentionQueryType(StrEnum): +class RetentionQueryType(str, Enum): RETURNING = "returning" TARGET = "target" TARGET_FIRST_TIME = "target_first_time" -class ExperimentSignificanceCode(StrEnum): +class ExperimentSignificanceCode(str, Enum): SIGNIFICANT = "significant" NOT_ENOUGH_EXPOSURE = "not_enough_exposure" LOW_WIN_PROBABILITY = "low_win_probability" @@ -265,7 +265,7 @@ class ExperimentSignificanceCode(StrEnum): HIGH_P_VALUE = "high_p_value" -class ExperimentNoResultsErrorKeys(StrEnum): +class ExperimentNoResultsErrorKeys(str, Enum): NO_EVENTS = "no-events" NO_FLAG_INFO = "no-flag-info" NO_CONTROL_VARIANT = "no-control-variant" @@ -273,12 +273,12 @@ class ExperimentNoResultsErrorKeys(StrEnum): NO_RESULTS = "no-results" -class PropertyOperatorType(StrEnum): +class PropertyOperatorType(str, Enum): AND = "AND" OR = "OR" -class BreakdownAttributionType(StrEnum): +class BreakdownAttributionType(str, Enum): FIRST_TOUCH = "first_touch" # FIRST_TOUCH attribution means the breakdown value is the first property value found within all funnel steps LAST_TOUCH = "last_touch" @@ -294,7 +294,7 @@ class BreakdownAttributionType(StrEnum): GROUP_TYPES_LIMIT = 5 -class EventDefinitionType(StrEnum): +class EventDefinitionType(str, Enum): # Mimics EventDefinitionType in frontend/src/types.ts ALL = "all" 
ACTION_EVENT = "action_event" @@ -303,7 +303,7 @@ class EventDefinitionType(StrEnum): EVENT_CUSTOM = "event_custom" -class FlagRequestType(StrEnum): +class FlagRequestType(str, Enum): DECIDE = "decide" LOCAL_EVALUATION = "local-evaluation" diff --git a/posthog/decorators.py b/posthog/decorators.py index c4aba39e3d2c5..eb66afcf422d4 100644 --- a/posthog/decorators.py +++ b/posthog/decorators.py @@ -1,4 +1,4 @@ -from enum import StrEnum +from enum import Enum from functools import wraps from typing import Any, TypeVar, Union, cast from collections.abc import Callable @@ -17,7 +17,7 @@ from .utils import generate_cache_key, get_safe_cache -class CacheType(StrEnum): +class CacheType(str, Enum): TRENDS = "Trends" FUNNEL = "Funnel" RETENTION = "Retention" diff --git a/posthog/demo/matrix/models.py b/posthog/demo/matrix/models.py index 511da24c1e08d..50747fb65ca95 100644 --- a/posthog/demo/matrix/models.py +++ b/posthog/demo/matrix/models.py @@ -106,7 +106,9 @@ class SimEvent: group4_created_at: Optional[dt.datetime] = None def __str__(self) -> str: - separator = "-" if self.timestamp < dt.datetime.now(dt.UTC) else "+" # Future events are denoted by a '+' + separator = ( + "-" if self.timestamp < dt.datetime.now(dt.timezone.utc) else "+" + ) # Future events are denoted by a '+' display = f"{self.timestamp} {separator} {self.event} # {self.distinct_id}" if current_url := self.properties.get("$current_url"): display += f" @ {current_url}" diff --git a/posthog/demo/matrix/randomization.py b/posthog/demo/matrix/randomization.py index 71701d2c6ce99..9500f72778a0f 100644 --- a/posthog/demo/matrix/randomization.py +++ b/posthog/demo/matrix/randomization.py @@ -1,11 +1,11 @@ -from enum import StrEnum +from enum import Enum import mimesis.random WeightedPool = tuple[list[str], list[int]] -class Industry(StrEnum): +class Industry(str, Enum): TECHNOLOGY = "technology" FINANCE = "finance" MEDIA = "media" diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py index 9b0c72afc69a7..af7b3d6862f31 100644 --- a/posthog/demo/products/hedgebox/models.py +++ b/posthog/demo/products/hedgebox/models.py @@ -1,7 +1,7 @@ import datetime as dt import math from dataclasses import dataclass, field -from enum import auto, StrEnum +from enum import auto, Enum from typing import ( TYPE_CHECKING, Any, @@ -66,7 +66,7 @@ class HedgeboxSessionIntent(SimSessionIntent): DOWNGRADE_PLAN = auto() -class HedgeboxPlan(StrEnum): +class HedgeboxPlan(str, Enum): PERSONAL_FREE = "personal/free" PERSONAL_PRO = "personal/pro" BUSINESS_STANDARD = "business/standard" diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index 72b2c32f7b745..a21a74f4a91bf 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -1,4 +1,4 @@ -from enum import StrEnum +from enum import Enum from typing import Any, Literal, Optional, Union from dataclasses import dataclass, field @@ -554,7 +554,7 @@ class Alias(Expr): hidden: bool = False -class ArithmeticOperationOp(StrEnum): +class ArithmeticOperationOp(str, Enum): Add = "+" Sub = "-" Mult = "*" @@ -581,7 +581,7 @@ class Or(Expr): type: Optional[ConstantType] = None -class CompareOperationOp(StrEnum): +class CompareOperationOp(str, Enum): Eq = "==" NotEq = "!=" Gt = ">" diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index f484a6d0fad70..769d4a250e65e 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,5 +1,5 @@ from datetime import date, datetime -from enum import StrEnum +from enum import Enum from typing import 
Optional, Literal, TypeAlias from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -47,7 +47,7 @@ BREAKDOWN_VALUES_LIMIT_FOR_COUNTRIES = 300 -class LimitContext(StrEnum): +class LimitContext(str, Enum): QUERY = "query" QUERY_ASYNC = "query_async" EXPORT = "export" diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py index 0b0747593b7a7..12f4d3640017d 100644 --- a/posthog/hogql/database/schema/persons.py +++ b/posthog/hogql/database/schema/persons.py @@ -1,4 +1,5 @@ -from typing import cast, Optional, Self +from typing import cast, Optional +from typing_extensions import Self import posthoganalytics from posthog.hogql.ast import SelectQuery, And, CompareOperation, CompareOperationOp, Field, JoinExpr diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py index cc4cde4554a4f..6b9e0d166d50b 100644 --- a/posthog/hogql/test/test_resolver.py +++ b/posthog/hogql/test/test_resolver.py @@ -1,4 +1,4 @@ -from datetime import datetime, date, UTC +from datetime import timezone, datetime, date from typing import Optional, cast import pytest from django.test import override_settings @@ -97,7 +97,7 @@ def test_resolve_constant_type(self): "SELECT 1, 'boo', true, 1.1232, null, {date}, {datetime}, {uuid}, {array}, {array12}, {tuple}", placeholders={ "date": ast.Constant(value=date(2020, 1, 10)), - "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=UTC)), + "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc)), "uuid": ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000")), "array": ast.Constant(value=[]), "array12": ast.Constant(value=[1, 2]), diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py b/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py index b12f15ac6411c..fd95febddf61a 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_actors_query_builder.py @@ -1,4 +1,4 @@ -from datetime import datetime, UTC +from datetime import datetime, timezone from typing import Optional, cast from freezegun import freeze_time @@ -70,7 +70,7 @@ def _get_date_where_sql(self, **kwargs): def _get_utc_string(self, dt: datetime | None) -> str | None: if dt is None: return None - return dt.astimezone(UTC).strftime("%Y-%m-%d %H:%M:%SZ") + return dt.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ") def test_time_frame(self): self.team.timezone = "Europe/Berlin" diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index 0d1dd48e9bfe5..0fc639b08fcd0 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -1,5 +1,5 @@ import copy -from enum import StrEnum +from enum import Enum import json import re from typing import Any, Literal @@ -35,7 +35,7 @@ from posthog.utils import str_to_bool -class MathAvailability(StrEnum): +class MathAvailability(str, Enum): Unavailable = ("Unavailable",) All = ("All",) ActorsOnly = "ActorsOnly" diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 38e1ccc255a0d..d38cd03626f3a 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from datetime import datetime, timedelta, UTC 
+from datetime import datetime, timedelta, timezone from enum import IntEnum from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard from zoneinfo import ZoneInfo @@ -445,7 +445,7 @@ def handle_cache_and_async_logic( elif execution_mode == ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE: # We're allowed to calculate if the cache is older than 24 hours, but we'll do it asynchronously assert isinstance(cached_response, CachedResponse) - if datetime.now(UTC) - cached_response.last_refresh > EXTENDED_CACHE_AGE: + if datetime.now(timezone.utc) - cached_response.last_refresh > EXTENDED_CACHE_AGE: query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user) cached_response.query_status = query_status_response.query_status return cached_response @@ -490,8 +490,8 @@ def run( fresh_response_dict = { **self.calculate().model_dump(), "is_cached": False, - "last_refresh": datetime.now(UTC), - "next_allowed_client_refresh": datetime.now(UTC) + self._refresh_frequency(), + "last_refresh": datetime.now(timezone.utc), + "next_allowed_client_refresh": datetime.now(timezone.utc) + self._refresh_frequency(), "cache_key": cache_key, "timezone": self.team.timezone, } diff --git a/posthog/jwt.py b/posthog/jwt.py index 897abf98ee9a0..ead4196aa4730 100644 --- a/posthog/jwt.py +++ b/posthog/jwt.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from enum import Enum from typing import Any @@ -23,7 +23,7 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie encoded_jwt = jwt.encode( { **payload, - "exp": datetime.now(tz=UTC) + expiry_delta, + "exp": datetime.now(tz=timezone.utc) + expiry_delta, "aud": audience.value, }, settings.SECRET_KEY, diff --git a/posthog/kafka_client/client.py b/posthog/kafka_client/client.py index f0008c4ba72e8..3f58e572417b8 100644 --- a/posthog/kafka_client/client.py +++ b/posthog/kafka_client/client.py @@ -1,5 +1,5 @@ import json -from enum import StrEnum +from enum import Enum from typing import Any, Optional from collections.abc import Callable @@ -83,7 +83,7 @@ def subscribe(self, _): return -class _KafkaSecurityProtocol(StrEnum): +class _KafkaSecurityProtocol(str, Enum): PLAINTEXT = "PLAINTEXT" SSL = "SSL" SASL_PLAINTEXT = "SASL_PLAINTEXT" diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py index 80907dccea7f7..90806ad900fee 100644 --- a/posthog/management/commands/create_batch_export_from_app.py +++ b/posthog/management/commands/create_batch_export_from_app.py @@ -116,7 +116,7 @@ def handle(self, *args, **options): if options.get("backfill_batch_export", False) and dry_run is False: client = sync_connect() - end_at = dt.datetime.now(dt.UTC) + end_at = dt.datetime.now(dt.timezone.utc) start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1)) backfill_export( client, diff --git a/posthog/management/commands/create_channel_definitions_file.py b/posthog/management/commands/create_channel_definitions_file.py index bea98c02b5243..cab70bf31d360 100644 --- a/posthog/management/commands/create_channel_definitions_file.py +++ b/posthog/management/commands/create_channel_definitions_file.py @@ -3,7 +3,7 @@ import subprocess from collections import OrderedDict from dataclasses import dataclass -from enum import StrEnum +from enum import Enum from typing import Optional from django.core.management.base import BaseCommand @@ -12,7 +12,7 
@@ OUTPUT_FILE = "posthog/models/channel_type/channel_definitions.json" -class EntryKind(StrEnum): +class EntryKind(str, Enum): source = "source" medium = "medium" diff --git a/posthog/management/commands/generate_demo_data.py b/posthog/management/commands/generate_demo_data.py index ce094620453a1..f75f151259570 100644 --- a/posthog/management/commands/generate_demo_data.py +++ b/posthog/management/commands/generate_demo_data.py @@ -65,7 +65,7 @@ def add_arguments(self, parser): def handle(self, *args, **options): timer = monotonic() seed = options.get("seed") or secrets.token_hex(16) - now = options.get("now") or dt.datetime.now(dt.UTC) + now = options.get("now") or dt.datetime.now(dt.timezone.utc) existing_team_id = options.get("team_id") if ( existing_team_id is not None diff --git a/posthog/management/commands/migrate_team.py b/posthog/management/commands/migrate_team.py index e2395a46e2ad5..d964a7db0c07b 100644 --- a/posthog/management/commands/migrate_team.py +++ b/posthog/management/commands/migrate_team.py @@ -254,7 +254,7 @@ def create_migration( raise CommandError("Didn't receive 'y', exiting") print() # noqa: T201 - now = dt.datetime.now(dt.UTC) + now = dt.datetime.now(dt.timezone.utc) # This is a precaution so we don't accidentally leave the export running indefinitely. end_at = now + dt.timedelta(days=end_days_from_now) @@ -299,5 +299,5 @@ def parse_to_utc(date_str: str) -> dt.datetime: except ValueError: raise ValueError("Invalid date format. Expected 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.") - utc_datetime = parsed_datetime.replace(tzinfo=dt.UTC) + utc_datetime = parsed_datetime.replace(tzinfo=dt.timezone.utc) return utc_datetime diff --git a/posthog/management/commands/plugin_server_load_test.py b/posthog/management/commands/plugin_server_load_test.py index a97a5f6973804..4adfe8941e644 100644 --- a/posthog/management/commands/plugin_server_load_test.py +++ b/posthog/management/commands/plugin_server_load_test.py @@ -63,7 +63,7 @@ def add_arguments(self, parser): def handle(self, *args, **options): seed = options.get("seed") or secrets.token_hex(16) - now = options.get("now") or dt.datetime.now(dt.UTC) + now = options.get("now") or dt.datetime.now(dt.timezone.utc) admin = KafkaAdminClient(bootstrap_servers=settings.KAFKA_HOSTS) consumer = KafkaConsumer(KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, bootstrap_servers=settings.KAFKA_HOSTS) diff --git a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py index b38d0fbe138fb..3609a358054bd 100644 --- a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py +++ b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from unittest import mock from uuid import UUID, uuid4 @@ -143,7 +143,7 @@ def test_distinct_ids_deleted(self): wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch, ) def test_group_sync(self, mocked_ch_call): - ts = datetime.now(UTC) + ts = datetime.now(timezone.utc) Group.objects.create( team_id=self.team.pk, group_type_index=2, @@ -183,12 +183,12 @@ def test_group_sync_updates_group(self, mocked_ch_call): 2, "group-key", {"a": 5}, - timestamp=datetime.now(UTC) - timedelta(hours=3), + timestamp=datetime.now(timezone.utc) - timedelta(hours=3), ) group.group_properties = {"a": 5, "b": 3} group.save() - ts_before = datetime.now(UTC) + ts_before = 
datetime.now(timezone.utc) run_group_sync(self.team.pk, live_run=True, sync=True) mocked_ch_call.assert_called_once() @@ -213,7 +213,7 @@ def test_group_sync_updates_group(self, mocked_ch_call): ) self.assertLessEqual( ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), - datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S"), + datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"), ) # second time it's a no-op @@ -225,7 +225,7 @@ def test_group_sync_updates_group(self, mocked_ch_call): wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch, ) def test_group_sync_multiple_entries(self, mocked_ch_call): - ts = datetime.now(UTC) + ts = datetime.now(timezone.utc) Group.objects.create( team_id=self.team.pk, group_type_index=2, @@ -430,7 +430,7 @@ def everything_test_run(self, live_run): group_type_index=2, group_key="group-key", group_properties={"a": 1234}, - created_at=datetime.now(UTC) - timedelta(hours=3), + created_at=datetime.now(timezone.utc) - timedelta(hours=3), version=5, ) diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index 90783f6860419..7c56a54725290 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -1,6 +1,6 @@ import hashlib from dataclasses import dataclass -from enum import StrEnum +from enum import Enum import time import structlog from typing import Literal, Optional, Union, cast @@ -67,7 +67,7 @@ PERSON_KEY = "person" -class FeatureFlagMatchReason(StrEnum): +class FeatureFlagMatchReason(str, Enum): SUPER_CONDITION_VALUE = "super_condition_value" CONDITION_MATCH = "condition_match" NO_CONDITION_MATCH = "no_condition_match" diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py index d19b2418e0655..cde6d8020928f 100644 --- a/posthog/models/filters/stickiness_filter.py +++ b/posthog/models/filters/stickiness_filter.py @@ -72,7 +72,7 @@ def __init__( else: data = {"insight": INSIGHT_STICKINESS} super().__init__(data, request, **kwargs) - team: Optional[Team] = kwargs.get("team", None) + team: Optional["Team"] = kwargs.get("team", None) if not team: raise ValidationError("Team must be provided to stickiness filter") self.team = team diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index 87ab0497c8118..46ddfb9177f4c 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -1,7 +1,7 @@ import datetime import os from dataclasses import dataclass -from enum import StrEnum +from enum import Enum from typing import Any, Optional, cast from uuid import UUID @@ -288,13 +288,13 @@ class Meta: value: models.TextField = models.TextField(blank=True, null=True) -class PluginLogEntrySource(StrEnum): +class PluginLogEntrySource(str, Enum): SYSTEM = "SYSTEM" PLUGIN = "PLUGIN" CONSOLE = "CONSOLE" -class PluginLogEntryType(StrEnum): +class PluginLogEntryType(str, Enum): DEBUG = "DEBUG" LOG = "LOG" INFO = "INFO" diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index bb378b7616d43..7185306b8ccb2 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -1,5 +1,5 @@ import json -from enum import StrEnum +from enum import Enum from typing import ( Any, Literal, @@ -14,7 +14,7 @@ from posthog.utils import str_to_bool -class BehavioralPropertyType(StrEnum): +class BehavioralPropertyType(str, Enum): PERFORMED_EVENT = "performed_event" PERFORMED_EVENT_MULTIPLE = "performed_event_multiple" PERFORMED_EVENT_FIRST_TIME = 
"performed_event_first_time" diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py index 8f4125be67a3c..060c938186689 100644 --- a/posthog/models/test/test_async_deletion_model.py +++ b/posthog/models/test/test_async_deletion_model.py @@ -65,7 +65,7 @@ def test_mark_deletions_done_team_when_not_done(self): @snapshot_clickhouse_queries def test_mark_deletions_done_person(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) _create_event( event_uuid=uuid4(), @@ -101,7 +101,7 @@ def test_mark_deletions_done_person(self): @snapshot_clickhouse_queries def test_mark_deletions_done_person_when_not_done(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) _create_event( event_uuid=uuid4(), @@ -226,7 +226,7 @@ def test_delete_teams_unrelated(self): @snapshot_clickhouse_alter_queries def test_delete_person(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) # Event for person, created before AsyncDeletion, so it should be deleted _create_event( @@ -264,7 +264,7 @@ def test_delete_person(self): @snapshot_clickhouse_alter_queries def test_delete_person_unrelated(self): - base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + base_datetime = dt.datetime(2024, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) _create_event( event_uuid=uuid4(), diff --git a/posthog/models/test/test_person_override_model.py b/posthog/models/test/test_person_override_model.py index ea64d7a9c9749..d080970329664 100644 --- a/posthog/models/test/test_person_override_model.py +++ b/posthog/models/test/test_person_override_model.py @@ -48,7 +48,7 @@ def people(team): @pytest.fixture def oldest_event(): - return dt.datetime.now(dt.UTC) + return dt.datetime.now(dt.timezone.utc) @pytest.mark.django_db(transaction=True) diff --git a/posthog/schema.py b/posthog/schema.py index c081961771336..c44c3362371e9 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -2,7 +2,7 @@ from __future__ import annotations -from enum import Enum, StrEnum +from enum import Enum from typing import Any, Literal, Optional, Union from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel @@ -20,7 +20,7 @@ class MathGroupTypeIndex(float, Enum): NUMBER_4 = 4 -class AggregationAxisFormat(StrEnum): +class AggregationAxisFormat(str, Enum): NUMERIC = "numeric" DURATION = "duration" DURATION_MS = "duration_ms" @@ -28,7 +28,7 @@ class AggregationAxisFormat(StrEnum): PERCENTAGE_SCALED = "percentage_scaled" -class Kind(StrEnum): +class Kind(str, Enum): METHOD = "Method" FUNCTION = "Function" CONSTRUCTOR = "Constructor" @@ -87,7 +87,7 @@ class AutocompleteCompletionItem(BaseModel): ) -class BaseMathType(StrEnum): +class BaseMathType(str, Enum): TOTAL = "total" DAU = "dau" WEEKLY_ACTIVE = "weekly_active" @@ -95,14 +95,14 @@ class BaseMathType(StrEnum): UNIQUE_SESSION = "unique_session" -class BreakdownAttributionType(StrEnum): +class BreakdownAttributionType(str, Enum): FIRST_TOUCH = "first_touch" LAST_TOUCH = "last_touch" ALL_EVENTS = "all_events" STEP = "step" -class BreakdownType(StrEnum): +class BreakdownType(str, Enum): COHORT = "cohort" PERSON = "person" EVENT = "event" @@ -164,7 +164,7 @@ class ChartAxis(BaseModel): column: str -class ChartDisplayType(StrEnum): +class ChartDisplayType(str, Enum): 
ACTIONS_LINE_GRAPH = "ActionsLineGraph" ACTIONS_BAR = "ActionsBar" ACTIONS_AREA_GRAPH = "ActionsAreaGraph" @@ -205,7 +205,7 @@ class CompareFilter(BaseModel): compare_to: Optional[str] = None -class CountPerActorMathType(StrEnum): +class CountPerActorMathType(str, Enum): AVG_COUNT_PER_ACTOR = "avg_count_per_actor" MIN_COUNT_PER_ACTOR = "min_count_per_actor" MAX_COUNT_PER_ACTOR = "max_count_per_actor" @@ -255,14 +255,14 @@ class DatabaseSchemaSource(BaseModel): status: str -class Type(StrEnum): +class Type(str, Enum): POSTHOG = "posthog" DATA_WAREHOUSE = "data_warehouse" VIEW = "view" BATCH_EXPORT = "batch_export" -class DatabaseSerializedFieldType(StrEnum): +class DatabaseSerializedFieldType(str, Enum): INTEGER = "integer" FLOAT = "float" STRING = "string" @@ -301,13 +301,13 @@ class Day(RootModel[int]): root: int -class DurationType(StrEnum): +class DurationType(str, Enum): DURATION = "duration" ACTIVE_SECONDS = "active_seconds" INACTIVE_SECONDS = "inactive_seconds" -class Key(StrEnum): +class Key(str, Enum): TAG_NAME = "tag_name" TEXT = "text" HREF = "href" @@ -336,14 +336,14 @@ class EmptyPropertyFilter(BaseModel): ) -class EntityType(StrEnum): +class EntityType(str, Enum): ACTIONS = "actions" EVENTS = "events" DATA_WAREHOUSE = "data_warehouse" NEW_ENTITY = "new_entity" -class ErrorTrackingOrder(StrEnum): +class ErrorTrackingOrder(str, Enum): LAST_SEEN = "last_seen" FIRST_SEEN = "first_seen" UNIQUE_OCCURRENCES = "unique_occurrences" @@ -360,7 +360,7 @@ class EventDefinition(BaseModel): properties: dict[str, Any] -class CorrelationType(StrEnum): +class CorrelationType(str, Enum): SUCCESS = "success" FAILURE = "failure" @@ -418,12 +418,12 @@ class EventsQueryPersonColumn(BaseModel): uuid: str -class FilterLogicalOperator(StrEnum): +class FilterLogicalOperator(str, Enum): AND_ = "AND" OR_ = "OR" -class FunnelConversionWindowTimeUnit(StrEnum): +class FunnelConversionWindowTimeUnit(str, Enum): SECOND = "second" MINUTE = "minute" HOUR = "hour" @@ -440,7 +440,7 @@ class FunnelCorrelationResult(BaseModel): skewed: bool -class FunnelCorrelationResultsType(StrEnum): +class FunnelCorrelationResultsType(str, Enum): EVENTS = "events" PROPERTIES = "properties" EVENT_WITH_PROPERTIES = "event_with_properties" @@ -468,18 +468,18 @@ class FunnelExclusionSteps(BaseModel): funnelToStep: int -class FunnelLayout(StrEnum): +class FunnelLayout(str, Enum): HORIZONTAL = "horizontal" VERTICAL = "vertical" -class FunnelPathType(StrEnum): +class FunnelPathType(str, Enum): FUNNEL_PATH_BEFORE_STEP = "funnel_path_before_step" FUNNEL_PATH_BETWEEN_STEPS = "funnel_path_between_steps" FUNNEL_PATH_AFTER_STEP = "funnel_path_after_step" -class FunnelStepReference(StrEnum): +class FunnelStepReference(str, Enum): TOTAL = "total" PREVIOUS = "previous" @@ -492,7 +492,7 @@ class FunnelTimeToConvertResults(BaseModel): bins: list[list[int]] -class FunnelVizType(StrEnum): +class FunnelVizType(str, Enum): STEPS = "steps" TIME_TO_CONVERT = "time_to_convert" TRENDS = "trends" @@ -516,44 +516,44 @@ class HogQLNotice(BaseModel): start: Optional[int] = None -class BounceRatePageViewMode(StrEnum): +class BounceRatePageViewMode(str, Enum): COUNT_PAGEVIEWS = "count_pageviews" UNIQ_URLS = "uniq_urls" -class InCohortVia(StrEnum): +class InCohortVia(str, Enum): AUTO = "auto" LEFTJOIN = "leftjoin" SUBQUERY = "subquery" LEFTJOIN_CONJOINED = "leftjoin_conjoined" -class MaterializationMode(StrEnum): +class MaterializationMode(str, Enum): AUTO = "auto" LEGACY_NULL_AS_STRING = "legacy_null_as_string" LEGACY_NULL_AS_NULL = "legacy_null_as_null" 
DISABLED = "disabled" -class PersonsArgMaxVersion(StrEnum): +class PersonsArgMaxVersion(str, Enum): AUTO = "auto" V1 = "v1" V2 = "v2" -class PersonsJoinMode(StrEnum): +class PersonsJoinMode(str, Enum): INNER = "inner" LEFT = "left" -class PersonsOnEventsMode(StrEnum): +class PersonsOnEventsMode(str, Enum): DISABLED = "disabled" PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_no_override_properties_on_events" PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS = "person_id_override_properties_on_events" PERSON_ID_OVERRIDE_PROPERTIES_JOINED = "person_id_override_properties_joined" -class SessionTableVersion(StrEnum): +class SessionTableVersion(str, Enum): AUTO = "auto" V1 = "v1" V2 = "v2" @@ -586,7 +586,7 @@ class HogQueryResponse(BaseModel): stdout: Optional[str] = None -class Compare(StrEnum): +class Compare(str, Enum): CURRENT = "current" PREVIOUS = "previous" @@ -626,7 +626,7 @@ class InsightDateRange(BaseModel): ) -class InsightFilterProperty(StrEnum): +class InsightFilterProperty(str, Enum): TRENDS_FILTER = "trendsFilter" FUNNELS_FILTER = "funnelsFilter" RETENTION_FILTER = "retentionFilter" @@ -635,7 +635,7 @@ class InsightFilterProperty(StrEnum): LIFECYCLE_FILTER = "lifecycleFilter" -class InsightNodeKind(StrEnum): +class InsightNodeKind(str, Enum): TRENDS_QUERY = "TrendsQuery" FUNNELS_QUERY = "FunnelsQuery" RETENTION_QUERY = "RetentionQuery" @@ -644,7 +644,7 @@ class InsightNodeKind(StrEnum): LIFECYCLE_QUERY = "LifecycleQuery" -class InsightType(StrEnum): +class InsightType(str, Enum): TRENDS = "TRENDS" STICKINESS = "STICKINESS" LIFECYCLE = "LIFECYCLE" @@ -656,7 +656,7 @@ class InsightType(StrEnum): HOG = "HOG" -class IntervalType(StrEnum): +class IntervalType(str, Enum): MINUTE = "minute" HOUR = "hour" DAY = "day" @@ -664,14 +664,14 @@ class IntervalType(StrEnum): MONTH = "month" -class LifecycleToggle(StrEnum): +class LifecycleToggle(str, Enum): NEW = "new" RESURRECTING = "resurrecting" RETURNING = "returning" DORMANT = "dormant" -class NodeKind(StrEnum): +class NodeKind(str, Enum): EVENTS_NODE = "EventsNode" ACTIONS_NODE = "ActionsNode" DATA_WAREHOUSE_NODE = "DataWarehouseNode" @@ -716,7 +716,7 @@ class PathCleaningFilter(BaseModel): regex: Optional[str] = None -class PathType(StrEnum): +class PathType(str, Enum): FIELD_PAGEVIEW = "$pageview" FIELD_SCREEN = "$screen" CUSTOM_EVENT = "custom_event" @@ -765,7 +765,7 @@ class PathsFilterLegacy(BaseModel): step_limit: Optional[int] = None -class PropertyFilterType(StrEnum): +class PropertyFilterType(str, Enum): META = "meta" EVENT = "event" PERSON = "person" @@ -780,7 +780,7 @@ class PropertyFilterType(StrEnum): DATA_WAREHOUSE_PERSON_PROPERTY = "data_warehouse_person_property" -class PropertyMathType(StrEnum): +class PropertyMathType(str, Enum): AVG = "avg" SUM = "sum" MIN = "min" @@ -791,7 +791,7 @@ class PropertyMathType(StrEnum): P99 = "p99" -class PropertyOperator(StrEnum): +class PropertyOperator(str, Enum): EXACT = "exact" IS_NOT = "is_not" ICONTAINS = "icontains" @@ -909,7 +909,7 @@ class RecordingPropertyFilter(BaseModel): value: Optional[Union[str, float, list[Union[str, float]]]] = None -class Kind1(StrEnum): +class Kind1(str, Enum): ACTIONS_NODE = "ActionsNode" EVENTS_NODE = "EventsNode" @@ -927,19 +927,19 @@ class RetentionEntity(BaseModel): uuid: Optional[str] = None -class RetentionReference(StrEnum): +class RetentionReference(str, Enum): TOTAL = "total" PREVIOUS = "previous" -class RetentionPeriod(StrEnum): +class RetentionPeriod(str, Enum): HOUR = "Hour" DAY = "Day" WEEK = "Week" MONTH = "Month" -class 
RetentionType(StrEnum): +class RetentionType(str, Enum): RETENTION_RECURRING = "retention_recurring" RETENTION_FIRST_TIME = "retention_first_time" @@ -970,7 +970,7 @@ class SessionPropertyFilter(BaseModel): value: Optional[Union[str, float, list[Union[str, float]]]] = None -class StepOrderValue(StrEnum): +class StepOrderValue(str, Enum): STRICT = "strict" UNORDERED = "unordered" ORDERED = "ordered" @@ -1101,7 +1101,7 @@ class TimelineEntry(BaseModel): sessionId: Optional[str] = Field(default=None, description="Session ID. None means out-of-session events") -class YAxisScaleType(StrEnum): +class YAxisScaleType(str, Enum): LOG10 = "log10" LINEAR = "linear" @@ -1191,7 +1191,7 @@ class VizSpecificOptions(BaseModel): RETENTION: Optional[RETENTION] = None -class Kind2(StrEnum): +class Kind2(str, Enum): UNIT = "unit" DURATION_S = "duration_s" PERCENTAGE = "percentage" @@ -1238,7 +1238,7 @@ class WebOverviewQueryResponse(BaseModel): ) -class WebStatsBreakdown(StrEnum): +class WebStatsBreakdown(str, Enum): PAGE = "Page" INITIAL_PAGE = "InitialPage" EXIT_PAGE = "ExitPage" diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index e4ecc1ccfe37a..e0ac5d701a3eb 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -1,7 +1,7 @@ import os import time from contextlib import contextmanager -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from prometheus_client import Histogram import json from typing import Any, cast @@ -430,7 +430,7 @@ def _gather_session_recording_sources(self, recording: SessionRecording) -> Resp # Keys are like 1619712000-1619712060 blob_key = full_key.replace(blob_prefix.rstrip("/") + "/", "") blob_key_base = blob_key.split(".")[0] # Remove the extension if it exists - time_range = [datetime.fromtimestamp(int(x) / 1000, tz=UTC) for x in blob_key_base.split("-")] + time_range = [datetime.fromtimestamp(int(x) / 1000, tz=timezone.utc) for x in blob_key_base.split("-")] sources.append( { @@ -446,7 +446,7 @@ def _gather_session_recording_sources(self, recording: SessionRecording) -> Resp newest_timestamp = min(sources, key=lambda k: k["end_timestamp"])["end_timestamp"] if might_have_realtime: - might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(UTC) + might_have_realtime = oldest_timestamp + timedelta(hours=24) > datetime.now(timezone.utc) if might_have_realtime: sources.append( { diff --git a/posthog/session_recordings/session_recording_helpers.py b/posthog/session_recordings/session_recording_helpers.py index c54117603c92d..8dfb1c0ad2396 100644 --- a/posthog/session_recordings/session_recording_helpers.py +++ b/posthog/session_recordings/session_recording_helpers.py @@ -2,7 +2,7 @@ import gzip import json from collections import defaultdict -from datetime import datetime, UTC +from datetime import datetime, timezone from typing import Any from collections.abc import Callable, Generator @@ -268,7 +268,7 @@ def is_active_event(event: SessionRecordingEventSummary) -> bool: def parse_snapshot_timestamp(timestamp: int): - return datetime.fromtimestamp(timestamp / 1000, UTC) + return datetime.fromtimestamp(timestamp / 1000, timezone.utc) def convert_to_timestamp(source: str) -> int: diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index bed84f6be517a..eee3c288c3be9 100644 --- 
a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -1,7 +1,7 @@ import json import time import uuid -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from unittest.mock import ANY, patch, MagicMock, call from urllib.parse import urlencode @@ -395,7 +395,7 @@ def test_get_single_session_recording_metadata(self): "distinct_id": "d1", "viewed": False, "recording_duration": 30, - "start_time": base_time.replace(tzinfo=UTC).strftime("%Y-%m-%dT%H:%M:%SZ"), + "start_time": base_time.replace(tzinfo=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), "end_time": (base_time + relativedelta(seconds=30)).strftime("%Y-%m-%dT%H:%M:%SZ"), "click_count": 0, "keypress_count": 0, diff --git a/posthog/tasks/test/test_process_scheduled_changes.py b/posthog/tasks/test/test_process_scheduled_changes.py index 452c97a6e24e1..0e1fb9b9db3f8 100644 --- a/posthog/tasks/test/test_process_scheduled_changes.py +++ b/posthog/tasks/test/test_process_scheduled_changes.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, UTC +from datetime import datetime, timedelta, timezone from posthog.models import ScheduledChange, FeatureFlag from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries from posthog.tasks.process_scheduled_changes import process_scheduled_changes @@ -21,7 +21,7 @@ def test_schedule_feature_flag_set_active(self) -> None: record_id=feature_flag.id, model_name="FeatureFlag", payload={"operation": "update_status", "value": True}, - scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)).isoformat(), + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)).isoformat(), ) process_scheduled_changes() @@ -55,7 +55,7 @@ def test_schedule_feature_flag_add_release_condition(self) -> None: record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -105,7 +105,7 @@ def test_schedule_feature_flag_add_release_condition_preserve_variants(self) -> record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -131,7 +131,7 @@ def test_schedule_feature_flag_invalid_payload(self) -> None: record_id=feature_flag.id, model_name="FeatureFlag", payload=payload, - scheduled_at=(datetime.now(UTC) - timedelta(seconds=30)), + scheduled_at=(datetime.now(timezone.utc) - timedelta(seconds=30)), ) process_scheduled_changes() @@ -169,11 +169,11 @@ def test_schedule_feature_flag_multiple_changes(self) -> None: "operation": "add_release_condition", "value": {"groups": [change_past_condition], "multivariate": None, "payloads": {}}, }, - scheduled_at=(datetime.now(UTC) - timedelta(hours=1)), + scheduled_at=(datetime.now(timezone.utc) - timedelta(hours=1)), ) # 2. 
Due in the past and already executed - change_past_executed_at = datetime.now(UTC) - timedelta(hours=5) + change_past_executed_at = datetime.now(timezone.utc) - timedelta(hours=5) change_past_executed = ScheduledChange.objects.create( team=self.team, record_id=feature_flag.id, @@ -197,7 +197,7 @@ def test_schedule_feature_flag_multiple_changes(self) -> None: "operation": "add_release_condition", "value": {"groups": [change_due_now_condition], "multivariate": None, "payloads": {}}, }, - scheduled_at=datetime.now(UTC), + scheduled_at=datetime.now(timezone.utc), ) # 4. Due in the future @@ -206,7 +206,7 @@ def test_schedule_feature_flag_multiple_changes(self) -> None: record_id=feature_flag.id, model_name="FeatureFlag", payload={"operation": "update_status", "value": False}, - scheduled_at=(datetime.now(UTC) + timedelta(hours=1)), + scheduled_at=(datetime.now(timezone.utc) + timedelta(hours=1)), ) process_scheduled_changes() diff --git a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py index b03c04146a582..dec9da654c591 100644 --- a/posthog/tasks/test/test_warehouse.py +++ b/posthog/tasks/test/test_warehouse.py @@ -46,7 +46,7 @@ def test_check_synced_row_limits_of_team( @patch("posthog.tasks.warehouse.get_ph_client") @patch( "posthog.tasks.warehouse.DEFAULT_DATE_TIME", - datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC), + datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc), ) @freeze_time("2023-11-07") def test_capture_workspace_rows_synced_by_team_month_cutoff(self, mock_get_ph_client: MagicMock) -> None: @@ -87,13 +87,13 @@ def test_capture_workspace_rows_synced_by_team_month_cutoff(self, mock_get_ph_cl self.team.refresh_from_db() self.assertEqual( self.team.external_data_workspace_last_synced_at, - datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC), + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), ) @patch("posthog.tasks.warehouse.get_ph_client") @patch( "posthog.tasks.warehouse.DEFAULT_DATE_TIME", - datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.UTC), + datetime.datetime(2023, 11, 7, 0, 0, 0, tzinfo=datetime.timezone.utc), ) @freeze_time("2023-11-07") def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock_get_ph_client: MagicMock) -> None: @@ -101,7 +101,7 @@ def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock mock_get_ph_client.return_value = mock_ph_client self.team.external_data_workspace_last_synced_at = datetime.datetime( - 2023, 10, 30, 19, 32, 41, tzinfo=datetime.UTC + 2023, 10, 30, 19, 32, 41, tzinfo=datetime.timezone.utc ) self.team.save() @@ -142,5 +142,5 @@ def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock self.team.refresh_from_db() self.assertEqual( self.team.external_data_workspace_last_synced_at, - datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.UTC), + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), ) diff --git a/posthog/tasks/warehouse.py b/posthog/tasks/warehouse.py index 0bff919c739d9..ff76f40e34460 100644 --- a/posthog/tasks/warehouse.py +++ b/posthog/tasks/warehouse.py @@ -18,7 +18,7 @@ MONTHLY_LIMIT = 500_000_000 # TODO: adjust to whenever billing officially starts -DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.UTC) +DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.timezone.utc) def capture_external_data_rows_synced() -> None: @@ -91,7 +91,7 @@ def check_synced_row_limits_of_team(team_id: int) -> None: 
def capture_workspace_rows_synced_by_team(team_id: int) -> None: ph_client = get_ph_client() team = Team.objects.get(pk=team_id) - now = datetime.datetime.now(datetime.UTC) + now = datetime.datetime.now(datetime.timezone.utc) begin = team.external_data_workspace_last_synced_at or DEFAULT_DATE_TIME team.external_data_workspace_last_synced_at = now diff --git a/posthog/temporal/batch_exports/backfill_batch_export.py b/posthog/temporal/batch_exports/backfill_batch_export.py index c7e6d53c47ad5..75df851caefdc 100644 --- a/posthog/temporal/batch_exports/backfill_batch_export.py +++ b/posthog/temporal/batch_exports/backfill_batch_export.py @@ -114,7 +114,7 @@ class BackfillScheduleInputs: def get_utcnow(): """Return the current time in UTC. This function is only required for mocking during tests, because mocking the global datetime breaks Temporal.""" - return dt.datetime.now(dt.UTC) + return dt.datetime.now(dt.timezone.utc) @temporalio.activity.defn diff --git a/posthog/temporal/batch_exports/squash_person_overrides.py b/posthog/temporal/batch_exports/squash_person_overrides.py index 69ecd87c0da96..eac97a38a6fb6 100644 --- a/posthog/temporal/batch_exports/squash_person_overrides.py +++ b/posthog/temporal/batch_exports/squash_person_overrides.py @@ -5,7 +5,7 @@ import json import typing from dataclasses import dataclass, field -from datetime import date, datetime, timedelta, timezone, UTC +from datetime import date, datetime, timedelta, timezone from temporalio import activity, workflow from temporalio.common import RetryPolicy @@ -14,7 +14,7 @@ from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater -EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=UTC) +EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc) CREATE_TABLE_PERSON_DISTINCT_ID_OVERRIDES_JOIN = """ @@ -174,7 +174,7 @@ } -def parse_clickhouse_timestamp(s: str, tzinfo: timezone = UTC) -> datetime: +def parse_clickhouse_timestamp(s: str, tzinfo: timezone = timezone.utc) -> datetime: """Parse a timestamp from ClickHouse.""" return datetime.strptime(s.strip(), "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=tzinfo) diff --git a/posthog/temporal/tests/batch_exports/conftest.py b/posthog/temporal/tests/batch_exports/conftest.py index 617cfe1559b05..98bd2e80422b0 100644 --- a/posthog/temporal/tests/batch_exports/conftest.py +++ b/posthog/temporal/tests/batch_exports/conftest.py @@ -203,7 +203,7 @@ def data_interval_start(data_interval_end, interval): @pytest.fixture def data_interval_end(interval): """Set a test data interval end.""" - return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) @pytest_asyncio.fixture diff --git a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py index e0eb79ab10cd0..f8823710c27fd 100644 --- a/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py +++ b/posthog/temporal/tests/batch_exports/test_backfill_batch_export.py @@ -60,66 +60,66 @@ async def temporal_schedule(temporal_client, team): "start_at,end_at,step,expected", [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), dt.timedelta(days=1), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, 
tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), ) ], ), ( - dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 12, 20, 0, tzinfo=dt.timezone.utc), dt.timedelta(hours=1), [ ( - dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 10, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.timezone.utc), ), ( - dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 11, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), ), ], ), ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), dt.timedelta(hours=12), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), ), ( - dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 12, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), ), ], ), ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc), dt.timedelta(days=1), [ ( - dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), ), ( - dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 2, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc), ), ( - dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 3, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc), ), ( - dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 1, 4, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 1, 5, 0, 0, 0, tzinfo=dt.timezone.utc), ), ], ), @@ -145,8 +145,8 @@ async def test_get_schedule_frequency(activity_environment, temporal_worker, tem @pytest.mark.django_db(transaction=True) async def test_backfill_schedule_activity(activity_environment, temporal_worker, temporal_client, temporal_schedule): """Test backfill_schedule activity schedules all backfill runs.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) desc = await temporal_schedule.describe() inputs = BackfillScheduleInputs( @@ -199,8 +199,8 @@ async def test_backfill_schedule_activity(activity_environment, temporal_worker, @pytest.mark.django_db(transaction=True) async def 
test_backfill_batch_export_workflow(temporal_worker, temporal_schedule, temporal_client, team): """Test BackfillBatchExportWorkflow executes all backfill runs and updates model.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) desc = await temporal_schedule.describe() @@ -275,9 +275,9 @@ async def test_backfill_batch_export_workflow_no_end_at( """Test BackfillBatchExportWorkflow executes all backfill runs and updates model.""" # Note the mocked time here, we should stop backfilling at 8 minutes and unpause the job. - mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.UTC) + mock_utcnow.return_value = dt.datetime(2023, 1, 1, 0, 8, 12, tzinfo=dt.timezone.utc) - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) end_at = None desc = await temporal_schedule.describe() @@ -356,8 +356,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted( temporal_worker, temporal_schedule, temporal_client, team ): """Test BackfillBatchExportWorkflow fails when its underlying Temporal Schedule is deleted.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) desc = await temporal_schedule.describe() @@ -398,8 +398,8 @@ async def test_backfill_batch_export_workflow_fails_when_schedule_deleted_after_ In this test, in contrats to the previous one, we wait until we have started running some backfill runs before cancelling. """ - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) - end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + end_at = dt.datetime(2023, 1, 1, 0, 10, 0, tzinfo=dt.timezone.utc) desc = await temporal_schedule.describe() @@ -471,8 +471,8 @@ async def test_backfill_batch_export_workflow_is_cancelled_on_repeated_failures( temporal_worker, failing_s3_batch_export, temporal_client, ateam, clickhouse_client ): """Test BackfillBatchExportWorkflow will be cancelled on repeated failures.""" - start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.UTC) - end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.UTC) + start_at = dt.datetime(2023, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc) + end_at = dt.datetime(2023, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc) # We need some data otherwise the S3 batch export will not fail as it short-circuits. 
for d in date_range(start_at, end_at, dt.timedelta(minutes=5)): diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py index dda307dda004a..2634da9c1dff9 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -41,7 +41,9 @@ def assert_records_match_events(records, events): key in ("timestamp", "_inserted_at", "created_at") and expected.get(key.removeprefix("_"), None) is not None ): - assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace(tzinfo=dt.UTC), msg + assert value == dt.datetime.fromisoformat(expected[key.removeprefix("_")]).replace( + tzinfo=dt.timezone.utc + ), msg elif isinstance(expected[key], dict): assert value == json.dumps(expected[key]), msg else: @@ -287,7 +289,7 @@ async def test_iter_records_with_single_field_and_alias(clickhouse_client, field if isinstance(result, dt.datetime): # Event generation function returns datetimes as strings. - expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.UTC) + expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.timezone.utc) assert result == expected_value @@ -386,16 +388,16 @@ async def test_iter_records_uses_extra_query_parameters(clickhouse_client): "hour", "2023-08-01T00:00:00+00:00", ( - dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 7, 31, 23, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc), ), ), ( "day", "2023-08-01T00:00:00+00:00", ( - dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.UTC), - dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 7, 31, 0, 0, 0, tzinfo=dt.timezone.utc), + dt.datetime(2023, 8, 1, 0, 0, 0, tzinfo=dt.timezone.utc), ), ), ], diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index fc3ee12b9812f..99802232b9aa8 100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -50,7 +50,7 @@ pytestmark = [SKIP_IF_MISSING_GOOGLE_APPLICATION_CREDENTIALS, pytest.mark.asyncio, pytest.mark.django_db] -TEST_TIME = dt.datetime.now(dt.UTC) +TEST_TIME = dt.datetime.now(dt.timezone.utc) async def assert_clickhouse_records_in_bigquery( @@ -144,7 +144,7 @@ async def assert_clickhouse_records_in_bigquery( if k in json_columns and v is not None: expected_record[k] = json.loads(v) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.UTC) + expected_record[k] = v.replace(tzinfo=dt.timezone.utc) else: expected_record[k] = v @@ -298,7 +298,7 @@ async def test_insert_into_bigquery_activity_inserts_data_into_bigquery_table( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) + ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -352,7 +352,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) 
+ ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -393,7 +393,7 @@ async def test_insert_into_bigquery_activity_merges_data_in_follow_up_runs( with freeze_time(TEST_TIME) as frozen_time: await activity_environment.run(insert_into_bigquery_activity, insert_inputs) - ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) + ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, @@ -523,7 +523,7 @@ async def test_bigquery_export_workflow( persons_to_export_created ) - ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) + ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, clickhouse_client=clickhouse_client, @@ -773,7 +773,7 @@ async def never_finish_activity(_: BigQueryInsertInputs) -> str: ([{"test": 6.0}], [bigquery.SchemaField("test", "FLOAT64")]), ([{"test": True}], [bigquery.SchemaField("test", "BOOL")]), ([{"test": dt.datetime.now()}], [bigquery.SchemaField("test", "TIMESTAMP")]), - ([{"test": dt.datetime.now(tz=dt.UTC)}], [bigquery.SchemaField("test", "TIMESTAMP")]), + ([{"test": dt.datetime.now(tz=dt.timezone.utc)}], [bigquery.SchemaField("test", "TIMESTAMP")]), ( [ { @@ -783,7 +783,7 @@ async def never_finish_activity(_: BigQueryInsertInputs) -> str: "test_float": 6.0, "test_bool": False, "test_timestamp": dt.datetime.now(), - "test_timestamptz": dt.datetime.now(tz=dt.UTC), + "test_timestamptz": dt.datetime.now(tz=dt.timezone.utc), } ], [ diff --git a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py index 5821a8a98a90a..4dfb8563ff943 100644 --- a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py @@ -99,7 +99,7 @@ async def assert_clickhouse_records_in_mock_server( if k == "properties": expected_record[k] = json.loads(v) if v else {} elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.UTC).isoformat() + expected_record[k] = v.replace(tzinfo=dt.timezone.utc).isoformat() else: expected_record[k] = v @@ -134,8 +134,8 @@ async def test_insert_into_http_activity_inserts_data_into_http_endpoint( * Are not duplicates of other events that are in the same batch. * Do not have an event name contained in the batch export's exclude_events. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. 
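A minimal, self-contained sketch of the two compatibility patterns these hunks apply repeatedly, assuming only the standard library; the enum name below is illustrative and not taken from this changeset. It should behave the same on Python 3.10 and 3.11:

from datetime import datetime, timezone
from enum import Enum

# datetime.UTC (added in Python 3.11) is an alias for timezone.utc, so swapping
# dt.UTC for dt.timezone.utc is a pure rename with identical behaviour.
UTC = timezone.utc

# enum.StrEnum (also 3.11+) can be approximated on 3.10 with a str mixin;
# members still compare equal to their string values.
class ExampleKind(str, Enum):
    EVENTS = "events"
    ACTIONS = "actions"

assert datetime.now(UTC).tzinfo is timezone.utc
assert ExampleKind.EVENTS == "events"
# Caveat: str()/f-string output of a (str, Enum) member can differ from StrEnum,
# so callers should rely on .value or equality rather than str() formatting.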
@@ -211,8 +211,8 @@ async def test_insert_into_http_activity_throws_on_bad_http_status( clickhouse_client, activity_environment, http_config, exclude_events ): """Test that the insert_into_http_activity function throws on status >= 400""" - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. diff --git a/posthog/temporal/tests/batch_exports/test_logger.py b/posthog/temporal/tests/batch_exports/test_logger.py index 3ee605882e349..4ee3ca9a014aa 100644 --- a/posthog/temporal/tests/batch_exports/test_logger.py +++ b/posthog/temporal/tests/batch_exports/test_logger.py @@ -211,13 +211,13 @@ def activity_environment(request): "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), @@ -262,13 +262,13 @@ async def log_activity(): "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), @@ -324,13 +324,13 @@ def log_entries_table(): "activity_environment", [ ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-{dt.datetime.now(dt.timezone.utc)}", workflow_type="s3-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), ), ActivityInfo( - workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.UTC)}", + workflow_id=f"{BATCH_EXPORT_ID}-Backfill-{dt.datetime.now(dt.timezone.utc)}", workflow_type="backfill-batch-export", workflow_run_id=str(uuid.uuid4()), attempt=random.randint(1, 10000), diff --git a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py index 54f638a68d688..3d4722fe6db7d 100644 --- a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py @@ -117,7 +117,7 @@ async def assert_clickhouse_records_in_postgres( if k in {"properties", "set", "set_once", "person_properties"} and v is not None: expected_record[k] = json.loads(v) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.UTC) + expected_record[k] = v.replace(tzinfo=dt.timezone.utc) else: expected_record[k] = v @@ -201,8 +201,8 @@ async def test_insert_into_postgres_activity_inserts_data_into_postgres_table( development 
postgres instance for testing. But we setup and manage our own database to avoid conflicting with PostHog itself. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py index 40071bd153b53..db8257a7ee583 100644 --- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py @@ -130,7 +130,7 @@ async def assert_clickhouse_records_in_redshfit( remove_escaped_whitespace_recursive(json.loads(v)), ensure_ascii=False ) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.UTC) # type: ignore + expected_record[k] = v.replace(tzinfo=dt.timezone.utc) # type: ignore else: expected_record[k] = v @@ -242,8 +242,8 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( Once we have these events, we pass them to the assert_events_in_redshift function to check that they appear in the expected Redshift table. """ - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) # Generate a random team id integer. There's still a chance of a collision, # but it's very small. diff --git a/posthog/temporal/tests/batch_exports/test_run_updates.py b/posthog/temporal/tests/batch_exports/test_run_updates.py index 649585f52836b..1e50e13325b82 100644 --- a/posthog/temporal/tests/batch_exports/test_run_updates.py +++ b/posthog/temporal/tests/batch_exports/test_run_updates.py @@ -85,8 +85,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export): We check if a 'BatchExportRun' is created after the activity runs. 
""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) - end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) + start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) + end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -110,8 +110,8 @@ async def test_start_batch_export_run(activity_environment, team, batch_export): @pytest.mark.asyncio async def test_finish_batch_export_run(activity_environment, team, batch_export): """Test the export_run_status activity.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) - end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) + start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) + end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -145,8 +145,8 @@ async def test_finish_batch_export_run(activity_environment, team, batch_export) @pytest.mark.asyncio async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(activity_environment, team, batch_export): """Test if 'finish_batch_export_run' will pause a batch export upon reaching failure_threshold.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) - end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) + start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) + end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) inputs = StartBatchExportRunInputs( team_id=team.id, @@ -183,8 +183,8 @@ async def test_finish_batch_export_run_pauses_if_reaching_failure_threshold(acti @pytest.mark.asyncio async def test_finish_batch_export_run_never_pauses_with_small_check_window(activity_environment, team, batch_export): """Test if 'finish_batch_export_run' will never pause a batch export with a small check window.""" - start = dt.datetime(2023, 4, 24, tzinfo=dt.UTC) - end = dt.datetime(2023, 4, 25, tzinfo=dt.UTC) + start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) + end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) inputs = StartBatchExportRunInputs( team_id=team.id, diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 1462fd03b0b35..41863344d84c7 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -981,8 +981,8 @@ async def test_insert_into_snowflake_activity_inserts_data_into_snowflake_table( that they appear in the expected Snowflake table. This function runs against a real Snowflake instance, so the environment should be populated with the necessary credentials. 
""" - data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.UTC) - data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) team_id = random.randint(1, 1000000) await generate_test_events_in_clickhouse( diff --git a/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py b/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py index 71a5bff99c508..19bf42bad8f5f 100644 --- a/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py +++ b/posthog/temporal/tests/persons_on_events_squash/test_squash_person_overrides_workflow.py @@ -1,7 +1,7 @@ import operator import random from collections import defaultdict -from datetime import datetime, UTC +from datetime import datetime, timezone from typing import NamedTuple, TypedDict from uuid import UUID, uuid4 @@ -862,7 +862,7 @@ async def test_delete_person_overrides_mutation_within_grace_period( activity_environment, events_to_override, person_overrides_data, clickhouse_client ): """Test we do not delete person overrides if they are within the grace period.""" - now = datetime.now(tz=UTC) + now = datetime.now(tz=timezone.utc) override_timestamp = int(now.timestamp()) team_id, person_override = next(iter(person_overrides_data.items())) distinct_id, _ = next(iter(person_override)) @@ -914,7 +914,7 @@ async def test_delete_person_overrides_mutation_within_grace_period( assert int(row[0]) == not_deleted_person["team_id"] assert row[1] == not_deleted_person["distinct_id"] assert UUID(row[2]) == UUID(not_deleted_person["person_id"]) - _timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=UTC) + _timestamp = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc) # _timestamp is up to second precision assert _timestamp == now.replace(microsecond=0) diff --git a/posthog/temporal/tests/test_clickhouse.py b/posthog/temporal/tests/test_clickhouse.py index 79003f718e682..0d02dcd5bf790 100644 --- a/posthog/temporal/tests/test_clickhouse.py +++ b/posthog/temporal/tests/test_clickhouse.py @@ -23,12 +23,12 @@ (("; DROP TABLE events --",), b"('; DROP TABLE events --')"), (("'a'); DROP TABLE events --",), b"('\\'a\\'); DROP TABLE events --')"), ( - dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.UTC), + dt.datetime(2023, 7, 14, 0, 0, 0, tzinfo=dt.timezone.utc), b"toDateTime('2023-07-14 00:00:00', 'UTC')", ), (dt.datetime(2023, 7, 14, 0, 0, 0), b"toDateTime('2023-07-14 00:00:00')"), ( - dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.UTC), + dt.datetime(2023, 7, 14, 0, 0, 0, 5555, tzinfo=dt.timezone.utc), b"toDateTime64('2023-07-14 00:00:00.005555', 6, 'UTC')", ), ], diff --git a/posthog/temporal/tests/utils/datetimes.py b/posthog/temporal/tests/utils/datetimes.py index d5a3f747bf874..c168e885a3e8d 100644 --- a/posthog/temporal/tests/utils/datetimes.py +++ b/posthog/temporal/tests/utils/datetimes.py @@ -16,4 +16,4 @@ def to_isoformat(d: str | None) -> str | None: """Parse a string and return it as default isoformatted.""" if d is None: return None - return dt.datetime.fromisoformat(d).replace(tzinfo=dt.UTC).isoformat() + return dt.datetime.fromisoformat(d).replace(tzinfo=dt.timezone.utc).isoformat() diff --git a/posthog/test/test_datetime.py b/posthog/test/test_datetime.py index 9365cffb085a8..2b8e6b087e5fb 100644 --- 
a/posthog/test/test_datetime.py +++ b/posthog/test/test_datetime.py @@ -1,4 +1,4 @@ -from datetime import datetime, UTC +from datetime import datetime, timezone from posthog.datetime import ( start_of_hour, @@ -23,7 +23,7 @@ def test_start_of_day(): def test_end_of_day(): assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime( - 2023, 2, 8, 23, 59, 59, 999999, tzinfo=UTC + 2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc ) diff --git a/posthog/utils.py b/posthog/utils.py index a8faf24b19e5c..18f271c4a5d56 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -1289,12 +1289,12 @@ async def wait_for_parallel_celery_group(task: Any, expires: Optional[datetime.d default_expires = datetime.timedelta(minutes=5) if not expires: - expires = datetime.datetime.now(tz=datetime.UTC) + default_expires + expires = datetime.datetime.now(tz=datetime.timezone.utc) + default_expires sleep_generator = sleep_time_generator() while not task.ready(): - if datetime.datetime.now(tz=datetime.UTC) > expires: + if datetime.datetime.now(tz=datetime.timezone.utc) > expires: child_states = [] child: AsyncResult children = task.children or [] diff --git a/posthog/warehouse/external_data_source/workspace.py b/posthog/warehouse/external_data_source/workspace.py index f7e80761eb1d3..0a9f9436bab47 100644 --- a/posthog/warehouse/external_data_source/workspace.py +++ b/posthog/warehouse/external_data_source/workspace.py @@ -27,7 +27,7 @@ def get_or_create_workspace(team_id: int): workspace_id = create_workspace(team_id) team.external_data_workspace_id = workspace_id # start tracking from now - team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.UTC) + team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.timezone.utc) team.save() return team.external_data_workspace_id diff --git a/production.Dockerfile b/production.Dockerfile index b64293dcb69a8..1e3eb2d11551f 100644 --- a/production.Dockerfile +++ b/production.Dockerfile @@ -83,7 +83,7 @@ RUN corepack enable && \ # # --------------------------------------------------------- # -FROM python:3.11.9-slim-bullseye AS posthog-build +FROM python:3.10.10-slim-bullseye AS posthog-build WORKDIR /code SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] @@ -99,11 +99,10 @@ RUN apt-get update && \ "libxmlsec1" \ "libxmlsec1-dev" \ "libffi-dev" \ - "zlib1g-dev" \ "pkg-config" \ && \ rm -rf /var/lib/apt/lists/* && \ - PIP_NO_BINARY=lxml,xmlsec pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime + pip install -r requirements.txt --compile --no-cache-dir --target=/python-runtime ENV PATH=/python-runtime/bin:$PATH \ PYTHONPATH=/python-runtime @@ -140,7 +139,104 @@ RUN apt-get update && \ # # --------------------------------------------------------- # -FROM unit:python3.11 +# Build a version of the unit docker image for python3.10 +# We can remove this step once we are on python3.11 +FROM unit:python3.11 as unit +FROM python:3.10-bullseye as unit-131-python-310 + +# copied from https://github.com/nginx/unit/blob/master/pkg/docker/Dockerfile.python3.11 +LABEL org.opencontainers.image.title="Unit (python3.10)" +LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
+LABEL org.opencontainers.image.url="https://unit.nginx.org" +LABEL org.opencontainers.image.source="https://github.com/nginx/unit" +LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" +LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers " +LABEL org.opencontainers.image.version="1.31.1" + +RUN set -ex \ + && savedAptMark="$(apt-mark showmanual)" \ + && apt-get update \ + && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \ + && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ + && mkdir -p /usr/src/unit \ + && cd /usr/src/unit \ + && hg clone -u 1.31.1-1 https://hg.nginx.org/unit \ + && cd unit \ + && NCPU="$(getconf _NPROCESSORS_ONLN)" \ + && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ + && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \ + && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \ + && CONFIGURE_ARGS_MODULES="--prefix=/usr \ + --statedir=/var/lib/unit \ + --control=unix:/var/run/control.unit.sock \ + --runstatedir=/var/run \ + --pid=/var/run/unit.pid \ + --logdir=/var/log \ + --log=/var/log/unit.log \ + --tmpdir=/var/tmp \ + --user=unit \ + --group=unit \ + --openssl \ + --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ + && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \ + --njs" \ + && make -j $NCPU -C pkg/contrib .njs \ + && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \ + && make clean \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd \ + && make clean \ + && /bin/true \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && make clean \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && cd \ + && rm -rf /usr/src/unit \ + && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \ + ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ + done \ + && apt-mark showmanual | xargs apt-mark auto > /dev/null \ + && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ + && /bin/true \ + && mkdir -p /var/lib/unit/ \ + && mkdir -p /docker-entrypoint.d/ \ + && groupadd --gid 998 unit \ + && useradd \ + --uid 998 \ + --gid unit \ + --no-create-home \ + --home /nonexistent \ + --comment "unit user" \ + --shell /bin/false \ + unit \ + && apt-get update \ + && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \ + && apt-get purge -y --auto-remove build-essential \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /requirements.apt \ + && ln -sf /dev/stdout /var/log/unit.log + +COPY --from=unit 
/usr/local/bin/docker-entrypoint.sh /usr/local/bin/ +COPY --from=unit /usr/share/unit/welcome/welcome.* /usr/share/unit/welcome/ + +STOPSIGNAL SIGTERM + +ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"] +EXPOSE 80 +CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] + +# +# --------------------------------------------------------- +# +FROM unit-131-python-310 WORKDIR /code SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] ENV PYTHONUNBUFFERED 1 @@ -169,7 +265,7 @@ RUN apt-get install -y --no-install-recommends \ # Install and use a non-root user. RUN groupadd -g 1000 posthog && \ - useradd -r -g posthog posthog && \ + useradd -u 999 -r -g posthog posthog && \ chown posthog:posthog /code USER posthog diff --git a/pyproject.toml b/pyproject.toml index e4861307d72ec..58de4e0f9f6f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] -requires-python = ">=3.11" +requires-python = ">=3.10" [tool.black] line-length = 120 -target-version = ['py311'] +target-version = ['py310'] [tool.isort] profile = "black" diff --git a/requirements-dev.in b/requirements-dev.in index a2413e07cf1a8..03858feaa89e9 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -11,7 +11,7 @@ -c requirements.txt -ruff~=0.4.10 +ruff~=0.4.3 mypy~=1.10.0 mypy-baseline~=0.7.0 mypy-extensions==1.0.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index c534a931f0c92..dbf468cd45bb2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -288,7 +288,8 @@ ruamel-yaml==0.18.6 # via prance ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.4.10 +ruff==0.4.3 + # via -r requirements-dev.in six==1.16.0 # via # -c requirements.txt diff --git a/requirements.in b/requirements.in index af1e1c39a260e..9a4dcaa36ee0d 100644 --- a/requirements.in +++ b/requirements.in @@ -29,7 +29,7 @@ django-redis==5.2.0 django-statsd==2.5.2 django-structlog==2.1.3 django-revproxy==0.12.0 -djangorestframework==3.15.1 +djangorestframework==3.14.0 djangorestframework-csv==2.1.1 djangorestframework-dataclasses==1.2.0 django-fernet-encrypted-fields==0.1.3 diff --git a/requirements.txt b/requirements.txt index e95a2b28ad5d3..dcb21290076b1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -198,7 +198,7 @@ django-structlog==2.1.3 # via -r requirements.in django-two-factor-auth==1.14.0 # via -r requirements.in -djangorestframework==3.15.1 +djangorestframework==3.14.0 # via # -r requirements.in # djangorestframework-csv @@ -475,6 +475,7 @@ pytz==2023.3 # via # -r requirements.in # clickhouse-driver + # djangorestframework # dlt # infi-clickhouse-orm # pandas diff --git a/unit.json.tpl b/unit.json.tpl index 42f23a75a0374..ef1ba4b3ffec6 100644 --- a/unit.json.tpl +++ b/unit.json.tpl @@ -39,7 +39,7 @@ }, "applications": { "posthog": { - "type": "python 3.11", + "type": "python 3.10", "processes": $NGINX_UNIT_APP_PROCESSES, "working_directory": "/code", "path": ".", @@ -51,7 +51,7 @@ } }, "metrics": { - "type": "python 3.11", + "type": "python 3.10", "processes": 1, "working_directory": "/code/bin", "path": ".",