Commit

chore: Update pyproject.toml for Python 3.11 (#23181)
webjunkie authored and timgl committed Jun 27, 2024
1 parent 6a93be3 · commit 7af5a57
Showing 55 changed files with 184 additions and 191 deletions.
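The changes below are mechanical: Python 3.11 added datetime.UTC as a module-level alias for datetime.timezone.utc, and this commit moves every call site to the shorter spelling. A minimal sketch of the equivalence, using only the standard library and nothing PostHog-specific:

```python
import datetime as dt
from datetime import datetime, UTC  # UTC requires Python 3.11+

# UTC is the very same singleton as timezone.utc, so behavior is unchanged.
assert dt.UTC is dt.timezone.utc

# Timezone-aware "now", identical to datetime.now(dt.timezone.utc).
now = datetime.now(UTC)
print(now.tzinfo)       # UTC
print(now.isoformat())  # e.g. 2024-06-27T12:34:56.789012+00:00
```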
6 changes: 3 additions & 3 deletions ee/clickhouse/views/test/test_clickhouse_experiments.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from django.core.cache import cache
 from flaky import flaky
 from rest_framework import status

@@ -1601,8 +1601,8 @@ def test_create_exposure_cohort_for_experiment_with_custom_action_filters_exposu
         explicit_datetime = parser.isoparse(target_filter["explicit_datetime"])

         self.assertTrue(
-            explicit_datetime <= datetime.now(timezone.utc) - timedelta(days=5)
-            and explicit_datetime >= datetime.now(timezone.utc) - timedelta(days=5, hours=1)
+            explicit_datetime <= datetime.now(UTC) - timedelta(days=5)
+            and explicit_datetime >= datetime.now(UTC) - timedelta(days=5, hours=1)
         )

         cohort_id = cohort["id"]

@@ -1,4 +1,4 @@
-from datetime import timezone, datetime
+from datetime import datetime, UTC

 from dateutil.parser import isoparse

@@ -23,7 +23,7 @@ def test_format_dates_as_millis_since_start(self) -> None:
                 ["$pageview", isoparse("2021-01-01T00:00:02Z")],
             ],
         ),
-        datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
+        datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC),
         )
         assert processed.columns == ["event", "milliseconds_since_start"]
         assert processed.results == [["$pageview", 0], ["$pageview", 1000], ["$pageview", 2000]]

@@ -1,5 +1,5 @@
 import gzip
-from datetime import timedelta, datetime, timezone
+from datetime import timedelta, datetime, UTC
 from secrets import token_urlsafe
 from unittest.mock import patch, MagicMock
 from uuid import uuid4

@@ -84,7 +84,7 @@ def test_can_build_different_object_storage_paths(self) -> None:

     def test_persists_recording_from_blob_ingested_storage(self):
         with self.settings(OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER=TEST_BUCKET):
-            two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=timezone.utc)
+            two_minutes_ago = (datetime.now() - timedelta(minutes=2)).replace(tzinfo=UTC)

             with freeze_time(two_minutes_ago):
                 session_id = f"test_persists_recording_from_blob_ingested_storage-s1-{uuid4()}"

6 changes: 3 additions & 3 deletions ee/session_recordings/test/test_session_recording_playlist.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from unittest import mock
 from unittest.mock import MagicMock, patch
 from uuid import uuid4

@@ -187,7 +187,7 @@ def test_get_pinned_recordings_for_playlist(self, mock_copy_objects: MagicMock)

         session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}"
         session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}"
-        three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc)
+        three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC)

         produce_replay_summary(
             team_id=self.team.id,

@@ -242,7 +242,7 @@ def test_fetch_playlist_recordings(self, mock_copy_objects: MagicMock, mock_list

         session_one = f"test_fetch_playlist_recordings-session1-{uuid4()}"
         session_two = f"test_fetch_playlist_recordings-session2-{uuid4()}"
-        three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=timezone.utc)
+        three_days_ago = (datetime.now() - timedelta(days=3)).replace(tzinfo=UTC)

         for session_id in [session_one, session_two]:
             produce_replay_summary(

2 changes: 1 addition & 1 deletion ee/tasks/subscriptions/subscription_utils.py
@@ -56,7 +56,7 @@ def generate_assets(
     # Wait for all assets to be exported
     tasks = [exporter.export_asset.si(asset.id) for asset in assets]
     # run them one after the other, so we don't exhaust celery workers
-    exports_expire = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(
+    exports_expire = datetime.datetime.now(tz=datetime.UTC) + datetime.timedelta(
         minutes=settings.PARALLEL_ASSET_GENERATION_MAX_TIMEOUT_MINUTES
     )
     parallel_job = chain(*tasks).apply_async(expires=exports_expire, retry=False)

4 changes: 1 addition & 3 deletions posthog/api/app_metrics.py
@@ -90,9 +90,7 @@ def get_batch_export_runs_app_metrics_queryset(self, batch_export_id: str):
         after = self.request.GET.get("date_from", "-30d")
         before = self.request.GET.get("date_to", None)
         after_datetime = relative_date_parse(after, self.team.timezone_info)
-        before_datetime = (
-            relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.timezone.utc)
-        )
+        before_datetime = relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.UTC)
         date_range = (after_datetime, before_datetime)
         runs = (
             BatchExportRun.objects.select_related("batch_export__destination")

2 changes: 1 addition & 1 deletion posthog/api/authentication.py
@@ -290,7 +290,7 @@ def create(self, validated_data):
             user = None

         if user:
-            user.requested_password_reset_at = datetime.datetime.now(datetime.timezone.utc)
+            user.requested_password_reset_at = datetime.datetime.now(datetime.UTC)
             user.save()
             token = password_reset_token_generator.make_token(user)
             send_password_reset(user.id, token)

10 changes: 5 additions & 5 deletions posthog/api/test/batch_exports/test_log_entry.py
@@ -38,7 +38,7 @@ def create_batch_export_log_entry(
             "log_source": "batch_exports",
             "log_source_id": batch_export_id,
             "instance_id": run_id,
-            "timestamp": dt.datetime.now(dt.timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f"),
+            "timestamp": dt.datetime.now(dt.UTC).strftime("%Y-%m-%d %H:%M:%S.%f"),
             "level": level,
             "message": message,
         },

@@ -147,7 +147,7 @@ def test_log_level_filter(batch_export, team, level):

     results = []
     timeout = 10
-    start = dt.datetime.now(dt.timezone.utc)
+    start = dt.datetime.now(dt.UTC)

     while not results:
         results = fetch_batch_export_log_entries(
@@ -157,7 +157,7 @@
             after=dt.datetime(2023, 9, 22, 0, 59, 59),
             before=dt.datetime(2023, 9, 22, 1, 0, 1),
         )
-        if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout):
+        if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout):
            break

     results.sort(key=lambda record: record.message)

@@ -195,7 +195,7 @@ def test_log_level_filter_with_lowercase(batch_export, team, level):

     results = []
     timeout = 10
-    start = dt.datetime.now(dt.timezone.utc)
+    start = dt.datetime.now(dt.UTC)

     while not results:
         results = fetch_batch_export_log_entries(
@@ -205,7 +205,7 @@
             after=dt.datetime(2023, 9, 22, 0, 59, 59),
             before=dt.datetime(2023, 9, 22, 1, 0, 1),
         )
-        if (dt.datetime.now(dt.timezone.utc) - start) > dt.timedelta(seconds=timeout):
+        if (dt.datetime.now(dt.UTC) - start) > dt.timedelta(seconds=timeout):
            break

     results.sort(key=lambda record: record.message)

4 changes: 2 additions & 2 deletions posthog/api/test/batch_exports/test_pause.py
@@ -397,8 +397,8 @@ def test_unpause_can_trigger_a_backfill(client: HttpClient):

     data = get_batch_export_ok(client, team.pk, batch_export_id)
     assert batch_export["last_updated_at"] < data["last_updated_at"]
-    start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc)
-    end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.timezone.utc)
+    start_at = dt.datetime.strptime(data["last_paused_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC)
+    end_at = dt.datetime.strptime(data["last_updated_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=dt.UTC)
     mock_backfill.assert_called_once_with(
         ANY,
         batch_export["id"],

4 changes: 2 additions & 2 deletions posthog/api/test/batch_exports/test_update.py
@@ -94,8 +94,8 @@ def test_can_put_config(client: HttpClient):
     new_schedule = describe_schedule(temporal, batch_export["id"])
     assert old_schedule.schedule.spec.intervals[0].every != new_schedule.schedule.spec.intervals[0].every
     assert new_schedule.schedule.spec.intervals[0].every == dt.timedelta(days=1)
-    assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.timezone.utc)
-    assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.timezone.utc)
+    assert new_schedule.schedule.spec.start_at == dt.datetime(2022, 7, 19, 0, 0, 0, tzinfo=dt.UTC)
+    assert new_schedule.schedule.spec.end_at == dt.datetime(2023, 7, 20, 0, 0, 0, tzinfo=dt.UTC)

     decoded_payload = async_to_sync(codec.decode)(new_schedule.schedule.action.args)
     args = json.loads(decoded_payload[0].data)

4 changes: 2 additions & 2 deletions posthog/api/test/test_app_metrics.py
@@ -116,7 +116,7 @@ def test_retrieve_batch_export_runs_app_metrics(self):

         temporal = sync_connect()

-        now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc)
+        now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC)
         with start_test_worker(temporal):
             response = create_batch_export_ok(
                 self.client,

@@ -213,7 +213,7 @@ def test_retrieve_batch_export_runs_app_metrics_defaults_to_zero(self):
         }

         temporal = sync_connect()
-        now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc)
+        now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.UTC)

         with start_test_worker(temporal):
             response = create_batch_export_ok(

4 changes: 2 additions & 2 deletions posthog/api/test/test_capture.py
@@ -13,7 +13,7 @@
 import structlog
 import zlib
 from datetime import datetime, timedelta
-from datetime import timezone as tz
+from datetime import UTC
 from django.http import HttpResponse
 from django.test.client import MULTIPART_CONTENT, Client
 from django.utils import timezone

@@ -1305,7 +1305,7 @@ def test_js_library_underscore_sent_at(self, kafka_produce):
         # right time sent as sent_at to process_event

         sent_at = datetime.fromisoformat(arguments["sent_at"])
-        self.assertEqual(sent_at.tzinfo, tz.utc)
+        self.assertEqual(sent_at.tzinfo, UTC)

         timediff = sent_at.timestamp() - tomorrow_sent_at.timestamp()
         self.assertLess(abs(timediff), 1)

6 changes: 3 additions & 3 deletions posthog/async_migrations/test/test_utils.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from unittest.mock import patch

 import pytest

@@ -49,7 +49,7 @@ def test_process_error(self, _):

         sm.refresh_from_db()
         self.assertEqual(sm.status, MigrationStatus.Errored)
-        self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1))
+        self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1))
         errors = AsyncMigrationError.objects.filter(async_migration=sm).order_by("created_at")
         self.assertEqual(errors.count(), 2)
         self.assertEqual(errors[0].description, "some error")

@@ -81,7 +81,7 @@ def test_complete_migration(self):
         sm.refresh_from_db()

         self.assertEqual(sm.status, MigrationStatus.CompletedSuccessfully)
-        self.assertGreater(sm.finished_at, datetime.now(timezone.utc) - timedelta(hours=1))
+        self.assertGreater(sm.finished_at, datetime.now(UTC) - timedelta(hours=1))

         self.assertEqual(sm.progress, 100)
         errors = AsyncMigrationError.objects.filter(async_migration=sm)

6 changes: 3 additions & 3 deletions posthog/batch_exports/http.py
@@ -76,11 +76,11 @@ def validate_date_input(date_input: Any, team: Team | None = None) -> dt.datetim

     if parsed.tzinfo is None:
         if team:
-            parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.timezone.utc)
+            parsed = parsed.replace(tzinfo=team.timezone_info).astimezone(dt.UTC)
         else:
-            parsed = parsed.replace(tzinfo=dt.timezone.utc)
+            parsed = parsed.replace(tzinfo=dt.UTC)
     else:
-        parsed = parsed.astimezone(dt.timezone.utc)
+        parsed = parsed.astimezone(dt.UTC)

     return parsed

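A side note on the three branches above: naive timestamps are localized first (to the team's timezone when one is available, otherwise UTC), while aware timestamps are only converted, so every path returns a UTC datetime. A hypothetical walk-through, with ZoneInfo("Europe/Berlin") standing in for team.timezone_info:

```python
import datetime as dt
from zoneinfo import ZoneInfo

team_tz = ZoneInfo("Europe/Berlin")  # stands in for team.timezone_info
naive = dt.datetime(2023, 9, 22, 12, 0)

# Naive input with a team: interpret in the team's zone, then convert (CEST = UTC+2 here).
print(naive.replace(tzinfo=team_tz).astimezone(dt.UTC))  # 2023-09-22 10:00:00+00:00

# Naive input without a team: assume it was already UTC.
print(naive.replace(tzinfo=dt.UTC))                      # 2023-09-22 12:00:00+00:00

# Aware input: convert only, never reinterpret.
aware = dt.datetime(2023, 9, 22, 12, 0, tzinfo=team_tz)
print(aware.astimezone(dt.UTC))                          # 2023-09-22 10:00:00+00:00
```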
4 changes: 2 additions & 2 deletions posthog/batch_exports/service.py
@@ -257,7 +257,7 @@ def pause_batch_export(temporal: Client, batch_export_id: str, note: str | None
         raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc

     batch_export.paused = True
-    batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc)
+    batch_export.last_paused_at = dt.datetime.now(dt.UTC)
     batch_export.save()

     return True

@@ -285,7 +285,7 @@ async def apause_batch_export(temporal: Client, batch_export_id: str, note: str
         raise BatchExportServiceRPCError(f"BatchExport {batch_export_id} could not be paused") from exc

     batch_export.paused = True
-    batch_export.last_paused_at = dt.datetime.now(dt.timezone.utc)
+    batch_export.last_paused_at = dt.datetime.now(dt.UTC)
     await batch_export.asave()

     return True

6 changes: 3 additions & 3 deletions posthog/clickhouse/client/execute_async.py
@@ -154,7 +154,7 @@ def execute_process_query(

     query_status.error = True  # Assume error in case nothing below ends up working

-    pickup_time = datetime.datetime.now(datetime.timezone.utc)
+    pickup_time = datetime.datetime.now(datetime.UTC)
     if query_status.start_time:
         wait_duration = (pickup_time - query_status.start_time) / datetime.timedelta(seconds=1)
         QUERY_WAIT_TIME.labels(team=team_id).observe(wait_duration)
@@ -173,7 +173,7 @@ def execute_process_query(
     query_status.complete = True
     query_status.error = False
     query_status.results = results
-    query_status.end_time = datetime.datetime.now(datetime.timezone.utc)
+    query_status.end_time = datetime.datetime.now(datetime.UTC)
     query_status.expiration_time = query_status.end_time + datetime.timedelta(seconds=manager.STATUS_TTL_SECONDS)
     process_duration = (query_status.end_time - pickup_time) / datetime.timedelta(seconds=1)
     QUERY_PROCESS_TIME.labels(team=team_id).observe(process_duration)

@@ -232,7 +232,7 @@ def enqueue_process_query_task(
         return manager.get_query_status()

     # Immediately set status, so we don't have race with celery
-    query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.timezone.utc))
+    query_status = QueryStatus(id=query_id, team_id=team.id, start_time=datetime.datetime.now(datetime.UTC))
     manager.store_query_status(query_status)

     if _test_only_bypass_celery:

4 changes: 2 additions & 2 deletions posthog/clickhouse/test/test_person_overrides.py
@@ -1,5 +1,5 @@
 import json
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from time import sleep
 from typing import TypedDict
 from uuid import UUID, uuid4

@@ -124,7 +124,7 @@ def test_person_overrides_dict():
         "override_person_id": uuid4(),
         "merged_at": datetime.fromisoformat("2020-01-02T00:00:00+00:00"),
         "oldest_event": datetime.fromisoformat("2020-01-01T00:00:00+00:00"),
-        "created_at": datetime.now(timezone.utc),
+        "created_at": datetime.now(UTC),
         "version": 1,
     }

4 changes: 1 addition & 3 deletions posthog/demo/matrix/models.py
@@ -106,9 +106,7 @@ class SimEvent:
     group4_created_at: Optional[dt.datetime] = None

     def __str__(self) -> str:
-        separator = (
-            "-" if self.timestamp < dt.datetime.now(dt.timezone.utc) else "+"
-        )  # Future events are denoted by a '+'
+        separator = "-" if self.timestamp < dt.datetime.now(dt.UTC) else "+"  # Future events are denoted by a '+'
         display = f"{self.timestamp} {separator} {self.event} # {self.distinct_id}"
         if current_url := self.properties.get("$current_url"):
             display += f" @ {current_url}"

4 changes: 2 additions & 2 deletions posthog/hogql/test/test_resolver.py
@@ -1,4 +1,4 @@
-from datetime import timezone, datetime, date
+from datetime import datetime, date, UTC
 from typing import Optional, cast
 import pytest
 from django.test import override_settings

@@ -97,7 +97,7 @@ def test_resolve_constant_type(self):
             "SELECT 1, 'boo', true, 1.1232, null, {date}, {datetime}, {uuid}, {array}, {array12}, {tuple}",
             placeholders={
                 "date": ast.Constant(value=date(2020, 1, 10)),
-                "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc)),
+                "datetime": ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=UTC)),
                 "uuid": ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000")),
                 "array": ast.Constant(value=[]),
                 "array12": ast.Constant(value=[1, 2]),

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import datetime, UTC
 from typing import Optional, cast

 from freezegun import freeze_time

@@ -70,7 +70,7 @@ def _get_date_where_sql(self, **kwargs):
     def _get_utc_string(self, dt: datetime | None) -> str | None:
         if dt is None:
             return None
-        return dt.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ")
+        return dt.astimezone(UTC).strftime("%Y-%m-%d %H:%M:%SZ")

     def test_time_frame(self):
         self.team.timezone = "Europe/Berlin"

8 changes: 4 additions & 4 deletions posthog/hogql_queries/query_runner.py
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from enum import IntEnum
 from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard
 from zoneinfo import ZoneInfo

@@ -445,7 +445,7 @@ def handle_cache_and_async_logic(
         elif execution_mode == ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE:
             # We're allowed to calculate if the cache is older than 24 hours, but we'll do it asynchronously
             assert isinstance(cached_response, CachedResponse)
-            if datetime.now(timezone.utc) - cached_response.last_refresh > EXTENDED_CACHE_AGE:
+            if datetime.now(UTC) - cached_response.last_refresh > EXTENDED_CACHE_AGE:
                 query_status_response = self.enqueue_async_calculation(cache_key=cache_key, user=user)
                 cached_response.query_status = query_status_response.query_status
                 return cached_response

@@ -490,8 +490,8 @@ def run(
         fresh_response_dict = {
             **self.calculate().model_dump(),
             "is_cached": False,
-            "last_refresh": datetime.now(timezone.utc),
-            "next_allowed_client_refresh": datetime.now(timezone.utc) + self._refresh_frequency(),
+            "last_refresh": datetime.now(UTC),
+            "next_allowed_client_refresh": datetime.now(UTC) + self._refresh_frequency(),
             "cache_key": cache_key,
             "timezone": self.team.timezone,
         }

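Worth noting for the staleness check above: datetime.now(UTC) is timezone-aware, and subtracting two aware datetimes is what makes the comparison against EXTENDED_CACHE_AGE valid. A small sketch, with the 24-hour value assumed from the comment in the diff rather than read from the source:

```python
import datetime as dt

EXTENDED_CACHE_AGE = dt.timedelta(hours=24)  # assumed from the comment above

last_refresh = dt.datetime.now(dt.UTC) - dt.timedelta(hours=30)
print(dt.datetime.now(dt.UTC) - last_refresh > EXTENDED_CACHE_AGE)  # True

# Mixing aware and naive datetimes raises TypeError, so an aware "now" is required:
# dt.datetime.now() - last_refresh  ->  TypeError (offset-naive vs offset-aware)
```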
4 changes: 2 additions & 2 deletions posthog/jwt.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, UTC
 from enum import Enum
 from typing import Any

@@ -23,7 +23,7 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie
     encoded_jwt = jwt.encode(
         {
             **payload,
-            "exp": datetime.now(tz=timezone.utc) + expiry_delta,
+            "exp": datetime.now(tz=UTC) + expiry_delta,
             "aud": audience.value,
         },
         settings.SECRET_KEY,

@@ -116,7 +116,7 @@ def handle(self, *args, **options):

         if options.get("backfill_batch_export", False) and dry_run is False:
             client = sync_connect()
-            end_at = dt.datetime.now(dt.timezone.utc)
+            end_at = dt.datetime.now(dt.UTC)
             start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1))
             backfill_export(
                 client,

(The remaining changed files were not loaded in this view.)
