
Commit

Merge branch 'master' into dw-editor-variables
EDsCODE authored Dec 4, 2024
2 parents fc253af + 5e9ae64 commit 98c6eba
Showing 8 changed files with 1,348 additions and 1,558 deletions.

Large diffs are not rendered by default.

@@ -276,21 +276,23 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]:
             )
         )

-        if self._query.date_from:
+        query_date_from = self.query_date_range.date_from()
+        if query_date_from:
             exprs.append(
                 ast.CompareOperation(
                     op=ast.CompareOperationOp.GtEq,
                     left=ast.Field(chain=["s", "min_first_timestamp"]),
-                    right=ast.Constant(value=self.query_date_range.date_from()),
+                    right=ast.Constant(value=query_date_from),
                 )
             )

-        if self._query.date_to:
+        query_date_to = self.query_date_range.date_to()
+        if query_date_to:
             exprs.append(
                 ast.CompareOperation(
                     op=ast.CompareOperationOp.LtEq,
                     left=ast.Field(chain=["s", "min_first_timestamp"]),
-                    right=ast.Constant(value=self.query_date_range.date_to()),
+                    right=ast.Constant(value=query_date_to),
                 )
             )
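Note: this hunk reads each date bound once from `query_date_range` and only adds the predicate when the resolved value is truthy, instead of branching on the raw `self._query.date_from` / `date_to` fields. A minimal, self-contained sketch of that pattern, assuming the `posthog.hogql.ast` module used in the diff and a hypothetical `build_timestamp_predicates` helper (not the actual class method):

```python
from posthog.hogql import ast  # assumed import path for the HogQL AST used above


def build_timestamp_predicates(query_date_range) -> list[ast.CompareOperation]:
    """Sketch of the pattern above: resolve each bound once, skip it when it is None."""
    exprs: list[ast.CompareOperation] = []

    query_date_from = query_date_range.date_from()
    if query_date_from:
        exprs.append(
            ast.CompareOperation(
                op=ast.CompareOperationOp.GtEq,
                left=ast.Field(chain=["s", "min_first_timestamp"]),
                right=ast.Constant(value=query_date_from),
            )
        )

    query_date_to = query_date_range.date_to()
    if query_date_to:
        exprs.append(
            ast.CompareOperation(
                op=ast.CompareOperationOp.LtEq,
                left=ast.Field(chain=["s", "min_first_timestamp"]),
                right=ast.Constant(value=query_date_to),
            )
        )

    return exprs
```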

Large diffs are not rendered by default.

Large diffs are not rendered by default.

@@ -81,8 +81,8 @@ def create_event(
             properties=properties,
         )

-    def _filter_recordings_by(self, recordings_filter: dict) -> SessionRecordingQueryResult:
-        the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter)
+    def _filter_recordings_by(self, recordings_filter: dict | None = None) -> SessionRecordingQueryResult:
+        the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter or {})
         session_recording_list_instance = SessionRecordingListFromFilters(
             filter=the_filter, team=self.team, hogql_query_modifiers=None
         )
@@ -784,6 +784,26 @@ def test_ttl_days(self):
         with freeze_time("2023-09-05T12:00:01Z"):
             assert ttl_days(self.team) == 35

+    @snapshot_clickhouse_queries
+    def test_listing_ignores_future_replays(self):
+        with freeze_time("2023-08-29T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="29th Aug")
+
+        with freeze_time("2023-09-01T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="1st-sep")
+
+        with freeze_time("2023-09-02T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="2nd-sep")
+
+        with freeze_time("2023-09-03T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="3rd-sep")
+
+        with freeze_time("2023-08-30T12:00:01Z"):
+            recordings = self._filter_recordings_by()
+
+        # recordings in the future don't show
+        assert [s["session_id"] for s in recordings.results] == ["29th Aug"]
+
     @snapshot_clickhouse_queries
     def test_filter_on_session_ids(self):
         user = "test_session_ids-user"
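Note: the new test calls the helper with no arguments, which is what the `dict | None = None` signature change above enables; combined with `recordings_filter or {}` it also avoids the classic mutable-default-argument pitfall. A minimal sketch of that idiom, independent of the PostHog classes (the `filter_recordings` name is hypothetical):

```python
def filter_recordings(recordings_filter: dict | None = None) -> dict:
    """Sketch: accept an optional filter dict without using a mutable default."""
    # `None` is the sentinel; fall back to a fresh empty dict on each call.
    effective_filter = recordings_filter or {}
    return {"filter": effective_filter}


# Both call styles now work:
filter_recordings()                                # no filter at all
filter_recordings({"session_ids": ["29th Aug"]})   # explicit filter
```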
@@ -773,6 +773,26 @@ def test_ttl_days(self):
         with freeze_time("2023-09-05T12:00:01Z"):
             assert ttl_days(self.team) == 35

+    @snapshot_clickhouse_queries
+    def test_listing_ignores_future_replays(self):
+        with freeze_time("2023-08-29T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="29th Aug")
+
+        with freeze_time("2023-09-01T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="1st-sep")
+
+        with freeze_time("2023-09-02T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="2nd-sep")
+
+        with freeze_time("2023-09-03T12:00:01Z"):
+            produce_replay_summary(team_id=self.team.id, session_id="3rd-sep")
+
+        with freeze_time("2023-08-30T12:00:01Z"):
+            recordings = self._filter_recordings_by()
+
+        # recordings in the future don't show
+        assert [s["session_id"] for s in recordings.results] == ["29th Aug"]
+
     @snapshot_clickhouse_queries
     def test_filter_on_session_ids(self):
         user = "test_session_ids-user"
12 changes: 7 additions & 5 deletions posthog/temporal/batch_exports/redshift_batch_export.py
@@ -32,6 +32,11 @@
     start_batch_export_run,
     start_produce_batch_export_record_batches,
 )
+from posthog.temporal.batch_exports.heartbeat import (
+    BatchExportRangeHeartbeatDetails,
+    DateRange,
+    should_resume_from_activity_heartbeat,
+)
 from posthog.temporal.batch_exports.metrics import get_rows_exported_metric
 from posthog.temporal.batch_exports.postgres_batch_export import (
     Fields,
@@ -47,11 +52,6 @@
 from posthog.temporal.common.clickhouse import get_client
 from posthog.temporal.common.heartbeat import Heartbeater
 from posthog.temporal.common.logger import configure_temporal_worker_logger
-from posthog.temporal.batch_exports.heartbeat import (
-    BatchExportRangeHeartbeatDetails,
-    DateRange,
-    should_resume_from_activity_heartbeat,
-)


 def remove_escaped_whitespace_recursive(value):
@@ -715,6 +715,8 @@ async def run(self, inputs: RedshiftBatchExportInputs):
                 "StringDataRightTruncation",
                 # Raised by our PostgreSQL client when failing to connect after several attempts.
                 "PostgreSQLConnectionError",
+                # Column missing in Redshift, likely the schema was altered.
+                "UndefinedColumn",
             ],
             finish_inputs=finish_inputs,
         )
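Note: the strings in this list name exception types the export run should not retry, so adding "UndefinedColumn" makes a missing destination column (for example after a schema change in Redshift) fail fast instead of retrying. A rough, hypothetical sketch of a name-based non-retryable check; the actual Temporal activity wrapper in PostHog may work differently:

```python
NON_RETRYABLE_ERROR_TYPES = [
    "StringDataRightTruncation",
    "PostgreSQLConnectionError",
    "UndefinedColumn",  # e.g. the destination table no longer has a column we try to insert into
]


def is_non_retryable(error: Exception) -> bool:
    """Sketch: match on the exception class name, as the list above suggests."""
    return type(error).__name__ in NON_RETRYABLE_ERROR_TYPES


class UndefinedColumn(Exception):
    """Stand-in for the driver error raised when a column is missing in Redshift."""


assert is_non_retryable(UndefinedColumn("column 'foo' does not exist"))
assert not is_non_retryable(TimeoutError("transient network issue"))
```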
4 changes: 3 additions & 1 deletion posthog/temporal/batch_exports/s3_batch_export.py
@@ -5,6 +5,7 @@
 import datetime as dt
 import io
 import json
+import operator
 import posixpath
 import typing

@@ -285,12 +286,13 @@ async def complete(self) -> str:
         if self.is_upload_in_progress() is False:
             raise NoUploadInProgressError()

+        sorted_parts = sorted(self.parts, key=operator.itemgetter("PartNumber"))
         async with self.s3_client() as s3_client:
             response = await s3_client.complete_multipart_upload(
                 Bucket=self.bucket_name,
                 Key=self.key,
                 UploadId=self.upload_id,
-                MultipartUpload={"Parts": self.parts},
+                MultipartUpload={"Parts": sorted_parts},
             )

         self.upload_id = None
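Note: S3's CompleteMultipartUpload requires the parts list in ascending PartNumber order, so the parts are now sorted before the call instead of being passed in whatever order the uploads finished. A small standalone illustration of the `operator.itemgetter` sort used here, with made-up part data:

```python
import operator

# Parts may complete out of order when uploaded concurrently.
parts = [
    {"PartNumber": 3, "ETag": '"c3"'},
    {"PartNumber": 1, "ETag": '"a1"'},
    {"PartNumber": 2, "ETag": '"b2"'},
]

# CompleteMultipartUpload expects ascending PartNumber order; otherwise S3
# rejects the request (InvalidPartOrder).
sorted_parts = sorted(parts, key=operator.itemgetter("PartNumber"))
assert [p["PartNumber"] for p in sorted_parts] == [1, 2, 3]
```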
