diff --git a/posthog/temporal/workflows/bigquery_batch_export.py b/posthog/temporal/workflows/bigquery_batch_export.py
index 30d593f9c8adc..a3c8998554116 100644
--- a/posthog/temporal/workflows/bigquery_batch_export.py
+++ b/posthog/temporal/workflows/bigquery_batch_export.py
@@ -261,10 +261,10 @@ async def run(self, inputs: BigQueryBatchExportInputs):
 
         except exceptions.ActivityError as e:
             if isinstance(e.cause, exceptions.CancelledError):
-                logger.exception("BigQuery BatchExport was cancelled.", exc_info=e)
+                logger.error("BigQuery BatchExport was cancelled.")
                 update_inputs.status = "Cancelled"
             else:
-                logger.exception("BigQuery BatchExport failed.", exc_info=e)
+                logger.exception("BigQuery BatchExport failed.", exc_info=e.cause)
                 update_inputs.status = "Failed"
 
             update_inputs.latest_error = str(e.cause)
diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py
index f1354bd29ba9a..e6eb2a6de4228 100644
--- a/posthog/temporal/workflows/postgres_batch_export.py
+++ b/posthog/temporal/workflows/postgres_batch_export.py
@@ -260,10 +260,10 @@ async def run(self, inputs: PostgresBatchExportInputs):
 
         except exceptions.ActivityError as e:
             if isinstance(e.cause, exceptions.CancelledError):
-                logger.exception("Postgres BatchExport was cancelled.", exc_info=e)
+                logger.error("Postgres BatchExport was cancelled.")
                 update_inputs.status = "Cancelled"
             else:
-                logger.exception("Postgres BatchExport failed.", exc_info=e)
+                logger.exception("Postgres BatchExport failed.", exc_info=e.cause)
                 update_inputs.status = "Failed"
 
             update_inputs.latest_error = str(e.cause)
diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py
index a07873605a4b8..bc5eb686e3b79 100644
--- a/posthog/temporal/workflows/s3_batch_export.py
+++ b/posthog/temporal/workflows/s3_batch_export.py
@@ -487,10 +487,10 @@ async def run(self, inputs: S3BatchExportInputs):
 
         except exceptions.ActivityError as e:
             if isinstance(e.cause, exceptions.CancelledError):
-                logger.exception("S3 BatchExport was cancelled.", exc_info=e)
+                logger.error("S3 BatchExport was cancelled.")
                 update_inputs.status = "Cancelled"
             else:
-                logger.exception("S3 BatchExport failed.", exc_info=e)
+                logger.exception("S3 BatchExport failed.", exc_info=e.cause)
                 update_inputs.status = "Failed"
 
             update_inputs.latest_error = str(e.cause)
diff --git a/posthog/temporal/workflows/snowflake_batch_export.py b/posthog/temporal/workflows/snowflake_batch_export.py
index 226b00ecf2ffa..247c9ba152b0e 100644
--- a/posthog/temporal/workflows/snowflake_batch_export.py
+++ b/posthog/temporal/workflows/snowflake_batch_export.py
@@ -350,10 +350,10 @@ async def run(self, inputs: SnowflakeBatchExportInputs):
 
         except exceptions.ActivityError as e:
             if isinstance(e.cause, exceptions.CancelledError):
-                logger.exception("Snowflake BatchExport was cancelled.", exc_info=e)
+                logger.error("Snowflake BatchExport was cancelled.")
                 update_inputs.status = "Cancelled"
             else:
-                logger.exception("Snowflake BatchExport failed.", exc_info=e)
+                logger.exception("Snowflake BatchExport failed.", exc_info=e.cause)
                 update_inputs.status = "Failed"
 
             update_inputs.latest_error = str(e.cause)
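
All four workflows apply the same two-part change: a cancellation is an expected, user-initiated outcome, so it is now logged at ERROR level without a stack trace, while a genuine failure keeps its traceback but attaches the underlying cause (e.cause) instead of the ActivityError wrapper, which only reports that some activity failed. A minimal sketch of the shared pattern follows, using temporalio.exceptions as the workflows do; run_insert_activity and update_inputs are hypothetical stand-ins for each workflow's activity invocation and run-status record, not the actual PostHog helpers.

import logging

from temporalio import exceptions

logger = logging.getLogger(__name__)


async def run(run_insert_activity, update_inputs) -> None:
    """Sketch of the error handling each batch export workflow now shares."""
    try:
        await run_insert_activity()
    except exceptions.ActivityError as e:
        if isinstance(e.cause, exceptions.CancelledError):
            # Cancellation is an expected outcome, not a crash: log it at
            # ERROR level without attaching a stack trace.
            logger.error("BatchExport was cancelled.")
            update_inputs.status = "Cancelled"
        else:
            # Real failures keep a traceback, but of the underlying cause
            # rather than the ActivityError wrapper, which only reports
            # that some activity failed.
            logger.exception("BatchExport failed.", exc_info=e.cause)
            update_inputs.status = "Failed"

        # Either way, record the cause as the run's latest error.
        update_inputs.latest_error = str(e.cause)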