fix(data-warehouse): Use better mechanics for resyncing a table (#24557)
Gilbert09 authored Aug 24, 2024
1 parent 145cacf commit 38d7a3b
Showing 1 changed file with 3 additions and 12 deletions.
15 changes: 3 additions & 12 deletions posthog/warehouse/api/external_data_schema.py
@@ -21,7 +21,6 @@
     trigger_external_data_workflow,
     unpause_external_data_schedule,
     cancel_external_data_workflow,
-    delete_data_import_folder,
 )
 from posthog.warehouse.models.external_data_schema import (
     filter_mysql_incremental_fields,
@@ -251,17 +250,9 @@ def resync(self, request: Request, *args: Any, **kwargs: Any):
         if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running":
             cancel_external_data_workflow(latest_running_job.workflow_id)
 
-        all_jobs = ExternalDataJob.objects.filter(
-            schema_id=instance.pk, team_id=instance.team_id, status="Completed"
-        ).all()
-
-        # Unnecessary to iterate for incremental jobs since they'll all by identified by the schema_id. Be over eager just to clear remnants
-        for job in all_jobs:
-            try:
-                delete_data_import_folder(job.folder_path())
-            except Exception as e:
-                logger.exception(f"Could not clean up data import folder: {job.folder_path()}", exc_info=e)
-                pass
+        source: ExternalDataSource = instance.source
+        source.job_inputs.update({"reset_pipeline": True})
+        source.save()
 
         try:
             trigger_external_data_workflow(instance)
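In short, the resync endpoint no longer deletes each completed job's import folder itself: it now flags the source with reset_pipeline in job_inputs and re-triggers the workflow, leaving any destructive cleanup to the import pipeline. Below is a minimal, self-contained sketch of that hand-off (not PostHog's actual worker code: Source, run_import, and wipe_destination_tables are hypothetical stand-ins; only the job_inputs["reset_pipeline"] contract comes from the diff above).

from dataclasses import dataclass, field
from typing import Any


@dataclass
class Source:
    # Stand-in for the ExternalDataSource model row.
    job_inputs: dict[str, Any] = field(default_factory=dict)

    def save(self) -> None:
        # Stand-in for persisting the row to the database.
        pass


def wipe_destination_tables(source: Source) -> None:
    # Hypothetical: the pipeline, not the API endpoint, clears old data once.
    print("dropping previously imported rows before a full re-sync")


def run_import(source: Source) -> None:
    # The resync endpoint sets {"reset_pipeline": True} and re-triggers the
    # workflow; the worker honours the flag exactly once, then clears it so
    # the next scheduled run is an ordinary sync.
    if source.job_inputs.get("reset_pipeline"):
        wipe_destination_tables(source)
        source.job_inputs.pop("reset_pipeline", None)
        source.save()
    # ...normal sync continues here...


run_import(Source(job_inputs={"reset_pipeline": True}))

Presumably, moving the reset out of the request path means the API call only records intent, so a failed cleanup no longer needs the per-job exception handling that the old loop carried in the view.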
