diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
index e6fd99f2340e..86098172de86 100644
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -2001,13 +2001,13 @@ def fail(message=job.info, exception=None):
         # Once datasets are collected, set the total dataset size (includes extra files)
         for dataset_assoc in job.output_datasets:
             dataset = dataset_assoc.dataset.dataset
-            if not dataset.purged:
-                # assume all datasets in a job get written to the same objectstore
-                quota_source_info = dataset.quota_source_info
-                collected_bytes += dataset.set_total_size()
-            else:
+            # assume all datasets in a job get written to the same objectstore
+            quota_source_info = dataset.quota_source_info
+            collected_bytes += dataset.set_total_size()
+            if dataset.purged:
                 # Purge, in case job wrote directly to object store
                 dataset.full_delete()
+                collected_bytes = 0

         user = job.user
         if user and collected_bytes > 0 and quota_source_info is not None and quota_source_info.use:
diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py
index 4880b83e7b96..1ea9fcf69855 100644
--- a/lib/galaxy/model/store/__init__.py
+++ b/lib/galaxy/model/store/__init__.py
@@ -665,9 +665,9 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs):
                         assert file_source_root
                         dataset_extra_files_path = os.path.join(file_source_root, dataset_extra_files_path)
                         persist_extra_files(self.object_store, dataset_extra_files_path, dataset_instance)
-                    # Don't trust serialized file size
-                    dataset_instance.dataset.file_size = None
-                    dataset_instance.dataset.set_total_size()  # update the filesize record in the database
+                    # Only trust file size if the dataset is purged. If we keep the data we should check the file size.
+                    dataset_instance.dataset.file_size = None
+                    dataset_instance.dataset.set_total_size()  # update the filesize record in the database
                 if dataset_instance.deleted:
                     dataset_instance.dataset.deleted = True