diff --git a/lib/galaxy/objectstore/azure_blob.py b/lib/galaxy/objectstore/azure_blob.py
index 2253854093c2..31c0c8b3fa33 100644
--- a/lib/galaxy/objectstore/azure_blob.py
+++ b/lib/galaxy/objectstore/azure_blob.py
@@ -301,25 +301,37 @@ def _delete(self, obj, entire_dir=False, **kwargs):
             # but requires iterating through each individual blob in Azure and deleing it.
             if entire_dir and extra_dir:
                 shutil.rmtree(self._get_cache_path(rel_path), ignore_errors=True)
-                blobs = self.service.get_container_client(self.container_name).list_blobs(name_starts_with=rel_path)
-                for blob in blobs:
-                    log.debug("Deleting from Azure: %s", blob)
-                    self._blob_client(blob.name).delete_blob()
-                return True
+                return self._delete_remote_all(rel_path)
             else:
                 # Delete from cache first
                 unlink(self._get_cache_path(rel_path), ignore_errors=True)
                 # Delete from S3 as well
                 if self._exists_remotely(rel_path):
                     log.debug("Deleting from Azure: %s", rel_path)
-                    self._blob_client(rel_path).delete_blob()
-                    return True
-        except AzureHttpError:
-            log.exception("Could not delete blob '%s' from Azure", rel_path)
+                    return self._delete_existing_remote(rel_path)
         except OSError:
             log.exception("%s delete error", self._get_filename(obj, **kwargs))
         return False
+
+    def _delete_remote_all(self, rel_path: str) -> bool:
+        try:
+            blobs = self.service.get_container_client(self.container_name).list_blobs(name_starts_with=rel_path)
+            for blob in blobs:
+                log.debug("Deleting from Azure: %s", blob)
+                self._blob_client(blob.name).delete_blob()
+            return True
+        except AzureHttpError:
+            log.exception("Could not delete blob '%s' from Azure", rel_path)
+            return False
+
+    def _delete_existing_remote(self, rel_path: str) -> bool:
+        try:
+            self._blob_client(rel_path).delete_blob()
+            return True
+        except AzureHttpError:
+            log.exception("Could not delete blob '%s' from Azure", rel_path)
+            return False

     def _get_object_url(self, obj, **kwargs):
         if self._exists(obj, **kwargs):
             rel_path = self._construct_path(obj, **kwargs)
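Reviewer note: the extracted Azure helpers are small enough to unit-test in isolation, and catching `AzureHttpError` inside them keeps `_delete`'s remaining `except OSError` focused on cache-file errors. A minimal test sketch, not part of this patch: it assumes the helpers live on Galaxy's `AzureBlobObjectStore` and bypasses `__init__` so no Azure credentials or container config are needed; the blob client is a `unittest.mock.Mock` stand-in and the dataset path is made up.

```python
# Sketch only: AzureBlobObjectStore and the azure.common.AzureHttpError import
# mirror what azure_blob.py already uses; everything else here is a test double.
from unittest import mock

from azure.common import AzureHttpError

from galaxy.objectstore.azure_blob import AzureBlobObjectStore


def test_delete_existing_remote_returns_bool_instead_of_raising():
    store = object.__new__(AzureBlobObjectStore)  # skip __init__/config
    blob_client = mock.Mock()
    store._blob_client = mock.Mock(return_value=blob_client)

    # A clean remote delete surfaces as True to _delete's caller.
    assert store._delete_existing_remote("000/dataset_1.dat") is True
    blob_client.delete_blob.assert_called_once()

    # An Azure failure is logged and reported as False, not propagated.
    blob_client.delete_blob.side_effect = AzureHttpError("boom", 500)
    assert store._delete_existing_remote("000/dataset_1.dat") is False
```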
diff --git a/lib/galaxy/objectstore/cloud.py b/lib/galaxy/objectstore/cloud.py
index 6aaa89e6e5ff..1c6e6aacd68c 100644
--- a/lib/galaxy/objectstore/cloud.py
+++ b/lib/galaxy/objectstore/cloud.py
@@ -398,26 +398,38 @@ def _delete(self, obj, entire_dir=False, **kwargs):
             # but requires iterating through each individual key in S3 and deleing it.
             if entire_dir and extra_dir:
                 shutil.rmtree(self._get_cache_path(rel_path), ignore_errors=True)
-                results = self.bucket.objects.list(prefix=rel_path)
-                for key in results:
-                    log.debug("Deleting key %s", key.name)
-                    key.delete()
-                return True
+                return self._delete_remote_all(rel_path)
             else:
                 # Delete from cache first
                 unlink(self._get_cache_path(rel_path), ignore_errors=True)
                 # Delete from S3 as well
                 if self._exists_remotely(rel_path):
-                    key = self.bucket.objects.get(rel_path)
-                    log.debug("Deleting key %s", key.name)
-                    key.delete()
-                    return True
-        except Exception:
-            log.exception("Could not delete key '%s' from cloud", rel_path)
+                    return self._delete_existing_remote(rel_path)
         except OSError:
             log.exception("%s delete error", self._get_filename(obj, **kwargs))
         return False
+
+    def _delete_remote_all(self, rel_path: str) -> bool:
+        try:
+            results = self.bucket.objects.list(prefix=rel_path)
+            for key in results:
+                log.debug("Deleting key %s", key.name)
+                key.delete()
+            return True
+        except Exception:
+            log.exception("Could not delete key '%s' from cloud", rel_path)
+            return False
+
+    def _delete_existing_remote(self, rel_path: str) -> bool:
+        try:
+            key = self.bucket.objects.get(rel_path)
+            log.debug("Deleting key %s", key.name)
+            key.delete()
+            return True
+        except Exception:
+            log.exception("Could not delete key '%s' from cloud", rel_path)
+            return False

     def _get_object_url(self, obj, **kwargs):
         if self._exists(obj, **kwargs):
             rel_path = self._construct_path(obj, **kwargs)
diff --git a/lib/galaxy/objectstore/pithos.py b/lib/galaxy/objectstore/pithos.py
index 75eb8e3b05f1..e1c40d5dc360 100644
--- a/lib/galaxy/objectstore/pithos.py
+++ b/lib/galaxy/objectstore/pithos.py
@@ -252,18 +252,31 @@ def _delete(self, obj, **kwargs):
             extra_dir = kwargs.get("extra_dir", False)
             if entire_dir and extra_dir:
                 shutil.rmtree(cache_path)
-                log.debug(f"On Pithos: delete -r {path}/")
-                self.pithos.del_object(path, delimiter="/")
-                return True
+                return self._delete_remote_all(path)
             else:
                 os.unlink(cache_path)
-                self.pithos.del_object(path)
+                return self._delete_existing_remote(path)
         except OSError:
             log.exception(f"{self._get_filename(obj, **kwargs)} delete error")
-        except ClientError as ce:
-            log.exception(f"Could not delete {path} from Pithos, {ce}")
         return False
+
+    def _delete_remote_all(self, path: str) -> bool:
+        try:
+            log.debug(f"On Pithos: delete -r {path}/")
+            self.pithos.del_object(path, delimiter="/")
+            return True
+        except ClientError:
+            log.exception(f"Could not delete path '{path}' from Pithos")
+            return False
+
+    def _delete_existing_remote(self, path: str) -> bool:
+        try:
+            self.pithos.del_object(path)
+            return True
+        except ClientError:
+            log.exception(f"Could not delete path '{path}' from Pithos")
+            return False

     def _get_object_url(self, obj, **kwargs):
         """
         :returns: URL for direct access, None if no object
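Reviewer note: the pithos.py hunk is more than a mechanical extraction. In the old code the `else` branch never returned `True` after `self.pithos.del_object(path)`, so a successful single-object delete fell through to `return False`; routing it through `_delete_existing_remote(path)` fixes that. A quick sketch of the new contract, assuming the containing class is `PithosObjectStore` and that `ClientError` comes from kamaki as in the module's imports; the client is stubbed out:

```python
# Sketch only: PithosObjectStore and kamaki's ClientError are assumed from
# pithos.py; the pithos client is replaced with a Mock.
from unittest import mock

from kamaki.clients import ClientError

from galaxy.objectstore.pithos import PithosObjectStore


def test_single_object_delete_now_reports_success():
    store = object.__new__(PithosObjectStore)  # skip __init__/config
    store.pithos = mock.Mock()

    # Previously this success path returned False; now it returns True.
    assert store._delete_existing_remote("000/dataset_1.dat") is True
    store.pithos.del_object.assert_called_once_with("000/dataset_1.dat")

    # Pithos errors are logged and reported as False.
    store.pithos.del_object.side_effect = ClientError("boom", 500)
    assert store._delete_existing_remote("000/dataset_1.dat") is False
```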
diff --git a/lib/galaxy/objectstore/s3.py b/lib/galaxy/objectstore/s3.py
index 77b2bbb6a175..fd6803c2ad93 100644
--- a/lib/galaxy/objectstore/s3.py
+++ b/lib/galaxy/objectstore/s3.py
@@ -436,27 +436,39 @@ def _delete(self, obj, entire_dir=False, **kwargs):
             # but requires iterating through each individual key in S3 and deleing it.
             if entire_dir and extra_dir:
                 shutil.rmtree(self._get_cache_path(rel_path), ignore_errors=True)
-                results = self._bucket.get_all_keys(prefix=rel_path)
-                for key in results:
-                    log.debug("Deleting key %s", key.name)
-                    key.delete()
-                return True
+                return self._delete_remote_all(rel_path)
             else:
                 # Delete from cache first
                 unlink(self._get_cache_path(rel_path), ignore_errors=True)
                 # Delete from S3 as well
                 if self._exists_remotely(rel_path):
-                    key = Key(self._bucket, rel_path)
-                    log.debug("Deleting key %s", key.name)
-                    key.delete()
-                    return True
-        except S3ResponseError:
-            log.exception("Could not delete key '%s' from S3", rel_path)
+                    return self._delete_existing_remote(rel_path)
         except OSError:
             log.exception("%s delete error", self._get_filename(obj, **kwargs))
         return False
         # return cache_path # Until the upload tool does not explicitly create the dataset, return expected path
+
+    def _delete_remote_all(self, rel_path: str) -> bool:
+        try:
+            results = self._bucket.get_all_keys(prefix=rel_path)
+            for key in results:
+                log.debug("Deleting key %s", key.name)
+                key.delete()
+            return True
+        except S3ResponseError:
+            log.exception("Could not delete key '%s' from S3", rel_path)
+            return False
+
+    def _delete_existing_remote(self, rel_path: str) -> bool:
+        try:
+            key = Key(self._bucket, rel_path)
+            log.debug("Deleting key %s", key.name)
+            key.delete()
+            return True
+        except S3ResponseError:
+            log.exception("Could not delete key '%s' from S3", rel_path)
+            return False

     def _download_directory_into_cache(self, rel_path, cache_path):
         download_directory(self._bucket, rel_path, cache_path)
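Reviewer note: the boto-backed S3 helpers can be exercised the same way with a hand-rolled fake bucket. In the sketch below, `_FakeKey` and `_FakeBucket` are hypothetical test doubles (not Galaxy or boto APIs) that mimic the two bucket calls `_delete_remote_all` makes, and the containing class is assumed to be `S3ObjectStore`; `_delete_existing_remote` is harder to fake because it constructs a boto `Key` directly.

```python
# Sketch only: _FakeKey/_FakeBucket are made-up stand-ins for boto objects;
# S3ObjectStore is assumed to be the class this hunk patches.
from galaxy.objectstore.s3 import S3ObjectStore


class _FakeKey:
    def __init__(self, name):
        self.name = name
        self.deleted = False

    def delete(self):
        self.deleted = True


class _FakeBucket:
    def __init__(self, keys):
        self.keys = keys

    def get_all_keys(self, prefix=""):
        # Same shape as boto's Bucket.get_all_keys(prefix=...) for our purposes.
        return [k for k in self.keys if k.name.startswith(prefix)]


def test_delete_remote_all_removes_every_key_under_prefix():
    keys = [_FakeKey("000/dataset_1.dat"), _FakeKey("000/dataset_1_files/part")]
    store = object.__new__(S3ObjectStore)  # skip __init__/config
    store._bucket = _FakeBucket(keys)

    assert store._delete_remote_all("000/") is True
    assert all(k.deleted for k in keys)
```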