diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py
index c790cc8627ee..6b6f9f365463 100644
--- a/lib/galaxy/managers/datasets.py
+++ b/lib/galaxy/managers/datasets.py
@@ -13,6 +13,8 @@
     TypeVar,
 )
 
+from sqlalchemy import select
+
 from galaxy import (
     exceptions,
     model,
@@ -25,6 +27,10 @@
     secured,
     users,
 )
+from galaxy.model import (
+    Dataset,
+    DatasetHash,
+)
 from galaxy.model.base import transaction
 from galaxy.schema.tasks import (
     ComputeDatasetHashTaskRequest,
@@ -103,7 +109,7 @@ def purge_datasets(self, request: PurgeDatasetsTaskRequest):
         self.error_unless_dataset_purge_allowed()
         with self.session().begin():
             for dataset_id in request.dataset_ids:
-                dataset: model.Dataset = self.session().query(model.Dataset).get(dataset_id)
+                dataset: Dataset = self.session().get(Dataset, dataset_id)
                 if dataset.user_can_purge:
                     try:
                         dataset.full_delete()
@@ -158,15 +164,7 @@ def compute_hash(self, request: ComputeDatasetHashTaskRequest):
         # TODO: replace/update if the combination of dataset_id/hash_function has already
         # been stored.
         sa_session = self.session()
-        hash = (
-            sa_session.query(model.DatasetHash)
-            .filter(
-                model.DatasetHash.dataset_id == dataset.id,
-                model.DatasetHash.hash_function == hash_function,
-                model.DatasetHash.extra_files_path == extra_files_path,
-            )
-            .one_or_none()
-        )
+        hash = get_dataset_hash(sa_session, dataset.id, hash_function, extra_files_path)
         if hash is None:
             sa_session.add(dataset_hash)
             with transaction(sa_session):
@@ -477,7 +475,7 @@ def ensure_can_set_metadata(self, dataset: model.DatasetInstance, raiseException
 
     def detect_datatype(self, trans, dataset_assoc):
         """Sniff and assign the datatype to a given dataset association (ldda or hda)"""
-        data = trans.sa_session.query(self.model_class).get(dataset_assoc.id)
+        data = trans.sa_session.get(self.model_class, dataset_assoc.id)
         self.ensure_can_change_datatype(data)
         self.ensure_can_set_metadata(data)
         path = data.dataset.file_name
@@ -489,7 +487,7 @@ def detect_datatype(self, trans, dataset_assoc):
 
     def set_metadata(self, trans, dataset_assoc, overwrite=False, validate=True):
         """Trigger a job that detects and sets metadata on a given dataset association (ldda or hda)"""
-        data = trans.sa_session.query(self.model_class).get(dataset_assoc.id)
+        data = trans.sa_session.get(self.model_class, dataset_assoc.id)
         self.ensure_can_set_metadata(data)
         if overwrite:
             self.overwrite_metadata(data)
@@ -874,3 +872,13 @@ def isinstance_datatype(self, dataset_assoc, class_strs):
             if datatype_class:
                 comparison_classes.append(datatype_class)
         return comparison_classes and isinstance(dataset_assoc.datatype, tuple(comparison_classes))
+
+
+def get_dataset_hash(session, dataset_id, hash_function, extra_files_path):
+    stmt = (
+        select(DatasetHash)
+        .where(DatasetHash.dataset_id == dataset_id)
+        .where(DatasetHash.hash_function == hash_function)
+        .where(DatasetHash.extra_files_path == extra_files_path)
+    )
+    return session.scalars(stmt).one_or_none()
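
For reference, a minimal sketch of how the new module-level helper would be called from other code, mirroring the call site in compute_hash above. The session and dataset objects here are assumed to come from an initialized Galaxy app; the "sha256" value and the None extra_files_path are illustrative, not part of this patch:

    # Hypothetical usage sketch; assumes sa_session is a live Galaxy
    # SQLAlchemy session and dataset is an existing Dataset row.
    from galaxy.managers.datasets import get_dataset_hash

    hash_row = get_dataset_hash(sa_session, dataset.id, "sha256", None)
    if hash_row is None:
        # No hash stored yet for this (dataset, function, extra_files_path)
        # combination, so one would be computed and added here.
        ...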