Fix SA2.0 usage in managers.datasets
jdavcs committed Oct 19, 2023
1 parent 37f3bf5 commit 328d4d0
Showing 1 changed file with 20 additions and 12 deletions: lib/galaxy/managers/datasets.py
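For context, the commit replaces legacy SQLAlchemy 1.x Query usage with the 2.0 API: Query.get() becomes Session.get(), and query().filter(...).one_or_none() chains become select().where(...) statements executed through session.scalars(). Below is a minimal, self-contained sketch of both idioms, assuming SQLAlchemy 2.0; the Widget model and in-memory engine are hypothetical stand-ins, not Galaxy code.

from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Widget(Base):  # hypothetical stand-in, not a Galaxy model
    __tablename__ = "widget"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String(50))


engine = create_engine("sqlite://")  # throwaway in-memory database
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Widget(name="example"))
    session.commit()

    # 1.x style (removed by this commit): session.query(Widget).get(1)
    # 2.0 style: fetch by primary key directly on the Session.
    widget = session.get(Widget, 1)

    # 1.x style: session.query(Widget).filter(Widget.name == "example").one_or_none()
    # 2.0 style: build a select() statement and execute it via session.scalars().
    stmt = select(Widget).where(Widget.name == "example")
    match = session.scalars(stmt).one_or_none()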
@@ -13,6 +13,8 @@
     TypeVar,
 )
 
+from sqlalchemy import select
+
 from galaxy import (
     exceptions,
     model,
@@ -25,6 +27,10 @@
     secured,
     users,
 )
+from galaxy.model import (
+    Dataset,
+    DatasetHash,
+)
 from galaxy.model.base import transaction
 from galaxy.schema.tasks import (
     ComputeDatasetHashTaskRequest,
@@ -103,7 +109,7 @@ def purge_datasets(self, request: PurgeDatasetsTaskRequest):
         self.error_unless_dataset_purge_allowed()
         with self.session().begin():
             for dataset_id in request.dataset_ids:
-                dataset: model.Dataset = self.session().query(model.Dataset).get(dataset_id)
+                dataset: Dataset = self.session().get(Dataset, dataset_id)
                 if dataset.user_can_purge:
                     try:
                         dataset.full_delete()
@@ -158,15 +164,7 @@ def compute_hash(self, request: ComputeDatasetHashTaskRequest):
         # TODO: replace/update if the combination of dataset_id/hash_function has already
         # been stored.
         sa_session = self.session()
-        hash = (
-            sa_session.query(model.DatasetHash)
-            .filter(
-                model.DatasetHash.dataset_id == dataset.id,
-                model.DatasetHash.hash_function == hash_function,
-                model.DatasetHash.extra_files_path == extra_files_path,
-            )
-            .one_or_none()
-        )
+        hash = get_dataset_hash(sa_session, dataset.id, hash_function, extra_files_path)
         if hash is None:
             sa_session.add(dataset_hash)
             with transaction(sa_session):
@@ -477,7 +475,7 @@ def ensure_can_set_metadata(self, dataset: model.DatasetInstance, raiseException
 
     def detect_datatype(self, trans, dataset_assoc):
         """Sniff and assign the datatype to a given dataset association (ldda or hda)"""
-        data = trans.sa_session.query(self.model_class).get(dataset_assoc.id)
+        data = trans.sa_session.get(self.model_class, dataset_assoc.id)
         self.ensure_can_change_datatype(data)
         self.ensure_can_set_metadata(data)
         path = data.dataset.file_name
@@ -489,7 +487,7 @@ def detect_datatype(self, trans, dataset_assoc):
 
     def set_metadata(self, trans, dataset_assoc, overwrite=False, validate=True):
         """Trigger a job that detects and sets metadata on a given dataset association (ldda or hda)"""
-        data = trans.sa_session.query(self.model_class).get(dataset_assoc.id)
+        data = trans.sa_session.get(self.model_class, dataset_assoc.id)
         self.ensure_can_set_metadata(data)
         if overwrite:
             self.overwrite_metadata(data)
@@ -874,3 +872,13 @@ def isinstance_datatype(self, dataset_assoc, class_strs):
             if datatype_class:
                 comparison_classes.append(datatype_class)
         return comparison_classes and isinstance(dataset_assoc.datatype, tuple(comparison_classes))
+
+
+def get_dataset_hash(session, dataset_id, hash_function, extra_files_path):
+    stmt = (
+        select(DatasetHash)
+        .where(DatasetHash.dataset_id == dataset_id)
+        .where(DatasetHash.hash_function == hash_function)
+        .where(DatasetHash.extra_files_path == extra_files_path)
+    )
+    return session.scalars(stmt).one_or_none()
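The new module-level helper keeps the one_or_none() semantics of the query it replaces: it returns the single matching DatasetHash, returns None when no row matches, and raises MultipleResultsFound if more than one does. An illustrative call, mirroring the compute_hash() call site above; the argument values here are made up:

existing = get_dataset_hash(
    sa_session,               # an active SQLAlchemy Session
    dataset_id=42,            # primary key of the Dataset row (illustrative)
    hash_function="SHA-256",  # stored hash-function name (illustrative)
    extra_files_path=None,    # assumption: None targets the primary dataset file
)
if existing is None:
    print("No hash stored yet for this dataset/function/path combination.")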
