Fix various mypy issues around mapped attributes
mvdbeek committed Nov 6, 2024
1 parent f1a1f67 commit cd4c4f7
Showing 6 changed files with 37 additions and 31 deletions.
2 changes: 1 addition & 1 deletion lib/galaxy/celery/tasks.py
@@ -199,7 +199,7 @@ def set_metadata(
     try:
         if overwrite:
             hda_manager.overwrite_metadata(dataset_instance)
-        dataset_instance.datatype.set_meta(dataset_instance)  # type:ignore [arg-type]
+        dataset_instance.datatype.set_meta(dataset_instance)
         dataset_instance.set_peek()
         # Reset SETTING_METADATA state so the dataset instance getter picks the dataset state
         dataset_instance.set_metadata_success_state()
12 changes: 9 additions & 3 deletions lib/galaxy/datatypes/protocols.py
@@ -2,10 +2,16 @@
 Location of protocols used in datatypes
 """

-from typing import Any
+from typing import (
+    Any,
+    TYPE_CHECKING,
+)

 from typing_extensions import Protocol

+if TYPE_CHECKING:
+    from sqlalchemy.orm import Mapped
+

 class HasClearAssociatedFiles(Protocol):
     def clear_associated_files(self, metadata_safe: bool = False, purge: bool = False) -> None: ...
@@ -17,7 +23,7 @@ def creating_job(self): ...


 class HasDeleted(Protocol):
-    deleted: bool
+    deleted: "Mapped[bool]"


 class HasExt(Protocol):
@@ -39,7 +45,7 @@ class HasHid(Protocol):


 class HasId(Protocol):
-    id: int
+    id: "Mapped[int]"


 class HasInfo(Protocol):
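Why Mapped[...] in the protocols: under SQLAlchemy 2.0 typing, declarative models expose their columns as Mapped[...] descriptors, so a protocol declaring a plain deleted: bool no longer structurally matches them under mypy. A minimal, self-contained sketch of the idea (illustrative names, not Galaxy code):

from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from typing_extensions import Protocol


class HasDeleted(Protocol):
    deleted: "Mapped[bool]"


class Base(DeclarativeBase):
    pass


class Item(Base):
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)
    deleted: Mapped[bool] = mapped_column(default=False)


def is_deleted(obj: HasDeleted) -> bool:
    # On instances the Mapped descriptor yields the plain value,
    # so this is an ordinary bool at runtime and for mypy.
    return bool(obj.deleted)


# Item satisfies HasDeleted structurally; with `deleted: bool` in the
# protocol, mypy would reject this call.
print(is_deleted(Item(deleted=True)))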
4 changes: 2 additions & 2 deletions lib/galaxy/managers/hdas.py
@@ -371,7 +371,7 @@ def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary:
         .where(
             and_(
                 HistoryDatasetAssociation.deleted == true(),
-                HistoryDatasetAssociation.purged == false(),  # type:ignore[arg-type]
+                HistoryDatasetAssociation.purged == false(),
                 model.History.user_id == user.id,
             )
         )
@@ -401,7 +401,7 @@ def get_discarded(
         .where(
             and_(
                 HistoryDatasetAssociation.deleted == true(),
-                HistoryDatasetAssociation.purged == false(),  # type:ignore[arg-type]
+                HistoryDatasetAssociation.purged == false(),
                 model.History.user_id == user.id,
             )
         )
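For context (not part of the commit), a compact sketch of why the ignores can go: once purged and deleted are Mapped[...] column attributes, class-level comparisons such as purged == false() are typed SQL expressions, so and_() accepts them without per-line ignores. Illustrative model only:

from typing import Optional

from sqlalchemy import and_, false, func, select, true
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Assoc(Base):
    __tablename__ = "assoc"
    id: Mapped[int] = mapped_column(primary_key=True)
    deleted: Mapped[bool] = mapped_column(default=False)
    purged: Mapped[Optional[bool]] = mapped_column(default=False)


# Each comparison is a typed ColumnElement[bool], so no
# type:ignore[arg-type] is needed on the purged test.
stmt = select(func.count(Assoc.id)).where(
    and_(
        Assoc.deleted == true(),
        Assoc.purged == false(),
    )
)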
8 changes: 4 additions & 4 deletions lib/galaxy/managers/jobs.py
@@ -623,14 +623,14 @@ def _build_stmt_for_hda(self, stmt: S, data_conditions, used_ids, k, v, identifier
         c = aliased(model.HistoryDatasetAssociation)
         d = aliased(model.JobParameter)
         e = aliased(model.HistoryDatasetAssociationHistory)
-        stmt = stmt.add_columns(a.dataset_id)
+        stmt = cast(S, stmt.add_columns(a.dataset_id))
         used_ids.append(a.dataset_id)
         stmt = stmt.join(a, a.job_id == model.Job.id)
         hda_stmt = select(model.HistoryDatasetAssociation.id).where(
             model.HistoryDatasetAssociation.id == e.history_dataset_association_id
         )
         # b is the HDA used for the job
-        stmt = stmt.join(b, a.dataset_id == b.id)  # type:ignore[attr-defined]
+        stmt = stmt.join(b, a.dataset_id == b.id)
         stmt = has_same_source(stmt, b, c)
         name_condition = []
         if identifier:
@@ -729,7 +729,7 @@ def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v):
                 ),
             )
             .outerjoin(d, d.id == c.hda_id)
-            .outerjoin(e, e.dataset_id == d.dataset_id)  # type:ignore[attr-defined]
+            .outerjoin(e, e.dataset_id == d.dataset_id)
         )
         data_conditions.append(
             and_(
@@ -739,7 +739,7 @@
                 and_(
                     c.hda_id == b.hda_id,
                     d.id == c.hda_id,
-                    e.dataset_id == d.dataset_id,  # type:ignore[attr-defined]
+                    e.dataset_id == d.dataset_id,
                 ),
             ),
             c.id == v,
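A note on the cast (a sketch, not Galaxy's code): the helper is generic over a statement TypeVar S, but Select.add_columns() is annotated to return Select[Any], which would widen the inferred type; typing.cast restores the caller's S. Names below are illustrative:

from typing import TypeVar, cast

from sqlalchemy import Select, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Job(Base):
    __tablename__ = "job"
    id: Mapped[int] = mapped_column(primary_key=True)
    dataset_id: Mapped[int]


S = TypeVar("S", bound=Select)


def add_dataset_column(stmt: S) -> S:
    # add_columns() returns Select[Any]; cast() keeps the narrowed
    # statement type flowing through the helper.
    return cast(S, stmt.add_columns(Job.dataset_id))


stmt = add_dataset_column(select(Job.id))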
40 changes: 20 additions & 20 deletions lib/galaxy/model/__init__.py
@@ -3152,8 +3152,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
     active_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship(
         primaryjoin=(
             lambda: and_(
-                HistoryDatasetAssociation.history_id == History.id,  # type: ignore[arg-type]
-                not_(HistoryDatasetAssociation.deleted),  # type: ignore[has-type]
+                HistoryDatasetAssociation.history_id == History.id,
+                not_(HistoryDatasetAssociation.deleted),
             )
         ),
         order_by=lambda: asc(HistoryDatasetAssociation.hid),  # type: ignore[has-type]
@@ -3165,7 +3165,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
             lambda: (
                 and_(
                     HistoryDatasetCollectionAssociation.history_id == History.id,
-                    not_(HistoryDatasetCollectionAssociation.deleted),  # type: ignore[arg-type]
+                    not_(HistoryDatasetCollectionAssociation.deleted),
                 )
             )
         ),
@@ -3175,8 +3175,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
     visible_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship(
         primaryjoin=(
             lambda: and_(
-                HistoryDatasetAssociation.history_id == History.id,  # type: ignore[arg-type]
-                not_(HistoryDatasetAssociation.deleted),  # type: ignore[has-type]
+                HistoryDatasetAssociation.history_id == History.id,
+                not_(HistoryDatasetAssociation.deleted),
                 HistoryDatasetAssociation.visible,  # type: ignore[has-type]
             )
         ),
@@ -3187,7 +3187,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
         primaryjoin=(
             lambda: and_(
                 HistoryDatasetCollectionAssociation.history_id == History.id,
-                not_(HistoryDatasetCollectionAssociation.deleted),  # type: ignore[arg-type]
+                not_(HistoryDatasetCollectionAssociation.deleted),
                 HistoryDatasetCollectionAssociation.visible,  # type: ignore[arg-type]
             )
         ),
@@ -4110,27 +4110,27 @@ class Dataset(Base, StorableObject, Serializable):
     active_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship(
         primaryjoin=(
             lambda: and_(
-                Dataset.id == HistoryDatasetAssociation.dataset_id,  # type: ignore[attr-defined]
-                HistoryDatasetAssociation.deleted == false(),  # type: ignore[has-type]
-                HistoryDatasetAssociation.purged == false(),  # type: ignore[arg-type]
+                Dataset.id == HistoryDatasetAssociation.dataset_id,
+                HistoryDatasetAssociation.deleted == false(),
+                HistoryDatasetAssociation.purged == false(),
             )
         ),
         viewonly=True,
     )
     purged_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship(
         primaryjoin=(
             lambda: and_(
-                Dataset.id == HistoryDatasetAssociation.dataset_id,  # type: ignore[attr-defined]
-                HistoryDatasetAssociation.purged == true(),  # type: ignore[arg-type]
+                Dataset.id == HistoryDatasetAssociation.dataset_id,
+                HistoryDatasetAssociation.purged == true(),
             )
         ),
         viewonly=True,
     )
     active_library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship(
         primaryjoin=(
             lambda: and_(
-                Dataset.id == LibraryDatasetDatasetAssociation.dataset_id,  # type: ignore[attr-defined]
-                LibraryDatasetDatasetAssociation.deleted == false(),  # type: ignore[has-type]
+                Dataset.id == LibraryDatasetDatasetAssociation.dataset_id,
+                LibraryDatasetDatasetAssociation.deleted == false(),
             )
         ),
         viewonly=True,
@@ -4596,11 +4596,13 @@ def datatype_for_extension(extension, datatypes_registry=None) -> "Data":
 class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable):
     """A base class for all 'dataset instances', HDAs, LDDAs, etc"""

+    purged: Mapped[Optional[bool]]
+    deleted: Mapped[bool]
+    dataset_id: Mapped[Optional[int]]
+    _state: Mapped[Optional[str]]
     states = Dataset.states
-    _state: Optional[str]
     conversion_messages = Dataset.conversion_messages
     permitted_actions = Dataset.permitted_actions
-    purged: bool
     creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]]
     copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"]
     copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"]
@@ -5055,9 +5057,7 @@ def find_conversion_destination(
         self, accepted_formats: List[str], **kwd
     ) -> Tuple[bool, Optional[str], Optional["DatasetInstance"]]:
         """Returns ( target_ext, existing converted dataset )"""
-        return self.datatype.find_conversion_destination(
-            self, accepted_formats, _get_datatypes_registry(), **kwd  # type:ignore[arg-type]
-        )
+        return self.datatype.find_conversion_destination(self, accepted_formats, _get_datatypes_registry(), **kwd)

     def add_validation_error(self, validation_error):
         self.validation_errors.append(validation_error)
@@ -5260,7 +5260,7 @@ class HistoryDatasetAssociation(DatasetInstance, HasTags, Dictifiable, UsesAnnotations
     Resource class that creates a relation between a dataset and a user history.
     """

-    history_id: Optional[int]
+    history_id: Mapped[Optional[int]]

     def __init__(
         self,
@@ -6947,7 +6947,7 @@ class HistoryDatasetCollectionAssociation(
     name: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
     hid: Mapped[Optional[int]]
     visible: Mapped[Optional[bool]]
-    deleted: Mapped[Optional[bool]] = mapped_column(default=False)
+    deleted: Mapped[bool] = mapped_column(default=False)
     copied_from_history_dataset_collection_association_id: Mapped[Optional[int]] = mapped_column(
         ForeignKey("history_dataset_collection_association.id")
    )
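Background for the DatasetInstance annotations (a sketch under assumed simplifications, not Galaxy's actual tables): a class that is not itself declaratively mapped can still carry annotation-only Mapped[...] attributes as pure type declarations, describing the columns that the concrete mapping of its subclasses provides. The illustrative imperative mapping below mirrors that pattern:

from typing import Optional

from sqlalchemy import Boolean, Column, Integer, Table
from sqlalchemy.orm import Mapped, registry

mapper_registry = registry()

instance_table = Table(
    "instance",
    mapper_registry.metadata,
    Column("id", Integer, primary_key=True),
    Column("deleted", Boolean, default=False),
    Column("purged", Boolean, default=False),
)


class InstanceBase:
    # Annotation-only: no columns are created here. The hints tell
    # mypy that instances expose plain values while class-level access
    # (e.g. Instance.deleted == False) yields SQL expressions.
    deleted: Mapped[bool]
    purged: Mapped[Optional[bool]]


class Instance(InstanceBase):
    pass


# The real columns come from the imperative mapping, matching the hints.
mapper_registry.map_imperatively(Instance, instance_table)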
2 changes: 1 addition & 1 deletion lib/galaxy/model/store/__init__.py
@@ -718,7 +718,7 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs):
             # Try to set metadata directly. @mvdbeek thinks we should only record the datasets
             try:
                 if dataset_instance.has_metadata_files:
-                    dataset_instance.datatype.set_meta(dataset_instance)  # type:ignore[arg-type]
+                    dataset_instance.datatype.set_meta(dataset_instance)
             except Exception:
                 log.debug(f"Metadata setting failed on {dataset_instance}", exc_info=True)
                 dataset_instance.state = dataset_instance.dataset.states.FAILED_METADATA
