diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index d78689534474..2519eaec39af 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2927,7 +2927,7 @@ def prune(cls, sa_session):
         session.execute(q)
 
 
-class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable):
+class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable, UsesCreateAndUpdateTime):
     __tablename__ = "history"
     __table_args__ = (Index("ix_history_slug", "slug", mysql_length=200),)
 
@@ -3094,6 +3094,9 @@ def username(self):
     def count(self):
         return self.hid_counter - 1
 
+    def update(self):
+        self._update_time = now()
+
     def add_pending_items(self, set_output_hid=True):
         # These are assumed to be either copies of existing datasets or new, empty datasets,
         # so we don't need to set the quota.
@@ -7362,7 +7365,7 @@ def __init__(self):
         self.user = None
 
 
-class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById):
+class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
     """
     StoredWorkflow represents the root node of a tree of objects that compose a workflow, including workflow revisions, steps, and subworkflows.
     It is responsible for the metadata associated with a workflow including owner, name, published, and create/update time.
@@ -7740,7 +7743,7 @@ def log_str(self):
 InputConnDictType = Dict[str, Union[Dict[str, Any], List[Dict[str, Any]]]]
 
 
-class WorkflowStep(Base, RepresentById):
+class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime):
     """
     WorkflowStep represents a tool or subworkflow, its inputs, annotations, and any outputs
     that are flagged as workflow outputs.
@@ -10061,7 +10064,7 @@ def equals(self, user_id, provider, authn_id, config):
         )
 
 
-class Page(Base, HasTags, Dictifiable, RepresentById):
+class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
     __tablename__ = "page"
     __table_args__ = (Index("ix_page_slug", "slug", mysql_length=200),)
 
@@ -10175,7 +10178,7 @@ class PageUserShareAssociation(Base, UserShareAssociation):
     page = relationship("Page", back_populates="users_shared_with")
 
 
-class Visualization(Base, HasTags, Dictifiable, RepresentById):
+class Visualization(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime):
     __tablename__ = "visualization"
     __table_args__ = (
         Index("ix_visualization_dbkey", "dbkey", mysql_length=200),
diff --git a/lib/galaxy/model/tags.py b/lib/galaxy/model/tags.py
index 22e7ae63ef75..6e9ca0592660 100644
--- a/lib/galaxy/model/tags.py
+++ b/lib/galaxy/model/tags.py
@@ -95,6 +95,7 @@ def set_tags_from_list(
         if flush:
             with transaction(self.sa_session):
                 self.sa_session.commit()
+        item.update()
         return item.tags
 
     def get_tag_assoc_class(self, item_class):
diff --git a/lib/galaxy/webapps/galaxy/services/history_contents.py b/lib/galaxy/webapps/galaxy/services/history_contents.py
index 962f3f2b9d38..79968b13b5d6 100644
--- a/lib/galaxy/webapps/galaxy/services/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/services/history_contents.py
@@ -1445,6 +1445,9 @@ def _undelete(self, item: HistoryItemModel):
             raise exceptions.ItemDeletionException("This item has been permanently deleted and cannot be recovered.")
         manager = self._get_item_manager(item)
         manager.undelete(item, flush=self.flush)
+        # Again, we need to force an update in the edge case where all selected items are already undeleted
+        # or when the item was purged, as undelete will not trigger an update
+        item.update()
 
     def _purge(self, item: HistoryItemModel, trans: ProvidesHistoryContext):
         if getattr(item, "purged", False):
diff --git a/lib/galaxy_test/api/test_history_contents.py b/lib/galaxy_test/api/test_history_contents.py
index f894d206891a..18d8a93c3f80 100644
--- a/lib/galaxy_test/api/test_history_contents.py
+++ b/lib/galaxy_test/api/test_history_contents.py
@@ -1625,6 +1625,7 @@ def _get_item_with_id_from_history_contents(
         return None
 
     def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expected_status_code: int = 200):
+        original_history_update_time = self._get_history_update_time(history_id)
         if query:
             query = f"?{query}"
         response = self._put(
@@ -1633,8 +1634,22 @@ def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expec
             json=True,
         )
         self._assert_status_code_is(response, expected_status_code)
-        return response.json()
+        result = response.json()
+
+        if "err_msg" in result or result.get("success_count", 0) == 0:
+            # We don't need to check the history update time if there was an error or no items were updated
+            return result
+
+        # After a successful operation, history update time should be updated so the changes can be detected by the frontend
+        after_bulk_operation_history_update_time = self._get_history_update_time(history_id)
+        assert after_bulk_operation_history_update_time > original_history_update_time
+
+        return result
 
     def _assert_bulk_success(self, bulk_operation_result, expected_success_count: int):
         assert bulk_operation_result["success_count"] == expected_success_count, bulk_operation_result
         assert not bulk_operation_result["errors"]
+
+    def _get_history_update_time(self, history_id: str):
+        history = self._get(f"histories/{history_id}").json()
+        return history.get("update_time")
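
For context, the `UsesCreateAndUpdateTime` mixin that the model classes above now inherit is referenced but not defined in this patch. Below is a minimal sketch of its assumed shape, inferred from how `item.update()` is called in `tags.py` and `history_contents.py`; the import path and exact body are assumptions, not the real definition in `lib/galaxy/model/__init__.py`.

```python
# Minimal sketch of the assumed mixin; not the actual Galaxy definition.
from datetime import datetime

from galaxy.model.orm.now import now  # import path assumed


class UsesCreateAndUpdateTime:
    update_time: datetime

    def update(self):
        # Explicitly touch the timestamp. This matters for operations such as
        # tagging or undeleting an already-live item, which do not dirty the
        # row itself and therefore never fire the column's `onupdate` hook,
        # leaving clients that poll `update_time` unaware of the change.
        self.update_time = now()
```

`History` overrides `update()` to write `self._update_time` directly, presumably because its `update_time` column is mapped under a private name, while the other classes can rely on the mixin default.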