Merge branch 'release_24.0' into dev
nsoranzo committed Apr 4, 2024
2 parents 7aedfa4 + e970ae5 commit 4a4ead2
Showing 8 changed files with 51 additions and 39 deletions.
7 changes: 3 additions & 4 deletions doc/source/conf.py
@@ -14,8 +14,6 @@
import os
import sys

import sphinx_rtd_theme

# Set GALAXY_DOCS_SKIP_VIEW_CODE=1 to skip embedding highlighted source
# code into docs.
SKIP_VIEW_CODE = os.environ.get("GALAXY_DOCS_SKIP_VIEW_CODE", False) == "1"
@@ -161,11 +159,10 @@ def setup(app):
"collapse_navigation": False,
"display_version": True,
"navigation_depth": 2,
"canonical_url": "https://docs.galaxyproject.org/en/master/",
}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
@@ -174,6 +171,8 @@ def setup(app):
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

html_baseurl = "https://docs.galaxyproject.org/en/master/"

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
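
For reference, a minimal sketch of the resulting Sphinx configuration: recent sphinx_rtd_theme releases register themselves with Sphinx, so the explicit import and html_theme_path are no longer needed, and the standard html_baseurl option takes over from the theme-specific canonical_url. The version thresholds below are assumptions, not taken from this commit.

# Sketch only -- assumes Sphinx >= 1.8 and a sphinx_rtd_theme new enough to
# register itself via the sphinx.html_themes entry point.
html_theme = "sphinx_rtd_theme"
html_theme_options = {
    "collapse_navigation": False,
    "display_version": True,
    "navigation_depth": 2,
}
# Base URL used for <link rel="canonical"> tags, replacing the theme's
# "canonical_url" option.
html_baseurl = "https://docs.galaxyproject.org/en/master/"
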
23 changes: 15 additions & 8 deletions lib/galaxy/managers/collections_util.py
@@ -1,5 +1,9 @@
import logging
import math
from typing import (
Any,
Dict,
)

from galaxy import (
exceptions,
@@ -153,35 +157,38 @@ def dictify_dataset_collection_instance(
return dict_value


def dictify_element_reference(element, rank_fuzzy_counts=None, recursive=True, security=None):
def dictify_element_reference(
element: model.DatasetCollectionElement, rank_fuzzy_counts=None, recursive=True, security=None
):
"""Load minimal details of elements required to show outline of contents in history panel.
History panel can use this reference to expand to full details if individual dataset elements
are clicked.
"""
dictified = element.to_dict(view="element")
if (element_object := element.element_object) is not None:
object_details = dict(
object_details: Dict[str, Any] = dict(
id=element_object.id,
model_class=element_object.__class__.__name__,
)
if element.child_collection:
if isinstance(element_object, model.DatasetCollection):
object_details["collection_type"] = element_object.collection_type
object_details["element_count"] = element_object.element_count
object_details["populated"] = element_object.populated_optimized

# Recursively yield elements for each nested collection...
if recursive:
child_collection = element.child_collection
elements, rest_fuzzy_counts = get_fuzzy_count_elements(child_collection, rank_fuzzy_counts)
elements, rest_fuzzy_counts = get_fuzzy_count_elements(element_object, rank_fuzzy_counts)
object_details["elements"] = [
dictify_element_reference(_, rank_fuzzy_counts=rest_fuzzy_counts, recursive=recursive)
for _ in elements
]
object_details["element_count"] = child_collection.element_count
else:
object_details["state"] = element_object.state
object_details["hda_ldda"] = "hda"
object_details["history_id"] = element_object.history_id
object_details["tags"] = element_object.make_tag_string_list()
if isinstance(element_object, model.HistoryDatasetAssociation):
object_details["history_id"] = element_object.history_id
object_details["tags"] = element_object.make_tag_string_list()

dictified["object"] = object_details
else:
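
Because element_object is now typed as an optional union of HistoryDatasetAssociation, LibraryDatasetDatasetAssociation and DatasetCollection (see the model change below), class-specific attributes have to be read behind an isinstance() check. A minimal, self-contained sketch of that narrowing pattern, using stand-in classes rather than the real Galaxy models:

from typing import Any, Dict, Optional, Union

class DatasetCollection:  # stand-in, not the Galaxy model
    collection_type = "list:paired"
    element_count = 2
    populated_optimized = True

class HistoryDatasetAssociation:  # stand-in, not the Galaxy model
    state = "ok"
    history_id = 7

def summarize(element_object: Optional[Union[DatasetCollection, HistoryDatasetAssociation]]) -> Dict[str, Any]:
    details: Dict[str, Any] = {"model_class": type(element_object).__name__}
    # isinstance() narrows the union, so a type checker accepts the
    # class-specific attribute access in each branch.
    if isinstance(element_object, DatasetCollection):
        details["collection_type"] = element_object.collection_type
        details["element_count"] = element_object.element_count
        details["populated"] = element_object.populated_optimized
    elif isinstance(element_object, HistoryDatasetAssociation):
        details["state"] = element_object.state
        details["history_id"] = element_object.history_id
    return details

print(summarize(DatasetCollection()))           # collection branch
print(summarize(HistoryDatasetAssociation()))   # dataset branch
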
20 changes: 11 additions & 9 deletions lib/galaxy/model/__init__.py
@@ -1757,15 +1757,13 @@ def set_state(self, state: JobState) -> bool:
return False
session = object_session(self)
if session and self.id and state not in Job.finished_states:
# generate statement that will not revert DELETING or DELETED back to anything non-terminal
# Do not update if job is in a terminal state
rval = session.execute(
update(Job)
.where(Job.id == self.id, ~Job.state.in_((Job.states.DELETING, Job.states.DELETED)))
.values(state=state)
update(Job).where(Job.id == self.id, ~Job.state.in_((state, *Job.finished_states))).values(state=state)
)
with transaction(session):
session.commit()
if rval.rowcount == 1:
# Need to expire state since we just updated it, but ORM doesn't know about it.
session.expire(self, ["state"])
self.state_history.append(JobStateHistory(self))
return True
else:
@@ -3065,7 +3063,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
"HistoryDatasetAssociation",
primaryjoin=(
lambda: and_(
HistoryDatasetAssociation.history_id == History.id, # type: ignore[attr-defined]
HistoryDatasetAssociation.history_id == History.id, # type: ignore[arg-type]
not_(HistoryDatasetAssociation.deleted), # type: ignore[has-type]
)
),
@@ -3090,7 +3088,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
"HistoryDatasetAssociation",
primaryjoin=(
lambda: and_(
HistoryDatasetAssociation.history_id == History.id, # type: ignore[attr-defined]
HistoryDatasetAssociation.history_id == History.id, # type: ignore[arg-type]
not_(HistoryDatasetAssociation.deleted), # type: ignore[has-type]
HistoryDatasetAssociation.visible, # type: ignore[has-type]
)
@@ -5108,6 +5106,8 @@ class HistoryDatasetAssociation(DatasetInstance, HasTags, Dictifiable, UsesAnnot
Resource class that creates a relation between a dataset and a user history.
"""

history_id: Optional[int]

def __init__(
self,
hid=None,
@@ -7301,7 +7301,9 @@ def is_collection(self):
return self.element_type == "dataset_collection"

@property
def element_object(self):
def element_object(
self,
) -> Optional[Union[HistoryDatasetAssociation, LibraryDatasetDatasetAssociation, DatasetCollection]]:
if self.hda:
return self.hda
elif self.ldda:
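
The set_state() change above turns the job-state update into a compare-and-set: the UPDATE only matches when the row is not already in the target state and not in a terminal state, and rowcount reveals whether the transition actually happened. A self-contained sketch of the same pattern with SQLAlchemy, using an illustrative Task table rather than Galaxy's Job model:

from sqlalchemy import Column, Integer, String, create_engine, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Task(Base):  # illustrative stand-in for the Job model
    __tablename__ = "task"
    id = Column(Integer, primary_key=True)
    state = Column(String, default="new")

FINISHED_STATES = ("ok", "error", "deleted")

def set_state(session: Session, task_id: int, state: str) -> bool:
    # Guarded UPDATE: no-op if the row is already in `state` or in a terminal state.
    result = session.execute(
        update(Task)
        .where(Task.id == task_id, ~Task.state.in_((state, *FINISHED_STATES)))
        .values(state=state)
    )
    session.commit()
    if result.rowcount == 1:
        # The UPDATE bypassed the ORM, so expire the cached attribute.
        session.expire(session.get(Task, task_id), ["state"])
        return True
    return False

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Task(id=1, state="running"))
    session.commit()
    assert set_state(session, 1, "ok") is True        # running -> ok
    assert set_state(session, 1, "deleted") is False  # terminal state is preserved
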
23 changes: 15 additions & 8 deletions lib/galaxy/tools/__init__.py
@@ -3461,7 +3461,9 @@ def _get_new_elements(self, history, elements_to_copy):

@staticmethod
def element_is_valid(element: model.DatasetCollectionElement):
return element.element_object.is_ok
element_object = element.element_object
assert isinstance(element_object, model.DatasetInstance)
return element_object.is_ok

def produce_outputs(self, trans, out_data, output_collections, incoming, history, **kwds):
collection = incoming["input"]
@@ -3503,7 +3505,9 @@ class FilterFailedDatasetsTool(FilterDatasetsTool):

@staticmethod
def element_is_valid(element: model.DatasetCollectionElement):
return element.element_object.is_ok
element_object = element.element_object
assert isinstance(element_object, model.DatasetInstance)
return element_object.is_ok


class KeepSuccessDatasetsTool(FilterDatasetsTool):
@@ -3514,12 +3518,14 @@ class KeepSuccessDatasetsTool(FilterDatasetsTool):

@staticmethod
def element_is_valid(element: model.DatasetCollectionElement):
element_object = element.element_object
assert isinstance(element_object, model.DatasetInstance)
if (
element.element_object.state != model.Dataset.states.PAUSED
and element.element_object.state in model.Dataset.non_ready_states
element_object.state != model.Dataset.states.PAUSED
and element_object.state in model.Dataset.non_ready_states
):
raise ToolInputsNotReadyException("An input dataset is pending.")
return element.element_object.is_ok
return element_object.is_ok


class FilterEmptyDatasetsTool(FilterDatasetsTool):
@@ -3528,10 +3534,11 @@ class FilterEmptyDatasetsTool(FilterDatasetsTool):

@staticmethod
def element_is_valid(element: model.DatasetCollectionElement):
dataset_instance: model.DatasetInstance = element.element_object
if dataset_instance.has_data():
element_object = element.element_object
assert isinstance(element_object, model.DatasetInstance)
if element_object.has_data():
# We have data, but it might just be a compressed archive of nothing
file_name = dataset_instance.get_file_name()
file_name = element_object.get_file_name()
_, fh = get_fileobj_raw(file_name, mode="rb")
if len(fh.read(1)):
return True
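
Each element_is_valid() now narrows element_object before touching dataset-only attributes: the assert documents that these filter tools only operate on flat collections of datasets, and it lets mypy accept .is_ok or .state on what is otherwise a union type. A minimal sketch of the idiom with stand-in classes:

from typing import Optional, Union

class DatasetInstance:  # stand-in for model.DatasetInstance
    is_ok = True

class DatasetCollection:  # stand-in for model.DatasetCollection
    pass

class DatasetCollectionElement:  # stand-in with the union-typed property
    def __init__(self, obj: Union[DatasetInstance, DatasetCollection]):
        self._obj = obj

    @property
    def element_object(self) -> Optional[Union[DatasetInstance, DatasetCollection]]:
        return self._obj

def element_is_valid(element: DatasetCollectionElement) -> bool:
    element_object = element.element_object
    # assert isinstance(...) narrows Optional[Union[...]] to DatasetInstance,
    # so the attribute access below type-checks.
    assert isinstance(element_object, DatasetInstance)
    return element_object.is_ok

print(element_is_valid(DatasetCollectionElement(DatasetInstance())))  # True
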
4 changes: 3 additions & 1 deletion lib/galaxy_test/api/test_dataset_collections.py
@@ -455,7 +455,7 @@ def test_show_dataset_collection_contents(self, history_id):
# Get contents_url from history contents, use it to show the first level
# of collection contents in the created HDCA, then use it again to drill
# down into the nested collection contents
hdca = self.dataset_collection_populator.create_list_of_list_in_history(history_id).json()
hdca = self.dataset_collection_populator.create_list_of_list_in_history(history_id, wait=True).json()
root_contents_url = self._get_contents_url_for_hdca(history_id, hdca)

# check root contents for this collection
@@ -466,6 +466,8 @@ def test_show_dataset_collection_contents(self, history_id):
# drill down, retrieve nested collection contents
assert "object" in root_contents[0]
assert "contents_url" in root_contents[0]["object"]
assert root_contents[0]["object"]["element_count"] == 3
assert root_contents[0]["object"]["populated"]
drill_contents_url = root_contents[0]["object"]["contents_url"]
drill_contents = self._get(drill_contents_url).json()
assert len(drill_contents) == len(hdca["elements"][0]["object"]["elements"])
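
For orientation, the new assertions expect each root element's "object" entry to carry the element_count and populated fields added in collections_util.py above. An illustrative shape of that entry follows; the values are not captured from a real response, apart from the two fields the test asserts.

# Illustrative shape only -- not taken from an actual API response.
root_element_object = {
    "model_class": "DatasetCollection",
    "element_count": 3,
    "populated": True,
    "contents_url": "...",  # used to drill into the nested collection
}
assert root_element_object["element_count"] == 3
assert root_element_object["populated"]
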
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_workflows.py
@@ -5477,7 +5477,7 @@ def test_optional_workflow_output(self):
tool_id: output_filter
state:
produce_out_1: False
filter_text_1: '1'
filter_text_1: 'foo'
produce_collection: False
""",
test_data={},
3 changes: 2 additions & 1 deletion lib/galaxy_test/base/populators.py
@@ -630,7 +630,8 @@ def has_active_jobs():
raise TimeoutAssertionError(message)

if assert_ok:
self.wait_for_history(history_id, assert_ok=True, timeout=timeout)
for job in self.history_jobs(history_id=history_id):
assert job["state"] in ("ok", "skipped"), f"Job {job} not in expected state"

def wait_for_jobs(
self,
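
The populators change replaces a blanket wait_for_history() call with a per-job check: every job recorded for the history must have finished in an accepted state. A minimal, self-contained sketch of that check (the helper name and job-dict shape here are illustrative, not Galaxy's API):

from typing import Dict, Iterable, List

ACCEPTED_STATES = ("ok", "skipped")

def assert_history_jobs_ok(history_jobs: Iterable[Dict]) -> None:
    # Collect offenders first so the failure message names every bad job.
    bad: List[Dict] = [job for job in history_jobs if job["state"] not in ACCEPTED_STATES]
    assert not bad, f"Jobs not in expected state: {bad}"

# Both jobs pass; a job in state "error" would trigger the assertion.
assert_history_jobs_ok([{"id": "a", "state": "ok"}, {"id": "b", "state": "skipped"}])
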
8 changes: 1 addition & 7 deletions test/functional/tools/output_filter.xml
@@ -59,15 +59,9 @@ echo 'p2.reverse' > p2.reverse
</assert_contents>
</output>
</test>
<!-- tool runs with no outputs should fail -->
<test expect_num_outputs="0" expect_test_failure="true">
<test expect_num_outputs="0">
<param name="produce_out_1" value="false" />
<param name="filter_text_1" value="not_foo_or_bar" />
<output name="out_3">
<assert_contents>
<has_line line="test" />
</assert_contents>
</output>
<assert_stdout>
<has_n_lines n="0"/>
</assert_stdout>
