diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index 7e7df3908e5c..f1640fd437d4 100644
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -3204,6 +3204,7 @@ class DatabaseOperationTool(Tool):
require_terminal_states = True
require_dataset_ok = True
tool_type_local = True
+ require_terminal_or_paused_states = False
@property
def valid_input_states(self):
@@ -3211,6 +3212,8 @@ def valid_input_states(self):
return (model.Dataset.states.OK,)
elif self.require_terminal_states:
return model.Dataset.terminal_states
+ elif self.require_terminal_or_paused_states:
+ return model.Dataset.terminal_states + (model.Dataset.states.PAUSED,)
else:
return model.Dataset.valid_input_states
@@ -3503,6 +3506,22 @@ def element_is_valid(element: model.DatasetCollectionElement):
return element.element_object.is_ok
+class KeepSuccessDatasetsTool(FilterDatasetsTool):
+ tool_type = "keep_success_datasets_collection"
+ require_terminal_states = False
+ require_dataset_ok = False
+ require_terminal_or_paused_states = True
+
+ @staticmethod
+ def element_is_valid(element: model.DatasetCollectionElement):
+ if (
+ element.element_object.state != model.Dataset.states.PAUSED
+ and element.element_object.state in model.Dataset.non_ready_states
+ ):
+ raise ToolInputsNotReadyException("An input dataset is pending.")
+ return element.element_object.is_ok
+
+
class FilterEmptyDatasetsTool(FilterDatasetsTool):
tool_type = "filter_empty_datasets_collection"
require_dataset_ok = False
diff --git a/lib/galaxy/tools/keep_success_collection.xml b/lib/galaxy/tools/keep_success_collection.xml
new file mode 100644
index 000000000000..69a0653df214
--- /dev/null
+++ b/lib/galaxy/tools/keep_success_collection.xml
@@ -0,0 +1,64 @@
+
+
+
+
+
+ operation_3695
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py
index c2151b942517..155511afb585 100644
--- a/lib/galaxy/webapps/base/webapp.py
+++ b/lib/galaxy/webapps/base/webapp.py
@@ -665,7 +665,8 @@ def _ensure_valid_session(self, session_cookie: str, create: bool = True) -> Non
galaxy_session = self.__create_new_session(prev_galaxy_session, user_for_new_session)
galaxy_session_requires_flush = True
self.galaxy_session = galaxy_session
- self.get_or_create_default_history()
+ if self.webapp.name == "galaxy":
+ self.get_or_create_default_history()
self.__update_session_cookie(name=session_cookie)
else:
self.galaxy_session = galaxy_session
diff --git a/lib/galaxy_test/api/test_workflows.py b/lib/galaxy_test/api/test_workflows.py
index cc5b8ccbf65e..4f6f9a5cbe05 100644
--- a/lib/galaxy_test/api/test_workflows.py
+++ b/lib/galaxy_test/api/test_workflows.py
@@ -3255,6 +3255,128 @@ def filter_jobs_by_tool(tool_id):
# Follow proves one job was filtered out of the result of cat1
assert len(filter_jobs_by_tool("cat1")) == 1, jobs
+ def test_keep_success_mapping_error(self):
+ with self.dataset_populator.test_history() as history_id:
+ summary = self._run_workflow(
+ """
+class: GalaxyWorkflow
+inputs:
+ input_c: collection
+
+steps:
+ mixed_collection:
+ tool_id: exit_code_from_file
+ in:
+ input: input_c
+
+ filtered_collection:
+ tool_id: "__KEEP_SUCCESS_DATASETS__"
+ in:
+ input: mixed_collection/out_file1
+
+ cat:
+ tool_id: cat1
+ in:
+ input1: filtered_collection/output
+""",
+ test_data="""
+input_c:
+ collection_type: list
+ elements:
+ - identifier: i1
+ content: "0"
+ - identifier: i2
+ content: "1"
+""",
+ history_id=history_id,
+ wait=True,
+ assert_ok=False,
+ )
+ jobs = summary.jobs
+
+ def filter_jobs_by_tool(tool_id):
+ return [j for j in summary.jobs if j["tool_id"] == tool_id]
+
+ assert len(filter_jobs_by_tool("exit_code_from_file")) == 2, jobs
+ assert len(filter_jobs_by_tool("__KEEP_SUCCESS_DATASETS__")) == 1, jobs
+ # Follow proves one job was filtered out of the exit_code_from_file
+ # And a single one has been sent to cat1
+ assert len(filter_jobs_by_tool("cat1")) == 1, jobs
+
+ def test_keep_success_mapping_paused(self):
+ with self.dataset_populator.test_history() as history_id:
+ summary = self._run_workflow(
+ """
+class: GalaxyWorkflow
+inputs:
+ input_c: collection
+
+steps:
+ mixed_collection:
+ tool_id: exit_code_from_file
+ in:
+ input: input_c
+
+ cat:
+ tool_id: cat1
+ in:
+ input1: mixed_collection/out_file1
+
+ filtered_collection:
+ tool_id: "__KEEP_SUCCESS_DATASETS__"
+ in:
+ input: cat/out_file1
+""",
+ test_data="""
+input_c:
+ collection_type: list
+ elements:
+ - identifier: i1
+ content: "0"
+ - identifier: i2
+ content: "1"
+ - identifier: i3
+ content: "0"
+""",
+ history_id=history_id,
+ wait=True,
+ assert_ok=False,
+ )
+ jobs = summary.jobs
+
+ def filter_jobs_by_tool(tool_id):
+ return [j for j in summary.jobs if j["tool_id"] == tool_id]
+
+ # Get invocation to access output collections
+ invocation = self.workflow_populator.get_invocation(summary.invocation_id, step_details=True)
+ # Check there are 3 exit_code_from_file
+ assert len(filter_jobs_by_tool("exit_code_from_file")) == 3, jobs
+ # Check output collection has 3 elements
+ output_mixed_collection_id = invocation["steps"][1]["output_collections"]["out_file1"]["id"]
+ mixed_collection = self.dataset_populator.get_history_collection_details(
+ history_id, content_id=output_mixed_collection_id, assert_ok=False
+ )
+ assert mixed_collection["element_count"] == 3, mixed_collection
+ # Check 3 jobs cat1 has been "scheduled":
+ assert len(filter_jobs_by_tool("cat1")) == 3, jobs
+ # Check 2 are 'ok' the other is 'paused'
+ output_cat_id = invocation["steps"][2]["output_collections"]["out_file1"]["id"]
+ cat_collection = self.dataset_populator.get_history_collection_details(
+ history_id, content_id=output_cat_id, assert_ok=False
+ )
+ assert cat_collection["element_count"] == 3, cat_collection
+ cat1_states = [e["object"]["state"] for e in cat_collection["elements"]]
+ assert "paused" in cat1_states, jobs
+ assert len([s for s in cat1_states if s == "ok"]) == 2, cat_collection
+ # Check the KEEP_SUCCESS_DATASETS have been run
+ assert len(filter_jobs_by_tool("__KEEP_SUCCESS_DATASETS__")) == 1, jobs
+ # Check the output has 2 elements
+ output_filtered_id = invocation["steps"][3]["output_collections"]["output"]["id"]
+ output_filtered = self.dataset_populator.get_history_collection_details(
+ history_id, content_id=output_filtered_id, assert_ok=False
+ )
+ assert output_filtered["element_count"] == 2, output_filtered
+
def test_workflow_request(self):
workflow = self.workflow_populator.load_workflow(name="test_for_queue")
workflow_request, history_id, workflow_id = self._setup_workflow_run(workflow)
diff --git a/lib/tool_shed/webapp/buildapp.py b/lib/tool_shed/webapp/buildapp.py
index c74928f3cc6b..6988874a6b52 100644
--- a/lib/tool_shed/webapp/buildapp.py
+++ b/lib/tool_shed/webapp/buildapp.py
@@ -37,6 +37,10 @@ class ToolShedGalaxyWebTransaction(GalaxyWebTransaction):
def repositories_hostname(self) -> str:
return url_for("/", qualified=True).rstrip("/")
+ def get_or_create_default_history(self):
+ # tool shed has no concept of histories
+ raise NotImplementedError
+
class CommunityWebApplication(galaxy.webapps.base.webapp.WebApplication):
injection_aware: bool = True
diff --git a/test/functional/tools/sample_tool_conf.xml b/test/functional/tools/sample_tool_conf.xml
index 0cdbb1690d49..fcbddef6a2d6 100644
--- a/test/functional/tools/sample_tool_conf.xml
+++ b/test/functional/tools/sample_tool_conf.xml
@@ -292,6 +292,7 @@
+