Merge pull request #18078 from mvdbeek/better_exception_extract_dataset
[24.0] Improve error message for ``Extract dataset`` tool
mvdbeek authored May 3, 2024
2 parents 0a784e4 + 1bea8a7 commit 2bb61ea
Showing 4 changed files with 33 additions and 7 deletions.
1 change: 1 addition & 0 deletions lib/galaxy/celery/tasks.py
@@ -288,6 +288,7 @@ def abort_when_job_stops(function: Callable, session: galaxy_scoped_session, job
                 return future.result(timeout=1)
             except TimeoutError:
                 if is_aborted(session, job_id):
+                    future.cancel()
                     return

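For readers unfamiliar with this polling loop: the one added line cancels the still-pending `concurrent.futures` future once the job is detected as aborted, so the submitted work does not linger in the executor's queue. A minimal standalone sketch of the same pattern, with a plain `ThreadPoolExecutor` and a boolean flag standing in for Galaxy's `executor`, session, and `is_aborted(session, job_id)` check:

```python
# Minimal sketch of the poll-and-cancel pattern; ThreadPoolExecutor and the
# module-level "aborted" flag are stand-ins for Galaxy's executor and
# is_aborted(session, job_id) check.
import time
from concurrent.futures import ThreadPoolExecutor, TimeoutError

executor = ThreadPoolExecutor(max_workers=1)
aborted = False  # flip to True to simulate the job being aborted


def slow_task():
    time.sleep(10)
    return "done"


def run_unless_aborted():
    future = executor.submit(slow_task)
    while True:
        try:
            # Poll in short intervals so an abort is noticed quickly.
            return future.result(timeout=1)
        except TimeoutError:
            if aborted:
                # cancel() only takes effect while the future is still queued;
                # a future that has already started keeps running to completion.
                future.cancel()
                return None


print(run_unless_aborted())
```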
10 changes: 8 additions & 2 deletions lib/galaxy/tools/__init__.py
@@ -3356,9 +3356,15 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
         if how == "first":
             extracted_element = collection.first_dataset_element
         elif how == "by_identifier":
-            extracted_element = collection[incoming["which"]["identifier"]]
+            try:
+                extracted_element = collection[incoming["which"]["identifier"]]
+            except KeyError as e:
+                raise exceptions.MessageException(e.args[0])
         elif how == "by_index":
-            extracted_element = collection[int(incoming["which"]["index"])]
+            try:
+                extracted_element = collection[int(incoming["which"]["index"])]
+            except KeyError as e:
+                raise exceptions.MessageException(e.args[0])
         else:
             raise exceptions.MessageException("Invalid tool parameters.")
         extracted = extracted_element.element_object

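The effect of the two try/except blocks is that a bad identifier or index now reaches the user as a `MessageException` carrying the collection's own KeyError text (reported as HTTP 400 with that text in `err_msg`, per the new test below) instead of an unhandled KeyError. A toy illustration of the translation, where `FakeCollection` and the local `MessageException` are stand-ins rather than Galaxy's actual classes:

```python
class MessageException(Exception):
    """Stand-in for galaxy.exceptions.MessageException (maps to an HTTP 400)."""


class FakeCollection:
    """Stand-in collection whose lookup failures explain themselves."""

    def __init__(self, elements):
        self._elements = elements  # element_index -> dataset name

    def __getitem__(self, key):
        if key not in self._elements:
            raise KeyError(f"Dataset collection has no element_index with key {key}.")
        return self._elements[key]


def extract_element(collection, key):
    try:
        return collection[key]
    except KeyError as e:
        # Re-raise with the same text so the caller sees the real reason
        # rather than an opaque internal error.
        raise MessageException(e.args[0])


collection = FakeCollection({0: "first.fastq", 1: "second.fastq"})
print(extract_element(collection, 0))  # first.fastq

try:
    extract_element(collection, 100)
except MessageException as e:
    print(e)  # Dataset collection has no element_index with key 100.
```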
6 changes: 3 additions & 3 deletions lib/galaxy/tools/extract_dataset.xml
@@ -19,7 +19,7 @@
             </param>
             <when value="first" />
             <when value="by_identifier">
-                <param name="identifier" label="Element identifier:" type="text">
+                <param name="identifier" label="Element identifier:" type="text" optional="false">
                     <sanitizer invalid_char="">
                         <valid initial="string.ascii_letters,string.digits">
                             <add value="_" />
@@ -52,9 +52,9 @@ Description
 The tool allow extracting datasets based on position (**The first dataset** and **Select by index** options) or name (**Select by element identifier** option). This tool effectively collapses the inner-most collection into a dataset. For nested collections (e.g a list of lists of lists: outer:middle:inner, extracting the inner dataset element) a new list is created where the selected element takes the position of the inner-most collection (so outer:middle, where middle is not a collection but the inner dataset element).
 
-.. class:: warningmark
+.. class:: warningmark
 
-**Note**: Dataset index (numbering) begins with 0 (zero).
+**Note**: Dataset index (numbering) begins with 0 (zero).
 
 .. class:: infomark

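As a mental model for the help text above, the "collapse" of the inner-most collection can be pictured with nested dicts standing in for a list:list:list collection (purely illustrative, not Galaxy code):

```python
# Purely illustrative: nested dicts standing in for an outer:middle:inner
# list-of-lists-of-lists collection of named datasets.
nested = {
    "sample1": {"rep1": {"forward": "s1_r1_f.fastq", "reverse": "s1_r1_r.fastq"}},
    "sample2": {"rep1": {"forward": "s2_r1_f.fastq", "reverse": "s2_r1_r.fastq"}},
}

# Extracting the inner element "forward" keeps the outer:middle structure,
# but each middle entry now holds the selected dataset instead of a collection.
collapsed = {
    outer_id: {middle_id: inner["forward"] for middle_id, inner in middle.items()}
    for outer_id, middle in nested.items()
}
print(collapsed)
# {'sample1': {'rep1': 's1_r1_f.fastq'}, 'sample2': {'rep1': 's2_r1_f.fastq'}}
```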
23 changes: 21 additions & 2 deletions lib/galaxy_test/api/test_tools.py
@@ -701,23 +701,42 @@ def test_database_operation_tool_with_pending_inputs(self):
             hdca1_id = self.dataset_collection_populator.create_list_in_history(
                 history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"], wait=True
             ).json()["outputs"][0]["id"]
-            self.dataset_populator.run_tool(
+            run_response = self.dataset_populator.run_tool(
                 tool_id="cat_data_and_sleep",
                 inputs={
                     "sleep_time": 15,
                     "input1": {"batch": True, "values": [{"src": "hdca", "id": hdca1_id}]},
                 },
                 history_id=history_id,
             )
+            output_hdca_id = run_response["implicit_collections"][0]["id"]
             run_response = self.dataset_populator.run_tool(
                 tool_id="__EXTRACT_DATASET__",
                 inputs={
-                    "data_collection": {"src": "hdca", "id": hdca1_id},
+                    "data_collection": {"src": "hdca", "id": output_hdca_id},
                 },
                 history_id=history_id,
             )
             assert run_response["outputs"][0]["state"] != "ok"
 
+    @skip_without_tool("__EXTRACT_DATASET__")
+    def test_extract_dataset_invalid_element_identifier(self):
+        with self.dataset_populator.test_history(require_new=False) as history_id:
+            hdca1_id = self.dataset_collection_populator.create_list_in_history(
+                history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"], wait=True
+            ).json()["outputs"][0]["id"]
+            run_response = self.dataset_populator.run_tool_raw(
+                tool_id="__EXTRACT_DATASET__",
+                inputs={
+                    "data_collection": {"src": "hdca", "id": hdca1_id},
+                    "which": {"which_dataset": "by_index", "index": 100},
+                },
+                history_id=history_id,
+                input_format="21.01",
+            )
+            assert run_response.status_code == 400
+            assert run_response.json()["err_msg"] == "Dataset collection has no element_index with key 100."
 
     @skip_without_tool("__FILTER_FAILED_DATASETS__")
     def test_filter_failed_list(self):
         with self.dataset_populator.test_history(require_new=False) as history_id:
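Outside the test framework, an API client hitting the same code path would now receive the descriptive 400 rather than an unhelpful failure. A rough sketch using `requests` against Galaxy's generic tool-run endpoint; the instance URL, API key, and IDs are placeholders, and the payload shape simply mirrors the test above:

```python
# Rough sketch only: URL, API key, history and collection IDs are placeholders.
import requests

GALAXY_URL = "https://usegalaxy.example.org"  # placeholder Galaxy instance
API_KEY = "your-api-key"                      # placeholder API key
HISTORY_ID = "history-id"                     # an existing history
HDCA_ID = "hdca-id"                           # a list collection in that history

response = requests.post(
    f"{GALAXY_URL}/api/tools",
    params={"key": API_KEY},
    json={
        "tool_id": "__EXTRACT_DATASET__",
        "history_id": HISTORY_ID,
        "input_format": "21.01",
        "inputs": {
            "data_collection": {"src": "hdca", "id": HDCA_ID},
            "which": {"which_dataset": "by_index", "index": 100},
        },
    },
)

# With this change the request fails fast with the collection's own explanation.
print(response.status_code)        # 400
print(response.json()["err_msg"])  # Dataset collection has no element_index with key 100.
```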
