Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add test for optional select with default and fix xsd docs #13565

Draft
wants to merge 13 commits into
base: dev
Choose a base branch
from
20 changes: 14 additions & 6 deletions lib/galaxy/tool_util/verify/interactor.py
Original file line number Diff line number Diff line change
Expand Up @@ -591,11 +591,11 @@ def stage_data_async(
outputs = submit_response["outputs"]
assert len(outputs) > 0, f"Invalid response from server [{submit_response}], expecting an output dataset."
dataset = outputs[0]
hid = dataset["id"]
self.uploads[os.path.basename(fname)] = self.uploads[fname] = self.uploads[name] = {"src": "hda", "id": hid}
assert (
"jobs" in submit_response
), f"Invalid response from server [{submit_response}], expecting jobs in response."
hid = dataset['id']
self.uploads[name] = {"src": "hda", "id": hid}
if fname:
self.uploads[os.path.basename(fname)] = self.uploads[fname] = self.uploads[name]
assert "jobs" in submit_response, f"Invalid response from server [{submit_response}], expecting jobs in response."
jobs = submit_response["jobs"]
assert len(jobs) > 0, f"Invalid response from server [{submit_response}], expecting a job."
return lambda: self.wait_for_job(jobs[0]["id"], history_id, maxseconds=maxseconds)
Expand All @@ -610,12 +610,15 @@ def _ensure_valid_location_in(self, test_data: dict) -> Optional[str]:
def run_tool(self, testdef, history_id, resource_parameters=None) -> RunToolResponse:
# We need to handle the case where we've uploaded a valid compressed file since the upload
# tool will have uncompressed it on the fly.
log.error(f"run_tool self.uploads {self.uploads}")
log.error(f"run_tool testdef.inputs {testdef.inputs}")
resource_parameters = resource_parameters or {}
inputs_tree = testdef.inputs.copy()
for key, value in inputs_tree.items():
values = [value] if not isinstance(value, list) else value
new_values = []
for value in values:
log.error(f"run_tool value {value} tcd {isinstance(value, TestCollectionDef)} in uploads {value in self.uploads}")
if isinstance(value, TestCollectionDef):
hdca_id = self._create_collection(history_id, value)
new_values = [dict(src="hdca", id=hdca_id)]
Expand All @@ -624,17 +627,20 @@ def run_tool(self, testdef, history_id, resource_parameters=None) -> RunToolResp
else:
new_values.append(value)
inputs_tree[key] = new_values

log.error(f"run_tool inputs_tree {inputs_tree}")
if resource_parameters:
inputs_tree["__job_resource|__job_resource__select"] = "yes"
for key, value in resource_parameters.items():
inputs_tree[f"__job_resource|{key}"] = value
log.error(f"run_tool inputs_tree {inputs_tree}")

# HACK: Flatten single-value lists. Required when using expand_grouping
for key, value in inputs_tree.items():
log.error(f"run_tool hack key {key} {value}")
if isinstance(value, list) and len(value) == 1:
inputs_tree[key] = value[0]

log.error(f"run_tool inputs_tree {inputs_tree}")
submit_response = None
for _ in range(DEFAULT_TOOL_TEST_WAIT):
submit_response = self.__submit_tool(
Expand Down Expand Up @@ -819,6 +825,7 @@ def __submit_tool(self, history_id, tool_id, tool_input, extra_data=None, files=
data = dict(
history_id=history_id, tool_id=tool_id, inputs=dumps(tool_input), tool_version=tool_version, **extra_data
)
log.error(f"__submit_tool files {files} data {data}")
return self._post("tools", files=files, data=data)

def ensure_user_with_email(self, email, password=None):
Expand Down Expand Up @@ -936,6 +943,7 @@ def _post(
url = self.get_api_url(path)
kwd = self._prepare_request_params(data=data, files=files, as_json=json, headers=headers)
kwd["timeout"] = kwd.pop("timeout", util.DEFAULT_SOCKET_TIMEOUT)
log.error(f"_post url {url} kwd {kwd}")
return requests.post(url, **kwd)

def _delete(self, path, data=None, key=None, headers=None, admin=False, anon=False, json=False):
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/tool_util/xsd/galaxy.xsd
Original file line number Diff line number Diff line change
Expand Up @@ -1538,8 +1538,8 @@ Data input parameters can be given as a file name. The file should exist in the
The value of a select parameter should be specified as the value of one of the
legal options. If more than one option is selected (`multiple="true"`) they
should be given as a comma-separated list. For optional selects
(`optional="true"`) the case that no option is selected can be specified with
`value=""`.
(`optional="true"`) the case where no option is selected can be specified by
omitting the `value` attribute.

While in general it is preferred to specify the selected cases by their values it
is also possible to specify them by their name (i.e. the content of the
Expand Down
3 changes: 3 additions & 0 deletions lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1792,6 +1792,7 @@ def visit_inputs(self, values, callback):
visit_input_values(self.inputs, values, callback)

def expand_incoming(self, trans, incoming, request_context, input_format="legacy"):
log.error(f"expand_incoming incoming {incoming}")
rerun_remap_job_id = None
if "rerun_remap_job_id" in incoming:
try:
Expand All @@ -1807,6 +1808,7 @@ def expand_incoming(self, trans, incoming, request_context, input_format="legacy
# Fixed set of input parameters may correspond to any number of jobs.
# Expand these out to individual parameters for given jobs (tool executions).
expanded_incomings, collection_info = expand_meta_parameters(trans, self, incoming)
log.error(f"expand_incoming expanded_incomings {expanded_incomings}")

# Remapping a single job to many jobs doesn't make sense, so disable
# remap if multi-runs of tools are being used.
Expand Down Expand Up @@ -1881,6 +1883,7 @@ def handle_input(
to the form or execute the tool (only if 'execute' was clicked and
there were no errors).
"""
log.error(f"handle_input incoming {incoming}")
request_context = proxy_work_context_for_history(trans, history=history)
all_params, all_errors, rerun_remap_job_id, collection_info = self.expand_incoming(
trans=trans, incoming=incoming, request_context=request_context, input_format=input_format
Expand Down
5 changes: 5 additions & 0 deletions lib/galaxy/tools/parameters/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Classes encapsulating Galaxy tool parameters.
"""
import logging

from json import dumps
from typing import (
Expand Down Expand Up @@ -30,6 +31,8 @@
UploadDataset,
)

log = logging.getLogger(__name__)

REPLACE_ON_TRUTHY = object()

# Some tools use the code tag and access the code base, expecting certain tool parameters to be available here.
Expand Down Expand Up @@ -515,6 +518,7 @@ def populate_state(
def _populate_state_legacy(
request_context, inputs, incoming, state, errors, prefix="", context=None, check=True, simple_errors=True
):
log.error(f"_populate_state_legacy inputs {inputs}")
context = ExpressionContext(state, context)
for input in inputs.values():
state[input.name] = input.get_initial_value(request_context, context)
Expand Down Expand Up @@ -631,6 +635,7 @@ def _get_incoming_value(incoming, key, default):
Fetch value from incoming dict directly or check special nginx upload
created variants of this key.
"""
log.error(f"_get_incoming_value incoming {incoming} key {key} default {default}")
if f"__{key}__is_composite" in incoming:
composite_keys = incoming[f"__{key}__keys"].split()
value = dict()
Expand Down
2 changes: 2 additions & 0 deletions lib/galaxy/tools/parameters/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -988,6 +988,7 @@ def get_legal_names(self, trans, other_values):
return {n: v for n, v, _ in self.get_options(trans, other_values)}

def from_json(self, value, trans, other_values=None, require_legal_value=True):
log.error(f"from_json value {value} trans {trans} other_values {other_values} require_legal_value {require_legal_value}")
other_values = other_values or {}
try:
legal_values = self.get_legal_values(trans, other_values, value)
Expand Down Expand Up @@ -1057,6 +1058,7 @@ def from_json(self, value, trans, other_values=None, require_legal_value=True):
)
if is_runtime_value(value):
return None
log.error(f"value {value} legal_values {legal_values}")
if value in legal_values:
return value
elif value in fallback_values:
Expand Down
3 changes: 2 additions & 1 deletion lib/galaxy/webapps/galaxy/services/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ def _create(self, trans: ProvidesHistoryContext, payload, **kwd):

# Set up inputs.
inputs = payload.get("inputs", {})
log.error(f"_create inputs {inputs}")
if not isinstance(inputs, dict):
raise exceptions.RequestParameterInvalidException(f"inputs invalid {inputs}")

Expand All @@ -157,7 +158,7 @@ def _create(self, trans: ProvidesHistoryContext, payload, **kwd):
# TODO: handle dbkeys
params = util.Params(inputs, sanitize=False)
incoming = params.__dict__

log.error(f"_create inputs {incoming}")
# use_cached_job can be passed in via the top-level payload or among the tool inputs.
# I think it should be a top-level parameter, but because the selector is implemented
# as a regular tool parameter we accept both.
Expand Down
35 changes: 27 additions & 8 deletions test/functional/tools/select_optional.xml
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
<tool id="select_optional" name="Optional select" version="1.0.0" profile="20.01">
<command><![CDATA[
echo select_opt $select_opt >> '$output1' &&
echo select_mult_opt $select_mult_opt >> '$output1' &&
echo col_opt $col_opt >> '$output1' &&
echo col_mult_opt $col_mult_opt >> '$output1' &&
echo groups_opt $groups_opt >> '$output1' &&
echo groups_mult_opt $groups_mult_opt >> '$output1' &&
echo build_opt $build_opt >> '$output1' &&
echo build_mult_opt $build_mult_opt >> '$output1'
echo "select_opt $select_opt" >> '$output1' &&
echo "select_mult_opt $select_mult_opt" >> '$output1' &&
echo "col_opt $col_opt" >> '$output1' &&
echo "col_mult_opt $col_mult_opt" >> '$output1' &&
echo "groups_opt $groups_opt" >> '$output1' &&
echo "groups_mult_opt $groups_mult_opt" >> '$output1' &&
echo "build_opt $build_opt" >> '$output1' &&
echo "build_mult_opt $build_mult_opt" >> '$output1' &&
echo "select_opt_wdefault $select_opt_wdefault" >> '$output1'
]]></command>
<inputs>
<!-- verify that it is possible to select no options
Expand Down Expand Up @@ -47,12 +48,21 @@

<param name="build_opt" type="genomebuild" optional="true"/>
<param name="build_mult_opt" type="genomebuild" multiple="true" optional="true"/>

<!-- check if an optional select with an option selected by default can be unselected in tests -->
<param name="select_opt_wdefault" type="select" optional="true">
<option value="">empty</option>
<option value="A" selected="true">A</option>
<option value="B">B</option>
</param>

</inputs>
<outputs>
<data name="output1" format="tabular" />
</outputs>
<tests>
<test>
<param name="select_opt_wdefault"/>
<output name="output1">
<assert_contents>
<has_line line="select_opt None" />
Expand All @@ -63,6 +73,15 @@
<has_line line="groups_mult_opt None" />
<has_line line="build_opt None" />
<has_line line="build_mult_opt None" />
<has_line line="select_opt_wdefault None" />
</assert_contents>
</output>
</test>
<test>
<param name="select_opt_wdefault" value=""/>
<output name="output1">
<assert_contents>
<has_line line="select_opt_wdefault " />
</assert_contents>
</output>
</test>
Expand Down
Loading