Skip to content

Commit

Permalink
Add more complicated workflow run test case.
Browse files Browse the repository at this point in the history
Re-use test_data specification used by many of the API workflow tests and a workflow used by another test. Add abstractions and DOM element for mapping labeled test data to tool form inputs.
  • Loading branch information
jmchilton committed Apr 13, 2018
1 parent 8f529d1 commit aafd9de
Show file tree
Hide file tree
Showing 6 changed files with 118 additions and 63 deletions.
3 changes: 3 additions & 0 deletions client/galaxy/scripts/mvc/tool/tool-form-composite.js
Original file line number Diff line number Diff line change
Expand Up @@ -434,6 +434,9 @@ var View = Backbone.View.extend({
step
)
);
if(step.step_label) {
form.$el.attr("step-label", step.step_label);
}
}
self.forms[step.index] = form;
self._append(self.$steps, form.$el);
Expand Down
65 changes: 4 additions & 61 deletions test/api/test_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
DatasetCollectionPopulator,
DatasetPopulator,
flakey,
load_data_dict,
skip_without_tool,
wait_on,
WorkflowPopulator
Expand Down Expand Up @@ -102,11 +103,8 @@ def _build_ds_map(self, workflow_id, label_map):
ds_map[key] = label_map[label]
return dumps(ds_map)

def _ds_entry(self, hda):
src = 'hda'
if 'history_content_type' in hda and hda['history_content_type'] == "dataset_collection":
src = 'hdca'
return dict(src=src, id=hda["id"])
def _ds_entry(self, history_content):
    # Delegate to the shared populator helper so all API tests build
    # workflow input entries (src/id dicts) the same way.
    return self.dataset_populator.ds_entry(history_content)

def _workflow_inputs(self, uploaded_workflow_id):
workflow_show_resposne = self._get("workflows/%s" % uploaded_workflow_id)
Expand Down Expand Up @@ -137,62 +135,7 @@ def read_test_data(test_dict):
jobs_descriptions = yaml.safe_load(has_workflow)

test_data = jobs_descriptions.get("test_data", {})

label_map = {}
inputs = {}
has_uploads = False

for key, value in test_data.items():
is_dict = isinstance(value, dict)
if is_dict and ("elements" in value or value.get("type", None) in ["list:paired", "list", "paired"]):
elements_data = value.get("elements", [])
elements = []
for element_data in elements_data:
identifier = element_data["identifier"]
input_type = element_data.get("type", "raw")
if input_type == "File":
content = read_test_data(element_data)
else:
content = element_data["content"]
elements.append((identifier, content))
# TODO: make this collection_type
collection_type = value["type"]
new_collection_kwds = {}
if "name" in value:
new_collection_kwds["name"] = value["name"]
if collection_type == "list:paired":
hdca = self.dataset_collection_populator.create_list_of_pairs_in_history(history_id, **new_collection_kwds).json()
elif collection_type == "list":
hdca = self.dataset_collection_populator.create_list_in_history(history_id, contents=elements, **new_collection_kwds).json()
else:
hdca = self.dataset_collection_populator.create_pair_in_history(history_id, contents=elements, **new_collection_kwds).json()
label_map[key] = self._ds_entry(hdca)
inputs[key] = hdca
has_uploads = True
elif is_dict and "type" in value:
input_type = value["type"]
if input_type == "File":
content = read_test_data(value)
new_dataset_kwds = {
"content": content
}
if "name" in value:
new_dataset_kwds["name"] = value["name"]
if "file_type" in value:
new_dataset_kwds["file_type"] = value["file_type"]
hda = self.dataset_populator.new_dataset(history_id, **new_dataset_kwds)
label_map[key] = self._ds_entry(hda)
has_uploads = True
elif input_type == "raw":
label_map[key] = value["value"]
inputs[key] = value["value"]
elif not is_dict:
has_uploads = True
hda = self.dataset_populator.new_dataset(history_id, content=value)
label_map[key] = self._ds_entry(hda)
inputs[key] = hda
else:
raise ValueError("Invalid test_data def %" % test_data)
inputs, label_map, has_uploads = load_data_dict(history_id, test_data, self.dataset_populator, self.dataset_collection_populator)
workflow_request = dict(
history="hist_id=%s" % history_id,
workflow_id=workflow_id,
Expand Down
75 changes: 75 additions & 0 deletions test/base/populators.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,14 @@
from pkg_resources import resource_string
from six import StringIO

from galaxy.tools.verify.test_data import TestDataResolver
from . import api_asserts
from .workflows_format_2 import (
convert_and_import_workflow,
ImporterGalaxyInterface,
)


# Simple workflow that takes an input and call cat wrapper on it.
workflow_str = resource_string(__name__, "data/test_workflow_1.ga")
# Simple workflow that takes an input and filters with random lines twice in a
Expand Down Expand Up @@ -367,6 +369,12 @@ def _get_contents_request(self, history_id, suffix="", data={}):
url = "%s%s" % (url, suffix)
return self._get(url, data=data)

def ds_entry(self, history_content):
    """Build a tool/workflow input reference for a history item.

    Returns a ``{"src": ..., "id": ...}`` dict, using ``hdca`` as the
    source for dataset collections and ``hda`` for plain datasets.
    """
    if history_content.get("history_content_type") == "dataset_collection":
        src = "hdca"
    else:
        src = "hda"
    return {"src": src, "id": history_content["id"]}


class DatasetPopulator(BaseDatasetPopulator):

Expand Down Expand Up @@ -794,6 +802,73 @@ def _create_collection(self, payload):
return create_response


def load_data_dict(history_id, test_data, dataset_populator, dataset_collection_populator):
    """Load a workflow test's ``test_data`` specification into a history.

    Walks ``test_data`` (a mapping of input label -> definition) and creates
    the described datasets and collections in ``history_id`` via the supplied
    populators.

    Returns a tuple ``(inputs, label_map, has_uploads)``:
    - ``inputs``: label -> created content (hda/hdca dict) or raw value,
    - ``label_map``: label -> workflow input entry (``src``/``id`` dict or
      raw value),
    - ``has_uploads``: True if anything was actually uploaded.

    Raises ``ValueError`` for an unrecognized definition shape.
    """

    def read_test_data(test_dict):
        # Resolve a named test-data file and return its text content.
        test_data_resolver = TestDataResolver()
        filename = test_data_resolver.get_filename(test_dict["value"])
        # Use a context manager so the file handle is not leaked.
        with open(filename, "r") as f:
            return f.read()

    inputs = {}
    label_map = {}
    has_uploads = False

    for key, value in test_data.items():
        is_dict = isinstance(value, dict)
        if is_dict and ("elements" in value or value.get("type", None) in ["list:paired", "list", "paired"]):
            # Collection-style definition: build (identifier, content) pairs.
            elements_data = value.get("elements", [])
            elements = []
            for element_data in elements_data:
                identifier = element_data["identifier"]
                input_type = element_data.get("type", "raw")
                if input_type == "File":
                    content = read_test_data(element_data)
                else:
                    content = element_data["content"]
                elements.append((identifier, content))
            # TODO: make this collection_type
            collection_type = value["type"]
            new_collection_kwds = {}
            if "name" in value:
                new_collection_kwds["name"] = value["name"]
            if collection_type == "list:paired":
                hdca = dataset_collection_populator.create_list_of_pairs_in_history(history_id, **new_collection_kwds).json()
            elif collection_type == "list":
                hdca = dataset_collection_populator.create_list_in_history(history_id, contents=elements, **new_collection_kwds).json()
            else:
                hdca = dataset_collection_populator.create_pair_in_history(history_id, contents=elements, **new_collection_kwds).json()
            label_map[key] = dataset_populator.ds_entry(hdca)
            inputs[key] = hdca
            has_uploads = True
        elif is_dict and "type" in value:
            input_type = value["type"]
            if input_type == "File":
                # File-backed dataset: resolve content, honor optional
                # name/file_type overrides.
                content = read_test_data(value)
                new_dataset_kwds = {
                    "content": content
                }
                if "name" in value:
                    new_dataset_kwds["name"] = value["name"]
                if "file_type" in value:
                    new_dataset_kwds["file_type"] = value["file_type"]
                hda = dataset_populator.new_dataset(history_id, **new_dataset_kwds)
                label_map[key] = dataset_populator.ds_entry(hda)
                has_uploads = True
            elif input_type == "raw":
                # Raw (non-dataset) parameter - passed through as-is.
                label_map[key] = value["value"]
                inputs[key] = value["value"]
            # NOTE(review): other "type" values are silently ignored here,
            # matching the pre-existing behavior.
        elif not is_dict:
            # Plain string shorthand: upload the value directly as a dataset.
            has_uploads = True
            hda = dataset_populator.new_dataset(history_id, content=value)
            label_map[key] = dataset_populator.ds_entry(hda)
            inputs[key] = hda
        else:
            # Fixed broken format string ("%" -> "%s") so the intended
            # message is raised instead of "incomplete format".
            raise ValueError("Invalid test_data def %s" % test_data)

    return inputs, label_map, has_uploads


def wait_on_state(state_func, desc="state", skip_states=["running", "queued", "new", "ready"], assert_ok=False, timeout=DEFAULT_TIMEOUT):
def get_state():
response = state_func()
Expand Down
5 changes: 5 additions & 0 deletions test/galaxy_selenium/navigation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -224,6 +224,11 @@ workflows:
new_button: '#new-workflow'
import_button: '#import-workflow'

workflow_run:

selectors:
input_div: "[step-label='${label}'] .select2-container"

workflow_editor:

node:
Expand Down
4 changes: 2 additions & 2 deletions test/selenium_tests/framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -509,9 +509,9 @@ def get_remote_driver():
class SeleniumSessionGetPostMixin:
"""Mixin for adapting Galaxy testing populators helpers to Selenium session backed bioblend."""

def _get(self, route):
def _get(self, route, data={}):
full_url = self.selenium_test_case.build_url("api/" + route, for_selenium=False)
response = requests.get(full_url, cookies=self.selenium_test_case.selenium_to_requests_cookies())
response = requests.get(full_url, data=data, cookies=self.selenium_test_case.selenium_to_requests_cookies())
return response

def _post(self, route, data={}):
Expand Down
29 changes: 29 additions & 0 deletions test/selenium_tests/test_workflow_run.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
import yaml
from base.populators import load_data_dict
from base.workflow_fixtures import (
WORKFLOW_SIMPLE_CAT_TWICE,
WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION,
WORKFLOW_WITH_OLD_TOOL_VERSION,
)

Expand Down Expand Up @@ -39,10 +42,36 @@ def test_execution_with_tool_upgrade(self):
self.assert_warning_message(contains="different versions")
self.screenshot("workflow_run_tool_upgrade")

@selenium_test
@managed_history
def test_execution_with_multiple_inputs(self):
    # Stage the workflow's test_data into the current history, run the
    # workflow through the tool form UI, then verify the output dataset.
    history_id, staged_inputs = self.workflow_run_setup_inputs(WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION)
    self.open_in_workflow_run(WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION)
    self.workflow_run_specify_inputs(staged_inputs)
    self.screenshot("workflow_run_two_inputs")
    self.workflow_run_submit()

    # hid 7 is the expected final output of this workflow.
    self.history_panel_wait_for_hid_ok(7, allowed_force_refreshes=1)
    output = self.dataset_populator.get_history_dataset_content(history_id, hid=7)
    self.assertEqual("10.0\n30.0\n20.0\n40.0\n", output)

def open_in_workflow_run(self, yaml_content):
    # Upload the workflow under a random name, then open its run form.
    self.workflow_run_with_name(self.workflow_upload_yaml_with_random_name(yaml_content))

def workflow_run_setup_inputs(self, content):
    """Stage the workflow YAML's test_data section into the current history.

    Returns ``(history_id, inputs)`` where ``inputs`` maps input labels to
    the staged history contents.
    """
    target_history = self.current_history_id()
    data_spec = yaml.safe_load(content)["test_data"]
    staged, _, _ = load_data_dict(
        target_history, data_spec, self.dataset_populator, self.dataset_collection_populator
    )
    # Block until the uploads settle so the run form can see the inputs.
    self.dataset_populator.wait_for_history(target_history)
    return target_history, staged

def workflow_run_specify_inputs(self, inputs):
    """Select each labeled input's staged dataset in the run form's widgets."""
    run_component = self.components.workflow_run
    for input_label, history_item in inputs.items():
        # Locate the select2 container tagged with this step label.
        select_container = run_component.input_div(label=input_label).wait_for_visible()
        self.select2_set_value(select_container, "%d: " % history_item["hid"])

def workflow_run_with_name(self, name):
self.workflow_index_open()
self.workflow_index_search_for(name)
Expand Down

0 comments on commit aafd9de

Please sign in to comment.