diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py
index 2dfa30f888e0..e7ebf2180bed 100644
--- a/lib/galaxy/managers/workflows.py
+++ b/lib/galaxy/managers/workflows.py
@@ -89,12 +89,14 @@
     visit_input_values,
 )
 from galaxy.tools.parameters.basic import (
-    ConnectedValue,
     DataCollectionToolParameter,
     DataToolParameter,
+)
+from galaxy.tools.parameters.workflow_utils import (
+    ConnectedValue,
     RuntimeValue,
+    workflow_building_modes,
 )
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
 from galaxy.util.hash_util import md5_hash_str
 from galaxy.util.json import (
     safe_dumps,
diff --git a/lib/galaxy/tool_util/verify/__init__.py b/lib/galaxy/tool_util/verify/__init__.py
index 42b6998501de..8bed1f449fc5 100644
--- a/lib/galaxy/tool_util/verify/__init__.py
+++ b/lib/galaxy/tool_util/verify/__init__.py
@@ -31,11 +31,12 @@
 try:
     from PIL import Image
 except ImportError:
-    pass
+    Image = None  # type: ignore[assignment]
 try:
     import tifffile
 except ImportError:
-    pass
+    tifffile = None  # type: ignore[assignment]
+
 from galaxy.tool_util.parser.util import (
     DEFAULT_DELTA,
@@ -193,7 +194,12 @@ def get_filename(filename: str) -> str:
         elif compare == "contains":
             files_contains(local_name, temp_name, attributes=attributes)
         elif compare == "image_diff":
-            files_image_diff(local_name, temp_name, attributes=attributes)
+            if Image and tifffile:
+                files_image_diff(local_name, temp_name, attributes=attributes)
+            else:
+                raise Exception(
+                    "pillow and tifffile are not installed, but required to compare files using the 'image_diff' method"
+                )
         else:
             raise Exception(f"Unimplemented Compare type: {compare}")
     except AssertionError as err:
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index 7b8aa98c0ad3..917464e5b45d 100644
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -142,7 +142,7 @@
 )
 from galaxy.tools.parameters.input_translation import ToolInputTranslator
 from galaxy.tools.parameters.meta import expand_meta_parameters
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.tools.parameters.wrapped_json import json_wrap
 from galaxy.tools.test import parse_tests
 from galaxy.util import (
diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index 7aa459e73dbb..8403e0137bbe 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -33,9 +33,9 @@
 from galaxy.tools.parameters.basic import (
     DataCollectionToolParameter,
     DataToolParameter,
-    RuntimeValue,
     SelectToolParameter,
 )
+from galaxy.tools.parameters.workflow_utils import RuntimeValue
 from galaxy.tools.parameters.wrapped import (
     LegacyUnprefixedDict,
     WrappedParameters,
diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py
index 0b20c0e3c56d..cca565294428 100644
--- a/lib/galaxy/tools/execute.py
+++ b/lib/galaxy/tools/execute.py
@@ -34,7 +34,7 @@
     on_text_for_names,
     ToolExecutionCache,
 )
-from galaxy.tools.parameters.basic import is_runtime_value
+from galaxy.tools.parameters.workflow_utils import is_runtime_value

 if typing.TYPE_CHECKING:
     from galaxy.tools import Tool
diff --git a/lib/galaxy/tools/parameters/__init__.py b/lib/galaxy/tools/parameters/__init__.py
index e42b5f025938..7e72bf3a9eb1 100644
--- a/lib/galaxy/tools/parameters/__init__.py
+++ b/lib/galaxy/tools/parameters/__init__.py
@@ -16,9 +16,7 @@
 from .basic import (
     DataCollectionToolParameter,
     DataToolParameter,
-    is_runtime_value,
     ParameterValueError,
-    runtime_to_json,
     SelectToolParameter,
     ToolParameter,
 )
@@ -29,6 +27,11 @@
     Section,
     UploadDataset,
 )
+from .workflow_utils import (
+    is_runtime_value,
+    runtime_to_json,
+)
+from .wrapped import flat_to_nested_state

 REPLACE_ON_TRUTHY = object()
@@ -515,6 +518,8 @@ def populate_state(
 def _populate_state_legacy(
     request_context, inputs, incoming, state, errors, prefix="", context=None, check=True, simple_errors=True
 ):
+    if context is None:
+        context = flat_to_nested_state(incoming)
     context = ExpressionContext(state, context)
     for input in inputs.values():
         state[input.name] = input.get_initial_value(request_context, context)
@@ -600,8 +605,8 @@ def _populate_state_legacy(
                 for upload_item in input.inputs.values():
                     new_state[upload_item.name] = upload_item.get_initial_value(request_context, context)
                 group_state.append(new_state)
-            for rep_state in group_state:
-                rep_index = rep_state["__index__"]
+            for rep_index, rep_state in enumerate(group_state):
+                rep_index = rep_state.get("__index__", rep_index)
                 rep_prefix = "%s_%d|" % (key, rep_index)
                 _populate_state_legacy(
                     request_context,
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
index 1d00ada2b635..b07667ecdab4 100644
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -42,7 +42,7 @@
 from galaxy.model.dataset_collections import builder
 from galaxy.schema.fetch_data import FilesPayload
 from galaxy.tool_util.parser import get_input_source as ensure_input_source
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.util import (
     sanitize_param,
     string_as_bool,
@@ -61,6 +61,12 @@
 )
 from .dataset_matcher import get_dataset_matcher_factory
 from .sanitize import ToolParameterSanitizer
+from .workflow_utils import (
+    is_runtime_value,
+    runtime_to_json,
+    runtime_to_object,
+    RuntimeValue,
+)

 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
@@ -92,12 +98,6 @@ def contains_workflow_parameter(value, search=False):
     return False


-def is_runtime_value(value):
-    return isinstance(value, RuntimeValue) or (
-        isinstance(value, MutableMapping) and value.get("__class__") in ["RuntimeValue", "ConnectedValue"]
-    )
-
-
 def is_runtime_context(trans, other_values):
     if trans.workflow_building_mode:
         return True
@@ -2780,36 +2780,6 @@ def write_elements_to_collection(has_elements, collection_builder):
     )


-def runtime_to_json(runtime_value):
-    if isinstance(runtime_value, ConnectedValue) or (
-        isinstance(runtime_value, MutableMapping) and runtime_value["__class__"] == "ConnectedValue"
-    ):
-        return {"__class__": "ConnectedValue"}
-    else:
-        return {"__class__": "RuntimeValue"}
-
-
-def runtime_to_object(runtime_value):
-    if isinstance(runtime_value, ConnectedValue) or (
-        isinstance(runtime_value, MutableMapping) and runtime_value["__class__"] == "ConnectedValue"
-    ):
-        return ConnectedValue()
-    else:
-        return RuntimeValue()
-
-
-class RuntimeValue:
-    """
-    Wrapper to note a value that is not yet set, but will be required at runtime.
-    """
-
-
-class ConnectedValue(RuntimeValue):
-    """
-    Wrapper to note a value that is not yet set, but will be inferred from a connection.
-    """
-
-
 def history_item_dict_to_python(value, app, name):
     if isinstance(value, MutableMapping) and "src" in value:
         if value["src"] not in ("hda", "dce", "ldda", "hdca"):
diff --git a/lib/galaxy/tools/parameters/dynamic_options.py b/lib/galaxy/tools/parameters/dynamic_options.py
index 5220e5628672..b8cd20dab37d 100644
--- a/lib/galaxy/tools/parameters/dynamic_options.py
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -28,7 +28,10 @@
     User,
 )
 from galaxy.tools.expressions import do_eval
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import (
+    is_runtime_value,
+    workflow_building_modes,
+)
 from galaxy.util import (
     Element,
     string_as_bool,
@@ -737,16 +740,20 @@ def get_fields(self, trans, other_values):
                 if not hasattr(dataset, "get_file_name"):
                     continue
                 # Ensure parsing dynamic options does not consume more than a megabyte worth memory.
-                path = dataset.get_file_name()
-                if os.path.getsize(path) < 1048576:
-                    with open(path) as fh:
-                        options += self.parse_file_fields(fh)
-                else:
-                    # Pass just the first megabyte to parse_file_fields.
-                    log.warning("Attempting to load options from large file, reading just first megabyte")
-                    with open(path) as fh:
-                        contents = fh.read(1048576)
-                    options += self.parse_file_fields(StringIO(contents))
+                try:
+                    path = dataset.get_file_name()
+                    if os.path.getsize(path) < 1048576:
+                        with open(path) as fh:
+                            options += self.parse_file_fields(fh)
+                    else:
+                        # Pass just the first megabyte to parse_file_fields.
+                        log.warning("Attempting to load options from large file, reading just first megabyte")
+                        with open(path) as fh:
+                            contents = fh.read(1048576)
+                        options += self.parse_file_fields(StringIO(contents))
+                except Exception as e:
+                    log.warning("Could not read contents from %s: %s", dataset, str(e))
+                    continue
         elif self.tool_data_table:
             options = self.tool_data_table.get_fields()
         if trans and trans.user and trans.workflow_building_mode != workflow_building_modes.ENABLED:
@@ -964,6 +971,8 @@ def _get_ref_data(other_values, ref_name):
             list,
         ),
     ):
+        if is_runtime_value(ref):
+            return []
         raise ValueError
     if isinstance(ref, DatasetCollectionElement) and ref.hda:
         ref = ref.hda
diff --git a/lib/galaxy/tools/parameters/grouping.py b/lib/galaxy/tools/parameters/grouping.py
index c53023cf410e..e49ba3de3f4d 100644
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -618,8 +618,8 @@ def get_filenames(context):
             writable_files = d_type.writable_files
             writable_files_offset = 0
             groups_incoming = [None for _ in range(file_count)]
-            for group_incoming in context.get(self.name, []):
-                i = int(group_incoming["__index__"])
+            for i, group_incoming in enumerate(context.get(self.name, [])):
+                i = int(group_incoming.get("__index__", i))
                 groups_incoming[i] = group_incoming
             if d_type.composite_type is not None or force_composite:
                 # handle uploading of composite datatypes
diff --git a/lib/galaxy/tools/parameters/meta.py b/lib/galaxy/tools/parameters/meta.py
index cffb9233b212..f4d3a3ea26cc 100644
--- a/lib/galaxy/tools/parameters/meta.py
+++ b/lib/galaxy/tools/parameters/meta.py
@@ -21,6 +21,7 @@
 )
 from galaxy.util import permutations
 from . import visit_input_values
+from .wrapped import process_key

 log = logging.getLogger(__name__)

@@ -153,31 +154,6 @@ def is_batch(value):
     return WorkflowParameterExpansion(param_combinations, params_keys, input_combinations)


-def process_key(incoming_key, incoming_value, d):
-    key_parts = incoming_key.split("|")
-    if len(key_parts) == 1:
-        # Regular parameter
-        if incoming_key in d and not incoming_value:
-            # In case we get an empty repeat after we already filled in a repeat element
-            return
-        d[incoming_key] = incoming_value
-    elif key_parts[0].rsplit("_", 1)[-1].isdigit():
-        # Repeat
-        input_name, index = key_parts[0].rsplit("_", 1)
-        index = int(index)
-        d.setdefault(input_name, [])
-        newlist = [{} for _ in range(index + 1)]
-        d[input_name].extend(newlist[len(d[input_name]) :])
-        subdict = d[input_name][index]
-        process_key("|".join(key_parts[1:]), incoming_value=incoming_value, d=subdict)
-    else:
-        # Section / Conditional
-        input_name = key_parts[0]
-        subdict = {}
-        d[input_name] = subdict
-        process_key("|".join(key_parts[1:]), incoming_value=incoming_value, d=subdict)
-
-
 ExpandedT = Tuple[List[Dict[str, Any]], Optional[matching.MatchingCollections]]
diff --git a/lib/galaxy/tools/parameters/workflow_building_modes.py b/lib/galaxy/tools/parameters/workflow_building_modes.py
deleted file mode 100644
index ff3be9c7d9fd..000000000000
--- a/lib/galaxy/tools/parameters/workflow_building_modes.py
+++ /dev/null
@@ -1,4 +0,0 @@
-class workflow_building_modes:
-    DISABLED = False
-    ENABLED = True
-    USE_HISTORY = 1
diff --git a/lib/galaxy/tools/parameters/workflow_utils.py b/lib/galaxy/tools/parameters/workflow_utils.py
new file mode 100644
index 000000000000..73fcb688dfa1
--- /dev/null
+++ b/lib/galaxy/tools/parameters/workflow_utils.py
@@ -0,0 +1,43 @@
+from collections.abc import MutableMapping
+
+
+class workflow_building_modes:
+    DISABLED = False
+    ENABLED = True
+    USE_HISTORY = 1
+
+
+def runtime_to_json(runtime_value):
+    if isinstance(runtime_value, ConnectedValue) or (
+        isinstance(runtime_value, MutableMapping) and runtime_value["__class__"] == "ConnectedValue"
+    ):
+        return {"__class__": "ConnectedValue"}
+    else:
+        return {"__class__": "RuntimeValue"}
+
+
+def runtime_to_object(runtime_value):
+    if isinstance(runtime_value, ConnectedValue) or (
+        isinstance(runtime_value, MutableMapping) and runtime_value["__class__"] == "ConnectedValue"
+    ):
+        return ConnectedValue()
+    else:
+        return RuntimeValue()
+
+
+class RuntimeValue:
+    """
+    Wrapper to note a value that is not yet set, but will be required at runtime.
+    """
+
+
+class ConnectedValue(RuntimeValue):
+    """
+    Wrapper to note a value that is not yet set, but will be inferred from a connection.
+    """
+
+
+def is_runtime_value(value):
+    return isinstance(value, RuntimeValue) or (
+        isinstance(value, MutableMapping) and value.get("__class__") in ["RuntimeValue", "ConnectedValue"]
+    )
diff --git a/lib/galaxy/tools/parameters/wrapped.py b/lib/galaxy/tools/parameters/wrapped.py
index 882829af813b..dbcc4d1f0a03 100644
--- a/lib/galaxy/tools/parameters/wrapped.py
+++ b/lib/galaxy/tools/parameters/wrapped.py
@@ -1,5 +1,9 @@
 from collections import UserDict
-from typing import Dict
+from typing import (
+    Any,
+    Dict,
+    List,
+)

 from galaxy.tools.parameters.basic import (
     DataCollectionToolParameter,
@@ -158,4 +162,36 @@ def make_list_copy(from_list):
     return new_list


-__all__ = ("LegacyUnprefixedDict", "WrappedParameters", "make_dict_copy")
+def process_key(incoming_key: str, incoming_value: Any, d: Dict[str, Any]):
+    key_parts = incoming_key.split("|")
+    if len(key_parts) == 1:
+        # Regular parameter
+        if incoming_key in d and not incoming_value:
+            # In case we get an empty repeat after we already filled in a repeat element
+            return
+        d[incoming_key] = incoming_value
+    elif key_parts[0].rsplit("_", 1)[-1].isdigit():
+        # Repeat
+        input_name, _index = key_parts[0].rsplit("_", 1)
+        index = int(_index)
+        d.setdefault(input_name, [])
+        newlist: List[Dict[Any, Any]] = [{} for _ in range(index + 1)]
+        d[input_name].extend(newlist[len(d[input_name]) :])
+        subdict = d[input_name][index]
+        process_key("|".join(key_parts[1:]), incoming_value=incoming_value, d=subdict)
+    else:
+        # Section / Conditional
+        input_name = key_parts[0]
+        subdict = d.get(input_name, {})
+        d[input_name] = subdict
+        process_key("|".join(key_parts[1:]), incoming_value=incoming_value, d=subdict)
+
+
+def flat_to_nested_state(incoming: Dict[str, Any]):
+    nested_state: Dict[str, Any] = {}
+    for key, value in incoming.items():
+        process_key(key, value, nested_state)
+    return nested_state
+
+
+__all__ = ("LegacyUnprefixedDict", "WrappedParameters", "make_dict_copy", "process_key", "flat_to_nested_state")
diff --git a/lib/galaxy/tools/recommendations.py b/lib/galaxy/tools/recommendations.py
index 6ff5f16c5701..bcfbe89b0a8c 100644
--- a/lib/galaxy/tools/recommendations.py
+++ b/lib/galaxy/tools/recommendations.py
@@ -10,7 +10,7 @@
 import yaml

 from galaxy.tools.parameters import populate_state
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.util import DEFAULT_SOCKET_TIMEOUT
 from galaxy.workflow.modules import module_factory
diff --git a/lib/galaxy/util/image_util.py b/lib/galaxy/util/image_util.py
index 4c71be221647..2cf405ae6390 100644
--- a/lib/galaxy/util/image_util.py
+++ b/lib/galaxy/util/image_util.py
@@ -10,7 +10,7 @@
 try:
     from PIL import Image
 except ImportError:
-    Image = None
+    Image = None  # type: ignore[assignment]

 log = logging.getLogger(__name__)
diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py
index b29263c70a5c..686980264f81 100644
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -86,7 +86,7 @@
 from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager
 from galaxy.tools import recommendations
 from galaxy.tools.parameters import populate_state
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.version import VERSION
 from galaxy.web import (
diff --git a/lib/galaxy/webapps/galaxy/controllers/workflow.py b/lib/galaxy/webapps/galaxy/controllers/workflow.py
index 5ace9ae80e93..4f26c72ce804 100644
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -17,7 +17,7 @@
 )
 from galaxy.model.base import transaction
 from galaxy.model.item_attrs import UsesItemRatings
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.util import FILENAME_VALID_CHARS
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.web import url_for
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index 96ed9bdd668d..5e2a9b491356 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -68,16 +68,13 @@
     BaseDataToolParameter,
     BooleanToolParameter,
     ColorToolParameter,
-    ConnectedValue,
     DataCollectionToolParameter,
     DataToolParameter,
     FloatToolParameter,
     HiddenToolParameter,
     IntegerToolParameter,
-    is_runtime_value,
     parameter_types,
     raw_to_galaxy,
-    runtime_to_json,
     SelectToolParameter,
     TextToolParameter,
 )
@@ -86,7 +83,12 @@
     ConditionalWhen,
 )
 from galaxy.tools.parameters.history_query import HistoryQuery
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import (
+    ConnectedValue,
+    is_runtime_value,
+    runtime_to_json,
+    workflow_building_modes,
+)
 from galaxy.tools.parameters.wrapped import make_dict_copy
 from galaxy.util import (
     listify,
@@ -2002,18 +2004,6 @@ def callback(input, prefixed_name, context, **kwargs):
                 and self.trans.workflow_building_mode is workflow_building_modes.USE_HISTORY
             ):
                 if prefixed_name in input_connections_by_name:
-                    connection = input_connections_by_name[prefixed_name]
-                    output_step = next(
-                        output_step for output_step in steps if connection.output_step_id == output_step.id
-                    )
-                    if output_step.type.startswith("data"):
-                        output_inputs = output_step.module.get_runtime_inputs(output_step, connections=connections)
-                        output_value = output_inputs["input"].get_initial_value(self.trans, context)
-                        if input_type == "data" and isinstance(
-                            output_value, self.trans.app.model.HistoryDatasetCollectionAssociation
-                        ):
-                            return output_value.to_hda_representative()
-                        return output_value
                     return ConnectedValue()
                 else:
                     return input.get_initial_value(self.trans, context)
diff --git a/lib/galaxy/workflow/refactor/execute.py b/lib/galaxy/workflow/refactor/execute.py
index 76a019e1968a..d35cbbbc7785 100644
--- a/lib/galaxy/workflow/refactor/execute.py
+++ b/lib/galaxy/workflow/refactor/execute.py
@@ -6,9 +6,9 @@
 from galaxy.exceptions import RequestParameterInvalidException
 from galaxy.tools.parameters import visit_input_values
-from galaxy.tools.parameters.basic import (
+from galaxy.tools.parameters.basic import contains_workflow_parameter
+from galaxy.tools.parameters.workflow_utils import (
     ConnectedValue,
-    contains_workflow_parameter,
     runtime_to_json,
 )
 from .schema import (
diff --git a/lib/galaxy/workflow/steps.py b/lib/galaxy/workflow/steps.py
index 8810e1b78771..b7951434d744 100644
--- a/lib/galaxy/workflow/steps.py
+++ b/lib/galaxy/workflow/steps.py
@@ -3,6 +3,7 @@
 """

 import math
+from itertools import chain

 from galaxy.util.topsort import (
     CycleError,
@@ -58,16 +59,16 @@ def order_workflow_steps(steps, comments):
         else:
            sortable_comments.append(comment)

-    # consider comments to find normalization position
-    min_left_comments = min(comment.position[0] for comment in sortable_comments)
-    min_top_comments = min(comment.position[1] for comment in sortable_comments)
-    min_left = min(min_left_comments, min_left)
-    min_top = min(min_top_comments, min_top)
+    if sortable_comments:
+        # consider comments to find normalization position
+        min_left_comments = min(comment.position[0] for comment in sortable_comments)
+        min_top_comments = min(comment.position[1] for comment in sortable_comments)
+        min_left = min(min_left_comments, min_left)
+        min_top = min(min_top_comments, min_top)

     # normalize comments by min_left and min_top
-    for comment_list in [sortable_comments, freehand_comments]:
-        for comment in comment_list:
-            comment.position = [comment.position[0] - min_left, comment.position[1] - min_top]
+    for comment in chain(sortable_comments, freehand_comments):
+        comment.position = [comment.position[0] - min_left, comment.position[1] - min_top]

     # order by Euclidean distance to the origin
     sortable_comments.sort(key=lambda comment: math.sqrt(comment.position[0] ** 2 + comment.position[1] ** 2))
diff --git a/lib/galaxy_test/api/test_workflows.py b/lib/galaxy_test/api/test_workflows.py
index 3c91c601b523..a5b884ec3b8a 100644
--- a/lib/galaxy_test/api/test_workflows.py
+++ b/lib/galaxy_test/api/test_workflows.py
@@ -903,6 +903,33 @@ def test_update_name_empty(self):
         workflow_dict = self.workflow_populator.download_workflow(workflow["id"])
         assert workflow_dict["name"] == original_name

+    @skip_without_tool("select_from_dataset_in_conditional")
+    def test_workflow_run_form_with_broken_dataset(self):
+        workflow_id = self.workflow_populator.upload_yaml_workflow(
+            """
+class: GalaxyWorkflow
+inputs:
+  dataset: data
+steps:
+  select_from_dataset_in_conditional:
+    tool_id: select_from_dataset_in_conditional
+    in:
+      single: dataset
+    state:
+      cond:
+        cond: single
+        select_single: abc
+        inner_cond:
+          inner_cond: single
+          select_single: abc
+"""
+        )
+        with self.dataset_populator.test_history() as history_id:
+            self.dataset_populator.new_dataset(history_id, content="a", file_type="tabular", wait=True)
+            workflow = self._download_workflow(workflow_id, style="run", history_id=history_id)
+            assert not workflow["has_upgrade_messages"]
+            assert workflow["steps"][1]["inputs"][0]["value"] == {"__class__": "ConnectedValue"}
+
     def test_refactor(self):
         workflow_id = self.workflow_populator.upload_yaml_workflow(
             """
diff --git a/test/functional/tools/sample_tool_conf.xml b/test/functional/tools/sample_tool_conf.xml
index 025dfc93efd2..d63a21c73c4f 100644
--- a/test/functional/tools/sample_tool_conf.xml
+++ b/test/functional/tools/sample_tool_conf.xml
@@ -57,6 +57,7 @@
+        <tool file="select_from_dataset_in_conditional.xml" />
diff --git a/test/functional/tools/select_from_dataset_in_conditional.xml b/test/functional/tools/select_from_dataset_in_conditional.xml
new file mode 100644
index 000000000000..92cde71d7fdf
--- /dev/null
+++ b/test/functional/tools/select_from_dataset_in_conditional.xml
@@ -0,0 +1,60 @@
+    Create dynamic options from data sets
+    '$output'
+    ]]>
diff --git a/test/integration/test_workflow_refactoring.py b/test/integration/test_workflow_refactoring.py
index ba26b2a856ba..4fd5a2d712ce 100644
--- a/test/integration/test_workflow_refactoring.py
+++ b/test/integration/test_workflow_refactoring.py
@@ -21,7 +21,7 @@
     WorkflowStepConnection,
 )
 from galaxy.model.base import transaction
-from galaxy.tools.parameters.workflow_building_modes import workflow_building_modes
+from galaxy.tools.parameters.workflow_utils import workflow_building_modes
 from galaxy.workflow.refactor.schema import RefactorActionExecutionMessageTypeEnum
 from galaxy_test.base.populators import WorkflowPopulator
 from galaxy_test.base.uses_shed_api import UsesShedApi
diff --git a/test/unit/app/tools/test_parameter_parsing.py b/test/unit/app/tools/test_parameter_parsing.py
index b0b030ea9d5a..28d7565ff1a8 100644
--- a/test/unit/app/tools/test_parameter_parsing.py
+++ b/test/unit/app/tools/test_parameter_parsing.py
@@ -3,7 +3,7 @@
     Dict,
 )

-from galaxy.tools.parameters.meta import process_key
+from galaxy.tools.parameters.wrapped import process_key

 from .util import BaseParameterTestCase
diff --git a/test/unit/app/tools/test_select_parameters.py b/test/unit/app/tools/test_select_parameters.py
index 1f3bff215c96..656325490a95 100644
--- a/test/unit/app/tools/test_select_parameters.py
+++ b/test/unit/app/tools/test_select_parameters.py
@@ -4,7 +4,7 @@
 from galaxy import model
 from galaxy.model.base import transaction
-from galaxy.tools.parameters import basic
+from galaxy.tools.parameters.workflow_utils import RuntimeValue

 from .util import BaseParameterTestCase
@@ -36,7 +36,7 @@ def test_unvalidated_datasets(self):
         self.options_xml = """"""
         self.trans.workflow_building_mode = True
         assert isinstance(
-            self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": basic.RuntimeValue()}),
+            self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": RuntimeValue()}),
             model.HistoryDatasetAssociation,
         )