diff --git a/planemo/commands/cmd_create_alias.py b/planemo/commands/cmd_create_alias.py
new file mode 100644
index 000000000..c255b57a4
--- /dev/null
+++ b/planemo/commands/cmd_create_alias.py
@@ -0,0 +1,37 @@
+"""Module describing the planemo ``create_alias`` command."""
+import click
+
+from planemo import options
+from planemo.cli import command_function
+from planemo.galaxy import profiles
+from planemo.io import info
+
+try:
+    import namesgenerator
+except ImportError:
+    namesgenerator = None
+
+
+@click.command('create_alias')
+@click.argument(
+    "obj",
+    metavar="OBJ",
+    type=click.STRING,
+)
+@options.alias_option()
+@options.profile_option(required=True)
+@command_function
+def cli(ctx, alias, obj, profile, **kwds):
+    """
+    Add an alias for a path or a workflow or dataset ID. Aliases are associated with a particular planemo profile.
+    """
+    if not alias:
+        if not namesgenerator:
+            raise ImportError(("Random generation of aliases requires installation of the namesgenerator package. "
+                               "Either install this, or specify the alias name with --alias."))
+        alias = namesgenerator.get_random_name()
+
+    exit_code = profiles.create_alias(ctx, alias, obj, profile)
+    info("Alias {} created.".format(alias))
+    ctx.exit(exit_code)
+    return
diff --git a/planemo/commands/cmd_delete_alias.py b/planemo/commands/cmd_delete_alias.py
new file mode 100644
index 000000000..acb549c87
--- /dev/null
+++ b/planemo/commands/cmd_delete_alias.py
@@ -0,0 +1,31 @@
+"""Module describing the planemo ``delete_alias`` command."""
+import click
+
+from planemo import options
+from planemo.cli import command_function
+from planemo.galaxy import profiles
+from planemo.io import error, info
+
+try:
+    from tabulate import tabulate
+except ImportError:
+    tabulate = None  # type: ignore
+
+
+@click.command('delete_alias')
+@options.alias_option(required=True)
+@options.profile_option(required=True)
+@command_function
+def cli(ctx, alias, profile, **kwds):
+    """
+    Delete an alias for a path or a workflow or dataset ID. Aliases are associated with a particular planemo profile.
+    """
+    info("Looking for profiles...")
+    exit_code = profiles.delete_alias(ctx, alias, profile)
+    if exit_code == 0:
+        info('Alias {} was successfully deleted from profile {}'.format(alias, profile))
+    else:
+        error('Alias {} does not exist, so was not deleted from profile {}'.format(alias, profile))
+
+    ctx.exit(exit_code)
+    return
diff --git a/planemo/commands/cmd_list_alias.py b/planemo/commands/cmd_list_alias.py
new file mode 100644
index 000000000..c37eeeec1
--- /dev/null
+++ b/planemo/commands/cmd_list_alias.py
@@ -0,0 +1,34 @@
+"""Module describing the planemo ``list_alias`` command."""
+import json
+
+import click
+
+from planemo import options
+from planemo.cli import command_function
+from planemo.galaxy import profiles
+from planemo.io import info
+
+try:
+    from tabulate import tabulate
+except ImportError:
+    tabulate = None  # type: ignore
+
+
+@click.command('list_alias')
+@options.profile_option(required=True)
+@command_function
+def cli(ctx, profile, **kwds):
+    """
+    List aliases for a path or a workflow or dataset ID. Aliases are associated with a particular planemo profile.
+ """ + info("Looking for profiles...") + aliases = profiles.list_alias(ctx, profile) + if tabulate: + print(tabulate({"Alias": aliases.keys(), "Object": aliases.values()}, headers="keys")) + else: + print(json.dumps(aliases, indent=4, sort_keys=True)) + + info("{} aliases were found for profile {}.".format(len(aliases), profile)) + + ctx.exit(0) + return diff --git a/planemo/commands/cmd_list_invocations.py b/planemo/commands/cmd_list_invocations.py new file mode 100644 index 000000000..62e36a811 --- /dev/null +++ b/planemo/commands/cmd_list_invocations.py @@ -0,0 +1,61 @@ +"""Module describing the planemo ``list_invocations`` command.""" +import json + +import click + +from planemo import options +from planemo.cli import command_function +from planemo.galaxy import profiles +from planemo.galaxy.api import get_invocations +from planemo.galaxy.profiles import translate_alias +from planemo.io import error, info + +try: + from tabulate import tabulate +except ImportError: + tabulate = None # type: ignore + + +@click.command('list_invocations') +@click.argument( + "workflow_id", + type=click.STRING, +) +@options.profile_option(required=True) +@command_function +def cli(ctx, workflow_id, **kwds): + """ + Get a list of invocations for a particular workflow ID or alias. + """ + workflow_id = translate_alias(ctx, workflow_id, kwds.get('profile')) + info("Looking for invocations for workflow {}...".format(workflow_id)) + workflow_id = profiles.translate_alias(ctx, workflow_id, kwds.get('profile')) + profile = profiles.ensure_profile(ctx, kwds.get('profile')) + + invocations = get_invocations(url=profile['galaxy_url'], key=profile['galaxy_admin_key'] or profile['galaxy_user_key'], workflow_id=workflow_id) + if tabulate: + state_colors = { + 'ok': '\033[92m', # green + 'running': '\033[93m', # yellow + 'error': '\033[91m', # red + 'paused': '\033[96m', # cyan + 'deleted': '\033[95m', # magenta + 'deleted_new': '\033[95m', # magenta + 'new': '\033[96m', # cyan + 'queued': '\033[93m', # yellow + } + print(tabulate({ + "Invocation ID": invocations.keys(), + "Jobs status": [', '.join(['{}{} jobs {}\033[0m'.format(state_colors[k], v, k) for k, v in inv['states'].items()] + ) for inv in invocations.values()], + "Invocation report URL": ['{}/workflows/invocations/report?id={}'.format(profile['galaxy_url'].strip('/'), inv_id + ) for inv_id in invocations], + "History URL": ['{}/histories/view?id={}'.format(profile['galaxy_url'].strip('/'), invocations[inv_id]['history_id'] + ) for inv_id in invocations] + }, headers="keys")) + else: + error("The tabulate package is not installed, invocations could not be listed correctly.") + print(json.dumps(invocations, indent=4, sort_keys=True)) + info("{} invocations found.".format(len(invocations))) + + return diff --git a/planemo/commands/cmd_profile_create.py b/planemo/commands/cmd_profile_create.py index 74f614268..6f6660d38 100644 --- a/planemo/commands/cmd_profile_create.py +++ b/planemo/commands/cmd_profile_create.py @@ -13,6 +13,9 @@ @options.profile_database_options() @options.serve_engine_option() @options.docker_config_options() +@options.galaxy_url_option() +@options.galaxy_user_key_option() +@options.galaxy_admin_key_option() @command_function def cli(ctx, profile_name, **kwds): """Create a profile.""" diff --git a/planemo/commands/cmd_profile_list.py b/planemo/commands/cmd_profile_list.py index 815ab87ea..4a1f54d0c 100644 --- a/planemo/commands/cmd_profile_list.py +++ b/planemo/commands/cmd_profile_list.py @@ -5,11 +5,15 @@ from planemo.cli import 
 from planemo.cli import command_function
 from planemo.galaxy import profiles
+from planemo.io import info
 
 
 @click.command('profile_list')
 @command_function
 def cli(ctx, **kwds):
     """List configured profile names."""
+    info("Looking for profiles...")
     profile_names = profiles.list_profiles(ctx, **kwds)
-    print(profile_names)
+    for profile in profile_names:
+        print(profile)
+    info("{} configured profiles are available.".format(len(profile_names)))
diff --git a/planemo/commands/cmd_run.py b/planemo/commands/cmd_run.py
index 23b2733fb..05c63e5c7 100644
--- a/planemo/commands/cmd_run.py
+++ b/planemo/commands/cmd_run.py
@@ -2,6 +2,7 @@
 from __future__ import print_function
 
 import json
+import os
 
 import click
 from galaxy.util import unicodify
@@ -9,12 +10,14 @@
 from planemo import options
 from planemo.cli import command_function
 from planemo.engine import engine_context
+from planemo.galaxy.profiles import translate_alias
 from planemo.io import warn
+from planemo.runnable import for_id, for_path
 from planemo.tools import uri_to_path
 
 
 @click.command('run')
-@options.required_tool_arg(allow_uris=True)
+@options.required_runnable_arg()
 @options.required_job_arg()
 @options.galaxy_run_options()
 @options.galaxy_config_options()
@@ -24,14 +27,20 @@
 @options.run_output_json_option()
 @options.engine_options()
 @command_function
-def cli(ctx, uri, job_path, **kwds):
+def cli(ctx, runnable_identifier, job_path, **kwds):
     """Planemo command for running tools and jobs.
 
     \b
     % planemo run cat1-tool.cwl cat-job.json
     """
-    path = uri_to_path(ctx, uri)
-    # TODO: convert UI to runnable and do a better test of cwl.
+    runnable_identifier = translate_alias(ctx, runnable_identifier, kwds.get('profile'))
+    path = uri_to_path(ctx, runnable_identifier)
+    if os.path.exists(path):
+        runnable = for_path(path)
+    else:  # assume galaxy workflow id
+        runnable = for_id(runnable_identifier)
+
+    # TODO: do a better test of cwl.
     is_cwl = path.endswith(".cwl")
     kwds["cwl"] = is_cwl
     if kwds.get("engine", None) is None:
@@ -41,16 +50,12 @@ def cli(ctx, uri, job_path, **kwds):
             kwds["engine"] = "external_galaxy"
         else:
             kwds["engine"] = "galaxy"
-
     with engine_context(ctx, **kwds) as engine:
-        run_result = engine.run(path, job_path)
-
+        run_result = engine.run(runnable, job_path)
     if not run_result.was_successful:
         warn("Run failed [%s]" % unicodify(run_result))
         ctx.exit(1)
-    outputs_dict = run_result.outputs_dict
-    print(outputs_dict)
     output_json = kwds.get("output_json", None)
     if output_json:
         with open(output_json, "w") as f:
diff --git a/planemo/commands/cmd_test.py b/planemo/commands/cmd_test.py
index b37195ed8..c4054551a 100644
--- a/planemo/commands/cmd_test.py
+++ b/planemo/commands/cmd_test.py
@@ -32,10 +32,6 @@
     "instances to limit generated traffic.",
     default="0",
 )
-@click.option(
-    "--history_name",
-    help="Name for history (if a history is generated as part of testing.)"
-)
 @options.galaxy_target_options()
 @options.galaxy_config_options()
 @options.test_options()
diff --git a/planemo/commands/cmd_workflow_edit.py b/planemo/commands/cmd_workflow_edit.py
index ceceb194d..7d8904f7a 100644
--- a/planemo/commands/cmd_workflow_edit.py
+++ b/planemo/commands/cmd_workflow_edit.py
@@ -1,4 +1,6 @@
 """Module describing the planemo ``workflow_edit`` command."""
+import os
+
 import click
 
 from planemo import options
@@ -7,8 +9,10 @@
     engine_context,
     is_galaxy_engine,
 )
+from planemo.galaxy.profiles import translate_alias
 from planemo.galaxy.serve import sleep_for_serve
 from planemo.runnable import (
+    for_id,
     for_path,
 )
@@ -17,17 +21,23 @@
 @options.required_workflow_arg()
 @options.galaxy_serve_options()
 @command_function
-def cli(ctx, workflow_path, output=None, force=False, **kwds):
+def cli(ctx, workflow_identifier, output=None, force=False, **kwds):
     """Open a synchronized Galaxy workflow editor.
     """
     assert is_galaxy_engine(**kwds)
+    workflow_identifier = translate_alias(ctx, workflow_identifier, kwds.get('profile'))
+    if os.path.exists(workflow_identifier):
+        runnable = for_path(workflow_identifier)
+    else:  # assume galaxy workflow id
+        runnable = for_id(workflow_identifier)
+
     kwds["workflows_from_path"] = True
-    runnable = for_path(workflow_path)
     with engine_context(ctx, **kwds) as galaxy_engine:
         with galaxy_engine.ensure_runnables_served([runnable]) as config:
-            workflow_id = config.workflow_id(workflow_path)
+            workflow_id = config.workflow_id_for_runnable(runnable)
             url = "%s/workflow/editor?id=%s" % (config.galaxy_url, workflow_id)
 
             click.launch(url)
 
-            sleep_for_serve()
+            if kwds["engine"] != "external_galaxy":
+                sleep_for_serve()
diff --git a/planemo/commands/cmd_workflow_job_init.py b/planemo/commands/cmd_workflow_job_init.py
index 407b485a3..9b5843d7e 100644
--- a/planemo/commands/cmd_workflow_job_init.py
+++ b/planemo/commands/cmd_workflow_job_init.py
@@ -13,7 +13,7 @@
 @options.force_option()
 @options.workflow_output_artifact()
 @command_function
-def cli(ctx, workflow_path, output=None, **kwds):
+def cli(ctx, workflow_identifier, output=None, **kwds):
     """Initialize a Galaxy workflow job description for supplied workflow.
 
     Be sure to your lint your workflow with ``workflow_lint`` before calling this
@@ -25,9 +25,9 @@ def cli(ctx, workflow_path, output=None, **kwds):
     as well so this command may be renamed to to job_init at something along
     those lines at some point.
""" - job = job_template(workflow_path) + job = job_template(workflow_identifier) if output is None: - output = new_workflow_associated_path(workflow_path, suffix="job") + output = new_workflow_associated_path(workflow_identifier, suffix="job") if not can_write_to_path(output, **kwds): ctx.exit(1) with open(output, "w") as f_job: diff --git a/planemo/commands/cmd_workflow_test_init.py b/planemo/commands/cmd_workflow_test_init.py index 478fcf44c..20b0237fa 100644 --- a/planemo/commands/cmd_workflow_test_init.py +++ b/planemo/commands/cmd_workflow_test_init.py @@ -20,28 +20,28 @@ @options.workflow_output_artifact() @options.split_job_and_test() @command_function -def cli(ctx, workflow_path, output=None, split_test=False, **kwds): +def cli(ctx, workflow_identifier, output=None, split_test=False, **kwds): """Initialize a Galaxy workflow test description for supplied workflow. Be sure to your lint your workflow with ``workflow_lint`` before calling this to ensure inputs and outputs comply with best practices that make workflow testing easier. """ - path_basename = os.path.basename(workflow_path) - job = job_template(workflow_path) + path_basename = os.path.basename(workflow_identifier) + job = job_template(workflow_identifier) if output is None: - output = new_workflow_associated_path(workflow_path) - job_output = new_workflow_associated_path(workflow_path, suffix="job1") + output = new_workflow_associated_path(workflow_identifier) + job_output = new_workflow_associated_path(workflow_identifier, suffix="job1") if not can_write_to_path(output, **kwds): ctx.exit(1) test_description = [{ 'doc': 'Test outline for %s' % path_basename, 'job': job, - 'outputs': output_stubs_for_workflow(workflow_path), + 'outputs': output_stubs_for_workflow(workflow_identifier), }] if split_test: - job_output = new_workflow_associated_path(workflow_path, suffix="job1") + job_output = new_workflow_associated_path(workflow_identifier, suffix="job1") if not can_write_to_path(job_output, **kwds): ctx.exit(1) diff --git a/planemo/engine/interface.py b/planemo/engine/interface.py index 0f4178dd7..e678e9b95 100644 --- a/planemo/engine/interface.py +++ b/planemo/engine/interface.py @@ -12,7 +12,6 @@ from planemo.io import error from planemo.runnable import ( cases, - for_path, RunnableType, ) from planemo.test.results import StructuredData @@ -53,9 +52,8 @@ def can_run(self, runnable): def cleanup(self): """Default no-op cleanup method.""" - def run(self, path, job_path): + def run(self, runnable, job_path): """Run a job using a compatible artifact (workflow or tool).""" - runnable = for_path(path) self._check_can_run(runnable) run_response = self._run(runnable, job_path) return run_response diff --git a/planemo/galaxy/activity.py b/planemo/galaxy/activity.py index f789f8b53..e0fe708d5 100644 --- a/planemo/galaxy/activity.py +++ b/planemo/galaxy/activity.py @@ -118,7 +118,6 @@ def _execute(ctx, config, runnable, job_path, **kwds): except Exception: ctx.vlog("Problem with staging in data for Galaxy activities...") raise - if runnable.type in [RunnableType.galaxy_tool, RunnableType.cwl_tool]: response_class = GalaxyToolRunResponse tool_id = _verified_tool_id(runnable, user_gi) @@ -154,8 +153,8 @@ def _execute(ctx, config, runnable, job_path, **kwds): summarize_history(ctx, user_gi, history_id) elif runnable.type in [RunnableType.galaxy_workflow, RunnableType.cwl_workflow]: response_class = GalaxyWorkflowRunResponse - workflow_id = config.workflow_id(runnable.path) - ctx.vlog("Found Galaxy workflow ID [%s] for path [%s]" % 
-        ctx.vlog("Found Galaxy workflow ID [%s] for path [%s]" % (workflow_id, runnable.path))
+        workflow_id = config.workflow_id_for_runnable(runnable)
+        ctx.vlog("Found Galaxy workflow ID [%s] for URI [%s]" % (workflow_id, runnable.uri))
         # TODO: Use the following when BioBlend 0.14 is released
         # invocation = user_gi.worklfows.invoke_workflow(
         #     workflow_id,
@@ -344,7 +343,8 @@ def get_dataset(dataset_details, filename=None):
             return {"path": destination, "basename": basename}
 
         ctx.vlog("collecting outputs to directory %s" % output_directory)
-        for runnable_output in get_outputs(self._runnable):
+
+        for runnable_output in get_outputs(self._runnable, gi=self._user_gi):
             output_id = runnable_output.get_id()
             if not output_id:
                 ctx.vlog("Workflow output identified without an ID (label), skipping")
@@ -585,11 +585,15 @@ def _tool_id(tool_path):
 
 def _history_id(gi, **kwds):
     history_id = kwds.get("history_id", None)
     if history_id is None:
-        history_name = kwds.get("history_name", DEFAULT_HISTORY_NAME)
+        history_name = kwds.get("history_name", DEFAULT_HISTORY_NAME) or DEFAULT_HISTORY_NAME
         history_id = gi.histories.create_history(history_name)["id"]
     return history_id
 
 
+def get_dict_from_workflow(gi, workflow_id):
+    return gi.workflows.export_workflow_dict(workflow_id)
+
+
 def _wait_for_invocation(ctx, gi, history_id, workflow_id, invocation_id, polling_backoff=0):
     def state_func():
diff --git a/planemo/galaxy/api.py b/planemo/galaxy/api.py
index df8154978..f2cd822c8 100644
--- a/planemo/galaxy/api.py
+++ b/planemo/galaxy/api.py
@@ -23,6 +23,19 @@ def gi(port=None, url=None, key=None):
     )
 
 
+def test_credentials_valid(port=None, url=None, key=None, is_admin=False):
+    """Test if provided API credentials are valid"""
+    test_gi = gi(port, url, key)
+    try:
+        current_user = test_gi.users.get_current_user()
+        if is_admin:
+            return current_user['is_admin']
+        else:
+            return True
+    except Exception:
+        return False
+
+
 def user_api_key(admin_gi):
     """Use an admin authenticated account to generate a user API key."""
     ensure_module()
@@ -98,6 +111,15 @@ def summarize_history(ctx, gi, history_id):
         print("|")
 
 
+def get_invocations(url, key, workflow_id):
+    inv_gi = gi(None, url, key)
+    invocations = inv_gi.workflows.get_invocations(workflow_id)
+    return {invocation['id']: {
+        'states': inv_gi.invocations.get_invocation_summary(invocation['id'])['states'],
+        'history_id': invocation['history_id']}
+        for invocation in invocations}
+
+
 def _format_for_summary(blob, empty_message, prefix="| "):
     contents = "\n".join(["%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r")])
     return contents or "%s*%s*" % (prefix, empty_message)
diff --git a/planemo/galaxy/config.py b/planemo/galaxy/config.py
index d52840e9b..43b6b94de 100644
--- a/planemo/galaxy/config.py
+++ b/planemo/galaxy/config.py
@@ -24,6 +24,7 @@
 from planemo.config import OptionSource
 from planemo.deps import ensure_dependency_resolvers_conf_configured
 from planemo.docker import docker_host_args
+from planemo.galaxy.workflows import remote_runnable_to_workflow_id
 from planemo.io import (
     communicate,
     kill_pid_file,
@@ -757,7 +758,7 @@ def ready():
 
     def install_workflows(self):
         for runnable in self.runnables:
-            if runnable.type.name in ["galaxy_workflow", "cwl_workflow"]:
+            if runnable.type.name in ["galaxy_workflow", "cwl_workflow"] and not runnable.is_remote_workflow_uri:
                 self._install_workflow(runnable)
 
     def _install_workflow(self, runnable):
@@ -778,6 +779,13 @@ def _install_workflow(self, runnable):
         )
         self._workflow_ids[runnable.path] = workflow["id"]
 
+    def workflow_id_for_runnable(self, runnable):
+        if runnable.is_remote_workflow_uri:
+            workflow_id = remote_runnable_to_workflow_id(runnable)
+        else:
+            workflow_id = self.workflow_id(runnable.path)
+        return workflow_id
+
     def workflow_id(self, path):
         return self._workflow_ids[path]
diff --git a/planemo/galaxy/profiles.py b/planemo/galaxy/profiles.py
index 95a20dee4..2aaaa52a3 100644
--- a/planemo/galaxy/profiles.py
+++ b/planemo/galaxy/profiles.py
@@ -9,10 +9,8 @@
 
 from galaxy.util.commands import which
 
-from planemo.config import (
-    OptionSource,
-)
 from planemo.database import create_database_source
+from planemo.galaxy.api import test_credentials_valid
 from .config import DATABASE_LOCATION_TEMPLATE
 
 PROFILE_OPTIONS_JSON_NAME = "planemo_profile_options.json"
@@ -34,14 +32,16 @@ def delete_profile(ctx, profile_name, **kwds):
     """Delete profile with the specified name."""
     profile_directory = _profile_directory(ctx, profile_name)
     profile_options = _read_profile_options(profile_directory)
-    database_type = profile_options.get("database_type")
-    kwds["database_type"] = database_type
-    if database_type != "sqlite":
-        database_source = create_database_source(**kwds)
-        database_identifier = _profile_to_database_identifier(profile_name)
-        database_source.delete_database(
-            database_identifier,
-        )
+    profile_options, profile_options_path = _load_profile_to_json(ctx, profile_name)
+    if profile_options["engine"] != 'external_galaxy':
+        database_type = profile_options.get("database_type")
+        kwds["database_type"] = database_type
+        if database_type != "sqlite":
+            database_source = create_database_source(**kwds)
+            database_identifier = _profile_to_database_identifier(profile_name)
+            database_source.delete_database(
+                database_identifier,
+            )
     shutil.rmtree(profile_directory)
@@ -56,7 +56,14 @@ def create_profile(ctx, profile_name, **kwds):
         raise Exception(message)
 
     os.makedirs(profile_directory)
-    create_for_engine = _create_profile_docker if engine_type == "docker_galaxy" else _create_profile_local
+
+    if engine_type == "docker_galaxy":
+        create_for_engine = _create_profile_docker
+    elif engine_type == "external_galaxy" or kwds.get("galaxy_url"):
+        create_for_engine = _create_profile_external
+    else:
+        create_for_engine = _create_profile_local
+
     stored_profile_options = create_for_engine(ctx, profile_directory, profile_name, kwds)
     profile_options_path = _stored_profile_options_path(profile_directory)
@@ -100,6 +107,20 @@ def _create_profile_local(ctx, profile_directory, profile_name, kwds):
     }
 
 
+def _create_profile_external(ctx, profile_directory, profile_name, kwds):
+    url = kwds.get("galaxy_url")
+    api_key = kwds.get("galaxy_admin_key") or kwds.get("galaxy_user_key")
+    if test_credentials_valid(url=url, key=api_key, is_admin=kwds.get("galaxy_admin_key")):
+        return {
+            "galaxy_url": url,
+            "galaxy_user_key": kwds.get("galaxy_user_key"),
+            "galaxy_admin_key": kwds.get("galaxy_admin_key"),
+            "engine": "external_galaxy",
+        }
+    else:
+        raise ConnectionError('The credentials provided for an external Galaxy instance are not valid.')
+
+
 def ensure_profile(ctx, profile_name, **kwds):
     """Ensure a Galaxy profile exists and return profile defaults."""
     if not profile_exists(ctx, profile_name, **kwds):
@@ -108,16 +129,60 @@ def ensure_profile(ctx, profile_name, **kwds):
     return _profile_options(ctx, profile_name, **kwds)
 
 
+def create_alias(ctx, alias, obj, profile_name, **kwds):
+    profile_options, profile_options_path = _load_profile_to_json(ctx, profile_name)
+
+    if profile_options.get('aliases'):
+        profile_options['aliases'][alias] = obj
+    else:  # no aliases yet defined
+        profile_options['aliases'] = {alias: obj}
+
+    with open(profile_options_path, 'w') as f:
+        json.dump(profile_options, f)
+
+    return 0
+
+
+def list_alias(ctx, profile_name, **kwds):
+    profile_options, _ = _load_profile_to_json(ctx, profile_name)
+    return profile_options.get('aliases', {})
+
+
+def delete_alias(ctx, alias, profile_name, **kwds):
+    profile_options, profile_options_path = _load_profile_to_json(ctx, profile_name)
+    if alias not in profile_options.get('aliases', {}):
+        return 1
+    else:
+        del profile_options['aliases'][alias]
+
+    with open(profile_options_path, 'w') as f:
+        json.dump(profile_options, f)
+
+    return 0
+
+
+def translate_alias(ctx, alias, profile_name):
+    if not profile_name:
+        return alias
+    aliases = _load_profile_to_json(ctx, profile_name)[0].get('aliases', {})
+    return aliases.get(alias, alias)
+
+
+def _load_profile_to_json(ctx, profile_name):
+    if not profile_exists(ctx, profile_name):
+        raise Exception("That profile does not exist. Create it with `planemo profile_create`")
+    profile_directory = _profile_directory(ctx, profile_name)
+    profile_options_path = _stored_profile_options_path(profile_directory)
+    with open(profile_options_path) as f:
+        profile_options = json.load(f)
+    return profile_options, profile_options_path
+
+
 def _profile_options(ctx, profile_name, **kwds):
     profile_directory = _profile_directory(ctx, profile_name)
     profile_options = _read_profile_options(profile_directory)
-    specified_engine_type = kwds.get("engine", "galaxy")
-    profile_engine_type = profile_options["engine"]
-    if specified_engine_type != profile_engine_type:
-        if ctx.get_option_source("engine") == OptionSource.cli:
-            raise Exception("Configured profile engine type [%s] does not match specified engine type [%s].")
-
-    if profile_engine_type == "docker_galaxy":
+    if profile_options["engine"] == "docker_galaxy":
         engine_options = dict(
             export_directory=os.path.join(profile_directory, "export")
         )
diff --git a/planemo/galaxy/workflows.py b/planemo/galaxy/workflows.py
index 8fcc22a53..78ed1066e 100644
--- a/planemo/galaxy/workflows.py
+++ b/planemo/galaxy/workflows.py
@@ -14,12 +14,12 @@
 from planemo.io import warn
 
 FAILED_REPOSITORIES_MESSAGE = "Failed to install one or more repositories."
+GALAXY_WORKFLOWS_PREFIX = "gxprofile://workflows/"
 
 
 def load_shed_repos(runnable):
     if runnable.type.name != "galaxy_workflow":
         return []
-
     path = runnable.path
     if path.endswith(".ga"):
         generate_tool_list_from_ga_workflow_files.generate_tool_list_from_workflow([path], "Tools from workflows", "tools.yml")
@@ -66,13 +66,8 @@ def import_workflow(path, admin_gi, user_gi, from_path=False):
         workflow = _raw_dict(path, importer)
         return user_gi.workflows.import_workflow_dict(workflow)
     else:
-        # TODO: Update bioblend to allow from_path.
-        path = os.path.abspath(path)
-        payload = dict(
-            from_path=path
-        )
-        workflows_url = user_gi.url + '/workflows'
-        workflow = user_gi.workflows._post(payload, url=workflows_url)
+        workflow = user_gi.workflows.import_workflow_from_local_path(path)
         return workflow
 
 
@@ -112,9 +107,21 @@ def register_tool_ids(tool_ids, workflow):
 WorkflowOutput = namedtuple("WorkflowOutput", ["order_index", "output_name", "label"])
 
 
-def describe_outputs(path):
+def remote_runnable_to_workflow_id(runnable):
+    assert runnable.is_remote_workflow_uri
+    workflow_id = runnable.uri[len(GALAXY_WORKFLOWS_PREFIX):]
+    return workflow_id
+
+
+def describe_outputs(runnable, gi=None):
     """Return a list of :class:`WorkflowOutput` objects for target workflow."""
-    workflow = _raw_dict(path)
+    if runnable.uri.startswith(GALAXY_WORKFLOWS_PREFIX):
+        workflow_id = remote_runnable_to_workflow_id(runnable)
+        assert gi is not None
+        workflow = get_dict_from_workflow(gi, workflow_id)
+    else:
+        workflow = _raw_dict(runnable.path)
+
     outputs = []
     for (order_index, step) in workflow["steps"].items():
         step_outputs = step.get("workflow_outputs", [])
@@ -228,6 +235,10 @@ def new_workflow_associated_path(workflow_path, suffix="tests"):
     return base + sep + suffix + "." + ext
 
 
+def get_dict_from_workflow(gi, workflow_id):
+    return gi.workflows.export_workflow_dict(workflow_id)
+
+
 __all__ = (
     "import_workflow",
     "describe_outputs",
diff --git a/planemo/options.py b/planemo/options.py
index d22265c29..9a4175036 100644
--- a/planemo/options.py
+++ b/planemo/options.py
@@ -460,6 +460,14 @@ def galaxy_user_key_option():
     )
 
 
+def history_name():
+    return planemo_option(
+        "--history_name",
+        type=str,
+        help="Name to give a Galaxy history, if one is created.",
+    )
+
+
 def no_cache_galaxy_option():
     return planemo_option(
         "--no_cache_galaxy",
@@ -682,14 +690,11 @@ def single_user_mode_option():
 
 
 def required_workflow_arg():
-    arg_type = click.Path(
-        exists=True,
-        file_okay=True,
-        dir_okay=False,
-        readable=True,
-        resolve_path=False,
+    return click.argument(
+        'workflow_identifier',
+        metavar="WORKFLOW_PATH_OR_ID",
+        type=str,
     )
-    return click.argument("workflow_path", metavar="WORKFLOW_PATH", type=arg_type)
 
 
 def split_job_and_test():
@@ -709,6 +714,14 @@ def required_job_arg():
     return click.argument("job_path", metavar="JOB_PATH", type=arg_type)
 
 
+def required_runnable_arg():
+    return click.argument(
+        'runnable_identifier',
+        metavar="RUNNABLE_PATH_OR_ID",
+        type=str,
+    )
+
+
 def _optional_tools_default(ctx, param, value):
     if param.name in ["paths", "uris"] and len(value) == 0:
         return [os.path.abspath(os.getcwd())]
@@ -1172,16 +1185,27 @@ def daemon_option():
     )
 
 
-def profile_option():
+def profile_option(required=False):
     return planemo_option(
         "--profile",
-        type=str,
+        type=click.STRING,
+        required=required,
         default=None,
         help=("Name of profile (created with the profile_create command) to use "
               "with this command.")
     )
 
 
+def alias_option(required=False):
+    return planemo_option(
+        "--alias",
+        type=click.STRING,
+        required=required,
+        default=None,
+        help=("Name of an alias.")
+    )
+
+
 def galaxy_serve_options():
     return _compose(
         galaxy_run_options(),
@@ -1435,6 +1459,7 @@ def engine_options():
         galaxy_url_option(),
         galaxy_admin_key_option(),
         galaxy_user_key_option(),
+        history_name()
     )
diff --git a/planemo/runnable.py b/planemo/runnable.py
index 53e6367fb..f3c94ee6f 100644
--- a/planemo/runnable.py
+++ b/planemo/runnable.py
@@ -24,7 +24,7 @@
 )
 from planemo.exit_codes import EXIT_CODE_UNKNOWN_FILE_TYPE, ExitCodeException
-from planemo.galaxy.workflows import describe_outputs
+from planemo.galaxy.workflows import describe_outputs, GALAXY_WORKFLOWS_PREFIX
 from planemo.io import error
 from planemo.test import check_output, for_collections
@@ -64,12 +64,21 @@ def is_galaxy_artifact(runnable_type):
     return "galaxy" in runnable_type.name
 
 
-_Runnable = collections.namedtuple("Runnable", ["path", "type"])
+_Runnable = collections.namedtuple("Runnable", ["uri", "type"])
 
 
 class Runnable(_Runnable):
     """Abstraction describing tools and workflows."""
 
+    @property
+    def path(self):
+        assert not self.is_remote_workflow_uri
+        return self.uri
+
+    @property
+    def is_remote_workflow_uri(self):
+        return self.uri.startswith(GALAXY_WORKFLOWS_PREFIX)
+
     @property
     def test_data_search_path(self):
         """During testing, path to search for test data files."""
@@ -171,6 +180,13 @@ def for_paths(paths, temp_path=None):
     return [for_path(path, temp_path=temp_path) for path in paths]
 
 
+def for_id(runnable_id):
+    """Produce a class:`Runnable` for supplied Galaxy workflow ID."""
+    uri = GALAXY_WORKFLOWS_PREFIX + runnable_id
+    runnable = Runnable(uri, RunnableType.galaxy_workflow)
+    return runnable
+
+
 def cases(runnable):
     """Build a `list` of :class:`TestCase` objects for specified runnable."""
     cases = []
@@ -424,8 +440,12 @@ def _tests_path(runnable):
     return None
 
 
-def get_outputs(runnable):
-    """Return a list of :class:`RunnableOutput` objects for this runnable."""
+def get_outputs(runnable, gi=None):
+    """Return a list of :class:`RunnableOutput` objects for this runnable.
+
+    Supply a bioblend user Galaxy instance object (as gi) if additional context
+    is needed to resolve workflow details.
+    """
     if not runnable.is_single_artifact:
         raise NotImplementedError("Cannot generate outputs for a directory.")
     if runnable.type in [RunnableType.galaxy_tool, RunnableType.cwl_tool]:
@@ -435,7 +455,7 @@ def get_outputs(runnable):
         outputs = [ToolOutput(o) for o in output_datasets.values()]
         return outputs
     elif runnable.type == RunnableType.galaxy_workflow:
-        workflow_outputs = describe_outputs(runnable.path)
+        workflow_outputs = describe_outputs(runnable, gi=gi)
         return [GalaxyWorkflowOutput(o) for o in workflow_outputs]
     elif runnable.type == RunnableType.cwl_workflow:
         workflow = workflow_proxy(runnable.path, strict_cwl_validation=False)
diff --git a/tests/test_external_galaxy_commands.py b/tests/test_external_galaxy_commands.py
new file mode 100644
index 000000000..b5c8fad29
--- /dev/null
+++ b/tests/test_external_galaxy_commands.py
@@ -0,0 +1,66 @@
+"""Tests for planemo commands relating to external Galaxy instances
+"""
+import os
+
+from planemo import cli
+from planemo.engine import engine_context
+from planemo.runnable import for_path
+from .test_utils import (
+    CliTestCase,
+    PROJECT_TEMPLATES_DIR,
+    TEST_DATA_DIR,
+)
+
+
+class ExternalGalaxyCommandsTestCase(CliTestCase):
+    def test_plain_init(self):
+        ctx = cli.PlanemoCliContext()
+        ctx.planemo_directory = "/tmp/planemo-test-workspace"
+        cat_tool = os.path.join(PROJECT_TEMPLATES_DIR, "demo", "cat.xml")
+        test_workflow_path = os.path.join(TEST_DATA_DIR, 'wf2.ga')
+
+        with engine_context(ctx, extra_tools=(cat_tool,)) as galaxy_engine:
+            with galaxy_engine.ensure_runnables_served([for_path(test_workflow_path)]) as config:
+                wfid = config.workflow_id(test_workflow_path)
+
+                # commands to test
+                profile_list_cmd = ["profile_list"]
+                profile_create_cmd = ["profile_create", "test_ext_profile", "--galaxy_url", config.galaxy_url,
+                                      "--galaxy_user_key", config.user_api_key]
+                alias_create_cmd = ["create_alias", wfid, "--alias", "test_wf_alias", "--profile", "test_ext_profile"]
"--profile", "test_ext_profile"] + alias_list_cmd = ["list_alias", "--profile", "test_ext_profile"] + alias_delete_cmd = ["delete_alias", "--alias", "test_wf_alias", "--profile", "test_ext_profile"] + profile_delete_cmd = ["profile_delete", "test_ext_profile"] + run_cmd = ["run", "test_wf_alias", os.path.join(TEST_DATA_DIR, "wf2-job.yml"), "--profile", "test_ext_profile"] + list_invocs_cmd = ["list_invocations", "test_wf_alias", "--profile", "test_ext_profile"] + + # test alias and profile creation + result = self._check_exit_code(profile_list_cmd) + assert 'test_ext_profile' not in result.output + result = self._check_exit_code(profile_create_cmd) + assert 'Profile [test_ext_profile] created' in result.output + result = self._check_exit_code(profile_list_cmd) + assert 'test_ext_profile' in result.output + result = self._check_exit_code(alias_create_cmd) + assert 'Alias test_wf_alias created.' in result.output + result = self._check_exit_code(alias_list_cmd) + assert 'test_wf_alias' in result.output + assert wfid in result.output + assert '1 aliases were found for profile test_ext_profile.' in result.output + + # test WF execution (from wfid) using created profile and alias + result = self._check_exit_code(run_cmd) + assert 'Run failed' not in result.output + result = self._check_exit_code(list_invocs_cmd) + assert '1 invocations found.' in result.output + assert '1 jobs ok' in result.output or '"ok": 1' in result.output # so it passes regardless if tabulate is installed or not + + # test alias and profile deletion + result = self._check_exit_code(alias_delete_cmd) + assert 'Alias test_wf_alias was successfully deleted from profile test_ext_profile' in result.output + result = self._check_exit_code(alias_list_cmd) + assert '0 aliases were found for profile test_ext_profile.' in result.output + result = self._check_exit_code(profile_delete_cmd) + assert 'Profile deleted.' in result.output + result = self._check_exit_code(profile_list_cmd) + assert 'test_ext_profile' not in result.output diff --git a/tests/test_galaxy_workflow_utils.py b/tests/test_galaxy_workflow_utils.py index bb3d1f8e0..763c557ac 100644 --- a/tests/test_galaxy_workflow_utils.py +++ b/tests/test_galaxy_workflow_utils.py @@ -2,12 +2,14 @@ import os from planemo.galaxy.workflows import describe_outputs +from planemo.runnable import for_path from .test_utils import TEST_DATA_DIR def test_describe_outputs(): wf_path = os.path.join(TEST_DATA_DIR, "wf1.gxwf.yml") - outputs = describe_outputs(wf_path) + runnable = for_path(wf_path) + outputs = describe_outputs(runnable) assert len(outputs) == 1 output = outputs[0] assert output.order_index == 1