
Commit

Merge remote-tracking branch 'upstream/master' into serve-test
simonbray committed Oct 13, 2021
2 parents 4811cbd + 2c3c594 commit 76af174
Showing 13 changed files with 619 additions and 68 deletions.
30 changes: 29 additions & 1 deletion HISTORY.rst
@@ -6,11 +6,32 @@ History
.. to_doc
---------------------
0.75.0 (2021-07-23)
0.75.0.dev0
---------------------



---------------------
0.74.8 (2021-10-10)
---------------------

* Exclude click 8.0.2. `Pull Request 1196`_
* Add tool version numbers to autoupdate logging (thanks to `@simonbray`_).
`Pull Request 1188`_
* Allow tool autoupdate without conda installation (thanks to `@simonbray`_).
`Pull Request 1193`_
* use correct key execution_problem in template (thanks to `@bernt-matthias`_).
`Pull Request 1195`_

---------------------
0.74.7 (2021-09-21)
---------------------

* Fix documentation to include `--download_outputs` flag (thanks to
`@simonbray`_). `Pull Request 1184`_
* Select refgenie config based on Galaxy version `Pull Request 1187`_
* Extend autoupdate subcommand to workflows (thanks to `@simonbray`_). `Pull
Request 1151`_

---------------------
0.74.6 (2021-07-23)
@@ -1740,6 +1761,13 @@ History
tools - and more experimental features involving Docker and Homebrew. 7d07782_

.. github_links
.. _Pull Request 1188: https://github.com/galaxyproject/planemo/pull/1188
.. _Pull Request 1193: https://github.com/galaxyproject/planemo/pull/1193
.. _Pull Request 1195: https://github.com/galaxyproject/planemo/pull/1195
.. _Pull Request 1196: https://github.com/galaxyproject/planemo/pull/1196
.. _Pull Request 1184: https://github.com/galaxyproject/planemo/pull/1184
.. _Pull Request 1187: https://github.com/galaxyproject/planemo/pull/1187
.. _Pull Request 1151: https://github.com/galaxyproject/planemo/pull/1151
.. _Pull Request 1153: https://github.com/galaxyproject/planemo/pull/1153
.. _Pull Request 1179: https://github.com/galaxyproject/planemo/pull/1179
.. _Pull Request 1180: https://github.com/galaxyproject/planemo/pull/1180
18 changes: 18 additions & 0 deletions docs/autoupdate.rst
@@ -29,6 +29,24 @@ Formatting tools
- A token ``@TOOL_VERSION@`` should be created which corresponds to the version number of the main requirement.
- Optionally, a token ``@VERSION_SUFFIX@`` should be created, which should be an integer representing the number of times the XML wrapper has been updated since ``@TOOL_VERSION@`` was updated.

Updating workflows
=============================

The ``autoupdate`` subcommand can also be used to automatically update workflows so that they use the most recent versions of the Galaxy tools available.

::

planemo autoupdate workflow.ga

In the basic usage, a local Galaxy instance will be spun up and the workflow uploaded, refactored to include the most recent tool versions, and re-downloaded.

Workflows can also be updated against an external Galaxy server, for example:

::

planemo autoupdate workflow.ga --profile usegalaxy-eu

In this case, the workflow returned will contain the most recent tool and subworkflow versions available on that Galaxy server.
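
Both native ``.ga`` and format2 (gxformat2) workflow files can be updated; the file is rewritten in place in its original format. For example (the filename here is only illustrative):

::

    planemo autoupdate my_workflow.gxwf.yml --profile usegalaxy-eu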

Implementing an autoupdate CI job
=================================
2 changes: 1 addition & 1 deletion planemo/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

__version__ = '0.75.0'
__version__ = '0.75.0.dev0'


PROJECT_NAME = "planemo"
39 changes: 31 additions & 8 deletions planemo/autoupdate.py
@@ -5,6 +5,8 @@
import re
import xml.etree.ElementTree as ET

import packaging.version
import requests
from galaxy.tool_util.deps import conda_util

import planemo.conda
@@ -51,7 +53,11 @@ def check_conda(tool_name, ctx, **kwds):
"""
conda_context = planemo.conda.build_conda_context(ctx, **kwds)
if not conda_context.is_conda_installed():
error("Conda is not installed! Try running planemo conda_init.")
# check directly via Anaconda API
r = requests.get('https://api.anaconda.org/search', params={'name': tool_name})
search_results = sum([n['versions'] for n in r.json() if n['name'] == tool_name and n['owner'] in kwds['conda_ensure_channels']], [])
return sorted(search_results, key=lambda n: packaging.version.parse(n))[-1]

target = planemo.conda.conda_util.CondaTarget(tool_name)
search_results = conda_util.best_search_result(target, conda_context=conda_context)
return search_results[0]['version']
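
When conda is not installed, the new fallback in ``check_conda`` queries the Anaconda.org search API directly and returns the newest version found in the allowed channels. A minimal standalone sketch of that lookup (package name and channel list here are illustrative):

    import packaging.version
    import requests

    def latest_anaconda_version(package_name, channels=("bioconda", "conda-forge")):
        # Query the public Anaconda.org search API for the package name
        response = requests.get("https://api.anaconda.org/search", params={"name": package_name})
        versions = []
        for hit in response.json():
            # Keep only exact name matches owned by one of the allowed channels
            if hit["name"] == package_name and hit["owner"] in channels:
                versions.extend(hit["versions"])
        if not versions:
            return None
        # Pick the newest version using PEP 440 ordering
        return max(versions, key=packaging.version.parse)

    print(latest_anaconda_version("samtools"))
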
@@ -62,11 +68,11 @@ def update_xml(tool_path, xml_tree, tags_to_update, wrapper_version_token, is_ma
Write modified XML to tool_path
"""
def update_token(xml_text, tag, token_value):
new_tag = '>{}<'.format(token_value).join(re.split('>.*<', tag))
new_tag = f'>{token_value}<'.join(re.split('>.*<', tag))
return re.sub(tag, new_tag, xml_text)

def update_requirement(xml_text, tag, requirement_value):
new_tag = 'version="{}"'.format(requirement_value).join(re.split('version=".*"', tag))
new_tag = f'version="{requirement_value}"'.join(re.split('version=".*"', tag))
return re.sub(tag, new_tag, xml_text)

with open(tool_path, 'r+', newline='') as f:
@@ -144,8 +150,7 @@ def perform_required_update(ctx, xml_files, tool_path, requirements, tokens, xml
# finally, update each file separately
for k, v in xml_files.items():
update_xml(k, v, xml_to_update[k], wrapper_version_token, is_macro=(k != tool_path))

info("Tool {} updated.".format(tool_path))
info(f"Tool {tool_path} successfully updated.")
return set(xml_files)


@@ -182,13 +187,31 @@ def autoupdate_tool(ctx, tool_path, modified_files=set(), **kwds):
tokens, xml_to_update, current_main_req, updated_main_req = create_token_dict(ctx, xml_files, main_req, **kwds)

if current_main_req == updated_main_req and not (modified_files & set(xml_files)):
info("No updates required or made to {}.".format(tool_path))
info(f"No updates required or made to {tool_path}.")
return # end here if no update needed

if kwds.get('dry_run'):
error("Update required to {}! Tool main requirement has version {}, newest conda version is {}".format(
tool_path, current_main_req, updated_main_req))
error(f"Update required to {tool_path}! Tool main requirement has version {current_main_req}, newest conda version is {updated_main_req}")
return

else:
info(f"Updating {tool_path.split('/')[-1]} from version {current_main_req} to {updated_main_req}")
return perform_required_update(ctx, xml_files, tool_path, requirements, tokens, xml_to_update, wrapper_version_token, **kwds)


def _update_wf(config, workflow_id):
"""
Recursively update a workflow, including subworkflows
"""
wf = config.user_gi.workflows.show_workflow(workflow_id)
for step in wf['steps'].values():
if step['type'] == 'subworkflow':
# update subworkflows before the main workflow
_update_wf(config, step['workflow_id'])
config.user_gi.workflows.refactor_workflow(workflow_id, actions=[{"action_type": "upgrade_all_steps"}])


def autoupdate_wf(ctx, config, wf):
workflow_id = config.workflow_id_for_runnable(wf)
_update_wf(config, workflow_id)
return config.user_gi.workflows.export_workflow_dict(workflow_id)
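
``_update_wf`` and ``autoupdate_wf`` drive the upgrade through BioBlend against the served Galaxy instance. The same recursive upgrade can be sketched as a standalone script (the server URL and API key are placeholders):

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://usegalaxy.example.org", key="YOUR_API_KEY")

    def upgrade_workflow(workflow_id):
        # Upgrade nested subworkflows first so the parent picks up their new tool versions
        workflow = gi.workflows.show_workflow(workflow_id)
        for step in workflow["steps"].values():
            if step["type"] == "subworkflow":
                upgrade_workflow(step["workflow_id"])
        # Ask Galaxy to upgrade every step of this workflow to the newest available tool version
        gi.workflows.refactor_workflow(workflow_id, actions=[{"action_type": "upgrade_all_steps"}])
        return gi.workflows.export_workflow_dict(workflow_id)
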
77 changes: 58 additions & 19 deletions planemo/commands/cmd_autoupdate.py
@@ -1,9 +1,16 @@
"""Module describing the planemo ``autoupdate`` command."""
import json

import click
from gxformat2 import from_galaxy_native

from planemo import autoupdate, options
from planemo.cli import command_function
from planemo.config import planemo_option
from planemo.engine import (
engine_context,
is_galaxy_engine,
)
from planemo.engine.test import (
test_runnables,
)
@@ -19,6 +26,7 @@
)
from planemo.runnable import (
for_paths,
RunnableType
)
from planemo.tools import (
is_tool_load_error,
@@ -62,7 +70,7 @@ def skip_requirements_option():
)


@click.command('autoupdate')
@click.command('autoupdate') # noqa C901
@options.optional_tools_arg(multiple=True)
@dry_run_option()
@options.recursive_option()
@@ -76,29 +84,60 @@ def skip_requirements_option():
@options.report_xunit()
@options.fail_level_option()
@command_function
def cli(ctx, paths, **kwds):
def cli(ctx, paths, **kwds): # noqa C901
"""Auto-update tool requirements by checking against Conda and updating if newer versions are available."""
assert_tools = kwds.get("assert_tools", True)
recursive = kwds.get("recursive", False)
exit_codes = []
modified_files = set()
tools_to_skip = [line.rstrip() for line in open(kwds['skiplist'])] if kwds['skiplist'] else []
for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
if tool_path.split('/')[-1] in tools_to_skip:
info("Skipping tool %s" % tool_path)
continue
info("Auto-updating tool %s" % tool_path)
try:
updated = autoupdate.autoupdate_tool(ctx, tool_path, modified_files=modified_files, **kwds)
if updated:
modified_files.update(updated)
except Exception as e:
error("{} could not be updated - the following error was raised: {}".format(tool_path, e.__str__()))
if handle_tool_load_error(tool_path, tool_xml):
exit_codes.append(EXIT_CODE_GENERIC_FAILURE)
continue
else:
exit_codes.append(EXIT_CODE_OK)
runnables = for_paths(paths)

if any(r.type in {RunnableType.galaxy_tool, RunnableType.directory} for r in runnables):
# update Galaxy tools
for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
if tool_path.split('/')[-1] in tools_to_skip:
info("Skipping tool %s" % tool_path)
continue
info("Auto-updating tool %s" % tool_path)
try:
updated = autoupdate.autoupdate_tool(ctx, tool_path, modified_files=modified_files, **kwds)
if updated:
modified_files.update(updated)
except Exception as e:
error(f"{tool_path} could not be updated - the following error was raised: {e.__str__()}")
if handle_tool_load_error(tool_path, tool_xml):
exit_codes.append(EXIT_CODE_GENERIC_FAILURE)
continue
else:
exit_codes.append(EXIT_CODE_OK)

workflows = [r for r in runnables if r.type == RunnableType.galaxy_workflow]
modified_workflows = []
if workflows:
assert is_galaxy_engine(**kwds)
if kwds.get("engine") != "external_galaxy":
kwds["install_most_recent_revision"] = True
kwds["install_resolver_dependencies"] = False
kwds["install_repository_dependencies"] = False
kwds['shed_install'] = True

with engine_context(ctx, **kwds) as galaxy_engine:
with galaxy_engine.ensure_runnables_served(workflows) as config:
for workflow in workflows:
if config.updated_repos.get(workflow.path) or kwds.get("engine") == "external_galaxy":
info("Auto-updating workflow %s" % workflow.path)
updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)
if workflow.path.endswith(".ga"):
with open(workflow.path, 'w') as f:
json.dump(updated_workflow, f, indent=4, sort_keys=True)
else:
format2_wrapper = from_galaxy_native(updated_workflow, json_wrapper=True)
with open(workflow.path, "w") as f:
f.write(format2_wrapper["yaml_content"])
modified_workflows.append(workflow.path)
else:
info("No newer tool versions were found, so the workflow was not updated.")

if kwds['test']:
if not modified_files:
@@ -108,7 +147,7 @@ def cli(ctx, paths, **kwds):
# only test tools in updated directories
modified_paths = [path for path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive) if path in modified_files]
info(f"Running tests for the following auto-updated tools: {', '.join(modified_paths)}")
runnables = for_paths(modified_paths, temp_path=temp_path)
runnables = for_paths(modified_paths + modified_workflows, temp_path=temp_path)
kwds["engine"] = "galaxy"
return_value = test_runnables(ctx, runnables, original_paths=paths, **kwds)
exit_codes.append(return_value)
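
For workflows not stored as native ``.ga`` files, the command converts the updated native dictionary back to format2 YAML via ``gxformat2`` before writing it out. A small sketch of that conversion step in isolation (file paths are illustrative):

    import json

    from gxformat2 import from_galaxy_native

    # Load an updated native (.ga) workflow dictionary
    with open("updated_workflow.ga") as f:
        native_workflow = json.load(f)

    # Convert to format2 and write the YAML representation back out
    format2_wrapper = from_galaxy_native(native_workflow, json_wrapper=True)
    with open("workflow.gxwf.yml", "w") as f:
        f.write(format2_wrapper["yaml_content"])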