From e49b0806ecc6dbefae856ff359beffebb72b3bcc Mon Sep 17 00:00:00 2001
From: Matthias Bernt
Date: Sun, 2 Jun 2024 17:20:47 +0200
Subject: [PATCH 1/4] start a separate instance for each workflow

---
 planemo/commands/cmd_autoupdate.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/planemo/commands/cmd_autoupdate.py b/planemo/commands/cmd_autoupdate.py
index 40c988810..d25919c12 100644
--- a/planemo/commands/cmd_autoupdate.py
+++ b/planemo/commands/cmd_autoupdate.py
@@ -135,9 +135,9 @@ def cli(ctx, paths, **kwds):  # noqa C901
         kwds["install_repository_dependencies"] = False
         kwds["shed_install"] = True
 
-        with engine_context(ctx, **kwds) as galaxy_engine:
-            with galaxy_engine.ensure_runnables_served(modified_workflows) as config:
-                for workflow in modified_workflows:
+        for workflow in modified_workflows:
+            with engine_context(ctx, **kwds) as galaxy_engine:
+                with galaxy_engine.ensure_runnables_served([workflow]) as config:
                     if config.updated_repos.get(workflow.path) or kwds.get("engine") == "external_galaxy":
                         info("Auto-updating workflow %s" % workflow.path)
                         updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)

From 65328f4c14ab2fe7b9561427377978b3356cd8e6 Mon Sep 17 00:00:00 2001
From: Matthias Bernt
Date: Mon, 10 Jun 2024 16:05:17 +0200
Subject: [PATCH 2/4] Revert "start a separate instance for each workflow"

This reverts commit e49b0806ecc6dbefae856ff359beffebb72b3bcc.
---
 planemo/commands/cmd_autoupdate.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/planemo/commands/cmd_autoupdate.py b/planemo/commands/cmd_autoupdate.py
index d25919c12..40c988810 100644
--- a/planemo/commands/cmd_autoupdate.py
+++ b/planemo/commands/cmd_autoupdate.py
@@ -135,9 +135,9 @@ def cli(ctx, paths, **kwds):  # noqa C901
         kwds["install_repository_dependencies"] = False
         kwds["shed_install"] = True
 
-        for workflow in modified_workflows:
-            with engine_context(ctx, **kwds) as galaxy_engine:
-                with galaxy_engine.ensure_runnables_served([workflow]) as config:
+        with engine_context(ctx, **kwds) as galaxy_engine:
+            with galaxy_engine.ensure_runnables_served(modified_workflows) as config:
+                for workflow in modified_workflows:
                     if config.updated_repos.get(workflow.path) or kwds.get("engine") == "external_galaxy":
                         info("Auto-updating workflow %s" % workflow.path)
                         updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)

From 787feb1cb8b6733bba007112cd7c71bf3422ab4d Mon Sep 17 00:00:00 2001
From: Matthias Bernt
Date: Mon, 10 Jun 2024 16:05:53 +0200
Subject: [PATCH 3/4] drop if instead

---
 planemo/commands/cmd_autoupdate.py | 41 +++++++++++++++---------------
 1 file changed, 20 insertions(+), 21 deletions(-)

diff --git a/planemo/commands/cmd_autoupdate.py b/planemo/commands/cmd_autoupdate.py
index 40c988810..ef9a6c940 100644
--- a/planemo/commands/cmd_autoupdate.py
+++ b/planemo/commands/cmd_autoupdate.py
@@ -138,27 +138,26 @@ def cli(ctx, paths, **kwds):  # noqa C901
         with engine_context(ctx, **kwds) as galaxy_engine:
             with galaxy_engine.ensure_runnables_served(modified_workflows) as config:
                 for workflow in modified_workflows:
-                    if config.updated_repos.get(workflow.path) or kwds.get("engine") == "external_galaxy":
-                        info("Auto-updating workflow %s" % workflow.path)
-                        updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)
-
-                        if workflow.path.endswith(".ga"):
-                            with open(workflow.path) as f:
-                                original_workflow = json.load(f)
-                            edited_workflow = autoupdate.fix_workflow_ga(original_workflow, updated_workflow)
-                            with open(workflow.path, "w") as f:
-                                json.dump(edited_workflow, f, indent=4)
-                            else:
-                                with open(workflow.path) as f:
-                                    original_workflow = yaml.load(f, Loader=yaml.SafeLoader)
-                                edited_workflow = autoupdate.fix_workflow_gxformat2(original_workflow, updated_workflow)
-                                with open(workflow.path, "w") as f:
-                                    yaml.dump(edited_workflow, f)
-                                if original_workflow.get("release"):
-                                    info(
-                                        f"The workflow release number has been updated from "
-                                        f"{original_workflow.get('release')} to {edited_workflow.get('release')}."
-                                    )
+                    info("Auto-updating workflow %s" % workflow.path)
+                    updated_workflow = autoupdate.autoupdate_wf(ctx, config, workflow)
+
+                    if workflow.path.endswith(".ga"):
+                        with open(workflow.path) as f:
+                            original_workflow = json.load(f)
+                        edited_workflow = autoupdate.fix_workflow_ga(original_workflow, updated_workflow)
+                        with open(workflow.path, "w") as f:
+                            json.dump(edited_workflow, f, indent=4)
+                    else:
+                        with open(workflow.path) as f:
+                            original_workflow = yaml.load(f, Loader=yaml.SafeLoader)
+                        edited_workflow = autoupdate.fix_workflow_gxformat2(original_workflow, updated_workflow)
+                        with open(workflow.path, "w") as f:
+                            yaml.dump(edited_workflow, f)
+                        if original_workflow.get("release"):
+                            info(
+                                f"The workflow release number has been updated from "
+                                f"{original_workflow.get('release')} to {edited_workflow.get('release')}."
+                            )
 
     if kwds["test"]:
         if not modified_files and not modified_workflows:

From 393d82f19dbfb67919191595b9fea414e55067ae Mon Sep 17 00:00:00 2001
From: mvdbeek
Date: Tue, 24 Sep 2024 10:25:19 +0200
Subject: [PATCH 4/4] Add test for updating multiple workflows with overlapping tools

---
 tests/test_cmd_autoupdate.py | 59 +++++++++++++++++++++---------------
 tests/test_utils.py          |  2 +-
 2 files changed, 36 insertions(+), 25 deletions(-)

diff --git a/tests/test_cmd_autoupdate.py b/tests/test_cmd_autoupdate.py
index 0229d9784..1f202b08c 100644
--- a/tests/test_cmd_autoupdate.py
+++ b/tests/test_cmd_autoupdate.py
@@ -2,6 +2,7 @@
 
 import json
 import os
+import shutil
 import tempfile
 from contextlib import contextmanager
 
@@ -33,7 +34,7 @@ def create_tmp_test_tool_file(tool_version):
     ) as t:
         t.write(xml_str)
         t.flush()
-        yield t.name
+        yield os.path.realpath(t.name)
 
 
 class CmdAutoupdateTestCase(CliTestCase):
@@ -80,31 +81,41 @@ def test_autoupdate_no_update_needed(self):
             result = self._runner.invoke(self._cli.planemo, autoupdate_command)
             assert f"No updates required or made to {xmlfile}." in result.output
 
-    def test_autoupdate_workflow(self):
+    def test_autoupdate_multiple_workflows(self):
         """Test autoupdate command for a workflow is needed."""
-        with self._isolate_with_test_data("wf_repos/autoupdate_tests") as f:
-            wf_file = os.path.realpath(os.path.join(f, "diff-refactor-test.ga"))
-            autoupdate_command = ["autoupdate", wf_file]
+        with self._isolate_with_test_data("wf_repos/autoupdate_tests") as f, tempfile.TemporaryDirectory(
+            dir=f, prefix="autoupdate_test"
+        ) as isolated_dir:
+            source_file = os.path.join(f, "diff-refactor-test.ga")
+            # We update identical workflows in the same autoupdate call;
+            # both workflows must be updated.
+            targets = [os.path.join(isolated_dir, wf) for wf in ("wf1.ga", "wf2.ga")]
+            for target in targets:
+                shutil.copy(source_file, target)
+            autoupdate_command = ["autoupdate", *targets]
             result = self._runner.invoke(self._cli.planemo, autoupdate_command)
-            assert f"Auto-updating workflow {wf_file}" in result.output
-            with open(wf_file) as g:
-                wf = json.load(g)
-            # check tool within parent wf has updated
-            assert wf["steps"]["1"]["tool_version"] == "3.7+galaxy0"
-            # check tool within subworkflow has updated
-            assert wf["steps"]["2"]["subworkflow"]["steps"]["1"]["tool_version"] == "3.7+galaxy0"
-            assert (
-                wf["steps"]["2"]["subworkflow"]["steps"]["1"]["tool_id"]
-                == "toolshed.g2.bx.psu.edu/repos/bgruening/diff/diff/3.7+galaxy0"
-            )
-            assert wf["version"] == 2
-            assert wf["release"] == "0.1.1"
-
-            result = self._runner.invoke(self._cli.planemo, autoupdate_command)  # rerun on already updated WF
-            assert "No newer tool versions were found, so the workflow was not updated." in result.output
-
+            assert "Auto-updating workflow" in result.output
+            for wf_file in targets:
+                with open(wf_file) as g:
+                    wf = json.load(g)
+                # check tool within parent wf has updated
+                assert wf["steps"]["1"]["tool_version"] == "3.7+galaxy0"
+                # check tool within subworkflow has updated
+                assert wf["steps"]["2"]["subworkflow"]["steps"]["1"]["tool_version"] == "3.7+galaxy0"
+                assert (
+                    wf["steps"]["2"]["subworkflow"]["steps"]["1"]["tool_id"]
+                    == "toolshed.g2.bx.psu.edu/repos/bgruening/diff/diff/3.7+galaxy0"
+                )
+                assert wf["version"] == 2
+                assert wf["release"] == "0.1.1"
+
+            result = self._runner.invoke(self._cli.planemo, autoupdate_command)  # rerun on already updated WF
+            assert "No newer tool versions were found, so the workflow was not updated." in result.output
+
+    def test_autoupdate_gxformat2_workflow(self):
+        with self._isolate_with_test_data("wf_repos/autoupdate_tests") as f:
             wf_file = os.path.join(f, "diff-refactor-test.gxwf.yml")
-            autoupdate_command[1] = wf_file
+            autoupdate_command = ["autoupdate", wf_file]
             result = self._runner.invoke(self._cli.planemo, autoupdate_command)
             assert f"Auto-updating workflow {wf_file}" in result.output
@@ -117,7 +128,7 @@
 
     def test_autoupdate_workflow_from_multiple_tool_sheds(self):
         with self._isolate_with_test_data("wf_repos/autoupdate_tests") as f:
-            wf_file = os.path.realpath(os.path.join(f, "wf_autoupdate_test_multiple_repos.ga"))
+            wf_file = os.path.join(f, "wf_autoupdate_test_multiple_repos.ga")
             autoupdate_command = ["autoupdate", wf_file]
             result = self._runner.invoke(self._cli.planemo, autoupdate_command)
             assert f"Auto-updating workflow {wf_file}" in result.output
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 98362b41e..15e669f95 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -128,7 +128,7 @@ def _isolate_with_test_data(self, relative_path):
         with self._isolate() as f:
            repo = os.path.join(TEST_DATA_DIR, relative_path)
            self._copy_directory(repo, f)
-           yield f
+           yield os.path.realpath(f)
 
    def _copy_repo(self, name, dest):
        repo = os.path.join(TEST_REPOS_DIR, name)
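
Reviewer note: the sketch below restates, outside the test harness, the scenario that
test_autoupdate_multiple_workflows exercises: two copies of the same workflow, sharing the
same tools, are passed to a single "planemo autoupdate" call, so one Galaxy instance is
started and both files are rewritten. With the updated_repos guard dropped in PATCH 3/4,
every workflow given on the command line is updated, which appears to be what the
overlapping-tools test relies on. This is a minimal illustration only: the entry point
planemo.cli.planemo (mirroring the test suite's self._cli.planemo), the helper name
autoupdate_two_copies, and the example source path are assumptions, and running it end to
end needs a working planemo and Galaxy setup, just as the test does.

    # Hypothetical sketch, not part of the patch: update two copies of one workflow
    # in a single `planemo autoupdate` invocation.
    import json
    import shutil
    import tempfile
    from pathlib import Path

    from click.testing import CliRunner

    from planemo.cli import planemo  # assumed Click entry point (cf. self._cli.planemo)


    def autoupdate_two_copies(source_workflow: str) -> None:
        """Copy one .ga workflow twice and autoupdate both copies in one call."""
        with tempfile.TemporaryDirectory(prefix="autoupdate_demo") as tmp:
            targets = [str(Path(tmp) / name) for name in ("wf1.ga", "wf2.ga")]
            for target in targets:
                shutil.copy(source_workflow, target)
            # One invocation serves one Galaxy instance and rewrites every
            # workflow passed on the command line.
            result = CliRunner().invoke(planemo, ["autoupdate", *targets])
            print(result.output)
            for target in targets:
                with open(target) as fh:
                    wf = json.load(fh)
                # Both copies should now reference the newest tool versions.
                print(target, wf["steps"]["1"]["tool_version"])


    # e.g. autoupdate_two_copies("tests/data/wf_repos/autoupdate_tests/diff-refactor-test.ga")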