From 6a1335f34c2df8a10b4cf61e07ec57e54295b315 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Mon, 5 Feb 2024 10:16:37 +0100 Subject: [PATCH 01/18] Isolate python env of data_source_async tools and document profile-dependency. --- lib/galaxy/tool_util/xsd/galaxy.xsd | 1 + lib/galaxy/tools/__init__.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/tool_util/xsd/galaxy.xsd b/lib/galaxy/tool_util/xsd/galaxy.xsd index 8f46e0c23f41..0e9f3b6a8542 100644 --- a/lib/galaxy/tool_util/xsd/galaxy.xsd +++ b/lib/galaxy/tool_util/xsd/galaxy.xsd @@ -58,6 +58,7 @@ List of behavior changes associated with profile versions: ### 21.09 - Do not strip leading and trailing whitespaces in `from_work_dir` attribute. +- Do not use Galaxy python environment for `data_source` and `data_source_async` tools. ### 23.0 diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index 3213f10b218b..291192c749ce 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -910,13 +910,13 @@ def requires_galaxy_python_environment(self): # seem to require Galaxy's Python. # FIXME: the (instantiated) tool class should emit this behavior, and not # use inspection by string check - if self.tool_type not in ["default", "manage_data", "interactive", "data_source"]: + if self.tool_type not in ["default", "manage_data", "interactive", "data_source", "data_source_async"]: return True if self.tool_type == "manage_data" and self.profile < 18.09: return True - if self.tool_type == "data_source" and self.profile < 21.09: + if self.tool_type in ["data_source", "data_source_async"] and self.profile < 21.09: return True config = self.app.config From 571036b60101269201915f96ca2377304dea7116 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Tue, 6 Feb 2024 13:56:51 +0100 Subject: [PATCH 02/18] Shift data_source_async env isolation to 24.0 profile --- lib/galaxy/tool_util/xsd/galaxy.xsd | 6 +++++- lib/galaxy/tools/__init__.py | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/tool_util/xsd/galaxy.xsd b/lib/galaxy/tool_util/xsd/galaxy.xsd index 0e9f3b6a8542..7fd70408d77e 100644 --- a/lib/galaxy/tool_util/xsd/galaxy.xsd +++ b/lib/galaxy/tool_util/xsd/galaxy.xsd @@ -58,13 +58,17 @@ List of behavior changes associated with profile versions: ### 21.09 - Do not strip leading and trailing whitespaces in `from_work_dir` attribute. -- Do not use Galaxy python environment for `data_source` and `data_source_async` tools. +- Do not use Galaxy python environment for `data_source` tools. ### 23.0 - Text parameters that are inferred to be optional (i.e the `optional` tag is not set, but the tool parameter accepts an empty string) are set to `None` for templating in Cheetah. Older tools receive the empty string `""` as the templated value. +### 24.0 + +- Do not use Galaxy python environment for `data_source_async` tools. 
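Taken together, patches 01 and 02 leave the environment gating as sketched below. This is not the real `Tool.requires_galaxy_python_environment` method — the real one inspects `self.tool_type`/`self.profile` and then falls through to config-based checks — just the tool-type/profile decision extracted for illustration:

```python
# Condensed sketch of the tool_type/profile gating introduced by patches 01-02.
def needs_galaxy_python(tool_type: str, profile: float) -> bool:
    if tool_type not in ["default", "manage_data", "interactive", "data_source", "data_source_async"]:
        return True
    if tool_type == "manage_data" and profile < 18.09:
        return True
    if tool_type == "data_source" and profile < 21.09:
        return True
    if tool_type == "data_source_async" and profile < 24.0:
        return True
    # remaining cases fall through to config-based checks in the real method
    return False


assert needs_galaxy_python("data_source", 20.09) is True
assert needs_galaxy_python("data_source", 21.09) is False
assert needs_galaxy_python("data_source_async", 21.09) is True
```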
+ ### Examples A normal tool: diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index 291192c749ce..77875a8a22af 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -916,7 +916,10 @@ def requires_galaxy_python_environment(self): if self.tool_type == "manage_data" and self.profile < 18.09: return True - if self.tool_type in ["data_source", "data_source_async"] and self.profile < 21.09: + if self.tool_type == "data_source" and self.profile < 21.09: + return True + + if self.tool_type == "data_source_async" and self.profile < 24.0: return True config = self.app.config From 36bbf6fcb22287a03d811bd266269c735ca40872 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Wed, 7 Feb 2024 12:06:34 +0100 Subject: [PATCH 03/18] Preserve request params declared by data_source_async tools --- lib/galaxy/webapps/galaxy/controllers/async.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index bd6187bb14a1..369bfb768199 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -71,11 +71,21 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): data.state = data.blurb = data.states.RUNNING log.debug(f"executing tool {tool.id}") trans.log_event(f"Async executing tool {tool.id}", tool_id=tool.id) + params_dict = {} + if tool.input_translator: + tool.input_translator.translate(params) + tool_declared_params = { + translator.galaxy_name + for translator in tool.input_translator.param_trans_dict.values() + } + for param in params: + if param in tool_declared_params: + params_dict[param] = params.get(param, None) galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}" - galaxy_url = params.get("GALAXY_URL", galaxy_url) params = dict( URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext ) + params.update(params_dict) # Assume there is exactly one output file possible TOOL_OUTPUT_TYPE = None From 443a49a54dbf5491ff080812d0c8d3079dbdc83e Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Wed, 7 Feb 2024 12:20:30 +0100 Subject: [PATCH 04/18] Fix linting --- lib/galaxy/webapps/galaxy/controllers/async.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index 369bfb768199..a47bd34ab012 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -75,8 +75,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): if tool.input_translator: tool.input_translator.translate(params) tool_declared_params = { - translator.galaxy_name - for translator in tool.input_translator.param_trans_dict.values() + translator.galaxy_name for translator in tool.input_translator.param_trans_dict.values() } for param in params: if param in tool_declared_params: From 3b883e557f6f73ef1824404d4702ae0d31fa836b Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 13:23:38 +0100 Subject: [PATCH 05/18] Refactor async params mapping --- .../webapps/galaxy/controllers/async.py | 52 +++++++++++-------- 1 file changed, 30 insertions(+), 22 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index a47bd34ab012..7be94827d65a 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ 
b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -39,8 +39,6 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): return trans.response.send_redirect("/index") params = Params(kwd, sanitize=False) - STATUS = params.STATUS - URL = params.URL data_id = params.data_id log.debug(f"async dataid -> {data_id}") @@ -52,39 +50,49 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): if not tool: return f"Tool with id {tool_id} not found" - # - # we have an incoming data_id - # if data_id: - if not URL: - return f"No URL parameter was submitted for data {data_id}" + # + # we have an incoming data_id + # data = trans.sa_session.query(trans.model.HistoryDatasetAssociation).get(data_id) if not data: return f"Data {data_id} does not exist or has already been deleted" + # map params from the tool's section; + # ignore any other params that may have been passed by the remote + # server with the exception of STATUS and URL; + # if name, info, dbkey and data_type are not handled via incoming params, + # use the metadata from the already existing dataset + params_dict = dict( + STATUS=params.STATUS, URL=params.URL, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext + ) + if tool.input_translator: + tool.input_translator.translate(params) + tool_declared_params = { + translator.galaxy_name for translator in tool.input_translator.param_trans_dict.values() + } + for param in params: + if param in tool_declared_params: + params_dict[param] = params.get(param, None) + params = params_dict + + if not params.get("URL"): + return f"No URL parameter was submitted for data {data_id}" + + STATUS = params.get("STATUS") + if STATUS == "OK": key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id)) if key != data_secret: return f"You do not have permission to alter data {data_id}." + if not params.get("GALAXY_URL"): + # provide a fallback for GALAXY_URL + params["GALAXY_URL"] = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}" # push the job into the queue data.state = data.blurb = data.states.RUNNING log.debug(f"executing tool {tool.id}") trans.log_event(f"Async executing tool {tool.id}", tool_id=tool.id) - params_dict = {} - if tool.input_translator: - tool.input_translator.translate(params) - tool_declared_params = { - translator.galaxy_name for translator in tool.input_translator.param_trans_dict.values() - } - for param in params: - if param in tool_declared_params: - params_dict[param] = params.get(param, None) - galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}" - params = dict( - URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext - ) - params.update(params_dict) # Assume there is exactly one output file possible TOOL_OUTPUT_TYPE = None @@ -180,7 +188,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): params.update({"data_id": data.id}) # Use provided URL or fallback to tool action - url = URL or tool.action + url = params.URL or tool.action # Does url already have query params? if "?" in url: url_join_char = "&" From b27666c5951575f6e17ebb5a15689d3bdf673a8c Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 14:08:22 +0100 Subject: [PATCH 06/18] Prevent updating of terminal datasets Async data requests coming in after the corresponding dataset has been finalized are now ignored. 
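The parameter handling that patch 05 refactors (and that this commit guards further) boils down to: keep `STATUS` and `URL`, fall back to the existing dataset's metadata for `name`, `info`, `dbkey` and `data_type`, and accept from the remote server only parameters the tool declares via its `<request_param>` translations. A self-contained approximation — not the controller code itself, and using a plain set in place of Galaxy's `Params`/input-translator objects:

```python
# Illustrative sketch only. "declared" stands for
# {t.galaxy_name for t in tool.input_translator.param_trans_dict.values()}.
def filter_async_params(incoming: dict, declared: set, data) -> dict:
    # always kept / derived from the pre-existing dataset
    params = {
        "STATUS": incoming.get("STATUS"),
        "URL": incoming.get("URL"),
        "name": data.name,
        "info": data.info,
        "dbkey": data.dbkey,
        "data_type": data.ext,
    }
    # anything else the remote server sent is kept only if the tool declares it
    params.update({k: v for k, v in incoming.items() if k in declared})
    return params
```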
--- lib/galaxy/webapps/galaxy/controllers/async.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index 7be94827d65a..28d9ef268167 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -58,6 +58,14 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): if not data: return f"Data {data_id} does not exist or has already been deleted" + if data.state in data.dataset.terminal_states: + log.debug( + f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely" + ) + trans.log_event( + f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely" + ) + return f"Data {data_id} has finished processing before job could be completed" # map params from the tool's section; # ignore any other params that may have been passed by the remote From 9aab56e2f198c880d6b9b377a73f74a85985dcc1 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 14:19:03 +0100 Subject: [PATCH 07/18] Fix linting --- lib/galaxy/webapps/galaxy/controllers/async.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index 28d9ef268167..e58e68d1268d 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -59,12 +59,8 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): if not data: return f"Data {data_id} does not exist or has already been deleted" if data.state in data.dataset.terminal_states: - log.debug( - f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely" - ) - trans.log_event( - f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely" - ) + log.debug(f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely") + trans.log_event(f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely") return f"Data {data_id} has finished processing before job could be completed" # map params from the tool's section; @@ -73,7 +69,12 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): # if name, info, dbkey and data_type are not handled via incoming params, # use the metadata from the already existing dataset params_dict = dict( - STATUS=params.STATUS, URL=params.URL, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext + STATUS=params.STATUS, + URL=params.URL, + name=data.name, + info=data.info, + dbkey=data.dbkey, + data_type=data.ext, ) if tool.input_translator: tool.input_translator.translate(params) From 52c8edebda13c0e5bab1caadc97c8e4cd0158eb5 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 14:25:01 +0100 Subject: [PATCH 08/18] Fix linting once more --- lib/galaxy/webapps/galaxy/controllers/async.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index e58e68d1268d..c1a625dc4bb2 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -60,7 +60,9 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): return f"Data {data_id} does not exist or has already been deleted" if data.state in 
data.dataset.terminal_states: log.debug(f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely") - trans.log_event(f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely") + trans.log_event( + f"Tool {tool.id}: execution stopped as data {data_id} has entered terminal state prematurely" + ) return f"Data {data_id} has finished processing before job could be completed" # map params from the tool's section; From 387c9f0c875777e415ab27756c4ef70fb296c4cf Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 15:23:03 +0100 Subject: [PATCH 09/18] Start modernizing data_source.py script - Drop support for very old format of specifying execution params in a tabular file instead of a json one - Use O_TRUNC to discard previous file contents instead of opening for writing, then closing - Do not silently ignore non-integer values for max_file_size --- tools/data_source/data_source.py | 90 ++++++++++---------------------- 1 file changed, 27 insertions(+), 63 deletions(-) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index 4df9eb35e62c..f94b8847dd56 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -1,12 +1,10 @@ #!/usr/bin/env python # Retrieves data from external data source applications and stores in a dataset file. # Data source application parameters are temporarily stored in the dataset file. +import json import os import sys -from json import ( - dumps, - loads, -) + from urllib.parse import ( urlencode, urlparse, @@ -15,7 +13,6 @@ from galaxy.datatypes import sniff from galaxy.datatypes.registry import Registry -from galaxy.jobs import TOOL_PROVIDED_JOB_METADATA_FILE from galaxy.util import ( DEFAULT_SOCKET_TIMEOUT, get_charset_from_http_headers, @@ -32,72 +29,39 @@ def stop_err(msg): sys.exit() -def load_input_parameters(filename, erase_file=True): - datasource_params = {} - try: - json_params = loads(open(filename).read()) - datasource_params = json_params.get("param_dict") - except Exception: - json_params = None - for line in open(filename): - try: - line = line.strip() - fields = line.split("\t") - datasource_params[fields[0]] = fields[1] - except Exception: - continue - if erase_file: - open(filename, "w").close() # open file for writing, then close, removes params from file - return json_params, datasource_params - - def __main__(): - filename = sys.argv[1] - try: + if len(sys.argv) >= 3: max_file_size = int(sys.argv[2]) - except Exception: + else: max_file_size = 0 - job_params, params = load_input_parameters(filename) - if job_params is None: # using an older tabular file - enhanced_handling = False - job_params = dict(param_dict=params) - job_params["output_data"] = [ - dict(out_data_name="output", ext="data", file_name=filename, extra_files_path=None) - ] - job_params["job_config"] = dict( - GALAXY_ROOT_DIR=GALAXY_ROOT_DIR, - GALAXY_DATATYPES_CONF_FILE=GALAXY_DATATYPES_CONF_FILE, - TOOL_PROVIDED_JOB_METADATA_FILE=TOOL_PROVIDED_JOB_METADATA_FILE, - ) - else: - enhanced_handling = True - json_file = open( - job_params["job_config"]["TOOL_PROVIDED_JOB_METADATA_FILE"], "w" - ) # specially named file for output junk to pass onto set metadata + with open(sys.argv[1]) as fh: + params = json.load(fh) + + out_data_name = params['output_data'][0]["out_data_name"] + + URL = params['param_dict'].get("URL", None) # using exactly URL indicates that only one dataset is being downloaded + URL_method = params['param_dict'].get("URL_method", 
"get") datatypes_registry = Registry() datatypes_registry.load_datatypes( - root_dir=job_params["job_config"]["GALAXY_ROOT_DIR"], - config=job_params["job_config"]["GALAXY_DATATYPES_CONF_FILE"], + root_dir=params["job_config"]["GALAXY_ROOT_DIR"], + config=params["job_config"]["GALAXY_DATATYPES_CONF_FILE"], ) - URL = params.get("URL", None) # using exactly URL indicates that only one dataset is being downloaded - URL_method = params.get("URL_method", None) - - for data_dict in job_params["output_data"]: - cur_filename = data_dict.get("file_name", filename) - cur_URL = params.get("%s|%s|URL" % (GALAXY_PARAM_PREFIX, data_dict["out_data_name"]), URL) + for data_dict in params["output_data"]: + cur_filename = data_dict["file_name"] + cur_URL = params['param_dict'].get("%s|%s|URL" % (GALAXY_PARAM_PREFIX, data_dict["out_data_name"]), URL) if not cur_URL or urlparse(cur_URL).scheme not in ("http", "https", "ftp"): open(cur_filename, "w").write("") stop_err("The remote data source application has not sent back a URL parameter in the request.") # The following calls to urlopen() will use the above default timeout try: - if not URL_method or URL_method == "get": + if URL_method == "get": page = urlopen(cur_URL, timeout=DEFAULT_SOCKET_TIMEOUT) elif URL_method == "post": - page = urlopen(cur_URL, urlencode(params).encode("utf-8"), timeout=DEFAULT_SOCKET_TIMEOUT) + page = urlopen(cur_URL, urlencode(params["param_dict"]).encode("utf-8"), timeout=DEFAULT_SOCKET_TIMEOUT) except Exception as e: stop_err("The remote data source application may be off line, please try again later. Error: %s" % str(e)) if max_file_size: @@ -110,7 +74,7 @@ def __main__(): try: cur_filename = stream_to_open_named_file( page, - os.open(cur_filename, os.O_WRONLY | os.O_CREAT), + os.open(cur_filename, os.O_WRONLY | os.O_TRUNC | os.O_CREAT), cur_filename, source_encoding=get_charset_from_http_headers(page.headers), ) @@ -118,14 +82,14 @@ def __main__(): stop_err("Unable to fetch %s:\n%s" % (cur_URL, e)) # here import checks that upload tool performs - if enhanced_handling: - try: - ext = sniff.handle_uploaded_dataset_file(filename, datatypes_registry, ext=data_dict["ext"]) - except Exception as e: - stop_err(str(e)) - info = dict(type="dataset", dataset_id=data_dict["dataset_id"], ext=ext) - - json_file.write("%s\n" % dumps(info)) + try: + ext = sniff.handle_uploaded_dataset_file(cur_filename, datatypes_registry, ext=data_dict["ext"]) + except Exception as e: + stop_err(str(e)) + info = dict(type="dataset", dataset_id=data_dict["dataset_id"], ext=ext) + + with open(params["job_config"]["TOOL_PROVIDED_JOB_METADATA_FILE"], "w") as json_file: + json.dump(info, json_file) if __name__ == "__main__": From c554023c5e1045d8bda2f70bbed6bb56bd1ab497 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 15:33:33 +0100 Subject: [PATCH 10/18] Simply exit on errors --- tools/data_source/data_source.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index f94b8847dd56..f97538074bfa 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -24,11 +24,6 @@ GALAXY_DATATYPES_CONF_FILE = os.path.join(GALAXY_ROOT_DIR, "datatypes_conf.xml") -def stop_err(msg): - sys.stderr.write(msg) - sys.exit() - - def __main__(): if len(sys.argv) >= 3: max_file_size = int(sys.argv[2]) @@ -54,7 +49,7 @@ def __main__(): cur_URL = params['param_dict'].get("%s|%s|URL" % (GALAXY_PARAM_PREFIX, data_dict["out_data_name"]), URL) if 
not cur_URL or urlparse(cur_URL).scheme not in ("http", "https", "ftp"): open(cur_filename, "w").write("") - stop_err("The remote data source application has not sent back a URL parameter in the request.") + sys.exit("The remote data source application has not sent back a URL parameter in the request.") # The following calls to urlopen() will use the above default timeout try: @@ -63,11 +58,11 @@ def __main__(): elif URL_method == "post": page = urlopen(cur_URL, urlencode(params["param_dict"]).encode("utf-8"), timeout=DEFAULT_SOCKET_TIMEOUT) except Exception as e: - stop_err("The remote data source application may be off line, please try again later. Error: %s" % str(e)) + sys.exit("The remote data source application may be off line, please try again later. Error: %s" % str(e)) if max_file_size: file_size = int(page.info().get("Content-Length", 0)) if file_size > max_file_size: - stop_err( + sys.exit( "The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server." % (file_size, max_file_size) ) @@ -79,13 +74,13 @@ def __main__(): source_encoding=get_charset_from_http_headers(page.headers), ) except Exception as e: - stop_err("Unable to fetch %s:\n%s" % (cur_URL, e)) + sys.exit("Unable to fetch %s:\n%s" % (cur_URL, e)) # here import checks that upload tool performs try: ext = sniff.handle_uploaded_dataset_file(cur_filename, datatypes_registry, ext=data_dict["ext"]) except Exception as e: - stop_err(str(e)) + sys.exit(str(e)) info = dict(type="dataset", dataset_id=data_dict["dataset_id"], ext=ext) with open(params["job_config"]["TOOL_PROVIDED_JOB_METADATA_FILE"], "w") as json_file: From bd0f25745296be6f23fa1203cf8f50991af9bb2f Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 16:43:39 +0100 Subject: [PATCH 11/18] Switch data_source.py to modern tool-provided metadata Allows moving all data source tools using the script to profile 20.09. For more recent profiles data_source.py would have to work outside of Galaxy's Python environment. 
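Concretely, the shape of the tool-provided metadata file changes from one legacy per-dataset record per line to a single JSON document keyed by the output's name. A rough sketch of the two formats, with example values invented for illustration:

```python
import json

# Legacy format dropped by this commit: one JSON object per line,
# identifying the dataset by id (dataset_id and ext values are made up here).
legacy_line = json.dumps({"type": "dataset", "dataset_id": 42, "ext": "tabular"})

# Modern format written after this commit: a single document keyed by the
# output name declared in the tool ("output" is an example name).
modern_document = {"output": {"ext": "tabular"}}
with open("galaxy.json", "w") as fh:  # real path comes from TOOL_PROVIDED_JOB_METADATA_FILE
    json.dump(modern_document, fh)
```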
--- tools/data_source/biomart.xml | 2 +- tools/data_source/biomart_test.xml | 2 +- tools/data_source/cbi_rice_mart.xml | 2 +- tools/data_source/data_source.py | 5 +++-- tools/data_source/ebi_sra.xml | 2 +- tools/data_source/eupathdb.xml | 2 +- tools/data_source/fly_modencode.xml | 2 +- tools/data_source/flymine.xml | 2 +- tools/data_source/flymine_test.xml | 2 +- tools/data_source/gramene_mart.xml | 2 +- tools/data_source/hapmapmart.xml | 2 +- tools/data_source/hbvar.xml | 2 +- tools/data_source/intermine.xml | 2 +- tools/data_source/metabolicmine.xml | 2 +- tools/data_source/modmine.xml | 2 +- tools/data_source/mousemine.xml | 2 +- tools/data_source/ratmine.xml | 2 +- tools/data_source/sra.xml | 2 +- tools/data_source/ucsc_tablebrowser.xml | 2 +- tools/data_source/ucsc_tablebrowser_archaea.xml | 2 +- tools/data_source/ucsc_tablebrowser_test.xml | 2 +- tools/data_source/worm_modencode.xml | 2 +- tools/data_source/wormbase.xml | 2 +- tools/data_source/wormbase_test.xml | 2 +- tools/data_source/yeastmine.xml | 2 +- tools/data_source/zebrafishmine.xml | 2 +- 26 files changed, 28 insertions(+), 27 deletions(-) diff --git a/tools/data_source/biomart.xml b/tools/data_source/biomart.xml index b79a01f6048c..b8b819e5580e 100644 --- a/tools/data_source/biomart.xml +++ b/tools/data_source/biomart.xml @@ -7,7 +7,7 @@ TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile everything including and beyond the first '&' is truncated from URL. They said they'll let us know when this is fixed at their end. --> - + Ensembl server operation_0224 diff --git a/tools/data_source/biomart_test.xml b/tools/data_source/biomart_test.xml index 5334b94fbc2b..b99327520586 100644 --- a/tools/data_source/biomart_test.xml +++ b/tools/data_source/biomart_test.xml @@ -7,7 +7,7 @@ TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile everything including and beyond the first '&' is truncated from URL. They said they'll let us know when this is fixed at their end. --> - + Test server operation_0224 diff --git a/tools/data_source/cbi_rice_mart.xml b/tools/data_source/cbi_rice_mart.xml index 99763f5f178b..658df685448b 100644 --- a/tools/data_source/cbi_rice_mart.xml +++ b/tools/data_source/cbi_rice_mart.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. 
--> - + rice mart operation_0224 diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index f97538074bfa..a2f25986276c 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -81,10 +81,11 @@ def __main__(): ext = sniff.handle_uploaded_dataset_file(cur_filename, datatypes_registry, ext=data_dict["ext"]) except Exception as e: sys.exit(str(e)) - info = dict(type="dataset", dataset_id=data_dict["dataset_id"], ext=ext) + + tool_provided_metadata = {out_data_name: {"ext": ext}} with open(params["job_config"]["TOOL_PROVIDED_JOB_METADATA_FILE"], "w") as json_file: - json.dump(info, json_file) + json.dump(tool_provided_metadata, json_file) if __name__ == "__main__": diff --git a/tools/data_source/ebi_sra.xml b/tools/data_source/ebi_sra.xml index 11dbf6eef9b6..5a1e946b815e 100644 --- a/tools/data_source/ebi_sra.xml +++ b/tools/data_source/ebi_sra.xml @@ -1,5 +1,5 @@ - + ENA SRA operation_0224 diff --git a/tools/data_source/eupathdb.xml b/tools/data_source/eupathdb.xml index 9cb410ccaa91..7fe82b765643 100644 --- a/tools/data_source/eupathdb.xml +++ b/tools/data_source/eupathdb.xml @@ -1,4 +1,4 @@ - + server operation_0224 diff --git a/tools/data_source/fly_modencode.xml b/tools/data_source/fly_modencode.xml index f5e776ab7755..2bcd60015a4c 100644 --- a/tools/data_source/fly_modencode.xml +++ b/tools/data_source/fly_modencode.xml @@ -1,5 +1,5 @@ - + server operation_0224 diff --git a/tools/data_source/flymine.xml b/tools/data_source/flymine.xml index 6928cc1fb048..f7b7a2abd0cc 100644 --- a/tools/data_source/flymine.xml +++ b/tools/data_source/flymine.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/flymine_test.xml b/tools/data_source/flymine_test.xml index 899b73969c76..ef0524499b80 100644 --- a/tools/data_source/flymine_test.xml +++ b/tools/data_source/flymine_test.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/gramene_mart.xml b/tools/data_source/gramene_mart.xml index 64feef7dbece..7abf0b06d1f8 100644 --- a/tools/data_source/gramene_mart.xml +++ b/tools/data_source/gramene_mart.xml @@ -7,7 +7,7 @@ TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile everything including and beyond the first '&' is truncated from URL. They said they'll let us know when this is fixed at their end. --> - + Central server operation_0224 diff --git a/tools/data_source/hapmapmart.xml b/tools/data_source/hapmapmart.xml index c43fa5761e03..5ad6e0ac7953 100644 --- a/tools/data_source/hapmapmart.xml +++ b/tools/data_source/hapmapmart.xml @@ -11,7 +11,7 @@ TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile everything including and beyond the first '&' is truncated from URL. They said they'll let us know when this is fixed at their end. 
--> - + HapMap Biomart operation_0224 diff --git a/tools/data_source/hbvar.xml b/tools/data_source/hbvar.xml index 74f926e8c130..b6867850995c 100644 --- a/tools/data_source/hbvar.xml +++ b/tools/data_source/hbvar.xml @@ -1,5 +1,5 @@ - + Human Hemoglobin Variants and Thalassemias operation_0224 diff --git a/tools/data_source/intermine.xml b/tools/data_source/intermine.xml index 6f574b9b598c..89c8b95e730b 100644 --- a/tools/data_source/intermine.xml +++ b/tools/data_source/intermine.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/metabolicmine.xml b/tools/data_source/metabolicmine.xml index 17e2c7a0535b..be4f1c9dd4d2 100644 --- a/tools/data_source/metabolicmine.xml +++ b/tools/data_source/metabolicmine.xml @@ -1,5 +1,5 @@ - + server operation_0224 diff --git a/tools/data_source/modmine.xml b/tools/data_source/modmine.xml index 3fd3bc9595fd..b7c7ee6ab4f0 100644 --- a/tools/data_source/modmine.xml +++ b/tools/data_source/modmine.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/mousemine.xml b/tools/data_source/mousemine.xml index 508811f9aacb..bbda4c1aef82 100644 --- a/tools/data_source/mousemine.xml +++ b/tools/data_source/mousemine.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/ratmine.xml b/tools/data_source/ratmine.xml index 69aca47743ca..4a33f08cc756 100644 --- a/tools/data_source/ratmine.xml +++ b/tools/data_source/ratmine.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + server operation_0224 diff --git a/tools/data_source/sra.xml b/tools/data_source/sra.xml index 31b014e01cfd..07f01e5fed52 100644 --- a/tools/data_source/sra.xml +++ b/tools/data_source/sra.xml @@ -1,4 +1,4 @@ - + server operation_0224 diff --git a/tools/data_source/ucsc_tablebrowser.xml b/tools/data_source/ucsc_tablebrowser.xml index 928ff6498fee..19852ecf6fcc 100644 --- a/tools/data_source/ucsc_tablebrowser.xml +++ b/tools/data_source/ucsc_tablebrowser.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + table browser operation_0224 diff --git a/tools/data_source/ucsc_tablebrowser_archaea.xml b/tools/data_source/ucsc_tablebrowser_archaea.xml index 62258145892a..a1707e441a1e 100644 --- a/tools/data_source/ucsc_tablebrowser_archaea.xml +++ b/tools/data_source/ucsc_tablebrowser_archaea.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. 
--> - + table browser operation_0224 diff --git a/tools/data_source/ucsc_tablebrowser_test.xml b/tools/data_source/ucsc_tablebrowser_test.xml index e90bc6d14cdd..9782f8d2b41b 100644 --- a/tools/data_source/ucsc_tablebrowser_test.xml +++ b/tools/data_source/ucsc_tablebrowser_test.xml @@ -4,7 +4,7 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + table browser operation_0224 diff --git a/tools/data_source/worm_modencode.xml b/tools/data_source/worm_modencode.xml index 1c6f4be9c746..519430bbb40b 100644 --- a/tools/data_source/worm_modencode.xml +++ b/tools/data_source/worm_modencode.xml @@ -1,5 +1,5 @@ - + server operation_0224 diff --git a/tools/data_source/wormbase.xml b/tools/data_source/wormbase.xml index d279bdf7f761..a1657018ebe5 100644 --- a/tools/data_source/wormbase.xml +++ b/tools/data_source/wormbase.xml @@ -1,5 +1,5 @@ - + server operation_0224 diff --git a/tools/data_source/wormbase_test.xml b/tools/data_source/wormbase_test.xml index 9e2e96094712..a9fb4e05d703 100644 --- a/tools/data_source/wormbase_test.xml +++ b/tools/data_source/wormbase_test.xml @@ -1,5 +1,5 @@ - + test server operation_0224 diff --git a/tools/data_source/yeastmine.xml b/tools/data_source/yeastmine.xml index 87ff7fdf73e4..6a669cd33f60 100644 --- a/tools/data_source/yeastmine.xml +++ b/tools/data_source/yeastmine.xml @@ -1,5 +1,5 @@ - + server operation_0224 diff --git a/tools/data_source/zebrafishmine.xml b/tools/data_source/zebrafishmine.xml index 18c51fce2131..c32961677761 100644 --- a/tools/data_source/zebrafishmine.xml +++ b/tools/data_source/zebrafishmine.xml @@ -1,5 +1,5 @@ - + server operation_0224 From 3c178b559eb930547f3bb6eaef932fd97a0ba608 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 16:58:28 +0100 Subject: [PATCH 12/18] Fix linting --- tools/data_source/data_source.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index a2f25986276c..a185632689e9 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -33,10 +33,10 @@ def __main__(): with open(sys.argv[1]) as fh: params = json.load(fh) - out_data_name = params['output_data'][0]["out_data_name"] + out_data_name = params["output_data"][0]["out_data_name"] - URL = params['param_dict'].get("URL", None) # using exactly URL indicates that only one dataset is being downloaded - URL_method = params['param_dict'].get("URL_method", "get") + URL = params["param_dict"].get("URL", None) # using exactly URL indicates that only one dataset is being downloaded + URL_method = params["param_dict"].get("URL_method", "get") datatypes_registry = Registry() datatypes_registry.load_datatypes( @@ -46,7 +46,7 @@ def __main__(): for data_dict in params["output_data"]: cur_filename = data_dict["file_name"] - cur_URL = params['param_dict'].get("%s|%s|URL" % (GALAXY_PARAM_PREFIX, data_dict["out_data_name"]), URL) + cur_URL = params["param_dict"].get("%s|%s|URL" % (GALAXY_PARAM_PREFIX, data_dict["out_data_name"]), URL) if not cur_URL or urlparse(cur_URL).scheme not in ("http", "https", "ftp"): open(cur_filename, "w").write("") sys.exit("The remote data source application has not sent back a URL parameter in the request.") From 870af6d68ffc853277784f34f152862796206234 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 21:14:07 +0100 Subject: [PATCH 
13/18] Try to fix API test The test tool needs to declare a recent enough profile to expect modern tool-provided metadata. --- test/functional/tools/test_data_source.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/functional/tools/test_data_source.xml b/test/functional/tools/test_data_source.xml index 52c016d80914..df5a2e87813c 100644 --- a/test/functional/tools/test_data_source.xml +++ b/test/functional/tools/test_data_source.xml @@ -4,11 +4,11 @@ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed. --> - + - + go to Ratmine server $GALAXY_URL From 561bf92a1b452124737db735d399b4e91d581209 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Thu, 8 Feb 2024 21:41:19 +0100 Subject: [PATCH 14/18] Format import section with isort --- tools/data_source/data_source.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index a185632689e9..971ef58fdf60 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -4,7 +4,6 @@ import json import os import sys - from urllib.parse import ( urlencode, urlparse, From eef015d4e29c3df78fe96ebaed81c47034670aa5 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Fri, 9 Feb 2024 09:14:20 +0100 Subject: [PATCH 15/18] Preserve original request params as nested dict and use them in final POST request --- lib/galaxy/webapps/galaxy/controllers/async.py | 4 +++- lib/galaxy/webapps/galaxy/controllers/tool_runner.py | 3 +++ tools/data_source/data_source.py | 6 +++++- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index c1a625dc4bb2..c17e1c07a662 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -69,7 +69,8 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): # ignore any other params that may have been passed by the remote # server with the exception of STATUS and URL; # if name, info, dbkey and data_type are not handled via incoming params, - # use the metadata from the already existing dataset + # use the metadata from the already existing dataset; + # preserve original params under nested dict params_dict = dict( STATUS=params.STATUS, URL=params.URL, @@ -77,6 +78,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): info=data.info, dbkey=data.dbkey, data_type=data.ext, + incoming_request_params=params.__dict__.copy(), ) if tool.input_translator: tool.input_translator.translate(params) diff --git a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py index bf078054ae90..d32418721d4a 100644 --- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py +++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py @@ -77,6 +77,9 @@ def __tool_404__(): # execute tool without displaying form (used for datasource tools) params = galaxy.util.Params(kwd, sanitize=False) + if tool.tool_type == "data_source": + # preserve original params sent by the remote server as extra dict + params.update({"incoming_request_params": params.__dict__.copy()}) # do param translation here, used by datasource tools if tool.input_translator: tool.input_translator.translate(params) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index 
971ef58fdf60..fba58ba1cf9e 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -55,7 +55,11 @@ def __main__(): if URL_method == "get": page = urlopen(cur_URL, timeout=DEFAULT_SOCKET_TIMEOUT) elif URL_method == "post": - page = urlopen(cur_URL, urlencode(params["param_dict"]).encode("utf-8"), timeout=DEFAULT_SOCKET_TIMEOUT) + page = urlopen( + cur_URL, + urlencode(params["param_dict"]["incoming_request_params"]).encode("utf-8"), + timeout=DEFAULT_SOCKET_TIMEOUT + ) except Exception as e: sys.exit("The remote data source application may be off line, please try again later. Error: %s" % str(e)) if max_file_size: From fd3a19afd2e8696cac805be1b5acbfd3e6e47708 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Fri, 9 Feb 2024 09:19:33 +0100 Subject: [PATCH 16/18] Fix linting --- tools/data_source/data_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py index fba58ba1cf9e..55bf461dbd88 100644 --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -58,7 +58,7 @@ def __main__(): page = urlopen( cur_URL, urlencode(params["param_dict"]["incoming_request_params"]).encode("utf-8"), - timeout=DEFAULT_SOCKET_TIMEOUT + timeout=DEFAULT_SOCKET_TIMEOUT, ) except Exception as e: sys.exit("The remote data source application may be off line, please try again later. Error: %s" % str(e)) From 2918ac72ed5c00e1e5dbff1e7b90de0f3b30df7e Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Sun, 11 Feb 2024 21:35:48 +0100 Subject: [PATCH 17/18] Improve code comments and XSD --- lib/galaxy/tool_util/xsd/galaxy.xsd | 17 +++++++++++------ lib/galaxy/webapps/galaxy/controllers/async.py | 16 ++++++++++++++-- .../webapps/galaxy/controllers/tool_runner.py | 4 +++- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/lib/galaxy/tool_util/xsd/galaxy.xsd b/lib/galaxy/tool_util/xsd/galaxy.xsd index 7fd70408d77e..1417b9a34fe4 100644 --- a/lib/galaxy/tool_util/xsd/galaxy.xsd +++ b/lib/galaxy/tool_util/xsd/galaxy.xsd @@ -270,9 +270,11 @@ this tool is usable within a workflow (defaults to ``true`` for normal tools and - Only used if ``tool_type`` attribute value -is ``data_source`` or ``data_source_async`` - this attribute defines the HTTP request method to use when -communicating with an external data source application (the default is ``get``). + *Deprecated* and ignored, +use a [request_param](#tool-request-param-translation-request-param) element with ``galaxy_name="URL_method"`` instead. +Was only used if ``tool_type`` attribute value is ``data_source`` or ``data_source_async`` - +this attribute defined the HTTP request method to use when communicating with an external data source application +(default: ``get``). @@ -1726,7 +1728,7 @@ useful for non-deterministic output. @@ -1739,7 +1741,7 @@ and this can be used instead. If specified, the target output's checksum should match the value specified here. This value should have the form ``hash_type$hash_value`` (e.g. ``sha1$8156d7ca0f46ed7abac98f82e36cfaddb2aca041``). For large static files -it may be inconvenient to upload the entiry file and this can be used instead. +it may be inconvenient to upload the entire file and this can be used instead. ]]> @@ -2857,7 +2859,9 @@ tools will not need to specify any attributes on this tag itself.]]> - Data source HTTP action (e.g. ``get`` or ``put``) to use. 
+ *Deprecated* and ignored, +use a [request_param](#tool-request-param-translation-request-param) element with ``galaxy_name="URL_method"`` instead. +Data source HTTP action (e.g. ``get`` or ``put``) to use. @@ -7155,6 +7159,7 @@ and ``bibtex`` are the only supported options. + diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py index c17e1c07a662..d9ecae7f2305 100644 --- a/lib/galaxy/webapps/galaxy/controllers/async.py +++ b/lib/galaxy/webapps/galaxy/controllers/async.py @@ -1,5 +1,5 @@ """ -Upload class +Controller to handle communication of tools of type data_source_async """ import logging @@ -134,9 +134,21 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd): return f"Data {data_id} with status {STATUS} received. OK" else: - # # no data_id must be parameter submission # + # create new dataset, put it into running state, + # send request for data to remote server and see if the response + # ends in ok; + # the request that's getting sent goes to the URL found in + # params.URL or, in its absence, to the one found as the value of + # the "action" attribute of the data source tool's "inputs" tag. + # Included in the request are the parameters: + # - data_id, which indicates to the remote server that Galaxy is + # ready to accept data + # - GALAXY_URL, which takes the form: + # {base_url}/async/{tool_id}/{data_id}/{data_secret}, and which + # when used by the remote server to send a data download link, + # will trigger the if branch above. GALAXY_TYPE = None if params.data_type: GALAXY_TYPE = params.data_type diff --git a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py index d32418721d4a..c4ccf28b2a14 100644 --- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py +++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py @@ -75,7 +75,9 @@ def __tool_404__(): if tool.tool_type in ["default", "interactivetool"]: return trans.response.send_redirect(url_for(controller="root", tool_id=tool_id)) - # execute tool without displaying form (used for datasource tools) + # execute tool without displaying form + # (used for datasource tools, but note that data_source_async tools + # are handled separately by the async controller) params = galaxy.util.Params(kwd, sanitize=False) if tool.tool_type == "data_source": # preserve original params sent by the remote server as extra dict From 6ee7663e6cceca3f72f4dff438d8600427682376 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Mon, 12 Feb 2024 14:34:19 +0100 Subject: [PATCH 18/18] Update lib/galaxy/tool_util/xsd/galaxy.xsd Co-authored-by: Marius van den Beek --- lib/galaxy/tool_util/xsd/galaxy.xsd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/tool_util/xsd/galaxy.xsd b/lib/galaxy/tool_util/xsd/galaxy.xsd index 1417b9a34fe4..224dd84d5b0b 100644 --- a/lib/galaxy/tool_util/xsd/galaxy.xsd +++ b/lib/galaxy/tool_util/xsd/galaxy.xsd @@ -58,7 +58,7 @@ List of behavior changes associated with profile versions: ### 21.09 - Do not strip leading and trailing whitespaces in `from_work_dir` attribute. -- Do not use Galaxy python environment for `data_source` tools. +- Do not use Galaxy Python virtual environment for `data_source` tools. `data_source` tools should explicitly use the `galaxy-util` package. ### 23.0
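With the original request parameters preserved under `incoming_request_params` (patch 15), the modernized `data_source.py` can rebuild the follow-up request itself. A minimal sketch of that fetch step — the function boundary and the timeout default are chosen here for illustration; the script actually uses `galaxy.util.DEFAULT_SOCKET_TIMEOUT` and streams the response to the output file:

```python
from urllib.parse import urlencode
from urllib.request import urlopen


def fetch_remote_data(cur_url: str, url_method: str, param_dict: dict, timeout: float = 600.0):
    """Open the remote URL, re-posting the original request params when asked to."""
    if url_method == "post":
        # form-encode the params the remote server originally sent to Galaxy
        body = urlencode(param_dict["incoming_request_params"]).encode("utf-8")
        return urlopen(cur_url, body, timeout=timeout)
    # default (and explicit "get"): plain GET request
    return urlopen(cur_url, timeout=timeout)
```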