From a5cc09147330928bb6abec0f0b832cbb0ba6ffa9 Mon Sep 17 00:00:00 2001
From: Nicola Soranzo
Date: Tue, 2 Apr 2024 18:17:53 +0100
Subject: [PATCH] Fix ``test_composite_datatype_stage_upload1`` test

Can be squashed into commit "Record unnamed_outputs as job outputs, wait for job outputs in staging function".

Fix the following traceback:

```
self =
history_id = '333add226b1f5083'

    @skip_without_datatype("velvet")
    def test_composite_datatype_stage_upload1(self, history_id: str) -> None:
        job = {
            "input1": {
                "class": "File",
                "format": "velvet",
                "composite_data": [
                    "test-data/simple_line.txt",
                    "test-data/simple_line_alternative.txt",
                    "test-data/simple_line_x2.txt",
                ],
            }
        }
>       stage_inputs(self.galaxy_interactor, history_id, job, use_path_paste=False, use_fetch_api=False)

lib/galaxy_test/api/test_tools_upload.py:497:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
lib/galaxy_test/base/populators.py:3181: in stage_inputs
    return InteractorStaging(galaxy_interactor, use_fetch_api=use_fetch_api).stage(
lib/galaxy/tool_util/client/staging.py:264: in stage
    return galactic_job_json(
lib/galaxy/tool_util/cwl/util.py:392: in galactic_job_json
    replace_keys[key] = replacement_item(value)
lib/galaxy/tool_util/cwl/util.py:220: in replacement_item
    return replacement_file(value)
lib/galaxy/tool_util/cwl/util.py:251: in replacement_file
    rval_c = upload_file_with_composite_data(None, composite_data, filetype=filetype, **kwd)
lib/galaxy/tool_util/cwl/util.py:190: in upload_file_with_composite_data
    return response_to_hda(target, upload_response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

target =
upload_response = {'implicit_collections': [], 'jobs': [{'create_time': '2024-04-02T13:19:27.579575', 'exit_code': None, 'galaxy_version...ats': ['http://edamontology.org/format_1915'], 'data_type': 'galaxy.datatypes.data.Data', 'deleted': False, ...}], ...}

    def response_to_hda(target: UploadTarget, upload_response: Dict[str, Any]) -> Dict[str, str]:
>       dataset = next(iter(upload_response["outputs"].values()))
E       AttributeError: 'list' object has no attribute 'values'

lib/galaxy/tool_util/cwl/util.py:160: AttributeError
```
---
 lib/galaxy/tool_util/client/staging.py | 27 +++++++++++++--------------
 1 file changed, 13 insertions(+), 14 deletions(-)

diff --git a/lib/galaxy/tool_util/client/staging.py b/lib/galaxy/tool_util/client/staging.py
index 4d03bbedf931..6a4b8cda8cc7 100644
--- a/lib/galaxy/tool_util/client/staging.py
+++ b/lib/galaxy/tool_util/client/staging.py
@@ -59,18 +59,16 @@ def _post(self, api_path: str, payload: Dict[str, Any]) -> Dict[str, Any]:
     def _attach_file(self, path: str) -> BinaryIO:
         return open(path, "rb")
 
-    def _tools_post(self, payload: Dict[str, Any]) -> Dict[str, Any]:
+    def _job_details_from_tool_response(self, tool_response: Dict[str, Any]) -> List[Dict[str, Any]]:
+        return [self._handle_job(job) for job in tool_response.get("jobs", [])]
+
+    def _tools_post(self, payload: Dict[str, Any]) -> List[Dict[str, Any]]:
         tool_response = self._post("tools", payload)
-        for job in tool_response.get("jobs", []):
-            self._handle_job(job)
-        return tool_response
+        return self._job_details_from_tool_response(tool_response)
 
     def _fetch_post(self, payload: Dict[str, Any]) -> List[Dict[str, Any]]:
         tool_response = self._post("tools/fetch", payload)
-        job_details = []
-        for job in tool_response.get("jobs", []):
-            job_details.append(self._handle_job(job))
-        return job_details
+        return self._job_details_from_tool_response(tool_response)
 
     @abc.abstractmethod
     def _handle_job(self, job_response: Dict[str, Any]) -> Dict[str, Any]:
@@ -205,12 +203,12 @@ def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> No
                     _attach_file(upload_payload, composite_data, index=i)
 
             self._log(f"upload_payload is {upload_payload}")
-            return self._tools_post(upload_payload)
+            return self._tools_post(upload_payload)[0]
         elif isinstance(upload_target, FileLiteralTarget):
             # For file literals - take them as is - never convert line endings.
             payload = _upload_payload(history_id, file_type="auto", auto_decompress=False, to_posix_lines=False)
             payload["inputs"]["files_0|url_paste"] = upload_target.contents
-            return self._tools_post(payload)
+            return self._tools_post(payload)[0]
         elif isinstance(upload_target, DirectoryUploadTarget):
             tar_path = upload_target.tar_path
 
@@ -220,20 +218,21 @@ def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> No
             )
             upload_payload["inputs"]["files_0|auto_decompress"] = False
             _attach_file(upload_payload, tar_path)
-            tar_upload_response = self._tools_post(upload_payload)
+            tar_upload_first_job_details = self._tools_post(upload_payload)[0]
+            tar_upload_first_dataset_id = next(iter(tar_upload_first_job_details["outputs"].values()))["id"]
             convert_payload = dict(
                 tool_id="CONVERTER_tar_to_directory",
-                tool_inputs={"input1": {"src": "hda", "id": tar_upload_response["outputs"][0]["id"]}},
+                tool_inputs={"input1": {"src": "hda", "id": tar_upload_first_dataset_id}},
                 history_id=history_id,
             )
-            convert_response = self._tools_post(convert_payload)
+            convert_response = self._tools_post(convert_payload)[0]
             assert "outputs" in convert_response, convert_response
             return convert_response
         elif isinstance(upload_target, ObjectUploadTarget):
             content = json.dumps(upload_target.object)
             payload = _upload_payload(history_id, file_type="expression.json")
             payload["files_0|url_paste"] = content
-            return self._tools_post(payload)
+            return self._tools_post(payload)[0]
         else:
             raise ValueError(f"Unsupported type for upload_target: {type(upload_target)}")
 
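Note (not part of the patch): a minimal sketch of the list-vs-dict shape mismatch behind the `AttributeError` above. The field values (`"job123"`, `"hda456"`, `"output1"`) are hypothetical, not taken from a real Galaxy response; only the shape of `"outputs"` matters.

```python
# Sketch only, with hypothetical values.

# Raw response from posting to the tools API: "outputs" is a *list* of
# dataset dicts, which is why the old code indexed it with [0].
tool_response = {
    "jobs": [{"id": "job123"}],
    "outputs": [{"id": "hda456"}],
}

# Per-job details, as returned by _handle_job() and expected by
# response_to_hda(): "outputs" is a *dict* keyed by output name,
# so calling .values() on it works.
job_details = {
    "id": "job123",
    "outputs": {"output1": {"id": "hda456"}},
}

next(iter(job_details["outputs"].values()))["id"]   # ok: 'hda456'
# next(iter(tool_response["outputs"].values()))     # AttributeError: 'list' object has no attribute 'values'
```

Returning the job-details list from `_tools_post` (and having callers take `[0]`) gives `response_to_hda` the dict-shaped `outputs` it expects.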