diff --git a/pyflask/apis/neuroconv.py b/pyflask/apis/neuroconv.py
index 3935cb803..03fa85220 100644
--- a/pyflask/apis/neuroconv.py
+++ b/pyflask/apis/neuroconv.py
@@ -18,7 +18,7 @@
     inspect_multiple_filesystem_objects,
     upload_to_dandi,
     upload_folder_to_dandi,
-    upload_multiple_filesystem_objects_to_dandi
+    upload_multiple_filesystem_objects_to_dandi,
 )
 
 from errorHandlers import notBadRequestException
@@ -148,8 +148,8 @@ def post(self):
         try:
             paths = neuroconv_api.payload["filesystem_paths"]
 
-            if (len(paths) == 1 and isdir(paths[0])):
-                kwargs = { **neuroconv_api.payload }
+            if len(paths) == 1 and isdir(paths[0]):
+                kwargs = {**neuroconv_api.payload}
                 del kwargs["filesystem_paths"]
                 kwargs["nwb_folder_path"] = paths[0]
                 return upload_folder_to_dandi(**kwargs)
@@ -160,7 +160,8 @@ def post(self):
         except Exception as e:
             if notBadRequestException(e):
                 neuroconv_api.abort(500, str(e))
-
+
+
 @neuroconv_api.route("/inspect_file")
 class InspectNWBFile(Resource):
     @neuroconv_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
diff --git a/pyflask/manageNeuroconv/manage_neuroconv.py b/pyflask/manageNeuroconv/manage_neuroconv.py
index 83b79a253..cf56206e8 100644
--- a/pyflask/manageNeuroconv/manage_neuroconv.py
+++ b/pyflask/manageNeuroconv/manage_neuroconv.py
@@ -410,13 +410,14 @@ def update_conversion_progress(**kwargs):
 
 
 def upload_multiple_filesystem_objects_to_dandi(**kwargs):
-    tmp_folder_path = aggregate_in_temp_directory(kwargs['filesystem_paths'], 'upload')
-    innerKwargs = { **kwargs }
-    del innerKwargs['filesystem_paths']
-    innerKwargs["nwb_folder_path"] = tmp_folder_path
-    result = upload_folder_to_dandi(**innerKwargs)
-    rmtree(tmp_folder_path)
-    return result
+    tmp_folder_path = aggregate_in_temp_directory(kwargs["filesystem_paths"], "upload")
+    innerKwargs = {**kwargs}
+    del innerKwargs["filesystem_paths"]
+    innerKwargs["nwb_folder_path"] = tmp_folder_path
+    result = upload_folder_to_dandi(**innerKwargs)
+    rmtree(tmp_folder_path)
+    return result
+
 
 def upload_folder_to_dandi(
     dandiset_id: str,
diff --git a/schemas/json/dandi/standalone.json b/schemas/json/dandi/standalone.json
index dfa6369d2..fa210901f 100644
--- a/schemas/json/dandi/standalone.json
+++ b/schemas/json/dandi/standalone.json
@@ -8,5 +8,5 @@
       }
     }
   },
-  "required": ["filesystem_paths"]
+  "required": ["filesystem_paths"]
 }
diff --git a/src/renderer/src/stories/pages/uploads/UploadsPage.js b/src/renderer/src/stories/pages/uploads/UploadsPage.js
index 9f861bddd..fc759aa58 100644
--- a/src/renderer/src/stories/pages/uploads/UploadsPage.js
+++ b/src/renderer/src/stories/pages/uploads/UploadsPage.js
@@ -21,8 +21,7 @@ import { DandiResults } from "../../DandiResults.js";
 export const isStaging = (id) => parseInt(id) >= 100000;
 
 export async function uploadToDandi(info, type = "project" in info ? "" : "folder") {
-
-    const { dandiset_id } = info
+    const { dandiset_id } = info;
 
     const staging = isStaging(dandiset_id); // Automatically detect staging IDs
 
@@ -59,7 +58,9 @@ export async function uploadToDandi(info, type = "project" in info ? "" : "folde
     if (result)
         notyf.open({
             type: "success",
-            message: `${info.project ?? `${info[folderPathKey].length} filesystem entries`} successfully uploaded to Dandiset ${dandiset_id}`,
+            message: `${
+                info.project ?? `${info[folderPathKey].length} filesystem entries`
+            } successfully uploaded to Dandiset ${dandiset_id}`,
         });
 
     return result;