diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 9273adef7..248023898 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,6 +10,10 @@ repos:
   hooks:
   - id: black
     exclude: ^docs/
+- repo: https://github.com/PyCQA/isort
+  rev: 5.13.2
+  hooks:
+  - id: isort
 - repo: https://github.com/pre-commit/mirrors-prettier
   rev: "v4.0.0-alpha.8"
   hooks:
diff --git a/demos/sse/test_sse_display_of_tqdm.py b/demos/sse/test_sse_display_of_tqdm.py
index 0b705a971..b32452bd6 100644
--- a/demos/sse/test_sse_display_of_tqdm.py
+++ b/demos/sse/test_sse_display_of_tqdm.py
@@ -1,12 +1,12 @@
-from flask import Flask, render_template, Response
-from typing import List
-import random
 import asyncio
+import os
+import random
+import sys
 import time
-from tqdm import tqdm as base_tqdm
+from typing import List
 
-import sys
-import os
+from flask import Flask, Response, render_template
+from tqdm import tqdm as base_tqdm
 
 SCRIPT_DIR = os.path.dirname(os.path.abspath(os.path.join(__file__, "..", "..", "pyflask")))
 sys.path.append(os.path.dirname(SCRIPT_DIR))
diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css
new file mode 100644
index 000000000..4e72cecdb
--- /dev/null
+++ b/docs/_static/css/custom.css
@@ -0,0 +1,33 @@
+/* Improve spacing */
+.version-switcher__container.dropdown {
+    margin-left: 10px;
+}
+
+button.btn.version-switcher__button {
+    margin-bottom: 0px;
+}
+
+/* Show on hover */
+.version-switcher__container.dropdown:hover .dropdown-menu {
+    display: block;
+    left: 0;
+    margin-top: var(--bs-dropdown-spacer);
+    top: 100%;
+}
+
+.dropdown-menu.show {
+    display: none;
+}
+
+/* Remove underline and borders */
+button.btn.version-switcher__button:hover {
+    text-decoration: none;
+}
+
+.version-switcher__menu a.list-group-item {
+    border: none !important;
+}
+
+.version-switcher__menu a.list-group-item:hover {
+    text-decoration: none !important;
+}
diff --git a/docs/conf.py b/docs/conf.py
index 52a59ff6f..0c466f25e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,15 +1,14 @@
-import sys
 import inspect
-from pathlib import Path
 import json
 import os
+import sys
+from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).resolve().parents[0]))
 sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
 
 from conf_extlinks import extlinks, intersphinx_mapping  # noqa: E402, F401
 
-
 project = "NWB GUIDE"
 copyright = "2022, CatalystNeuro"  # TODO: how to include NWB?
 author = "Garrett Flynn, Cody Baker, Ryan Ly, Oliver Ruebel, and Ben Dichter"
@@ -45,9 +44,9 @@
 ]
 
 # These paths are either relative to html_static_path or fully qualified paths (eg. https://...)
-# html_css_files = [
-#     "css/custom.css",
-# ]
+html_css_files = [
+    "css/custom.css",
+]
 
 linkcheck_anchors = False
@@ -85,6 +84,7 @@
 version_match = os.environ.get("READTHEDOCS_VERSION")
 with open("../package.json") as f:
     release = json.load(f)["version"]
+
 # If READTHEDOCS_VERSION doesn't exist, we're not on RTD
 # If it is an integer, we're in a PR build and the version isn't correct.
 # If it's "latest" → change to "dev" (that's what we want the switcher to call it)
@@ -144,3 +144,6 @@ def _correct_signatures(app, what, name, obj, options, signature, return_annotation):
 def setup(app):
     # This makes the data-interfaces signatures display on the docs/api, they don't otherwise
     app.connect("autodoc-process-signature", _correct_signatures)
+
+    # Add custom CSS
+    app.add_css_file("css/custom.css")
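The comment block above encodes the version-switcher policy for the docs build. As a reading aid, here is a hedged sketch of the branching those three comments describe; only the comments and the two assignments appear in this diff, so the conditional bodies are an assumption, not code from the PR:

```python
# Hedged sketch of the READTHEDOCS_VERSION handling described by the comments
# in docs/conf.py; the branch bodies are assumed, not copied from this patch.
import json
import os

version_match = os.environ.get("READTHEDOCS_VERSION")
with open("../package.json") as f:
    release = json.load(f)["version"]

if version_match is None:
    version_match = "dev"  # not on ReadTheDocs (local build)
elif version_match.isdigit():
    version_match = "dev"  # PR build: the injected version is not meaningful
elif version_match == "latest":
    version_match = "dev"  # what we want the switcher to call the dev build
```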
# If it's "latest" → change to "dev" (that's what we want the switcher to call it) @@ -144,3 +144,6 @@ def _correct_signatures(app, what, name, obj, options, signature, return_annotat def setup(app): # This makes the data-interfaces signatures display on the docs/api, they don't otherwise app.connect("autodoc-process-signature", _correct_signatures) + + # Add custom CSS + app.add_css_file("css/custom.css") diff --git a/generateInterfaceSchema.py b/generateInterfaceSchema.py index 9a04a8b85..f63eb41f4 100644 --- a/generateInterfaceSchema.py +++ b/generateInterfaceSchema.py @@ -1,6 +1,7 @@ -from pathlib import Path import json -from neuroconv import converters, datainterfaces, NWBConverter +from pathlib import Path + +from neuroconv import NWBConverter, converters, datainterfaces filepath = Path("guideGlobalMetadata.json") generatedJSONSchemaPath = Path("schemas", "json", "generated") diff --git a/guide_testing_suite.yml b/guide_testing_suite.yml index 82747c095..cec3c8a39 100644 --- a/guide_testing_suite.yml +++ b/guide_testing_suite.yml @@ -24,10 +24,19 @@ pipelines: PhySortingInterface: folder_path: ephy_testing_data/phy/phy_example_0 + SpikeGLX_v1_SingleProbe_AP: SpikeGLXRecordingInterface: file_path: ephy_testing_data/spikeglx/Noise4Sam_g0/Noise4Sam_g0_imec0/Noise4Sam_g0_t0.imec0.ap.bin + + SpikeGLXConverter-Phy: + SpikeGLXConverterPipe: + folder_path: ephy_testing_data/spikeglx/Noise4Sam_g0 + PhySortingInterface: + folder_path: ephy_testing_data/phy/phy_example_0 + + Suite2P_SinglePlane_SingleChannel: metadata: NWBFile: diff --git a/pyflask/apis/__init__.py b/pyflask/apis/__init__.py index 9368a0aa8..f492dc7df 100644 --- a/pyflask/apis/__init__.py +++ b/pyflask/apis/__init__.py @@ -1,3 +1,3 @@ -from .startup import startup_api -from .neuroconv import neuroconv_api from .data import data_api +from .neuroconv import neuroconv_api +from .startup import startup_api diff --git a/pyflask/apis/data.py b/pyflask/apis/data.py index 122559150..de6dfdc6d 100644 --- a/pyflask/apis/data.py +++ b/pyflask/apis/data.py @@ -2,10 +2,9 @@ import traceback -from flask_restx import Namespace, Resource, reqparse - -from manageNeuroconv import generate_test_data, generate_dataset from errorHandlers import notBadRequestException +from flask_restx import Namespace, Resource, reqparse +from manageNeuroconv import generate_dataset, generate_test_data data_api = Namespace("data", description="API route for dataset generation in the NWB GUIDE.") diff --git a/pyflask/apis/neuroconv.py b/pyflask/apis/neuroconv.py index 1877e763e..015cb61e8 100644 --- a/pyflask/apis/neuroconv.py +++ b/pyflask/apis/neuroconv.py @@ -2,32 +2,28 @@ import traceback -from flask_restx import Namespace, Resource, reqparse +from errorHandlers import notBadRequestException from flask import Response, request - -from manageNeuroconv.info import announcer - +from flask_restx import Namespace, Resource, reqparse from manageNeuroconv import ( - get_all_interface_info, - get_all_converter_info, - locate_data, autocomplete_format_string, - get_source_schema, - get_metadata_schema, convert_to_nwb, - validate_metadata, - listen_to_neuroconv_events, + get_all_converter_info, + get_all_interface_info, + get_interface_alignment, + get_metadata_schema, + get_source_schema, + inspect_multiple_filesystem_objects, inspect_nwb_file, inspect_nwb_folder, - inspect_multiple_filesystem_objects, - upload_project_to_dandi, + listen_to_neuroconv_events, + locate_data, upload_folder_to_dandi, upload_multiple_filesystem_objects_to_dandi, - get_interface_alignment, + 
diff --git a/pyflask/apis/__init__.py b/pyflask/apis/__init__.py
index 9368a0aa8..f492dc7df 100644
--- a/pyflask/apis/__init__.py
+++ b/pyflask/apis/__init__.py
@@ -1,3 +1,3 @@
-from .startup import startup_api
-from .neuroconv import neuroconv_api
 from .data import data_api
+from .neuroconv import neuroconv_api
+from .startup import startup_api
diff --git a/pyflask/apis/data.py b/pyflask/apis/data.py
index 122559150..de6dfdc6d 100644
--- a/pyflask/apis/data.py
+++ b/pyflask/apis/data.py
@@ -2,10 +2,9 @@
 
 import traceback
 
-from flask_restx import Namespace, Resource, reqparse
-
-from manageNeuroconv import generate_test_data, generate_dataset
 from errorHandlers import notBadRequestException
+from flask_restx import Namespace, Resource, reqparse
+from manageNeuroconv import generate_dataset, generate_test_data
 
 data_api = Namespace("data", description="API route for dataset generation in the NWB GUIDE.")
diff --git a/pyflask/apis/neuroconv.py b/pyflask/apis/neuroconv.py
index 1877e763e..015cb61e8 100644
--- a/pyflask/apis/neuroconv.py
+++ b/pyflask/apis/neuroconv.py
@@ -2,32 +2,28 @@
 
 import traceback
 
-from flask_restx import Namespace, Resource, reqparse
+from errorHandlers import notBadRequestException
 from flask import Response, request
-
-from manageNeuroconv.info import announcer
-
+from flask_restx import Namespace, Resource, reqparse
 from manageNeuroconv import (
-    get_all_interface_info,
-    get_all_converter_info,
-    locate_data,
     autocomplete_format_string,
-    get_source_schema,
-    get_metadata_schema,
     convert_to_nwb,
-    validate_metadata,
-    listen_to_neuroconv_events,
+    get_all_converter_info,
+    get_all_interface_info,
+    get_interface_alignment,
+    get_metadata_schema,
+    get_source_schema,
+    inspect_multiple_filesystem_objects,
     inspect_nwb_file,
     inspect_nwb_folder,
-    inspect_multiple_filesystem_objects,
-    upload_project_to_dandi,
+    listen_to_neuroconv_events,
+    locate_data,
     upload_folder_to_dandi,
     upload_multiple_filesystem_objects_to_dandi,
-    get_interface_alignment,
+    upload_project_to_dandi,
+    validate_metadata,
 )
-
-from errorHandlers import notBadRequestException
-
+from manageNeuroconv.info import announcer
 
 neuroconv_api = Namespace("neuroconv", description="Neuroconv neuroconv_api for the NWB GUIDE.")
diff --git a/pyflask/apis/startup.py b/pyflask/apis/startup.py
index feb59d871..63be4fd0a 100644
--- a/pyflask/apis/startup.py
+++ b/pyflask/apis/startup.py
@@ -1,8 +1,7 @@
 """API endpoint definitions for startup operations."""
 
-from flask_restx import Namespace, Resource
-
 from errorHandlers import notBadRequestException
+from flask_restx import Namespace, Resource
 
 startup_api = Namespace("startup", description="API for startup commands related to the NWB GUIDE.")
diff --git a/pyflask/app.py b/pyflask/app.py
index 924e9454a..d84b27bae 100644
--- a/pyflask/app.py
+++ b/pyflask/app.py
@@ -1,28 +1,29 @@
 """The primary Flask server for the Python backend."""
 
-import sys
 import json
 import multiprocessing
-from os import kill, getpid
-from os.path import isabs
-
-from signal import SIGINT
-from logging import Formatter, DEBUG
+import sys
+from logging import DEBUG, Formatter
 from logging.handlers import RotatingFileHandler
+from os import getpid, kill
+from os.path import isabs
 from pathlib import Path
+from signal import SIGINT
 from urllib.parse import unquote
 
-
 # https://stackoverflow.com/questions/32672596/pyinstaller-loads-script-multiple-times#comment103216434_32677108
 multiprocessing.freeze_support()
 
-from flask import Flask, request, send_from_directory, send_file
+from apis import data_api, neuroconv_api, startup_api
+from flask import Flask, request, send_file, send_from_directory
 from flask_cors import CORS
 from flask_restx import Api, Resource
-
-from apis import startup_api, neuroconv_api, data_api
-from manageNeuroconv.info import resource_path, STUB_SAVE_FOLDER_PATH, CONVERSION_SAVE_FOLDER_PATH
+from manageNeuroconv.info import (
+    CONVERSION_SAVE_FOLDER_PATH,
+    STUB_SAVE_FOLDER_PATH,
+    resource_path,
+)
 
 app = Flask(__name__)
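app.py registers the three namespaces imported above on a single flask_restx Api. A minimal, self-contained sketch of that wiring (the /ping route and all names here are hypothetical, shown only to illustrate the pattern shared by the pyflask/apis modules):

```python
# Hedged sketch of the Namespace/Api pattern used across pyflask; the route
# and names are hypothetical, not taken from this diff.
from flask import Flask
from flask_restx import Api, Namespace, Resource

example_api = Namespace("example", description="Hypothetical namespace for illustration.")


@example_api.route("/ping")
class Ping(Resource):
    def get(self):
        return {"status": "ok"}


app = Flask(__name__)
api = Api(app)
api.add_namespace(example_api)

if __name__ == "__main__":
    app.run(port=5000)
```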
diff --git a/pyflask/manageNeuroconv/__init__.py b/pyflask/manageNeuroconv/__init__.py
index ddcc2b8b6..f9f55e082 100644
--- a/pyflask/manageNeuroconv/__init__.py
+++ b/pyflask/manageNeuroconv/__init__.py
@@ -1,23 +1,21 @@
+from .info import CONVERSION_SAVE_FOLDER_PATH, STUB_SAVE_FOLDER_PATH
 from .manage_neuroconv import (
-    get_all_interface_info,
-    get_all_converter_info,
-    locate_data,
     autocomplete_format_string,
-    get_source_schema,
-    get_metadata_schema,
     convert_to_nwb,
-    validate_metadata,
-    upload_project_to_dandi,
-    upload_folder_to_dandi,
-    upload_multiple_filesystem_objects_to_dandi,
-    listen_to_neuroconv_events,
     generate_dataset,
+    generate_test_data,
+    get_all_converter_info,
+    get_all_interface_info,
+    get_interface_alignment,
+    get_metadata_schema,
+    get_source_schema,
+    inspect_multiple_filesystem_objects,
     inspect_nwb_file,
     inspect_nwb_folder,
-    inspect_multiple_filesystem_objects,
-    get_interface_alignment,
-    generate_test_data,
+    listen_to_neuroconv_events,
+    locate_data,
+    upload_folder_to_dandi,
+    upload_multiple_filesystem_objects_to_dandi,
+    upload_project_to_dandi,
+    validate_metadata,
 )
-
-
-from .info import STUB_SAVE_FOLDER_PATH, CONVERSION_SAVE_FOLDER_PATH
diff --git a/pyflask/manageNeuroconv/info/__init__.py b/pyflask/manageNeuroconv/info/__init__.py
index 915d74aee..edde04113 100644
--- a/pyflask/manageNeuroconv/info/__init__.py
+++ b/pyflask/manageNeuroconv/info/__init__.py
@@ -1,8 +1,7 @@
+from .sse import announcer, format_sse
 from .urls import (
-    resource_path,
+    CONVERSION_SAVE_FOLDER_PATH,
     GUIDE_ROOT_FOLDER,
     STUB_SAVE_FOLDER_PATH,
-    CONVERSION_SAVE_FOLDER_PATH,
+    resource_path,
 )
-
-from .sse import announcer, format_sse
diff --git a/pyflask/manageNeuroconv/info/sse.py b/pyflask/manageNeuroconv/info/sse.py
index bb90f34f7..b9593cba5 100644
--- a/pyflask/manageNeuroconv/info/sse.py
+++ b/pyflask/manageNeuroconv/info/sse.py
@@ -1,5 +1,5 @@
-import queue
 import json
+import queue
 
 
 def format_sse(data: str, event=None) -> str:
diff --git a/pyflask/manageNeuroconv/info/urls.py b/pyflask/manageNeuroconv/info/urls.py
index 261f1188c..bf8a65116 100644
--- a/pyflask/manageNeuroconv/info/urls.py
+++ b/pyflask/manageNeuroconv/info/urls.py
@@ -1,7 +1,7 @@
-from pathlib import Path
 import json
 import os
 import sys
+from pathlib import Path
 
 
 def resource_path(relative_path):
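format_sse, whose signature appears as a context line above, frames a payload for a text/event-stream response. Its body is not part of this hunk, so the following is a hedged sketch consistent with the signature and with the standard SSE wire format (an assumption, not copied from sse.py):

```python
# Hedged sketch of an SSE formatter matching the signature shown above; the
# body is assumed from the standard text/event-stream framing.
import json


def format_sse(data: str, event=None) -> str:
    message = f"data: {data}\n\n"
    if event is not None:
        message = f"event: {event}\n{message}"
    return message


print(format_sse(data=json.dumps({"progress": 42}), event="conversion_progress"), end="")
# event: conversion_progress
# data: {"progress": 42}
```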
diff --git a/pyflask/manageNeuroconv/manage_neuroconv.py b/pyflask/manageNeuroconv/manage_neuroconv.py
index e0f5488c2..2587cabd2 100644
--- a/pyflask/manageNeuroconv/manage_neuroconv.py
+++ b/pyflask/manageNeuroconv/manage_neuroconv.py
@@ -1,19 +1,22 @@
 """Collection of utility functions used by the NeuroConv Flask API."""
 
-import os
+import copy
+import hashlib
 import json
 import math
-import copy
+import os
 import re
-import hashlib
-from pathlib import Path
 from datetime import datetime
-from typing import Dict, Optional
-from shutil import rmtree, copytree
 from pathlib import Path
-from typing import Any, Dict, List, Optional
+from shutil import copytree, rmtree
+from typing import Any, Dict, List, Optional, Union
 
-from .info import GUIDE_ROOT_FOLDER, STUB_SAVE_FOLDER_PATH, CONVERSION_SAVE_FOLDER_PATH, announcer
+from .info import (
+    CONVERSION_SAVE_FOLDER_PATH,
+    GUIDE_ROOT_FOLDER,
+    STUB_SAVE_FOLDER_PATH,
+    announcer,
+)
 
 EXCLUDED_RECORDING_INTERFACE_PROPERTIES = ["contact_vector", "contact_shapes", "group", "location"]
@@ -112,7 +115,7 @@ def replace_nan_with_none(data):
     return data
 
 
-def resolve_references(schema, root_schema=None):
+def resolve_references(schema: dict, root_schema: Optional[dict] = None) -> dict:
     """
     Recursively resolve references in a JSON schema based on the root schema.
@@ -142,7 +145,7 @@
     return schema
 
 
-def replace_none_with_nan(json_object, json_schema):
+def replace_none_with_nan(json_object: dict, json_schema: dict) -> dict:
     """
     Recursively search a JSON object and replace None values with NaN where appropriate.
@@ -254,7 +257,7 @@ def locate_data(info: dict) -> dict:
     return json.loads(json.dumps(obj=organized_output, cls=NWBMetaDataEncoder))
 
 
-def module_to_dict(my_module):
+def module_to_dict(my_module) -> dict:
     # Create an empty dictionary
     module_dict = {}
 
@@ -278,7 +281,7 @@ def get_class_ref_in_docstring(input_string):
     return match.group(1)
 
 
-def derive_interface_info(interface):
+def derive_interface_info(interface) -> dict:
 
     info = {"keywords": getattr(interface, "keywords", []), "description": ""}
 
@@ -337,7 +340,7 @@ def get_all_interface_info() -> dict:
 
 
 # Combine Multiple Interfaces
-def get_custom_converter(interface_class_dict: dict, alignment_info: dict = dict()):  # -> NWBConverter:
+def get_custom_converter(interface_class_dict: dict, alignment_info: dict = dict()) -> "NWBConverter":
     from neuroconv import converters, datainterfaces, NWBConverter
 
     class CustomNWBConverter(NWBConverter):
@@ -366,13 +369,21 @@ def get_source_schema(interface_class_dict: dict) -> dict:
     return CustomNWBConverter.get_source_schema()
 
 
-def map_interfaces(BaseRecordingExtractorInterface, callback, converter):
+def map_interfaces(callback, converter, to_match: Union["BaseDataInterface", None] = None, parent_name=None) -> list:
+    from neuroconv import NWBConverter
 
     output = []
 
     for name, interface in converter.data_interface_objects.items():
-        if isinstance(interface, BaseRecordingExtractorInterface):
-            result = callback(name, interface)
+
+        associated_name = f"{parent_name} — {name}" if parent_name else name
+        if isinstance(interface, NWBConverter):
+            result = map_interfaces(
+                callback=callback, converter=interface, to_match=to_match, parent_name=associated_name
+            )
+            output.extend(result)
+        elif to_match is None or isinstance(interface, to_match):
+            result = callback(associated_name, interface)
             output.append(result)
 
     return output
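The rewritten map_interfaces above now recurses into nested NWBConverter instances (such as SpikeGLXConverterPipe) and flattens nested names with an em-dash separator. A self-contained sketch of that traversal using mock classes (nothing below is NeuroConv itself; names are illustrative only):

```python
# Hedged, self-contained sketch of the recursive traversal above using mocks;
# class and attribute names mirror the diff, but this is not NeuroConv code.
class MockInterface:
    pass


class MockConverter:
    def __init__(self, data_interface_objects):
        self.data_interface_objects = data_interface_objects


def map_interfaces(callback, converter, parent_name=None) -> list:
    output = []
    for name, interface in converter.data_interface_objects.items():
        associated_name = f"{parent_name} — {name}" if parent_name else name
        if isinstance(interface, MockConverter):  # recurse into sub-converters
            output.extend(map_interfaces(callback, interface, parent_name=associated_name))
        else:
            output.append(callback(associated_name, interface))
    return output


nested = MockConverter({"imec0.ap": MockInterface(), "imec0.lf": MockInterface()})
top = MockConverter({"SpikeGLXConverterPipe": nested, "PhySortingInterface": MockInterface()})
print(map_interfaces(lambda name, _: name, top))
# ['SpikeGLXConverterPipe — imec0.ap', 'SpikeGLXConverterPipe — imec0.lf', 'PhySortingInterface']
```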
@@ -443,8 +454,6 @@ def on_sorting_interface(name, sorting_interface):
         return sorting_interface
 
     def on_recording_interface(name, recording_interface):
-        global aggregate_electrode_columns
-
         electrode_columns = get_electrode_columns_json(recording_interface)
 
         # Aggregate electrode column information across recording interfaces
@@ -478,14 +487,18 @@ def on_recording_interface(name, recording_interface):
         return recording_interface
 
-    from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import BaseRecordingExtractorInterface
-    from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import BaseSortingExtractorInterface
+    from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import (
+        BaseRecordingExtractorInterface,
+    )
+    from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import (
+        BaseSortingExtractorInterface,
+    )
 
     # Map recording interfaces to metadata
-    map_interfaces(BaseRecordingExtractorInterface, on_recording_interface, converter)
+    map_interfaces(on_recording_interface, converter=converter, to_match=BaseRecordingExtractorInterface)
 
     # Map sorting interfaces to metadata
-    map_interfaces(BaseSortingExtractorInterface, on_sorting_interface, converter)
+    map_interfaces(on_sorting_interface, converter=converter, to_match=BaseSortingExtractorInterface)
 
     if has_ecephys:
@@ -577,7 +590,17 @@ def on_recording_interface(name, recording_interface):
         "additionalProperties": True,  # Allow for new columns
     }
 
-    return json.loads(json.dumps(replace_nan_with_none(dict(results=metadata, schema=schema)), cls=NWBMetaDataEncoder))
+    # TODO: generalize logging stuff
+    log_base = GUIDE_ROOT_FOLDER / "logs"
+    log_base.mkdir(exist_ok=True)
+    with open(file=log_base / "file_metadata_page_schema.json", mode="w") as fp:
+        json.dump(obj=dict(schema=schema), fp=fp, cls=NWBMetaDataEncoder, indent=2)
+    with open(file=log_base / "file_metadata_page_results.json", mode="w") as fp:
+        json.dump(obj=dict(results=metadata), fp=fp, cls=NWBMetaDataEncoder, indent=2)
+
+    return json.loads(
+        json.dumps(obj=replace_nan_with_none(dict(results=metadata, schema=schema)), cls=NWBMetaDataEncoder)
+    )
@@ -596,7 +619,7 @@ def get_check_function(check_function_name: str) -> callable:
 
 def run_check_function(check_function: callable, arg: dict) -> dict:
     """.Function used to run an arbitrary NWB Inspector function."""
-    from nwbinspector.register_checks import InspectorMessage, Importance
+    from nwbinspector.register_checks import Importance, InspectorMessage
 
     output = check_function(arg)
     if isinstance(output, InspectorMessage):
@@ -639,8 +662,8 @@ def validate_nwbfile_metadata(
 
 def validate_metadata(metadata: dict, check_function_name: str) -> dict:
     """Function used to validate data using an arbitrary NWB Inspector function."""
-    from pynwb.file import NWBFile, Subject
     from nwbinspector.nwbinspector import InspectorOutputJSONEncoder
+    from pynwb.file import NWBFile, Subject
 
     check_function = get_check_function(check_function_name)
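get_check_function and run_check_function above look up and invoke a single NWB Inspector check; the diff itself confirms that a check is a plain callable applied to a neurodata object (`output = check_function(arg)`). A hedged usage sketch (check_subject_exists is one real check name; treat the name-indexing details as assumptions):

```python
# Hedged sketch: running one NWB Inspector check directly, mirroring
# run_check_function above. The dict-based lookup is assumed; the direct call
# pattern is confirmed by the diff.
from datetime import datetime

from nwbinspector import available_checks
from pynwb import NWBFile

checks_by_name = {check.__name__: check for check in available_checks}
check_function = checks_by_name["check_subject_exists"]

nwbfile = NWBFile(
    session_description="demo",
    identifier="demo-identifier",
    session_start_time=datetime.now().astimezone(),
)
print(check_function(nwbfile))  # an InspectorMessage, since no Subject was set
```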
@@ -761,6 +784,8 @@ def get_interface_alignment(info: dict) -> dict:
 
 def convert_to_nwb(info: dict) -> str:
     """Function used to convert the source data to NWB format using the specified metadata."""
+    from neuroconv import NWBConverter
+
     nwbfile_path = Path(info["nwbfile_path"])
     custom_output_directory = info.get("output_folder")
     project_name = info.get("project_name")
@@ -842,13 +867,31 @@ def update_conversion_progress(**kwargs):
         shared_electrode_columns = ecephys_metadata["ElectrodeColumns"]
 
         for interface_name, interface_electrode_results in ecephys_metadata["Electrodes"].items():
-            interface = converter.data_interface_objects[interface_name]
+            name_split = interface_name.split(" — ")
 
-            update_recording_properties_from_table_as_json(
-                interface,
-                electrode_table_json=interface_electrode_results,
-                electrode_column_info=shared_electrode_columns,
-            )
+            if len(name_split) == 1:
+                sub_interface = name_split[0]
+            elif len(name_split) == 2:
+                sub_interface, sub_sub_interface = name_split
+
+            interface_or_subconverter = converter.data_interface_objects[sub_interface]
+
+            if isinstance(interface_or_subconverter, NWBConverter):
+                subconverter = interface_or_subconverter
+
+                update_recording_properties_from_table_as_json(
+                    recording_interface=subconverter.data_interface_objects[sub_sub_interface],
+                    electrode_table_json=interface_electrode_results,
+                    electrode_column_info=shared_electrode_columns,
+                )
+            else:
+                interface = interface_or_subconverter
+
+                update_recording_properties_from_table_as_json(
+                    recording_interface=interface,
+                    electrode_table_json=interface_electrode_results,
+                    electrode_column_info=shared_electrode_columns,
+                )
 
         ecephys_metadata["Electrodes"] = [
             {"name": entry["name"], "description": entry["description"]} for entry in shared_electrode_columns
         ]
@@ -886,7 +929,7 @@ def update_conversion_progress(**kwargs):
     return dict(file=str(resolved_output_path))
 
 
-def upload_multiple_filesystem_objects_to_dandi(**kwargs):
+def upload_multiple_filesystem_objects_to_dandi(**kwargs) -> list[Path]:
     tmp_folder_path = _aggregate_symlinks_in_new_directory(kwargs["filesystem_paths"], "upload")
     innerKwargs = {**kwargs}
     del innerKwargs["filesystem_paths"]
@@ -905,7 +948,7 @@ def upload_folder_to_dandi(
     number_of_jobs: Optional[int] = None,
     number_of_threads: Optional[int] = None,
     ignore_cache: bool = False,
-):
+) -> list[Path]:
     from neuroconv.tools.data_transfers import automatic_dandi_upload
 
     os.environ["DANDI_API_KEY"] = api_key  # Update API Key
@@ -934,7 +977,7 @@ def upload_project_to_dandi(
     number_of_jobs: Optional[int] = None,
     number_of_threads: Optional[int] = None,
    ignore_cache: bool = False,
-):
+) -> list[Path]:
     from neuroconv.tools.data_transfers import automatic_dandi_upload
 
     # CONVERSION_SAVE_FOLDER_PATH.mkdir(exist_ok=True, parents=True)  # Ensure base directory exists
@@ -964,7 +1007,7 @@ def listen_to_neuroconv_events():
         yield msg
 
 
-def generate_dataset(input_path: str, output_path: str):
+def generate_dataset(input_path: str, output_path: str) -> dict:
     base_path = Path(input_path)
     output_path = Path(output_path)
@@ -1006,7 +1049,7 @@ def generate_dataset(input_path: str, output_path: str):
     return {"output_path": str(output_path)}
 
 
-def inspect_nwb_file(payload):
+def inspect_nwb_file(payload) -> dict:
     from nwbinspector import inspect_nwbfile, load_config
     from nwbinspector.inspector_tools import format_messages, get_report_header
     from nwbinspector.nwbinspector import InspectorOutputJSONEncoder
@@ -1037,12 +1080,12 @@ def _inspect_file_per_job(
     url,
     ignore: Optional[List[str]] = None,
     request_id: Optional[str] = None,
-):
+) -> list:
+    import requests
     from nwbinspector import nwbinspector
     from pynwb import NWBHDF5IO
     from tqdm_publisher import TQDMProgressSubscriber
-    import requests
 
     checks = nwbinspector.configure_checks(
         checks=nwbinspector.available_checks,
@@ -1075,6 +1118,7 @@ def _inspect_file_per_job(
 
 def inspect_all(url, config):
     from concurrent.futures import ProcessPoolExecutor, as_completed
+
     from nwbinspector.utils import calculate_number_of_cpu
     from tqdm_publisher import TQDMProgressSubscriber
@@ -1135,11 +1179,12 @@ def on_progress_update(message):
     return messages
 
 
-def inspect_nwb_folder(url, payload):
+def inspect_nwb_folder(url, payload) -> dict:
+    from pickle import PicklingError
+
     from nwbinspector import load_config
     from nwbinspector.inspector_tools import format_messages, get_report_header
     from nwbinspector.nwbinspector import InspectorOutputJSONEncoder
-    from pickle import PicklingError
 
     kwargs = dict(
         ignore=[
@@ -1168,7 +1213,7 @@ def inspect_nwb_folder(url, payload):
     return json.loads(json.dumps(obj=json_report, cls=InspectorOutputJSONEncoder))
 
 
-def _aggregate_symlinks_in_new_directory(paths, reason="", folder_path=None):
+def _aggregate_symlinks_in_new_directory(paths, reason="", folder_path=None) -> Path:
     if folder_path is None:
         folder_path = GUIDE_ROOT_FOLDER / ".temp" / reason / f"temp_{datetime.now().strftime('%Y%m%d-%H%M%S')}"
@@ -1187,7 +1232,7 @@ def _aggregate_symlinks_in_new_directory(paths, reason="", folder_path=None):
     return folder_path
 
 
-def inspect_multiple_filesystem_objects(url, paths, **kwargs):
+def inspect_multiple_filesystem_objects(url, paths, **kwargs) -> dict:
     tmp_folder_path = _aggregate_symlinks_in_new_directory(paths, "inspect")
     result = inspect_nwb_folder(url, {"path": tmp_folder_path, **kwargs})
     rmtree(tmp_folder_path)
@@ -1261,9 +1306,8 @@ def generate_test_data(output_path: str):
     Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy spiking data.
     """
     import spikeinterface
-    from spikeinterface.extractors import NumpyRecording
     from spikeinterface.exporters import export_to_phy
-    from spikeinterface.preprocessing import scale, bandpass_filter, resample
+    from spikeinterface.preprocessing import bandpass_filter, resample, scale
 
     base_path = Path(output_path)
     spikeglx_output_folder = base_path / "spikeglx"
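generate_test_data fabricates the tutorial dataset described in its docstring; per the imports fixed above, the LF band is presumably derived from the wideband signal with bandpass_filter and resample. A hedged sketch of that derivation on a synthetic recording (parameter values are illustrative, not the GUIDE's):

```python
# Hedged sketch of deriving an LF band from a wideband recording with the same
# spikeinterface.preprocessing helpers imported above; values are illustrative.
from spikeinterface.core import generate_recording
from spikeinterface.preprocessing import bandpass_filter, resample

wideband = generate_recording(num_channels=4, durations=[3.0], sampling_frequency=30_000.0)
lf_band = resample(bandpass_filter(wideband, freq_min=0.5, freq_max=500.0), resample_rate=2_500)
print(lf_band.get_sampling_frequency())  # 2500.0
```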
""" import spikeinterface - from spikeinterface.extractors import NumpyRecording from spikeinterface.exporters import export_to_phy - from spikeinterface.preprocessing import scale, bandpass_filter, resample + from spikeinterface.preprocessing import bandpass_filter, resample, scale base_path = Path(output_path) spikeglx_output_folder = base_path / "spikeglx" @@ -1333,7 +1377,7 @@ def map_dtype(dtype: str) -> str: return dtype -def get_property_dtype(extractor, property_name: str, ids: list, extra_props: dict): +def get_property_dtype(extractor, property_name: str, ids: list, extra_props: dict) -> str: if property_name in extra_props: dtype = extra_props[property_name]["data_type"] else: diff --git a/pyflask/tests/conftest.py b/pyflask/tests/conftest.py index 4ecf00e8b..ec72c792b 100644 --- a/pyflask/tests/conftest.py +++ b/pyflask/tests/conftest.py @@ -1,5 +1,5 @@ -import pytest import app as flask +import pytest def pytest_addoption(parser): diff --git a/pyflask/tests/test_generate_tutorial_data.py b/pyflask/tests/test_generate_tutorial_data.py index 80a5f8c53..d87e20281 100644 --- a/pyflask/tests/test_generate_tutorial_data.py +++ b/pyflask/tests/test_generate_tutorial_data.py @@ -1,6 +1,7 @@ -from utils import post from pathlib import Path +from utils import post + def test_generate_test_data(client, tmp_path: Path): # assert client is None diff --git a/pyflask/tests/test_neuroconv.py b/pyflask/tests/test_neuroconv.py index 7e6bd33e8..07563763e 100644 --- a/pyflask/tests/test_neuroconv.py +++ b/pyflask/tests/test_neuroconv.py @@ -1,5 +1,5 @@ from jsonschema import validate -from utils import get, post, get_converter_output_schema +from utils import get, get_converter_output_schema, post def test_get_all_interfaces(client): diff --git a/pyflask/tests/test_startup.py b/pyflask/tests/test_startup.py index 42cead8c7..8fa114535 100644 --- a/pyflask/tests/test_startup.py +++ b/pyflask/tests/test_startup.py @@ -1,4 +1,4 @@ -from utils import get, post, get_converter_output_schema +from utils import get, get_converter_output_schema, post def test_preload_imports(client): diff --git a/schemas/source-data.schema.ts b/schemas/source-data.schema.ts index ef189b856..b09c88abf 100644 --- a/schemas/source-data.schema.ts +++ b/schemas/source-data.schema.ts @@ -13,6 +13,7 @@ export default function preprocessSourceDataSchema (schema) { const info = interfaces[key] ?? {} + const files = schema.properties.file_paths ?? schema.properties.file_path const singleLocationInfo = schema.properties.file_path ?? schema.properties.folder_path if (schema.properties.file_paths) { @@ -29,6 +30,11 @@ export default function preprocessSourceDataSchema (schema) { } + if (files) { + const base = singleLocationInfo ? files : files.items + if (!base.accept && info.suffixes) base.accept = info.suffixes + } + // Do not show steps if (schema.properties.gain) schema.properties.gain.step = null diff --git a/src/renderer/src/stories/FileSystemSelector.js b/src/renderer/src/stories/FileSystemSelector.js index 7a38d1f5e..5e33c5be8 100644 --- a/src/renderer/src/stories/FileSystemSelector.js +++ b/src/renderer/src/stories/FileSystemSelector.js @@ -103,6 +103,8 @@ export class FilesystemSelector extends LitElement { if (props.onSelect) this.onSelect = props.onSelect; if (props.onChange) this.onChange = props.onChange; if (props.onThrow) this.onThrow = props.onThrow; + + this.accept = props.accept; this.multiple = props.multiple; this.type = props.type ?? "file"; this.value = props.value ?? 
""; @@ -125,6 +127,17 @@ export class FilesystemSelector extends LitElement { #useElectronDialog = async (type) => { const options = { ...this.dialogOptions }; + + if (!options.filters && this.accept) { + options.filters = [ + { + name: "Suggested Files", + extensions: this.accept.map((ext) => (ext[0] === "." ? ext.slice(1) : ext)), + }, + { name: "All Files", extensions: ["*"] }, + ]; + } + options.properties = [ type === "file" ? "openFile" : "openDirectory", "noResolveAliases", @@ -142,7 +155,8 @@ export class FilesystemSelector extends LitElement { return result; }; - #checkType = (value) => { + #check = (value) => { + // Check type const isLikelyFile = fs ? fs.statSync(value).isFile() : value.split(".").length; if ((this.type === "directory" && isLikelyFile) || (this.type === "file" && !isLikelyFile)) this.#onThrow("Incorrect filesystem object", `Please provide a ${this.type} instead.`); @@ -152,8 +166,8 @@ export class FilesystemSelector extends LitElement { const resolvedType = type ?? this.type; if (pathOrPaths) { - if (Array.isArray(pathOrPaths)) pathOrPaths.forEach(this.#checkType); - else if (!type) this.#checkType(pathOrPaths); + if (Array.isArray(pathOrPaths)) pathOrPaths.forEach(this.#check); + else if (!type) this.#check(pathOrPaths); } let resolvedValue = pathOrPaths; diff --git a/src/renderer/src/stories/JSONSchemaInput.js b/src/renderer/src/stories/JSONSchemaInput.js index 701090e79..b21ed98ce 100644 --- a/src/renderer/src/stories/JSONSchemaInput.js +++ b/src/renderer/src/stories/JSONSchemaInput.js @@ -903,6 +903,7 @@ export class JSONSchemaInput extends LitElement { const filesystemSelectorElement = new FilesystemSelector({ type: format, value: this.value, + accept: schema.accept, onSelect: (paths = []) => { const value = paths.length ? 
diff --git a/src/renderer/src/stories/Main.js b/src/renderer/src/stories/Main.js
index 172c036b8..9a68987cd 100644
--- a/src/renderer/src/stories/Main.js
+++ b/src/renderer/src/stories/Main.js
@@ -1,7 +1,6 @@
 import { LitElement, html } from "lit";
 import useGlobalStyles from "./utils/useGlobalStyles.js";
 import { GuidedFooter } from "./pages/guided-mode/GuidedFooter";
-import { GuidedCapsules } from "./pages/guided-mode/GuidedCapsules.js";
 import { GuidedHeader } from "./pages/guided-mode/GuidedHeader.js";
 
 import { unsafeHTML } from "lit/directives/unsafe-html.js";
@@ -131,7 +130,6 @@ export class Main extends LitElement {
         let footer = page?.footer; // Page-specific footer
         let header = page?.header; // Page-specific header
-        let capsules = page?.capsules; // Page-specific capsules
 
         if (page) {
             this.to = page.to;
@@ -160,24 +158,9 @@ export class Main extends LitElement {
         if (footer === true) footer = {};
         if (footer && "onNext" in footer && !("next" in footer)) footer.next = "Next";
 
-        // Default Capsules Behavior
+        // Define header states
         const section = sections[info.section];
         if (section) {
-            if (capsules === true || !("capsules" in page)) {
-                let pages = Object.values(section.pages);
-                const pageIds = Object.keys(section.pages);
-                if (pages.length > 1) {
-                    const capsulesProps = {
-                        n: pages.length,
-                        skipped: pages.map((page) => page.skipped),
-                        selected: pages.map((page) => page.pageLabel).indexOf(page.info.label),
-                    };
-
-                    capsules = new GuidedCapsules(capsulesProps);
-                    capsules.onClick = (i) => this.toRender.page.to(pageIds[i]);
-                }
-            }
-
             if (header === true || !("header" in page) || !("sections" in page.header)) {
                 const sectionNames = Object.entries(sections)
                     .filter(([name, info]) => !Object.values(info.pages).every((state) => state.skipped))
@@ -207,17 +190,10 @@ export class Main extends LitElement {
 
         return html`
             ${headerEl}
-            ${
-                capsules
-                    ? html`<div>${capsules}</div>`
-                    : html``
-            }
             ${
                 title
                     ? html`<div
@@ -231,9 +207,7 @@ export class Main extends LitElement {
                     : ""
             }
-            <main
-                style="${capsules ? "" : "padding-top: 0px;"}"
-            >
+            <main>
                 ${page}
             </main>
             ${footerEl}
diff --git a/src/renderer/src/stories/pages/guided-mode/GuidedCapsules.js b/src/renderer/src/stories/pages/guided-mode/GuidedCapsules.js
deleted file mode 100644
index 95db22a8d..000000000
--- a/src/renderer/src/stories/pages/guided-mode/GuidedCapsules.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import { LitElement, html } from "lit";
-
-export class GuidedCapsules extends LitElement {
-    constructor({ n = 0, selected = 0, skipped = [] } = {}) {
-        super();
-        this.n = n;
-        this.selected = selected;
-        this.skipped = skipped;
-        this.style.width = "100%";
-    }
-
-    static get properties() {
-        return {
-            n: { type: Number, reflect: true },
-            selected: { type: Number, reflect: true },
-            skipped: { type: Array },
-        };
-    }
-
-    attributeChangedCallback(...args) {
-        const attrs = ["n", "selected"];
-        super.attributeChangedCallback(...args);
-        if (attrs.includes(args[0])) this.requestUpdate();
-    }
-
-    createRenderRoot() {
-        return this;
-    }
-
-    onClick = () => {};
-
-    render() {
-        if (!this.n) return html``;
-
-        return html`
-            <div>
-                <div>
-                    ${Array.from(
-                        { length: this.n },
-                        (_, i) =>
-                            html`<div
-                                @click=${() => this.onClick(i)}
-                                class="guided--capsule ${i === this.selected ? `active` : ""} ${this.skipped[i]
-                                    ? `skipped`
-                                    : ""}"
-                            ></div>`
-                    )}
-                </div>
-            </div>
-        `;
-    }
-}
-
-customElements.get("nwb-guided-capsules") || customElements.define("nwb-guided-capsules", GuidedCapsules);
diff --git a/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js b/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js
index 2e0e77d75..3a29b406b 100644
--- a/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js
+++ b/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js
@@ -85,6 +85,15 @@ const tableRenderConfig = {
 const imagingPlaneKey = "imaging_plane";
 
 const propsToIgnore = {
+    NWBFile: {
+        session_id: true,
+        source_script: true,
+        source_script_file_name: true,
+        identifier: true,
+    },
+    Subject: {
+        subject_id: true,
+    },
     Ophys: {
         "*": {
             starting_time: true,
@@ -116,6 +125,7 @@ const propsToIgnore = {
     ElectricalSeries: true,
     ElectricalSeriesLF: true,
     ElectricalSeriesAP: true,
+    ElectricalSeriesNIDQ: true,
     Units: {
         "*": {
             UnitColumns: {
@@ -129,14 +139,6 @@ const propsToIgnore = {
     Icephys: true, // Always ignore icephys metadata (for now)
     Behavior: true, // Always ignore behavior metadata (for now)
     "ndx-dandi-icephys": true,
-    Subject: {
-        subject_id: true,
-    },
-    NWBFile: {
-        session_id: true,
-        source_script: true,
-        source_script_file_name: true,
-    },
 };
 
 import { preprocessMetadataSchema } from "../../../../../../../schemas/base-metadata.schema";