diff --git a/.gitignore b/.gitignore index beacf96bb..827e5b48e 100644 --- a/.gitignore +++ b/.gitignore @@ -36,3 +36,6 @@ src/build .env .env.local .env.production + +# Spyder +.spyproject/ diff --git a/docs/conf.py b/docs/conf.py index 4e6207d17..c2ab76e33 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -66,6 +66,26 @@ } add_module_names = False +html_theme_options = { + "use_edit_page_button": True, + "icon_links": [ + { + "name": "GitHub", + "url": "https://github.com/NeurodataWithoutBorders/nwb-guide", + "icon": "fa-brands fa-github", + "type": "fontawesome", + }, + ], +} + +html_context = { + # "github_url": "https://github.com", # or your GitHub Enterprise site + "github_user": "NeurodataWithoutBorders", + "github_repo": "nwb-guide", + "github_version": "main", + "doc_path": "docs", +} + def _correct_signatures(app, what, name, obj, options, signature, return_annotation): if what == "class": diff --git a/environments/environment-Linux.yml b/environments/environment-Linux.yml index 4ccd56d03..847abad91 100644 --- a/environments/environment-Linux.yml +++ b/environments/environment-Linux.yml @@ -17,7 +17,7 @@ dependencies: - flask == 2.3.2 - flask-cors == 4.0.0 - flask_restx == 1.1.0 - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@try_remove_packaing_bound#neuroconv[full] + - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full] - dandi >= 0.60.0 - pytest == 7.4.0 - pytest-cov == 4.1.0 diff --git a/environments/environment-MAC.yml b/environments/environment-MAC.yml index 73c55d9f5..af203d6e5 100644 --- a/environments/environment-MAC.yml +++ b/environments/environment-MAC.yml @@ -11,6 +11,7 @@ dependencies: - jsonschema = 4.18.0 # installs jsonschema-specifications - pip - pip: + - scipy<1.12.0 # Fix needed for scipy._lib._testutils - chardet == 5.1.0 - configparser == 6.0.0 - flask == 2.3.2 diff --git a/environments/environment-Windows.yml b/environments/environment-Windows.yml index 84bfd2167..fc1e15850 100644 --- a/environments/environment-Windows.yml +++ b/environments/environment-Windows.yml @@ -17,7 +17,7 @@ dependencies: - flask == 2.3.2 - flask-cors === 3.0.10 - flask_restx == 1.1.0 - - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@try_remove_packaing_bound#neuroconv[full] + - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full] - dandi >= 0.60.0 - pytest == 7.2.2 - pytest-cov == 4.1.0 diff --git a/package.json b/package.json index dc69a9e93..63f13a7c4 100644 --- a/package.json +++ b/package.json @@ -20,8 +20,6 @@ "build:mac": "npm run build && npm run build:flask && npm run build:electron:mac", "build:linux": "npm run build && npm run build:flask && npm run build:electron:linux", "build:flask": "python -m PyInstaller nwb-guide.spec --log-level DEBUG --clean --noconfirm --distpath ./build/flask", - "build:flask:spec:base": "pyi-makespec --name nwb-guide --onedir --collect-data jsonschema_specifications --collect-all dandi --collect-all keyrings --collect-all unittest --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all hdmf_zarr --collect-all ndx_dandi_icephys --collect-all sklearn --collect-all ci_info ./pyflask/app.py", - "build:flask:spec": "npm run build:flask:spec:base && python prepare_pyinstaller_spec.py", "build:electron:win": "electron-builder build --win --publish never", "build:electron:mac": "electron-builder build --mac --publish never", "build:electron:linux": "electron-builder build --linux --publish never", diff --git 
a/prepare_pyinstaller_spec.py b/prepare_pyinstaller_spec.py deleted file mode 100644 index 5c0a36849..000000000 --- a/prepare_pyinstaller_spec.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Calling `pyi-makespec` regenerates the base spec file, but we need to extend the recursion limit. - -This script is run automatically as a part of `npm run build:flask:spec` after the `pyi-makespec` call. -""" - -from pathlib import Path - -with open(file=Path(__file__).parent / "nwb-guide.spec", mode="r") as io: - lines = io.readlines() - -lines.insert(1, "import sys\n") -lines.insert(2, "from pathlib import Path\n") -lines.insert(3, "import scipy\n") -lines.insert(4, "from PyInstaller.utils.hooks import collect_submodules\n") -lines.insert(5, "\n") -lines.insert(6, "sys.setrecursionlimit(sys.getrecursionlimit() * 5)\n") -lines.insert(7, "\n") -lines.insert(8, "import scipy\n") - -hiddenImportIdx = lines.index("hiddenimports = []\n") - -lines[hiddenImportIdx] = ( - "hiddenimports = [ 'email_validator', *collect_submodules('scipy.special.cython_special'), *os.path.join(os.path.dirname(scipy.__file__), '.libs')]\n\n" -) - - -# Originally this was a separate `npm` command per platform to account for CLI syntax differences between ; and : -# The spec file is, however, the same across platforms -data_line_index = lines.index("datas = []\n") -lines[data_line_index] = "datas = [('./paths.config.json', '.'), ('./package.json', '.')]\n" - -# Another platform specific difference is the app.py location -app_py_line_index, app_py_line = next((index, line) for index, line in enumerate(lines) if "app.py" in line) -pyflask_start = app_py_line.find("pyflask") # Can change on certain systems -injected_app_py_line_base = app_py_line[: (pyflask_start - 1)] -injected_app_py_line = injected_app_py_line_base + "f\"{Path('pyflask') / 'app.py'}\"],\n" -lines[app_py_line_index] = injected_app_py_line - -with open(file=Path(__file__).parent / "nwb-guide.spec", mode="w") as io: - io.writelines(lines) - -print("Successfully injected recursion depth extension and json paths!") diff --git a/pyflask/manageNeuroconv/manage_neuroconv.py b/pyflask/manageNeuroconv/manage_neuroconv.py index 26432a513..ca7ebce9a 100644 --- a/pyflask/manageNeuroconv/manage_neuroconv.py +++ b/pyflask/manageNeuroconv/manage_neuroconv.py @@ -11,6 +11,7 @@ from typing import Dict, Optional from shutil import rmtree, copytree from pathlib import Path +from typing import Any, Dict, List, Optional from sse import MessageAnnouncer from .info import GUIDE_ROOT_FOLDER, STUB_SAVE_FOLDER_PATH, CONVERSION_SAVE_FOLDER_PATH @@ -18,6 +19,16 @@ announcer = MessageAnnouncer() +EXCLUDED_RECORDING_INTERFACE_PROPERTIES = ["contact_vector", "contact_shapes", "group", "location"] +EXTRA_RECORDING_INTERFACE_PROPERTIES = { + "brain_area": { + "data_type": "str", + "description": "The brain area where the electrode is located.", + "default": "unknown", + } +} + + def is_path_contained(child, parent): parent = Path(parent) child = Path(child) @@ -289,28 +300,17 @@ def get_source_schema(interface_class_dict: dict) -> dict: return CustomNWBConverter.get_source_schema() -def get_first_recording_interface(converter): +def map_recording_interfaces(callback, converter): from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import BaseRecordingExtractorInterface - for interface in converter.data_interface_objects.values(): - if isinstance(interface, BaseRecordingExtractorInterface): - return interface - - -def is_supported_recording_interface(recording_interface, metadata): - """ 
- Temporary conditioned access to functionality still in development on NeuroConv. + output = [] - Used to determine display of ecephys metadata depending on the environment. + for name, interface in converter.data_interface_objects.items(): + if isinstance(interface, BaseRecordingExtractorInterface): + result = callback(name, interface) + output.append(result) - Alpha build release should therefore always return False for this. - """ - return ( - recording_interface - and recording_interface.get_electrode_table_json - and metadata["Ecephys"].get("Electrodes") - and all(row.get("data_type") for row in metadata["Ecephys"]["Electrodes"]) - ) + return output def get_metadata_schema(source_data: Dict[str, dict], interfaces: dict) -> Dict[str, dict]: @@ -325,41 +325,95 @@ def get_metadata_schema(source_data: Dict[str, dict], interfaces: dict) -> Dict[ schema = converter.get_metadata_schema() metadata = converter.get_metadata() - # recording_interface = get_first_recording_interface(converter) + # Clear the Electrodes information for being set as a collection of Interfaces + has_ecephys = "Ecephys" in metadata + + if has_ecephys: + metadata["Ecephys"]["Electrodes"] = {} + + schema["properties"]["Ecephys"]["required"].append("Electrodes") + ecephys_properties = schema["properties"]["Ecephys"]["properties"] + original_electrodes_schema = ecephys_properties["Electrodes"] + + ecephys_properties["Electrodes"] = {"type": "object", "properties": {}, "required": []} + + def on_recording_interface(name, recording_interface): - # if is_supported_recording_interface(recording_interface, metadata): - # metadata["Ecephys"]["Electrodes"] = recording_interface.get_electrode_table_json() + metadata["Ecephys"]["Electrodes"][name] = dict( + Electrodes=get_electrode_table_json(recording_interface), + ElectrodeColumns=get_electrode_columns_json(recording_interface), + ) + + ecephys_properties["Electrodes"]["properties"][name] = dict( + type="object", + properties=dict( + Electrodes={ + "type": "array", + "minItems": 0, + "items": {"$ref": "#/properties/Ecephys/properties/definitions/Electrode"}, + }, + ElectrodeColumns={ + "type": "array", + "minItems": 0, + "items": {"$ref": "#/properties/Ecephys/properties/definitions/ElectrodeColumn"}, + }, + ), + required=["Electrodes", "ElectrodeColumns"], + ) - # # Get Electrode metadata - # ecephys_properties = schema["properties"]["Ecephys"]["properties"] - # original_electrodes_schema = ecephys_properties["Electrodes"] + ecephys_properties["Electrodes"]["required"].append(name) - # new_electrodes_properties = { - # properties["name"]: {key: value for key, value in properties.items() if key != "name"} - # for properties in original_electrodes_schema["default"] - # } + return recording_interface - # ecephys_properties["Electrodes"] = { - # "type": "array", - # "minItems": 0, - # "items": { - # "type": "object", - # "properties": new_electrodes_properties, - # "additionalProperties": True, # Allow for new columns - # }, - # } + recording_interfaces = map_recording_interfaces(on_recording_interface, converter) - # metadata["Ecephys"]["ElectrodeColumns"] = original_electrodes_schema["default"] - # defs = ecephys_properties["definitions"] + # Delete Ecephys metadata if ElectrodeTable helper function is not available + if has_ecephys: + if len(recording_interfaces) == 0: + schema["properties"].pop("Ecephys", dict()) - # ecephys_properties["ElectrodeColumns"] = {"type": "array", "items": defs["Electrodes"]} - # ecephys_properties["ElectrodeColumns"]["items"]["required"] = 
list(defs["Electrodes"]["properties"].keys()) - # del defs["Electrodes"] + else: - # # Delete Ecephys metadata if ElectrodeTable helper function is not available - # else: - if "Ecephys" in schema["properties"]: - schema["properties"].pop("Ecephys", dict()) + defs = ecephys_properties["definitions"] + electrode_def = defs["Electrodes"] + + dtype_descriptions = { + "bool": "logical", + "str": "string", + "ndarray": "n-dimensional array", + "float8": "8-bit number", + "float16": "16-bit number", + "float32": "32-bit number", + "float64": "64-bit number", + "int8": "8-bit integer", + "int16": "16-bit integer", + "int32": "32-bit integer", + "int64": "64-bit integer", + } + + # NOTE: Update to output from NeuroConv + electrode_def["properties"]["data_type"] = { + "type": "string", + "strict": False, + "enum": list(dtype_descriptions.keys()), + "enumLabels": dtype_descriptions, + } + + # Configure electrode columns + defs["ElectrodeColumn"] = electrode_def + defs["ElectrodeColumn"]["required"] = list(electrode_def["properties"].keys()) + + new_electrodes_properties = { + properties["name"]: {key: value for key, value in properties.items() if key != "name"} + for properties in original_electrodes_schema["default"] + if properties["name"] not in EXCLUDED_RECORDING_INTERFACE_PROPERTIES + } + + defs["Electrode"] = { + "type": "object", + "properties": new_electrodes_properties, + "additionalProperties": True, # Allow for new columns + } return json.loads(json.dumps(replace_nan_with_none(dict(results=metadata, schema=schema)), cls=NWBMetaDataEncoder)) @@ -510,29 +564,23 @@ def update_conversion_progress(**kwargs): else None ) - # Update the first recording interface with Ecephys table data - # This will be refactored after the ndx-probe-interface integration - # recording_interface = get_first_recording_interface(converter) + # Ensure Ophys NaN values are resolved + resolved_metadata = replace_none_with_nan(info["metadata"], resolve_references(converter.get_metadata_schema())) - if "Ecephys" not in info["metadata"]: - info["metadata"].update(Ecephys=dict()) + ecephys_metadata = resolved_metadata.get("Ecephys") - resolved_metadata = replace_none_with_nan( - info["metadata"], resolve_references(converter.get_metadata_schema()) - ) # Ensure Ophys NaN values are resolved + if ecephys_metadata: - # if is_supported_recording_interface(recording_interface, info["metadata"]): - # electrode_column_results = ecephys_metadata["ElectrodeColumns"] - # electrode_results = ecephys_metadata["Electrodes"] + for interface_name, interface_electrode_results in ecephys_metadata["Electrodes"].items(): + interface = converter.data_interface_objects[interface_name] - # recording_interface.update_electrode_table( - # electrode_table_json=electrode_results, electrode_column_info=electrode_column_results - # ) - - # # Update with the latest metadata for the electrodes - # ecephys_metadata["Electrodes"] = electrode_column_results + update_recording_properties_from_table_as_json( + interface, + electrode_table_json=interface_electrode_results["Electrodes"], + electrode_column_info=interface_electrode_results["ElectrodeColumns"], + ) - # ecephys_metadata.pop("ElectrodeColumns", dict()) + del ecephys_metadata["Electrodes"] # NOTE: Not sure what this should be now... 
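Note: a minimal sketch, assuming a single recording interface, of the per-interface payload that `ecephys_metadata["Electrodes"]` is expected to hold when it reaches the loop above. The interface key, channel names, and values are invented for illustration; the nested `Electrodes`/`ElectrodeColumns` keys and the column fields mirror what `get_metadata_schema`, `get_electrode_columns_json`, and `get_electrode_table_json` assemble elsewhere in this diff.

```python
# Illustrative only: "ExampleRecordingInterface", the channel names, and the values are invented.
ecephys_metadata_example = {
    "Electrodes": {
        "ExampleRecordingInterface": {
            "ElectrodeColumns": [
                {"name": "channel_name", "description": "The name of this channel.", "data_type": "str"},
                {"name": "brain_area", "description": "The brain area where the electrode is located.", "data_type": "str"},
            ],
            "Electrodes": [
                {"channel_name": "AP0", "brain_area": "unknown"},
                {"channel_name": "AP1", "brain_area": "unknown"},
            ],
        }
    }
}
```

Each such entry is routed to `update_recording_properties_from_table_as_json` for the matching interface before the conversion is run.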
# Actually run the conversion converter.run_conversion( @@ -899,3 +947,180 @@ def generate_test_data(output_path: str): export_to_phy( waveform_extractor=waveform_extractor, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False ) + + +def map_dtype(dtype: str) -> str: + if " Dict[str, Any]: + """A convenience function for uniformly excluding certain properties of the provided recording extractor.""" + property_names = list(recording_interface.recording_extractor.get_property_keys()) + + properties = { + property_name: recording_interface.recording_extractor.get_property(key=property_name) + for property_name in property_names + if property_name not in EXCLUDED_RECORDING_INTERFACE_PROPERTIES + } + + for property_name, property_info in EXTRA_RECORDING_INTERFACE_PROPERTIES.items(): + if property_name not in properties: + properties[property_name] = property_info + + return properties + + +def get_electrode_columns_json(interface) -> List[Dict[str, Any]]: + """A convenience function for collecting and organizing the properties of the underlying recording extractor.""" + properties = get_recording_interface_properties(interface) + + # Hardcuded for SpikeGLX (NOTE: Update for more interfaces) + property_descriptions = dict( + channel_name="The name of this channel.", + group_name="The name of the ElectrodeGroup this channel's electrode is a part of.", + shank_electrode_number="0-based index of the electrode on the shank.", + contact_shapes="The shape of the electrode.", + inter_sample_shift="Time-delay of each channel sampling in proportion to the per-frame sampling period.", + gain_to_uV="The scaling factor from the data type to microVolts, applied before the offset.", + offset_to_uV="The offset from the data type to microVolts, applied after the gain.", + ) + + for property_name, property_info in EXTRA_RECORDING_INTERFACE_PROPERTIES.items(): + description = property_info.get("description", None) + if description: + property_descriptions[property_name] = description + + # default_column_metadata = interface.get_metadata()["Ecephys"]["ElectrodeColumns"]["properties"] # NOTE: This doesn't exist... + # property_descriptions = {column_name: column_fields["description"] for column_name, column_fields in default_column_metadata} + + recording_extractor = interface.recording_extractor + channel_ids = recording_extractor.get_channel_ids() + + electrode_columns = [ + dict( + name=property_name, + description=property_descriptions.get(property_name, "No description."), + data_type=get_property_dtype( + recording_extractor=recording_extractor, property_name=property_name, channel_ids=[channel_ids[0]] + ), + ) + for property_name in properties.keys() + ] + + # TODO: uncomment when neuroconv supports contact vectors (probe interface) + # contact_vector = properties.pop("contact_vector", None) + # if contact_vector is None: + # return json.loads(json.dumps(obj=electrode_columns)) + # # Unpack contact vector + # for property_name in contact_vector.dtype.names: + # electrode_columns.append( + # dict( + # name=property_name, + # description=property_descriptions.get(property_name, ""), + # data_type=str(contact_vector.dtype.fields[property_name][0]), + # ) + # ) + + return json.loads(json.dumps(obj=electrode_columns)) + + +def get_electrode_table_json(interface) -> List[Dict[str, Any]]: + """ + A convenience function for collecting and organizing the property values of the underlying recording extractor. 
+ """ + + from neuroconv.utils import NWBMetaDataEncoder + + recording = interface.recording_extractor + + properties = get_recording_interface_properties(interface) + + electrode_ids = recording.get_channel_ids() + + table = list() + for electrode_id in electrode_ids: + electrode_column = dict() + for property_name in properties: + if property_name in EXTRA_RECORDING_INTERFACE_PROPERTIES: + recording_property_value = properties[property_name]["default"] + else: + recording_property_value = recording.get_property(key=property_name, ids=[electrode_id])[ + 0 # First axis is always electodes in SI + ] # Since only fetching one electrode at a time, use trivial zero-index + electrode_column.update({property_name: recording_property_value}) + table.append(electrode_column) + table_as_json = json.loads(json.dumps(table, cls=NWBMetaDataEncoder)) + + return table_as_json + + +def update_recording_properties_from_table_as_json( + recording_interface, electrode_column_info: dict, electrode_table_json: List[Dict[str, Any]] +): + import numpy as np + + # # Extract contact vector properties + properties = get_recording_interface_properties(recording_interface) + + # TODO: uncomment and adapt when neuroconv supports contact vectors (probe interface) + # contact_vector = properties.pop("contact_vector", None) + # contact_vector_dtypes = {} + # if contact_vector is not None: + # # Remove names from contact vector from the electrode_column_info and add to reconstructed_contact_vector_info + # contact_vector_dtypes = contact_vector.dtype + # # contact_vector_dtypes = { property_name: next((item for item in electrode_column_info if item['name'] == property_name), None)["data_type"] for property_name in contact_vector.dtype.names} + # # Remove contact vector properties from electrode_column_info + # for property_name in contact_vector.dtype.names: + # found = next((item for item in electrode_column_info if item["name"] == property_name), None) + # if found: + # electrode_column_info.remove(found) + + # Organize dtypes + electrode_column_data_types = {column["name"]: column["data_type"] for column in electrode_column_info} + # electrode_column_data_types["contact_vector"] = contact_vector_dtypes # Provide contact vector information + + recording_extractor = recording_interface.recording_extractor + channel_ids = recording_extractor.get_channel_ids() + stream_prefix = channel_ids[0].split("#")[0] # TODO: see if this generalized across formats + + # TODO: uncomment when neuroconv supports contact vectors (probe interface) + # property_names = recording_extractor.get_property_keys() + # if "contact_vector" in property_names: + # modified_contact_vector = np.array(recording_extractor.get_property(key="contact_vector")) # copy + # contact_vector_property_names = list(modified_contact_vector.dtype.names) + + for entry_index, entry in enumerate(electrode_table_json): + electrode_properties = dict(entry) # copy + channel_name = electrode_properties.pop("channel_name") + for property_name, property_value in electrode_properties.items(): + if property_name not in electrode_column_data_types: # Skip data with missing column information + continue + # TODO: uncomment when neuroconv supports contact vectors (probe interface) + # elif property_name in contact_vector_property_names: + # property_index = contact_vector_property_names.index(property_name) + # modified_contact_vector[entry_index][property_index] = property_value + else: + recording_extractor.set_property( + key=property_name, + values=np.array([property_value], 
dtype=electrode_column_data_types[property_name]), + ids=[stream_prefix + "#" + channel_name], + ) + + # TODO: uncomment when neuroconv supports contact vectors (probe interface) + # if "contact_vector" in property_names: + # recording_extractor.set_property(key="contact_vector", values=modified_contact_vector) diff --git a/schemas/base-metadata.schema.ts b/schemas/base-metadata.schema.ts index 7fea3bd08..02fd6a61f 100644 --- a/schemas/base-metadata.schema.ts +++ b/schemas/base-metadata.schema.ts @@ -4,6 +4,8 @@ import { header, replaceRefsWithValue } from '../src/renderer/src/stories/forms/ import baseMetadataSchema from './json/base_metadata_schema.json' assert { type: "json" } +const uvMathFormat = `µV`; //`µV` + function getSpeciesNameComponents(arr: any[]) { const split = arr[arr.length - 1].split(' - ') return { @@ -13,6 +15,7 @@ function getSpeciesNameComponents(arr: any[]) { } + function getSpeciesInfo(species: any[][] = []) { @@ -34,6 +37,10 @@ function getSpeciesInfo(species: any[][] = []) { } +const propsToInclude = { + ecephys: ["Device", "ElectrodeGroup", "Electrodes", "ElectrodeColumns", "definitions"] +} + export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, global = false) => { @@ -89,17 +96,40 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa // Override description of keywords nwbProps.keywords.description = 'Terms to describe your dataset (e.g. Neural circuits, V1, etc.)' // Add description to keywords - + const ecephys = copy.properties.Ecephys const ophys = copy.properties.Ophys + if (ecephys) { + + // Change rendering order for electrode table columns + const electrodesProp = ecephys.properties["Electrodes"] + for (let name in electrodesProp.properties) { + const interfaceProps = electrodesProp.properties[name].properties + const electrodeItems = interfaceProps["Electrodes"].items.properties + const uvProperties = ["gain_to_uV", "offset_to_uV"] + + uvProperties.forEach(prop => { + electrodeItems[prop] = {} + electrodeItems[prop].title = prop.replace('uV', uvMathFormat) + console.log(electrodeItems[prop]) + }) + interfaceProps["Electrodes"].items.order = ["channel_name", "group_name", "shank_electrode_number", ...uvProperties]; + interfaceProps["ElectrodeColumns"].items.order = ["name", "description", "data_type"]; + + } + + } + if (ophys) { ophys.required = Object.keys(ophys.properties) const getProp = (name: string) => ophys.properties[name] - if (getProp("TwoPhotonSeries")) { - const tpsItemSchema = getProp("TwoPhotonSeries").items + const tpsItemSchema = getProp("TwoPhotonSeries")?.items + + if (tpsItemSchema) { + tpsItemSchema.order = [ "name", "description", @@ -111,8 +141,9 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa } - if (getProp("ImagingPlane")) { - const imagingPlaneItems = getProp("ImagingPlane").items + const imagingPlaneItems = getProp("ImagingPlane")?.items + + if (imagingPlaneItems) { imagingPlaneItems.order = [ "name", "description", diff --git a/schemas/dandi-upload.schema.ts b/schemas/dandi-upload.schema.ts index a0b56d714..a226f2fa0 100644 --- a/schemas/dandi-upload.schema.ts +++ b/schemas/dandi-upload.schema.ts @@ -42,10 +42,7 @@ onServerOpen(async () => { }); // Resolve Dandiset Information Asynchronously -export const regenerateDandisets = async ({ - newPromise = true -} = {}) => { - if (newPromise) ready.dandisets = createPromise("dandiset") +export const regenerateDandisets = async () => { delete idSchema.enum delete idSchema.enumLabels delete 
idSchema.enumKeywords @@ -123,6 +120,6 @@ export const addDandiset = async (info) => { return idInfo } -regenerateDandisets({ newPromise: false }) +regenerateDandisets() export default schema diff --git a/schemas/json/base_metadata_schema.json b/schemas/json/base_metadata_schema.json index 8f06043c7..c180408a7 100644 --- a/schemas/json/base_metadata_schema.json +++ b/schemas/json/base_metadata_schema.json @@ -178,7 +178,7 @@ "species": { "type": "string", "description": "Species of subject. Use latin name.", - "pattern": "^[A-Z][a-z]+ [a-z]+" + "pattern": "^[A-Z][a-z]+ [a-z]+|http://purl.obolibrary.org/obo/NCBITaxon_[0-9]+" }, "subject_id": { "type": "string", diff --git a/schemas/json/generated/BlackrockRecordingInterface.json b/schemas/json/generated/BlackrockRecordingInterface.json index 25190ebf2..415e95cc3 100644 --- a/schemas/json/generated/BlackrockRecordingInterface.json +++ b/schemas/json/generated/BlackrockRecordingInterface.json @@ -8,8 +8,8 @@ "properties": { "file_path": { "format": "file", - "type": "string", - "description": "Path to Blackrock file." + "description": "Path to Blackrock file.", + "type": "string" }, "nsx_override": { "format": "file", diff --git a/schemas/json/generated/BlackrockSortingInterface.json b/schemas/json/generated/BlackrockSortingInterface.json index 02661e794..da459b824 100644 --- a/schemas/json/generated/BlackrockSortingInterface.json +++ b/schemas/json/generated/BlackrockSortingInterface.json @@ -8,13 +8,15 @@ "properties": { "file_path": { "format": "file", - "type": "string", - "description": "Path to Blackrock file." + "description": "Path to Blackrock file.", + "type": "string" }, "sampling_frequency": { + "description": "The sampling frequency for the sorting extractor. When the signal data is available (.ncs) those files will be", "type": "number" }, "verbose": { + "description": "Enables verbosity", "type": "boolean", "default": true } diff --git a/schemas/json/generated/CellExplorerRecordingInterface.json b/schemas/json/generated/CellExplorerRecordingInterface.json new file mode 100644 index 000000000..1e4e94428 --- /dev/null +++ b/schemas/json/generated/CellExplorerRecordingInterface.json @@ -0,0 +1,35 @@ +{ + "required": [], + "properties": { + "CellExplorerRecordingInterface": { + "required": [ + "folder_path" + ], + "properties": { + "folder_path": { + "format": "directory", + "description": "The folder where the session data is located. 
It should contain a\n`{folder.name}.session.mat` file and the binary files `{folder.name}.dat`\nor `{folder.name}.lfp` for the LFP interface.", + "type": "string" + }, + "verbose": { + "description": "Whether to output verbose text.", + "type": "boolean", + "default": true + }, + "es_key": { + "type": "string", + "default": "ElectricalSeries" + } + }, + "type": "object", + "additionalProperties": false + } + }, + "type": "object", + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "source.schema.json", + "title": "Source data schema", + "description": "Schema for the source data, files and directories", + "version": "0.1.0" +} diff --git a/schemas/json/generated/FicTracDataInterface.json b/schemas/json/generated/FicTracDataInterface.json index 47804a267..a04818bf7 100644 --- a/schemas/json/generated/FicTracDataInterface.json +++ b/schemas/json/generated/FicTracDataInterface.json @@ -10,6 +10,13 @@ "format": "file", "type": "string" }, + "radius": { + "type": "number" + }, + "configuration_file_path": { + "format": "file", + "type": "string" + }, "verbose": { "type": "boolean", "default": true diff --git a/schemas/json/generated/OpenEphysRecordingInterface.json b/schemas/json/generated/OpenEphysRecordingInterface.json index f6193ea0c..df1202bc2 100644 --- a/schemas/json/generated/OpenEphysRecordingInterface.json +++ b/schemas/json/generated/OpenEphysRecordingInterface.json @@ -13,6 +13,9 @@ "stream_name": { "type": "string" }, + "block_index": { + "type": "number" + }, "verbose": { "type": "boolean", "default": true diff --git a/schemas/json/generated/PhySortingInterface.json b/schemas/json/generated/PhySortingInterface.json index edcc2183d..a1c9e140b 100644 --- a/schemas/json/generated/PhySortingInterface.json +++ b/schemas/json/generated/PhySortingInterface.json @@ -11,7 +11,10 @@ "type": "string" }, "exclude_cluster_groups": { - "type": "array" + "type": "array", + "items": { + "type": "string" + } }, "verbose": { "type": "boolean", diff --git a/schemas/json/generated/Spike2RecordingInterface.json b/schemas/json/generated/Spike2RecordingInterface.json index 81c785ad8..e9bf8f6b1 100644 --- a/schemas/json/generated/Spike2RecordingInterface.json +++ b/schemas/json/generated/Spike2RecordingInterface.json @@ -8,8 +8,8 @@ "properties": { "file_path": { "format": "file", - "type": "string", - "description": "Path to CED data file." + "description": "Path to CED data file.", + "type": "string" }, "verbose": { "type": "boolean", diff --git a/schemas/json/generated/SpikeGLXConverterPipe.json b/schemas/json/generated/SpikeGLXConverterPipe.json index 06814d990..4b2fb1eae 100644 --- a/schemas/json/generated/SpikeGLXConverterPipe.json +++ b/schemas/json/generated/SpikeGLXConverterPipe.json @@ -8,10 +8,11 @@ "properties": { "folder_path": { "format": "directory", - "type": "string", - "description": "Path to the folder containing SpikeGLX streams." + "description": "Path to the folder containing SpikeGLX streams.", + "type": "string" }, "verbose": { + "description": "Whether to output verbose text.", "type": "boolean", "default": false } diff --git a/schemas/json/generated/SpikeGLXNIDQInterface.json b/schemas/json/generated/SpikeGLXNIDQInterface.json index 14ad82eb8..0903b1580 100644 --- a/schemas/json/generated/SpikeGLXNIDQInterface.json +++ b/schemas/json/generated/SpikeGLXNIDQInterface.json @@ -8,14 +8,16 @@ "properties": { "file_path": { "format": "file", - "type": "string", - "description": "Path to SpikeGLX .nidq file." 
+ "description": "Path to SpikeGLX .nidq file.", + "type": "string" }, "verbose": { + "description": "Whether to output verbose text.", "type": "boolean", "default": true }, "load_sync_channel": { + "description": "Whether to load the last channel in the stream, which is typically used for synchronization.\nIf True, then the probe is not loaded.", "type": "boolean", "default": false }, diff --git a/schemas/json/generated/SpikeGLXRecordingInterface.json b/schemas/json/generated/SpikeGLXRecordingInterface.json index 4b2034df8..fe04f74ee 100644 --- a/schemas/json/generated/SpikeGLXRecordingInterface.json +++ b/schemas/json/generated/SpikeGLXRecordingInterface.json @@ -8,10 +8,11 @@ "properties": { "file_path": { "format": "file", - "type": "string", - "description": "Path to SpikeGLX ap.bin or lf.bin file." + "description": "Path to SpikeGLX ap.bin or lf.bin file.", + "type": "string" }, "verbose": { + "description": "Whether to output verbose text.", "type": "boolean", "default": true }, diff --git a/schemas/json/generated/Suite2pSegmentationInterface.json b/schemas/json/generated/Suite2pSegmentationInterface.json index e5256c2f5..f0e101c81 100644 --- a/schemas/json/generated/Suite2pSegmentationInterface.json +++ b/schemas/json/generated/Suite2pSegmentationInterface.json @@ -10,17 +10,25 @@ "format": "directory", "type": "string" }, - "combined": { - "type": "boolean", - "default": false + "channel_name": { + "type": "string" }, - "plane_no": { - "type": "number", - "default": 0 + "plane_name": { + "type": "string" + }, + "plane_segmentation_name": { + "type": "string" }, "verbose": { "type": "boolean", "default": true + }, + "combined": { + "type": "boolean", + "default": false + }, + "plane_no": { + "type": "number" } }, "type": "object", diff --git a/schemas/json/generated/TdtRecordingInterface.json b/schemas/json/generated/TdtRecordingInterface.json new file mode 100644 index 000000000..d48e0f968 --- /dev/null +++ b/schemas/json/generated/TdtRecordingInterface.json @@ -0,0 +1,41 @@ +{ + "required": [], + "properties": { + "TdtRecordingInterface": { + "required": [ + "folder_path", + "gain" + ], + "properties": { + "folder_path": { + "format": "directory", + "type": "string" + }, + "gain": { + "type": "number" + }, + "stream_id": { + "type": "string", + "default": "0" + }, + "verbose": { + "type": "boolean", + "default": true + }, + "es_key": { + "type": "string", + "default": "ElectricalSeries" + } + }, + "type": "object", + "additionalProperties": false + } + }, + "type": "object", + "additionalProperties": false, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "source.schema.json", + "title": "Source data schema", + "description": "Schema for the source data, files and directories", + "version": "0.1.0" +} diff --git a/schemas/source-data.schema.ts b/schemas/source-data.schema.ts index b47f5b51f..d30cc9194 100644 --- a/schemas/source-data.schema.ts +++ b/schemas/source-data.schema.ts @@ -8,6 +8,7 @@ export default function preprocessSourceDataSchema (schema) { if (key === 'VideoInterface' || key === 'AudioInterface') { if (schema.properties.file_paths) { Object.assign(schema.properties.file_paths, { + items: { type: 'string' }, description: 'Only one file supported at this time. 
Multiple file support coming soon.', maxItems: 1, }) diff --git a/src/renderer/src/pages.js b/src/renderer/src/pages.js index edf1c3ac2..603edade8 100644 --- a/src/renderer/src/pages.js +++ b/src/renderer/src/pages.js @@ -10,7 +10,6 @@ import { GuidedSubjectsPage } from "./stories/pages/guided-mode/setup/GuidedSubj import { GuidedSourceDataPage } from "./stories/pages/guided-mode/data/GuidedSourceData"; import { GuidedMetadataPage } from "./stories/pages/guided-mode/data/GuidedMetadata"; import { GuidedUploadPage } from "./stories/pages/guided-mode/options/GuidedUpload"; -// import { GuidedConversionOptionsPage } from "./stories/pages/guided-mode/options/GuidedConversionOptions"; import { GuidedResultsPage } from "./stories/pages/guided-mode/results/GuidedResults"; import { Dashboard } from "./stories/Dashboard"; import { GuidedStubPreviewPage } from "./stories/pages/guided-mode/options/GuidedStubPreview"; diff --git a/src/renderer/src/stories/BasicTable.js b/src/renderer/src/stories/BasicTable.js index c56e8329e..30fa73566 100644 --- a/src/renderer/src/stories/BasicTable.js +++ b/src/renderer/src/stories/BasicTable.js @@ -1,14 +1,15 @@ -import { LitElement, css, html } from "lit"; +import { LitElement, css, html, unsafeCSS } from "lit"; import { styleMap } from "lit/directives/style-map.js"; import { header } from "./forms/utils"; import { checkStatus } from "../validation"; -import { errorHue, warningHue } from "./globals"; +import { emojiFontFamily, errorHue, warningHue } from "./globals"; import * as promises from "../promises"; import "./Button"; import { sortTable } from "./Table"; import tippy from "tippy.js"; +import { getIgnore } from "./JSONSchemaForm"; export class BasicTable extends LitElement { static get styles() { @@ -65,6 +66,12 @@ export class BasicTable extends LitElement { user-select: none; } + .relative .info { + margin: 0px 5px; + font-size: 80%; + font-family: ${unsafeCSS(emojiFontFamily)}; + } + th span { display: inline-block; } @@ -115,6 +122,7 @@ export class BasicTable extends LitElement { validateOnChange, onStatusChange, onLoaded, + onUpdate, } = {}) { super(); this.name = name ?? "data_table"; @@ -127,6 +135,7 @@ export class BasicTable extends LitElement { this.ignore = ignore ?? {}; if (validateOnChange) this.validateOnChange = validateOnChange; + if (onUpdate) this.onUpdate = onUpdate; if (onStatusChange) this.onStatusChange = onStatusChange; if (onLoaded) this.onLoaded = onLoaded; } @@ -158,9 +167,29 @@ export class BasicTable extends LitElement { return html`
${header(str)}
`; }; - #renderHeader = (str, { description }) => { - if (description) return html`${this.#renderHeaderContent(str)}`; - return html`${this.#renderHeaderContent(str)}`; + #renderHeader = (prop, { description, title = prop } = {}) => { + const th = document.createElement("th"); + + const required = this.#itemSchema.required ? this.#itemSchema.required.includes(prop) : false; + const container = document.createElement("div"); + container.classList.add("relative"); + const span = document.createElement("span"); + span.innerHTML = header(title); + if (required) span.setAttribute("required", ""); + container.appendChild(span); + + // Add Description Tooltip + if (description) { + const span = document.createElement("span"); + span.classList.add("info"); + span.innerText = "ℹ️"; + container.append(span); + tippy(span, { content: `${description[0].toUpperCase() + description.slice(1)}`, allowHTML: true }); + } + + th.appendChild(container); + + return th; }; #getRowData(row, cols = this.colHeaders) { @@ -169,13 +198,12 @@ export class BasicTable extends LitElement { let value; if (col === this.keyColumn) { if (hasRow) value = row; - else return ""; + else return; } else value = (hasRow ? this.data[row][col] : undefined) ?? // this.globals[col] ?? - this.#itemSchema.properties[col].default ?? - ""; + this.#itemSchema.properties[col]?.default; return value; }); } @@ -210,43 +238,52 @@ export class BasicTable extends LitElement { onStatusChange = () => {}; onLoaded = () => {}; - #validateCell = (value, col, parent) => { + #getType = (value, { type, data_type } = {}) => { + let inferred = typeof value; + if (Array.isArray(value)) inferred = "array"; + if (value == undefined) inferred = "null"; + + const original = type || data_type; + let resolved = original; + + // Handle based on JSON Schema types + if (type) { + if (resolved === "integer") resolved = "number"; // Map to javascript type + } else if (data_type) { + if (resolved.includes("array")) resolved = "array"; + if (resolved.includes("int") || resolved.includes("float")) resolved = "number"; + if (resolved.startsWith("bool")) resolved = "boolean"; + if (resolved.startsWith("str")) resolved = "string"; + } + + return { + type: resolved, + original, + inferred, + }; + }; + + #validateCell = (value, col, row, parent) => { if (!value && !this.validateEmptyCells) return true; // Empty cells are valid if (!this.validateOnChange) return true; let result; const propInfo = this.#itemProps[col] ?? 
{}; - let thisTypeOf = typeof value; - let ogType; - let type = (ogType = propInfo.type || propInfo.data_type); - // Handle based on JSON Schema types - if ("type" in propInfo) { - // Map to javascript type - if (type === "integer") type = "number"; - - // Convert to json schema type - if (Array.isArray(value)) thisTypeOf = "array"; - if (value == undefined) thisTypeOf = "null"; - } else if ("data_type" in propInfo) { - if (type.includes("array")) type = "array"; - if (type.includes("int") || type.includes("float")) type = "number"; - if (type.startsWith("bool")) type = "boolean"; - if (type.startsWith("str")) type = "string"; - } + let { type, original, inferred } = this.#getType(value, propInfo); // Check if required - if (!value && "required" in this.#itemSchema.required.includes(col)) + if (!value && "required" in this.#itemSchema && this.#itemSchema.required.includes(col)) result = [{ message: `${col} is a required property`, type: "error" }]; - // If not required, check matching types for values that are defined - else if (value !== "" && thisTypeOf !== type) - result = [{ message: `${col} is expected to be of type ${ogType}, not ${thisTypeOf}`, type: "error" }]; + // If not required, check matching types (if provided) for values that are defined + else if (value !== "" && type && inferred !== type) + result = [{ message: `${col} is expected to be of type ${original}, not ${inferred}`, type: "error" }]; // Otherwise validate using the specified onChange function - else result = this.validateOnChange(col, parent, value, this.#itemProps[col]); + else result = this.validateOnChange([row, col], parent, value, this.#itemProps[col]); // Will run synchronously if not a promise result - return promises.resolve(result, () => { + return promises.resolve(result, (result) => { let info = { title: undefined, warning: undefined, @@ -277,7 +314,7 @@ export class BasicTable extends LitElement { const results = this.#data.map((v, i) => { return v.map((vv, j) => { - const info = this.#validateCell(vv, this.colHeaders[j], { ...this.data[rows[i]] }); // Could be a promise or a basic response + const info = this.#validateCell(vv, this.colHeaders[j], i, { ...this.data[rows[i]] }); // Could be a promise or a basic response return promises.resolve(info, (info) => { if (info === true) return; const td = this.shadowRoot.getElementById(`i${i}_j${j}`); @@ -336,7 +373,7 @@ export class BasicTable extends LitElement { let data = text.split("\n").map((row) => row.split("\t").map((v) => { try { - return JSON.parse(v); + return eval(v); } catch { return v.trim(); } @@ -356,19 +393,24 @@ export class BasicTable extends LitElement { Object.keys(data).forEach((row) => { const cols = structuredData[row]; const latest = (this.data[this.keyColumn ? cols[this.keyColumn] : row] = {}); - Object.entries(cols).forEach(([key, value]) => (key in this.#itemProps ? 
(latest[key] = value) : "")); // Only include data from schema + Object.entries(cols).forEach(([key, value]) => { + if (key in this.#itemProps) { + const { type } = this.#getType(value, this.#itemProps[key]); + if (type === "string") value = `${value}`; // Convert to string if necessary + latest[key] = value; + } + }); // Only include data from schema }); - this.onUpdate(null, null, value); // Update the whole table + if (this.onUpdate) this.onUpdate([], data); // Update the whole table } // Render Code render() { this.#updateRendered(); + this.schema = this.schema; // Always update the schema const entries = this.#itemProps; - for (let key in this.ignore) delete entries[key]; - for (let key in this.ignore["*"] ?? {}) delete entries[key]; // Add existing additional properties to the entries variable if necessary if (this.#itemSchema.additionalProperties) { @@ -384,6 +426,10 @@ export class BasicTable extends LitElement { }, entries); } + // Ignore any additions in the ignore configuration + for (let key in this.ignore) delete entries[key]; + for (let key in this.ignore["*"] ?? {}) delete entries[key]; + // Sort Columns by Key Column and Requirement const keys = (this.#keys = @@ -418,7 +464,9 @@ export class BasicTable extends LitElement { ${data.map( (row, i) => html` - ${row.map((col, j) => html`
<td id="i${i}_j${j}">${col}</td>
`)} + ${row.map( + (col, j) => html`
<td id="i${i}_j${j}">${JSON.stringify(col)}</td>
` + )} ` )} diff --git a/src/renderer/src/stories/JSONSchemaForm.js b/src/renderer/src/stories/JSONSchemaForm.js index 43a717cdf..6be7a4cbd 100644 --- a/src/renderer/src/stories/JSONSchemaForm.js +++ b/src/renderer/src/stories/JSONSchemaForm.js @@ -294,6 +294,62 @@ export class JSONSchemaForm extends LitElement { if (props.base) this.base = props.base; } + // Handle wildcards to grab multiple form elements + getAllFormElements = (path, config = { forms: true, tables: true, inputs: true }) => { + const name = path[0]; + const upcomingPath = path.slice(1); + + const isWildcard = name === "*"; + const last = !upcomingPath.length; + + if (isWildcard) { + if (last) { + const allElements = []; + if (config.forms) allElements.push(...this.forms.values()); + if (config.tables) allElements.push(...this.tables.values()); + if (config.inputs) allElements.push(...this.inputs.values()); + return allElements; + } else + return Object.values(this.forms) + .map((form) => form.getAllFormElements(upcomingPath, config)) + .flat(); + } + + // Get Single element + else { + const result = this.#getElementOnForm(path); + if (!result) return []; + + if (last) { + if (result instanceof JSONSchemaForm && config.forms) return [result]; + else if (result instanceof JSONSchemaInput && config.inputs) return [result]; + else if (config.tables) return [result]; + + return []; + } else { + if (result instanceof JSONSchemaForm) return result.getAllFormElements(upcomingPath, config); + else return [result]; + } + } + }; + + // Single later only + #getElementOnForm = (path, { forms = true, tables = true, inputs = true } = {}) => { + if (typeof path === "string") path = path.split("."); + if (!path.length) return this; + + const name = path[0]; + + const form = this.forms[name]; + if (form && forms) return form; + + const table = this.tables[name]; + if (table && tables) return table; + + const foundInput = this.inputs[path.join(".")]; // Check Inputs + if (foundInput && inputs) return foundInput; + }; + // Get the form element defined by the path (stops before table cells) getFormElement = ( path, @@ -306,24 +362,15 @@ export class JSONSchemaForm extends LitElement { if (typeof path === "string") path = path.split("."); if (!path.length) return this; - const name = path[0]; const updatedPath = path.slice(1); - const form = this.forms[name]; // Check forms - if (!form) { - const table = this.tables[name]; // Check tables - if (table && tables) return table; // Skip table cells - } else if (!updatedPath.length && forms) return form; - - // Check Inputs - // const inputContainer = this.shadowRoot.querySelector(`#${encode(path.join("-"))}`); - // if (inputContainer && inputs) return inputContainer.querySelector("jsonschema-input");; - - const foundInput = this.inputs[path.join(".")]; // Check Inputs - if (foundInput && inputs) return foundInput; + const result = this.#getElementOnForm(path, { forms, tables, inputs }); + if (result instanceof JSONSchemaForm) { + if (!updatedPath.length) return result; + else return result.getFormElement(updatedPath, { forms, tables, inputs }); + } - // Check Nested Form Inputs - return form?.getFormElement(updatedPath, { forms, tables, inputs }); + return result; }; #requirements = {}; @@ -429,18 +476,22 @@ export class JSONSchemaForm extends LitElement { const isRow = typeof rowName === "number"; const resolvedValue = e.instance; // Get offending value - const schema = e.schema; // Get offending schema + const resolvedSchema = e.schema; // Get offending schema // ------------ Exclude Certain 
Errors ------------ // Allow for constructing types from object types - if (e.message.includes("is not of a type(s)") && "properties" in schema && schema.type === "string") + if ( + e.message.includes("is not of a type(s)") && + "properties" in resolvedSchema && + resolvedSchema.type === "string" + ) return; // Ignore required errors if value is empty if (e.name === "required" && this.validateEmptyValues === null && !(e.property in e.instance)) return; // Non-Strict Rule - if (schema.strict === false && e.message.includes("is not one of enum values")) return; + if (resolvedSchema.strict === false && e.message.includes("is not one of enum values")) return; // Allow referring to floats as null (i.e. JSON NaN representation) if (e.message === "is not of a type(s) number") { diff --git a/src/renderer/src/stories/JSONSchemaInput.js b/src/renderer/src/stories/JSONSchemaInput.js index b9a884e34..ba4f33fc7 100644 --- a/src/renderer/src/stories/JSONSchemaInput.js +++ b/src/renderer/src/stories/JSONSchemaInput.js @@ -14,6 +14,9 @@ import { JSONSchemaForm, getIgnore } from "./JSONSchemaForm"; import { Search } from "./Search"; import tippy from "tippy.js"; import { merge } from "./pages/utils"; +import { InspectorListItem } from "./preview/inspector/InspectorList"; + +const isDevelopment = !!import.meta.env; const dateTimeRegex = /(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2})/; @@ -30,6 +33,7 @@ function resolveDateTime(value) { export function createTable(fullPath, { onUpdate, onThrow, overrides = {} }) { const name = fullPath.slice(-1)[0]; const path = fullPath.slice(0, -1); + const relativePath = this.form?.base ? fullPath.slice(this.form.base.length) : fullPath; const schema = this.schema; const validateOnChange = this.validateOnChange; @@ -227,6 +231,7 @@ export function createTable(fullPath, { onUpdate, onThrow, overrides = {} }) { }; const table = this.renderTable(id, tableMetadata, fullPath); + return table; // Try rendering as a nested table with a fake property key (otherwise use nested forms) }; @@ -263,10 +268,10 @@ export function createTable(fullPath, { onUpdate, onThrow, overrides = {} }) { ignore: nestedIgnore, // According to schema onUpdate: function () { - return onUpdate.call(this, fullPath, this.data); // Update all table data + return onUpdate.call(this, relativePath, this.data); // Update all table data }, - validateOnChange: (...args) => commonValidationFunction(fullPath, ...args), + validateOnChange: (...args) => commonValidationFunction(relativePath, ...args), ...commonTableMetadata, }; @@ -502,6 +507,27 @@ export class JSONSchemaInput extends LitElement { required = false; validateOnChange = true; + // Print the default value of the schema if not caught + onUncaughtSchema = (schema) => { + // In development, show uncaught schemas + if (!isDevelopment) { + if (this.form) { + const inputContainer = this.form.shadowRoot.querySelector(`#${this.path.slice(-1)[0]}`); + inputContainer.style.display = "none"; + } + } + + if (schema.default) return `
${JSON.stringify(schema.default, null, 2)}
`; + + const error = new InspectorListItem({ + message: + "

Internal GUIDE Error

Cannot render this property because of a misformatted schema.", + }); + error.style.width = "100%"; + + return error; + }; + constructor(props) { super(); Object.assign(this, props); @@ -870,7 +896,16 @@ export class JSONSchemaInput extends LitElement { if (isArray) { const hasItemsRef = "items" in schema && "$ref" in schema.items; if (!("items" in schema)) schema.items = {}; - if (!("type" in schema.items) && !hasItemsRef) schema.items.type = this.#getType(this.value?.[0]); + if (!("type" in schema.items) && !hasItemsRef) { + // Guess the type of the first item + if (this.value) { + const itemToCheck = this.value[0]; + schema.items.type = itemToCheck ? this.#getType(itemToCheck) : "string"; + } + + // If no value, handle uncaught schema + else return this.onUncaughtSchema(schema); + } } const itemSchema = this.form?.getSchema ? this.form.getSchema("items", schema) : schema["items"]; @@ -889,7 +924,9 @@ export class JSONSchemaInput extends LitElement { }); } - const table = createTable.call(this, resolvedFullPath, { + const externalPath = this.form ? [...this.form.base, ...resolvedFullPath] : resolvedFullPath; + + const table = createTable.call(this, externalPath, { onUpdate: updateFunction, onThrow: this.#onThrow, }); // Ensure change propagates @@ -1134,8 +1171,7 @@ export class JSONSchemaInput extends LitElement { } } - // Print out the immutable default value - return html`
${schema.default ? JSON.stringify(schema.default, null, 2) : "No default value"}
`; + return this.onUncaughtSchema(schema); } } diff --git a/src/renderer/src/stories/List.ts b/src/renderer/src/stories/List.ts index d33220ba3..802d98b82 100644 --- a/src/renderer/src/stories/List.ts +++ b/src/renderer/src/stories/List.ts @@ -6,7 +6,8 @@ type ListItemType = { key: string, content: string, value: any, - controls: HTMLElement[] + controls: HTMLElement[], + originalKey?: string } export interface ListProps { @@ -140,11 +141,14 @@ export class List extends LitElement { return this.items.map(item => item.value) } + #previousItems = [] #items: ListItemType[] = [] - set items(value) { - const oldList = this.#items + set items(value: ListItemType[]) { + + const oldList = this.#previousItems this.#items = value.map(item => this.transform ? this.transform(item) ?? item : item) + this.#previousItems = this.#items.map(item => ({...item})) // Clone items const oldObject = this.object this.#updateObject() @@ -205,7 +209,7 @@ export class List extends LitElement { this.items.splice(draggedIdx, 1) this.items.splice(i, 0, movedItem) - this.items = [...this.items] + this.items = this.items } @@ -225,7 +229,8 @@ export class List extends LitElement { } add = (item: ListItemType) => { - this.items = [...this.items, item] + this.items.push({ ...item }) // Update original + this.items = this.items } #removePlaceholder = () => { @@ -235,6 +240,9 @@ export class List extends LitElement { #renderListItem = (item: ListItemType, i: number) => { const { key, value, content = value } = item; + + if (!item.originalKey) item.originalKey = key + const li = document.createElement("li"); li.id = `item-${i}`; @@ -283,7 +291,7 @@ export class List extends LitElement { let i = 0; while (resolvedKey in this.object) { i++; - resolvedKey = `${originalValue}_${i}`; + resolvedKey = `${originalValue} (${i})`; } const keyEl = editableElement @@ -352,10 +360,13 @@ export class List extends LitElement { delete this.object[oKey]; this.object[newKey] = value; - if (!isUnordered) { + if (isUnordered) { + this.items[i].key = newKey + } else { this.items[i].value = newKey - this.items = [...this.items] } + this.items = this.items + } }; @@ -367,11 +378,13 @@ export class List extends LitElement { delete = (i: number) => { this.items.splice(i, 1) - this.items = [...this.items] + this.items = this.items } clear = () => { - this.items = [] + // Remove items in original list + for (let i = this.items.length - 1; i >= 0; i--) this.items.splice(i, 1) + this.items = this.items } #updateObject = () => { @@ -388,8 +401,8 @@ export class List extends LitElement { // Ensure no duplicate keys let kI = 0; while (resolvedKey in this.object) { - i++; - resolvedKey = `${key}_${kI}`; + kI++; + resolvedKey = `${key} (${kI})`; } this.object[resolvedKey] = value @@ -403,6 +416,7 @@ export class List extends LitElement { render() { + this.removeAttribute('unordered') if (this.unordered) this.setAttribute('unordered', '') diff --git a/src/renderer/src/stories/Main.js b/src/renderer/src/stories/Main.js index 65b163c3c..f5d7205ef 100644 --- a/src/renderer/src/stories/Main.js +++ b/src/renderer/src/stories/Main.js @@ -97,11 +97,10 @@ export class Main extends LitElement { // Go to home screen if there is no next page if (!info.next) { - console.log("setting", info); footer = Object.assign( { exit: false, - next: "Complete Pipeline", + next: "Exit Pipeline", onNext: () => this.toRender.page.to("/"), }, footer && typeof footer === "object" ? 
footer : {} diff --git a/src/renderer/src/stories/SimpleTable.js b/src/renderer/src/stories/SimpleTable.js index 412483722..7161f52a7 100644 --- a/src/renderer/src/stories/SimpleTable.js +++ b/src/renderer/src/stories/SimpleTable.js @@ -119,8 +119,13 @@ export class SimpleTable extends LitElement { z-index: 1; } + table tr:first-child td { + border-top: 0px; + } + th { border-right: 1px solid gray; + border-bottom: 1px solid gray; color: #222; font-weight: 400; text-align: center; @@ -503,7 +508,7 @@ export class SimpleTable extends LitElement { Object.keys(cols).map((k) => (cols[k] = "")); if (this.validateOnChange) Object.keys(cols).map((k) => { - const res = this.validateOnChange(k, { ...cols }, cols[k]); + const res = this.validateOnChange([k], { ...cols }, cols[k]); if (typeof res === "function") res(); }); diff --git a/src/renderer/src/stories/Table.js b/src/renderer/src/stories/Table.js index ec956b466..27745fb5b 100644 --- a/src/renderer/src/stories/Table.js +++ b/src/renderer/src/stories/Table.js @@ -295,7 +295,7 @@ export class Table extends LitElement { try { const valid = this.validateOnChange ? await this.validateOnChange( - k, + [k], { ...this.data[rowHeaders[row]] }, // Validate on a copy of the parent value, info @@ -551,7 +551,7 @@ export class Table extends LitElement { const rowName = rowHeaders[row]; // const cols = this.data[rowHeaders[row]] // Object.keys(cols).map(k => cols[k] = '') - // if (this.validateOnChange) Object.keys(cols).map(k => this.validateOnChange(k, { ...cols }, cols[k])) // Validate with empty values before removing + // if (this.validateOnChange) Object.keys(cols).map(k => this.validateOnChange([ k ], { ...cols }, cols[k])) // Validate with empty values before removing delete this.data[rowHeaders[row]]; delete unresolved[row]; this.onUpdate(rowName, null, undefined); // NOTE: Global metadata PR might simply set all data values to undefined diff --git a/src/renderer/src/stories/forms/utils.ts b/src/renderer/src/stories/forms/utils.ts index b1d2ded83..e51c312b1 100644 --- a/src/renderer/src/stories/forms/utils.ts +++ b/src/renderer/src/stories/forms/utils.ts @@ -32,15 +32,18 @@ export const textToArray = (value: string) => value.split("\n") const prop = copy[propName]; if (prop && typeof prop === "object" && !Array.isArray(prop)) { const internalCopy = (copy[propName] = { ...prop }); - if (internalCopy["$ref"]) { - const prevItem = path.slice(-1)[0]; - const resolved = parent.properties.definitions?.[prevItem]; + const refValue = internalCopy["$ref"] + if (refValue) { + + const refPath = refValue.split('/').slice(1) // NOTE: Assume from base + const resolved = refPath.reduce((acc, key) => acc[key], parent) + if (resolved) copy[propName] = resolved; else delete copy[propName] } else { for (let key in internalCopy) { const fullPath = [...path, propName, key]; - internalCopy[key] = replaceRefsWithValue(internalCopy[key], fullPath, copy); + internalCopy[key] = replaceRefsWithValue(internalCopy[key], fullPath, parent); } } } diff --git a/src/renderer/src/stories/pages/guided-mode/SourceData.stories.js b/src/renderer/src/stories/pages/guided-mode/SourceData.stories.js index 076277682..348d1fc09 100644 --- a/src/renderer/src/stories/pages/guided-mode/SourceData.stories.js +++ b/src/renderer/src/stories/pages/guided-mode/SourceData.stories.js @@ -12,6 +12,7 @@ import BlackrockRecordingInterfaceSchema from "../../../../../../schemas/json/ge import BlackrockSortingInterfaceSchema from "../../../../../../schemas/json/generated/BlackrockSortingInterface.json"; 
import CellExplorerSortingInterfaceSchema from "../../../../../../schemas/json/generated/CellExplorerSortingInterface.json"; import KiloSortSortingInterfaceSchema from "../../../../../../schemas/json/generated/KiloSortSortingInterface.json"; +import TdtRecordingInterfaceSchema from "../../../../../../schemas/json/generated/TdtRecordingInterface.json"; import Spike2RecordingInterfaceSchema from "../../../../../../schemas/json/generated/Spike2RecordingInterface.json"; import BrukerTiffSinglePlaneImagingInterfaceSchema from "../../../../../../schemas/json/generated/BrukerTiffSinglePlaneImagingInterface.json"; import ExtractSegmentationInterfaceSchema from "../../../../../../schemas/json/generated/ExtractSegmentationInterface.json"; @@ -28,7 +29,9 @@ import MEArecRecordingInterfaceSchema from "../../../../../../schemas/json/gener import PlexonRecordingInterfaceSchema from "../../../../../../schemas/json/generated/PlexonRecordingInterface.json"; import PlexonSortingInterfaceSchema from "../../../../../../schemas/json/generated/PlexonSortingInterface.json"; import AxonaRecordingInterfaceSchema from "../../../../../../schemas/json/generated/AxonaRecordingInterface.json"; +import VideoInterfaceSchema from "../../../../../../schemas/json/generated/VideoInterface.json"; import NeuralynxRecordingInterfaceSchema from "../../../../../../schemas/json/generated/NeuralynxRecordingInterface.json"; +import Suite2pSegmentationInterfaceSchema from "../../../../../../schemas/json/generated/Suite2pSegmentationInterface.json"; import AlphaOmegaRecordingInterfaceSchema from "../../../../../../schemas/json/generated/AlphaOmegaRecordingInterface.json"; import DeepLabCutInterfaceSchema from "../../../../../../schemas/json/generated/DeepLabCutInterface.json"; import SLEAPInterfaceSchema from "../../../../../../schemas/json/generated/SLEAPInterface.json"; @@ -40,6 +43,7 @@ import SpikeGLXConverterPipeSchema from "../../../../../../schemas/json/generate import BrukerTiffSinglePlaneConverterSchema from "../../../../../../schemas/json/generated/BrukerTiffSinglePlaneConverter.json"; import BrukerTiffMultiPlaneConverterSchema from "../../../../../../schemas/json/generated/BrukerTiffMultiPlaneConverter.json"; import MiniscopeConverterSchema from "../../../../../../schemas/json/generated/MiniscopeConverter.json"; +import CellExplorerRecordingInterfaceSchema from "../../../../../../schemas/json/generated/CellExplorerRecordingInterface.json"; export default { title: "Pages/Guided Mode/Source Data", @@ -77,6 +81,8 @@ globalStateCopy.schema.source_data.properties.CellExplorerSortingInterface = CellExplorerSortingInterfaceSchema.properties.CellExplorerSortingInterface; globalStateCopy.schema.source_data.properties.KiloSortSortingInterface = KiloSortSortingInterfaceSchema.properties.KiloSortSortingInterface; +globalStateCopy.schema.source_data.properties.TdtRecordingInterface = + TdtRecordingInterfaceSchema.properties.TdtRecordingInterface; globalStateCopy.schema.source_data.properties.Spike2RecordingInterface = Spike2RecordingInterfaceSchema.properties.Spike2RecordingInterface; globalStateCopy.schema.source_data.properties.BrukerTiffSinglePlaneImagingInterface = @@ -109,8 +115,11 @@ globalStateCopy.schema.source_data.properties.PlexonSortingInterface = PlexonSortingInterfaceSchema.properties.PlexonSortingInterface; globalStateCopy.schema.source_data.properties.AxonaRecordingInterface = AxonaRecordingInterfaceSchema.properties.AxonaRecordingInterface; +globalStateCopy.schema.source_data.properties.VideoInterface = 
VideoInterfaceSchema.properties.VideoInterface; globalStateCopy.schema.source_data.properties.NeuralynxRecordingInterface = NeuralynxRecordingInterfaceSchema.properties.NeuralynxRecordingInterface; +globalStateCopy.schema.source_data.properties.Suite2pSegmentationInterface = + Suite2pSegmentationInterfaceSchema.properties.Suite2pSegmentationInterface; globalStateCopy.schema.source_data.properties.AlphaOmegaRecordingInterface = AlphaOmegaRecordingInterfaceSchema.properties.AlphaOmegaRecordingInterface; globalStateCopy.schema.source_data.properties.DeepLabCutInterface = @@ -131,6 +140,8 @@ globalStateCopy.schema.source_data.properties.BrukerTiffMultiPlaneConverter = BrukerTiffMultiPlaneConverterSchema.properties.BrukerTiffMultiPlaneConverter; globalStateCopy.schema.source_data.properties.MiniscopeConverter = MiniscopeConverterSchema.properties.MiniscopeConverter; +globalStateCopy.schema.source_data.properties.CellExplorerRecordingInterface = + CellExplorerRecordingInterfaceSchema.properties.CellExplorerRecordingInterface; const results = globalStateCopy.results; for (let sub in results) { @@ -219,6 +230,12 @@ KiloSortSortingInterfaceGlobalCopy.interfaces.interface = KiloSortSortingInterfa KiloSortSortingInterfaceGlobalCopy.schema.source_data = KiloSortSortingInterfaceSchema; KiloSortSortingInterface.args = { activePage, globalState: KiloSortSortingInterfaceGlobalCopy }; +export const TdtRecordingInterface = PageTemplate.bind({}); +const TdtRecordingInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); +TdtRecordingInterfaceGlobalCopy.interfaces.interface = TdtRecordingInterface; +TdtRecordingInterfaceGlobalCopy.schema.source_data = TdtRecordingInterfaceSchema; +TdtRecordingInterface.args = { activePage, globalState: TdtRecordingInterfaceGlobalCopy }; + export const Spike2RecordingInterface = PageTemplate.bind({}); const Spike2RecordingInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); Spike2RecordingInterfaceGlobalCopy.interfaces.interface = Spike2RecordingInterface; @@ -318,12 +335,24 @@ AxonaRecordingInterfaceGlobalCopy.interfaces.interface = AxonaRecordingInterface AxonaRecordingInterfaceGlobalCopy.schema.source_data = AxonaRecordingInterfaceSchema; AxonaRecordingInterface.args = { activePage, globalState: AxonaRecordingInterfaceGlobalCopy }; +export const VideoInterface = PageTemplate.bind({}); +const VideoInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); +VideoInterfaceGlobalCopy.interfaces.interface = VideoInterface; +VideoInterfaceGlobalCopy.schema.source_data = VideoInterfaceSchema; +VideoInterface.args = { activePage, globalState: VideoInterfaceGlobalCopy }; + export const NeuralynxRecordingInterface = PageTemplate.bind({}); const NeuralynxRecordingInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); NeuralynxRecordingInterfaceGlobalCopy.interfaces.interface = NeuralynxRecordingInterface; NeuralynxRecordingInterfaceGlobalCopy.schema.source_data = NeuralynxRecordingInterfaceSchema; NeuralynxRecordingInterface.args = { activePage, globalState: NeuralynxRecordingInterfaceGlobalCopy }; +export const Suite2pSegmentationInterface = PageTemplate.bind({}); +const Suite2pSegmentationInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); +Suite2pSegmentationInterfaceGlobalCopy.interfaces.interface = Suite2pSegmentationInterface; +Suite2pSegmentationInterfaceGlobalCopy.schema.source_data = Suite2pSegmentationInterfaceSchema; +Suite2pSegmentationInterface.args = { activePage, globalState: Suite2pSegmentationInterfaceGlobalCopy }; + export 
const AlphaOmegaRecordingInterface = PageTemplate.bind({}); const AlphaOmegaRecordingInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); AlphaOmegaRecordingInterfaceGlobalCopy.interfaces.interface = AlphaOmegaRecordingInterface; @@ -389,3 +418,9 @@ const MiniscopeConverterGlobalCopy = JSON.parse(JSON.stringify(globalState)); MiniscopeConverterGlobalCopy.interfaces.interface = MiniscopeConverter; MiniscopeConverterGlobalCopy.schema.source_data = MiniscopeConverterSchema; MiniscopeConverter.args = { activePage, globalState: MiniscopeConverterGlobalCopy }; + +export const CellExplorerRecordingInterface = PageTemplate.bind({}); +const CellExplorerRecordingInterfaceGlobalCopy = JSON.parse(JSON.stringify(globalState)); +CellExplorerRecordingInterfaceGlobalCopy.interfaces.interface = CellExplorerRecordingInterface; +CellExplorerRecordingInterfaceGlobalCopy.schema.source_data = CellExplorerRecordingInterfaceSchema; +CellExplorerRecordingInterface.args = { activePage, globalState: CellExplorerRecordingInterfaceGlobalCopy }; diff --git a/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js b/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js index 53ddb4065..d68dae20d 100644 --- a/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js +++ b/src/renderer/src/stories/pages/guided-mode/data/GuidedMetadata.js @@ -28,7 +28,6 @@ import globalIcon from "../../../assets/global.svg?raw"; const imagingPlaneKey = "imaging_plane"; const propsToIgnore = { Ophys: { - // NOTE: Get this to work "*": { starting_time: true, rate: true, @@ -55,6 +54,18 @@ const propsToIgnore = { device: true, }, }, + Ecephys: { + UnitProperties: true, + ElectricalSeriesLF: true, + ElectricalSeriesAP: true, + Electrodes: { + "*": { + location: true, + group: true, + contact_vector: true, + }, + }, + }, Icephys: true, // Always ignore icephys metadata (for now) Behavior: true, // Always ignore behavior metadata (for now) "ndx-dandi-icephys": true, @@ -158,29 +169,10 @@ export class GuidedMetadataPage extends ManagedPage { const instanceId = `sub-${subject}/ses-${session}`; // Ignore specific metadata in the form by removing their schema value - const schema = globalState.schema.metadata[subject][session]; + const schema = preprocessMetadataSchema(globalState.schema.metadata[subject][session]); delete schema.description; - // Only include a select group of Ecephys metadata here - if ("Ecephys" in schema.properties) { - const toInclude = ["Device", "ElectrodeGroup", "Electrodes", "ElectrodeColumns", "definitions"]; - const ecephysProps = schema.properties.Ecephys.properties; - Object.keys(ecephysProps).forEach((k) => (!toInclude.includes(k) ? 
delete ecephysProps[k] : "")); - - // Change rendering order for electrode table columns - const ogElectrodeItemSchema = ecephysProps["Electrodes"].items.properties; - const order = ["channel_name", "group_name", "shank_electrode_number"]; - const sortedProps = Object.keys(ogElectrodeItemSchema).sort((a, b) => { - const iA = order.indexOf(a); - if (iA === -1) return 1; - const iB = order.indexOf(b); - if (iB === -1) return -1; - return iA - iB; - }); - - const newElectrodeItemSchema = (ecephysProps["Electrodes"].items.properties = {}); - sortedProps.forEach((k) => (newElectrodeItemSchema[k] = ogElectrodeItemSchema[k])); - } + const ephys = schema.properties.Ecephys; resolveMetadata(subject, session, globalState); @@ -188,12 +180,11 @@ export class GuidedMetadataPage extends ManagedPage { const patternPropsToRetitle = ["Ophys.Fluorescence", "Ophys.DfOverF", "Ophys.SegmentationImages"]; - const resolvedSchema = preprocessMetadataSchema(schema); - const ophys = resolvedSchema.properties.Ophys; + const ophys = schema.properties.Ophys; if (ophys) { // Set most Ophys tables to have minItems / maxItems equal (i.e. no editing possible) drillSchemaProperties( - resolvedSchema, + schema, (path, schema, target, isPatternProperties, parentSchema) => { if (path[0] === "Ophys") { const name = path.slice(-1)[0]; @@ -217,10 +208,11 @@ export class GuidedMetadataPage extends ManagedPage { ); } + console.log("schema", structuredClone(schema), structuredClone(results)); // Create the form const form = new JSONSchemaForm({ identifier: instanceId, - schema: resolvedSchema, + schema, results, globals: aggregateGlobalMetadata, @@ -402,9 +394,8 @@ export class GuidedMetadataPage extends ManagedPage { } }, - renderTable: function (name, metadata) { + renderTable: function (name, metadata, fullPath) { const updatedSchema = structuredClone(metadata.schema); - metadata.schema = updatedSchema; // NOTE: Handsontable will occasionally have a context menu that doesn't actually trigger any behaviors diff --git a/src/renderer/src/stories/pages/guided-mode/data/GuidedStructure.js b/src/renderer/src/stories/pages/guided-mode/data/GuidedStructure.js index 584c5f708..c5993c30e 100644 --- a/src/renderer/src/stories/pages/guided-mode/data/GuidedStructure.js +++ b/src/renderer/src/stories/pages/guided-mode/data/GuidedStructure.js @@ -84,13 +84,20 @@ export class GuidedStructurePage extends Page { }; beforeSave = async () => { - this.info.globalState.interfaces = { ...this.list.object }; + const interfaces = (this.info.globalState.interfaces = { ...this.list.object }); - // Remove extra interfaces from results + // Remove or reassign extra interfaces in results if (this.info.globalState.results) { this.mapSessions(({ info }) => { - Object.keys(info.source_data).forEach((key) => { - if (!this.info.globalState.interfaces[key]) delete info.source_data[key]; + const metadata = [info.source_data]; + metadata.forEach((results) => { + Object.keys(results).forEach((key) => { + if (!interfaces[key]) { + const renamed = this.list.items.find((item) => item.originalKey === key); + if (renamed) results[renamed.key] = results[key]; + delete results[key]; + } + }); }); }); } diff --git a/src/renderer/src/stories/pages/guided-mode/options/GuidedConversionOptions.js b/src/renderer/src/stories/pages/guided-mode/options/GuidedConversionOptions.js deleted file mode 100644 index 687240c8d..000000000 --- a/src/renderer/src/stories/pages/guided-mode/options/GuidedConversionOptions.js +++ /dev/null @@ -1,59 +0,0 @@ -import { html } from "lit"; -import { 
JSONSchemaForm } from "../../../JSONSchemaForm.js"; -import { Page } from "../../Page.js"; -import { onThrow } from "../../../../errors"; - -export class GuidedConversionOptionsPage extends Page { - constructor(...args) { - super(...args); - } - - footer = { - onNext: async () => { - await this.save(); // Save in case the conversion fails - await this.form.validate(); // Will throw an error in the callback - - // Preview a random conversion - delete this.info.globalState.stubs; // Clear the preview results - const results = await this.runConversions({ stub_test: true }, 1, { - title: "Testing conversion on a random session", - }); - this.info.globalState.stubs = results; // Save the preview results - - return this.to(1); - }, - }; - - render() { - const schema = { - properties: { - output_folder: { - type: "string", - format: "directory", - }, - }, - required: ["output_folder"], - }; - - let conversionGlobalState = this.info.globalState.conversion; - if (!conversionGlobalState) { - conversionGlobalState = this.info.globalState.conversion = { info: {}, results: null }; - } - - this.form = new JSONSchemaForm({ - schema, - results: conversionGlobalState.info, - dialogType: "showOpenDialog", - dialogOptions: { - properties: ["openDirectory", "createDirectory"], - }, - onUpdate: () => (this.unsavedUpdates = true), - onThrow, - }); - - return html` ${this.form} `; - } -} - -customElements.get("nwbguide-guided-conversion-options-page") || - customElements.define("nwbguide-guided-conversion-options-page", GuidedConversionOptionsPage); diff --git a/src/renderer/src/stories/pages/guided-mode/options/GuidedUpload.js b/src/renderer/src/stories/pages/guided-mode/options/GuidedUpload.js index 80d0cf784..34c0a949d 100644 --- a/src/renderer/src/stories/pages/guided-mode/options/GuidedUpload.js +++ b/src/renderer/src/stories/pages/guided-mode/options/GuidedUpload.js @@ -93,6 +93,7 @@ export class GuidedUploadPage extends Page { } footer = { + next: "Upload", onNext: async () => { await this.save(); // Save in case the conversion fails diff --git a/src/renderer/src/stories/pages/guided-mode/results/GuidedResults.js b/src/renderer/src/stories/pages/guided-mode/results/GuidedResults.js index 3aeb9ddf7..ea4ea4771 100644 --- a/src/renderer/src/stories/pages/guided-mode/results/GuidedResults.js +++ b/src/renderer/src/stories/pages/guided-mode/results/GuidedResults.js @@ -10,6 +10,10 @@ export class GuidedResultsPage extends Page { footer = {}; + updated() { + this.save(); // Save the current state + } + render() { const { conversion } = this.info.globalState; diff --git a/src/renderer/src/stories/pages/inspect/InspectPage.js b/src/renderer/src/stories/pages/inspect/InspectPage.js index efcae1997..04699513a 100644 --- a/src/renderer/src/stories/pages/inspect/InspectPage.js +++ b/src/renderer/src/stories/pages/inspect/InspectPage.js @@ -93,6 +93,7 @@ export class InspectPage extends Page { schema: { type: "array", items: { + type: "string", format: ["file", "directory"], multiple: true, }, diff --git a/src/renderer/src/validation/index.js b/src/renderer/src/validation/index.js index ae9206415..e0d819e18 100644 --- a/src/renderer/src/validation/index.js +++ b/src/renderer/src/validation/index.js @@ -24,35 +24,34 @@ export async function validateOnChange(name, parent, path, value) { else return; }, validationSchema); // Pass the top level until it runs out - let overridden = false; - // Skip wildcard check for categories marked with false if (lastResolved !== false && (functions === undefined || functions === true)) 
{ - // let overridden = false; - let lastWildcard; - toIterate.reduce((acc, key) => { - // Disable the value is a hardcoded list of functions + a wildcard has already been specified - if (acc && lastWildcard && Array.isArray(acc[key] ?? {})) overridden = true; - else if (acc && "*" in acc) { - if (acc["*"] === false && lastWildcard) - overridden = true; // Disable if false and a wildcard has already been specified - // Otherwise set the last wildcard - else { - lastWildcard = typeof acc["*"] === "string" ? acc["*"].replace(`{*}`, `${name}`) : acc["*"]; - overridden = false; // Re-enable if a new one is specified below - } - } else if (lastWildcard && typeof lastWildcard === "object") { - const newWildcard = lastWildcard[key] ?? lastWildcard["*"] ?? lastWildcard["**"] ?? (acc && acc["**"]); // Drill wildcard objects once resolved - // Prioritize continuation of last wildcard - if (newWildcard) lastWildcard = newWildcard; - } - - return acc?.[key]; - }, validationSchema); - - if (overridden && functions !== true) lastWildcard = false; // Disable if not promised to exist - - if (typeof lastWildcard === "function" || typeof lastWildcard === "string") functions = [lastWildcard]; + const getNestedMatches = (result, searchPath, toAlwaysCheck = []) => { + const matches = []; + const isUndefined = result === undefined; + if (Array.isArray(result)) matches.push(...result); + else if (result && typeof result === "object") + matches.push(...getMatches(result, searchPath, toAlwaysCheck)); + else if (!isUndefined) matches.push(result); + if (searchPath.length) + toAlwaysCheck.forEach((obj) => matches.push(...getMatches(obj, searchPath, toAlwaysCheck))); + return matches; + }; + + const getMatches = (obj = {}, searchPath, toAlwaysCheck = []) => { + const updatedAlwaysCheck = [...toAlwaysCheck]; + const updateSearchPath = [...searchPath]; + const nextToken = updateSearchPath.shift(); + const matches = []; + if (obj["*"]) matches.push(...getNestedMatches(obj["*"], updateSearchPath, updatedAlwaysCheck)); + if (obj["**"]) updatedAlwaysCheck.push(obj["**"]); + matches.push(...getNestedMatches(obj[nextToken], updateSearchPath, updatedAlwaysCheck)); // Always search to the end of the search path + return matches; + }; + + const matches = getMatches(validationSchema, toIterate); + const overridden = matches.some((match) => match === false); + functions = overridden && functions !== true ? 
false : matches; // Disable if not promised to exist—or use matches } if (!functions || (Array.isArray(functions) && functions.length === 0)) return; // No validation for this field @@ -63,12 +62,13 @@ export async function validateOnChange(name, parent, path, value) { if (typeof func === "function") { return func.call(this, name, copy, path, value); // Can specify alternative client-side validation } else { + const resolvedFunctionName = func.replace(`{*}`, `${name}`); return fetch(`${baseUrl}/neuroconv/validate`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ parent: copy, - function_name: func, + function_name: resolvedFunctionName, }), }) .then((res) => res.json()) diff --git a/src/renderer/src/validation/validation.json b/src/renderer/src/validation/validation.json index 96569524f..f2af26e36 100644 --- a/src/renderer/src/validation/validation.json +++ b/src/renderer/src/validation/validation.json @@ -1,10 +1,10 @@ { - "*": "check_{*}", "name": false, "conversion_output_folder": false, "NWBFile": { + "*": "check_{*}", "identifier": false, "session_description": false, "lab": false, @@ -42,6 +42,7 @@ "Behavior": false, "Subject": { + "*": "check_subject_{*}", "sessions": false, "description": false, "genotype": false, @@ -51,7 +52,6 @@ "subject_id": "check_subject_id_exists", "species": ["check_subject_species_form", "check_subject_species_exists"], "date_of_birth": false, - "age": ["check_subject_age", "check_subject_proper_age_range"], - "*": "check_subject_{*}" + "age": ["check_subject_age", "check_subject_proper_age_range"] } } diff --git a/src/renderer/src/validation/validation.ts b/src/renderer/src/validation/validation.ts index 262cafd6b..3a0eaae33 100644 --- a/src/renderer/src/validation/validation.ts +++ b/src/renderer/src/validation/validation.ts @@ -1,15 +1,9 @@ import schema from './validation.json' -import { JSONSchemaForm } from '../stories/JSONSchemaForm.js' +import { JSONSchemaForm, getSchema } from '../stories/JSONSchemaForm' import Swal from 'sweetalert2' -function rerenderTable (this: JSONSchemaForm, linkedPath: string[]) { - const element = this.getFormElement(linkedPath) - if (element) element.requestUpdate() // Re-render table to show updates - // if (element) setTimeout(() => { - // element.requestUpdate() - // }, 100); // Re-render table to show new column - return element -} + +// ----------------- Validation Utility Functions ----------------- // const isNotUnique = (key, currentValue, rows, idx) => { @@ -24,7 +18,6 @@ const isNotUnique = (key, currentValue, rows, idx) => { type: 'error' } ] - } const get = (object: any, path: string[]) => { @@ -44,90 +37,95 @@ const get = (object: any, path: string[]) => { } } -// NOTE: Does this maintain separation between multiple sessions? -schema.Ecephys.ElectrodeGroup = { - ["*"]: { - name: function (this: JSONSchemaForm, _, __, ___, value) { - const groups = this.results.Ecephys.ElectrodeGroup.map(({ name }) => name) - // Check if the latest value will be new. 
Run function after validation - if (!value || !groups.includes(value)) { - return () => { - setTimeout(() => rerenderTable.call(this, ['Ecephys', 'Electrodes'])) // Allow for the updates to occur - } - } - }, - device: function (this: JSONSchemaForm, name, parent, path) { - const devices = this.results.Ecephys.Device.map(({ name }) => name) - if (devices.includes(parent[name])) return true - else { - return [ - { - message: 'Not a valid device', - type: 'error' - } - ] - } - } - } +function ensureUnique(this: JSONSchemaForm, name, parent, path, value) { + const { + values, + value: row + } = get(this.results, path) // NOTE: this.results is out of sync with the actual row contents at the moment of validation + + + if (!row) return true // Allow blank rows + + const rows = values.slice(-1)[0] + const idx = path.slice(-1)[0] + const isUniqueError = isNotUnique(name, value, rows, idx) + if (isUniqueError) return isUniqueError + + return true } -schema.Ecephys.Electrodes = { - ["*"]:{ - - // Label columns as invalid if not registered on the ElectrodeColumns table - // NOTE: If not present in the schema, these are not being rendered... - ['*']: function (this: JSONSchemaForm, name, parent, path) { - const electrodeColumns = this.results.ElectrodeColumns - if (electrodeColumns && !electrodeColumns.find((row: any) => row.name === name)) return [ - { - message: 'Not a valid column', - type: 'error' - } - ] - }, - group_name: function (this: JSONSchemaForm, _, __, ___, value) { +const getTablePathInfo = (path: string[]) => { + const modality = path[0] as Modality + const slice = path.slice(-2) + const table = slice[1] + const row = slice[2] - const groups = this.results.Ecephys.ElectrodeGroup.map(({ name }) => name) - if (groups.includes(value)) return true - else { - return [ - { - message: 'Not a valid group name', - type: 'error' - } - ] + return { modality, table, row } +} + + +// ----------------- Joint Ophys and Ecephys Validation ----------------- // + +const dependencies = { + Ophys: { + devices: [ + { + path: [ 'ImagingPlane' ], + key: 'device' + }, + { + path: [ 'TwoPhotonSeries' ], + key: 'imaging_plane' + }, + { + path: [ 'OnePhotonSeries' ], + key: 'imaging_plane' } - } + ] + }, + Ecephys: { + devices: [ + { + path: [ 'ElectrodeGroup' ], + key: 'device' + } + ], + groups: [ + { + path: [ 'Electrodes', '*', 'Electrodes' ], + key: 'group_name' + } + ] } } +type Modality = keyof typeof dependencies -// Update the columns available on the Electrodes table when there is a new name in the ElectrodeColumns table -schema.Ecephys.ElectrodeColumns = { +schema.Ophys = schema.Ecephys = { ['*']: { - ['*']: function (this: JSONSchemaForm, prop, parent, path) { - - const name = parent['name'] - if (!name) return true // Allow blank rows - - if (prop === 'name' && !(name in this.schema.properties.Ecephys.properties.Electrodes.items.properties)) { - const element = rerender.call(this, ['Ecephys', 'Electrodes']) - element.schema.properties[name] = {} // Ensure property is present in the schema now - element.data.forEach(row => name in row ? 
undefined : row[name] = '') // Set column value as blank if not existent on row + '**': { + ['name']: ensureUnique, } } - } } -function ensureUnique(this: JSONSchemaForm, name, parent, path, value) { +async function safeRename (this: JSONSchemaForm, name, parent, path, value, options = {}) { + + const { + dependencies = {}, + swalOptions = {} + } = options + const { values, value: row } = get(this.results, path) + const info = getTablePathInfo(path) + if (!row) return true // Allow blank rows const rows = values.slice(-1)[0] @@ -135,82 +133,201 @@ function ensureUnique(this: JSONSchemaForm, name, parent, path, value) { const isUniqueError = isNotUnique(name, value, rows, idx) if (isUniqueError) return isUniqueError + const prevValue = row[name] + + if (prevValue === value || prevValue === undefined) return true // No change + + const prevUniqueError = isNotUnique(name, prevValue, rows, idx) + if (prevUniqueError) return true // Register as valid + + const resolvedSwalOptions = {} + for (const key in swalOptions) resolvedSwalOptions[key] = typeof swalOptions[key] === 'function' ? swalOptions[key](value, prevValue) : swalOptions[key] + + const result = await Swal.fire({ + ...resolvedSwalOptions, + icon: "warning", + heightAuto: false, + backdrop: "rgba(0,0,0, 0.4)", + confirmButtonText: "I understand", + showConfirmButton: true, + showCancelButton: true, + cancelButtonText: "Cancel" + }) + + if (!result.isConfirmed) return null + + // Update Dependent Tables + const modalityDependencies = dependencies[info.modality] ?? [] + + modalityDependencies.forEach(({ key, path }) => { + const fullPath = [info.modality, ...path] + const tables = this.getAllFormElements(fullPath, { tables: true }) + console.log('Got all tables', tables, fullPath) + tables.forEach(table => { + const data = table.data + data.forEach(row => { + if (row[key] === prevValue) row[key] = value + }) + table.data = data + table.requestUpdate() + }) + }) + return true } -schema.Ophys = { - ['*']: { - '**': { - ['name']: ensureUnique, +// Ophys +schema.Ophys.Device = schema.Ecephys.Device = { + ["*"]: { + + ['name']: function(...args) { + return safeRename.call(this, ...args, { + dependencies: { Ophys: dependencies.Ophys.devices, Ecephys: dependencies.Ecephys.devices }, + swalOptions: { + title: (current, prev) => `Are you sure you want to rename the ${prev} device?`, + text: () => `We will attempt to auto-update your Ophys devices to reflect this.`, + } + }) + }, + + } +} + +// ----------------- Ecephys Validation ----------------- // + +// NOTE: Does this maintain separation between multiple sessions? +schema.Ecephys.ElectrodeGroup = { + ["*"]: { + + name: function(...args) { + return safeRename.call(this, ...args, { + dependencies: { Ecephys: dependencies.Ecephys.groups }, + swalOptions: { + title: (current, prev) => `Are you sure you want to rename the ${prev} group?`, + text: () => `We will attempt to auto-update your electrode groups to reflect this.`, + } + }) + }, + + device: function (this: JSONSchemaForm, name, parent, path, value) { + const devices = this.results.Ecephys.Device.map(({ name }) => name) + + if (devices.includes(value)) return true + else { + return [ + { + message: 'Not a valid device', + type: 'error' + } + ] + } } } } -// Ophys -schema.Ophys.Device = { + +// Label columns as invalid if not registered on the ElectrodeColumns table +// NOTE: If not present in the schema, these are not being rendered... 
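// A rough illustration of how the '*' and '**' wildcards in this validation schema are meant to
// resolve against a metadata path, mirroring the getMatches/getNestedMatches logic added to
// src/renderer/src/validation/index.js above. The schema, path, and function names here are toy
// examples, not entries shipped in validation.json.
const toyValidationSchema = {
    Ecephys: {
        ElectrodeGroup: {
            "*": {                       // matches any row index at this level
                name: "check_name",      // runs only for the "name" column of that row
            },
        },
        "**": { name: "ensure_unique" }, // once seen, keeps applying at every deeper level
    },
    Behavior: false,                     // a false entry disables validation beneath it
};

// A path such as ["Ecephys", "ElectrodeGroup", "0", "name"] would therefore collect both
// "check_name" (via "*") and "ensure_unique" (via "**"), while a false match along the way
// disables validation for that field; string entries still have "{*}" replaced with the field
// name before being sent to the /neuroconv/validate endpoint.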
+ +schema.Ecephys.Electrodes = { + + // All interfaces ["*"]: { - ['name']: async function (this: JSONSchemaForm, name, parent, path, value) { + Electrodes: { - const { - values, - value: row - } = get(this.results, path) + // All other column + ['*']: function (this: JSONSchemaForm, name, _, path) { - if (!row) return true // Allow blank rows + const commonPath = path.slice(0, -2) - const rows = values.slice(-1)[0] - const idx = path.slice(-1)[0] - const isUniqueError = isNotUnique(name, value, rows, idx) - if (isUniqueError) return isUniqueError + const colPath = [...commonPath, 'ElectrodeColumns'] - const prevValue = row[name] + const { value: electrodeColumns } = get(this.results, colPath) // NOTE: this.results is out of sync with the actual row contents at the moment of validation - if (prevValue === value || prevValue === undefined) return true // No change + if (electrodeColumns && !electrodeColumns.find((row: any) => row.name === name)) { + return [ + { + message: 'Not a valid column', + type: 'error' + } + ] + } + }, - const prevUniqueError = isNotUnique(name, prevValue, rows, idx) - if (prevUniqueError) return true // Register as valid + // Group name column + group_name: function (this: JSONSchemaForm, _, __, ___, value) { - const result = await Swal.fire({ - title: `Are you sure you want to rename the ${prevValue} device?`, - icon: "warning", - text: `We will attempt to auto-update your Ophys devices to reflect this.`, - heightAuto: false, - backdrop: "rgba(0,0,0, 0.4)", - confirmButtonText: "I understand", - showConfirmButton: true, - showCancelButton: true, - cancelButtonText: "Cancel" - }) + const groups = this.results.Ecephys.ElectrodeGroup.map(({ name }) => name) // Groups are validated across all interfaces - if (!result.isConfirmed) return null - - // Update Dependent Tables - const dependencies = [ - ['Ophys', 'ImagingPlane'], - ['Ophys', 'OnePhotonSeries'], - ['Ophys', 'TwoPhotonSeries'] - ] - - dependencies.forEach(path => { - const table = this.getFormElement(path, { tables: true }) - if (table) { - const data = table.data - data.forEach(row => { - if (row.device === prevValue) row.device = value - }) - table.data = data + if (groups.includes(value)) return true + else { + return [ + { + message: 'Not a valid group name', + type: 'error' + } + ] } + } + }, - rerenderTable.call(this, path) - }) + // Update the columns available on the Electrodes table when there is a new name in the ElectrodeColumns table + ElectrodeColumns: { + ['*']: { + '*': function (this: JSONSchemaForm, propName, __, path, value) { - return true - } + const commonPath = path.slice(0, -2) + const electrodesTablePath = [ ...commonPath, 'Electrodes'] + const electrodesTable = this.getFormElement(electrodesTablePath) + const electrodesSchema = electrodesTable.schema // Manipulate the schema that is on the table + const globalElectrodeSchema = getSchema(electrodesTablePath, this.schema) + + const { value: row } = get(this.results, path) + + const currentName = row?.['name'] + + const hasNameUpdate = propName == 'name' && !(value in electrodesSchema.items.properties) + + const resolvedName = hasNameUpdate ? 
value : currentName + + if (value === currentName) return true // No change + if (!resolvedName) return true // Only set when name is actually present + + const schemaToEdit = [electrodesSchema, globalElectrodeSchema] + schemaToEdit.forEach(schema => { + + const properties = schema.items.properties + const oldRef = properties[currentName] + if (row) delete properties[currentName] // Delete previous name from schema + + properties[resolvedName] = { + ...oldRef ?? {}, + description: propName === 'description' ? value : row?.description, + data_type: propName === 'data_type' ? value : row?.data_type, + } + }) + + // Swap the new and current name information + if (hasNameUpdate) { + const electrodesTable = this.getFormElement([ ...commonPath, 'Electrodes']) + electrodesTable.data.forEach(row => { + if (!(value in row)) row[value] = row[currentName] // Initialize new column with old values + delete row[currentName] // Delete old column + }) + } + + // Always re-render the Electrodes table on column changes + electrodesTable.requestUpdate() + } + }, + } } } +// ----------------- Ophys Validation ----------------- // + schema.Ophys.ImagingPlane = { ["*"]: { device: function (this: JSONSchemaForm, name, parent, path, value) {