diff --git a/src/electron/frontend/core/components/BasicTable.js b/src/electron/frontend/core/components/BasicTable.js index 4f6b182b48..2c133f6a73 100644 --- a/src/electron/frontend/core/components/BasicTable.js +++ b/src/electron/frontend/core/components/BasicTable.js @@ -384,7 +384,6 @@ export class BasicTable extends LitElement { }; #readTSV(text) { - console.log(text, text.split("\n")); let data = text.split("\n").map((row) => row.split("\t").map((v) => { try { diff --git a/src/electron/frontend/core/components/Dashboard.js b/src/electron/frontend/core/components/Dashboard.js index 126aa62704..ce93c02ced 100644 --- a/src/electron/frontend/core/components/Dashboard.js +++ b/src/electron/frontend/core/components/Dashboard.js @@ -183,7 +183,7 @@ export class Dashboard extends LitElement { else if (typeof page === "object") return this.getPage(Object.values(page)[0]); } - updateSections({ sidebar = true, main = false } = {}, globalState = this.page.info.globalState) { + updateSections({ sidebar = true, main = false, header = false } = {}, globalState = this.page.info.globalState) { const info = this.page.info; let parent = info.parent; @@ -199,7 +199,7 @@ export class Dashboard extends LitElement { page: this.page, sections, }); - } + } else if (header) this.main.header.sections = sections; // Update header sections return sections; } @@ -332,7 +332,7 @@ export class Dashboard extends LitElement { let state = globalState.sections[section]; if (!state) state = globalState.sections[section] = { - open: false, + open: undefined, active: false, pages: {}, }; @@ -378,9 +378,9 @@ export class Dashboard extends LitElement { this.#transitionPromise.value ?? (this.#transitionPromise.value = new Promise( (resolve) => - (this.#transitionPromise.trigger = (value) => { - delete this.#transitionPromise.value; - resolve(value); + (this.#transitionPromise.trigger = (v) => { + this.#transitionPromise.value = null; // Reset promise + resolve(v); }) )); diff --git a/src/electron/frontend/core/components/InstanceManager.js b/src/electron/frontend/core/components/InstanceManager.js index cc50717135..3b9dc49bbb 100644 --- a/src/electron/frontend/core/components/InstanceManager.js +++ b/src/electron/frontend/core/components/InstanceManager.js @@ -116,6 +116,7 @@ export class InstanceManager extends LitElement { .controls > div { display: flex; gap: 10px; + align-items: center; } #new-info { @@ -357,6 +358,11 @@ export class InstanceManager extends LitElement { }); } + const controls = this.#controls(); + const controlDiv = this.shadowRoot.querySelector(".controls > div"); + controlDiv.innerHTML = ""; + controlDiv.append(...controls); + this.#onSelected(); }; @@ -370,6 +376,27 @@ export class InstanceManager extends LitElement { #hasMultiple = () => this.#items.length > 1; + #controls = () => { + return this.controls.map((item) => { + if (item instanceof HTMLElement) + return item; // Custom element + else if (typeof item === "function") return item(this.#selected); // Function + + // Button configuration + const { name, icon, primary, onClick } = item; + return html` div:not([hidden])"); + onClick.call(this, activeContentElement.getAttribute("data-instance"), activeContentElement); + }} + >${name}`; + }); + }; + render() { this.#info = {}; this.#items = []; @@ -445,26 +472,7 @@ export class InstanceManager extends LitElement {
-                    <div>
-                        ${this.controls.map(({ name, icon, primary, onClick }) => {
-                            return html`<nwb-button
-                                .icon=${icon}
-                                .primary=${primary}
-                                @click=${function () {
-                                    const activeContentElement = this.shadowRoot.querySelector(
-                                        "#content > div:not([hidden])"
-                                    );
-                                    onClick.call(
-                                        this,
-                                        activeContentElement.getAttribute("data-instance"),
-                                        activeContentElement
-                                    );
-                                }}
-                                >${name}</nwb-button>`;
-                        })}
-                    </div>
+                    <div>${this.#controls()}</div>
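
Aside: with the #controls() refactor above, InstanceManager.controls can mix plain button configurations with prebuilt elements and selection-aware factories. A rough usage sketch (not part of the patch; save and makeStatusBadge are hypothetical helpers):

    const manager = new InstanceManager({
        header: "Sessions",
        instanceType: "Session",
        instances, // e.g. { "sub-001": { "ses-01": element, ... }, ... }
        controls: [
            // Plain button configuration, rendered as a button as before
            { name: "Save & Validate", primary: true, onClick: (id, contentElement) => save(id) },
            // Factory invoked with the currently selected instance key
            (selected) => makeStatusBadge(selected),
            // Any prebuilt HTMLElement is appended as-is
            Object.assign(document.createElement("small"), { textContent: "Unsaved changes" }),
        ],
    });
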
diff --git a/src/electron/frontend/core/components/JSONSchemaForm.js b/src/electron/frontend/core/components/JSONSchemaForm.js index 9c9572d3fb..075627d3ee 100644 --- a/src/electron/frontend/core/components/JSONSchemaForm.js +++ b/src/electron/frontend/core/components/JSONSchemaForm.js @@ -581,7 +581,8 @@ export class JSONSchemaForm extends LitElement { const allErrors = flaggedInputs .map((inputElement) => Array.from(inputElement.nextElementSibling.children).map((li) => li.message)) - .flat(); + .flat() + .filter((v) => !!v); const nMissingRequired = allErrors.reduce((acc, curr) => { return (acc += curr.includes(this.#isARequiredPropertyString) ? 1 : 0); diff --git a/src/electron/frontend/core/components/JSONSchemaInput.js b/src/electron/frontend/core/components/JSONSchemaInput.js index 9e186fc381..0a3b150a57 100644 --- a/src/electron/frontend/core/components/JSONSchemaInput.js +++ b/src/electron/frontend/core/components/JSONSchemaInput.js @@ -501,6 +501,19 @@ export class JSONSchemaInput extends LitElement { }; } + #description; + get description() { + return this.#description ?? this.schema.description; + } + + set description(value) { + this.#description = value; + + const descriptionEl = this.shadowRoot.querySelector(".guided--text-input-instructions"); + if (!descriptionEl) return; + descriptionEl.innerHTML = value; + } + // Enforce dynamic required properties attributeChangedCallback(key, _, latest) { super.attributeChangedCallback(...arguments); @@ -537,6 +550,7 @@ export class JSONSchemaInput extends LitElement { controls = []; // required; validateOnChange = true; + allowNaN = true; constructor(props = {}) { super(); @@ -664,7 +678,11 @@ export class JSONSchemaInput extends LitElement { >${(schema.title ? unsafeHTML(schema.title) : null) ?? header(this.path.slice(-1)[0])}` : ""} -
-            <main>${input}${this.controls ? html`<div class="controls">${this.controls}</div>` : ""}</main>
+            <main>
+                ${input}${this.controls && this.controls.length
+                    ? html`<div class="controls">${this.controls}</div>`
+                    : ""}
+            </main>
${descriptionHTML}
`; @@ -857,6 +875,17 @@ export class JSONSchemaInput extends LitElement { #render() { const { validateOnChange, schema, path: fullPath } = this; + // Resolve anyof inside the schema + const anyOf = schema.anyOf; + if (anyOf) { + delete schema.anyOf; + for (let key in anyOf[0]) { + schema[key] = anyOf[0][key]; + } + + // schema = {...schema, ...anyOf[0]} + } + this.removeAttribute("data-table"); // Do your best to fill in missing schema values @@ -907,26 +936,53 @@ export class JSONSchemaInput extends LitElement { return filesystemSelectorElement; }; - // Transform to single item if maxItems is 1 - if (isArray && schema.maxItems === 1 && !isTable) { - return new JSONSchemaInput({ - value: this.value?.[0], - schema: { - ...schema.items, - strict: schema.strict, - }, - path: fullPath, - validateEmptyValue: this.validateEmptyValue, - required: this.required, - validateOnChange: () => (validateOnChange ? this.#triggerValidation(name, path) : ""), - form: this.form, - onUpdate: (value) => this.#updateData(fullPath, [value]), + // Transform to single item if maxItems is 1 OR the array has a fixed length + if ( + isArray && + (schema.maxItems === 1 || (schema.maxItems && schema.minItems && schema.maxItems === schema.minItems)) && + !isTable + ) { + const len = schema.maxItems ?? 1; + const array = this.value ?? []; + + // JSONified arrays will convert undefined to null + const jsonify = (value) => (value === undefined ? null : value); + const jsonschemify = (value) => (value === null ? undefined : value); + + this.required = false; + + const inputs = Array.from({ length: len }).map((_, i) => { + const input = new JSONSchemaInput({ + value: jsonify(array[i]), + schema: { + ...schema.items, + strict: schema.strict, + }, + path: fullPath, + validateEmptyValue: this.validateEmptyValue, + required: this.required, + allowNaN: false, + validateOnChange: () => (validateOnChange ? this.#triggerValidation(name, path) : ""), + form: this.form, + onUpdate: (value) => { + array[i] = jsonschemify(value); + this.#updateData(fullPath, [...array]); + }, + }); + + array[i] = jsonschemify(array[i]); + + return input; }); + + return inputs; } if (isArray || canAddProperties) { // if ('value' in this && !Array.isArray(this.value)) this.value = [ this.value ] + const editableInline = ["string", "number"]; + const allowPatternProperties = isPatternProperties(this.pattern); const allowAdditionalProperties = isAdditionalProperties(this.pattern); @@ -949,12 +1005,18 @@ export class JSONSchemaInput extends LitElement { const fileSystemFormat = isFilesystemSelector(name, itemSchema?.format); if (fileSystemFormat) return createFilesystemSelector(fileSystemFormat); // Create tables if possible - else if (itemSchema?.type === "string" && !itemSchema.properties) { + else if (editableInline.includes(itemSchema?.type) && !itemSchema.properties) { + const postprocess = (v) => { + if (itemSchema?.type === "number") return parseFloat(v); + else return v; + }; + const list = new List({ items: this.value, - emptyMessage: "No items", + emptyMessage: schema.empty ?? "No items selected.", + unordered: false, onChange: ({ items }) => { - this.#updateData(fullPath, items.length ? items.map((o) => o.value) : undefined); + this.#updateData(fullPath, items.length ? 
items.map((o) => postprocess(o.value)) : undefined); if (validateOnChange) this.#triggerValidation(name, path); }, }); @@ -972,6 +1034,7 @@ export class JSONSchemaInput extends LitElement { }; }), value: this.value, + placeholder: schema.placeholder, listMode: schema.strict === false ? "click" : "append", showAllWhenEmpty: false, onSelect: async ({ label, value }) => { @@ -981,12 +1044,16 @@ export class JSONSchemaInput extends LitElement { }, }); - search.style.height = "auto"; - return html`
${search}${list}
`; + Object.assign(search.style, { width: "100%" }); + + return html`
+
${search}
+ ${list} +
`; } else { const input = document.createElement("input"); input.classList.add("guided--input"); - input.placeholder = "Provide an item for the list"; + input.placeholder = schema.placeholder ?? "Provide an item for the list"; const submitButton = new Button({ label: "Submit", @@ -1006,11 +1073,13 @@ export class JSONSchemaInput extends LitElement { }); return html`
validateOnChange && this.#triggerValidation(name, path)} > -
${input}${submitButton}
+
+ ${input}${submitButton} +
${list}
`; } @@ -1150,6 +1219,7 @@ export class JSONSchemaInput extends LitElement { const search = new Search({ options, strict: schema.strict, + placeholder: schema.placeholder, value: { value: this.value, key: this.value, @@ -1188,7 +1258,7 @@ export class JSONSchemaInput extends LitElement { if (isInteger) schema.type = "number"; const isNumber = schema.type === "number"; - const isRequiredNumber = isNumber && this.required; + const isRequiredNumber = isNumber && this.required && this.allowNaN; const fileSystemFormat = isFilesystemSelector(name, schema.format); if (fileSystemFormat) return createFilesystemSelector(fileSystemFormat); @@ -1240,11 +1310,37 @@ export class JSONSchemaInput extends LitElement { else if (isNumber) value = newValue = parseFloat(value); else if (isDateTime) value = newValue = resolveDateTime(value); + const isStrict = schema.strict ? true : false; if (isNumber) { - if ("min" in schema && newValue < schema.min) newValue = schema.min; - else if ("max" in schema && newValue > schema.max) newValue = schema.max; - - if (isNaN(newValue)) newValue = undefined; + // exclusiveMinimum + if ("exclusiveMinimum" in schema && newValue <= schema.exclusiveMinimum) { + if (isStrict) + newValue = this.value; // Set back to last value + else newValue = schema.exclusiveMinimum + 1; // (schema.step ?? 1); + } + // exclusiveMaximum + else if ("exclusiveMaximum" in schema && newValue >= schema.exclusiveMaximum) { + if (isStrict) + newValue = this.value; // Set back to last value + else newValue = schema.exclusiveMaximum - 1; // (schema.step ?? 1); + } + // minimum + else if ("minimum" in schema && newValue < schema.minimum) { + if (isStrict) + newValue = this.value; // Set back to last value + else newValue = schema.minimum; + } + // maximum + else if ("maximum" in schema && newValue > schema.maximum) { + if (isStrict) + newValue = this.value; // Set back to last value + else newValue = schema.maximum; + } + if (isNaN(newValue)) { + if (isStrict) + newValue = this.value; // Set back to last value + else newValue = undefined; + } } if (schema.transform) newValue = schema.transform(newValue, this.value, schema); @@ -1256,7 +1352,7 @@ export class JSONSchemaInput extends LitElement { // } if (isNumber && newValue !== value) { - ev.target.value = newValue; + if (newValue !== undefined) ev.target.value = newValue; // Avoids unnecessary error message value = newValue; } diff --git a/src/electron/frontend/core/components/List.ts b/src/electron/frontend/core/components/List.ts index 5fe983e47c..e6b8f24fc9 100644 --- a/src/electron/frontend/core/components/List.ts +++ b/src/electron/frontend/core/components/List.ts @@ -36,6 +36,11 @@ export class List extends LitElement { overflow: auto; } + ol { + margin: 0px; + } + + #empty { margin: 1rem; margin-left: -40px; diff --git a/src/electron/frontend/core/components/Main.js b/src/electron/frontend/core/components/Main.js index a718756107..7270ac1ec7 100644 --- a/src/electron/frontend/core/components/Main.js +++ b/src/electron/frontend/core/components/Main.js @@ -111,7 +111,6 @@ export class Main extends LitElement { let { page = "", sections = {} } = this.toRender ?? {}; let footer = page?.footer; // Page-specific footer - let header = page?.header; // Page-specific header if (page) { this.to = page.to; @@ -139,62 +138,99 @@ export class Main extends LitElement { if (footer === true) footer = {}; if (footer && "onNext" in footer && !("next" in footer)) footer.next = "Next"; + } + + const footerEl = footer ? (this.footer = new GuidedFooter(footer)) : html`
`; // Render for grid + if (!footer) delete this.footer; // Reset footer + + this.header = new MainHeader(sections, page); + + return html` + ${this.header} + +
+
${page}
+
+ ${footerEl} + `; + } +} + +customElements.get("nwb-main") || customElements.define("nwb-main", Main); + +class MainHeader extends LitElement { + static get properties() { + return { + sections: { type: Object }, + }; + } + + constructor(sections, page) { + super(); + this.sections = sections; + this.page = page; + } + + createRenderRoot() { + return this; + } + + render() { + let { page = "", sections = {} } = this; + + delete this.title; // Reset title + + let config = page?.header; // Page-specific header + + if (page) { + const info = page.info ?? {}; - // Define header states const section = sections[info.section]; if (section) { - if (header === true || !("header" in page) || !("sections" in page.header)) { + if (config === true || !("header" in page) || !("sections" in page.header)) { const sectionNames = Object.entries(sections) .filter(([name, info]) => !Object.values(info.pages).every((state) => state.skipped)) .map(([name]) => name); - header = page.header && typeof page.header === "object" ? page.header : {}; - header.sections = sectionNames; - header.selected = sectionNames.indexOf(info.section); + + config = page.header && typeof page.header === "object" ? page.header : {}; + config.sections = sectionNames; + config.selected = sectionNames.indexOf(info.section); } } } - const headerEl = header ? (this.header = new GuidedHeader(header)) : html`
`; // Render for grid - if (!header) delete this.header; // Reset header - - if (!header) delete this.header; // Reset header - - const footerEl = footer ? (this.footer = new GuidedFooter(footer)) : html`
`; // Render for grid - if (!footer) delete this.footer; // Reset footer + const headerEl = config ? (this.header = new GuidedHeader(config)) : html`
`; // Render for grid + if (!config) delete this.header; // Reset header - const title = header?.title ?? page.info?.title; + const title = config?.title ?? page.info?.title; + if (title) this.title = title; // Set title if not undefined - let subtitle = header?.subtitle; + let subtitle = config?.subtitle; if (typeof subtitle === "function") subtitle = subtitle(); // Generate custom header content if required - let controls = header?.controls; + let controls = config?.controls; if (typeof controls === "function") controls = controls(); // Generate custom header content if required return html` - ${headerEl} - ${ - title +
+ ${headerEl} + ${this.title ? html`
-

${title}

- ${unsafeHTML(subtitle)} +

${this.title}

+ ${subtitle instanceof HTMLElement ? subtitle : unsafeHTML(subtitle)}
${controls}

` - : "" - } - -
-
${page}
-
- ${footerEl} + : ""} +
`; } } -customElements.get("nwb-main") || customElements.define("nwb-main", Main); +customElements.get("nwb-main-header") || customElements.define("nwb-main-header", MainHeader); diff --git a/src/electron/frontend/core/components/NavigationSidebar.js b/src/electron/frontend/core/components/NavigationSidebar.js index 52bb50c34b..c30ca4ee1b 100644 --- a/src/electron/frontend/core/components/NavigationSidebar.js +++ b/src/electron/frontend/core/components/NavigationSidebar.js @@ -2,6 +2,8 @@ import { LitElement, html } from "lit"; import useGlobalStyles from "./utils/useGlobalStyles.js"; import { unsafeHTML } from "lit/directives/unsafe-html.js"; +const autoOpenValue = Symbol("SECTION_AUTO_OPEN"); + const componentCSS = ` `; @@ -61,8 +63,9 @@ export class NavigationSidebar extends LitElement { !isAllSkipped ); - if (isActive) this.#toggleDropdown(sectionName, true); - else this.#toggleDropdown(sectionName, false); + if (isActive) this.#toggleDropdown(sectionName, autoOpenValue); + else if (info.open === autoOpenValue) this.#toggleDropdown(sectionName, false); + else this.#toggleDropdown(sectionName, info.open); }); if (this.#queue.length) { diff --git a/src/electron/frontend/core/components/ProgressBar.ts b/src/electron/frontend/core/components/ProgressBar.ts index 46369e77bd..8eeb71dab8 100644 --- a/src/electron/frontend/core/components/ProgressBar.ts +++ b/src/electron/frontend/core/components/ProgressBar.ts @@ -1,6 +1,7 @@ import { LitElement, html, css, unsafeCSS } from 'lit'; +import { humanReadableBytes } from './utils/size'; export type ProgressProps = { size?: string, @@ -14,28 +15,6 @@ export type ProgressProps = { } } -export function humanReadableBytes(size: number | string) { - - // Define the units - const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; - - // Initialize the index to 0 - let index = 0; - - // Convert the size to a floating point number - size = parseFloat(size); - - // Loop until the size is less than 1024 and increment the unit - while (size >= 1000 && index < units.length - 1) { - size /= 1000; - index += 1; - } - - // Return the size formatted with 2 decimal places and the appropriate unit - return `${size.toFixed(2)} ${units[index]}`; -} - - const animationDuration = 500 // ms export class ProgressBar extends LitElement { diff --git a/src/electron/frontend/core/components/Search.js b/src/electron/frontend/core/components/Search.js index dcd2647618..bb7d889de2 100644 --- a/src/electron/frontend/core/components/Search.js +++ b/src/electron/frontend/core/components/Search.js @@ -17,11 +17,13 @@ export class Search extends LitElement { headerStyles = {}, disabledLabel, onSelect, + placeholder = "Type here to search", strict = false, } = {}) { super(); this.#value = value; this.options = options; + this.placeholder = placeholder; this.showAllWhenEmpty = showAllWhenEmpty; this.disabledLabel = disabledLabel; this.listMode = listMode; @@ -225,6 +227,7 @@ export class Search extends LitElement { static get properties() { return { + placeholder: { type: String }, options: { type: Object }, showAllWhenEmpty: { type: Boolean }, listMode: { type: String, reflect: true }, @@ -264,7 +267,7 @@ export class Search extends LitElement { if (inputMode) this.setAttribute("active", false); if (this.strict && !selectedOption) { - input.value = this.#value.label; + input.value = this.#value.label ?? this.#value.key ?? ""; return; } @@ -488,7 +491,7 @@ export class Search extends LitElement {
- { + { clickEvent.stopPropagation(); if (ALTERNATIVE_MODES.includes(this.listMode)) { const input = clickEvent.target.value; diff --git a/src/electron/frontend/core/components/pages/Page.js b/src/electron/frontend/core/components/pages/Page.js index e340ad6ef5..672068b1eb 100644 --- a/src/electron/frontend/core/components/pages/Page.js +++ b/src/electron/frontend/core/components/pages/Page.js @@ -119,18 +119,21 @@ export class Page extends LitElement { mapSessions = (callback, data = this.info.globalState.results) => mapSessions(callback, data); - async convert({ preview } = {}) { + async convert({ preview, ...conversionOptions } = {}, options = {}) { const key = preview ? "preview" : "conversion"; delete this.info.globalState[key]; // Clear the preview results if (preview) { - const stubs = await this.runConversions({ stub_test: true }, undefined, { - title: "Creating conversion preview for all sessions...", - }); + if (!options.title) options.title = "Running preview conversion on all sessions..."; + + const stubs = await this.runConversions({ stub_test: true, ...conversionOptions }, undefined, options); + this.info.globalState[key] = { stubs }; } else { - this.info.globalState[key] = await this.runConversions({}, true, { title: "Running all conversions" }); + if (!options.title) options.title = "Running all conversions"; + + this.info.globalState[key] = await this.runConversions(conversionOptions, true, options); } this.unsavedUpdates = true; @@ -142,7 +145,9 @@ export class Page extends LitElement { await this.save({}, false); } - async runConversions(conversionOptions = {}, toRun, options = {}) { + async runConversions(conversionOptions = {}, toRun, options = {}, backendFunctionToRun = null) { + const hasCustomFunction = !!backendFunctionToRun; + let original = toRun; if (!Array.isArray(toRun)) toRun = this.mapSessions(); @@ -152,11 +157,14 @@ export class Page extends LitElement { else if (typeof original === "string") toRun = toRun.filter(({ subject }) => subject === original); else if (typeof original === "function") toRun = toRun.filter(original); - const results = {}; + const conversionOutput = {}; - const swalOpts = await createProgressPopup({ title: `Running conversion`, ...options }); + const swalOpts = hasCustomFunction + ? options + : await createProgressPopup({ title: `Running conversion`, ...options }); const { close: closeProgressPopup } = swalOpts; + const fileConfiguration = []; try { @@ -168,68 +176,87 @@ export class Page extends LitElement { const sessionResults = globalState.results[subject][session]; + const configurationCopy = { ...(sessionResults.configuration ?? 
{}) }; + const sourceDataCopy = structuredClone(sessionResults.source_data); - // Resolve the correct session info from all of the metadata for this conversion - const metadata = resolveMetadata(subject, session, globalState); + if (!configurationCopy.backend) configurationCopy.backend = this.workflow.file_format.value; + // Resolve the correct session info from all of the metadata for this conversion const sessionInfo = { - ...sessionResults, - metadata, + configuration: configurationCopy, + metadata: resolveMetadata(subject, session, globalState), source_data: merge(SourceData, sourceDataCopy), }; + const optsCopy = structuredClone(conversionOptions); + + if (optsCopy.configuration === false) { + delete sessionInfo.configuration; // Skip backend configuration options if specified as such + delete optsCopy.backend; + } else { + if (typeof optsCopy.configuration === "object") merge(optsCopy.configuration, configurationCopy); + } + + delete optsCopy.configuration; + const payload = { - output_folder: conversionOptions.stub_test ? undefined : conversion_output_folder, + output_folder: optsCopy.stub_test ? undefined : conversion_output_folder, project_name: name, nwbfile_path: file, overwrite: true, // We assume override is true because the native NWB file dialog will not allow the user to select an existing file (unless they approve the overwrite) ...sessionInfo, // source_data and metadata are passed in here - ...conversionOptions, // Any additional conversion options override the defaults + ...optsCopy, // Any additional conversion options override the defaults interfaces: globalState.interfaces, alignment, timezone: this.workflow.timezone.value, }; - fileConfiguration.push(payload); + if (hasCustomFunction) { + const result = await backendFunctionToRun(payload, swalOpts); // Already handling Swal popup + const subRef = conversionOutput[subject] ?? (conversionOutput[subject] = {}); + subRef[session] = result; + } else fileConfiguration.push(payload); } - const conversionResults = await run( - `neuroconv/convert`, - { - files: fileConfiguration, - max_workers: 2, // TODO: Make this configurable and confirm default value - request_id: swalOpts.id, - }, - { - title: "Running the conversion", - onError: () => "Conversion failed with current metadata. Please try again.", - ...swalOpts, - } - ).catch(async (error) => { - let message = error.message; - - if (message.includes("The user aborted a request.")) { - this.notify("Conversion was cancelled.", "warning"); + if (fileConfiguration.length) { + const results = await run( + `neuroconv/convert`, + { + files: fileConfiguration, + max_workers: 2, // TODO: Make this configurable and confirm default value + request_id: swalOpts.id, + }, + { + title: "Running the conversion", + onError: () => "Conversion failed with current metadata. Please try again.", + ...swalOpts, + } + ).catch(async (error) => { + let message = error.message; + + if (message.includes("The user aborted a request.")) { + this.notify("Conversion was cancelled.", "warning"); + throw error; + } + + this.notify(message, "error"); throw error; - } + }); - this.notify(message, "error"); - throw error; - }); - - conversionResults.forEach((info) => { - const { file } = info; - const fileName = file.split("/").pop(); - const [subject, session] = fileName.match(/sub-(.+)_ses-(.+)\.nwb/).slice(1); - const subRef = results[subject] ?? 
(results[subject] = {}); - subRef[session] = info; - }); + results.forEach((info) => { + const { file } = info; + const fileName = file.split("/").pop(); + const [subject, session] = fileName.match(/sub-(.+)_ses-(.+)\.nwb/).slice(1); + const subRef = conversionOutput[subject] ?? (conversionOutput[subject] = {}); + subRef[session] = info; + }); + } } finally { - await closeProgressPopup(); + closeProgressPopup && (await closeProgressPopup()); } - return results; + return conversionOutput; } // NOTE: Until the shadow DOM is supported in Storybook, we can't use this render function how we'd intend to. @@ -257,7 +284,7 @@ export class Page extends LitElement { updateSections = () => { const dashboard = document.querySelector("nwb-dashboard"); - dashboard.updateSections({ sidebar: true, main: true }, this.info.globalState); + dashboard.updateSections({ sidebar: true, header: true }, this.info.globalState); }; #unsaved = false; diff --git a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedBackendConfiguration.js b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedBackendConfiguration.js new file mode 100644 index 0000000000..035e576f31 --- /dev/null +++ b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedBackendConfiguration.js @@ -0,0 +1,396 @@ +import { JSONSchemaForm, get } from "../../../JSONSchemaForm.js"; + +import { ManagedPage } from "./ManagedPage.js"; + +import { onThrow } from "../../../../errors"; +import { merge } from "../../utils.js"; + +import { html } from "lit"; + +import { run } from "../options/utils.js"; +import { until } from "lit/directives/until.js"; + +import { resolve } from "../../../../promises"; +import { InstanceManager } from "../../../InstanceManager.js"; +import { InspectorListItem } from "../../../preview/inspector/InspectorList.js"; + +import { getResourceUsageBytes } from "../../../../validation/backend-configuration"; + +import { resolveBackendResults, updateSchema } from "../../../../../../../schemas/backend-configuration.schema"; +import { getInfoFromId } from "./utils.js"; + +const itemIgnore = { + full_shape: true, + buffer_shape: true, + compression_options: true, + filter_options: true, +}; + +const backendMap = { + zarr: "Zarr", + hdf5: "HDF5", +}; + +export class GuidedBackendConfigurationPage extends ManagedPage { + constructor(...args) { + super(...args); + this.style.height = "100%"; // Fix main section + } + + getBackendConfigurations = (info, options = {}) => + run(`neuroconv/configuration`, info, options).catch((e) => { + this.notify(e.message, "error"); + throw e; + }); + + beforeSave = () => { + merge(this.localState, this.info.globalState); + }; + + form; + instances = []; + #getForm = (sub, ses) => { + const found = this.instances.find((o) => o.session === ses && o.subject === sub); + return found?.instance instanceof JSONSchemaForm ? 
found.instance : null; + }; + + header = {}; + + workflow = { + // Ensure conversion is completed when skipped + backend_configuration: { + skip: async () => { + await this.convert({ + preview: true, + configuration: false, + }); + }, + }, + }; + + footer = { + onNext: async () => { + await this.save(); // Save in case the conversion fails + + for (let { instance } of this.instances) { + if (instance instanceof JSONSchemaForm) await instance.validate(); // Will throw an error in the callback + } + + await this.validate(); // Validate all backend configurations + + await this.convert({ preview: true }, { title: "Running preview conversion on all sessions" }); // Validate by trying to set backend configuration with the latest values + + return this.to(1); + }, + }; + + #toggleRendered; + #rendered; + #updateRendered = (force) => + force || this.#rendered === true + ? (this.#rendered = new Promise( + (resolve) => (this.#toggleRendered = () => resolve((this.#rendered = true))) + )) + : this.#rendered; + + get rendered() { + return resolve(this.#rendered, () => true); + } + + async updated() { + await this.rendered; + } + + renderInstance = (info) => { + const { session, subject, info: configuration } = info; + const { results, schema: itemSchema, itemsizes } = configuration; + + let instance; + if (Object.keys(results).length === 0) { + instance = document.createElement("span"); + instance.innerText = "No configuration options available for this session"; + } else { + const schema = { type: "object", properties: {} }; + + const reorganized = Object.entries(results).reduce((acc, [name, item]) => { + const splitName = name.split("/"); + + const itemsize = itemsizes[name]; + + const resolved = { schema, results: acc }; + + const lenSplit = splitName.length; + splitName.reduce((acc, key, i) => { + const { schema, results } = acc; + + const upperProps = schema.properties ?? (schema.properties = {}); + if (!schema.required) schema.required = []; + schema.required.push(key); + + // Set directly on last iteration + if (i === lenSplit - 1) { + const { schema, resolved } = resolveBackendResults(itemSchema, item, itemsize); + upperProps[key] = schema; + results[key] = resolved; + updateSchema(schema, item, itemsize); + return; + } + + // Otherwise drill into the results + else { + const thisSchema = upperProps[key] ?? 
(upperProps[key] = {}); + if (!results[key]) results[key] = {}; + return { schema: thisSchema, results: results[key] }; + } + }, resolved); + + return acc; + }, {}); + + const existingForm = this.#getForm(subject, session); + if (existingForm) { + existingForm.schema = schema; // Update schema + existingForm.results = reorganized; // Update resolved values + return { session, subject, instance: existingForm }; + } + + instance = new JSONSchemaForm({ + schema, + results: reorganized, + ignore: { + "*": itemIgnore, + }, + onUpdate: (updatedPath) => { + this.unsavedUpdates = "conversions"; // Trigger conversion updates + + const parentPath = updatedPath.slice(0, -1); + const form = instance.getFormElement(parentPath); + const name = updatedPath.slice(-1)[0]; + + // Update used schema + const schema = form.schema; + updateSchema(schema, form.results, itemsizes[parentPath.join("/")]); + + // Update rendered description + const input = form.inputs[name]; + input.description = schema.properties[name].description; + + // // Buffer shape depends on chunk shape + // if (name === "chunk_shape") form.inputs["buffer_shape"].schema = { ...form.inputs["buffer_shape"].schema }; // Force schema update + }, + onThrow, + + validateOnChange: async (name, _, path, value) => { + const errors = []; + + if (name === "chunk_shape") { + const input = instance.getFormElement(path).inputs["chunk_shape"]; + + const mbUsage = getResourceUsageBytes(value, itemsizes[path.join("/")], 1e6); + + if (mbUsage > 20) + errors.push({ + message: + "Recommended maximum chunk size is 20MB. You may want to reduce the size of the chunks.", + type: "warning", + }); + // NOTE: Generalize for more axes + else if (mbUsage < 10 && value[0] !== input.schema.items.maximum) + errors.push({ + message: + "Recommended minimum chunk size is 10MB. You may want to increase the size of the chunks.", + type: "warning", + }); + } + + return errors.length ? errors : true; + }, + }); + } + + return { session, subject, instance }; + }; + + getMissingBackendConfigurations = () => { + const toRun = this.mapSessions(({ session, subject }) => { + const sesResult = this.info.globalState.results[subject][session].configuration; + if (!sesResult) return { subject, session, skip: false }; + + const backend = sesResult.backend ?? this.workflow.file_format.value; + + return { + subject, + session, + skip: !!sesResult.results[backend], + }; + }).filter(({ skip }) => !skip); + + return this.runConversions( + {}, + toRun, // All or specific session + { + title: "Getting backend options", + }, + this.getBackendConfigurations + ); + }; + + validate = (toRun) => { + if (!toRun) + return this.runConversions( + {}, + true, + { title: "Validating backend options" }, + this.getBackendConfigurations + ); + + const { subject, session } = toRun; + return this.runConversions( + { configuration: this.info.globalState.results[subject][session].configuration }, + [{ subject, session }], // All or specific session + { + title: "Validating backend options", + showCancelButton: false, + }, + this.getBackendConfigurations + ); + }; + + #getManager = () => { + const instances = {}; + + // Provide references to local state to each instance + this.instances = this.mapSessions(({ subject, session, info }) => { + const backend = info.configuration.backend ?? 
this.workflow.file_format.value; // Use the default backend if none is set + + return this.renderInstance({ + subject, + session, + info: { + backend, + results: info.configuration.results[backend], // Get the configuration options for the current session + itemsizes: info.configuration.itemsizes[backend], // Get the item sizes for the current session + schema: this.info.globalState.schema.configuration[subject][session][backend], // Get the schema for the current session + }, + }); + }, this.localState.results); + + this.instances.forEach(({ subject, session, instance }) => { + if (!instances[`sub-${subject}`]) instances[`sub-${subject}`] = {}; + instances[`sub-${subject}`][`ses-${session}`] = instance; + }); + + const ogManager = this.manager; + + this.manager = new InstanceManager({ + header: "Sessions", + instanceType: "Session", + instances, + + controls: [ + // // NOTE: Removes session-specific control over the backend type since Zarr is not completely supported yet + // (id) => { + // const instanceInfo = id + // .split("/") + // .reduce((acc, key) => acc[key.split("-").slice(1).join("-")], this.localState.results); + + // const backend = instanceInfo.configuration.backend ?? this.workflow.file_format.value; + + // return new JSONSchemaInput({ + // path: [], + // schema: { + // type: "string", + // placeholder: "Select backend type", + // enum: Object.keys(backendMap), + // enumLabels: backendMap, + // strict: true, + // }, + // value: backend, + // onUpdate: async (value) => { + // if (instanceInfo.configuration.backend === value) return; + // instanceInfo.configuration.backend = value; // Ensure new backend choice is persistent + // await this.save(); + // await this.#update(); + // }, + // }); + // }, + { + name: "Save & Validate", + primary: true, + onClick: async (id) => { + const { subject, session } = getInfoFromId(id); + await this.save(); + await this.validate({ session, subject }); + }, + }, + ], + }); + + if (ogManager) ogManager.replaceWith(this.manager); + + return this.manager; + }; + + #update = () => { + return this.getMissingBackendConfigurations() + .then(async (update) => { + if (Object.keys(update)) { + this.mapSessions(({ subject, session, info }) => { + const { results, schema, backend, itemsizes } = info; + + const sesResults = this.localState.results[subject][session]; + if (!sesResults.configuration) sesResults.configuration = {}; + if (!sesResults.configuration.results) sesResults.configuration.results = {}; + if (!sesResults.configuration.itemsizes) sesResults.configuration.itemsizes = {}; + + sesResults.configuration.itemsizes[backend] = itemsizes; // Set the item sizes for the current session + sesResults.configuration.results[backend] = results; // Set the configuration options for the current session + + // Set the schema for the current session + const path = [subject, session, backend]; + path.reduce((acc, key, i) => { + if (i === path.length - 1) acc[key] = schema; + if (!acc[key]) acc[key] = {}; + return acc[key]; + }, this.localState.schema.configuration); + }, update); + + await this.save(); // Save data as soon as it arrives from the server + } + + return this.#getManager(); + }) + + .catch((error) => { + const split = error.message.split(":"); + console.error(error); + return new InspectorListItem({ + message: split.length > 1 ? 
error.message.split(":")[1].slice(1) : error.message, + type: "error", + }); + }); + }; + + render() { + delete this.manager; // Delete any existing manager + + this.#updateRendered(true); + + const globalSchemas = this.info.globalState.schema; + if (!globalSchemas.configuration) globalSchemas.configuration = {}; + + this.localState = { + results: structuredClone(this.info.globalState.results), + schema: { configuration: structuredClone(globalSchemas.configuration) }, + }; + + const promise = this.#update(); + + const untilResult = until(promise, html`Loading form contents...`); + promise.then(() => this.#toggleRendered()); + return untilResult; + } +} + +customElements.get("nwbguide-guided-backend-configuration-page") || + customElements.define("nwbguide-guided-backend-configuration-page", GuidedBackendConfigurationPage); diff --git a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedPathExpansion.js b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedPathExpansion.js index af89aea7da..2805deec60 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedPathExpansion.js +++ b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedPathExpansion.js @@ -178,9 +178,6 @@ export class GuidedPathExpansionPage extends Page { #initialize = () => (this.localState = merge(this.info.globalState.structure, { results: {} })); workflow = { - subject_id: {}, - session_id: {}, - base_directory: {}, locate_data: { skip: () => { this.#initialize(); diff --git a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedSourceData.js b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedSourceData.js index 8bda2a9e54..9eeaafa610 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedSourceData.js +++ b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedSourceData.js @@ -220,11 +220,6 @@ export class GuidedSourceDataPage extends ManagedPage { if (this.#globalModal) this.#globalModal.remove(); } - updated() { - const dashboard = document.querySelector("nwb-dashboard"); - const page = dashboard.page; - } - render() { this.localState = { results: structuredClone(this.info.globalState.results ?? 
{}) }; diff --git a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedStructure.js b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedStructure.js index 03ad6d742a..6bf9f36087 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/data/GuidedStructure.js +++ b/src/electron/frontend/core/components/pages/guided-mode/data/GuidedStructure.js @@ -35,6 +35,10 @@ export class GuidedStructurePage extends Page { this.searchModal.toggle(false); }; + Object.assign(this.addButton.style, { + marginTop: "10px", + }); + this.addButton.innerText = "Add Format"; this.addButton.onClick = () => { this.search.shadowRoot.querySelector("input").focus(); diff --git a/src/electron/frontend/core/components/pages/guided-mode/options/GuidedInspectorPage.js b/src/electron/frontend/core/components/pages/guided-mode/options/GuidedInspectorPage.js index 036712a9fa..80e96c708e 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/options/GuidedInspectorPage.js +++ b/src/electron/frontend/core/components/pages/guided-mode/options/GuidedInspectorPage.js @@ -47,10 +47,6 @@ export class GuidedInspectorPage extends Page { }); } - workflow = { - multiple_sessions: {}, - }; - headerButtons = [ new Button({ label: "JSON", diff --git a/src/electron/frontend/core/components/pages/guided-mode/options/utils.js b/src/electron/frontend/core/components/pages/guided-mode/options/utils.js index 259c49c74b..7e6e8bc439 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/options/utils.js +++ b/src/electron/frontend/core/components/pages/guided-mode/options/utils.js @@ -102,8 +102,8 @@ export const run = async (pathname, payload, options = {}) => { export const runConversion = async (info, options = {}) => run(`neuroconv/convert`, info, { title: "Running the conversion", - onError: (results) => { - if (results.message.includes("already exists")) { + onError: (error) => { + if (error.message.includes("already exists")) { return "File already exists. Please specify another location to store the conversion results"; } else { return "Conversion failed with current metadata. 
Please try again."; diff --git a/src/electron/frontend/core/components/pages/guided-mode/setup/Preform.js b/src/electron/frontend/core/components/pages/guided-mode/setup/Preform.js index 7ba682da87..5e9f128abd 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/setup/Preform.js +++ b/src/electron/frontend/core/components/pages/guided-mode/setup/Preform.js @@ -2,7 +2,7 @@ import { html } from "lit"; import { JSONSchemaForm } from "../../../JSONSchemaForm.js"; import { Page } from "../../Page.js"; import { onThrow } from "../../../../errors"; - +import { merge } from "../../utils.js"; import timezoneSchema from "../../../../../../../schemas/timezone.schema"; // ------------------------------------------------------------------------------ @@ -10,6 +10,12 @@ import timezoneSchema from "../../../../../../../schemas/timezone.schema"; // ------------------------------------------------------------------------------ const questions = { + timezone: { + ...timezoneSchema, + title: "What timezone is your data in?", + required: true, + }, + multiple_sessions: { type: "boolean", title: "Will this pipeline be run on multiple sessions?", @@ -41,7 +47,7 @@ const questions = { type: "boolean", title: "Would you like to locate the source data programmatically?", dependencies: { - multiple_sessions: { default: false }, + multiple_sessions: {}, }, default: false, }, @@ -57,15 +63,32 @@ const questions = { }, }, - timezone: { - ...timezoneSchema, - title: "What timezone is your data in?", - required: true, + file_format: { + type: "string", + enum: ["hdf5", "zarr"], + enumLabels: { + hdf5: "HDF5", + zarr: "Zarr", + }, + strict: true, + title: "What file format would you like to use?", + description: "Choose a default file format for your data.", + default: "hdf5", + ignore: true, // NOTE: This ensures that users can only use the default (HDF5) format + }, + + backend_configuration: { + type: "boolean", + title: "Would you like to customize low-level data storage options?", + + description: + "Dataset chunking, compression, etc.
This also allows you to change file formats per-session", + default: false, }, upload_to_dandi: { type: "boolean", - title: "Would you like to upload your data to DANDI?", + title: "Will you publish data on DANDI?", default: true, }, }; @@ -92,7 +115,7 @@ const getSchema = (questions) => { else Object.entries(deps).forEach(([dep, opts]) => { if (!acc[dep]) acc[dep] = []; - acc[dep].push({ name, ...opts }); + acc[dep].push({ name, default: info.default, ...opts }); }); } return acc; @@ -108,6 +131,11 @@ const getSchema = (questions) => { return acc; }, []); + const ignore = Object.entries(questions).reduce((acc, [name, info]) => { + if (info.ignore) acc[name] = true; + return acc; + }, {}); + const projectWorkflowSchema = { type: "object", properties: Object.entries(questions).reduce((acc, [name, info]) => { @@ -123,6 +151,7 @@ const getSchema = (questions) => { schema: structuredClone(projectWorkflowSchema), defaults, dependents, + ignore, }; }; @@ -142,9 +171,11 @@ export class GuidedPreform extends Page { subtitle: "Answer the following questions to simplify your workflow through the GUIDE", }; + #setWorkflow = () => (this.info.globalState.project.workflow = this.state); // NOTE: Defaults already populated + beforeSave = async () => { await this.form.validate(); - this.info.globalState.project.workflow = this.state; + this.#setWorkflow(); }; footer = { @@ -155,7 +186,7 @@ export class GuidedPreform extends Page { }; updateForm = () => { - const { schema, dependents, defaults } = getSchema(questions); + const { schema, dependents, defaults, ignore } = getSchema(questions); const projectState = this.info.globalState.project ?? {}; if (!projectState.workflow) projectState.workflow = {}; @@ -168,6 +199,7 @@ export class GuidedPreform extends Page { this.form = new JSONSchemaForm({ schema, + ignore, results: this.state, validateEmptyValues: false, // Only show errors after submission validateOnChange: function (name, parent, path, value) { @@ -193,28 +225,46 @@ export class GuidedPreform extends Page { condition = (v) => dependent.condition.some((condition) => v == condition); else console.warn("Invalid condition", dependent.condition); + // Is set to true if (uniformDeps.every(({ name }) => condition(parent[name]))) { dependentParent.removeAttribute(attr); if ("required" in dependent) dependentEl.required = dependent.required; if ("__cached" in dependent) dependentEl.updateData(dependent.__cached); - } else { + } + + // Is set to false + else { if (dependentEl.value !== undefined) dependent.__cached = dependentEl.value; dependentEl.updateData(dependent.default); dependentParent.setAttribute(attr, true); if ("required" in dependent) dependentEl.required = !dependent.required; } }); + + const { upload_to_dandi, file_format } = parent; + + // Only check file format because of global re-render + if (name === "file_format") { + if (upload_to_dandi === true && file_format === "zarr") + return [ + { + type: "error", + message: + "

Zarr files are not supported by DANDI.
Please change the file format to HDF5 or disable DANDI upload.", + }, + ]; + } }, - // Immediately re-render boolean values + // Save all changes onUpdate: async (path, value) => { - if (typeof value === "boolean") { - this.unsavedUpdates = true; - this.info.globalState.project.workflow = this.state; - this.updateSections(); // Trigger section changes with new workflow - await this.save({}, false); // Save new workflow and section changes - } + const willUpdateFlow = typeof value === "boolean"; + this.unsavedUpdates = true; + this.#setWorkflow(); + if (willUpdateFlow) this.updateSections(); // Trigger section changes with new workflow + await this.save({}, false); // Save new workflow and section changes }, + onThrow, // groups: [ // { diff --git a/src/electron/frontend/core/components/pages/settings/SettingsPage.js b/src/electron/frontend/core/components/pages/settings/SettingsPage.js index d5ea234eab..5c70885068 100644 --- a/src/electron/frontend/core/components/pages/settings/SettingsPage.js +++ b/src/electron/frontend/core/components/pages/settings/SettingsPage.js @@ -32,7 +32,8 @@ import examplePipelines from "../../../../../../example_pipelines.yml"; import { run } from "../guided-mode/options/utils.js"; import { joinPath } from "../../../globals"; import { Modal } from "../../Modal"; -import { ProgressBar, humanReadableBytes } from "../../ProgressBar"; +import { ProgressBar } from "../../ProgressBar"; +import { humanReadableBytes } from "../../utils/size"; const DATA_OUTPUT_PATH = joinPath(testDataFolderPath, "single_session_data"); const DATASET_OUTPUT_PATH = joinPath(testDataFolderPath, "multi_session_dataset"); diff --git a/src/electron/frontend/core/components/table/cells/input.ts b/src/electron/frontend/core/components/table/cells/input.ts index 17572ead1d..4c02b508de 100644 --- a/src/electron/frontend/core/components/table/cells/input.ts +++ b/src/electron/frontend/core/components/table/cells/input.ts @@ -43,8 +43,6 @@ export class NestedEditor extends LitElement { const schema = this.schema - - console.log('schema', schema, 'data', data) const container = document.createElement('div') const input = this.#input = new JSONSchemaInput({ schema, diff --git a/src/electron/frontend/core/components/utils/size.ts b/src/electron/frontend/core/components/utils/size.ts new file mode 100644 index 0000000000..2e08881342 --- /dev/null +++ b/src/electron/frontend/core/components/utils/size.ts @@ -0,0 +1,20 @@ +export function humanReadableBytes(size: number | string) { + + // Define the units + const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; + + // Initialize the index to 0 + let index = 0; + + // Convert the size to a floating point number + size = parseFloat(size); + + // Loop until the size is less than 1024 and increment the unit + while (size >= 1000 && index < units.length - 1) { + size /= 1000; + index += 1; + } + + // Return the size formatted with 2 decimal places and the appropriate unit + return `${size.toFixed(2)} ${units[index]}`; +} diff --git a/src/electron/frontend/core/pages.js b/src/electron/frontend/core/pages.js index 11f042adcf..3371f27950 100644 --- a/src/electron/frontend/core/pages.js +++ b/src/electron/frontend/core/pages.js @@ -4,9 +4,13 @@ import { GuidedHomePage } from "./components/pages/guided-mode/GuidedHome"; import { GuidedNewDatasetPage } from "./components/pages/guided-mode/setup/GuidedNewDatasetInfo"; import { GuidedStructurePage } from "./components/pages/guided-mode/data/GuidedStructure"; import { sections } from 
"./components/pages/globals"; + import { GuidedSubjectsPage } from "./components/pages/guided-mode/setup/GuidedSubjects"; + import { GuidedSourceDataPage } from "./components/pages/guided-mode/data/GuidedSourceData"; import { GuidedMetadataPage } from "./components/pages/guided-mode/data/GuidedMetadata"; +import { GuidedBackendConfigurationPage } from "./components/pages/guided-mode/data/GuidedBackendConfiguration"; + import { GuidedUploadPage } from "./components/pages/guided-mode/options/GuidedUpload"; import { GuidedResultsPage } from "./components/pages/guided-mode/results/GuidedResults"; import { Dashboard } from "./components/Dashboard"; @@ -125,6 +129,12 @@ const pages = { section: sections[1], }), + backend: new GuidedBackendConfigurationPage({ + title: "Backend Configuration", + label: "Backend configuration", + section: sections[1], + }), + inspect: new GuidedInspectorPage({ title: "Inspector Report", label: "Validate metadata", diff --git a/src/electron/frontend/core/validation/backend-configuration.ts b/src/electron/frontend/core/validation/backend-configuration.ts new file mode 100644 index 0000000000..16431dc93c --- /dev/null +++ b/src/electron/frontend/core/validation/backend-configuration.ts @@ -0,0 +1,7 @@ +import { humanReadableBytes } from "../components/utils/size"; + +const prod = (arr: number[]) => arr.reduce((accumulator, currentValue) => accumulator * currentValue, 1); + +export const getResourceUsageBytes = (shape: number[], itemsize: number, scale=1e9) => prod(shape) * (itemsize / scale) // Default to GB + +export const getResourceUsage = (shape: number[], itemsize: number) => humanReadableBytes(getResourceUsageBytes(shape, itemsize, 1)) diff --git a/src/pyflask/app.py b/src/pyflask/app.py index 28b0f8a308..3301c21a88 100644 --- a/src/pyflask/app.py +++ b/src/pyflask/app.py @@ -107,10 +107,15 @@ def post(self): type = payload["type"] header = payload["header"] inputs = payload["inputs"] - traceback = payload["traceback"] + traceback = payload.get("traceback", "") + + message = f"{header}\n{'-'*len(header)}\n\n{json.dumps(inputs, indent=2)}\n" + + if traceback: + message += f"\n{traceback}\n" - message = f"{header}\n{'-'*len(header)}\n\n{json.dumps(inputs, indent=2)}\n\n{traceback}\n" selected_logger = getattr(api.logger, type) + api.logger.info(f"Logging {type} message: {header}") selected_logger(message) diff --git a/src/pyflask/manageNeuroconv/__init__.py b/src/pyflask/manageNeuroconv/__init__.py index 0793acb240..8401b0f374 100644 --- a/src/pyflask/manageNeuroconv/__init__.py +++ b/src/pyflask/manageNeuroconv/__init__.py @@ -7,6 +7,7 @@ generate_test_data, get_all_converter_info, get_all_interface_info, + get_backend_configuration, get_interface_alignment, get_metadata_schema, get_source_schema, diff --git a/src/pyflask/manageNeuroconv/manage_neuroconv.py b/src/pyflask/manageNeuroconv/manage_neuroconv.py index f875874459..2d7825411b 100644 --- a/src/pyflask/manageNeuroconv/manage_neuroconv.py +++ b/src/pyflask/manageNeuroconv/manage_neuroconv.py @@ -510,6 +510,9 @@ def on_recording_interface(name, recording_interface): if has_ecephys: + if "definitions" not in ecephys_schema: + ecephys_schema["definitions"] = ecephys_properties["definitions"] + has_electrodes = "ElectrodeColumns" in ecephys_metadata original_units_schema = ecephys_properties.pop("UnitProperties", None) @@ -863,52 +866,45 @@ def get_interface_alignment(info: dict) -> dict: ) -def convert_to_nwb( +def create_file( info: dict, - log_url=None, -) -> str: - """Function used to convert the source 
data to NWB format using the specified metadata.""" + log_url: Optional[str] = None, +) -> dict: import requests - from neuroconv import NWBConverter from tqdm_publisher import TQDMProgressSubscriber - url = info.get("url", None) - request_id = info.get("request_id", None) - - nwbfile_path = Path(info["nwbfile_path"]) - custom_output_directory = info.get("output_folder") project_name = info.get("project_name") + run_stub_test = info.get("stub_test", False) - default_output_base = STUB_SAVE_FOLDER_PATH if run_stub_test else CONVERSION_SAVE_FOLDER_PATH - default_output_directory = default_output_base / project_name - try: + overwrite = info.get("overwrite", False) - # add a subdirectory to a filepath if stub_test is true - resolved_output_base = Path(custom_output_directory) if custom_output_directory else default_output_base - resolved_output_directory = resolved_output_base / project_name - resolved_output_path = resolved_output_directory / nwbfile_path + # Progress update info + url = info.get("url") + request_id = info.get("request_id") - # Remove symlink placed at the default_output_directory if this will hold real data - if resolved_output_directory == default_output_directory and default_output_directory.is_symlink(): - default_output_directory.unlink() + # Backend configuration info + backend_configuration = info.get("configuration", {}) + backend = backend_configuration.get("backend", "hdf5") - resolved_output_path.parent.mkdir(exist_ok=True, parents=True) # Ensure all parent directories exist + converter, metadata, path_info = get_conversion_info(info) - resolved_source_data = replace_none_with_nan( - info["source_data"], resolve_references(get_custom_converter(info["interfaces"]).get_source_schema()) - ) + nwbfile_path = path_info["file"] - converter = instantiate_custom_converter( - source_data=resolved_source_data, - interface_class_dict=info["interfaces"], - alignment_info=info.get("alignment", dict()), - ) + try: + + # Delete files manually if using Zarr + if overwrite: + if nwbfile_path.exists(): + if nwbfile_path.is_dir(): + rmtree(nwbfile_path) + else: + nwbfile_path.unlink() def update_conversion_progress(message): update_dict = dict(request_id=request_id, **message) - if (url) or not run_stub_test: + if url or not run_stub_test: requests.post(url=url, json=update_dict) else: progress_handler.announce(update_dict) @@ -938,126 +934,273 @@ def update_conversion_progress(message): progress_bar_options=progress_bar_options, ) - # Ensure Ophys NaN values are resolved - resolved_metadata = replace_none_with_nan(info["metadata"], resolve_references(converter.get_metadata_schema())) + run_conversion_kwargs = dict( + metadata=metadata, + nwbfile_path=nwbfile_path, + overwrite=overwrite, + conversion_options=options, + backend=backend, + ) - ecephys_metadata = resolved_metadata.get("Ecephys") + if not run_stub_test: + run_conversion_kwargs.update(dict(backend_configuration=update_backend_configuration(info))) - if ecephys_metadata: + converter.run_conversion(**run_conversion_kwargs) - # Quick fix to remove units - has_units = "Units" in ecephys_metadata + except Exception as e: + if log_url: + requests.post( + url=log_url, + json=dict( + header=f"Conversion failed for {project_name} — {nwbfile_path} (convert_to_nwb)", + inputs=dict(info=info), + traceback=traceback.format_exc(), + type="error", + ), + ) - if has_units: + raise e - ## NOTE: Currently do not allow editing units properties - # shared_units_columns = ecephys_metadata["UnitColumns"] - # for interface_name, 
interface_unit_results in ecephys_metadata["Units"].items(): - # interface = converter.data_interface_objects[interface_name] - # update_sorting_properties_from_table_as_json( - # interface, - # unit_table_json=interface_unit_results, - # unit_column_info=shared_units_columns, - # ) +def update_backend_configuration(info: dict) -> dict: - # ecephys_metadata["UnitProperties"] = [ - # {"name": entry["name"], "description": entry["description"]} for entry in shared_units_columns - # ] + from neuroconv.tools.nwb_helpers import ( + get_default_backend_configuration, + make_nwbfile_from_metadata, + ) - del ecephys_metadata["Units"] - del ecephys_metadata["UnitColumns"] + PROPS_TO_IGNORE = ["full_shape"] - has_electrodes = "Electrodes" in ecephys_metadata - if has_electrodes: + info_from_frontend = info.get("configuration", {}) + backend = info_from_frontend.get("backend", "hdf5") + backend_configuration_from_frontend = info_from_frontend.get("results", {}).get(backend, {}) - shared_electrode_columns = ecephys_metadata["ElectrodeColumns"] + converter, metadata, __ = get_conversion_info(info) - for interface_name, interface_electrode_results in ecephys_metadata["Electrodes"].items(): - name_split = interface_name.split(" — ") + nwbfile = make_nwbfile_from_metadata(metadata=metadata) + converter.add_to_nwbfile(nwbfile, metadata=metadata) - if len(name_split) == 1: - sub_interface = name_split[0] - elif len(name_split) == 2: - sub_interface, sub_sub_interface = name_split + backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend=backend) - interface_or_subconverter = converter.data_interface_objects[sub_interface] + for location_in_file, dataset_configuration in backend_configuration_from_frontend.items(): + for key, value in dataset_configuration.items(): + if key not in PROPS_TO_IGNORE: + # Pydantic models only allow setting of attributes + setattr(backend_configuration.dataset_configurations[location_in_file], key, value) - if isinstance(interface_or_subconverter, NWBConverter): - subconverter = interface_or_subconverter + return backend_configuration - update_recording_properties_from_table_as_json( - recording_interface=subconverter.data_interface_objects[sub_sub_interface], - electrode_table_json=interface_electrode_results, - electrode_column_info=shared_electrode_columns, - ) - else: - interface = interface_or_subconverter - update_recording_properties_from_table_as_json( - recording_interface=interface, - electrode_table_json=interface_electrode_results, - electrode_column_info=shared_electrode_columns, - ) +def get_backend_configuration(info: dict) -> dict: - ecephys_metadata["Electrodes"] = [ - {"name": entry["name"], "description": entry["description"]} for entry in shared_electrode_columns - ] + import numpy as np - del ecephys_metadata["ElectrodeColumns"] + PROPS_TO_REMOVE = [ + # Immutable + "object_id", + "dataset_name", + "location_in_file", + "dtype", + ] - # Correct timezone in metadata fields - resolved_metadata["NWBFile"]["session_start_time"] = datetime.fromisoformat( - resolved_metadata["NWBFile"]["session_start_time"] - ).replace(tzinfo=zoneinfo.ZoneInfo(info["timezone"])) + info["overwrite"] = True # Always overwrite the file - if "date_of_birth" in resolved_metadata["Subject"]: - resolved_metadata["Subject"]["date_of_birth"] = datetime.fromisoformat( - resolved_metadata["Subject"]["date_of_birth"] - ).replace(tzinfo=zoneinfo.ZoneInfo(info["timezone"])) + backend = info.get("backend", "hdf5") + configuration = update_backend_configuration(info) - # 
Actually run the conversion - converter.run_conversion( - metadata=resolved_metadata, - nwbfile_path=resolved_output_path, - overwrite=info.get("overwrite", False), - conversion_options=options, - ) + def custom_encoder(obj): + if isinstance(obj, np.ndarray): + return obj.tolist() + if isinstance(obj, np.dtype): + return str(obj) + raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable") - # Create a symlink between the fake data and custom data - if not resolved_output_directory == default_output_directory: - if default_output_directory.exists(): - # If default default_output_directory is not a symlink, delete all contents and create a symlink there - if not default_output_directory.is_symlink(): - rmtree(default_output_directory) + # Provide metadata on configuration dictionary + configuration_dict = configuration.dict() - # If the location is already a symlink, but points to a different output location - # remove the existing symlink before creating a new one - elif ( - default_output_directory.is_symlink() - and default_output_directory.readlink() is not resolved_output_directory - ): - default_output_directory.unlink() + itemsizes = {} + for key, dataset in configuration_dict["dataset_configurations"].items(): + itemsizes[key] = dataset["dtype"].itemsize - # Create a pointer to the actual conversion outputs - if not default_output_directory.exists(): - os.symlink(resolved_output_directory, default_output_directory) + serialized = json.loads(json.dumps(configuration_dict, default=custom_encoder)) - return dict(file=str(resolved_output_path)) + dataset_configurations = serialized["dataset_configurations"] # Only provide dataset configurations - except Exception as e: - if log_url: - requests.post( - url=log_url, - json=dict( - header=f"Conversion failed for {project_name} — {nwbfile_path} (convert_to_nwb)", - inputs=dict(info=info), - traceback=traceback.format_exc(), - type="error", - ), - ) + for dataset in dataset_configurations.values(): + for key in PROPS_TO_REMOVE: + del dataset[key] - raise e + schema = list(configuration.schema()["$defs"].values())[0] + for key in PROPS_TO_REMOVE: + existed = schema["properties"].pop(key, None) # Why is dtype not included but the rest are? 
+ if existed: + schema["required"].remove(key) + + return dict(results=dataset_configurations, schema=schema, backend=backend, itemsizes=itemsizes) + + +def get_conversion_path_info(info: dict) -> dict: + """Function used to resolve the path details for the conversion.""" + + nwbfile_path = Path(info["nwbfile_path"]) + custom_output_directory = info.get("output_folder") + project_name = info.get("project_name") + run_stub_test = info.get("stub_test", False) + default_output_base = STUB_SAVE_FOLDER_PATH if run_stub_test else CONVERSION_SAVE_FOLDER_PATH + default_output_directory = default_output_base / project_name + + # add a subdirectory to a filepath if stub_test is true + resolved_output_base = Path(custom_output_directory) if custom_output_directory else default_output_base + resolved_output_directory = resolved_output_base / project_name + resolved_output_path = resolved_output_directory / nwbfile_path + + return dict(file=resolved_output_path, directory=resolved_output_directory, default=default_output_directory) + + +def get_conversion_info(info: dict) -> dict: + """Function used to organize the required information for conversion.""" + + from neuroconv import NWBConverter + + path_info = get_conversion_path_info(info) + resolved_output_path = path_info["file"] + resolved_output_directory = path_info["directory"] + default_output_directory = path_info["default"] + + # Remove symlink placed at the default_output_directory if this will hold real data + if resolved_output_directory == default_output_directory and default_output_directory.is_symlink(): + default_output_directory.unlink() + + resolved_output_path.parent.mkdir(exist_ok=True, parents=True) # Ensure all parent directories exist + + resolved_source_data = replace_none_with_nan( + info["source_data"], resolve_references(get_custom_converter(info["interfaces"]).get_source_schema()) + ) + + converter = instantiate_custom_converter( + source_data=resolved_source_data, + interface_class_dict=info["interfaces"], + alignment_info=info.get("alignment", dict()), + ) + + # Ensure Ophys NaN values are resolved + resolved_metadata = replace_none_with_nan(info["metadata"], resolve_references(converter.get_metadata_schema())) + + ecephys_metadata = resolved_metadata.get("Ecephys") + + if ecephys_metadata: + + # Quick fix to remove units + has_units = "Units" in ecephys_metadata + + if has_units: + + ## NOTE: Currently do not allow editing units properties + # shared_units_columns = ecephys_metadata["UnitColumns"] + # for interface_name, interface_unit_results in ecephys_metadata["Units"].items(): + # interface = converter.data_interface_objects[interface_name] + + # update_sorting_properties_from_table_as_json( + # interface, + # unit_table_json=interface_unit_results, + # unit_column_info=shared_units_columns, + # ) + + # ecephys_metadata["UnitProperties"] = [ + # {"name": entry["name"], "description": entry["description"]} for entry in shared_units_columns + # ] + + del ecephys_metadata["Units"] + del ecephys_metadata["UnitColumns"] + + has_electrodes = "Electrodes" in ecephys_metadata + if has_electrodes: + + shared_electrode_columns = ecephys_metadata["ElectrodeColumns"] + + for interface_name, interface_electrode_results in ecephys_metadata["Electrodes"].items(): + name_split = interface_name.split(" — ") + + if len(name_split) == 1: + sub_interface = name_split[0] + elif len(name_split) == 2: + sub_interface, sub_sub_interface = name_split + + interface_or_subconverter = converter.data_interface_objects[sub_interface] + + if 
isinstance(interface_or_subconverter, NWBConverter): + subconverter = interface_or_subconverter + + update_recording_properties_from_table_as_json( + recording_interface=subconverter.data_interface_objects[sub_sub_interface], + electrode_table_json=interface_electrode_results, + electrode_column_info=shared_electrode_columns, + ) + else: + interface = interface_or_subconverter + + update_recording_properties_from_table_as_json( + recording_interface=interface, + electrode_table_json=interface_electrode_results, + electrode_column_info=shared_electrode_columns, + ) + + ecephys_metadata["Electrodes"] = [ + {"name": entry["name"], "description": entry["description"]} for entry in shared_electrode_columns + ] + + del ecephys_metadata["ElectrodeColumns"] + + # Correct timezone in metadata fields + resolved_metadata["NWBFile"]["session_start_time"] = datetime.fromisoformat( + resolved_metadata["NWBFile"]["session_start_time"] + ).replace(tzinfo=zoneinfo.ZoneInfo(info["timezone"])) + + if "date_of_birth" in resolved_metadata["Subject"]: + resolved_metadata["Subject"]["date_of_birth"] = datetime.fromisoformat( + resolved_metadata["Subject"]["date_of_birth"] + ).replace(tzinfo=zoneinfo.ZoneInfo(info["timezone"])) + + return ( + converter, + resolved_metadata, + path_info, + ) + + +def convert_to_nwb( + info: dict, + log_url: Optional[str] = None, +) -> str: + """Function used to convert the source data to NWB format using the specified metadata.""" + + path_info = get_conversion_path_info(info) + output_path = path_info["file"] + resolved_output_directory = path_info["directory"] + default_output_directory = path_info["default"] + + create_file(info, log_url=log_url) + + # Create a symlink between the fake data and custom data + if not resolved_output_directory == default_output_directory: + if default_output_directory.exists(): + # If default default_output_directory is not a symlink, delete all contents and create a symlink there + if not default_output_directory.is_symlink(): + rmtree(default_output_directory) + + # If the location is already a symlink, but points to a different output location + # remove the existing symlink before creating a new one + elif ( + default_output_directory.is_symlink() + and default_output_directory.readlink() is not resolved_output_directory + ): + default_output_directory.unlink() + + # Create a pointer to the actual conversion outputs + if not default_output_directory.exists(): + os.symlink(resolved_output_directory, default_output_directory) + + return dict(file=str(output_path)) def convert_all_to_nwb( diff --git a/src/pyflask/namespaces/neuroconv.py b/src/pyflask/namespaces/neuroconv.py index 3d37dcc156..fc3fba9814 100644 --- a/src/pyflask/namespaces/neuroconv.py +++ b/src/pyflask/namespaces/neuroconv.py @@ -7,6 +7,7 @@ convert_all_to_nwb, get_all_converter_info, get_all_interface_info, + get_backend_configuration, get_interface_alignment, get_metadata_schema, get_source_schema, @@ -93,6 +94,13 @@ def post(self): return get_interface_alignment(neuroconv_namespace.payload) +@neuroconv_namespace.route("/configuration") +class GetBackendConfiguration(Resource): + @neuroconv_namespace.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"}) + def post(self): + return get_backend_configuration(neuroconv_namespace.payload) + + validate_parser = neuroconv_namespace.parser() validate_parser.add_argument("parent", type=dict, required=True) validate_parser.add_argument("function_name", type=str, required=True) diff --git 
a/src/schemas/backend-configuration.schema.ts b/src/schemas/backend-configuration.schema.ts
new file mode 100644
index 0000000000..2b36b5aedf
--- /dev/null
+++ b/src/schemas/backend-configuration.schema.ts
@@ -0,0 +1,80 @@
+import { getResourceUsage } from "../../src/electron/frontend/core/validation/backend-configuration"
+
+export const resolveBackendResults = (schema, results, itemsize) => {
+    const copy = structuredClone(schema)
+
+    // results.buffer_shape = results.chunk_shape.map(() => null); // Provide an unspecified buffer shape for now
+
+    // Do not handle compression options or any filter options for now
+    if (copy.properties.compression_options) results.compression_options = null;
+    if (copy.properties.filter_methods) results.filter_methods = []
+    if (copy.properties.filter_options) results.filter_options = null;
+
+
+    const { full_shape } = results;
+    if (copy.properties.filter_methods) copy.properties.filter_methods.description = "The ordered collection of filtering methods to apply to this dataset prior to compression. Set blank to disable filtering"
+    copy.properties.compression_method.description = "The specified compression method to apply to this dataset. Set blank to disable compression"
+    delete copy.properties.compression_method.default // Remove gzip as the default compression method
+    copy.description = `Full Shape: ${full_shape.join(' x ')}. Source size: ${getResourceUsage(full_shape, itemsize)}`; // This is static
+
+    updateSchema(copy, results, itemsize)
+
+    return { schema: copy, resolved: results }
+}
+
+
+const propertiesToUpdate = [
+    'chunk_shape',
+    // 'buffer_shape'
+]
+
+// const bufferShapeDescription = (value, itemsize) => {
+//     return `Expected RAM usage: ${getResourceUsage(value, itemsize)}.`;
+// }
+
+const chunkShapeDescription = (value, itemsize) => {
+    const hasNull = value.includes(null) || value.includes(undefined); // Both null after JSON processing
+    const diskSpaceMessage = hasNull ? 'Disk space usage will be determined automatically' : `Disk space usage per chunk: ${getResourceUsage(value, itemsize)}`;
+    return `${diskSpaceMessage}. 
Leave blank to auto-specify the axis`; +} + + +export const updateSchema = (schema, results, itemsize) => { + + const { + chunk_shape, + // buffer_shape, + full_shape + } = results; + + + const chunkSchema = schema.properties.chunk_shape; + const chunkArraySchema = chunkSchema.anyOf?.[0] || chunkSchema; + // const bufferSchema = schema.properties.buffer_shape; + + const shapeMax = full_shape[0] + + if (propertiesToUpdate.includes('chunk_shape')) { + chunkArraySchema.items.minimum = 1 + chunkArraySchema.maxItems = chunkArraySchema.minItems = chunk_shape.length; + chunkArraySchema.items.maximum = shapeMax + chunkArraySchema.description = chunkShapeDescription( + chunk_shape, + itemsize + ); + + } + + // if (propertiesToUpdate.includes('buffer_shape')) { + // bufferSchema.items.minimum = 1 + // bufferSchema.items.maximum = shapeMax + // bufferSchema.items.step = chunk_shape[0] // Constrain to increments of chunk size + // bufferSchema.strict = true + + // bufferSchema.maxItems = bufferSchema.minItems = buffer_shape.length; + // bufferSchema.description = bufferShapeDescription( + // buffer_shape, + // itemsize + // ); + // } +} diff --git a/src/schemas/base-metadata.schema.ts b/src/schemas/base-metadata.schema.ts index 466993ebe3..1dfa0de588 100644 --- a/src/schemas/base-metadata.schema.ts +++ b/src/schemas/base-metadata.schema.ts @@ -174,10 +174,11 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa order: ["channel_name", "group_name", "shank_electrode_number", ...UV_PROPERTIES] }) - if (ecephys.properties["Units"]) { + const units = ecephys.properties["Units"] - ecephys.properties["Units"].title = "Summarized Units" + if (units) { + units.title = "Summarized Units" updateEcephysTable("Units", copy, { properties: { @@ -187,7 +188,6 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa }, order: ["unit_id", "unit_name", "clu_id", "group_id"] }) - } } diff --git a/src/schemas/dandi-upload.schema.ts b/src/schemas/dandi-upload.schema.ts index d39a05234b..16b416a8db 100644 --- a/src/schemas/dandi-upload.schema.ts +++ b/src/schemas/dandi-upload.schema.ts @@ -32,12 +32,12 @@ onServerOpen(async () => { .then((res) => res.json()) .then(({ physical, logical }) => { const { number_of_jobs, number_of_threads } = additionalSettings as any; - number_of_jobs.max = physical; - number_of_threads.max = logical / physical; + number_of_jobs.maximum = physical; + number_of_threads.maximum = logical / physical; setReady.cpus({ number_of_jobs, number_of_threads }) }) .catch(() => { - if (isStorybook) setReady.cpus({ number_of_jobs: { max: 1, default: 1 }, number_of_threads: { max: 1, default: 1 } }) + if (isStorybook) setReady.cpus({ number_of_jobs: { maximum: 1, default: 1 }, number_of_threads: { maximum: 1, default: 1 } }) }); }); diff --git a/src/schemas/json/dandi/create.json b/src/schemas/json/dandi/create.json index aeca70a26b..faedd35041 100644 --- a/src/schemas/json/dandi/create.json +++ b/src/schemas/json/dandi/create.json @@ -27,7 +27,8 @@ "main": "Main Archive" }, "enum": ["main", "staging"], - "description": "Which DANDI server to upload to.
Note: The Development Server is recommended for developers, or users learning to use DANDI"
+      "description": "Which DANDI server to upload to. Note: The Development Server is recommended for developers, or users learning to use DANDI",
+      "strict": true
     },
     "description": {
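
Reviewer note (not part of the diff): a minimal sketch of how the new pieces above are intended to compose, based only on the hunks in this patch. The frontend would POST the same conversion payload to the new /neuroconv/configuration route (which returns results, schema, backend, and itemsizes per get_backend_configuration), then feed each dataset entry through resolveBackendResults to build its form schema. The base URL, import paths, and function names outside this patch are assumptions.

// Sketch only: assumes the Flask server is reachable at `baseUrl` and that
// `conversionInfo` matches the payload already used by the conversion endpoints.
import { resolveBackendResults } from "./schemas/backend-configuration.schema";

const baseUrl = "http://127.0.0.1:4242"; // assumed port; the app resolves this dynamically

export async function loadBackendOptions(conversionInfo: Record<string, unknown>) {
    // The route returns { results, schema, backend, itemsizes } per get_backend_configuration
    const response = await fetch(`${baseUrl}/neuroconv/configuration`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(conversionInfo),
    });

    if (!response.ok) throw new Error(`Configuration request failed: ${response.status}`);

    const { results, schema, backend, itemsizes } = await response.json();

    // One resolved { schema, resolved } pair per dataset location in the NWB file
    const datasets = Object.entries(results).map(([location, datasetResults]) => ({
        location,
        ...resolveBackendResults(schema, datasetResults, itemsizes[location]),
    }));

    return { backend, datasets };
}

Each resolved entry's schema then carries the chunk-shape bounds and usage descriptions added by updateSchema, and its resolved object is what would be sent back under configuration.results[backend] for update_backend_configuration to apply to the default backend configuration.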
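
Also for reviewers: the disk and source-size figures surfaced in those descriptions come from the helper added in validation/backend-configuration.ts. A worked example of that arithmetic, with illustrative values only (the shapes, itemsize, and import path are assumptions):

import { getResourceUsageBytes } from "./electron/frontend/core/validation/backend-configuration";

// A hypothetical int16 ephys dataset: 30_000 frames x 384 channels, itemsize 2 bytes
const fullShape = [30_000, 384];
const chunkShape = [30_000, 64];
const itemsize = 2;

// prod(shape) * itemsize, with scale = 1 to stay in bytes
const sourceBytes = getResourceUsageBytes(fullShape, itemsize, 1); // 30_000 * 384 * 2 = 23_040_000 bytes
const chunkBytes = getResourceUsageBytes(chunkShape, itemsize, 1); // 30_000 * 64 * 2 = 3_840_000 bytes

// The default scale of 1e9 reports the same quantity in gigabytes
const sourceGB = getResourceUsageBytes(fullShape, itemsize); // ~0.023 GB

console.log({ sourceBytes, chunkBytes, sourceGB });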