Commit

Merge branch 'backend-configuration' of https://github.com/NeurodataWithoutBorders/nwb-guide into backend-configuration
garrettmflynn committed Apr 9, 2024
2 parents d687e5f + 8d839a6 commit 16150a1
Showing 8 changed files with 109 additions and 130 deletions.
2 changes: 1 addition & 1 deletion pyflask/apis/neuroconv.py
@@ -22,7 +22,7 @@
     upload_folder_to_dandi,
     upload_multiple_filesystem_objects_to_dandi,
     get_interface_alignment,
-    get_backend_configuration
+    get_backend_configuration,
 )
 
 from errorHandlers import notBadRequestException
6 changes: 3 additions & 3 deletions pyflask/app.py
@@ -111,15 +111,15 @@ def get_cpu_count():
 @app.route("/compression/hdf5")
 def get_hdf5_compression_methods():
     from neuroconv.tools.nwb_helpers import AVAILABLE_HDF5_COMPRESSION_METHODS
+
     return list(AVAILABLE_HDF5_COMPRESSION_METHODS)
 
 
 @app.route("/compression/zarr")
 def get_zarr_compression_methods():
     from neuroconv.tools.nwb_helpers import AVAILABLE_ZARR_COMPRESSION_METHODS
-    return list(AVAILABLE_ZARR_COMPRESSION_METHODS)
-
-
+
+    return list(AVAILABLE_ZARR_COMPRESSION_METHODS)
 
 
 @app.route("/get-recommended-species")
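For context, these two routes simply expose NeuroConv's available compression methods to the GUIDE frontend. A minimal client-side sketch, assuming the requests library and that the Flask server is reachable at localhost:5000 (the actual port is assigned when the app starts):

import requests

# Hypothetical base URL; the GUIDE assigns the real port at startup.
BASE_URL = "http://localhost:5000"

# Each route returns a JSON array of compression method names.
hdf5_methods = requests.get(f"{BASE_URL}/compression/hdf5").json()
zarr_methods = requests.get(f"{BASE_URL}/compression/zarr").json()

print("HDF5:", hdf5_methods)
print("Zarr:", zarr_methods)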
21 changes: 9 additions & 12 deletions pyflask/manageNeuroconv/manage_neuroconv.py
@@ -703,13 +703,7 @@ def get_backend_configuration(info: dict) -> dict:
 
     converter, metadata, path_info = get_conversion_info(info)
 
-
-    PROPS_TO_REMOVE = [
-        "object_id",
-        "dataset_name",
-        "location_in_file",
-        "dtype"
-    ]
+    PROPS_TO_REMOVE = ["object_id", "dataset_name", "location_in_file", "dtype"]
 
     with make_or_load_nwbfile(
         nwbfile_path=path_info["file"],
@@ -737,12 +731,12 @@ def custom_encoder(obj):
 
     serialized = json.loads(json.dumps(configuration.dict(), default=custom_encoder))
 
-    dataset_configurations = serialized["dataset_configurations"] # Only provide dataset configurations
+    dataset_configurations = serialized["dataset_configurations"]  # Only provide dataset configurations
 
     for dataset in dataset_configurations.values():
         for key in PROPS_TO_REMOVE:
             del dataset[key]
 
     return dataset_configurations
 
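For reference, the loop above strips NeuroConv's internal bookkeeping fields (PROPS_TO_REMOVE) before the dataset configurations reach the frontend. A standalone sketch of the same pattern, using an illustrative configuration dict rather than the real serialized model:

PROPS_TO_REMOVE = ["object_id", "dataset_name", "location_in_file", "dtype"]

# Illustrative stand-in for the dict produced by serializing NeuroConv's
# backend configuration through custom_encoder; the real shape may differ.
dataset_configurations = {
    "acquisition/ElectricalSeries/data": {
        "object_id": "abc-123",
        "dataset_name": "data",
        "location_in_file": "acquisition/ElectricalSeries/data",
        "dtype": "int16",
        "chunk_shape": [78125, 64],
        "compression_method": "gzip",
    }
}

for dataset in dataset_configurations.values():
    for key in PROPS_TO_REMOVE:
        del dataset[key]

# Only the frontend-relevant fields remain:
# {"acquisition/ElectricalSeries/data": {"chunk_shape": [78125, 64], "compression_method": "gzip"}}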
@@ -824,7 +818,11 @@ def update_conversion_progress(**kwargs):
 
     del ecephys_metadata["ElectrodeColumns"]
 
-    return converter, resolved_metadata, dict(file=resolved_output_path, directory=resolved_output_directory, default=default_output_directory)
+    return (
+        converter,
+        resolved_metadata,
+        dict(file=resolved_output_path, directory=resolved_output_directory, default=default_output_directory),
+    )
 
 
 def convert_to_nwb(info: dict) -> str:
@@ -834,7 +832,6 @@ def convert_to_nwb(info: dict) -> str:
     source_data = info.get("source_data", False)
     backend_configuration = info.get("configuration")
 
-
     converter, metadata, path_info = get_conversion_info(info)
 
     output_path = path_info["file"]
@@ -846,7 +843,7 @@
     options = (
         {
             interface: (
-                { "stub_test": run_stub_test } # , "iter_opts": {"report_hook": update_conversion_progress}}
+                {"stub_test": run_stub_test}  # , "iter_opts": {"report_hook": update_conversion_progress}}
                 if available_options.get("properties").get(interface).get("properties", {}).get("stub_test")
                 else {}
             )
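The dict comprehension above enables stub_test only for interfaces whose options schema declares it. A self-contained sketch of that selection logic, with a toy available_options schema standing in for the converter's real one:

run_stub_test = True

# Toy schema; in the app this comes from the converter's options schema.
available_options = {
    "properties": {
        "RecordingInterface": {"properties": {"stub_test": {"type": "boolean"}}},
        "SortingInterface": {"properties": {}},
    }
}

options = {
    interface: (
        {"stub_test": run_stub_test}
        if available_options["properties"][interface].get("properties", {}).get("stub_test")
        else {}
    )
    for interface in available_options["properties"]
}

assert options == {"RecordingInterface": {"stub_test": True}, "SortingInterface": {}}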
2 changes: 1 addition & 1 deletion schemas/backend-configuration.schema.ts
@@ -47,7 +47,7 @@ export const getSchema = (method='hdf5') => {
     copy.properties["compression_method"].enum = resolved[method] ?? sharedCompressionMethods
     return copy
 }
-
+
 const setReady: any = {}
 
2 changes: 1 addition & 1 deletion schemas/base-metadata.schema.ts
@@ -157,7 +157,7 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa
 })
 
 const units = ecephys.properties["Units"]
-
+
 if (units) {
 
     units.title = "Summarized Units"
11 changes: 4 additions & 7 deletions src/renderer/src/stories/JSONSchemaInput.js
@@ -926,7 +926,7 @@ export class JSONSchemaInput extends LitElement {
         const allowPatternProperties = isPatternProperties(this.pattern);
         const allowAdditionalProperties = isAdditionalProperties(this.pattern);
 
-        const editableInline = [ 'string', 'number' ]
+        const editableInline = ["string", "number"];
 
         // Provide default item types
         if (isArray) {
@@ -946,15 +946,12 @@
 
         const fileSystemFormat = isFilesystemSelector(name, itemSchema?.format);
         if (fileSystemFormat) return createFilesystemSelector(fileSystemFormat);
 
         // Create tables if possible
         else if (editableInline.includes(itemSchema?.type) && !itemSchema.properties) {
-
-
             const postprocess = (v) => {
-                if (itemSchema?.type === 'number') return parseFloat(v)
-                else return v
-            }
+                if (itemSchema?.type === "number") return parseFloat(v);
+                else return v;
+            };
 
             const list = new List({
                 items: this.value,
8 changes: 1 addition & 7 deletions src/renderer/src/stories/pages/Page.js
@@ -140,12 +140,7 @@ export class Page extends LitElement {
         }
     }
 
-    async runConversions(
-        conversionOptions = {},
-        toRun,
-        options = {},
-        backendFunctionToRun = runConversion
-    ) {
+    async runConversions(conversionOptions = {}, toRun, options = {}, backendFunctionToRun = runConversion) {
         let original = toRun;
         if (!Array.isArray(toRun)) toRun = this.mapSessions();
 
@@ -200,7 +195,6 @@
 
         const sourceDataCopy = structuredClone(sessionResults.source_data);
 
-
         // Resolve the correct session info from all of the metadata for this conversion
         const sessionInfo = {
             ...sessionResults,