Commit 8d839a6
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 9, 2024
1 parent 2f2ed5f commit 8d839a6
Showing 8 changed files with 118 additions and 139 deletions.
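
These are formatter-only changes: the Python hunks match black's conventions (double quotes, trailing commas, collapsed literals, two spaces before inline comments), and the TypeScript/JavaScript hunks match prettier's (double quotes, semicolons). pre-commit.ci applies whatever hooks the repository lists in its .pre-commit-config.yaml and commits the result; that file is not part of this diff, so the sketch below is an assumption about what such a configuration might look like, with placeholder hook revisions.

    # Hypothetical .pre-commit-config.yaml; hook list and revs are placeholders,
    # chosen only to match the style of fixes visible in this commit.
    repos:
      - repo: https://github.com/psf/black
        rev: 24.3.0 # placeholder revision
        hooks:
          - id: black # would produce the pyflask/*.py changes seen here
      - repo: https://github.com/pre-commit/mirrors-prettier
        rev: v3.1.0 # placeholder revision
        hooks:
          - id: prettier # would produce the schemas/*.ts and src/**/*.js changes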
pyflask/apis/neuroconv.py (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@
     upload_folder_to_dandi,
     upload_multiple_filesystem_objects_to_dandi,
     get_interface_alignment,
-    get_backend_configuration
+    get_backend_configuration,
 )
 
 from errorHandlers import notBadRequestException
pyflask/app.py (6 changes: 3 additions & 3 deletions)
@@ -111,15 +111,15 @@ def get_cpu_count():
 @app.route("/compression/hdf5")
 def get_hdf5_compression_methods():
     from neuroconv.tools.nwb_helpers import AVAILABLE_HDF5_COMPRESSION_METHODS
+
     return list(AVAILABLE_HDF5_COMPRESSION_METHODS)
 
 
 @app.route("/compression/zarr")
 def get_zarr_compression_methods():
     from neuroconv.tools.nwb_helpers import AVAILABLE_ZARR_COMPRESSION_METHODS
-    return list(AVAILABLE_ZARR_COMPRESSION_METHODS)
-
-
+
+    return list(AVAILABLE_ZARR_COMPRESSION_METHODS)
 
 
 @app.route("/get-recommended-species")
pyflask/manageNeuroconv/manage_neuroconv.py (39 changes: 18 additions & 21 deletions)
@@ -671,24 +671,20 @@ def get_interface_alignment(info: dict) -> dict:
 def get_backend_configuration(info: dict) -> dict:
 
     import numpy as np
-    from neuroconv.tools.nwb_helpers import make_or_load_nwbfile, get_default_backend_configuration, HDF5BackendConfiguration, configure_backend
+    from neuroconv.tools.nwb_helpers import (
+        make_or_load_nwbfile,
+        get_default_backend_configuration,
+        HDF5BackendConfiguration,
+        configure_backend,
+    )
 
     backend_configuration = info.get("configuration")
 
     converter, metadata, path_info = get_conversion_info(info)
 
-
-    PROPS_TO_REMOVE = [
-        "object_id",
-        "dataset_name",
-        "location_in_file",
-        "dtype"
-    ]
+    PROPS_TO_REMOVE = ["object_id", "dataset_name", "location_in_file", "dtype"]
 
-    PROPS_TO_AVOID = [
-        "full_shape"
-    ]
-
+    PROPS_TO_AVOID = ["full_shape"]
 
     with make_or_load_nwbfile(
         nwbfile_path=path_info["file"],
@@ -708,17 +704,15 @@
                 for key, value in item.items():
 
                     # Avoid setting compression options if unspecified
-                    if (key == 'compression_options' and len(value) == 0):
+                    if key == "compression_options" and len(value) == 0:
                         setattr(configuration.dataset_configurations[name], key, None)
 
                     # Avoid certain properties passed to the GUIDE
-                    elif (key not in PROPS_TO_AVOID):
+                    elif key not in PROPS_TO_AVOID:
                         setattr(configuration.dataset_configurations[name], key, value)
 
-
         configure_backend(nwbfile=nwbfile, backend_configuration=configuration)
 
-
     def custom_encoder(obj):
         if isinstance(obj, np.ndarray):
             return obj.tolist()
@@ -731,12 +725,12 @@ def custom_encoder(obj):
 
     serialized = json.loads(json.dumps(configuration.dict(), default=custom_encoder))
 
-    dataset_configurations = serialized["dataset_configurations"] # Only provide dataset configurations
+    dataset_configurations = serialized["dataset_configurations"]  # Only provide dataset configurations
 
     for dataset in dataset_configurations.values():
         for key in PROPS_TO_REMOVE:
             del dataset[key]
 
     return dataset_configurations
-
+
 
@@ -818,7 +812,11 @@ def update_conversion_progress(**kwargs):
 
     del ecephys_metadata["ElectrodeColumns"]
 
-    return converter, resolved_metadata, dict(file=resolved_output_path, directory=resolved_output_directory, default=default_output_directory)
+    return (
+        converter,
+        resolved_metadata,
+        dict(file=resolved_output_path, directory=resolved_output_directory, default=default_output_directory),
+    )
 
 
 def convert_to_nwb(info: dict) -> str:
@@ -828,7 +826,6 @@
     source_data = info.get("source_data", False)
     backend_configuration = info.get("configuration")
 
-
     converter, metadata, path_info = get_conversion_info(info)
 
     output_path = path_info["file"]
@@ -840,7 +837,7 @@
     options = (
         {
             interface: (
-                { "stub_test": run_stub_test } # , "iter_opts": {"report_hook": update_conversion_progress}}
+                {"stub_test": run_stub_test}  # , "iter_opts": {"report_hook": update_conversion_progress}}
                 if available_options.get("properties").get(interface).get("properties", {}).get("stub_test")
                 else {}
             )
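
For context on get_backend_configuration above: it wraps neuroconv's backend-configuration helpers, the same names the hunk imports. Below is a minimal sketch of that underlying pattern, not the GUIDE's own code; the NWBFile contents are hypothetical placeholders.

    # Minimal sketch of the neuroconv pattern wrapped by get_backend_configuration.
    # Assumes neuroconv and pynwb are installed; the NWBFile contents are hypothetical.
    from datetime import datetime

    from pynwb import NWBFile
    from neuroconv.tools.nwb_helpers import configure_backend, get_default_backend_configuration

    nwbfile = NWBFile(
        session_description="demo session",
        identifier="demo-id",
        session_start_time=datetime.now().astimezone(),
    )

    # Build a default HDF5 configuration describing chunking/compression for every dataset
    backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")

    # Entries in backend_configuration.dataset_configurations can be adjusted here
    # (as the GUIDE does with values sent from its frontend), then applied back
    # onto the in-memory file before it is written.
    configure_backend(nwbfile=nwbfile, backend_configuration=backend_configuration)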
schemas/backend-configuration.schema.ts (2 changes: 1 addition & 1 deletion)
@@ -47,7 +47,7 @@ export const getSchema = (method='hdf5') => {
     copy.properties["compression_method"].enum = resolved[method] ?? sharedCompressionMethods
     return copy
 }
-
+
 const setReady: any = {}
 
schemas/base-metadata.schema.ts (2 changes: 1 addition & 1 deletion)
@@ -157,7 +157,7 @@ export const preprocessMetadataSchema = (schema: any = baseMetadataSchema, globa
     })
 
     const units = ecephys.properties["Units"]
-
+
     if (units) {
 
         units.title = "Summarized Units"
src/renderer/src/stories/JSONSchemaInput.js (11 changes: 4 additions & 7 deletions)
@@ -926,7 +926,7 @@ export class JSONSchemaInput extends LitElement {
         const allowPatternProperties = isPatternProperties(this.pattern);
         const allowAdditionalProperties = isAdditionalProperties(this.pattern);
 
-        const editableInline = [ 'string', 'number' ]
+        const editableInline = ["string", "number"];
 
         // Provide default item types
         if (isArray) {
@@ -946,15 +946,12 @@
 
             const fileSystemFormat = isFilesystemSelector(name, itemSchema?.format);
             if (fileSystemFormat) return createFilesystemSelector(fileSystemFormat);
-
             // Create tables if possible
             else if (editableInline.includes(itemSchema?.type) && !itemSchema.properties) {
-
-
                 const postprocess = (v) => {
-                    if (itemSchema?.type === 'number') return parseFloat(v)
-                    else return v
-                }
+                    if (itemSchema?.type === "number") return parseFloat(v);
+                    else return v;
+                };
 
                 const list = new List({
                     items: this.value,
src/renderer/src/stories/pages/Page.js (8 changes: 1 addition & 7 deletions)
@@ -140,12 +140,7 @@ export class Page extends LitElement {
         }
     }
 
-    async runConversions(
-        conversionOptions = {},
-        toRun,
-        options = {},
-        backendFunctionToRun = runConversion
-    ) {
+    async runConversions(conversionOptions = {}, toRun, options = {}, backendFunctionToRun = runConversion) {
         let original = toRun;
         if (!Array.isArray(toRun)) toRun = this.mapSessions();
 
@@ -200,7 +195,6 @@
 
         const sourceDataCopy = structuredClone(sessionResults.source_data);
 
-
         // Resolve the correct session info from all of the metadata for this conversion
         const sessionInfo = {
             ...sessionResults,