Merge pull request #83 from catalystneuro/use_backend_configuration
Update backend configuration
CodyCBakerPhD authored Sep 24, 2024
2 parents aa21959 + 59911e3 commit e9556af
Showing 8 changed files with 59 additions and 120 deletions.
1 change: 1 addition & 0 deletions pyproject.toml
@@ -20,6 +20,7 @@ keywords = ["nwb", "dandi", "ibl"]
license = {file = "license.txt"}
requires-python = ">=3.9"
dependencies = [
"dandi",
"neuroconv",
"spikeinterface",
"probeinterface",
56 changes: 33 additions & 23 deletions src/ibl_to_nwb/converters/_iblconverter.py
@@ -1,26 +1,26 @@
"""Primary base class for all IBL converters."""

from datetime import datetime
from typing import Optional
from typing import Literal, Optional

from dateutil import tz
from ndx_ibl import IblSubject
from neuroconv import ConverterPipe
from neuroconv.tools.nwb_helpers import make_or_load_nwbfile
from neuroconv.tools.nwb_helpers import HDF5BackendConfiguration, configure_backend, make_or_load_nwbfile
from one.api import ONE
from pydantic import FilePath
from pynwb import NWBFile
from typing_extensions import Self


class IblConverter(ConverterPipe):
def __init__(self, one: ONE, session: str, data_interfaces: list, verbose: bool = True):
def __init__(self, one: ONE, session: str, data_interfaces: list, verbose: bool = True) -> Self:
self.one = one
self.session = session
super().__init__(data_interfaces=data_interfaces, verbose=verbose)

def get_metadata_schema(self) -> dict:
metadata_schema = super().get_metadata_schema()

# way of manually overriding custom metadata for interfaces we don't care about validating
metadata_schema["additionalProperties"] = True

return metadata_schema
@@ -80,44 +80,51 @@ def get_metadata(self) -> dict:

def run_conversion(
self,
nwbfile_path: Optional[str] = None,
nwbfile_path: Optional[FilePath] = None,
nwbfile: Optional[NWBFile] = None,
metadata: Optional[dict] = None,
overwrite: bool = False,
# TODO: when all H5DataIO prewraps are gone, introduce Zarr safely
# backend: Union[Literal["hdf5", "zarr"]],
# backend_configuration: Optional[Union[HDF5BackendConfiguration, ZarrBackendConfiguration]] = None,
backend: Optional[Literal["hdf5"]] = None,
backend_configuration: Optional[HDF5BackendConfiguration] = None,
conversion_options: Optional[dict] = None,
) -> NWBFile:
"""
Run the NWB conversion over all the instantiated data interfaces.
Parameters
----------
nwbfile_path: FilePathType
nwbfile_path : FilePathType
Path for where to write or load (if overwrite=False) the NWBFile.
If specified, the context will always write to this location.
nwbfile: NWBFile, optional
nwbfile : NWBFile, optional
An in-memory NWBFile object to write to the location.
metadata: dict, optional
metadata : dict, optional
Metadata dictionary with information used to create the NWBFile when one does not exist or overwrite=True.
overwrite: bool, optional
Whether or not to overwrite the NWBFile if one exists at the nwbfile_path.
overwrite : bool, default: False
Whether to overwrite the NWBFile if one exists at the nwbfile_path.
The default is False (append mode).
verbose: bool, optional
If 'nwbfile_path' is specified, informs user after a successful write operation.
The default is True.
conversion_options: dict, optional
backend : "hdf5", optional
The type of backend to use when writing the file.
If a `backend_configuration` is not specified, the default type will be "hdf5".
If a `backend_configuration` is specified, then the type will be auto-detected.
backend_configuration : HDF5BackendConfiguration, optional
The configuration model to use when configuring the datasets for this backend.
To customize, call the `.get_default_backend_configuration(...)` method, modify the returned
BackendConfiguration object, and pass that instead.
Otherwise, all datasets will use default configuration settings.
conversion_options : dict, optional
Similar to source_data, a dictionary containing keywords for each interface for which non-default
conversion specification is requested.
Returns
-------
nwbfile: NWBFile
The in-memory NWBFile object after all conversion operations are complete.
"""
if metadata is None:
metadata = self.get_metadata()

subject_metadata = metadata.pop("Subject")
ibl_subject = IblSubject(**subject_metadata)

if metadata is None:
metadata = self.get_metadata()
self.validate_metadata(metadata=metadata)

conversion_options = conversion_options or dict()
@@ -136,6 +143,9 @@ def run_conversion(
nwbfile=nwbfile_out, metadata=metadata, **conversion_options.get(interface_name, dict())
)

super().run_conversion()
if backend_configuration is None:
backend_configuration = self.get_default_backend_configuration(nwbfile=nwbfile_out, backend=backend)

configure_backend(nwbfile=nwbfile_out, backend_configuration=backend_configuration)

return nwbfile_out
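
For context, a minimal usage sketch of the updated signature (assuming `IblConverter` is re-exported from `ibl_to_nwb.converters`; the session ID, interface list, and output path are hypothetical placeholders):

    from one.api import ONE
    from ibl_to_nwb.converters import IblConverter  # assumed re-export

    one = ONE()
    converter = IblConverter(
        one=one,
        session="hypothetical-session-id",  # placeholder experiment ID
        data_interfaces=[],  # placeholder; real usage passes interface instances
    )

    # With backend_configuration=None (the default), run_conversion now builds a
    # default HDF5BackendConfiguration and applies it via configure_backend.
    converter.run_conversion(
        nwbfile_path="hypothetical_session.nwb",
        metadata=converter.get_metadata(),
        backend="hdf5",
        overwrite=True,
    )
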
28 changes: 4 additions & 24 deletions src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py
@@ -4,7 +4,7 @@
from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.utils import load_dict_from_file
from one.api import ONE
from pynwb import H5DataIO, NWBFile
from pynwb import NWBFile
from pynwb.epoch import TimeIntervals


@@ -19,15 +19,6 @@ def get_metadata(self) -> dict:
metadata.update(trial_metadata)
return metadata

def get_original_timestamps(self):
pass

def get_timestamps(self):
pass

def align_timestamps(self):
pass

def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
trials = self.one.load_object(id=self.session, obj="trials", collection="alf")

@@ -49,20 +40,20 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
VectorData(
name="start_time",
description="The beginning of the trial.",
data=H5DataIO(trials["intervals"][:, 0], compression=True),
data=trials["intervals"][:, 0],
),
VectorData(
name="stop_time",
description="The end of the trial.",
data=H5DataIO(trials["intervals"][:, 1], compression=True),
data=trials["intervals"][:, 1],
),
]
for ibl_key in column_ordering:
columns.append(
VectorData(
name=metadata["Trials"][ibl_key]["name"],
description=metadata["Trials"][ibl_key]["description"],
data=H5DataIO(trials[ibl_key], compression=True),
data=trials[ibl_key],
)
)
nwbfile.add_time_intervals(
@@ -72,14 +63,3 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
columns=columns,
)
)

# compression only works using the method above; method below fails
# for start_time, stop_time in trials["intervals"]:
# nwbfile.add_trial(start_time=start_time, stop_time=stop_time)

# for ibl_key in column_ordering:
# nwbfile.add_trial_column(
# name=metadata["Trials"][ibl_key]["name"],
# description=metadata["Trials"][ibl_key]["description"],
# data=H5DataIO(trials[ibl_key], compression=True),
# )
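
With the `H5DataIO(..., compression=True)` wrapping removed above, compression is expected to come from the backend-configuration step instead. A minimal sketch of that mechanism, assuming neuroconv's public `nwb_helpers` API and an in-memory `nwbfile` already populated by the interfaces:

    from neuroconv.tools.nwb_helpers import (
        configure_backend,
        get_default_backend_configuration,
    )

    # Build the default HDF5 configuration covering every dataset in the file,
    # then apply it; this assigns chunking and compression (gzip by default) in
    # one pass, replacing the old per-column H5DataIO wrapping.
    backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")
    configure_backend(nwbfile=nwbfile, backend_configuration=backend_configuration)
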
20 changes: 3 additions & 17 deletions src/ibl_to_nwb/datainterfaces/_lick_times.py
@@ -2,7 +2,7 @@
from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.tools.nwb_helpers import get_module
from one.api import ONE
from pynwb import H5DataIO
from pynwb import NWBFile
from pynwb.file import DynamicTable


@@ -11,16 +11,7 @@ def __init__(self, one: ONE, session: str):
self.one = one
self.session = session

def get_original_timestamps(self):
pass

def get_timestamps(self):
pass

def align_timestamps(self):
pass

def add_to_nwbfile(self, nwbfile, metadata: dict):
def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
licks = self.one.load_object(id=self.session, obj="licks", collection="alf")

lick_events_table = DynamicTable(
@@ -33,15 +24,10 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
VectorData(
name="lick_time",
description="Time stamps of licks as detected from tongue dlc traces",
data=H5DataIO(licks["times"], compression=True),
data=licks["times"],
)
],
)
# lick_events_table.add_column(
# name="lick_time",
# description="Time stamps of licks as detected from tongue dlc traces",
# data=H5DataIO(licks["times"], compression=True),
# )

behavior_module = get_module(nwbfile=nwbfile, name="behavior", description="Processed behavioral data.")
behavior_module.add(lick_events_table)
25 changes: 7 additions & 18 deletions src/ibl_to_nwb/datainterfaces/_pose_estimation.py
@@ -5,7 +5,7 @@
from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.tools.nwb_helpers import get_module
from one.api import ONE
from pynwb import H5DataIO
from pynwb import NWBFile
from pynwb.image import ImageSeries


@@ -17,16 +17,7 @@ def __init__(self, one: ONE, session: str, camera_name: str, include_video: bool
self.include_video = include_video
self.include_pose = include_pose

def get_original_timestamps(self):
pass

def get_timestamps(self):
pass

def align_timestamps(self):
pass

def add_to_nwbfile(self, nwbfile, metadata: dict):
def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
# Sometimes the DLC data has been revised, possibly multiple times
# Always use the most recent revision available
session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*")
@@ -50,7 +41,6 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
)

left_right_or_body = self.camera_name[:5].rstrip("C")
# camera_name_snake_case = f"{left_right_or_body}_camera"
reused_timestamps = None
all_pose_estimation_series = list()
if self.include_pose:
@@ -62,15 +52,15 @@
pose_estimation_series = PoseEstimationSeries(
name=body_part,
description=f"Marker placed on or around, labeled '{body_part}'.",
data=H5DataIO(body_part_data, compression=True),
data=body_part_data,
unit="px",
reference_frame="(0,0) corresponds to the upper left corner when using width by height convention.",
timestamps=reused_timestamps or H5DataIO(timestamps, compression=True),
timestamps=reused_timestamps or timestamps,
confidence=np.array(dlc_data[f"{body_part}_likelihood"]),
)
all_pose_estimation_series.append(pose_estimation_series)

reused_timestamps = all_pose_estimation_series[0] # trick for linking timestamps across series
reused_timestamps = all_pose_estimation_series[0] # A trick for linking timestamps across series

pose_estimation_kwargs = dict(
name=f"PoseEstimation{left_right_or_body.capitalize()}Camera",
@@ -88,7 +78,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
):
all_pose_estimation_series.append(pose_estimation_series)

reused_timestamps = all_pose_estimation_series[0] # trick for linking timestamps across series
reused_timestamps = all_pose_estimation_series[0] # A trick for linking timestamps across series

original_video_file = self.one.load_dataset(
id=self.session, dataset=f"raw_video_data/*{self.camera_name}*", download_only=True
@@ -99,7 +89,6 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
unit="n.a.",
external_file=[str(original_video_file)],
format="external",
timestamps=reused_timestamps or H5DataIO(timestamps, compression=True),
timestamps=reused_timestamps or timestamps,
)
# pose_estimation_kwargs.update(original_videos_series=[image_series]) # For future version of ndx-pose
nwbfile.add_acquisition(image_series)
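
The "trick for linking timestamps across series" noted in the comments above is standard pynwb behavior: passing an existing series as `timestamps` makes later series share its timestamp data instead of storing a copy. A self-contained sketch (names are illustrative):

    import numpy as np
    from pynwb import TimeSeries

    timestamps = np.arange(0.0, 1.0, 1.0 / 60.0)  # e.g. a 60 Hz camera clock
    paw_x = TimeSeries(name="paw_x", data=np.random.rand(60), unit="px", timestamps=timestamps)
    # Linking: paw_y reuses paw_x's timestamps rather than duplicating them.
    paw_y = TimeSeries(name="paw_y", data=np.random.rand(60), unit="px", timestamps=paw_x)
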
13 changes: 2 additions & 11 deletions src/ibl_to_nwb/datainterfaces/_pupil_tracking.py
@@ -7,7 +7,7 @@
from neuroconv.tools.nwb_helpers import get_module
from neuroconv.utils import load_dict_from_file
from one.api import ONE
from pynwb import H5DataIO, TimeSeries
from pynwb import TimeSeries
from pynwb.behavior import PupilTracking


@@ -25,15 +25,6 @@ def get_metadata(self) -> dict:

return metadata

def get_original_timestamps(self):
pass

def get_timestamps(self):
pass

def align_timestamps(self):
pass

def add_to_nwbfile(self, nwbfile, metadata: dict):
left_or_right = self.camera_name[:5].rstrip("C")

@@ -45,7 +36,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
TimeSeries(
name=left_or_right.capitalize() + metadata["Pupils"][ibl_key]["name"],
description=metadata["Pupils"][ibl_key]["description"],
data=H5DataIO(np.array(camera_data["features"][ibl_key]), compression=True),
data=np.array(camera_data["features"][ibl_key]),
timestamps=camera_data["times"],
unit="px",
)
15 changes: 3 additions & 12 deletions src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py
@@ -3,7 +3,7 @@
from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.tools.nwb_helpers import get_module
from one.api import ONE
from pynwb import H5DataIO, TimeSeries
from pynwb import TimeSeries


class RoiMotionEnergyInterface(BaseDataInterface):
@@ -12,15 +12,6 @@ def __init__(self, one: ONE, session: str, camera_name: str):
self.session = session
self.camera_name = camera_name

def get_original_timestamps(self):
pass

def get_timestamps(self):
pass

def align_timestamps(self):
pass

def add_to_nwbfile(self, nwbfile, metadata: dict):
left_right_or_body = self.camera_name[:5].rstrip("C")

@@ -42,8 +33,8 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
motion_energy_series = TimeSeries(
name=f"{left_right_or_body.capitalize()}CameraMotionEnergy",
description=description,
data=H5DataIO(camera_data["ROIMotionEnergy"]),
timestamps=H5DataIO(camera_data["times"]),
data=camera_data["ROIMotionEnergy"],
timestamps=camera_data["times"],
unit="a.u.",
)

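
The new docstring points to `.get_default_backend_configuration(...)` for customization. A sketch of that path (the dataset location key is hypothetical, and the `dataset_configurations` / `compression_method` attributes are assumed from neuroconv's backend-configuration model):

    # Build the default configuration, adjust one dataset, then pass it back in.
    backend_configuration = converter.get_default_backend_configuration(
        nwbfile=nwbfile, backend="hdf5"
    )
    dataset_configuration = backend_configuration.dataset_configurations[
        "processing/behavior/LeftCameraMotionEnergy/data"  # hypothetical location key
    ]
    dataset_configuration.compression_method = "gzip"

    converter.run_conversion(
        nwbfile=nwbfile,
        metadata=metadata,
        backend_configuration=backend_configuration,
    )
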