Merge pull request #8 from catalystneuro/vprobe-electrodes-locations
vprobe electrodes locations
CodyCBakerPhD authored Dec 19, 2023
2 parents 3557448 + 8d0700d commit 60464d4
Showing 8 changed files with 74 additions and 121 deletions.
4 changes: 2 additions & 2 deletions requirements.txt
@@ -1,5 +1,5 @@
neuroconv==0.4.4
spikeinterface==0.98.2
neuroconv==0.4.7
spikeinterface==0.99.1
nwbwidgets
nwbinspector
pre-commit
3 changes: 2 additions & 1 deletion setup.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup, find_packages

from setuptools import find_packages, setup

requirements_file_path = Path(__file__).parent / "requirements.txt"
with open(requirements_file_path) as file:
4 changes: 0 additions & 4 deletions src/jazayeri_lab_to_nwb/watters/__init__.py
@@ -1,4 +0,0 @@
from .wattersbehaviorinterface import WattersEyePositionInterface, WattersPupilSizeInterface
from .watterstrialsinterface import WattersTrialsInterface
from .wattersrecordinginterface import WattersDatRecordingInterface
from .wattersnwbconverter import WattersNWBConverter
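With the package re-exports removed, downstream code imports each class from its defining module. A minimal sketch of the resulting import style (the same pattern the converter diff below switches to); module and class names are taken from this repository:

# Sketch only: import interfaces from their defining modules rather than
# from the (now empty) watters package __init__.
from jazayeri_lab_to_nwb.watters.wattersbehaviorinterface import (
    WattersEyePositionInterface,
    WattersPupilSizeInterface,
)
from jazayeri_lab_to_nwb.watters.wattersrecordinginterface import WattersDatRecordingInterface
from jazayeri_lab_to_nwb.watters.watterstrialsinterface import WattersTrialsInterface
from jazayeri_lab_to_nwb.watters.wattersnwbconverter import WattersNWBConverter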
7 changes: 4 additions & 3 deletions src/jazayeri_lab_to_nwb/watters/watters_convert_session.py
@@ -1,16 +1,16 @@
"""Primary script to run to convert an entire session for of data using the NWBConverter."""
import os
import datetime
import glob
import json
import logging
import os
from pathlib import Path
from typing import Union
from uuid import uuid4
from zoneinfo import ZoneInfo

from neuroconv.tools.data_transfers import automatic_dandi_upload
from neuroconv.utils import load_dict_from_file, dict_deep_update
from neuroconv.utils import dict_deep_update, load_dict_from_file

from jazayeri_lab_to_nwb.watters import WattersNWBConverter

@@ -64,6 +64,7 @@ def session_to_nwb(
"""
if dandiset_id is not None:
import dandi # check importability

assert os.getenv("DANDI_API_KEY"), (
"Unable to find environment variable 'DANDI_API_KEY'. "
"Please retrieve your token from DANDI and set this environment variable."
@@ -249,5 +250,5 @@ def session_to_nwb(
output_dir_path=output_dir_path,
stub_test=stub_test,
overwrite=overwrite,
# dandiset_id = "000620",
# dandiset_id = "000620",
)
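A hedged usage sketch (not part of this commit) of the DANDI-enabled path above: when a dandiset_id is passed, session_to_nwb first checks that the dandi package imports and that DANDI_API_KEY is set, so a missing token fails fast instead of after conversion. The data_dir_path argument and all filesystem paths below are hypothetical placeholders; output_dir_path, stub_test, overwrite, and dandiset_id mirror the call shown above.

import os
from pathlib import Path

from jazayeri_lab_to_nwb.watters.watters_convert_session import session_to_nwb

# Required before conversion starts when dandiset_id is given; the script
# asserts on this environment variable up front.
os.environ["DANDI_API_KEY"] = "<your-dandi-api-token>"

session_to_nwb(
    data_dir_path=Path("/data/watters/raw/session_01"),  # hypothetical parameter and path
    output_dir_path=Path("/data/watters/nwb"),  # hypothetical path
    stub_test=True,  # write a small stub NWB file for a quick validation pass
    overwrite=True,
    dandiset_id="000620",  # triggers automatic_dandi_upload after conversion
)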
10 changes: 5 additions & 5 deletions src/jazayeri_lab_to_nwb/watters/wattersbehaviorinterface.py
@@ -1,13 +1,13 @@
"""Primary classes for converting experiment-specific behavior."""
import numpy as np
from pathlib import Path
from pynwb import NWBFile, TimeSeries
from pynwb.behavior import SpatialSeries
from hdmf.backends.hdf5 import H5DataIO

import numpy as np
from hdmf.backends.hdf5 import H5DataIO
from neuroconv.basetemporalalignmentinterface import BaseTemporalAlignmentInterface
from neuroconv.utils import DeepDict, FolderPathType, FilePathType
from neuroconv.tools.nwb_helpers import get_module
from neuroconv.utils import DeepDict, FilePathType, FolderPathType
from pynwb import NWBFile, TimeSeries
from pynwb.behavior import SpatialSeries


class NumpyTemporalAlignmentMixin:
22 changes: 11 additions & 11 deletions src/jazayeri_lab_to_nwb/watters/wattersnwbconverter.py
@@ -1,30 +1,30 @@
"""Primary NWBConverter class for this dataset."""
import json
import logging
import numpy as np
from typing import Optional
from pathlib import Path
from typing import Optional

import numpy as np
from neuroconv import NWBConverter
from neuroconv.utils import FolderPathType
from neuroconv.basetemporalalignmentinterface import BaseTemporalAlignmentInterface
from neuroconv.datainterfaces import (
SpikeGLXRecordingInterface,
KiloSortSortingInterface,
SpikeGLXRecordingInterface,
)
from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import (
BaseSortingExtractorInterface,
)
from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import BaseRecordingExtractorInterface
from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import BaseSortingExtractorInterface
from neuroconv.basetemporalalignmentinterface import BaseTemporalAlignmentInterface
from neuroconv.datainterfaces.text.timeintervalsinterface import TimeIntervalsInterface

from neuroconv.utils import FolderPathType
from spikeinterface.core.waveform_tools import has_exceeding_spikes
from spikeinterface.curation import remove_excess_spikes

from . import (
WattersDatRecordingInterface,
from .wattersbehaviorinterface import (
WattersEyePositionInterface,
WattersPupilSizeInterface,
WattersTrialsInterface,
)
from .wattersrecordinginterface import WattersDatRecordingInterface
from .watterstrialsinterface import WattersTrialsInterface


class WattersNWBConverter(NWBConverter):
137 changes: 46 additions & 91 deletions src/jazayeri_lab_to_nwb/watters/wattersrecordinginterface.py
@@ -1,82 +1,22 @@
"""Primary class for Watters Plexon probe data."""
import os
import json
import numpy as np
from pynwb import NWBFile
import os
from pathlib import Path
from typing import Optional, Union

from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import BaseRecordingExtractorInterface
import numpy as np
import probeinterface as pi
from neuroconv.datainterfaces.ecephys.baserecordingextractorinterface import (
BaseRecordingExtractorInterface,
)
from neuroconv.utils import FilePathType
from pynwb import NWBFile
from spikeinterface import BaseRecording


def add_electrode_locations(
recording_extractor: BaseRecording,
probe_metadata_file: FilePathType,
probe_name: str,
probe_key: str,
) -> list[dict]:
with open(probe_metadata_file, "r") as f:
all_probe_metadata = json.load(f)
probe_metadata = None
for entry in all_probe_metadata:
if entry["label"] == probe_key:
probe_metadata = entry

if probe_metadata is None:
return []

probe_coord_system = probe_metadata["coordinate_system"]
coord_names = probe_coord_system.split("[")[1].split("]")[0].split(",")
electrode_metadata = [
{
"name": "x",
"description": f"{coord_names[0].strip()} coordinate. {probe_coord_system}",
},
{
"name": "y",
"description": f"{coord_names[1].strip()} coordinate. {probe_coord_system}",
},
]
if len(coord_names) == 3:
electrode_metadata.append(
{
"name": "z",
"description": f"{coord_names[2].strip()} coordinate. {probe_coord_system}",
},
)

channel_ids = recording_extractor.get_channel_ids()
recording_extractor.set_property(
key="group_name",
ids=channel_ids,
values=[probe_name] * len(channel_ids),
)
coordinates = probe_metadata["coordinates"]
recording_extractor.set_property(
key="x",
values=[coordinates["first_channel"][0], coordinates["last_channel"][0]],
ids=channel_ids[[0, -1]],
)
recording_extractor.set_property(
key="y",
values=[coordinates["first_channel"][1], coordinates["last_channel"][1]],
ids=channel_ids[[0, -1]],
)
if len(coord_names) == 3:
recording_extractor.set_property(
key="z",
values=[coordinates["first_channel"][2], coordinates["last_channel"][2]],
ids=channel_ids[[0, -1]],
)

return electrode_metadata


class WattersDatRecordingInterface(BaseRecordingExtractorInterface):

ExtractorName = "NumpyRecording"
ExtractorName = "BinaryRecordingExtractor"

def __init__(
self,
@@ -88,37 +28,56 @@ def __init__(
t_start: float = 0.0,
sampling_frequency: float = 30000.0,
channel_ids: Optional[list] = None,
gain_to_uv: list = [1.0],
gain_to_uv: list = 1.0,
offset_to_uv: list = 0.0,
probe_metadata_file: Optional[FilePathType] = None,
probe_name: str = "vprobe",
probe_key: Optional[str] = None,
):
traces = np.memmap(file_path, dtype=dtype, mode="r").reshape(-1, channel_count)
source_data = {
"traces_list": [traces],
"file_paths": [file_path],
"sampling_frequency": sampling_frequency,
"num_channels": channel_count,
"t_starts": [t_start],
"channel_ids": channel_ids,
"gain_to_uV": gain_to_uv,
"offset_to_uV": offset_to_uv,
"dtype": dtype,
}
super().__init__(verbose=verbose, es_key=es_key, **source_data)
if gain_to_uv is not None:
if len(gain_to_uv) == 1:
gain_to_uv = np.full((channel_count,), gain_to_uv[0], dtype=float)
else:
assert len(gain_to_uv) == channel_count, (
f"There are {channel_count} channels " f"but `gain_to_uv` has length {len(gain_to_uv)}"
)
gain_to_uv = np.array(gain_to_uv, dtype=float)
self.recording_extractor.set_property("gain_to_uV", gain_to_uv)
self.probe_metadata_file = probe_metadata_file

# this is used for metadata naming
self.probe_name = probe_name
self.probe_key = probe_key

self.electrode_metadata = None
if self.probe_metadata_file is not None and self.probe_key is not None:
self.electrode_metadata = add_electrode_locations(
self.recording_extractor, self.probe_metadata_file, self.probe_name, self.probe_key
)
# add probe information
probe_metadata = None
if probe_metadata_file is not None and probe_key is not None:
with open(probe_metadata_file, "r") as f:
all_probe_metadata = json.load(f)
for entry in all_probe_metadata:
if entry["label"] == probe_key:
probe_metadata = entry

if probe_metadata is not None and "electrodes_locations" in probe_metadata:
# Grab electrode position from metadata
locations_array = np.array(probe_metadata["electrodes_locations"])
ndim = locations_array.shape[1]
probe = pi.Probe(ndim=ndim)
probe.set_contacts(locations_array)
else:
# Generate V-probe geometry: 64 channels arranged vertically with 50 um spacing
probe = pi.generate_linear_probe(num_elec=channel_count, ypitch=50)
probe.set_device_channel_indices(np.arange(channel_count))
probe.name = probe_name

# set probe to interface recording
self.set_probe(probe, group_mode="by_probe")

# set group_name property to match electrode group name in metadata
self.recording_extractor.set_property(
key="group_name",
values=[probe_name] * len(self.recording_extractor.channel_ids),
)

def get_metadata(self) -> dict:
metadata = super().get_metadata()
@@ -139,8 +98,4 @@ def get_metadata(self) -> dict:
]
metadata["Ecephys"]["ElectrodeGroup"] = electrode_groups

if self.electrode_metadata is None:
return metadata

metadata["Ecephys"]["Electrodes"] = self.electrode_metadata
return metadata
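A minimal, self-contained sketch (not part of this commit) of the probe handling introduced above: build a probeinterface.Probe either from explicit electrodes_locations metadata or from the linear V-probe fallback, then attach it to a recording so per-channel locations and groups propagate to the NWB electrodes table. The NumpyRecording and the probe_metadata dict are placeholders standing in for the real .dat file and probe metadata JSON; probe.annotate is used here in place of the interface's probe.name assignment.

import numpy as np
import probeinterface as pi
from spikeinterface.core import NumpyRecording

channel_count = 64
# Placeholder recording standing in for the memory-mapped .dat traces.
recording = NumpyRecording(np.zeros((1000, channel_count), dtype="int16"), sampling_frequency=30000.0)

# Placeholder for one entry of the probe metadata JSON file.
probe_metadata = {
    "label": "vprobe0",
    "electrodes_locations": [[0.0, 50.0 * i] for i in range(channel_count)],
}

if "electrodes_locations" in probe_metadata:
    # Explicit per-contact coordinates from the metadata file.
    locations = np.asarray(probe_metadata["electrodes_locations"], dtype=float)
    probe = pi.Probe(ndim=locations.shape[1])
    probe.set_contacts(locations)
else:
    # Fallback: linear V-probe geometry with 50 um vertical pitch.
    probe = pi.generate_linear_probe(num_elec=channel_count, ypitch=50)

probe.set_device_channel_indices(np.arange(channel_count))
probe.annotate(name="vprobe")

recording = recording.set_probe(probe, group_mode="by_probe")
print(recording.get_channel_locations()[:3])  # per-channel coordinates now on the recording
print(recording.get_property("group")[:3])  # channels grouped by probe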
8 changes: 4 additions & 4 deletions src/jazayeri_lab_to_nwb/watters/watterstrialsinterface.py
@@ -1,14 +1,14 @@
"""Primary class for converting experiment-specific behavior."""
import json
import numpy as np
import pandas as pd
import warnings
from pathlib import Path
from pynwb import NWBFile
from typing import Optional

import numpy as np
import pandas as pd
from neuroconv.datainterfaces.text.timeintervalsinterface import TimeIntervalsInterface
from neuroconv.utils import DeepDict, FolderPathType, FilePathType
from neuroconv.utils import DeepDict, FilePathType, FolderPathType
from pynwb import NWBFile


class WattersTrialsInterface(TimeIntervalsInterface):
