Skip to content

Commit

Permalink
revision argument in all datainterfaces
Browse files Browse the repository at this point in the history
  • Loading branch information
grg2rsr committed Dec 11, 2024
1 parent caabeb6 commit 3ab8de3
Show file tree
Hide file tree
Showing 10 changed files with 83 additions and 68 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
cleanup: bool = False

# assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!"
revision = None

nwbfile_path.parent.mkdir(exist_ok=True)

Expand All @@ -52,32 +53,29 @@

# These interfaces should always be present in source data
data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting"))
data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session))
data_interfaces.append(WheelInterface(one=session_one, session=session))
data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision))
data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision))

# These interfaces may not be present; check if they are before adding to list
pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*")
for pose_estimation_file in pose_estimation_files:
camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
data_interfaces.append(
# IblPoseEstimationInterface(
# one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False
# )
IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name)
IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)
)

pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*")
for pupil_tracking_file in pupil_tracking_files:
camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "")
data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name))
data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision))

roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*")
for roi_motion_energy_file in roi_motion_energy_files:
camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "")
data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name))
data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision))

if session_one.list_datasets(eid=session, collection="alf", filename="licks*"):
data_interfaces.append(LickInterface(one=session_one, session=session))
data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision))

# Run conversion
session_converter = BrainwideMapConverter(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,20 +1,22 @@
# %%
from pathlib import Path

from one.api import ONE

from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter
from ibl_to_nwb.datainterfaces import RawVideoInterface

# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71"
session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe
data_folder = Path(
"/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001"
)
spikeglx_source_folder_path = data_folder / "raw_ephys_data"
# eid = "d32876dd-8303-4720-8e7e-20678dc2fd71"
eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe

# %%
# one_cache_folder = '/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache'
# data_folder = Path(
# "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001"
# )
# spikeglx_source_folder_path = data_folder / "raw_ephys_data"

# Specify the revision of the pose estimation data
# Setting to 'None' will use whatever the latest released revision is
revision = None
# revision = None

# base_path = Path("E:/IBL")
base_path = Path.home() / "ibl_scratch" # local directory
Expand All @@ -23,50 +25,55 @@
nwbfiles_folder_path.mkdir(exist_ok=True)

# Initialize IBL (ONE) client to download processed data for this session
one_cache_folder_path = base_path / "cache"
ibl_client = ONE(
# one_cache_folder_path = base_path / "cache"
one_cache_folder_path = "/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache"
one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
password="international",
silent=True,
cache_dir=one_cache_folder_path,
)

# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps
data_interfaces = []

# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0")
spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client, eid=session_id)
data_interfaces.append(spikeglx_subconverter)
# %% ephys
# session_folder = one.eid2path(eid)
# spikeglx_source_folder_path = session_folder / 'raw_ephys_data'

# # Raw video takes some special handling
# metadata_retrieval = BrainwideMapConverter(one=ibl_client, session=session_id, data_interfaces=[], verbose=False)
# subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"]

# pose_estimation_files = ibl_client.list_datasets(eid=session_id, filename="*.dlc*")
# for pose_estimation_file in pose_estimation_files:
# camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps
# spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid)
# data_interfaces.append(spikeglx_subconverter)


# %% video
# Raw video takes some special handling
metadata_retrieval = BrainwideMapConverter(one=one, session=eid, data_interfaces=[], verbose=False)
subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"]

# video_interface = RawVideoInterface(
# nwbfiles_folder_path=nwbfiles_folder_path,
# subject_id=subject_id,
# one=ibl_client,
# session=session_id,
# camera_name=camera_name,
# )
# data_interfaces.append(video_interface)
pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*")
for pose_estimation_file in pose_estimation_files:
camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")

video_interface = RawVideoInterface(
nwbfiles_folder_path=nwbfiles_folder_path,
subject_id=subject_id,
one=one,
session=eid,
camera_name=camera_name,
)
data_interfaces.append(video_interface)

# Run conversion
session_converter = BrainwideMapConverter(
one=ibl_client, session=session_id, data_interfaces=data_interfaces, verbose=False
)
session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=False)

metadata = session_converter.get_metadata()
metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"]
metadata["NWBFile"]["eid"] = metadata["NWBFile"]["eid"]
subject_id = metadata["Subject"]["subject_id"]

subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}"
subject_folder_path.mkdir(exist_ok=True)
nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{session_id}_desc-raw_ecephys+image.nwb"
nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-video.nwb"

session_converter.run_conversion(
nwbfile_path=nwbfile_path,
Expand All @@ -75,4 +82,4 @@
)

# TODO: add some kind of raw-specific check
# check_written_nwbfile_for_consistency(one=ibl_client, nwbfile_path=nwbfile_path)
# check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path)
5 changes: 3 additions & 2 deletions src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@


class BrainwideMapTrialsInterface(BaseDataInterface):
def __init__(self, one: ONE, session: str, revision: str | None = None):
    """Interface for the Brainwide Map trials table.

    Parameters
    ----------
    one : ONE
        Connected ONE API client used to query and download session data.
    session : str
        Experiment ID (eid) of the session.
    revision : str, optional
        Dataset revision to load. When None, the most recent revision
        reported by ``one.list_revisions(session)`` is used.
    """
    self.one = one
    self.session = session
    if revision is None:
        # Default to the latest available revision for this session.
        revision = one.list_revisions(session)[-1]
    self.revision = revision

def get_metadata(self) -> dict:
metadata = super().get_metadata()
Expand All @@ -20,7 +21,7 @@ def get_metadata(self) -> dict:
return metadata

def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
trials = self.one.load_object(id=self.session, obj="trials", collection="alf")
trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision)

column_ordering = [
"choice",
Expand Down
7 changes: 5 additions & 2 deletions src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting):
installation_mesg = ""
name = "iblsorting"

def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None):
def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None):
from brainbox.io.one import SpikeSortingLoader
from iblatlas.atlas import AllenAtlas
from iblatlas.regions import BrainRegions
Expand All @@ -28,6 +28,9 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None):
silent=True,
cache_dir=cache_folder,
)
if revision is None: # latest
revision = one.list_revisions(session)[-1]

atlas = AllenAtlas()
brain_regions = BrainRegions()

Expand All @@ -45,7 +48,7 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None):
for probe_name in probe_names:
sorting_loader = SpikeSortingLoader(eid=session, one=one, pname=probe_name, atlas=atlas)
sorting_loaders.update({probe_name: sorting_loader})
spikes, clusters, channels = sorting_loader.load_spike_sorting()
spikes, clusters, channels = sorting_loader.load_spike_sorting(revision=revision)
# cluster_ids.extend(list(np.array(clusters["metrics"]["cluster_id"]) + unit_id_per_probe_shift))
number_of_units = len(np.unique(spikes["clusters"]))
cluster_ids.extend(list(np.arange(number_of_units).astype("int32") + unit_id_per_probe_shift))
Expand Down
3 changes: 2 additions & 1 deletion src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,8 @@ def __init__(self, **kwargs):
self.recording_extractor.set_property(key="ibl_y", values=ibl_coords[:, 1])
self.recording_extractor.set_property(key="ibl_z", values=ibl_coords[:, 2])
self.recording_extractor.set_property( # SpikeInterface refers to this as 'brain_area'
key="brain_area", values=list(channels["acronym"]) # NeuroConv remaps to 'location', a required field
key="brain_area",
values=list(channels["acronym"]), # NeuroConv remaps to 'location', a required field
) # Acronyms are symmetric, do not differentiate hemisphere
self.recording_extractor.set_property(
key="beryl_location",
Expand Down
5 changes: 3 additions & 2 deletions src/ibl_to_nwb/datainterfaces/_lick_times.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,13 @@


class LickInterface(BaseDataInterface):
def __init__(self, one: ONE, session: str, revision: str | None = None):
    """Interface for lick detection events.

    Parameters
    ----------
    one : ONE
        Connected ONE API client used to query and download session data.
    session : str
        Experiment ID (eid) of the session.
    revision : str, optional
        Dataset revision to load. When None, the most recent revision
        reported by ``one.list_revisions(session)`` is used.
    """
    self.one = one
    self.session = session
    # Fall back to the newest listed revision when none was requested.
    self.revision = revision if revision is not None else one.list_revisions(session)[-1]

def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
licks = self.one.load_object(id=self.session, obj="licks", collection="alf")
licks = self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision)

lick_events_table = DynamicTable(
name="LickTimes",
Expand Down
21 changes: 11 additions & 10 deletions src/ibl_to_nwb/datainterfaces/_pose_estimation.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,17 +37,18 @@ def __init__(

self.revision = revision
if self.revision is None:
session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*")
revision_datetime_format = "%Y-%m-%d"
revisions = [
datetime.strptime(session_file.split("#")[1], revision_datetime_format)
for session_file in session_files
if "#" in session_file
]
self.revision = one.list_revisions(session)[-1]
# session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*")
# revision_datetime_format = "%Y-%m-%d"
# revisions = [
# datetime.strptime(session_file.split("#")[1], revision_datetime_format)
# for session_file in session_files
# if "#" in session_file
# ]

if any(revisions):
most_recent = max(revisions)
self.revision = most_recent.strftime("%Y-%m-%d")
# if any(revisions):
# most_recent = max(revisions)
# self.revision = most_recent.strftime("%Y-%m-%d")

def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict) -> None:
camera_data = self.one.load_object(
Expand Down
5 changes: 3 additions & 2 deletions src/ibl_to_nwb/datainterfaces/_pupil_tracking.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,11 @@


class PupilTrackingInterface(BaseDataInterface):
def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None):
    """Interface for pupil-tracking features of one camera.

    Parameters
    ----------
    one : ONE
        Connected ONE API client used to query and download session data.
    session : str
        Experiment ID (eid) of the session.
    camera_name : str
        Name of the camera whose pupil features to load (e.g. "leftCamera").
    revision : str, optional
        Dataset revision to load. When None, the most recent revision
        reported by ``one.list_revisions(session)`` is used.
    """
    self.one = one
    self.session = session
    self.camera_name = camera_name
    if revision is None:
        # Default to the latest available revision for this session.
        revision = one.list_revisions(session)[-1]
    self.revision = revision

def get_metadata(self) -> dict:
metadata = super().get_metadata()
Expand All @@ -28,7 +29,7 @@ def get_metadata(self) -> dict:
def add_to_nwbfile(self, nwbfile, metadata: dict):
left_or_right = self.camera_name[:5].rstrip("C")

camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf")
camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf", revision=self.revision)

pupil_time_series = list()
for ibl_key in ["pupilDiameter_raw", "pupilDiameter_smooth"]:
Expand Down
5 changes: 3 additions & 2 deletions src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,18 @@


class RoiMotionEnergyInterface(BaseDataInterface):
def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None):
    """Interface for ROI motion-energy traces of one camera.

    Parameters
    ----------
    one : ONE
        Connected ONE API client used to query and download session data.
    session : str
        Experiment ID (eid) of the session.
    camera_name : str
        Name of the camera whose motion energy to load (e.g. "leftCamera").
    revision : str, optional
        Dataset revision to load. When None, the most recent revision
        reported by ``one.list_revisions(session)`` is used.
    """
    self.one = one
    self.session = session
    self.camera_name = camera_name
    # Resolve None to the newest listed revision so downstream load_object
    # calls always receive a concrete revision string.
    self.revision = revision if revision is not None else one.list_revisions(session)[-1]

def add_to_nwbfile(self, nwbfile, metadata: dict):
left_right_or_body = self.camera_name[:5].rstrip("C")

camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf")
motion_energy_video_region = self.one.load_object(
id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf"
id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf", revision=self.revision
)

width, height, x, y = motion_energy_video_region["position"]
Expand Down
7 changes: 4 additions & 3 deletions src/ibl_to_nwb/datainterfaces/_wheel_movement.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,10 @@


class WheelInterface(BaseDataInterface):
def __init__(self, one: ONE, session: str, revision: str | None = None):
    """Interface for wheel position and wheel-movement data.

    Parameters
    ----------
    one : ONE
        Connected ONE API client used to query and download session data.
    session : str
        Experiment ID (eid) of the session.
    revision : str, optional
        Dataset revision to load. When None, the most recent revision
        reported by ``one.list_revisions(session)`` is used.
    """
    self.one = one
    self.session = session
    # BUG FIX: take the *last* element of list_revisions(). Without [-1]
    # self.revision was the entire revision list rather than the latest
    # revision string, which the later load_object(..., revision=...) calls
    # cannot use. This also restores consistency with the sibling
    # interfaces (trials, licks, pupil tracking, ROI motion energy).
    self.revision = one.list_revisions(session)[-1] if revision is None else revision

def get_metadata(self) -> dict:
metadata = super().get_metadata()
Expand All @@ -23,8 +24,8 @@ def get_metadata(self) -> dict:
return metadata

def add_to_nwbfile(self, nwbfile, metadata: dict):
wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf")
wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf")
wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf", revision=self.revision)
wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf", revision=self.revision)

# Estimate velocity and acceleration
interpolation_frequency = 1000.0 # Hz
Expand Down

0 comments on commit 3ab8de3

Please sign in to comment.