From 9907435904b1106436ac66961a7cbb4a1ed2bd07 Mon Sep 17 00:00:00 2001
From: grg2rsr
Date: Thu, 24 Oct 2024 15:59:47 +0100
Subject: [PATCH 01/24] changes for testing conversions locally

---
 .../_scripts/convert_brainwide_map_processed_only.py | 6 ++++--
 .../_scripts/convert_brainwide_map_raw_only.py | 9 ++++++---
 src/ibl_to_nwb/converters/_brainwide_map_converter.py | 2 +-
 3 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py
index af1ba5e..3fb6c4c 100644
--- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py
+++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py
@@ -14,13 +14,15 @@
 )
 from ibl_to_nwb.testing import check_written_nwbfile_for_consistency

-session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71"
+# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71"
+session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe

 # Specify the revision of the pose estimation data
 # Setting to 'None' will use whatever the latest released revision is
 revision = None

-base_path = Path("E:/IBL")
+# base_path = Path("E:/IBL")
+base_path = Path.home() / "ibl_scratch" # local directory
 base_path.mkdir(exist_ok=True)
 nwbfiles_folder_path = base_path / "nwbfiles"
 nwbfiles_folder_path.mkdir(exist_ok=True)
diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py
index 09388d1..ebde212 100644
--- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py
+++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py
@@ -5,13 +5,16 @@
 from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter
 from ibl_to_nwb.datainterfaces import RawVideoInterface

-session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71"
+# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71"
+session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe
+

 # Specify the revision of the pose estimation data
 # Setting to 'None' will use whatever the latest released revision is
 revision = None

-base_path = Path("E:/IBL")
+# base_path = Path("E:/IBL")
+base_path = Path.home() / "ibl_scratch" # local directory
 base_path.mkdir(exist_ok=True)
 nwbfiles_folder_path = base_path / "nwbfiles"
 nwbfiles_folder_path.mkdir(exist_ok=True)
@@ -28,7 +31,7 @@
 # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps
 data_interfaces = []

-spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0")
+# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0")

 spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client)
 data_interfaces.append(spikeglx_subconverter)
diff --git a/src/ibl_to_nwb/converters/_brainwide_map_converter.py b/src/ibl_to_nwb/converters/_brainwide_map_converter.py
index a0aa4ca..a212a66 100644
--- a/src/ibl_to_nwb/converters/_brainwide_map_converter.py
+++ b/src/ibl_to_nwb/converters/_brainwide_map_converter.py
@@ -2,7 +2,7 @@

 from neuroconv.utils import dict_deep_update, load_dict_from_file

-from src.ibl_to_nwb.converters._iblconverter import IblConverter
+from ibl_to_nwb.converters._iblconverter import IblConverter


 class BrainwideMapConverter(IblConverter):
From 0c6b01bbff36b6cf27437030a94cf9dbaeff0e80 Mon Sep 17 00:00:00 2001
From: grg2rsr
Date: Thu, 24 Oct 2024 16:58:06 +0100
Subject: [PATCH 02/24] some
bugfixes to pass the processed-only checks --- ...inwide_map_processed_only_local_testing.py | 29 +++++++------------ src/ibl_to_nwb/testing/_consistency_checks.py | 11 +++++-- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 8200505..3961870 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -4,24 +4,12 @@ import os -# import traceback -# from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path from shutil import rmtree - -# from tempfile import mkdtemp -# from dandi.download import download as dandi_download -# from dandi.organize import organize as dandi_organize -# from dandi.upload import upload as dandi_upload -# from neuroconv.tools.data_transfers import automatic_dandi_upload -# from nwbinspector.tools import get_s3_urls_and_dandi_paths from one.api import ONE -# from pynwb import NWBHDF5IO -# from pynwb.image import ImageSeries -# from tqdm import tqdm -from ibl_to_nwb.brainwide_map import BrainwideMapConverter -from ibl_to_nwb.brainwide_map.datainterfaces import ( +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import ( BrainwideMapTrialsInterface, ) from ibl_to_nwb.datainterfaces import ( @@ -33,6 +21,8 @@ WheelInterface, ) +from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency + base_path = Path.home() / "ibl_scratch" # local directory # session = "d32876dd-8303-4720-8e7e-20678dc2fd71" session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe @@ -70,9 +60,10 @@ for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - IblPoseEstimationInterface( - one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False - ) + # IblPoseEstimationInterface( + # one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False + # ) + IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name) ) pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") @@ -94,7 +85,7 @@ ) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + "-processed-only" +metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] # + "-processed-only" session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -109,3 +100,5 @@ if cleanup: rmtree(cache_folder) rmtree(nwbfile_path.parent) + +check_written_nwbfile_for_consistency(one=session_one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index ec5484a..544745b 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -22,8 +22,14 @@ def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): nwbfile = io.read() eid = nwbfile.session_id + # run all consistentcy checks _check_wheel_data(eid=eid, nwbfile=nwbfile, one=one) - # TODO: fill in the rest of the routed calls + _check_lick_data(eid=eid, nwbfile=nwbfile, one=one) + _check_roi_motion_energy_data(eid=eid, 
nwbfile=nwbfile, one=one) + _check_pose_estimation_data(eid=eid, nwbfile=nwbfile, one=one) + _check_trials_data(eid=eid, nwbfile=nwbfile, one=one) + _check_pupil_tracking_data(eid=eid, nwbfile=nwbfile, one=one) + _check_spike_sorting_data(eid=eid, nwbfile=nwbfile, one=one) def _check_wheel_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = None): @@ -180,7 +186,6 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # get and prep data once for probe_name in probe_names: - # include revision TODO FIXME this will likely change - check back in with Miles if revision is not None: collection = f"alf/{probe_name}/pykilosort/{revision}" @@ -198,7 +203,7 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): for ix in units_table.index: probe_name = units_table.loc[ix, "probe_name"] - uuid = units_table.loc[ix, "uuid"] + uuid = units_table.loc[ix, "cluster_uuid"] spike_times_from_NWB = units_table.loc[ix, "spike_times"] cluster_id = np.where(cluster_uuids[probe_name] == uuid)[0][0] From a1614735e4f81de2923aa2a6804fc0909cc6f8d1 Mon Sep 17 00:00:00 2001 From: grg2rsr Date: Fri, 25 Oct 2024 15:09:30 +0100 Subject: [PATCH 03/24] for local testing --- ...rt_brainwide_map_raw_only_local_testing.py | 78 +++++++++++++++++++ .../converters/_ibl_spikeglx_converter.py | 49 ++++++------ 2 files changed, 104 insertions(+), 23 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py new file mode 100644 index 0000000..2695525 --- /dev/null +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -0,0 +1,78 @@ +from pathlib import Path + +from one.api import ONE + +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import RawVideoInterface + +# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe +data_folder = Path( + "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" +) +spikeglx_source_folder_path = data_folder / "raw_ephys_data" + +# Specify the revision of the pose estimation data +# Setting to 'None' will use whatever the latest released revision is +revision = None + +# base_path = Path("E:/IBL") +base_path = Path.home() / "ibl_scratch" # local directory +base_path.mkdir(exist_ok=True) +nwbfiles_folder_path = base_path / "nwbfiles" +nwbfiles_folder_path.mkdir(exist_ok=True) + +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "cache" +ibl_client = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + silent=True, + cache_dir=one_cache_folder_path, +) + +# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps +data_interfaces = [] + +# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") +spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client, eid=session_id) +data_interfaces.append(spikeglx_subconverter) + +# # Raw video takes some special handling +# metadata_retrieval = BrainwideMapConverter(one=ibl_client, session=session_id, data_interfaces=[], verbose=False) +# subject_id = 
metadata_retrieval.get_metadata()["Subject"]["subject_id"] + +# pose_estimation_files = ibl_client.list_datasets(eid=session_id, filename="*.dlc*") +# for pose_estimation_file in pose_estimation_files: +# camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + +# video_interface = RawVideoInterface( +# nwbfiles_folder_path=nwbfiles_folder_path, +# subject_id=subject_id, +# one=ibl_client, +# session=session_id, +# camera_name=camera_name, +# ) +# data_interfaces.append(video_interface) + +# Run conversion +session_converter = BrainwideMapConverter( + one=ibl_client, session=session_id, data_interfaces=data_interfaces, verbose=False +) + +metadata = session_converter.get_metadata() +metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] +subject_id = metadata["Subject"]["subject_id"] + +subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" +subject_folder_path.mkdir(exist_ok=True) +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{session_id}_desc-raw_ecephys+image.nwb" + +session_converter.run_conversion( + nwbfile_path=nwbfile_path, + metadata=metadata, + overwrite=True, +) + +# TODO: add some kind of raw-specific check +# check_written_nwbfile_for_consistency(one=ibl_client, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index 3f9cc9a..b52c74a 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -2,39 +2,42 @@ from one.api import ONE from pydantic import DirectoryPath from pynwb import NWBFile +import numpy as np +from brainbox.io.one import SpikeSortingLoader, EphysSessionLoader class IblSpikeGlxConverter(SpikeGLXConverterPipe): - - def __init__(self, folder_path: DirectoryPath, one: ONE) -> None: + def __init__(self, folder_path: DirectoryPath, one: ONE, eid: str) -> None: super().__init__(folder_path=folder_path) self.one = one + self.eid = eid # probably should better name this session_id ? 
def temporally_align_data_interfaces(self) -> None: """Align the raw data timestamps to the other data streams using the ONE API.""" # This is the syntax for aligning the raw timestamps; I cannot test this without the actual data as stored # on your end, so please work with Heberto if there are any problems after uncommenting - # probe_to_imec_map = { - # "probe00": 0, - # "probe01": 1, - # } - # - # ephys_session_loader = EphysSessionLoader(one=self.one, eid=session_id) - # probes = ephys_session_loader.probes - # for probe_name, pid in ephys_session_loader.probes.items(): - # spike_sorting_loader = SpikeSortingLoader(pid=pid, one=ibl_client) - # - # probe_index = probe_to_imec_map[probe_name] - # for band in ["ap", "lf"]: - # recording_interface = next( - # interface - # for interface in self.data_interface_objects - # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] - # ) - # - # band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) - # aligned_timestamps = spike_sorting_loader.samples2times(numpy.arange(0, band_info.ns), direction='forward') - # recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) + probe_to_imec_map = { + "probe00": 0, + "probe01": 1, + } + + ephys_session_loader = EphysSessionLoader(one=self.one, eid=self.eid) + probes = ephys_session_loader.probes + for probe_name, pid in ephys_session_loader.probes.items(): + spike_sorting_loader = SpikeSortingLoader(pid=pid, one=self.one) + + probe_index = probe_to_imec_map[probe_name] + for band in ["ap", "lf"]: + recording_interface = self.data_interface_objects[f"imec{probe_index}.{band}"] + # recording_interface = next( + # interface + # for interface in self.data_interface_objects + # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] + # ) + + band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, band_info.ns), direction="forward") + recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) pass def add_to_nwbfile(self, nwbfile: NWBFile, metadata) -> None: From caabeb6d45ee0b1b3fad9c38b67b97633c85ded2 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 10 Dec 2024 11:06:08 +0000 Subject: [PATCH 04/24] read after write for raw ephys and video data added --- src/ibl_to_nwb/testing/_consistency_checks.py | 94 +++++++++++++++++-- 1 file changed, 85 insertions(+), 9 deletions(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 544745b..7cc6811 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -5,6 +5,7 @@ from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile +from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): @@ -186,15 +187,10 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # get and prep data once for probe_name in probe_names: - # include revision TODO FIXME this will likely change - check back in with Miles - if revision is not None: - collection = f"alf/{probe_name}/pykilosort/{revision}" - else: - collection = f"alf/{probe_name}/pykilosort" - - spike_times[probe_name] = one.load_dataset(eid, "spikes.times", collection=collection) - spike_clusters[probe_name] = one.load_dataset(eid, "spikes.clusters", collection=collection) - 
cluster_uuids[probe_name] = one.load_dataset(eid, "clusters.uuids", collection=collection) + collection = f"alf/{probe_name}/pykilosort" + spike_times[probe_name] = one.load_dataset(eid, "spikes.times", collection=collection, revision=revision) + spike_clusters[probe_name] = one.load_dataset(eid, "spikes.clusters", collection=collection, revision=revision) + cluster_uuids[probe_name] = one.load_dataset(eid, "clusters.uuids", collection=collection, revision=revision) # pre-sort for fast access sort_ix = np.argsort(spike_clusters[probe_name]) @@ -214,3 +210,83 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # testing assert_array_less(np.max((spike_times_from_ONE - spike_times_from_NWB) * 30000), 1) + + +def _check_raw_ephys_data(*, eid: str, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): + # data_one + pids, pnames_one = one.eid2pid(eid) + pidname_map = dict(zip(pnames_one, pids)) + pid = pidname_map[pname] + spike_sorting_loader = SpikeSortingLoader(pid=pid, one=one) + sglx_streamer = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + data_one = sglx_streamer._raw + + pname_to_imec = { + "probe00": "Imec0", + "probe01": "Imec1", + } + imec_to_pname = dict(zip(pname_to_imec.values(), pname_to_imec.keys())) + imecs = [key.split(band.upper())[1] for key in list(nwbfile.acquisition.keys()) if band.upper() in key] + pnames_nwb = [imec_to_pname[imec] for imec in imecs] + + assert set(pnames_one) == set(pnames_nwb) + + # nwb ephys data + imec = pname_to_imec[pname] + data_nwb = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].data + + # compare number of samples in both + n_samples_one = data_one.shape[0] + n_samples_nwb = data_nwb.shape[0] + + assert n_samples_nwb == n_samples_one + + # draw a random set of samples and check if they are equal in value + n_samples, n_channels = data_nwb.shape + + ix = np.column_stack( + [ + np.random.randint(n_samples, size=10), + np.random.randint(n_channels, size=10), + ] + ) + + samples_nwb = np.array([data_nwb[*i] for i in ix]) + samples_one = np.array([data_one[*i] for i in ix]) + np.testing.assert_array_equal(samples_nwb, samples_one) + + # check the time stamps + nwb_timestamps = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].timestamps[:] + + # from brainbox.io + brainbox_timestamps = spike_sorting_loader.samples2times(np.arange(0, sglx_streamer.ns), direction="forward") + np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) + + +def _check_raw_video_data(*, eid: str, one: ONE, nwbfile: NWBFile, nwbfile_path: str): + # timestamps + datasets = one.list_datasets(eid, "*Camera.times*", collection="alf") + cameras = [key for key in nwbfile.acquisition.keys() if key.endswith("Camera")] + for camera in cameras: + timestamps_nwb = nwbfile.acquisition[camera].timestamps[:] + + dataset = [dataset for dataset in datasets if camera.split("OriginalVideo")[1].lower() in dataset.lower()] + timestamps_one = one.load_dataset(eid, dataset) + np.testing.assert_array_equal(timestamps_nwb, timestamps_one) + + # values (the first 100 bytes) + datasets = one.list_datasets(eid, collection="raw_video_data") + cameras = [key for key in nwbfile.acquisition.keys() if key.endswith("Camera")] + + for camera in cameras: + cam = camera.split("OriginalVideo")[1].lower() + dataset = [dataset for dataset in datasets if cam in dataset.lower()] + one_video_path = one.load_dataset(eid, dataset) + with open(one_video_path, "rb") as fH: + one_video_bytes = fH.read(100) + + nwb_video_path = 
nwbfile_path.parent / Path(nwbfile.acquisition[camera].external_file[:][0]) + with open(nwb_video_path, "rb") as fH: + nwb_video_bytes = fH.read(100) + + assert one_video_bytes == nwb_video_bytes From 3ab8de39293b4f35102d4d2f0a2d9fa350818807 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 11 Dec 2024 14:02:48 +0000 Subject: [PATCH 05/24] revision argument in all datainterfaces --- ...inwide_map_processed_only_local_testing.py | 16 ++-- ...rt_brainwide_map_raw_only_local_testing.py | 77 ++++++++++--------- .../datainterfaces/_brainwide_map_trials.py | 5 +- .../datainterfaces/_ibl_sorting_extractor.py | 7 +- .../_ibl_streaming_interface.py | 3 +- src/ibl_to_nwb/datainterfaces/_lick_times.py | 5 +- .../datainterfaces/_pose_estimation.py | 21 ++--- .../datainterfaces/_pupil_tracking.py | 5 +- .../datainterfaces/_roi_motion_energy.py | 5 +- .../datainterfaces/_wheel_movement.py | 7 +- 10 files changed, 83 insertions(+), 68 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 3961870..bddd4bd 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -34,6 +34,7 @@ cleanup: bool = False # assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!" +revision = None nwbfile_path.parent.mkdir(exist_ok=True) @@ -52,32 +53,29 @@ # These interfaces should always be present in source data data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting")) -data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session)) -data_interfaces.append(WheelInterface(one=session_one, session=session)) +data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) +data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision)) # These interfaces may not be present; check if they are before adding to list pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - # IblPoseEstimationInterface( - # one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False - # ) - IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name) + IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) ) pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name)) + data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name)) + 
data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): - data_interfaces.append(LickInterface(one=session_one, session=session)) + data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) # Run conversion session_converter = BrainwideMapConverter( diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 2695525..3c162c8 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -1,20 +1,22 @@ +# %% from pathlib import Path - from one.api import ONE - from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface -# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" -session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe -data_folder = Path( - "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" -) -spikeglx_source_folder_path = data_folder / "raw_ephys_data" +# eid = "d32876dd-8303-4720-8e7e-20678dc2fd71" +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe + +# %% +# one_cache_folder = '/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache' +# data_folder = Path( +# "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" +# ) +# spikeglx_source_folder_path = data_folder / "raw_ephys_data" # Specify the revision of the pose estimation data # Setting to 'None' will use whatever the latest released revision is -revision = None +# revision = None # base_path = Path("E:/IBL") base_path = Path.home() / "ibl_scratch" # local directory @@ -23,50 +25,55 @@ nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -one_cache_folder_path = base_path / "cache" -ibl_client = ONE( +# one_cache_folder_path = base_path / "cache" +one_cache_folder_path = "/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache" +one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True, cache_dir=one_cache_folder_path, ) -# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps data_interfaces = [] -# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") -spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client, eid=session_id) -data_interfaces.append(spikeglx_subconverter) +# %% ephys +# session_folder = one.eid2path(eid) +# spikeglx_source_folder_path = session_folder / 'raw_ephys_data' -# # Raw video takes some special handling -# metadata_retrieval = BrainwideMapConverter(one=ibl_client, session=session_id, data_interfaces=[], verbose=False) -# subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] -# pose_estimation_files = ibl_client.list_datasets(eid=session_id, filename="*.dlc*") -# for pose_estimation_file in pose_estimation_files: -# camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") +# Specify the path to the SpikeGLX files on the server but use ONE API for 
timestamps +# spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) +# data_interfaces.append(spikeglx_subconverter) + + +# %% video +# Raw video takes some special handling +metadata_retrieval = BrainwideMapConverter(one=one, session=eid, data_interfaces=[], verbose=False) +subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] -# video_interface = RawVideoInterface( -# nwbfiles_folder_path=nwbfiles_folder_path, -# subject_id=subject_id, -# one=ibl_client, -# session=session_id, -# camera_name=camera_name, -# ) -# data_interfaces.append(video_interface) +pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") +for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + + video_interface = RawVideoInterface( + nwbfiles_folder_path=nwbfiles_folder_path, + subject_id=subject_id, + one=one, + session=eid, + camera_name=camera_name, + ) + data_interfaces.append(video_interface) # Run conversion -session_converter = BrainwideMapConverter( - one=ibl_client, session=session_id, data_interfaces=data_interfaces, verbose=False -) +session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=False) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] +metadata["NWBFile"]["eid"] = metadata["NWBFile"]["eid"] subject_id = metadata["Subject"]["subject_id"] subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{session_id}_desc-raw_ecephys+image.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-video.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -75,4 +82,4 @@ ) # TODO: add some kind of raw-specific check -# check_written_nwbfile_for_consistency(one=ibl_client, nwbfile_path=nwbfile_path) +# check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 723b7d5..c7594f7 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -9,9 +9,10 @@ class BrainwideMapTrialsInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session)[-1] if revision is None else revision def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -20,7 +21,7 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - trials = self.one.load_object(id=self.session, obj="trials", collection="alf") + trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) column_ordering = [ "choice", diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 38cbc7e..b698da3 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: 
Optional[DirectoryPath] = None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions @@ -28,6 +28,9 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None): silent=True, cache_dir=cache_folder, ) + if revision is None: # latest + revision = one.list_revisions(session)[-1] + atlas = AllenAtlas() brain_regions = BrainRegions() @@ -45,7 +48,7 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None): for probe_name in probe_names: sorting_loader = SpikeSortingLoader(eid=session, one=one, pname=probe_name, atlas=atlas) sorting_loaders.update({probe_name: sorting_loader}) - spikes, clusters, channels = sorting_loader.load_spike_sorting() + spikes, clusters, channels = sorting_loader.load_spike_sorting(revision=revision) # cluster_ids.extend(list(np.array(clusters["metrics"]["cluster_id"]) + unit_id_per_probe_shift)) number_of_units = len(np.unique(spikes["clusters"])) cluster_ids.extend(list(np.arange(number_of_units).astype("int32") + unit_id_per_probe_shift)) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py index f8aac0b..5064633 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py @@ -81,7 +81,8 @@ def __init__(self, **kwargs): self.recording_extractor.set_property(key="ibl_y", values=ibl_coords[:, 1]) self.recording_extractor.set_property(key="ibl_z", values=ibl_coords[:, 2]) self.recording_extractor.set_property( # SpikeInterface refers to this as 'brain_area' - key="brain_area", values=list(channels["acronym"]) # NeuroConv remaps to 'location', a required field + key="brain_area", + values=list(channels["acronym"]), # NeuroConv remaps to 'location', a required field ) # Acronyms are symmetric, do not differentiate hemisphere self.recording_extractor.set_property( key="beryl_location", diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index 76f9a9e..375f854 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -7,12 +7,13 @@ class LickInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session)[-1] if revision is None else revision def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - licks = self.one.load_object(id=self.session, obj="licks", collection="alf") + licks = self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision) lick_events_table = DynamicTable( name="LickTimes", diff --git a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py index abf30d3..94946b7 100644 --- a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py +++ b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py @@ -37,17 +37,18 @@ def __init__( self.revision = revision if self.revision is None: - session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*") - revision_datetime_format = "%Y-%m-%d" - revisions = [ - datetime.strptime(session_file.split("#")[1], revision_datetime_format) - for session_file in session_files - if "#" in 
session_file - ] + self.revision = one.list_revisions(session)[-1] + # session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*") + # revision_datetime_format = "%Y-%m-%d" + # revisions = [ + # datetime.strptime(session_file.split("#")[1], revision_datetime_format) + # for session_file in session_files + # if "#" in session_file + # ] - if any(revisions): - most_recent = max(revisions) - self.revision = most_recent.strftime("%Y-%m-%d") + # if any(revisions): + # most_recent = max(revisions) + # self.revision = most_recent.strftime("%Y-%m-%d") def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict) -> None: camera_data = self.one.load_object( diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index c307ef6..0477795 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -12,10 +12,11 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str): + def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): self.one = one self.session = session self.camera_name = camera_name + self.revision = one.list_revisions(session)[-1] if revision is None else revision def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -28,7 +29,7 @@ def get_metadata(self) -> dict: def add_to_nwbfile(self, nwbfile, metadata: dict): left_or_right = self.camera_name[:5].rstrip("C") - camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf") + camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf", revision=self.revision) pupil_time_series = list() for ibl_key in ["pupilDiameter_raw", "pupilDiameter_smooth"]: diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 4218647..8ea21d3 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -7,17 +7,18 @@ class RoiMotionEnergyInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str): + def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): self.one = one self.session = session self.camera_name = camera_name + self.revision = one.list_revisions(session)[-1] if revision is None else revision def add_to_nwbfile(self, nwbfile, metadata: dict): left_right_or_body = self.camera_name[:5].rstrip("C") camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf") motion_energy_video_region = self.one.load_object( - id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf" + id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf", revision=self.revision ) width, height, x, y = motion_energy_video_region["position"] diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 234b307..00ec234 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -11,9 +11,10 @@ class WheelInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session) if revision is None else revision def get_metadata(self) -> 
dict: metadata = super().get_metadata() @@ -23,8 +24,8 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): - wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf") - wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf") + wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf", revision=self.revision) + wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf", revision=self.revision) # Estimate velocity and acceleration interpolation_frequency = 1000.0 # Hz From 58ccd217f734c87327cc092e5a0c8f28c592b6ff Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 11 Dec 2024 14:18:42 +0000 Subject: [PATCH 06/24] cleanups --- ...inwide_map_processed_only_local_testing.py | 1 - ...rt_brainwide_map_raw_only_local_testing.py | 33 +++++--------- src/ibl_to_nwb/_scripts/download_data.py | 44 +++++++++++++++++++ 3 files changed, 54 insertions(+), 24 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/download_data.py diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index bddd4bd..836c8a8 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -24,7 +24,6 @@ from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency base_path = Path.home() / "ibl_scratch" # local directory -# session = "d32876dd-8303-4720-8e7e-20678dc2fd71" session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 3c162c8..37b4751 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -4,29 +4,18 @@ from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface -# eid = "d32876dd-8303-4720-8e7e-20678dc2fd71" -eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe +# select eid +# -> run download_data_local first with this eid to set up the local folder structure and one cache +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" -# %% -# one_cache_folder = '/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache' -# data_folder = Path( -# "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" -# ) -# spikeglx_source_folder_path = data_folder / "raw_ephys_data" - -# Specify the revision of the pose estimation data -# Setting to 'None' will use whatever the latest released revision is -# revision = None - -# base_path = Path("E:/IBL") -base_path = Path.home() / "ibl_scratch" # local directory +# folders +base_path = Path.home() / "ibl_scratch" base_path.mkdir(exist_ok=True) nwbfiles_folder_path = base_path / "nwbfiles" nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -# one_cache_folder_path = base_path / "cache" -one_cache_folder_path = "/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache" 
+one_cache_folder_path = base_path / 'ibl_conversion' / eid / 'cache' one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", @@ -37,14 +26,12 @@ data_interfaces = [] # %% ephys -# session_folder = one.eid2path(eid) -# spikeglx_source_folder_path = session_folder / 'raw_ephys_data' - +session_folder = one.eid2path(eid) +spikeglx_source_folder_path = session_folder / 'raw_ephys_data' # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps -# spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) -# data_interfaces.append(spikeglx_subconverter) - +spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) +data_interfaces.append(spikeglx_subconverter) # %% video # Raw video takes some special handling diff --git a/src/ibl_to_nwb/_scripts/download_data.py b/src/ibl_to_nwb/_scripts/download_data.py new file mode 100644 index 0000000..b0e07ca --- /dev/null +++ b/src/ibl_to_nwb/_scripts/download_data.py @@ -0,0 +1,44 @@ +# %% +from pathlib import Path +from one.api import ONE + +# %% +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM eid with dual probe + +base_path = Path.home() / "ibl_scratch" # local directory + +# Download behavior and spike sorted data for this eid +session_path = base_path / "ibl_conversion" / eid +cache_folder = base_path / "ibl_conversion" / eid / "cache" +session_one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + silent=False, + cache_dir=cache_folder, +) + +# %% latest revision +revisions = session_one.list_revisions(eid) +revision = revisions[-1] + +# %% list all datasets +datasets = session_one.list_datasets(eid) + +# %% list all collections +collections = session_one.list_collections(eid) + +# %% +for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) + +# %% downloads all raw ephys data! 
+collections = session_one.list_collections(eid, collection="raw_ephys_data/*") +for collection in collections: + datasets = session_one.list_datasets(eid, collection=collection) + for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) + +# %% just the video data +datasets = session_one.list_datasets(eid, collection="raw_video_data") +for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) From 5e17eeca245dbbd9b0408177b64d898908affc70 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:48:28 +0000 Subject: [PATCH 07/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- ..._brainwide_map_processed_only_local_testing.py | 15 ++++++++------- ...onvert_brainwide_map_raw_only_local_testing.py | 6 ++++-- src/ibl_to_nwb/_scripts/download_data.py | 1 + .../converters/_ibl_spikeglx_converter.py | 4 ++-- .../datainterfaces/_ibl_sorting_extractor.py | 2 +- src/ibl_to_nwb/datainterfaces/_pose_estimation.py | 1 - src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 4 +++- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 8 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..6a3da46 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -3,16 +3,14 @@ os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying import os - from pathlib import Path from shutil import rmtree + from one.api import ONE -from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.converters import BrainwideMapConverter from ibl_to_nwb.datainterfaces import ( BrainwideMapTrialsInterface, -) -from ibl_to_nwb.datainterfaces import ( IblPoseEstimationInterface, IblSortingInterface, LickInterface, @@ -20,7 +18,6 @@ RoiMotionEnergyInterface, WheelInterface, ) - from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency base_path = Path.home() / "ibl_scratch" # local directory @@ -66,12 +63,16 @@ pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) + data_interfaces.append( + PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + ) roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) + data_interfaces.append( + RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + ) if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) diff --git 
a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 37b4751..904acdf 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -1,6 +1,8 @@ # %% from pathlib import Path + from one.api import ONE + from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface @@ -15,7 +17,7 @@ nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -one_cache_folder_path = base_path / 'ibl_conversion' / eid / 'cache' +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", @@ -27,7 +29,7 @@ # %% ephys session_folder = one.eid2path(eid) -spikeglx_source_folder_path = session_folder / 'raw_ephys_data' +spikeglx_source_folder_path = session_folder / "raw_ephys_data" # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) diff --git a/src/ibl_to_nwb/_scripts/download_data.py b/src/ibl_to_nwb/_scripts/download_data.py index b0e07ca..93fd184 100644 --- a/src/ibl_to_nwb/_scripts/download_data.py +++ b/src/ibl_to_nwb/_scripts/download_data.py @@ -1,5 +1,6 @@ # %% from pathlib import Path + from one.api import ONE # %% diff --git a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index b52c74a..a9bf223 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -1,9 +1,9 @@ +import numpy as np +from brainbox.io.one import EphysSessionLoader, SpikeSortingLoader from neuroconv.converters import SpikeGLXConverterPipe from one.api import ONE from pydantic import DirectoryPath from pynwb import NWBFile -import numpy as np -from brainbox.io.one import SpikeSortingLoader, EphysSessionLoader class IblSpikeGlxConverter(SpikeGLXConverterPipe): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index b698da3..c7bb03f 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -28,7 +28,7 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, r silent=True, cache_dir=cache_folder, ) - if revision is None: # latest + if revision is None: # latest revision = one.list_revisions(session)[-1] atlas = AllenAtlas() diff --git a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py index 94946b7..5e0e49f 100644 --- a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py +++ b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py @@ -1,4 +1,3 @@ -from datetime import datetime from typing import Optional import numpy as np diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index 0477795..fb7b626 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -29,7 +29,9 @@ def get_metadata(self) -> dict: def add_to_nwbfile(self, nwbfile, metadata: dict): left_or_right = self.camera_name[:5].rstrip("C") - 
camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf", revision=self.revision) + camera_data = self.one.load_object( + id=self.session, obj=self.camera_name, collection="alf", revision=self.revision + ) pupil_time_series = list() for ibl_key in ["pupilDiameter_raw", "pupilDiameter_smooth"]: diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 7cc6811..dc922df 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np +from brainbox.io.one import SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile -from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): From 7999b4a81e66416d8da6d0fb4448467f320521c1 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:06:20 -0600 Subject: [PATCH 08/24] add signature to sorting interface --- .../convert_brainwide_map_processed_only_local_testing.py | 3 +-- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 8 ++++++++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..29c991f 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -25,9 +25,8 @@ base_path = Path.home() / "ibl_scratch" # local directory session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe - nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" -nwbfile_path.parent.mkdir(exist_ok=True) +nwbfile_path.parent.mkdir(exist_ok=True, parents=True) stub_test: bool = False cleanup: bool = False diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index b698da3..dc7dc68 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str]=None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 18c478f..2d5401d 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -1,6 +1,8 @@ """The interface for loading spike sorted data via ONE access.""" from pathlib import Path +from typing import Optional +from pydantic import DirectoryPath from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import ( BaseSortingExtractorInterface, @@ -13,6 +15,7 @@ class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor + def 
get_metadata(self) -> dict: metadata = super().get_metadata() @@ -27,3 +30,8 @@ def get_metadata(self) -> dict: ) return metadata + + + + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None, verbose: bool = False): + super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) \ No newline at end of file From c58da111c8a59a14fe3624797c348ac40b3321e1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:08:47 +0000 Subject: [PATCH 09/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../datainterfaces/_ibl_sorting_extractor.py | 2 +- .../datainterfaces/_ibl_sorting_interface.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index ad5777c..54e374c 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str]=None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 2d5401d..8f81e4f 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -2,12 +2,12 @@ from pathlib import Path from typing import Optional -from pydantic import DirectoryPath from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import ( BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file +from pydantic import DirectoryPath from ._ibl_sorting_extractor import IblSortingExtractor @@ -15,7 +15,6 @@ class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -31,7 +30,11 @@ def get_metadata(self) -> dict: return metadata - - - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None, verbose: bool = False): - super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) \ No newline at end of file + def __init__( + self, + session: str, + cache_folder: Optional[DirectoryPath] = None, + revision: Optional[str] = None, + verbose: bool = False, + ): + super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) From 6c805835c2842aac3a27351d3d3e14ba587c531a Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:30:08 -0600 Subject: [PATCH 10/24] fix typing --- src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py | 3 ++- src/ibl_to_nwb/datainterfaces/_lick_times.py | 4 +++- src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index c7594f7..e82b548 100644 --- 
a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Optional from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface @@ -9,7 +10,7 @@ class BrainwideMapTrialsInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index 375f854..f94f71d 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -1,3 +1,5 @@ +from typing import Optional + from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module @@ -7,7 +9,7 @@ class LickInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 00ec234..f5420f5 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Optional from brainbox.behavior import wheel as wheel_methods from neuroconv.basedatainterface import BaseDataInterface @@ -11,7 +12,7 @@ class WheelInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session) if revision is None else revision From 9a1c01cd26804f56ce2439a1658d5397471f993a Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:48:31 -0600 Subject: [PATCH 11/24] fix more typing errors --- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 3 ++- src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index fb7b626..9b4901f 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -1,6 +1,7 @@ """Data Interface for the pupil tracking.""" from pathlib import Path +from typing import Optional import numpy as np from neuroconv.basedatainterface import BaseDataInterface @@ -12,7 +13,7 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str | None] = None): self.one = one self.session = session self.camera_name = camera_name diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 8ea21d3..0a40f2e 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -1,4 +1,5 @@ 
"""Data Interface for the special data type of ROI Motion Energy.""" +from typing import Optional from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module @@ -7,7 +8,7 @@ class RoiMotionEnergyInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): self.one = one self.session = session self.camera_name = camera_name From 2b9c4bf96dc2cf9377de9d26e2acecd8aa9025e5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:48:40 +0000 Subject: [PATCH 12/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 0a40f2e..a3c6007 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -1,4 +1,5 @@ """Data Interface for the special data type of ROI Motion Energy.""" + from typing import Optional from neuroconv.basedatainterface import BaseDataInterface From 5f9d77e66b9863dde32cf60f06c97dd2b6a9180b Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 10:02:58 -0600 Subject: [PATCH 13/24] optional --- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index 9b4901f..c0c4972 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -13,7 +13,7 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str | None] = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): self.one = one self.session = session self.camera_name = camera_name From 05d29588be4c3b9964609471d2517bf8e171ee26 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 10:34:10 +0000 Subject: [PATCH 14/24] integration of mine and hebertos changes --- .../convert_brainwide_map_processed_only_local_testing.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 7 +++++-- src/ibl_to_nwb/testing/_consistency_checks.py | 3 +++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..be5c0ce 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -51,7 +51,7 @@ data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting")) +data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting", revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) data_interfaces.append(WheelInterface(one=session_one, session=session, 
revision=revision)) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 18c478f..47d65af 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -8,11 +8,14 @@ from neuroconv.utils import load_dict_from_file from ._ibl_sorting_extractor import IblSortingExtractor - +from typing import Optional +from pydantic import DirectoryPath class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): + super().__init__(session=session, cache_folder=cache_folder, revision=revision) + def get_metadata(self) -> dict: metadata = super().get_metadata() diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 7cc6811..f3cc375 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -175,6 +175,9 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: units_table = nwbfile.units[:] probe_names = units_table["probe_name"].unique() + if revision is None: + revision = one.list_revisions(eid)[-1] + spike_times = {} spike_clusters = {} cluster_uuids = {} From e608b5dcee31d8a3bffbe91865e21ff2ce1344db Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 10:38:43 +0000 Subject: [PATCH 15/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index ea4407d..40fb865 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -10,14 +10,14 @@ from pydantic import DirectoryPath from ._ibl_sorting_extractor import IblSortingExtractor -from typing import Optional -from pydantic import DirectoryPath + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): super().__init__(session=session, cache_folder=cache_folder, revision=revision) - + def get_metadata(self) -> dict: metadata = super().get_metadata() From 22feb6a9424a8d02e6948b5b7ba5835899861e25 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 11:12:27 +0000 Subject: [PATCH 16/24] added automatic last revision to consistency checking --- src/ibl_to_nwb/testing/_consistency_checks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index e129c33..6714772 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np -from brainbox.io.one import SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile +from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: 
Path): @@ -176,7 +176,7 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: probe_names = units_table["probe_name"].unique() if revision is None: - revision = one.list_revisions(eid)[-1] + revision = one.list_revisions(session)[-1] spike_times = {} spike_clusters = {} From 76fc999f024bae786ad723b57e5e15d145a8e4fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:16:06 +0000 Subject: [PATCH 17/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 6714772..c9b4c83 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np +from brainbox.io.one import SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile -from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): From 62d7a400143e23e8a978b96c8b57834c76f8eab5 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 14:58:38 +0100 Subject: [PATCH 18/24] output path related fixes / cleanups --- ...inwide_map_processed_only_local_testing.py | 65 ++++++++++--------- ...rt_brainwide_map_raw_only_local_testing.py | 3 +- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 3 files changed, 38 insertions(+), 32 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 8d9450d..f45f83a 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -20,10 +20,15 @@ ) from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency -base_path = Path.home() / "ibl_scratch" # local directory -session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe -nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" -nwbfile_path.parent.mkdir(exist_ok=True, parents=True) +# select eid +# -> run download_data_local first with this eid to set up the local folder structure and one cache +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + +# folders +base_path = Path.home() / "ibl_scratch" +base_path.mkdir(exist_ok=True) +nwbfiles_folder_path = base_path / "nwbfiles" +nwbfiles_folder_path.mkdir(exist_ok=True) stub_test: bool = False cleanup: bool = False @@ -31,58 +36,59 @@ # assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!" 
revision = None -nwbfile_path.parent.mkdir(exist_ok=True) - -# Download behavior and spike sorted data for this session -session_path = base_path / "ibl_conversion" / session -cache_folder = base_path / "ibl_conversion" / session / "cache" -session_one = ONE( +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" +one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", - silent=False, - cache_dir=cache_folder, + silent=True, + cache_dir=one_cache_folder_path, ) # Initialize as many of each interface as we need across the streams data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting", revision=revision)) -data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) -data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision)) +data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) +data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) +data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) # These interfaces may not be present; check if they are before adding to list -pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*") +pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") +pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") data_interfaces.append( - PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") +roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") data_interfaces.append( - RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): - data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) +if one.list_datasets(eid=eid, collection="alf", filename="licks*"): + data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) # Run conversion session_converter = BrainwideMapConverter( - one=session_one, session=session, data_interfaces=data_interfaces, verbose=True + one=one, session=eid, 
data_interfaces=data_interfaces, verbose=True ) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] # + "-processed-only" +subject_id = metadata["Subject"]["subject_id"] + +subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" +subject_folder_path.mkdir(exist_ok=True) +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -94,8 +100,9 @@ # nwb_folder_path=nwbfile_path.parent, # cleanup=cleanup, # ) -if cleanup: - rmtree(cache_folder) - rmtree(nwbfile_path.parent) -check_written_nwbfile_for_consistency(one=session_one, nwbfile_path=nwbfile_path) +# if cleanup: +# rmtree(cache_folder) +# rmtree(nwbfile_path.parent) + +check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 904acdf..11d3c1a 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -57,12 +57,11 @@ session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=False) metadata = session_converter.get_metadata() -metadata["NWBFile"]["eid"] = metadata["NWBFile"]["eid"] subject_id = metadata["Subject"]["subject_id"] subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-video.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index c9b4c83..e129c33 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -176,7 +176,7 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: probe_names = units_table["probe_name"].unique() if revision is None: - revision = one.list_revisions(session)[-1] + revision = one.list_revisions(eid)[-1] spike_times = {} spike_clusters = {} From 4202759ace2b7bed0656e1473988bba73b11a59e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 13:58:55 +0000 Subject: [PATCH 19/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- ...inwide_map_processed_only_local_testing.py | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index f45f83a..4fc8da9 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -4,7 +4,6 @@ import os from pathlib import Path -from shutil import rmtree from one.api import ONE @@ -49,7 +48,9 @@ data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) +data_interfaces.append( + 
IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision) +) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) @@ -57,31 +58,23 @@ pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") - data_interfaces.append( - IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append( - PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - data_interfaces.append( - RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) if one.list_datasets(eid=eid, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) # Run conversion -session_converter = BrainwideMapConverter( - one=one, session=eid, data_interfaces=data_interfaces, verbose=True -) +session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) metadata = session_converter.get_metadata() subject_id = metadata["Subject"]["subject_id"] From b640ee6d48025c06d71eec4df859ebd63ca4e8ac Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 18 Dec 2024 10:54:44 +0100 Subject: [PATCH 20/24] attempting to pass one to IblSortingInterface - fails currently by pydantic --- .gitignore | 1 + ...inwide_map_processed_only_local_testing.py | 6 ++-- ...rt_brainwide_map_raw_only_local_testing.py | 6 +--- .../datainterfaces/_brainwide_map_trials.py | 12 +++++--- .../datainterfaces/_ibl_sorting_extractor.py | 29 +++++++++++-------- .../datainterfaces/_ibl_sorting_interface.py | 22 +++++++------- src/ibl_to_nwb/datainterfaces/_lick_times.py | 5 +++- .../datainterfaces/_pupil_tracking.py | 6 ++-- .../datainterfaces/_wheel_movement.py | 6 ++-- 9 files changed, 53 insertions(+), 40 deletions(-) diff --git a/.gitignore b/.gitignore index 213258b..44e36ed 100644 --- a/.gitignore +++ b/.gitignore @@ -134,3 +134,4 @@ dmypy.json #misc endpoint_schemas/ tests/ +src/local \ No newline at end of file diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 4fc8da9..40415ae 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -48,9 +48,9 @@ data_interfaces = list() # These interfaces should always be 
present in source data -data_interfaces.append( - IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision) -) +# data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) + +data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 11d3c1a..04f1422 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -63,11 +63,7 @@ subject_folder_path.mkdir(exist_ok=True) nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" -session_converter.run_conversion( - nwbfile_path=nwbfile_path, - metadata=metadata, - overwrite=True, -) +session_converter.run_conversion(nwbfile_path=nwbfile_path, metadata=metadata, overwrite=True) # TODO: add some kind of raw-specific check # check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index e82b548..9b50398 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -7,6 +7,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals +from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): @@ -14,6 +15,8 @@ def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision + self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) + self.session_loader.load_trials() def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -22,7 +25,8 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) + # trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) + trials = self.session_loader.trials column_ordering = [ "choice", @@ -42,12 +46,12 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): VectorData( name="start_time", description="The beginning of the trial.", - data=trials["intervals"][:, 0], + data=trials["intervals_0"].values, ), VectorData( name="stop_time", description="The end of the trial.", - data=trials["intervals"][:, 1], + data=trials["intervals_1"].values, ), ] for ibl_key in column_ordering: @@ -55,7 +59,7 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): VectorData( name=metadata["Trials"][ibl_key]["name"], description=metadata["Trials"][ibl_key]["description"], - data=trials[ibl_key], + data=trials[ibl_key].values, ) ) nwbfile.add_time_intervals( diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 54e374c..760a3fb 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ 
b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -7,6 +7,10 @@ import pandas as pd from pydantic import DirectoryPath from spikeinterface import BaseSorting, BaseSortingSegment +from one.api import ONE +from brainbox.io.one import SpikeSortingLoader +from iblatlas.atlas import AllenAtlas +from iblatlas.regions import BrainRegions class IblSortingExtractor(BaseSorting): @@ -16,18 +20,19 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): - from brainbox.io.one import SpikeSortingLoader - from iblatlas.atlas import AllenAtlas - from iblatlas.regions import BrainRegions - from one.api import ONE - - one = ONE( - base_url="https://openalyx.internationalbrainlab.org", - password="international", - silent=True, - cache_dir=cache_folder, - ) + # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): + def __init__( + self, + one: ONE, + session: str, + revision: Optional[str] = None, + ): + # one = ONE( + # base_url="https://openalyx.internationalbrainlab.org", + # password="international", + # silent=True, + # cache_dir=cache_folder, + # ) if revision is None: # latest revision = one.list_revisions(session)[-1] diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 40fb865..a27471b 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -10,13 +10,21 @@ from pydantic import DirectoryPath from ._ibl_sorting_extractor import IblSortingExtractor - +from one.api import ONE class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): - super().__init__(session=session, cache_folder=cache_folder, revision=revision) + def __init__( + self, + one: ONE, + session: str, + # cache_folder: Optional[DirectoryPath] = None, + revision: Optional[str] = None, + # verbose: bool = False, + ): + # super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) + super().__init__(one=one, session=session, revision=revision) def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -33,11 +41,3 @@ def get_metadata(self) -> dict: return metadata - def __init__( - self, - session: str, - cache_folder: Optional[DirectoryPath] = None, - revision: Optional[str] = None, - verbose: bool = False, - ): - super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f94f71d..f885a70 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,16 +6,19 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable - +from brainbox.io.one import SessionLoader class LickInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision + # self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) + # self.session_loader.load_licks() def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): licks = 
self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision) + # licks = self.session_loader.licks lick_events_table = DynamicTable( name="LickTimes", diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index c0c4972..d8eb6bf 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -10,7 +10,7 @@ from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking - +from brainbox.io.one import SessionLoader class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): @@ -18,13 +18,15 @@ def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[ self.session = session self.camera_name = camera_name self.revision = one.list_revisions(session)[-1] if revision is None else revision + self.session_loader = SessionLoader(one=one, eid=session, revision=revision) + self.session_loader.load_pupil() def get_metadata(self) -> dict: metadata = super().get_metadata() pupils_metadata = load_dict_from_file(file_path=Path(__file__).parent.parent / "_metadata" / "pupils.yml") metadata.update(pupils_metadata) - + return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index f5420f5..31d8709 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -9,13 +9,15 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals - +from brainbox.io.one import SessionLoader class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session) if revision is None else revision + self.session_loader = SessionLoader(eid=session, one=one, revision=revision) + self.session_loader.load_wheel() def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -59,7 +61,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict): description=metadata["WheelPosition"]["description"], data=wheel["position"], timestamps=wheel["timestamps"], - unit="rad", + unit="radians", reference_frame="Initial angle at start time is zero. 
Counter-clockwise is positive.", ) ) From 6487907383d49c84c35cda43e3ba45fb3b44b514 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 09:55:23 +0000 Subject: [PATCH 21/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .gitignore | 2 +- src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 5 ++--- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 5 ++--- src/ibl_to_nwb/datainterfaces/_lick_times.py | 2 +- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 5 +++-- src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 44e36ed..5ca4667 100644 --- a/.gitignore +++ b/.gitignore @@ -134,4 +134,4 @@ dmypy.json #misc endpoint_schemas/ tests/ -src/local \ No newline at end of file +src/local diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 9b50398..e0a5688 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional +from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 760a3fb..75cbbaa 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -5,12 +5,11 @@ import numpy as np import pandas as pd -from pydantic import DirectoryPath -from spikeinterface import BaseSorting, BaseSortingSegment -from one.api import ONE from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from one.api import ONE +from spikeinterface import BaseSorting, BaseSortingSegment class IblSortingExtractor(BaseSorting): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index a27471b..61f88b6 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -7,10 +7,10 @@ BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file -from pydantic import DirectoryPath +from one.api import ONE from ._ibl_sorting_extractor import IblSortingExtractor -from one.api import ONE + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor @@ -40,4 +40,3 @@ def get_metadata(self) -> dict: ) return metadata - diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f885a70..b971a13 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,7 +6,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable -from brainbox.io.one import SessionLoader + class LickInterface(BaseDataInterface): def __init__(self, 
one: ONE, session: str, revision: Optional[str] = None): diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index d8eb6bf..5946c02 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -4,13 +4,14 @@ from typing import Optional import numpy as np +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking -from brainbox.io.one import SessionLoader + class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): @@ -26,7 +27,7 @@ def get_metadata(self) -> dict: pupils_metadata = load_dict_from_file(file_path=Path(__file__).parent.parent / "_metadata" / "pupils.yml") metadata.update(pupils_metadata) - + return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 31d8709..42dd014 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -2,6 +2,7 @@ from typing import Optional from brainbox.behavior import wheel as wheel_methods +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file @@ -9,7 +10,7 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader + class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): From 45cbaca11f009332a94bb099c6e4d78eb46a607f Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 18 Dec 2024 12:46:20 +0100 Subject: [PATCH 22/24] one instantiation removed in IblSortingInterface, but requires hack in neuroconv --- ...rt_brainwide_map_processed_only_local_testing.py | 7 ------- .../_scripts/convert_brainwide_map_raw_only.py | 2 ++ .../converters/_ibl_spikeglx_converter.py | 13 +++---------- .../datainterfaces/_brainwide_map_trials.py | 2 +- .../datainterfaces/_ibl_sorting_extractor.py | 12 +++--------- .../datainterfaces/_ibl_sorting_interface.py | 12 ++++-------- src/ibl_to_nwb/datainterfaces/_lick_times.py | 2 +- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 3 ++- src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 9 files changed, 18 insertions(+), 38 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 40415ae..d197a48 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -48,8 +48,6 @@ data_interfaces = list() # These interfaces should always be present in source data -# data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) - data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, 
revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) @@ -88,11 +86,6 @@ metadata=metadata, overwrite=True, ) -# automatic_dandi_upload( -# dandiset_id="000409", -# nwb_folder_path=nwbfile_path.parent, -# cleanup=cleanup, -# ) # if cleanup: # rmtree(cache_folder) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py index ebde212..0c0ebf1 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py @@ -32,6 +32,8 @@ data_interfaces = [] # spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") +session_folder = ibl_client.eid2path(session_id) +spikeglx_source_folder_path = session_folder / "raw_ephys_data" spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client) data_interfaces.append(spikeglx_subconverter) diff --git a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index a9bf223..8e63c7f 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -10,7 +10,7 @@ class IblSpikeGlxConverter(SpikeGLXConverterPipe): def __init__(self, folder_path: DirectoryPath, one: ONE, eid: str) -> None: super().__init__(folder_path=folder_path) self.one = one - self.eid = eid # probably should better name this session_id ? + self.eid = eid def temporally_align_data_interfaces(self) -> None: """Align the raw data timestamps to the other data streams using the ONE API.""" @@ -22,21 +22,14 @@ def temporally_align_data_interfaces(self) -> None: } ephys_session_loader = EphysSessionLoader(one=self.one, eid=self.eid) - probes = ephys_session_loader.probes for probe_name, pid in ephys_session_loader.probes.items(): spike_sorting_loader = SpikeSortingLoader(pid=pid, one=self.one) probe_index = probe_to_imec_map[probe_name] for band in ["ap", "lf"]: recording_interface = self.data_interface_objects[f"imec{probe_index}.{band}"] - # recording_interface = next( - # interface - # for interface in self.data_interface_objects - # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] - # ) - - band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) - aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, band_info.ns), direction="forward") + sl = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, sl.ns), direction="forward") recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) pass diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 9b50398..e0a5688 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional +from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py 
b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 760a3fb..9d50966 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -5,12 +5,12 @@ import numpy as np import pandas as pd -from pydantic import DirectoryPath -from spikeinterface import BaseSorting, BaseSortingSegment -from one.api import ONE from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from one.api import ONE +from pydantic import DirectoryPath +from spikeinterface import BaseSorting, BaseSortingSegment class IblSortingExtractor(BaseSorting): @@ -27,12 +27,6 @@ def __init__( session: str, revision: Optional[str] = None, ): - # one = ONE( - # base_url="https://openalyx.internationalbrainlab.org", - # password="international", - # silent=True, - # cache_dir=cache_folder, - # ) if revision is None: # latest revision = one.list_revisions(session)[-1] diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index a27471b..296b5b0 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -7,24 +7,21 @@ BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file -from pydantic import DirectoryPath +from one.api import ONE from ._ibl_sorting_extractor import IblSortingExtractor -from one.api import ONE + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor def __init__( self, - one: ONE, session: str, - # cache_folder: Optional[DirectoryPath] = None, + one: ONE, revision: Optional[str] = None, - # verbose: bool = False, ): - # super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) - super().__init__(one=one, session=session, revision=revision) + super().__init__(session=session, one=one, revision=revision) def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -40,4 +37,3 @@ def get_metadata(self) -> dict: ) return metadata - diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f885a70..b971a13 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,7 +6,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable -from brainbox.io.one import SessionLoader + class LickInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index d8eb6bf..bd2cc95 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -4,13 +4,14 @@ from typing import Optional import numpy as np +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking -from brainbox.io.one import SessionLoader + class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 
31d8709..42dd014 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -2,6 +2,7 @@ from typing import Optional from brainbox.behavior import wheel as wheel_methods +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file @@ -9,7 +10,7 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader + class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): From cff20a532dbc0daa4d6909b77633f9f83cb254e4 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 18 Dec 2024 15:56:36 +0100 Subject: [PATCH 23/24] for heberto --- ...ert_brainwide_map_processed_only_local_testing.py | 2 +- .../convert_brainwide_map_raw_only_local_testing.py | 2 +- .../datainterfaces/_ibl_sorting_extractor.py | 12 ++++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index d197a48..b3a4429 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -79,7 +79,7 @@ subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed_.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 04f1422..085b31b 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -61,7 +61,7 @@ subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw_ecephys+raw_video_.nwb" session_converter.run_conversion(nwbfile_path=nwbfile_path, metadata=metadata, overwrite=True) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 9a0b480..3602a79 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -10,6 +10,7 @@ from iblatlas.regions import BrainRegions from one.api import ONE from spikeinterface import BaseSorting, BaseSortingSegment +from neuroconv.utils import get_json_schema_from_method_signature class IblSortingExtractor(BaseSorting): @@ -19,6 +20,17 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" + def get_source_schema(cls) -> dict: + """ + Infer the JSON schema for the source_data from the method signature (annotation typing). + + Returns + ------- + dict + The JSON schema for the source_data. 
+ """ + return get_json_schema_from_method_signature(cls, exclude=["source_data", "one"]) + # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): def __init__( self, From 86144df71f26157ea9ac1dc9a02a8409b8f40b62 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:57:19 +0000 Subject: [PATCH 24/24] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 3602a79..2f9749c 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -8,9 +8,9 @@ from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from neuroconv.utils import get_json_schema_from_method_signature from one.api import ONE from spikeinterface import BaseSorting, BaseSortingSegment -from neuroconv.utils import get_json_schema_from_method_signature class IblSortingExtractor(BaseSorting):