diff --git a/ibl_to_nwb/brainwide_map/convert_brainwide_map.py b/ibl_to_nwb/brainwide_map/convert_brainwide_map.py
index 82900f8..60c6850 100644
--- a/ibl_to_nwb/brainwide_map/convert_brainwide_map.py
+++ b/ibl_to_nwb/brainwide_map/convert_brainwide_map.py
@@ -145,13 +145,13 @@ def automatic_dandi_upload(
 
 base_path = Path("/home/jovyan/IBL")  # prototype on DANDI Hub for now
 
-#session_retrieval_one = ONE(
+# session_retrieval_one = ONE(
 #     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
-#)
-#brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+# )
+# brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
 
-#session = session_info["id"]
-session="3e7ae7c0-fe8b-487c-9354-036236fa1010"
+# session = session_info["id"]
+session = "3e7ae7c0-fe8b-487c-9354-036236fa1010"
 
 nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
 nwbfile_path.parent.mkdir(exist_ok=True)
@@ -180,15 +180,11 @@ def automatic_dandi_upload(
 data_interfaces = list()
 for stream_name in ap_stream_names:
     data_interfaces.append(
-        IblStreamingApInterface(
-            session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings"
-        )
+        IblStreamingApInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings")
     )
 for stream_name in lf_stream_names:
     data_interfaces.append(
-        IblStreamingLfInterface(
-            session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings"
-        )
+        IblStreamingLfInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings")
     )
 
 # These interfaces should always be present in source data
@@ -226,9 +222,7 @@ def automatic_dandi_upload(
 if stub_test:
     for data_interface_name in session_converter.data_interface_objects:
         if "Ap" in data_interface_name or "Lf" in data_interface_name:
-            conversion_options.update(
-                {data_interface_name: dict(stub_test=True)}
-            )
+            conversion_options.update({data_interface_name: dict(stub_test=True)})
 
 session_converter.run_conversion(
     nwbfile_path=nwbfile_path,
diff --git a/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel.py b/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel.py
index 4952ade..115a4e8 100644
--- a/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel.py
+++ b/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel.py
@@ -214,26 +214,32 @@ def convert_and_upload_session(
     session_converter = BrainwideMapConverter(
         one=session_one, session=session, data_interfaces=data_interfaces, verbose=False
     )
-    
+
     conversion_options = dict()
     if stub_test:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
-    
+
     session_converter.run_conversion(
         nwbfile_path=nwbfile_path,
         metadata=metadata,
diff --git a/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel_chunking_experiment.py b/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel_chunking_experiment.py
index 13a66e0..1cbecf8 100644
--- a/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel_chunking_experiment.py
+++ b/ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel_chunking_experiment.py
@@ -177,7 +177,7 @@ def convert_and_upload_session(
                 session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings"
             )
         )
-    #for stream_name in lf_stream_names:
+    # for stream_name in lf_stream_names:
     #     data_interfaces.append(
     #         IblStreamingLfInterface(
     #             session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings"
@@ -194,7 +194,9 @@ def convert_and_upload_session(
     for pose_estimation_file in pose_estimation_files:
         camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
         data_interfaces.append(
-            IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_video=False)
+            IblPoseEstimationInterface(
+                one=session_one, session=session, camera_name=camera_name, include_video=False
+            )
         )
 
     pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*")
@@ -231,19 +233,25 @@ def convert_and_upload_session(
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                        iterator_opts=dict(chunk_shape=(x,y)),
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                            iterator_opts=dict(chunk_shape=(x, y)),
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
            if "Ap" in data_interface_name in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                    iterator_opts=dict(chunk_shape=(x,y)),
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            iterator_opts=dict(chunk_shape=(x, y)),
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
     metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + f"-chunking-{x}-{y}"
@@ -276,16 +284,16 @@ def convert_and_upload_session(
 session_retrieval_one = ONE(
     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
 )
-#brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-brain_wide_sessions = [
-    "3e7ae7c0-fe8b-487c-9354-036236fa1010"
-] * 6
+# brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+brain_wide_sessions = ["3e7ae7c0-fe8b-487c-9354-036236fa1010"] * 6
 
 with ProcessPoolExecutor(max_workers=number_of_parallel_jobs) as executor:
     with tqdm(total=len(brain_wide_sessions), position=0, desc="Converting sessions...") as main_progress_bar:
         futures = []
         for progress_position, session in enumerate(brain_wide_sessions):
-            nwbfile_path = base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+            nwbfile_path = (
+                base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+            )
             nwbfile_path.parent.mkdir(exist_ok=True)
             futures.append(
                 executor.submit(
diff --git a/ibl_to_nwb/brainwide_map/convert_brainwide_map_processed_only.py b/ibl_to_nwb/brainwide_map/convert_brainwide_map_processed_only.py
index cc3c0ec..6897ab0 100644
--- a/ibl_to_nwb/brainwide_map/convert_brainwide_map_processed_only.py
+++ b/ibl_to_nwb/brainwide_map/convert_brainwide_map_processed_only.py
@@ -13,11 +13,11 @@
 from dandi.organize import organize as dandi_organize
 from dandi.upload import upload as dandi_upload
 from neuroconv.tools.data_transfers import automatic_dandi_upload
+from nwbinspector.tools import get_s3_urls_and_dandi_paths
 from one.api import ONE
 from pynwb import NWBHDF5IO
 from pynwb.image import ImageSeries
 from tqdm import tqdm
-from nwbinspector.tools import get_s3_urls_and_dandi_paths
 
 from ibl_to_nwb.updated_conversion.brainwide_map import BrainwideMapConverter
 from ibl_to_nwb.updated_conversion.brainwide_map.datainterfaces import (
@@ -34,26 +34,25 @@
     WheelInterface,
 )
 
-
 base_path = Path("/home/jovyan/IBL")  # prototype on DANDI Hub for now
 session = "d32876dd-8303-4720-8e7e-20678dc2fd71"
-#session_retrieval_one = ONE(
+# session_retrieval_one = ONE(
 #     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
-#)
-#brain_wide_sessions = [
+# )
+# brain_wide_sessions = [
 #     session_info["id"]
 #     for session_info in session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-#]
+# ]
 
 # Already written sessions
-#dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
-#dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
-#already_written_processed_sessions = [
+# dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
+# dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
+# already_written_processed_sessions = [
 #     processed_dandi_file_path.split("ses-")[1].split("_")[0].strip("-processed-only")
 #     for processed_dandi_file_path in dandi_processed_file_paths
-#]
-#sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
+# ]
+# sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
 
 nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
 nwbfile_path.parent.mkdir(exist_ok=True)
@@ -118,7 +117,9 @@
     overwrite=True,
 )
 automatic_dandi_upload(
-    dandiset_id="000409", nwb_folder_path=nwbfile_path.parent, cleanup=cleanup,
+    dandiset_id="000409",
+    nwb_folder_path=nwbfile_path.parent,
+    cleanup=cleanup,
 )
 if cleanup:
     rmtree(cache_folder)
diff --git a/ibl_to_nwb/brainwide_map/convert_brainwide_map_raw_only_parallel.py b/ibl_to_nwb/brainwide_map/convert_brainwide_map_raw_only_parallel.py
index ee32aee..1bd6fa6 100644
--- a/ibl_to_nwb/brainwide_map/convert_brainwide_map_raw_only_parallel.py
+++ b/ibl_to_nwb/brainwide_map/convert_brainwide_map_raw_only_parallel.py
@@ -189,7 +189,9 @@ def convert_and_upload_session(
     for pose_estimation_file in pose_estimation_files:
         camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
         data_interfaces.append(
-            IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_video=True, include_pose=False)
+            IblPoseEstimationInterface(
+                one=session_one, session=session, camera_name=camera_name, include_video=True, include_pose=False
+            )
         )
 
     # Run conversion
@@ -204,19 +206,25 @@ def convert_and_upload_session(
         for data_interface_name in session_converter.data_interface_objects:
            if "Ap" in data_interface_name or "Lf" in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                        iterator_opts=dict(chunk_shape=chunk_shape),
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                            iterator_opts=dict(chunk_shape=chunk_shape),
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
            if "Ap" in data_interface_name or "Lf" in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                    iterator_opts=dict(chunk_shape=chunk_shape),
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            iterator_opts=dict(chunk_shape=chunk_shape),
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
     metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + f"-raw-only"
@@ -249,16 +257,16 @@ def convert_and_upload_session(
 session_retrieval_one = ONE(
     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
 )
-#brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-brain_wide_sessions = [
-    "c51f34d8-42f6-4c9c-bb5b-669fd9c42cd9"
-]
+# brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+brain_wide_sessions = ["c51f34d8-42f6-4c9c-bb5b-669fd9c42cd9"]
 
 with ProcessPoolExecutor(max_workers=number_of_parallel_jobs) as executor:
     with tqdm(total=len(brain_wide_sessions), position=0, desc="Converting sessions...") as main_progress_bar:
         futures = []
         for progress_position, session in enumerate(brain_wide_sessions):
-            nwbfile_path = base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+            nwbfile_path = (
+                base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+            )
             nwbfile_path.parent.mkdir(exist_ok=True)
             futures.append(
                 executor.submit(
diff --git a/ibl_to_nwb/datainterfaces/iblposeestimationinterface.py b/ibl_to_nwb/datainterfaces/iblposeestimationinterface.py
index 192734a..4d745eb 100644
--- a/ibl_to_nwb/datainterfaces/iblposeestimationinterface.py
+++ b/ibl_to_nwb/datainterfaces/iblposeestimationinterface.py
@@ -82,8 +82,10 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
             pose_estimation_container = PoseEstimation(**pose_estimation_kwargs)
             behavior_module = get_module(nwbfile=nwbfile, name="behavior", description="Processed behavioral data.")
             behavior_module.add(pose_estimation_container)
-        
-        if self.include_video and self.one.list_datasets(eid=self.session, filename=f"raw_video_data/*{self.camera_name}*"):
+
+        if self.include_video and self.one.list_datasets(
+            eid=self.session, filename=f"raw_video_data/*{self.camera_name}*"
+        ):
             all_pose_estimation_series.append(pose_estimation_series)
 
         reused_timestamps = all_pose_estimation_series[0]  # trick for linking timestamps across series
diff --git a/ibl_to_nwb/datainterfaces/iblstreaminginterface.py b/ibl_to_nwb/datainterfaces/iblstreaminginterface.py
index 4844b8b..88b3ee2 100644
--- a/ibl_to_nwb/datainterfaces/iblstreaminginterface.py
+++ b/ibl_to_nwb/datainterfaces/iblstreaminginterface.py
@@ -18,7 +18,9 @@ class IblStreamingApInterface(BaseRecordingExtractorInterface):
 
     @classmethod
     def get_stream_names(cls, session: str):
-        return [stream_name for stream_name in cls.get_extractor().get_stream_names(session=session) if "ap" in stream_name]
+        return [
+            stream_name for stream_name in cls.get_extractor().get_stream_names(session=session) if "ap" in stream_name
+        ]
 
     def __init__(self, **kwargs):
         self.session = kwargs["session"]
@@ -145,8 +147,8 @@ def add_to_nwbfile(self, iterator_opts: dict, progress_position: int, **kwargs):
         kwargs.update(
             iterator_opts=dict(
                 display_progress=True,
-                #chunk_shape=(chunk_frames, 16),  # ~1 MB
-                #buffer_shape=(buffer_frames, 384),  # 100 MB
+                # chunk_shape=(chunk_frames, 16),  # ~1 MB
+                # buffer_shape=(buffer_frames, 384),  # 100 MB
                 buffer_gb=0.1,
                 progress_bar_options=dict(
                     desc=f"Converting stream '{self.stream_name}' session '{self.session}'...",
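
Note on the pattern the conversion_options hunks above keep reformatting — a minimal sketch, assuming the names used in these scripts (session_converter, stub_test, progress_position, chunk_shape all come from the surrounding code); the consolidation of the stub/non-stub branches into one loop is illustrative, not what the scripts currently do:

    # Build per-interface conversion options: only the streaming Ap/Lf interfaces
    # take stub_test / progress_position / iterator_opts, so the dict is keyed by
    # data-interface name and every other interface is left with its defaults.
    conversion_options = dict()
    for data_interface_name in session_converter.data_interface_objects:
        if "Ap" in data_interface_name or "Lf" in data_interface_name:
            options = dict(progress_position=progress_position)
            if stub_test:
                options["stub_test"] = True  # write only a small slice of the traces
            if chunk_shape is not None:
                # (frames, channels) chunking for the iterator, e.g. (chunk_frames, 16) ~ 1 MB
                options["iterator_opts"] = dict(chunk_shape=chunk_shape)
            conversion_options[data_interface_name] = options

    session_converter.run_conversion(
        nwbfile_path=nwbfile_path,
        metadata=metadata,
        conversion_options=conversion_options,
        overwrite=True,
    )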