
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Oct 9, 2023
1 parent 2b98a59 commit 09b79a3
Showing 7 changed files with 92 additions and 71 deletions.
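
The hunks below are mechanical style normalizations. The repository's pre-commit configuration is not shown in this diff, but the edits match what black (at a line length of roughly 120) and isort produce: spaces added around statement-level assignments, a space enforced after the comment hash, overlong lines wrapped, calls ending in a magic trailing comma exploded to one argument per line, and third-party imports alphabetized. A minimal sketch of that normalization, assuming black is among the configured hooks:

import black

source = 'session="3e7ae7c0-fe8b-487c-9354-036236fa1010"\n#session = session_info["id"]\n'

# black normalizes assignment spacing and comment spacing, matching the first hunk below
formatted = black.format_str(source, mode=black.Mode(line_length=120))
print(formatted)
# session = "3e7ae7c0-fe8b-487c-9354-036236fa1010"
# # session = session_info["id"]
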
22 changes: 8 additions & 14 deletions ibl_to_nwb/brainwide_map/convert_brainwide_map.py
@@ -145,13 +145,13 @@ def automatic_dandi_upload(
 
 base_path = Path("/home/jovyan/IBL")  # prototype on DANDI Hub for now
 
-#session_retrieval_one = ONE(
+# session_retrieval_one = ONE(
 #     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
-#)
-#brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+# )
+# brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
 
-#session = session_info["id"]
-session="3e7ae7c0-fe8b-487c-9354-036236fa1010"
+# session = session_info["id"]
+session = "3e7ae7c0-fe8b-487c-9354-036236fa1010"
 
 nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
 nwbfile_path.parent.mkdir(exist_ok=True)
@@ -180,15 +180,11 @@ def automatic_dandi_upload(
 data_interfaces = list()
 for stream_name in ap_stream_names:
     data_interfaces.append(
-        IblStreamingApInterface(
-            session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings"
-        )
+        IblStreamingApInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings")
     )
 for stream_name in lf_stream_names:
     data_interfaces.append(
-        IblStreamingLfInterface(
-            session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings"
-        )
+        IblStreamingLfInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings")
     )
 
 # These interfaces should always be present in source data
@@ -226,9 +222,7 @@ def automatic_dandi_upload(
 if stub_test:
     for data_interface_name in session_converter.data_interface_objects:
         if "Ap" in data_interface_name or "Lf" in data_interface_name:
-            conversion_options.update(
-                {data_interface_name: dict(stub_test=True)}
-            )
+            conversion_options.update({data_interface_name: dict(stub_test=True)})
 
 session_converter.run_conversion(
     nwbfile_path=nwbfile_path,
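
For context on the stub_test flag toggled above: neuroconv forwards conversion_options to each named interface in run_conversion, and stub_test=True makes the streaming interfaces write only a short leading segment of each stream, so the whole pipeline can be smoke-tested before a full conversion. The loop above is equivalent to this condensed sketch (it reuses the script's own session_converter variable):

conversion_options = {
    name: dict(stub_test=True)
    for name in session_converter.data_interface_objects
    if "Ap" in name or "Lf" in name
}
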
24 changes: 15 additions & 9 deletions ibl_to_nwb/brainwide_map/convert_brainwide_map_parallel.py
@@ -214,26 +214,32 @@ def convert_and_upload_session(
     session_converter = BrainwideMapConverter(
         one=session_one, session=session, data_interfaces=data_interfaces, verbose=False
     )
 
     conversion_options = dict()
     if stub_test:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
 
     session_converter.run_conversion(
         nwbfile_path=nwbfile_path,
         metadata=metadata,
@@ -177,7 +177,7 @@ def convert_and_upload_session(
                 session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings"
             )
         )
-    #for stream_name in lf_stream_names:
+    # for stream_name in lf_stream_names:
     #     data_interfaces.append(
     #         IblStreamingLfInterface(
     #             session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings"
@@ -194,7 +194,9 @@ def convert_and_upload_session(
     for pose_estimation_file in pose_estimation_files:
         camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
         data_interfaces.append(
-            IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_video=False)
+            IblPoseEstimationInterface(
+                one=session_one, session=session, camera_name=camera_name, include_video=False
+            )
         )
 
     pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*")
@@ -231,19 +233,25 @@ def convert_and_upload_session(
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                        iterator_opts=dict(chunk_shape=(x,y)),
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                            iterator_opts=dict(chunk_shape=(x, y)),
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                    iterator_opts=dict(chunk_shape=(x,y)),
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            iterator_opts=dict(chunk_shape=(x, y)),
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
     metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + f"-chunking-{x}-{y}"
@@ -276,16 +284,16 @@ def convert_and_upload_session(
     session_retrieval_one = ONE(
         base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
     )
-    #brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-    brain_wide_sessions = [
-        "3e7ae7c0-fe8b-487c-9354-036236fa1010"
-    ] * 6
+    # brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+    brain_wide_sessions = ["3e7ae7c0-fe8b-487c-9354-036236fa1010"] * 6
 
     with ProcessPoolExecutor(max_workers=number_of_parallel_jobs) as executor:
         with tqdm(total=len(brain_wide_sessions), position=0, desc="Converting sessions...") as main_progress_bar:
             futures = []
             for progress_position, session in enumerate(brain_wide_sessions):
-                nwbfile_path = base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+                nwbfile_path = (
+                    base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+                )
                 nwbfile_path.parent.mkdir(exist_ok=True)
                 futures.append(
                     executor.submit(
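
The tail of this hunk is collapsed, so the code that drains the submitted futures is not shown; the usual pattern for this executor-plus-tqdm setup, sketched under that assumption, is:

from concurrent.futures import as_completed

for future in as_completed(futures):
    future.result()  # re-raise any worker exception instead of silently dropping it
    main_progress_bar.update(1)
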
25 changes: 13 additions & 12 deletions ibl_to_nwb/brainwide_map/convert_brainwide_map_processed_only.py
@@ -13,11 +13,11 @@
 from dandi.organize import organize as dandi_organize
 from dandi.upload import upload as dandi_upload
 from neuroconv.tools.data_transfers import automatic_dandi_upload
+from nwbinspector.tools import get_s3_urls_and_dandi_paths
 from one.api import ONE
 from pynwb import NWBHDF5IO
 from pynwb.image import ImageSeries
 from tqdm import tqdm
-from nwbinspector.tools import get_s3_urls_and_dandi_paths
 
 from ibl_to_nwb.updated_conversion.brainwide_map import BrainwideMapConverter
 from ibl_to_nwb.updated_conversion.brainwide_map.datainterfaces import (
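
The import shuffle above is characteristic of isort, which alphabetizes imports within each section (assuming it is one of the configured hooks; the hook list itself is not part of this diff):

import isort

code = "from tqdm import tqdm\nfrom nwbinspector.tools import get_s3_urls_and_dandi_paths\n"
print(isort.code(code))  # nwbinspector.tools now sorts ahead of tqdm
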
@@ -34,26 +34,25 @@
     WheelInterface,
 )
 
-
 base_path = Path("/home/jovyan/IBL")  # prototype on DANDI Hub for now
 session = "d32876dd-8303-4720-8e7e-20678dc2fd71"
 
-#session_retrieval_one = ONE(
+# session_retrieval_one = ONE(
 #     base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
-#)
-#brain_wide_sessions = [
+# )
+# brain_wide_sessions = [
 #     session_info["id"]
 #     for session_info in session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-#]
+# ]
 
 # Already written sessions
-#dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
-#dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
-#already_written_processed_sessions = [
+# dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
+# dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
+# already_written_processed_sessions = [
 #     processed_dandi_file_path.split("ses-")[1].split("_")[0].strip("-processed-only")
 #     for processed_dandi_file_path in dandi_processed_file_paths
-#]
-#sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
+# ]
+# sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
 
 nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
 nwbfile_path.parent.mkdir(exist_ok=True)
@@ -118,7 +117,9 @@
     overwrite=True,
 )
 automatic_dandi_upload(
-    dandiset_id="000409", nwb_folder_path=nwbfile_path.parent, cleanup=cleanup,
+    dandiset_id="000409",
+    nwb_folder_path=nwbfile_path.parent,
+    cleanup=cleanup,
 )
 if cleanup:
     rmtree(cache_folder)
38 changes: 23 additions & 15 deletions ibl_to_nwb/brainwide_map/convert_brainwide_map_raw_only_parallel.py
@@ -189,7 +189,9 @@ def convert_and_upload_session(
     for pose_estimation_file in pose_estimation_files:
         camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
         data_interfaces.append(
-            IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_video=True, include_pose=False)
+            IblPoseEstimationInterface(
+                one=session_one, session=session, camera_name=camera_name, include_video=True, include_pose=False
+            )
         )
 
     # Run conversion
@@ -204,19 +206,25 @@ def convert_and_upload_session(
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
                 conversion_options.update(
-                    {data_interface_name: dict(
-                        progress_position=progress_position,
-                        stub_test=True,
-                        iterator_opts=dict(chunk_shape=chunk_shape),
-                    )}
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            stub_test=True,
+                            iterator_opts=dict(chunk_shape=chunk_shape),
+                        )
+                    }
                 )
     else:
         for data_interface_name in session_converter.data_interface_objects:
             if "Ap" in data_interface_name or "Lf" in data_interface_name:
-                conversion_options.update({data_interface_name: dict(
-                    progress_position=progress_position,
-                    iterator_opts=dict(chunk_shape=chunk_shape),
-                )})
+                conversion_options.update(
+                    {
+                        data_interface_name: dict(
+                            progress_position=progress_position,
+                            iterator_opts=dict(chunk_shape=chunk_shape),
+                        )
+                    }
+                )
 
     metadata = session_converter.get_metadata()
     metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + f"-raw-only"
@@ -249,16 +257,16 @@ def convert_and_upload_session(
     session_retrieval_one = ONE(
         base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
     )
-    #brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
-    brain_wide_sessions = [
-        "c51f34d8-42f6-4c9c-bb5b-669fd9c42cd9"
-    ]
+    # brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
+    brain_wide_sessions = ["c51f34d8-42f6-4c9c-bb5b-669fd9c42cd9"]
 
     with ProcessPoolExecutor(max_workers=number_of_parallel_jobs) as executor:
         with tqdm(total=len(brain_wide_sessions), position=0, desc="Converting sessions...") as main_progress_bar:
             futures = []
             for progress_position, session in enumerate(brain_wide_sessions):
-                nwbfile_path = base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+                nwbfile_path = (
+                    base_path / "nwbfiles" / f"{session}_{progress_position}" / f"{session}_{progress_position}.nwb"
+                )
                 nwbfile_path.parent.mkdir(exist_ok=True)
                 futures.append(
                     executor.submit(
6 changes: 4 additions & 2 deletions ibl_to_nwb/datainterfaces/iblposeestimationinterface.py
@@ -82,8 +82,10 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
             pose_estimation_container = PoseEstimation(**pose_estimation_kwargs)
             behavior_module = get_module(nwbfile=nwbfile, name="behavior", description="Processed behavioral data.")
             behavior_module.add(pose_estimation_container)
-
-        if self.include_video and self.one.list_datasets(eid=self.session, filename=f"raw_video_data/*{self.camera_name}*"):
+
+        if self.include_video and self.one.list_datasets(
+            eid=self.session, filename=f"raw_video_data/*{self.camera_name}*"
+        ):
             all_pose_estimation_series.append(pose_estimation_series)
 
             reused_timestamps = all_pose_estimation_series[0]  # trick for linking timestamps across series
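
The "trick" in the final context line is a pynwb feature: a TimeSeries (or a subclass such as PoseEstimationSeries) can itself be passed as the timestamps of another series, so one timestamps vector is stored once and linked from every other series. A self-contained sketch with illustrative names:

import numpy as np
from pynwb import TimeSeries

base = TimeSeries(name="base", data=np.zeros(10), unit="m", timestamps=np.linspace(0.0, 1.0, 10))
linked = TimeSeries(name="linked", data=np.ones(10), unit="m", timestamps=base)  # stores a link, not a copy
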
8 changes: 5 additions & 3 deletions ibl_to_nwb/datainterfaces/iblstreaminginterface.py
@@ -18,7 +18,9 @@ class IblStreamingApInterface(BaseRecordingExtractorInterface):
 
     @classmethod
     def get_stream_names(cls, session: str):
-        return [stream_name for stream_name in cls.get_extractor().get_stream_names(session=session) if "ap" in stream_name]
+        return [
+            stream_name for stream_name in cls.get_extractor().get_stream_names(session=session) if "ap" in stream_name
+        ]
 
     def __init__(self, **kwargs):
         self.session = kwargs["session"]
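
The "ap" substring filter above relies on the stream naming convention exposed by the underlying IBL extractor, in which each probe's streams are tagged by band (names like "probe00.ap" and "probe00.lf"; the exact strings here are illustrative):

stream_names = ["probe00.ap", "probe00.lf", "probe01.ap", "probe01.lf"]  # assumed examples
ap_streams = [name for name in stream_names if "ap" in name]
print(ap_streams)  # ['probe00.ap', 'probe01.ap']
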
@@ -145,8 +147,8 @@ def add_to_nwbfile(self, iterator_opts: dict, progress_position: int, **kwargs):
         kwargs.update(
             iterator_opts=dict(
                 display_progress=True,
-                #chunk_shape=(chunk_frames, 16),  # ~1 MB
-                #buffer_shape=(buffer_frames, 384),  # 100 MB
+                # chunk_shape=(chunk_frames, 16),  # ~1 MB
+                # buffer_shape=(buffer_frames, 384),  # 100 MB
                 buffer_gb=0.1,
                 progress_bar_options=dict(
                     desc=f"Converting stream '{self.stream_name}' session '{self.session}'...",
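
The size annotations on the commented-out shapes check out with quick arithmetic, assuming 2-byte int16 samples and the 384-channel layout implied by buffer_shape; chunk_frames and buffer_frames are defined elsewhere in this file, so the values below are hypothetical:

bytes_per_sample = 2
chunk_frames = 2**15  # hypothetical value that lands the chunk near 1 MB
print(chunk_frames * 16 * bytes_per_sample / 1e6, "MB per chunk")  # 1.048576
buffer_frames = int(0.1e9 / (384 * bytes_per_sample))  # frames fitting in buffer_gb=0.1
print(buffer_frames, "frames per 0.1 GB buffer")  # 130208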