Merge pull request #78 from catalystneuro/pre_commit
Setup modernization & DevOps
CodyCBakerPhD authored Sep 24, 2024

2 parents b33e0a0 + dbff76a commit a5933e4
Showing 45 changed files with 596 additions and 1,078 deletions.
35 changes: 0 additions & 35 deletions .github/workflows/add-to-dashboard.yml

This file was deleted.

35 changes: 0 additions & 35 deletions .github/workflows/workflow.yml

This file was deleted.

24 changes: 16 additions & 8 deletions .pre-commit-config.yaml
@@ -1,18 +1,26 @@

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.6.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace

- repo: https://github.com/psf/black
rev: 23.10.1
rev: 24.8.0
hooks:
- id: black
exclude: ^docs/
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
exclude: ^docs/

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.5
hooks:
- id: ruff
args: [ --fix ]

- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
hooks:
- id: codespell
additional_dependencies:
- tomli
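
For contributors, the refreshed hook set can be exercised locally with the standard pre-commit CLI. A minimal sketch, assuming pre-commit is installed into your working Python environment:

pip install pre-commit
pre-commit install            # register the git hook in this clone
pre-commit run --all-files    # run check-yaml, black, ruff, and codespell across the repo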
2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@
[![PyPI version](https://badge.fury.io/py/ibl-to-nwb.svg)](https://badge.fury.io/py/ibl-to-nwb)
[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)

This repository houses conversion piplines for the IBL data releases, including the Brain Wide Map project.
This repository houses conversion pipelines for the IBL data releases, including the Brain Wide Map project.



238 changes: 0 additions & 238 deletions ibl_to_nwb/brainwide_map/convert_brainwide_map.py

This file was deleted.

This file was deleted.

41 changes: 0 additions & 41 deletions ibl_to_nwb/repeated_site/convert_repeated_site.py

This file was deleted.

4 changes: 0 additions & 4 deletions ibl_to_nwb/repeated_site/experiment_metadata.yml

This file was deleted.

7 changes: 0 additions & 7 deletions ibl_to_nwb/repeated_site/repeatedsitebehaviorinterface.py

This file was deleted.

19 changes: 0 additions & 19 deletions ibl_to_nwb/repeated_site/repeatedsiteconverter.py

This file was deleted.

File renamed without changes.
87 changes: 74 additions & 13 deletions pyproject.toml
@@ -1,12 +1,56 @@
[build-system]
requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"

[tool.hatch.version]
source = "vcs"

[tool.hatch.build.targets.wheel]
packages = ["src/ibl-to-nwb"]

[project]
name = "ibl_to_nwb"
version="0.3.0"
authors = [
{ name="Cody Baker", email="cody.c.baker.phd@gmail.com" },
]
description = "Tools to convert IBL data to NWB format.."
readme = "README.md"
keywords = ["nwb", "dandi", "ibl"]
license = {file = "license.txt"}
requires-python = ">=3.9"
dependencies = [
"neuroconv",
"ndx-pose>=0.1.1",
"ndx-ibl==0.1.0",
"ONE-api",
"ibllib",
"iblatlas",
]
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Intended Audience :: Developers",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"License :: OSI Approved :: BSD License",
]



[tool.black]
line-length = 120
target-version = ['py38']
target-version = ['py312']
include = '\.pyi?$'
extend-exclude = '''
/(
|\.toml
|\.txt
\.toml
|\.yml
|\.txt
|\.sh
|\.git
|\.ini
@@ -18,14 +62,31 @@ extend-exclude = '''
| dist
)/
'''
force-exclude = '''
/(
/docs
/docs/*
)\
'''

[tool.isort]
profile = "black"
reverse_relative = true
known_first_party = ["ibl_to_nwb"]


[tool.ruff]
exclude = [
"*/__init__.py"
]
line-length = 120

[tool.ruff.lint]
select = ["F", "E", "I"]
ignore = [
"PTH123",
"D203",
"D212",
"T201",
"FIX002",
"TD003",
"TD002",
"S101",
"ICN001",
"INP001",
]
fixable = ["ALL"]

[tool.ruff.lint.isort]
relative-imports-order = "closest-to-furthest"
known-first-party = ["ibl_to_nwb"]
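
With the new [project] table and the Hatchling backend above, the package no longer relies on requirements.txt or setup.py-era commands to build. A rough sketch of the local workflow, assuming a recent pip and the PyPA "build" frontend (neither is mandated by this commit):

pip install -e .                        # editable install resolved through the hatchling backend
pip install build && python -m build    # produce sdist and wheel under dist/
ruff check --fix .                      # apply the [tool.ruff] settings above
black .                                 # apply the [tool.black] settings above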
Empty file removed requirements.txt
Empty file.
1 change: 1 addition & 0 deletions setup.py
@@ -1,4 +1,5 @@
"""Setup and package the ibl-to-nwb project."""

from pathlib import Path

from setuptools import setup
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,7 +1,6 @@
from pathlib import Path

from neuroconv.utils import dict_deep_update, load_dict_from_file
from one.api import ONE

from ..iblconverter import IblConverter

228 changes: 228 additions & 0 deletions src/ibl_to_nwb/brainwide_map/convert_brainwide_map.py
@@ -0,0 +1,228 @@
import os

os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying

import os
from pathlib import Path
from shutil import rmtree

# from neuroconv.tools.data_transfers import automatic_dandi_upload as neuroconv_automatic_dandi_upload
from one.api import ONE

from src.ibl_to_nwb.brainwide_map import BrainwideMapConverter
from src.ibl_to_nwb.brainwide_map.datainterfaces import (
BrainwideMapTrialsInterface,
)
from src.ibl_to_nwb.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
IblStreamingLfInterface,
LickInterface,
PupilTrackingInterface,
RoiMotionEnergyInterface,
WheelInterface,
)

# def automatic_dandi_upload(
# dandiset_id: str,
# nwb_folder_path: str,
# dandiset_folder_path: str = None,
# version: str = "draft",
# files_mode: str = "move",
# staging: bool = False,
# cleanup: bool = False,
# ):
# """
# Fully automated upload of NWBFiles to a DANDISet.
#
# Requires an API token set as an envrinment variable named DANDI_API_KEY.
#
# To set this in your bash terminal in Linux or MacOS, run
# export DANDI_API_KEY=...
# or in Windows
# set DANDI_API_KEY=...
#
# DO NOT STORE THIS IN ANY PUBLICLY SHARED CODE.
#
# Parameters
# ----------
# dandiset_id : str
# Six-digit string identifier for the DANDISet the NWBFiles will be uploaded to.
# nwb_folder_path : folder path
# Folder containing the NWBFiles to be uploaded.
# dandiset_folder_path : folder path, optional
# A separate folder location within which to download the dandiset.
# Used in cases where you do not have write permissions for the parent of the 'nwb_folder_path' directory.
# Default behavior downloads the DANDISet to a folder adjacent to the 'nwb_folder_path'.
# version : {None, "draft", "version"}
# The default is "draft".
# staging : bool, default: False
# Is the DANDISet hosted on the staging server? This is mostly for testing purposes.
# The default is False.
# cleanup : bool, default: False
# Whether to remove the dandiset folder path and nwb_folder_path.
# Defaults to False.
# """
# nwb_folder_path = Path(nwb_folder_path)
# dandiset_folder_path = (
# Path(mkdtemp(dir=nwb_folder_path.parent)) if dandiset_folder_path is None else dandiset_folder_path
# )
# dandiset_path = dandiset_folder_path / dandiset_id
# assert os.getenv("DANDI_API_KEY"), (
# "Unable to find environment variable 'DANDI_API_KEY'. "
# "Please retrieve your token from DANDI and set this environment variable."
# )
#
# url_base = "https://gui-staging.dandiarchive.org" if staging else "https://dandiarchive.org"
# dandiset_url = f"{url_base}/dandiset/{dandiset_id}/{version}"
# dandi_download(urls=dandiset_url, output_dir=str(dandiset_folder_path), get_metadata=True, get_assets=False)
# assert dandiset_path.exists(), "DANDI download failed!"
#
# dandi_organize(
# paths=str(nwb_folder_path),
# dandiset_path=str(dandiset_path),
# update_external_file_paths=True,
# files_mode=files_mode,
# media_files_mode=files_mode,
# )
# organized_nwbfiles = dandiset_path.rglob("*.nwb")
#
# # DANDI has yet to implement forcing of session_id inclusion in organize step
# # This manually enforces it when only a single session per subject is organized
# for organized_nwbfile in organized_nwbfiles:
# if "ses" not in organized_nwbfile.stem:
# with NWBHDF5IO(path=organized_nwbfile, mode="r", load_namespaces=True) as io:
# nwbfile = io.read()
# session_id = nwbfile.session_id
# dandi_stem = organized_nwbfile.stem
# dandi_stem_split = dandi_stem.split("_")
# dandi_stem_split.insert(1, f"ses-{session_id}")
# corrected_name = "_".join(dandi_stem_split) + ".nwb"
# organized_nwbfile.rename(organized_nwbfile.parent / corrected_name)
# organized_nwbfiles = list(dandiset_path.rglob("*.nwb"))
# # The above block can be removed once they add the feature
#
# # If any external images
# image_folders = set(dandiset_path.rglob("*image*")) - set(organized_nwbfiles)
# for image_folder in image_folders:
# if "ses" not in image_folder.stem and len(organized_nwbfiles) == 1: # Think about multiple file case
# corrected_name = "_".join(dandi_stem_split)
# image_folder = image_folder.rename(image_folder.parent / corrected_name)
#
# # For image in folder, rename
# with NWBHDF5IO(path=organized_nwbfiles[0], mode="r+", load_namespaces=True) as io:
# nwbfile = io.read()
# for _, object in nwbfile.objects.items():
# if isinstance(object, ImageSeries):
# this_external_file = image_folder / Path(str(object.external_file[0])).name
# corrected_name = "_".join(dandi_stem_split[:2]) + f"_{object.name}{this_external_file.suffix}"
# this_external_file = this_external_file.rename(this_external_file.parent / corrected_name)
# object.external_file[0] = "./" + str(this_external_file.relative_to(organized_nwbfile.parent))
#
# assert len(list(dandiset_path.iterdir())) > 1, "DANDI organize failed!"
#
# dandi_instance = "dandi-staging" if staging else "dandi"
# dandi_upload(paths=[dandiset_folder_path / dandiset_id], dandi_instance=dandi_instance)
#
# # Cleanup should be confirmed manually; Windows especially can complain
# if cleanup:
# try:
# rmtree(path=dandiset_folder_path)
# except PermissionError: # pragma: no cover
# warn("Unable to clean up source files and dandiset! Please manually delete them.", stacklevel=2)


base_path = Path("/home/jovyan/IBL") # prototype on DANDI Hub for now

# session_retrieval_one = ONE(
# base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
# )
# brain_wide_sessions = session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")

# session = session_info["id"]
session = "3e7ae7c0-fe8b-487c-9354-036236fa1010"

nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
nwbfile_path.parent.mkdir(exist_ok=True)

stub_test = False
cleanup = False
files_mode = "move"

assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!"

# Download behavior and spike sorted data for this session
session_path = base_path / "ibl_conversion" / session
cache_folder = base_path / "ibl_conversion" / session / "cache"
session_one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
password="international",
silent=True,
cache_dir=cache_folder,
)

# Get stream names from SI
ap_stream_names = IblStreamingApInterface.get_stream_names(session=session)
lf_stream_names = IblStreamingLfInterface.get_stream_names(session=session)

# Initialize as many of each interface as we need across the streams
data_interfaces = list()
for stream_name in ap_stream_names:
data_interfaces.append(
IblStreamingApInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "ap_recordings")
)
for stream_name in lf_stream_names:
data_interfaces.append(
IblStreamingLfInterface(session=session, stream_name=stream_name, cache_folder=cache_folder / "lf_recordings")
)

# These interfaces should always be present in source data
data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting"))
data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session))
data_interfaces.append(WheelInterface(one=session_one, session=session))

# These interfaces may not be present; check if they are before adding to list
pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*")
for pose_estimation_file in pose_estimation_files:
camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
data_interfaces.append(
IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_video=True)
)

pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*")
for pupil_tracking_file in pupil_tracking_files:
camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "")
data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name))

roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*")
for roi_motion_energy_file in roi_motion_energy_files:
camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "")
data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name))

if session_one.list_datasets(eid=session, collection="alf", filename="licks*"):
data_interfaces.append(LickInterface(one=session_one, session=session))

# Run conversion
session_converter = BrainwideMapConverter(
one=session_one, session=session, data_interfaces=data_interfaces, verbose=False
)

conversion_options = dict()
if stub_test:
for data_interface_name in session_converter.data_interface_objects:
if "Ap" in data_interface_name or "Lf" in data_interface_name:
conversion_options.update({data_interface_name: dict(stub_test=True)})

session_converter.run_conversion(
nwbfile_path=nwbfile_path,
metadata=session_converter.get_metadata(),
conversion_options=conversion_options,
overwrite=True,
)
# automatic_dandi_upload(
# dandiset_id="000409", nwb_folder_path=nwbfile_path.parent, cleanup=cleanup, files_mode=files_mode
# )
if cleanup:
rmtree(cache_folder)
rmtree(nwbfile_path.parent)
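
Note that the rewritten script imports through the src layout (from src.ibl_to_nwb ...), so one plausible invocation (an assumption, not something the commit documents) is from the repository root with the root on PYTHONPATH and the DANDI token exported; base_path is hard-coded for DANDI Hub and would need editing on other machines:

export DANDI_API_KEY=...      # required by the assertion near the top of the script
PYTHONPATH=. python src/ibl_to_nwb/brainwide_map/convert_brainwide_map.py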
@@ -7,22 +7,15 @@
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp

from dandi.download import download as dandi_download
from dandi.organize import organize as dandi_organize
from dandi.upload import upload as dandi_upload
from neuroconv.tools.data_transfers import automatic_dandi_upload
from one.api import ONE
from pynwb import NWBHDF5IO
from pynwb.image import ImageSeries
from tqdm import tqdm

from ibl_to_nwb.brainwide_map import BrainwideMapConverter
from ibl_to_nwb.brainwide_map.datainterfaces import (
from src.ibl_to_nwb.brainwide_map import BrainwideMapConverter
from src.ibl_to_nwb.brainwide_map.datainterfaces import (
BrainwideMapTrialsInterface,
)
from ibl_to_nwb.datainterfaces import (
from src.ibl_to_nwb.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
@@ -33,114 +26,113 @@
WheelInterface,
)


def automatic_dandi_upload(
dandiset_id: str,
nwb_folder_path: str,
dandiset_folder_path: str = None,
version: str = "draft",
files_mode: str = "move",
staging: bool = False,
cleanup: bool = False,
):
"""
Fully automated upload of NWBFiles to a DANDISet.
Requires an API token set as an envrinment variable named DANDI_API_KEY.
To set this in your bash terminal in Linux or MacOS, run
export DANDI_API_KEY=...
or in Windows
set DANDI_API_KEY=...
DO NOT STORE THIS IN ANY PUBLICLY SHARED CODE.
Parameters
----------
dandiset_id : str
Six-digit string identifier for the DANDISet the NWBFiles will be uploaded to.
nwb_folder_path : folder path
Folder containing the NWBFiles to be uploaded.
dandiset_folder_path : folder path, optional
A separate folder location within which to download the dandiset.
Used in cases where you do not have write permissions for the parent of the 'nwb_folder_path' directory.
Default behavior downloads the DANDISet to a folder adjacent to the 'nwb_folder_path'.
version : {None, "draft", "version"}
The default is "draft".
staging : bool, default: False
Is the DANDISet hosted on the staging server? This is mostly for testing purposes.
The default is False.
cleanup : bool, default: False
Whether to remove the dandiset folder path and nwb_folder_path.
Defaults to False.
"""
nwb_folder_path = Path(nwb_folder_path)
dandiset_folder_path = (
Path(mkdtemp(dir=nwb_folder_path.parent)) if dandiset_folder_path is None else dandiset_folder_path
)
dandiset_path = dandiset_folder_path / dandiset_id
assert os.getenv("DANDI_API_KEY"), (
"Unable to find environment variable 'DANDI_API_KEY'. "
"Please retrieve your token from DANDI and set this environment variable."
)

url_base = "https://gui-staging.dandiarchive.org" if staging else "https://dandiarchive.org"
dandiset_url = f"{url_base}/dandiset/{dandiset_id}/{version}"
dandi_download(urls=dandiset_url, output_dir=str(dandiset_folder_path), get_metadata=True, get_assets=False)
assert dandiset_path.exists(), "DANDI download failed!"

dandi_organize(
paths=str(nwb_folder_path),
dandiset_path=str(dandiset_path),
update_external_file_paths=True,
files_mode=files_mode,
media_files_mode=files_mode,
)
organized_nwbfiles = dandiset_path.rglob("*.nwb")

# DANDI has yet to implement forcing of session_id inclusion in organize step
# This manually enforces it when only a single sesssion per subject is organized
for organized_nwbfile in organized_nwbfiles:
if "ses" not in organized_nwbfile.stem:
with NWBHDF5IO(path=organized_nwbfile, mode="r", load_namespaces=True) as io:
nwbfile = io.read()
session_id = nwbfile.session_id
dandi_stem = organized_nwbfile.stem
dandi_stem_split = dandi_stem.split("_")
dandi_stem_split.insert(1, f"ses-{session_id}")
corrected_name = "_".join(dandi_stem_split) + ".nwb"
organized_nwbfile.rename(organized_nwbfile.parent / corrected_name)
organized_nwbfiles = list(dandiset_path.rglob("*.nwb"))
# The above block can be removed once they add the feature

# If any external images
image_folders = set(dandiset_path.rglob("*image*")) - set(organized_nwbfiles)
for image_folder in image_folders:
if "ses" not in image_folder.stem and len(organized_nwbfiles) == 1: # Think about multiple file case
corrected_name = "_".join(dandi_stem_split)
image_folder = image_folder.rename(image_folder.parent / corrected_name)

# For image in folder, rename
with NWBHDF5IO(path=organized_nwbfiles[0], mode="r+", load_namespaces=True) as io:
nwbfile = io.read()
for _, object in nwbfile.objects.items():
if isinstance(object, ImageSeries):
this_external_file = image_folder / Path(str(object.external_file[0])).name
corrected_name = "_".join(dandi_stem_split[:2]) + f"_{object.name}{this_external_file.suffix}"
this_external_file = this_external_file.rename(this_external_file.parent / corrected_name)
object.external_file[0] = "./" + str(this_external_file.relative_to(organized_nwbfile.parent))

assert len(list(dandiset_path.iterdir())) > 1, "DANDI organize failed!"

dandi_instance = "dandi-staging" if staging else "dandi"
dandi_upload(paths=[dandiset_folder_path / dandiset_id], dandi_instance=dandi_instance)

# Cleanup should be confirmed manually; Windows especially can complain
if cleanup:
try:
rmtree(path=dandiset_folder_path)
except PermissionError: # pragma: no cover
warn("Unable to clean up source files and dandiset! Please manually delete them.", stacklevel=2)
# def automatic_dandi_upload(
# dandiset_id: str,
# nwb_folder_path: str,
# dandiset_folder_path: str = None,
# version: str = "draft",
# files_mode: str = "move",
# staging: bool = False,
# cleanup: bool = False,
# ):
# """
# Fully automated upload of NWBFiles to a DANDISet.
#
# Requires an API token set as an envrinment variable named DANDI_API_KEY.
#
# To set this in your bash terminal in Linux or MacOS, run
# export DANDI_API_KEY=...
# or in Windows
# set DANDI_API_KEY=...
#
# DO NOT STORE THIS IN ANY PUBLICLY SHARED CODE.
#
# Parameters
# ----------
# dandiset_id : str
# Six-digit string identifier for the DANDISet the NWBFiles will be uploaded to.
# nwb_folder_path : folder path
# Folder containing the NWBFiles to be uploaded.
# dandiset_folder_path : folder path, optional
# A separate folder location within which to download the dandiset.
# Used in cases where you do not have write permissions for the parent of the 'nwb_folder_path' directory.
# Default behavior downloads the DANDISet to a folder adjacent to the 'nwb_folder_path'.
# version : {None, "draft", "version"}
# The default is "draft".
# staging : bool, default: False
# Is the DANDISet hosted on the staging server? This is mostly for testing purposes.
# The default is False.
# cleanup : bool, default: False
# Whether to remove the dandiset folder path and nwb_folder_path.
# Defaults to False.
# """
# nwb_folder_path = Path(nwb_folder_path)
# dandiset_folder_path = (
# Path(mkdtemp(dir=nwb_folder_path.parent)) if dandiset_folder_path is None else dandiset_folder_path
# )
# dandiset_path = dandiset_folder_path / dandiset_id
# assert os.getenv("DANDI_API_KEY"), (
# "Unable to find environment variable 'DANDI_API_KEY'. "
# "Please retrieve your token from DANDI and set this environment variable."
# )
#
# url_base = "https://gui-staging.dandiarchive.org" if staging else "https://dandiarchive.org"
# dandiset_url = f"{url_base}/dandiset/{dandiset_id}/{version}"
# dandi_download(urls=dandiset_url, output_dir=str(dandiset_folder_path), get_metadata=True, get_assets=False)
# assert dandiset_path.exists(), "DANDI download failed!"
#
# dandi_organize(
# paths=str(nwb_folder_path),
# dandiset_path=str(dandiset_path),
# update_external_file_paths=True,
# files_mode=files_mode,
# media_files_mode=files_mode,
# )
# organized_nwbfiles = dandiset_path.rglob("*.nwb")
#
# # DANDI has yet to implement forcing of session_id inclusion in organize step
# # This manually enforces it when only a single session per subject is organized
# for organized_nwbfile in organized_nwbfiles:
# if "ses" not in organized_nwbfile.stem:
# with NWBHDF5IO(path=organized_nwbfile, mode="r", load_namespaces=True) as io:
# nwbfile = io.read()
# session_id = nwbfile.session_id
# dandi_stem = organized_nwbfile.stem
# dandi_stem_split = dandi_stem.split("_")
# dandi_stem_split.insert(1, f"ses-{session_id}")
# corrected_name = "_".join(dandi_stem_split) + ".nwb"
# organized_nwbfile.rename(organized_nwbfile.parent / corrected_name)
# organized_nwbfiles = list(dandiset_path.rglob("*.nwb"))
# # The above block can be removed once they add the feature
#
# # If any external images
# image_folders = set(dandiset_path.rglob("*image*")) - set(organized_nwbfiles)
# for image_folder in image_folders:
# if "ses" not in image_folder.stem and len(organized_nwbfiles) == 1: # Think about multiple file case
# corrected_name = "_".join(dandi_stem_split)
# image_folder = image_folder.rename(image_folder.parent / corrected_name)
#
# # For image in folder, rename
# with NWBHDF5IO(path=organized_nwbfiles[0], mode="r+", load_namespaces=True) as io:
# nwbfile = io.read()
# for _, object in nwbfile.objects.items():
# if isinstance(object, ImageSeries):
# this_external_file = image_folder / Path(str(object.external_file[0])).name
# corrected_name = "_".join(dandi_stem_split[:2]) + f"_{object.name}{this_external_file.suffix}"
# this_external_file = this_external_file.rename(this_external_file.parent / corrected_name)
# object.external_file[0] = "./" + str(this_external_file.relative_to(organized_nwbfile.parent))
#
# assert len(list(dandiset_path.iterdir())) > 1, "DANDI organize failed!"
#
# dandi_instance = "dandi-staging" if staging else "dandi"
# dandi_upload(paths=[dandiset_folder_path / dandiset_id], dandi_instance=dandi_instance)
#
# # Cleanup should be confirmed manually; Windows especially can complain
# if cleanup:
# try:
# rmtree(path=dandiset_folder_path)
# except PermissionError: # pragma: no cover
# warn("Unable to clean up source files and dandiset! Please manually delete them.", stacklevel=2)


def convert_and_upload_session(
@@ -156,7 +148,7 @@ def convert_and_upload_session(
assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!"

# Download behavior and spike sorted data for this session
session_path = base_path / "ibl_conversion" / session
# session_path = base_path / "ibl_conversion" / session
cache_folder = base_path / "ibl_conversion" / session / "cache"
session_one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
@@ -246,9 +238,9 @@ def convert_and_upload_session(
conversion_options=conversion_options,
overwrite=True,
)
automatic_dandi_upload(
dandiset_id="000409", nwb_folder_path=nwbfile_path.parent, cleanup=cleanup, files_mode=files_mode
)
# automatic_dandi_upload(
# dandiset_id="000409", nwb_folder_path=nwbfile_path.parent, cleanup=cleanup, files_mode=files_mode
# )
if cleanup:
rmtree(cache_folder)
rmtree(nwbfile_path.parent)
@@ -3,69 +3,34 @@
os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying

import os
import traceback
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp

from dandi.download import download as dandi_download
from dandi.organize import organize as dandi_organize
from dandi.upload import upload as dandi_upload
from neuroconv.tools.data_transfers import automatic_dandi_upload
from nwbinspector.tools import get_s3_urls_and_dandi_paths

from one.api import ONE
from pynwb import NWBHDF5IO
from pynwb.image import ImageSeries
from tqdm import tqdm

from ibl_to_nwb.updated_conversion.brainwide_map import BrainwideMapConverter
from ibl_to_nwb.updated_conversion.brainwide_map.datainterfaces import (
from src.ibl_to_nwb import (
BrainwideMapConverter,
BrainwideMapTrialsInterface,
)
from ibl_to_nwb.updated_conversion.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
IblStreamingLfInterface,
LickInterface,
PupilTrackingInterface,
RoiMotionEnergyInterface,
WheelInterface,
)

base_path = Path("/home/jovyan/IBL") # prototype on DANDI Hub for now
session = "d32876dd-8303-4720-8e7e-20678dc2fd71"
stub_test = False
cleanup = False

# session_retrieval_one = ONE(
# base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
# )
# brain_wide_sessions = [
# session_info["id"]
# for session_info in session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
# ]

# Already written sessions
# dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
# dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
# already_written_processed_sessions = [
# processed_dandi_file_path.split("ses-")[1].split("_")[0].strip("-processed-only")
# for processed_dandi_file_path in dandi_processed_file_paths
# ]
# sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
base_path = Path("E:/IBL")
session = "d32876dd-8303-4720-8e7e-20678dc2fd71"

nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
nwbfile_path.parent.mkdir(exist_ok=True)

stub_test: bool = False
cleanup: bool = False

assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!"
nwbfile_path.parent.mkdir(parents=True, exist_ok=True)

nwbfile_path.parent.mkdir(exist_ok=True)

# Download behavior and spike sorted data for this session
session_path = base_path / "ibl_conversion" / session
# session_path = base_path / "ibl_conversion" / session
cache_folder = base_path / "ibl_conversion" / session / "cache"
session_one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
@@ -116,11 +81,3 @@
metadata=metadata,
overwrite=True,
)
automatic_dandi_upload(
dandiset_id="000409",
nwb_folder_path=nwbfile_path.parent,
cleanup=cleanup,
)
if cleanup:
rmtree(cache_folder)
rmtree(nwbfile_path.parent)
@@ -3,31 +3,30 @@
os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying

import os

# import traceback
# from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from shutil import rmtree
# from tempfile import mkdtemp

# from tempfile import mkdtemp
# from dandi.download import download as dandi_download
# from dandi.organize import organize as dandi_organize
# from dandi.upload import upload as dandi_upload
# from neuroconv.tools.data_transfers import automatic_dandi_upload
# from nwbinspector.tools import get_s3_urls_and_dandi_paths
from one.api import ONE

# from pynwb import NWBHDF5IO
# from pynwb.image import ImageSeries
# from tqdm import tqdm

from ibl_to_nwb.brainwide_map import BrainwideMapConverter
from ibl_to_nwb.brainwide_map.datainterfaces import (
from src.ibl_to_nwb.brainwide_map import BrainwideMapConverter
from src.ibl_to_nwb.brainwide_map.datainterfaces import (
BrainwideMapTrialsInterface,
)
from ibl_to_nwb.datainterfaces import (
from src.ibl_to_nwb.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
IblStreamingLfInterface,
LickInterface,
PupilTrackingInterface,
RoiMotionEnergyInterface,
@@ -36,24 +35,7 @@

base_path = Path.home() / "ibl_scratch" # local directory
# session = "d32876dd-8303-4720-8e7e-20678dc2fd71"
session = 'caa5dddc-9290-4e27-9f5e-575ba3598614' # a BWM sesssion with dual probe

# session_retrieval_one = ONE(
# base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True
# )
# brain_wide_sessions = [
# session_info["id"]
# for session_info in session_retrieval_one.alyx.rest(url="sessions", action="list", tag="2022_Q4_IBL_et_al_BWM")
# ]

# Already written sessions
# dandi_file_paths = list(get_s3_urls_and_dandi_paths(dandiset_id="000409").values())
# dandi_processed_file_paths = [dandi_file_path for dandi_file_path in dandi_file_paths if "processed" in dandi_file_path]
# already_written_processed_sessions = [
# processed_dandi_file_path.split("ses-")[1].split("_")[0].strip("-processed-only")
# for processed_dandi_file_path in dandi_processed_file_paths
# ]
# sessions_to_run = list(set(brain_wide_sessions) - set(already_written_processed_sessions))
session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe

nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb"
nwbfile_path.parent.mkdir(exist_ok=True)
@@ -88,7 +70,9 @@
for pose_estimation_file in pose_estimation_files:
camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "")
data_interfaces.append(
IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False)
IblPoseEstimationInterface(
one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False
)
)

pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*")
@@ -7,27 +7,19 @@
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp

from dandi.download import download as dandi_download
from dandi.organize import organize as dandi_organize
from dandi.upload import upload as dandi_upload
from neuroconv.tools.data_transfers import automatic_dandi_upload
from nwbinspector.tools import get_s3_urls_and_dandi_paths
from one.api import ONE
from pynwb import NWBHDF5IO
from pynwb.image import ImageSeries
from tqdm import tqdm

from ibl_to_nwb.brainwide_map import BrainwideMapConverter
from ibl_to_nwb.brainwide_map.datainterfaces import (
from src.ibl_to_nwb.brainwide_map import BrainwideMapConverter
from src.ibl_to_nwb.brainwide_map.datainterfaces import (
BrainwideMapTrialsInterface,
)
from ibl_to_nwb.datainterfaces import (
from src.ibl_to_nwb.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
IblStreamingLfInterface,
LickInterface,
PupilTrackingInterface,
RoiMotionEnergyInterface,
@@ -50,7 +42,7 @@ def convert_and_upload_parallel_processed_only(
nwbfile_path.parent.mkdir(exist_ok=True)

# Download behavior and spike sorted data for this session
session_path = base_path / "ibl_conversion" / session
# session_path = base_path / "ibl_conversion" / session
cache_folder = base_path / "ibl_conversion" / session / "cache"
session_one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
@@ -7,140 +7,125 @@
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp

from dandi.download import download as dandi_download
from dandi.organize import organize as dandi_organize
from dandi.upload import upload as dandi_upload
from neuroconv.tools.data_transfers import automatic_dandi_upload
from one.api import ONE
from pynwb import NWBHDF5IO
from pynwb.image import ImageSeries
from tqdm import tqdm

from ibl_to_nwb.brainwide_map import BrainwideMapConverter
from ibl_to_nwb.brainwide_map.datainterfaces import (
BrainwideMapTrialsInterface,
)
from ibl_to_nwb.datainterfaces import (
from src.ibl_to_nwb.brainwide_map import BrainwideMapConverter
from src.ibl_to_nwb.datainterfaces import (
IblPoseEstimationInterface,
IblSortingInterface,
IblStreamingApInterface,
IblStreamingLfInterface,
LickInterface,
PupilTrackingInterface,
RoiMotionEnergyInterface,
WheelInterface,
)


def automatic_dandi_upload(
dandiset_id: str,
nwb_folder_path: str,
dandiset_folder_path: str = None,
version: str = "draft",
files_mode: str = "move",
staging: bool = False,
cleanup: bool = False,
):
"""
Fully automated upload of NWBFiles to a DANDISet.
Requires an API token set as an envrinment variable named DANDI_API_KEY.
To set this in your bash terminal in Linux or MacOS, run
export DANDI_API_KEY=...
or in Windows
set DANDI_API_KEY=...
DO NOT STORE THIS IN ANY PUBLICLY SHARED CODE.
Parameters
----------
dandiset_id : str
Six-digit string identifier for the DANDISet the NWBFiles will be uploaded to.
nwb_folder_path : folder path
Folder containing the NWBFiles to be uploaded.
dandiset_folder_path : folder path, optional
A separate folder location within which to download the dandiset.
Used in cases where you do not have write permissions for the parent of the 'nwb_folder_path' directory.
Default behavior downloads the DANDISet to a folder adjacent to the 'nwb_folder_path'.
version : {None, "draft", "version"}
The default is "draft".
staging : bool, default: False
Is the DANDISet hosted on the staging server? This is mostly for testing purposes.
The default is False.
cleanup : bool, default: False
Whether to remove the dandiset folder path and nwb_folder_path.
Defaults to False.
"""
nwb_folder_path = Path(nwb_folder_path)
dandiset_folder_path = (
Path(mkdtemp(dir=nwb_folder_path.parent)) if dandiset_folder_path is None else dandiset_folder_path
)
dandiset_path = dandiset_folder_path / dandiset_id
assert os.getenv("DANDI_API_KEY"), (
"Unable to find environment variable 'DANDI_API_KEY'. "
"Please retrieve your token from DANDI and set this environment variable."
)

url_base = "https://gui-staging.dandiarchive.org" if staging else "https://dandiarchive.org"
dandiset_url = f"{url_base}/dandiset/{dandiset_id}/{version}"
dandi_download(urls=dandiset_url, output_dir=str(dandiset_folder_path), get_metadata=True, get_assets=False)
assert dandiset_path.exists(), "DANDI download failed!"

dandi_organize(
paths=str(nwb_folder_path),
dandiset_path=str(dandiset_path),
update_external_file_paths=True,
files_mode=files_mode,
media_files_mode=files_mode,
)
organized_nwbfiles = dandiset_path.rglob("*.nwb")

# DANDI has yet to implement forcing of session_id inclusion in organize step
# This manually enforces it when only a single sesssion per subject is organized
for organized_nwbfile in organized_nwbfiles:
if "ses" not in organized_nwbfile.stem:
with NWBHDF5IO(path=organized_nwbfile, mode="r", load_namespaces=True) as io:
nwbfile = io.read()
session_id = nwbfile.session_id
dandi_stem = organized_nwbfile.stem
dandi_stem_split = dandi_stem.split("_")
dandi_stem_split.insert(1, f"ses-{session_id}")
corrected_name = "_".join(dandi_stem_split) + ".nwb"
organized_nwbfile.rename(organized_nwbfile.parent / corrected_name)
organized_nwbfiles = list(dandiset_path.rglob("*.nwb"))
# The above block can be removed once they add the feature

# If any external images
image_folders = set(dandiset_path.rglob("*image*")) - set(organized_nwbfiles)
for image_folder in image_folders:
if "ses" not in image_folder.stem and len(organized_nwbfiles) == 1: # Think about multiple file case
corrected_name = "_".join(dandi_stem_split)
image_folder = image_folder.rename(image_folder.parent / corrected_name)

# For image in folder, rename
with NWBHDF5IO(path=organized_nwbfiles[0], mode="r+", load_namespaces=True) as io:
nwbfile = io.read()
for _, object in nwbfile.objects.items():
if isinstance(object, ImageSeries):
this_external_file = image_folder / Path(str(object.external_file[0])).name
corrected_name = "_".join(dandi_stem_split[:2]) + f"_{object.name}{this_external_file.suffix}"
this_external_file = this_external_file.rename(this_external_file.parent / corrected_name)
object.external_file[0] = "./" + str(this_external_file.relative_to(organized_nwbfile.parent))

assert len(list(dandiset_path.iterdir())) > 1, "DANDI organize failed!"

dandi_instance = "dandi-staging" if staging else "dandi"
dandi_upload(paths=[dandiset_folder_path / dandiset_id], dandi_instance=dandi_instance)

# Cleanup should be confirmed manually; Windows especially can complain
if cleanup:
try:
rmtree(path=dandiset_folder_path)
except PermissionError: # pragma: no cover
warn("Unable to clean up source files and dandiset! Please manually delete them.", stacklevel=2)
# def automatic_dandi_upload(
# dandiset_id: str,
# nwb_folder_path: str,
# dandiset_folder_path: str = None,
# version: str = "draft",
# files_mode: str = "move",
# staging: bool = False,
# cleanup: bool = False,
# ):
# """
# Fully automated upload of NWBFiles to a DANDISet.
#
# Requires an API token set as an envrinment variable named DANDI_API_KEY.
#
# To set this in your bash terminal in Linux or MacOS, run
# export DANDI_API_KEY=...
# or in Windows
# set DANDI_API_KEY=...
#
# DO NOT STORE THIS IN ANY PUBLICLY SHARED CODE.
#
# Parameters
# ----------
# dandiset_id : str
# Six-digit string identifier for the DANDISet the NWBFiles will be uploaded to.
# nwb_folder_path : folder path
# Folder containing the NWBFiles to be uploaded.
# dandiset_folder_path : folder path, optional
# A separate folder location within which to download the dandiset.
# Used in cases where you do not have write permissions for the parent of the 'nwb_folder_path' directory.
# Default behavior downloads the DANDISet to a folder adjacent to the 'nwb_folder_path'.
# version : {None, "draft", "version"}
# The default is "draft".
# staging : bool, default: False
# Is the DANDISet hosted on the staging server? This is mostly for testing purposes.
# The default is False.
# cleanup : bool, default: False
# Whether to remove the dandiset folder path and nwb_folder_path.
# Defaults to False.
# """
# nwb_folder_path = Path(nwb_folder_path)
# dandiset_folder_path = (
# Path(mkdtemp(dir=nwb_folder_path.parent)) if dandiset_folder_path is None else dandiset_folder_path
# )
# dandiset_path = dandiset_folder_path / dandiset_id
# assert os.getenv("DANDI_API_KEY"), (
# "Unable to find environment variable 'DANDI_API_KEY'. "
# "Please retrieve your token from DANDI and set this environment variable."
# )
#
# url_base = "https://gui-staging.dandiarchive.org" if staging else "https://dandiarchive.org"
# dandiset_url = f"{url_base}/dandiset/{dandiset_id}/{version}"
# dandi_download(urls=dandiset_url, output_dir=str(dandiset_folder_path), get_metadata=True, get_assets=False)
# assert dandiset_path.exists(), "DANDI download failed!"
#
# dandi_organize(
# paths=str(nwb_folder_path),
# dandiset_path=str(dandiset_path),
# update_external_file_paths=True,
# files_mode=files_mode,
# media_files_mode=files_mode,
# )
# organized_nwbfiles = dandiset_path.rglob("*.nwb")
#
# # DANDI has yet to implement forcing of session_id inclusion in organize step
# # This manually enforces it when only a single session per subject is organized
# for organized_nwbfile in organized_nwbfiles:
# if "ses" not in organized_nwbfile.stem:
# with NWBHDF5IO(path=organized_nwbfile, mode="r", load_namespaces=True) as io:
# nwbfile = io.read()
# session_id = nwbfile.session_id
# dandi_stem = organized_nwbfile.stem
# dandi_stem_split = dandi_stem.split("_")
# dandi_stem_split.insert(1, f"ses-{session_id}")
# corrected_name = "_".join(dandi_stem_split) + ".nwb"
# organized_nwbfile.rename(organized_nwbfile.parent / corrected_name)
# organized_nwbfiles = list(dandiset_path.rglob("*.nwb"))
# # The above block can be removed once they add the feature
#
# # If any external images
# image_folders = set(dandiset_path.rglob("*image*")) - set(organized_nwbfiles)
# for image_folder in image_folders:
# if "ses" not in image_folder.stem and len(organized_nwbfiles) == 1: # Think about multiple file case
# corrected_name = "_".join(dandi_stem_split)
# image_folder = image_folder.rename(image_folder.parent / corrected_name)
#
# # For image in folder, rename
# with NWBHDF5IO(path=organized_nwbfiles[0], mode="r+", load_namespaces=True) as io:
# nwbfile = io.read()
# for _, object in nwbfile.objects.items():
# if isinstance(object, ImageSeries):
# this_external_file = image_folder / Path(str(object.external_file[0])).name
# corrected_name = "_".join(dandi_stem_split[:2]) + f"_{object.name}{this_external_file.suffix}"
# this_external_file = this_external_file.rename(this_external_file.parent / corrected_name)
# object.external_file[0] = "./" + str(this_external_file.relative_to(organized_nwbfile.parent))
#
# assert len(list(dandiset_path.iterdir())) > 1, "DANDI organize failed!"
#
# dandi_instance = "dandi-staging" if staging else "dandi"
# dandi_upload(paths=[dandiset_folder_path / dandiset_id], dandi_instance=dandi_instance)
#
# # Cleanup should be confirmed manually; Windows especially can complain
# if cleanup:
# try:
# rmtree(path=dandiset_folder_path)
# except PermissionError: # pragma: no cover
# warn("Unable to clean up source files and dandiset! Please manually delete them.", stacklevel=2)


def convert_and_upload_session(
@@ -156,7 +141,7 @@ def convert_and_upload_session(
assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!"

# Download behavior and spike sorted data for this session
session_path = base_path / "ibl_conversion" / f"{session}_{progress_position}"
# session_path = base_path / "ibl_conversion" / f"{session}_{progress_position}"
cache_folder = base_path / "ibl_conversion" / f"{session}_{progress_position}" / "cache"
session_one = ONE(
base_url="https://openalyx.internationalbrainlab.org",
@@ -227,7 +212,7 @@ def convert_and_upload_session(
)

metadata = session_converter.get_metadata()
metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + f"-raw-only"
metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + "-raw-only"

session_converter.run_conversion(
nwbfile_path=nwbfile_path,
File renamed without changes.
File renamed without changes.
@@ -50,7 +50,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
)

left_right_or_body = self.camera_name[:5].rstrip("C")
camera_name_snake_case = f"{left_right_or_body}_camera"
# camera_name_snake_case = f"{left_right_or_body}_camera"
reused_timestamps = None
all_pose_estimation_series = list()
if self.include_pose:
@@ -1,13 +1,13 @@
"""The interface for loadding spike sorted data via ONE access."""

from collections import defaultdict
from typing import Dict, Optional, Union

import numpy as np
import pandas as pd
from pydantic import DirectoryPath
from spikeinterface import BaseSorting, BaseSortingSegment

import pandas as pd


class IblSortingExtractor(BaseSorting):
extractor_name = "IblSorting"
@@ -48,8 +48,8 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None):
spikes, clusters, channels = sorting_loader.load_spike_sorting()
# cluster_ids.extend(list(np.array(clusters["metrics"]["cluster_id"]) + unit_id_per_probe_shift))
number_of_units = len(np.unique(spikes["clusters"]))
cluster_ids.extend(list(np.arange(number_of_units).astype('int32') + unit_id_per_probe_shift))
cluster_ids.extend(list(np.arange(number_of_units).astype("int32") + unit_id_per_probe_shift))

# TODO - compare speed against iterating over unique cluster IDs + vector index search
for spike_cluster, spike_times, spike_amplitudes, spike_depths in zip(
spikes["clusters"], spikes["times"], spikes["amps"], spikes["depths"]
@@ -86,10 +86,10 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None):
cluster_uuid="cluster_uuid",
cluster_id="cluster_id",
)
cluster_metrics = clusters['metrics'].reset_index(drop=True).join(pd.DataFrame(clusters['uuids']))
cluster_metrics.rename(columns={'uuids':'cluster_uuid'}, inplace=True)

cluster_metrics = clusters["metrics"].reset_index(drop=True).join(pd.DataFrame(clusters["uuids"]))
cluster_metrics.rename(columns={"uuids": "cluster_uuid"}, inplace=True)

for ibl_metric_key, property_name in ibl_metric_key_to_property_name.items():
all_unit_properties[property_name].extend(list(cluster_metrics[ibl_metric_key]))

@@ -1,4 +1,5 @@
"""The interface for loading spike sorted data via ONE access."""

from pathlib import Path

from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import (
@@ -1,4 +1,5 @@
"""Data interface wrapper around the SpikeInterface extractor - also sets atlas information."""

from pathlib import Path

import numpy as np
@@ -116,8 +117,8 @@ def get_metadata(self) -> dict:
device_name = f"NeuropixelsProbe{self.probe_number}"
group_name = f"NeuropixelsShank{self.probe_number}"
else:
device_name = f"NeuropixelsProbe"
group_name = f"NeuropixelsShank"
device_name = "NeuropixelsProbe"
group_name = "NeuropixelsShank"
# set_channel_groups removes probe
self.recording_extractor.set_property(
key="group_name", values=np.array([group_name] * self.recording_extractor.get_num_channels())
@@ -142,8 +143,8 @@ def get_metadata(self) -> dict:
def add_to_nwbfile(self, iterator_opts: dict, progress_position: int, **kwargs):
# The buffer and chunk shapes must be set explicitly for good performance with the streaming
# Otherwise, the default buffer/chunk shapes might re-request the same data packet multiple times
chunk_frames = 100 if kwargs.get("stub_test", False) else 30_000
buffer_frames = 100 if kwargs.get("stub_test", False) else 5 * 30_000
# chunk_frames = 100 if kwargs.get("stub_test", False) else 30_000
# buffer_frames = 100 if kwargs.get("stub_test", False) else 5 * 30_000
kwargs.update(
iterator_opts=dict(
display_progress=True,
File renamed without changes.
@@ -1,4 +1,5 @@
"""Data Interface for the pupil tracking."""

from pathlib import Path

import numpy as np
@@ -50,7 +51,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):
)
)
# Normally best practice convention would be PupilTrackingLeft or PupilTrackingRight but
# in this case I'd say LeftPupilTracking and RightPupilTracking reads bettter
# in this case I'd say LeftPupilTracking and RightPupilTracking reads better
pupil_tracking = PupilTracking(name=f"{left_or_right.capitalize()}PupilTracking", time_series=pupil_time_series)

behavior_module = get_module(nwbfile=nwbfile, name="behavior", description="Processed behavioral data.")
@@ -1,8 +1,8 @@
"""Data Interface for the special data type of ROI Motion Energy."""

from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.tools.nwb_helpers import get_module
from one.api import ONE
from pydantic import DirectoryPath
from pynwb import H5DataIO, TimeSeries


@@ -31,13 +31,13 @@ def add_to_nwbfile(self, nwbfile, metadata: dict):

width, height, x, y = motion_energy_video_region["position"]

description = f"""
Motion energy calculated for a region of the {left_right_or_body} camera video that is {width} pixels wide, {height} pixels tall, and the top-left corner of the region is the pixel ({x}, {y}).
CAUTION: As each software will load the video in a different orientation, the ROI might need to be adapted.
For example, when loading the video with cv2 in Python, x and y axes are flipped from the convention used above.
The region then becomes [{y}:{y+height}, {x}:{x+width}].
"""
description = (
f"Motion energy calculated for a region of the {left_right_or_body} camera video that is {width} pixels "
f"wide, {height} pixels tall, and the top-left corner of the region is the pixel ({x}, {y}).\n\n"
"CAUTION: As each software will load the video in a different orientation, the ROI might need to be "
"adapted. For example, when loading the video with cv2 in Python, x and y axes are flipped from the "
f"convention used above. The region then becomes [{y}:{y+height}, {x}:{x+width}]."
)

motion_energy_series = TimeSeries(
name=f"{left_right_or_body.capitalize()}CameraMotionEnergy",
File renamed without changes.
@@ -1,13 +1,12 @@
"""Primary base class for all IBL converters."""
import json

from datetime import datetime
from typing import Optional

from dateutil import tz
from ndx_ibl import IblSubject
from neuroconv import ConverterPipe
from neuroconv.tools.nwb_helpers import make_or_load_nwbfile
from neuroconv.utils import dict_deep_update
from one.api import ONE
from pynwb import NWBFile

File renamed without changes.
@@ -85,4 +85,4 @@ Ecephys:
- name: cluster_uuid
description: Unique identifier given to each local cluster after spike sorting. Enables comparison of each unit in this .nwb file to its counterpart cluster UUID from ONE.
- name: cluster_id
description: The original id given to each cluster after spike sorting. Note that as spike sorting is computed per probe, cluster_ids are only unique per probe insertion, experiments with dual insertions can have clusters with the same id.
description: The original id given to each cluster after spike sorting. Note that as spike sorting is computed per probe, cluster_ids are only unique per probe insertion, experiments with dual insertions can have clusters with the same id.
File renamed without changes.
File renamed without changes.
@@ -5,4 +5,4 @@ ibllib>=2.21.0
ndx-pose>=0.1.1
ndx-ibl==0.1.0
probeinterface
iblatlas
iblatlas
