Skip to content

Commit

Permalink
Merge branch 'main' into dlc_csv
Browse files Browse the repository at this point in the history
  • Loading branch information
pauladkisson authored Nov 13, 2024
2 parents 05591cd + 6960872 commit ec417c0
Show file tree
Hide file tree
Showing 13 changed files with 87 additions and 168 deletions.
4 changes: 3 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
# Upcoming

## Deprecations
* Completely removed compression settings from most places [PR #1126](https://github.com/catalystneuro/neuroconv/pull/1126)

## Bug Fixes

## Features
* Imaging interfaces have a new conversion option `always_write_timestamps` that can be used to force writing timestamps even if neuroconv's heuristics indicate a regular sampling rate [PR #1125](https://github.com/catalystneuro/neuroconv/pull/1125)
* Added .csv support to DeepLabCutInterface [PR #1140](https://github.com/catalystneuro/neuroconv/pull/1140)

## Improvements
Expand Down Expand Up @@ -46,7 +48,7 @@
* Added automated EFS volume creation and mounting to the `submit_aws_job` helper function. [PR #1018](https://github.com/catalystneuro/neuroconv/pull/1018)
* Added a mock for segmentation extractors interfaces in ophys: `MockSegmentationInterface` [PR #1067](https://github.com/catalystneuro/neuroconv/pull/1067)
* Added a `MockSortingInterface` for testing purposes. [PR #1065](https://github.com/catalystneuro/neuroconv/pull/1065)
* BaseRecordingInterfaces have a new conversion options `always_write_timestamps` that ca be used to force writing timestamps even if neuroconv heuristic indicates regular sampling rate [PR #1091](https://github.com/catalystneuro/neuroconv/pull/1091)
* BaseRecordingInterfaces have a new conversion options `always_write_timestamps` that can be used to force writing timestamps even if neuroconv heuristic indicates regular sampling rate [PR #1091](https://github.com/catalystneuro/neuroconv/pull/1091)


## Improvements
Expand Down
26 changes: 13 additions & 13 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -270,50 +270,50 @@ icephys = [

## Ophys
brukertiff = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
"tifffile>=2023.3.21",
]
caiman = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
cnmfe = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
extract = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
hdf5 = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
micromanagertiff = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
"tifffile>=2023.3.21",
]
miniscope = [
"natsort>=8.3.1",
"ndx-miniscope>=0.5.1",
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
sbx = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
scanimage = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
"scanimage-tiff-reader>=1.4.1",
]
sima = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
suite2p = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
]
tdt_fp = [
"ndx-fiber-photometry",
"roiextractors>=0.5.7",
"roiextractors>=0.5.10",
"tdt",
]
tiff = [
"roiextractors>=0.5.7",
"roiextractors>=0.5.9",
"tiffile>=2018.10.18",
]
ophys = [
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import json
import re
import warnings
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Union
Expand Down Expand Up @@ -210,8 +209,6 @@ def add_to_nwbfile(
self,
nwbfile: NWBFile,
metadata: Optional[dict] = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None, # TODO: remove completely after 10/1/2024
):
"""
Parameters
Expand All @@ -223,17 +220,6 @@ def add_to_nwbfile(
"""
import pandas as pd

# TODO: remove completely after 10/1/2024
if compression is not None or compression_opts is not None:
warnings.warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

fictrac_data_df = pd.read_csv(self.file_path, sep=",", header=None, names=self.columns_in_dat_file)

# Get the timestamps
Expand Down
13 changes: 0 additions & 13 deletions src/neuroconv/datainterfaces/behavior/video/videodatainterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,8 +269,6 @@ def add_to_nwbfile(
chunk_data: bool = True,
module_name: Optional[str] = None,
module_description: Optional[str] = None,
compression: Optional[str] = "gzip",
compression_options: Optional[int] = None,
):
"""
Convert the video data files to :py:class:`~pynwb.image.ImageSeries` and write them in the
Expand Down Expand Up @@ -431,17 +429,6 @@ def add_to_nwbfile(
pbar.update(1)
iterable = video

# TODO: remove completely after 03/1/2024
if compression is not None or compression_options is not None:
warnings.warn(
message=(
"Specifying compression methods and their options for this interface has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

image_series_kwargs.update(data=iterable)

if timing_type == "starting_time and rate":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@ def add_to_nwbfile(
starting_time: Optional[float] = None,
write_as: Literal["raw", "lfp", "processed"] = "lfp",
write_electrical_series: bool = True,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None,
iterator_type: str = "v2",
iterator_opts: Optional[dict] = None,
):
Expand All @@ -38,8 +36,6 @@ def add_to_nwbfile(
starting_time=starting_time,
write_as=write_as,
write_electrical_series=write_electrical_series,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Original file line number Diff line number Diff line change
Expand Up @@ -308,8 +308,6 @@ def add_to_nwbfile(
starting_time: Optional[float] = None,
write_as: Literal["raw", "lfp", "processed"] = "raw",
write_electrical_series: bool = True,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
always_write_timestamps: bool = False,
Expand Down Expand Up @@ -388,8 +386,6 @@ def add_to_nwbfile(
write_as=write_as,
write_electrical_series=write_electrical_series,
es_key=self.es_key,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
always_write_timestamps=always_write_timestamps,
Expand Down
47 changes: 27 additions & 20 deletions src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Author: Ben Dichter."""

import warnings
from typing import Literal, Optional

import numpy as np
Expand Down Expand Up @@ -46,17 +45,9 @@ def __init__(
self.photon_series_type = photon_series_type

def get_metadata_schema(
self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
self,
) -> dict:

if photon_series_type is not None:
warnings.warn(
"The 'photon_series_type' argument is deprecated and will be removed in a future version. "
"Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
DeprecationWarning,
stacklevel=2,
)
self.photon_series_type = photon_series_type
metadata_schema = super().get_metadata_schema()

metadata_schema["required"] = ["Ophys"]
Expand Down Expand Up @@ -100,18 +91,9 @@ def get_metadata_schema(
return metadata_schema

def get_metadata(
self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
self,
) -> DeepDict:

if photon_series_type is not None:
warnings.warn(
"The 'photon_series_type' argument is deprecated and will be removed in a future version. "
"Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
DeprecationWarning,
stacklevel=2,
)
self.photon_series_type = photon_series_type

from ...tools.roiextractors import get_nwb_imaging_metadata

metadata = super().get_metadata()
Expand Down Expand Up @@ -146,7 +128,31 @@ def add_to_nwbfile(
parent_container: Literal["acquisition", "processing/ophys"] = "acquisition",
stub_test: bool = False,
stub_frames: int = 100,
always_write_timestamps: bool = False,
):
"""
Add imaging data to the NWB file
Parameters
----------
nwbfile : NWBFile
The NWB file where the imaging data will be added.
metadata : dict, optional
Metadata for the NWBFile, by default None.
photon_series_type : {"TwoPhotonSeries", "OnePhotonSeries"}, optional
The type of photon series to be added, by default "TwoPhotonSeries".
photon_series_index : int, optional
The index of the photon series in the provided imaging data, by default 0.
parent_container : {"acquisition", "processing/ophys"}, optional
Specifies the parent container to which the photon series should be added, either as part of "acquisition" or
under the "processing/ophys" module, by default "acquisition".
stub_test : bool, optional
If True, only writes a small subset of frames for testing purposes, by default False.
stub_frames : int, optional
The number of frames to write when stub_test is True. Will use min(stub_frames, total_frames) to avoid
exceeding available frames, by default 100.
"""

from ...tools.roiextractors import add_imaging_to_nwbfile

if stub_test:
Expand All @@ -162,4 +168,5 @@ def add_to_nwbfile(
photon_series_type=photon_series_type,
photon_series_index=photon_series_index,
parent_container=parent_container,
always_write_timestamps=always_write_timestamps,
)
36 changes: 0 additions & 36 deletions src/neuroconv/tools/neo/neo.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,6 @@ def add_icephys_recordings(
icephys_experiment_type: str = "voltage_clamp",
stimulus_type: str = "not described",
skip_electrodes: tuple[int] = (),
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
):
"""
Add icephys recordings (stimulus/response pairs) to nwbfile object.
Expand All @@ -230,16 +229,6 @@ def add_icephys_recordings(
skip_electrodes : tuple, default: ()
Electrode IDs to skip.
"""
# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

n_segments = get_number_of_segments(neo_reader, block=0)

Expand Down Expand Up @@ -380,7 +369,6 @@ def add_neo_to_nwb(
neo_reader,
nwbfile: pynwb.NWBFile,
metadata: dict = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
icephys_experiment_type: str = "voltage_clamp",
stimulus_type: Optional[str] = None,
skip_electrodes: tuple[int] = (),
Expand Down Expand Up @@ -409,15 +397,6 @@ def add_neo_to_nwb(
assert isinstance(nwbfile, pynwb.NWBFile), "'nwbfile' should be of type pynwb.NWBFile"

# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

add_device_from_metadata(nwbfile=nwbfile, modality="Icephys", metadata=metadata)

Expand All @@ -443,7 +422,6 @@ def write_neo_to_nwb(
overwrite: bool = False,
nwbfile=None,
metadata: dict = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
icephys_experiment_type: Optional[str] = None,
stimulus_type: Optional[str] = None,
skip_electrodes: Optional[tuple] = (),
Expand Down Expand Up @@ -499,9 +477,6 @@ def write_neo_to_nwb(
Note that data intended to be added to the electrodes table of the NWBFile should be set as channel
properties in the RecordingExtractor object.
compression: str (optional, defaults to "gzip")
Type of compression to use. Valid types are "gzip" and "lzf".
Set to None to disable all compression.
icephys_experiment_type: str (optional)
Type of Icephys experiment. Allowed types are: 'voltage_clamp', 'current_clamp' and 'izero'.
If no value is passed, 'voltage_clamp' is used as default.
Expand All @@ -518,17 +493,6 @@ def write_neo_to_nwb(

assert save_path is None or nwbfile is None, "Either pass a save_path location, or nwbfile object, but not both!"

# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

if metadata is None:
metadata = get_nwb_metadata(neo_reader=neo_reader)

Expand Down
Loading

0 comments on commit ec417c0

Please sign in to comment.