
Commit

Merge pull request #120 from catalystneuro/add_compression
Add compression to traces in write_segmentation
CodyCBakerPhD authored Sep 2, 2022
2 parents 471f119 + e554b40 commit ae08b4a
Showing 5 changed files with 44 additions and 16 deletions.
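
In practice, the two new keyword arguments are passed to `write_segmentation` as in the sketch below (illustrative only: the dummy extractor assumes roiextractors' `generate_dummy_segmentation_extractor` test helper and the output path is a placeholder; the keyword names come from the diff).

```python
from roiextractors.testing import generate_dummy_segmentation_extractor
from neuroconv.tools.roiextractors import write_segmentation

# Placeholder extractor; any SegmentationExtractor works here.
segmentation_extractor = generate_dummy_segmentation_extractor()

write_segmentation(
    segmentation_extractor=segmentation_extractor,
    nwbfile_path="segmentation.nwb",
    overwrite=True,
    # New in this PR: forwarded to every Fluorescence trace.
    iterator_options=dict(),                       # default when None
    compression_options=dict(compression="gzip"),  # default when None
)
```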
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -35,6 +35,7 @@
* Implemented format-wise and modality-wise extra installation requirements. If there are any requirements to use a module or data interface, these are defined in individual requirements files at the corresponding level of the package. These are in turn easily accessible from the commands `pip install neuroconv[format_name]`. `pip install neuroconv[modality_name]` will also install all dependencies necessary to make full use of any interfaces from that modality. [PR #100](https://github.com/catalystneuro/neuroconv/pull/100)
* Added frame stubbing to the `BaseSegmentationExtractorInterface`. [PR #116](https://github.com/catalystneuro/neuroconv/pull/116)
* Added `mask_type: str` and `include_roi_centroids: bool` to the `add_plane_segmentation` helper and `write_segmentation` functions for the `tools.roiextractors` submodule. [PR #117](https://github.com/catalystneuro/neuroconv/pull/117)
* Added compression and iteration (with options control) to all Fluorescence traces in `write_segmentation`. [PR #120](https://github.com/catalystneuro/neuroconv/pull/120)

### Documentation and tutorial enhancements:
* Unified the documentation of NeuroConv structure in the User Guide readthedocs. [PR #39](https://github.com/catalystneuro/neuroconv/pull/39)
@@ -67,6 +67,8 @@ def run_conversion(
stub_test: bool = False,
stub_frames: int = 100,
include_roi_centroids: bool = True,
iterator_options: Optional[dict] = None,
compression_options: Optional[dict] = None,
):
from ...tools.roiextractors import write_segmentation

@@ -84,4 +86,6 @@
overwrite=overwrite,
verbose=self.verbose,
include_roi_centroids=include_roi_centroids,
iterator_options=iterator_options,
compression_options=compression_options,
)
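
The two options are simply forwarded from the interface's `run_conversion` to `write_segmentation`, so they can be set at the interface level. A hedged sketch (the concrete interface class and its `folder_path` source data are placeholders, not part of this diff):

```python
from neuroconv.datainterfaces import Suite2pSegmentationInterface  # placeholder choice of interface

interface = Suite2pSegmentationInterface(folder_path="path/to/suite2p")  # placeholder source data
interface.run_conversion(
    nwbfile_path="segmentation.nwb",
    overwrite=True,
    include_roi_centroids=True,
    iterator_options=None,                         # resolved to dict() downstream
    compression_options=dict(compression="gzip"),  # forwarded to write_segmentation
)
```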
2 changes: 1 addition & 1 deletion src/neuroconv/datainterfaces/ophys/requirements.txt
@@ -1 +1 @@
roiextractors @ git+https://github.com/catalystneuro/roiextractors.git@9e955248d2532c74522cf896b7c27367304041d9
roiextractors @ git+https://github.com/catalystneuro/roiextractors.git@e668d4bb3f378a308abc128fa3f4a000f0109ef1
27 changes: 17 additions & 10 deletions src/neuroconv/tools/roiextractors/roiextractors.py
@@ -28,6 +28,7 @@
from hdmf.backends.hdf5.h5_utils import H5DataIO

from .imagingextractordatachunkiterator import ImagingExtractorDataChunkIterator
from ..hdmf import SliceableDataChunkIterator
from ..nwb_helpers import get_default_nwbfile_metadata, make_or_load_nwbfile, get_module
from ...utils import OptionalFilePathType, dict_deep_update, calculate_regular_series_rate

@@ -581,10 +582,8 @@ def add_plane_segmentation(
NWBFile
The nwbfile passed as an input with the plane segmentation added.
"""
if iterator_options is None:
iterator_options = dict()
if compression_options is None:
compression_options = dict()
iterator_options = iterator_options or dict()
compression_options = compression_options or dict(compression="gzip")

def image_mask_iterator():
for roi_id in segmentation_extractor.get_roi_ids():
@@ -669,6 +668,8 @@ def add_fluorescence_traces(
nwbfile: NWBFile,
metadata: Optional[dict],
plane_index: int = 0,
iterator_options: Optional[dict] = None,
compression_options: Optional[dict] = None,
) -> NWBFile:
"""
Adds the fluorescence traces specified by the metadata to the nwb file.
@@ -691,6 +692,8 @@
NWBFile
The nwbfile passed as an input with the fluorescence traces added.
"""
iterator_options = iterator_options or dict()
compression_options = compression_options or dict(compression="gzip")

# Set the defaults and required infrastructure
metadata_copy = deepcopy(metadata)
@@ -776,7 +779,7 @@ def add_fluorescence_traces(

# Build the roi response series
roi_response_series_kwargs.update(
data=np.array(trace).T,
data=H5DataIO(SliceableDataChunkIterator(trace, **iterator_options), **compression_options),
rois=roi_table_region,
**trace_metadata,
)
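
This replaces the eager `np.array(trace).T` with a lazily iterated, compressed dataset. A minimal sketch of the wrapping pattern (the random array stands in for one of the arrays returned by `segmentation_extractor.get_traces_dict()`; `compression_opts` only illustrates that additional `H5DataIO` keywords can be supplied through `compression_options`):

```python
import numpy as np
from hdmf.backends.hdf5.h5_utils import H5DataIO
from neuroconv.tools.hdmf import SliceableDataChunkIterator

trace = np.random.rand(1000, 30)  # stand-in for a fluorescence trace array

# iterator_options are forwarded to SliceableDataChunkIterator and
# compression_options to H5DataIO; gzip is the default when none are given.
data = H5DataIO(
    SliceableDataChunkIterator(trace),
    compression="gzip",
    compression_opts=4,
)
```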
@@ -884,6 +887,8 @@ def write_segmentation(
buffer_size: int = 10,
plane_num: int = 0,
include_roi_centroids: bool = True,
iterator_options: Optional[dict] = None,
compression_options: Optional[dict] = None,
):
"""
Primary method for writing a SegmentationExtractor object to an NWBFile.
@@ -924,6 +929,9 @@
nwbfile_path is None or nwbfile is None
), "Either pass a nwbfile_path location, or nwbfile object, but not both!"

iterator_options = iterator_options or dict()
compression_options = compression_options or dict(compression="gzip")

# parse metadata correctly considering the MultiSegmentationExtractor function:
if isinstance(segmentation_extractor, MultiSegmentationExtractor):
segmentation_extractors = segmentation_extractor.segmentations
@@ -978,18 +986,17 @@
nwbfile=nwbfile_out,
metadata=metadata,
include_roi_centroids=include_roi_centroids,
iterator_options=dict(buffer_size=buffer_size),
compression_options=dict(
compression=True,
compression_opts=9,
),
iterator_options=iterator_options,
compression_options=compression_options,
)

# Add fluorescence traces:
add_fluorescence_traces(
segmentation_extractor=segmentation_extractor,
nwbfile=nwbfile_out,
metadata=metadata,
iterator_options=iterator_options,
compression_options=compression_options,
)

# Adding summary images (mean and correlation)
26 changes: 21 additions & 5 deletions tests/test_ophys/test_tools_roiextractors.py
@@ -10,7 +10,7 @@
from hdmf.testing import TestCase
from numpy.testing import assert_array_equal, assert_raises
from parameterized import parameterized, param
from pynwb import NWBFile, NWBHDF5IO
from pynwb import NWBFile, NWBHDF5IO, H5DataIO
from pynwb.device import Device
from roiextractors.testing import (
generate_dummy_imaging_extractor,
@@ -616,9 +616,18 @@ def test_add_fluorescence_traces(self):

traces = self.segmentation_extractor.get_traces_dict()

assert_array_equal(fluorescence["RoiResponseSeries"].data, traces["raw"].T)
assert_array_equal(fluorescence["Deconvolved"].data, traces["deconvolved"].T)
assert_array_equal(fluorescence["Neuropil"].data, traces["neuropil"].T)
for nwb_series_name, roiextractors_name in zip(
["RoiResponseSeries", "Deconvolved", "Neuropil"], ["raw", "deconvolved", "neuropil"]
):
series_outer_data = fluorescence[nwb_series_name].data
assert_array_equal(series_outer_data.data.data, traces[roiextractors_name])

# Check compression options are set
assert isinstance(series_outer_data, H5DataIO)

compression_parameters = series_outer_data.get_io_params()
assert compression_parameters["compression"] == "gzip"

# Check that df/F trace data is not being written to the Fluorescence container
df_over_f = ophys.get(self.df_over_f_name)
assert_raises(
@@ -670,7 +679,14 @@ def test_add_df_over_f_trace(self):

traces = segmentation_extractor.get_traces_dict()

assert_array_equal(df_over_f[trace_name].data, traces["dff"].T)
series_outer_data = df_over_f[trace_name].data
assert_array_equal(series_outer_data.data.data, traces["dff"])

# Check compression options are set
assert isinstance(series_outer_data, H5DataIO)

compression_parameters = series_outer_data.get_io_params()
assert compression_parameters["compression"] == "gzip"

def test_add_fluorescence_one_of_the_traces_is_none(self):
"""Test that roi response series with None values are not added to the
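
Beyond the in-memory assertions above, the effect of the new defaults can also be checked on a written file with plain h5py (sketch; the file path and the `processing/ophys/Fluorescence` dataset path are assumptions based on the containers used in these tests):

```python
import h5py

with h5py.File("segmentation.nwb", "r") as file:
    dataset = file["processing/ophys/Fluorescence/RoiResponseSeries/data"]
    print(dataset.compression)  # "gzip" under the new defaults
    print(dataset.chunks)       # chunk shape selected via the data chunk iterator
```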
