diff --git a/CHANGELOG.md b/CHANGELOG.md
index fa679434a..75c6ea917 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,7 @@
 # Upcoming
 
 ## Deprecations
+* Removed the deprecated `compression` and `compression_opts` arguments from interfaces and tool functions; compression is now configured with the `configure_backend` tool function [PR #1126](https://github.com/catalystneuro/neuroconv/pull/1126)
 
 ## Bug Fixes
 
diff --git a/src/neuroconv/datainterfaces/behavior/fictrac/fictracdatainterface.py b/src/neuroconv/datainterfaces/behavior/fictrac/fictracdatainterface.py
index 1b9686fd1..1d822f919 100644
--- a/src/neuroconv/datainterfaces/behavior/fictrac/fictracdatainterface.py
+++ b/src/neuroconv/datainterfaces/behavior/fictrac/fictracdatainterface.py
@@ -1,6 +1,5 @@
 import json
 import re
-import warnings
 from datetime import datetime, timezone
 from pathlib import Path
 from typing import Optional, Union
@@ -210,8 +209,6 @@ def add_to_nwbfile(
         self,
         nwbfile: NWBFile,
         metadata: Optional[dict] = None,
-        compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
-        compression_opts: Optional[int] = None,  # TODO: remove completely after 10/1/2024
     ):
         """
         Parameters
@@ -223,17 +220,6 @@ def add_to_nwbfile(
         """
         import pandas as pd
 
-        # TODO: remove completely after 10/1/2024
-        if compression is not None or compression_opts is not None:
-            warnings.warn(
-                message=(
-                    "Specifying compression methods and their options at the level of tool functions has been deprecated. "
-                    "Please use the `configure_backend` tool function for this purpose."
-                ),
-                category=DeprecationWarning,
-                stacklevel=2,
-            )
-
         fictrac_data_df = pd.read_csv(self.file_path, sep=",", header=None, names=self.columns_in_dat_file)
 
         # Get the timestamps
diff --git a/src/neuroconv/datainterfaces/ecephys/baselfpextractorinterface.py b/src/neuroconv/datainterfaces/ecephys/baselfpextractorinterface.py
index 7ce6bb9e4..af16601bb 100644
--- a/src/neuroconv/datainterfaces/ecephys/baselfpextractorinterface.py
+++ b/src/neuroconv/datainterfaces/ecephys/baselfpextractorinterface.py
@@ -26,8 +26,6 @@ def add_to_nwbfile(
         starting_time: Optional[float] = None,
         write_as: Literal["raw", "lfp", "processed"] = "lfp",
         write_electrical_series: bool = True,
-        compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
-        compression_opts: Optional[int] = None,
         iterator_type: str = "v2",
         iterator_opts: Optional[dict] = None,
     ):
@@ -38,8 +36,6 @@ def add_to_nwbfile(
             starting_time=starting_time,
             write_as=write_as,
             write_electrical_series=write_electrical_series,
-            compression=compression,
-            compression_opts=compression_opts,
             iterator_type=iterator_type,
             iterator_opts=iterator_opts,
         )
diff --git a/src/neuroconv/datainterfaces/ecephys/baserecordingextractorinterface.py b/src/neuroconv/datainterfaces/ecephys/baserecordingextractorinterface.py
index e2c747378..6d0df14c1 100644
--- a/src/neuroconv/datainterfaces/ecephys/baserecordingextractorinterface.py
+++ b/src/neuroconv/datainterfaces/ecephys/baserecordingextractorinterface.py
@@ -308,8 +308,6 @@ def add_to_nwbfile(
         starting_time: Optional[float] = None,
         write_as: Literal["raw", "lfp", "processed"] = "raw",
         write_electrical_series: bool = True,
-        compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
-        compression_opts: Optional[int] = None,
         iterator_type: Optional[str] = "v2",
         iterator_opts: Optional[dict] = None,
         always_write_timestamps: bool = False,
@@ -388,8 +386,6 @@ def add_to_nwbfile(
             write_as=write_as,
             write_electrical_series=write_electrical_series,
             es_key=self.es_key,
-            compression=compression,
-            compression_opts=compression_opts,
             iterator_type=iterator_type,
             iterator_opts=iterator_opts,
             always_write_timestamps=always_write_timestamps,
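The `compression` / `compression_opts` arguments removed above are superseded by the `configure_backend` workflow that the old deprecation warnings pointed to. A minimal sketch of that replacement, assuming the `get_default_backend_configuration` / `configure_backend` helpers from `neuroconv.tools.nwb_helpers` and their documented attribute names:

```python
from datetime import datetime

import numpy as np
from pynwb import NWBHDF5IO, NWBFile, TimeSeries

from neuroconv.tools.nwb_helpers import configure_backend, get_default_backend_configuration

# Stand-in for an in-memory NWBFile that interfaces/tool functions have already written to.
nwbfile = NWBFile(
    session_description="demo",
    identifier="configure-backend-demo",
    session_start_time=datetime.now().astimezone(),
)
nwbfile.add_acquisition(
    TimeSeries(name="ExampleSeries", data=np.random.randn(1000, 4), unit="a.u.", rate=30_000.0)
)

# Compression is now chosen per dataset on a backend configuration, not per tool function.
backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")
for dataset_configuration in backend_configuration.dataset_configurations.values():
    dataset_configuration.compression_method = "gzip"  # attribute name assumed from the backend-configuration docs

configure_backend(nwbfile=nwbfile, backend_configuration=backend_configuration)

with NWBHDF5IO("demo.nwb", mode="w") as io:
    io.write(nwbfile)
```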
diff --git a/src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py b/src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py
index 5125af3cc..9f88b861f 100644
--- a/src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py
+++ b/src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py
@@ -1,6 +1,5 @@
 """Author: Ben Dichter."""
 
-import warnings
 from typing import Literal, Optional
 
 import numpy as np
@@ -46,17 +45,9 @@ def __init__(
         self.photon_series_type = photon_series_type
 
     def get_metadata_schema(
-        self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
+        self,
     ) -> dict:
 
-        if photon_series_type is not None:
-            warnings.warn(
-                "The 'photon_series_type' argument is deprecated and will be removed in a future version. "
-                "Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            self.photon_series_type = photon_series_type
         metadata_schema = super().get_metadata_schema()
 
         metadata_schema["required"] = ["Ophys"]
@@ -100,18 +91,9 @@ def get_metadata_schema(
         return metadata_schema
 
     def get_metadata(
-        self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
+        self,
     ) -> DeepDict:
 
-        if photon_series_type is not None:
-            warnings.warn(
-                "The 'photon_series_type' argument is deprecated and will be removed in a future version. "
-                "Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            self.photon_series_type = photon_series_type
-
         from ...tools.roiextractors import get_nwb_imaging_metadata
 
         metadata = super().get_metadata()
@@ -147,6 +129,29 @@ def add_to_nwbfile(
         stub_test: bool = False,
         stub_frames: int = 100,
     ):
+        """
+        Add imaging data to the NWB file.
+
+        Parameters
+        ----------
+        nwbfile : NWBFile
+            The NWB file where the imaging data will be added.
+        metadata : dict, optional
+            Metadata for the NWBFile, by default None.
+        photon_series_type : {"TwoPhotonSeries", "OnePhotonSeries"}, optional
+            The type of photon series to be added, by default "TwoPhotonSeries".
+        photon_series_index : int, optional
+            The index of the photon series in the provided imaging data, by default 0.
+        parent_container : {"acquisition", "processing/ophys"}, optional
+            Specifies the parent container to which the photon series should be added, either as part of "acquisition" or
+            under the "processing/ophys" module, by default "acquisition".
+        stub_test : bool, optional
+            If True, only writes a small subset of frames for testing purposes, by default False.
+        stub_frames : int, optional
+            The number of frames to write when stub_test is True. Will use min(stub_frames, total_frames) to avoid
+            exceeding available frames, by default 100.
+        """
+
         from ...tools.roiextractors import add_imaging_to_nwbfile
 
         if stub_test:
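With `photon_series_type` now fixed at interface initialization, `get_metadata()` takes no arguments and the new `add_to_nwbfile` docstring above describes the remaining options. A small usage sketch built on `MockImagingInterface` from `neuroconv.tools.testing.mock_interfaces` (any real imaging interface follows the same pattern; the demo `NWBFile` fields are placeholders):

```python
from datetime import datetime

from pynwb import NWBFile

from neuroconv.tools.testing.mock_interfaces import MockImagingInterface

# The photon series type is chosen once, at initialization.
interface = MockImagingInterface(photon_series_type="OnePhotonSeries")
metadata = interface.get_metadata()  # no longer accepts a photon_series_type argument

nwbfile = NWBFile(
    session_description="ophys demo",
    identifier="imaging-demo",
    session_start_time=datetime.now().astimezone(),
)

interface.add_to_nwbfile(
    nwbfile=nwbfile,
    metadata=metadata,
    photon_series_type="OnePhotonSeries",  # matches the value passed at initialization
    parent_container="acquisition",
    stub_test=True,  # write only min(stub_frames, total_frames) frames
    stub_frames=10,
)
```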
diff --git a/src/neuroconv/tools/neo/neo.py b/src/neuroconv/tools/neo/neo.py
index 220c64de0..ccef706e5 100644
--- a/src/neuroconv/tools/neo/neo.py
+++ b/src/neuroconv/tools/neo/neo.py
@@ -214,7 +214,6 @@ def add_icephys_recordings(
     icephys_experiment_type: str = "voltage_clamp",
     stimulus_type: str = "not described",
     skip_electrodes: tuple[int] = (),
-    compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
 ):
     """
     Add icephys recordings (stimulus/response pairs) to nwbfile object.
@@ -230,16 +229,6 @@ def add_icephys_recordings(
     skip_electrodes : tuple, default: ()
         Electrode IDs to skip.
     """
-    # TODO: remove completely after 10/1/2024
-    if compression is not None:
-        warn(
-            message=(
-                "Specifying compression methods and their options at the level of tool functions has been deprecated. "
-                "Please use the `configure_backend` tool function for this purpose."
-            ),
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
 
     n_segments = get_number_of_segments(neo_reader, block=0)
 
@@ -380,7 +369,6 @@ def add_neo_to_nwb(
     neo_reader,
     nwbfile: pynwb.NWBFile,
     metadata: dict = None,
-    compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
     icephys_experiment_type: str = "voltage_clamp",
     stimulus_type: Optional[str] = None,
     skip_electrodes: tuple[int] = (),
@@ -409,15 +397,6 @@ def add_neo_to_nwb(
     assert isinstance(nwbfile, pynwb.NWBFile), "'nwbfile' should be of type pynwb.NWBFile"
 
     # TODO: remove completely after 10/1/2024
-    if compression is not None:
-        warn(
-            message=(
-                "Specifying compression methods and their options at the level of tool functions has been deprecated. "
-                "Please use the `configure_backend` tool function for this purpose."
-            ),
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
 
     add_device_from_metadata(nwbfile=nwbfile, modality="Icephys", metadata=metadata)
 
@@ -443,7 +422,6 @@ def write_neo_to_nwb(
     overwrite: bool = False,
     nwbfile=None,
     metadata: dict = None,
-    compression: Optional[str] = None,  # TODO: remove completely after 10/1/2024
     icephys_experiment_type: Optional[str] = None,
     stimulus_type: Optional[str] = None,
     skip_electrodes: Optional[tuple] = (),
@@ -499,9 +477,6 @@ def write_neo_to_nwb(
 
         Note that data intended to be added to the electrodes table of the NWBFile should be set
        as channel properties in the RecordingExtractor object.
-    compression: str (optional, defaults to "gzip")
-        Type of compression to use. Valid types are "gzip" and "lzf".
-        Set to None to disable all compression.
     icephys_experiment_type: str (optional)
         Type of Icephys experiment. Allowed types are: 'voltage_clamp', 'current_clamp' and 'izero'.
         If no value is passed, 'voltage_clamp' is used as default.
@@ -518,17 +493,6 @@ def write_neo_to_nwb(
 
     assert save_path is None or nwbfile is None, "Either pass a save_path location, or nwbfile object, but not both!"
 
-    # TODO: remove completely after 10/1/2024
-    if compression is not None:
-        warn(
-            message=(
-                "Specifying compression methods and their options at the level of tool functions has been deprecated. "
-                "Please use the `configure_backend` tool function for this purpose."
-            ),
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
-
     if metadata is None:
         metadata = get_nwb_metadata(neo_reader=neo_reader)
 
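After this change the icephys tool functions no longer accept a `compression` argument; everything else keeps its previous meaning. A hedged sketch of calling `add_neo_to_nwb` with the post-change signature (the import uses this module's path, which may differ from the public re-export in your version, and `recording.abf` is a placeholder file readable by `neo`):

```python
from datetime import datetime

import neo
from pynwb import NWBFile

from neuroconv.tools.neo.neo import add_neo_to_nwb  # public import location may differ by version

neo_reader = neo.AxonIO(filename="recording.abf")  # placeholder intracellular recording

nwbfile = NWBFile(
    session_description="icephys demo",
    identifier="icephys-demo",
    session_start_time=datetime.now().astimezone(),
)

# The removed `compression` argument is simply gone; no replacement argument is needed here.
add_neo_to_nwb(
    neo_reader=neo_reader,
    nwbfile=nwbfile,
    icephys_experiment_type="current_clamp",
    stimulus_type="not described",
    skip_electrodes=(),
)
```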
diff --git a/src/neuroconv/tools/spikeinterface/spikeinterface.py b/src/neuroconv/tools/spikeinterface/spikeinterface.py
index 1be86862a..5aa3c8925 100644
--- a/src/neuroconv/tools/spikeinterface/spikeinterface.py
+++ b/src/neuroconv/tools/spikeinterface/spikeinterface.py
@@ -749,8 +749,6 @@ def add_electrical_series(
     write_as: Literal["raw", "processed", "lfp"] = "raw",
     es_key: str = None,
     write_scaled: bool = False,
-    compression: Optional[str] = None,
-    compression_opts: Optional[int] = None,
     iterator_type: Optional[str] = "v2",
     iterator_opts: Optional[dict] = None,
 ):
@@ -772,8 +770,6 @@ def add_electrical_series(
         write_as=write_as,
         es_key=es_key,
         write_scaled=write_scaled,
-        compression=compression,
-        compression_opts=compression_opts,
         iterator_type=iterator_type,
         iterator_opts=iterator_opts,
     )
@@ -810,8 +806,6 @@ def add_electrical_series_to_nwbfile(
     write_as: Literal["raw", "processed", "lfp"] = "raw",
     es_key: str = None,
     write_scaled: bool = False,
-    compression: Optional[str] = None,
-    compression_opts: Optional[int] = None,
     iterator_type: Optional[str] = "v2",
     iterator_opts: Optional[dict] = None,
     always_write_timestamps: bool = False,
@@ -847,7 +841,6 @@ def add_electrical_series_to_nwbfile(
     write_scaled : bool, default: False
         If True, writes the traces in uV with the right conversion. If False , the data is stored as it is
         and the right conversions factors are added to the nwbfile.
-        Only applies to compression="gzip". Controls the level of the GZIP.
     iterator_type: {"v2", None}, default: 'v2'
         The type of DataChunkIterator to use.
         'v1' is the original DataChunkIterator of the hdmf data_utils.
@@ -868,16 +861,6 @@ def add_electrical_series_to_nwbfile(
         Missing keys in an element of metadata['Ecephys']['ElectrodeGroup'] will be auto-populated with defaults
         whenever possible.
     """
-    # TODO: remove completely after 10/1/2024
-    if compression is not None or compression_opts is not None:
-        warnings.warn(
-            message=(
-                "Specifying compression methods and their options at the level of tool functions has been deprecated. "
-                "Please use the `configure_backend` tool function for this purpose."
-            ),
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
 
     assert write_as in [
         "raw",
@@ -1042,8 +1025,6 @@ def add_recording(
     es_key: Optional[str] = None,
     write_electrical_series: bool = True,
     write_scaled: bool = False,
-    compression: Optional[str] = "gzip",
-    compression_opts: Optional[int] = None,
     iterator_type: str = "v2",
     iterator_opts: Optional[dict] = None,
 ):
@@ -1065,8 +1046,6 @@ def add_recording(
         es_key=es_key,
         write_electrical_series=write_electrical_series,
         write_scaled=write_scaled,
-        compression=compression,
-        compression_opts=compression_opts,
         iterator_type=iterator_type,
         iterator_opts=iterator_opts,
     )
@@ -1081,8 +1060,6 @@ def add_recording_to_nwbfile(
     es_key: Optional[str] = None,
     write_electrical_series: bool = True,
     write_scaled: bool = False,
-    compression: Optional[str] = "gzip",
-    compression_opts: Optional[int] = None,
     iterator_type: str = "v2",
     iterator_opts: Optional[dict] = None,
     always_write_timestamps: bool = False,
@@ -1163,8 +1140,6 @@ def add_recording_to_nwbfile(
            write_as=write_as,
            es_key=es_key,
            write_scaled=write_scaled,
-            compression=compression,
-            compression_opts=compression_opts,
            iterator_type=iterator_type,
            iterator_opts=iterator_opts,
            always_write_timestamps=always_write_timestamps,
@@ -1183,8 +1158,6 @@ def write_recording(
     es_key: Optional[str] = None,
     write_electrical_series: bool = True,
     write_scaled: bool = False,
-    compression: Optional[str] = "gzip",
-    compression_opts: Optional[int] = None,
     iterator_type: Optional[str] = "v2",
     iterator_opts: Optional[dict] = None,
 ):
@@ -1209,8 +1182,6 @@ def write_recording(
         es_key=es_key,
         write_electrical_series=write_electrical_series,
         write_scaled=write_scaled,
-        compression=compression,
-        compression_opts=compression_opts,
         iterator_type=iterator_type,
         iterator_opts=iterator_opts,
     )
@@ -1228,8 +1199,6 @@ def write_recording_to_nwbfile(
     es_key: Optional[str] = None,
     write_electrical_series: bool = True,
     write_scaled: bool = False,
-    compression: Optional[str] = "gzip",
-    compression_opts: Optional[int] = None,
     iterator_type: Optional[str] = "v2",
     iterator_opts: Optional[dict] = None,
 ) -> pynwb.NWBFile:
@@ -1303,11 +1272,6 @@ def write_recording_to_nwbfile(
         and electrodes are written to NWB.
     write_scaled: bool, default: True
         If True, writes the scaled traces (return_scaled=True)
-    compression: {None, 'gzip', 'lzp'}, default: 'gzip'
-        Type of compression to use. Set to None to disable all compression.
-        To use the `configure_backend` function, you should set this to None.
-    compression_opts: int, optional, default: 4
-        Only applies to compression="gzip". Controls the level of the GZIP.
     iterator_type: {"v2", "v1", None}
         The type of DataChunkIterator to use.
         'v1' is the original DataChunkIterator of the hdmf data_utils.
@@ -1348,8 +1312,6 @@ def write_recording_to_nwbfile(
             es_key=es_key,
             write_electrical_series=write_electrical_series,
             write_scaled=write_scaled,
-            compression=compression,
-            compression_opts=compression_opts,
             iterator_type=iterator_type,
             iterator_opts=iterator_opts,
         )
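For the ecephys tool functions, compression is likewise no longer passed at this level; chunked writing is still controlled through `iterator_type` / `iterator_opts`. A minimal sketch using a generated `spikeinterface` recording (the `add_recording_to_nwbfile` import location and the `iterator_opts` keys are assumptions; pair this with `configure_backend` before writing to disk):

```python
from datetime import datetime

from pynwb import NWBFile
from spikeinterface.core import generate_recording

from neuroconv.tools.spikeinterface import add_recording_to_nwbfile  # import path assumed

recording = generate_recording(num_channels=4, durations=[1.0], sampling_frequency=30_000.0)

nwbfile = NWBFile(
    session_description="ecephys demo",
    identifier="ecephys-demo",
    session_start_time=datetime.now().astimezone(),
)

# No compression/compression_opts here anymore; only the iteration over traces is configured.
add_recording_to_nwbfile(
    recording=recording,
    nwbfile=nwbfile,
    write_as="raw",
    write_electrical_series=True,
    iterator_type="v2",
    iterator_opts=dict(display_progress=False),  # forwarded to the data chunk iterator (assumed key)
)
```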
diff --git a/src/neuroconv/tools/testing/mock_interfaces.py b/src/neuroconv/tools/testing/mock_interfaces.py
index 4ba7bb639..44d1adf61 100644
--- a/src/neuroconv/tools/testing/mock_interfaces.py
+++ b/src/neuroconv/tools/testing/mock_interfaces.py
@@ -265,9 +265,9 @@ def __init__(
         self.verbose = verbose
         self.photon_series_type = photon_series_type
 
-    def get_metadata(self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None) -> dict:
+    def get_metadata(self) -> dict:
         session_start_time = datetime.now().astimezone()
-        metadata = super().get_metadata(photon_series_type=photon_series_type)
+        metadata = super().get_metadata()
         metadata["NWBFile"]["session_start_time"] = session_start_time
         return metadata
 
diff --git a/tests/test_ophys/test_baseimagingextractorinterface.py b/tests/test_ophys/test_baseimagingextractorinterface.py
deleted file mode 100644
index 863a978d2..000000000
--- a/tests/test_ophys/test_baseimagingextractorinterface.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from hdmf.testing import TestCase
-
-from neuroconv.tools.testing.mock_interfaces import MockImagingInterface
-
-
-class TestBaseImagingExtractorInterface(TestCase):
-    def setUp(self):
-        self.mock_imaging_interface = MockImagingInterface()
-
-    def test_photon_series_type_warning_triggered_in_get_metadata(self):
-        with self.assertWarnsWith(
-            warn_type=DeprecationWarning,
-            exc_msg="The 'photon_series_type' argument is deprecated and will be removed in a future version. Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
-        ):
-            self.mock_imaging_interface.get_metadata(photon_series_type="TwoPhotonSeries")
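The deleted test only covered the removed deprecation warning. A hedged sketch of a pytest-style check for the new behavior (not part of this PR), relying only on `MockImagingInterface` and the argument-free `get_metadata()` shown above:

```python
from neuroconv.tools.testing.mock_interfaces import MockImagingInterface


def test_photon_series_type_is_fixed_at_initialization():
    # The photon series type is set once at initialization and stored on the interface.
    interface = MockImagingInterface(photon_series_type="OnePhotonSeries")

    metadata = interface.get_metadata()  # no photon_series_type argument anymore

    assert interface.photon_series_type == "OnePhotonSeries"
    assert "Ophys" in metadata  # imaging metadata is still populated from the interface settings
```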