diff --git a/src/nwbinspector/utils/_utils.py b/src/nwbinspector/utils/_utils.py
index 63c686847..a98a17897 100644
--- a/src/nwbinspector/utils/_utils.py
+++ b/src/nwbinspector/utils/_utils.py
@@ -34,13 +34,13 @@ def _cache_data_retrieval_command(
 
 def cache_data_selection(
-    data: Union[h5py.Dataset, zarr.Array, ArrayLike], selection: Union[slice, tuple[slice]]
+    data: Union[h5py.Dataset, ArrayLike], selection: Union[slice, tuple[slice]]
 ) -> np.ndarray:
     """Extract the selection lazily from the data object for efficient caching (most beneficial during streaming)."""
     if isinstance(data, np.memmap):
         # np.memmap objects are not hashable - simply return the selection lazily
         return data[selection]
     if not (
-        isinstance(data, (h5py.Dataset, zarr.Array)) or isinstance(data, H5Dataset)
+        isinstance(data, h5py.Dataset) or isinstance(data, H5Dataset)
     ):  # No need to attempt to cache if data is already in-memory
         # Cast as numpy array for efficient fancy indexing
         # Note that this technically copies the entire data, so could use more than 2x RAM for that object
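
For review context (not part of the diff), below is a minimal usage sketch of the touched function. It assumes `cache_data_selection` is re-exported from `nwbinspector.utils` (the diff only shows the private `_utils` module) and writes a small throwaway HDF5 file for demonstration. After this change, a `zarr.Array` input would follow the in-memory branch (cast and indexed directly) rather than the LRU-cached retrieval path, which is now reserved for h5py datasets and their hdmf `H5Dataset` wrappers.

# Usage sketch (illustrative only; import path assumed, demo file created locally).
import h5py
import numpy as np

from nwbinspector.utils import cache_data_selection  # assumed re-export of _utils.cache_data_selection

# In-memory array-likes (including zarr.Array after this change) skip the
# caching machinery and are simply cast to a numpy array and indexed.
in_memory = np.arange(100).reshape(20, 5)
subset = cache_data_selection(data=in_memory, selection=(slice(0, 10), slice(0, 2)))
assert subset.shape == (10, 2)

# h5py.Dataset objects still route through the LRU-cached retrieval, which pays
# off for repeated reads of the same selection (most beneficial when streaming).
with h5py.File("demo.h5", mode="w") as file:
    file.create_dataset(name="data", data=np.random.rand(100, 3))

with h5py.File("demo.h5", mode="r") as file:
    first_rows = cache_data_selection(data=file["data"], selection=(slice(0, 10),))
    assert first_rows.shape == (10, 3)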