Skip to content

Commit

Permalink
Assert nFrames key; remove bincount2D
Browse files Browse the repository at this point in the history
  • Loading branch information
k1o0 committed Jul 1, 2024
1 parent d3322c6 commit d773c1e
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 67 deletions.
70 changes: 20 additions & 50 deletions brainbox/processing.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
'''
Processes data from one form into another, e.g. taking spike times and binning them into
non-overlapping bins and convolving spike times with a gaussian kernel.
'''
"""Process data from one form into another.
For example, taking spike times and binning them into non-overlapping bins and convolving spike
times with a gaussian kernel.
"""

import numpy as np
import pandas as pd
from scipy import interpolate, sparse
from brainbox import core
from iblutil.numerical import bincount2D as _bincount2D
from iblutil.numerical import bincount2D
from iblutil.util import Bunch
import logging
import warnings
import traceback

_logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -118,35 +117,6 @@ def sync(dt, times=None, values=None, timeseries=None, offsets=None, interp='zer
return syncd


def bincount2D(x, y, xbin=0, ybin=0, xlim=None, ylim=None, weights=None):
    """
    Computes a 2D histogram by aggregating values in a 2D array.

    .. deprecated::
        This function has moved to iblutil; import it from
        ``iblutil.numerical.bincount2D`` instead. This wrapper will be removed
        in a future release.

    :param x: values to bin along the 2nd dimension (c-contiguous)
    :param y: values to bin along the 1st dimension
    :param xbin:
        scalar: bin size along 2nd dimension
        0: aggregate according to unique values
        array: aggregate according to exact values (count reduce operation)
    :param ybin:
        scalar: bin size along 1st dimension
        0: aggregate according to unique values
        array: aggregate according to exact values (count reduce operation)
    :param xlim: (optional) 2 values (array or list) that restrict range along 2nd dimension
    :param ylim: (optional) 2 values (array or list) that restrict range along 1st dimension
    :param weights: (optional) defaults to None, weights to apply to each value for aggregation
    :return: 3 numpy arrays MAP [ny,nx] image, xscale [nx], yscale [ny]
    """
    warning_text = """Future warning: bincount2D() is now a part of iblutil.
    brainbox.processing.bincount2D will be removed in future versions.
    Please replace imports with iblutil.numerical.bincount2D."""
    _logger.warning(warning_text)
    # stacklevel=2 attributes the FutureWarning to the caller rather than this
    # wrapper, so users can locate the import that needs updating.  The previous
    # print() of the full call stack on every invocation was debug noise and has
    # been removed in favour of this standard warning mechanism.
    warnings.warn(warning_text, FutureWarning, stacklevel=2)
    return _bincount2D(x, y, xbin, ybin, xlim, ylim, weights)


def compute_cluster_average(spike_clusters, spike_var):
"""
Quickish way to compute the average of some quantity across spikes in each cluster given
Expand Down Expand Up @@ -197,7 +167,7 @@ def bin_spikes(spikes, binsize, interval_indices=False):


def get_units_bunch(spks_b, *args):
'''
"""
Returns a bunch, where the bunch keys are keys from `spks` with labels of spike information
(e.g. unit IDs, times, features, etc.), and the values for each key are arrays with values for
each unit: these arrays are ordered and can be indexed by unit id.
Expand All @@ -223,18 +193,18 @@ def get_units_bunch(spks_b, *args):
--------
1) Create a units bunch given a spikes bunch, and get the amps for unit #4 from the units
bunch.
>>> import brainbox as bb
>>> import alf.io as aio
>>> from brainbox import processing
>>> import one.alf.io as alfio
>>> import ibllib.ephys.spikes as e_spks
(*Note, if there is no 'alf' directory, make 'alf' directory from 'ks2' output directory):
>>> e_spks.ks2_to_alf(path_to_ks_out, path_to_alf_out)
>>> spks_b = aio.load_object(path_to_alf_out, 'spikes')
>>> units_b = bb.processing.get_units_bunch(spks_b)
>>> spks_b = alfio.load_object(path_to_alf_out, 'spikes')
>>> units_b = processing.get_units_bunch(spks_b)
# Get amplitudes for unit 4.
>>> amps = units_b['amps']['4']
TODO add computation time estimate?
'''
"""

# Initialize `units`
units_b = Bunch()
Expand All @@ -261,7 +231,7 @@ def get_units_bunch(spks_b, *args):


def filter_units(units_b, t, **kwargs):
'''
"""
Filters units according to some parameters. **kwargs are the keyword parameters used to filter
the units.
Expand Down Expand Up @@ -299,24 +269,24 @@ def filter_units(units_b, t, **kwargs):
Examples
--------
1) Filter units according to the default parameters.
>>> import brainbox as bb
>>> import alf.io as aio
>>> from brainbox import processing
>>> import one.alf.io as alfio
>>> import ibllib.ephys.spikes as e_spks
(*Note, if there is no 'alf' directory, make 'alf' directory from 'ks2' output directory):
>>> e_spks.ks2_to_alf(path_to_ks_out, path_to_alf_out)
# Get a spikes bunch, units bunch, and filter the units.
>>> spks_b = aio.load_object(path_to_alf_out, 'spikes')
>>> units_b = bb.processing.get_units_bunch(spks_b, ['times', 'amps', 'clusters'])
>>> spks_b = alfio.load_object(path_to_alf_out, 'spikes')
>>> units_b = processing.get_units_bunch(spks_b, ['times', 'amps', 'clusters'])
>>> T = spks_b['times'][-1] - spks_b['times'][0]
>>> filtered_units = bb.processing.filter_units(units_b, T)
>>> filtered_units = processing.filter_units(units_b, T)
2) Filter units with no minimum amplitude, a minimum firing rate of 1 Hz, and a max false
positive rate of 0.2, given a refractory period of 2 ms.
>>> filtered_units = bb.processing.filter_units(units_b, T, min_amp=0, min_fr=1)
>>> filtered_units = processing.filter_units(units_b, T, min_amp=0, min_fr=1)
TODO: `units_b` input arg could eventually be replaced by `clstrs_b` if the required metrics
are in `clstrs_b['metrics']`
'''
"""

# Set params
params = {'min_amp': 50e-6, 'min_fr': 0.5, 'max_fpr': 0.2, 'rp': 0.002} # defaults
Expand Down
15 changes: 0 additions & 15 deletions brainbox/tests/test_processing.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from brainbox import processing, core
import unittest
import numpy as np
import datetime


class TestProcessing(unittest.TestCase):
Expand Down Expand Up @@ -63,15 +62,6 @@ def test_sync(self):
self.assertTrue(times2.min() >= resamp2.times.min())
self.assertTrue(times2.max() <= resamp2.times.max())

def test_bincount2D_deprecation(self):
# Timer to remove bincount2D (now in iblutil)
# Once this test fails:
# - Remove the bincount2D method in processing.py
# - Remove the import from iblutil at the top of that file
# - Delete this test
if datetime.datetime.now() > datetime.datetime(2024, 6, 30):
raise NotImplementedError

def test_compute_cluster_averag(self):
# Create fake data for 3 clusters
clust1 = np.ones(40)
Expand Down Expand Up @@ -103,11 +93,6 @@ def test_compute_cluster_averag(self):
self.assertEqual(avg_val[2], 0.75)
self.assertTrue(np.all(count == (40, 40, 50)))

def test_deprecations(self):
"""Ensure removal of bincount2D function."""
from datetime import datetime
self.assertTrue(datetime.today() < datetime(2024, 8, 1), 'remove brainbox.processing.bincount2D')


if __name__ == '__main__':
np.random.seed(0)
Expand Down
1 change: 1 addition & 0 deletions ibllib/io/extractors/mesoscope.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ def patch_imaging_meta(meta: dict) -> dict:
for fov in meta.get('FOV', []):
if 'roiUuid' in fov:
fov['roiUUID'] = fov.pop('roiUuid')
assert 'nFrames' in meta, '"nFrames" key missing from meta data; rawImagingData.meta.json likely an old version'
return meta


Expand Down
5 changes: 3 additions & 2 deletions ibllib/tests/test_mesoscope.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ def test_meta(self):
}

meta = {
'nFrames': 2000,
'scanImageParams': {'hStackManager': {'zs': 320},
'hRoiManager': {'scanVolumeRate': 6.8}},
'FOV': [{'topLeftDeg': [-1, 1.3], 'topRightDeg': [3, 1.3], 'bottomLeftDeg': [-1, 5.2],
Expand Down Expand Up @@ -256,10 +257,10 @@ class TestImagingMeta(unittest.TestCase):
"""Test raw imaging metadata versioning."""
def test_patch_imaging_meta(self):
"""Test for ibllib.io.extractors.mesoscope.patch_imaging_meta function."""
meta = {'version': '0.1.0', 'FOV': [{'roiUuid': None}, {'roiUUID': None}]}
meta = {'version': '0.1.0', 'nFrames': 2000, 'FOV': [{'roiUuid': None}, {'roiUUID': None}]}
new_meta = mesoscope.patch_imaging_meta(meta)
self.assertEqual(set(chain(*map(dict.keys, new_meta['FOV']))), {'roiUUID'})
meta = {'FOV': [
meta = {'nFrames': 2000, 'FOV': [
dict.fromkeys(['topLeftDeg', 'topRightDeg', 'bottomLeftDeg', 'bottomRightDeg']),
dict.fromkeys(['topLeftMM', 'topRightMM', 'bottomLeftMM', 'bottomRightMM'])
]}
Expand Down

0 comments on commit d773c1e

Please sign in to comment.