diff --git a/doc/how_to/analyse_neuropixels.rst b/doc/how_to/analyse_neuropixels.rst index 31dbc7422c..c921b13719 100644 --- a/doc/how_to/analyse_neuropixels.rst +++ b/doc/how_to/analyse_neuropixels.rst @@ -426,7 +426,7 @@ Let’s use here the ``locally_exclusive`` method for detection and the job_kwargs = dict(n_jobs=40, chunk_duration='1s', progress_bar=True) peaks = detect_peaks(rec, method='locally_exclusive', noise_levels=noise_levels_int16, - detect_threshold=5, local_radius_um=50., **job_kwargs) + detect_threshold=5, radius_um=50., **job_kwargs) peaks @@ -451,7 +451,7 @@ Let’s use here the ``locally_exclusive`` method for detection and the from spikeinterface.sortingcomponents.peak_localization import localize_peaks - peak_locations = localize_peaks(rec, peaks, method='center_of_mass', local_radius_um=50., **job_kwargs) + peak_locations = localize_peaks(rec, peaks, method='center_of_mass', radius_um=50., **job_kwargs) diff --git a/doc/how_to/get_started.rst b/doc/how_to/get_started.rst index 0f6aa9eb3f..0dd618e972 100644 --- a/doc/how_to/get_started.rst +++ b/doc/how_to/get_started.rst @@ -266,7 +266,7 @@ available parameters are dictionaries and can be accessed with: 'clustering': {}, 'detection': {'detect_threshold': 5, 'peak_sign': 'neg'}, 'filtering': {'dtype': 'float32'}, - 'general': {'local_radius_um': 100, 'ms_after': 2, 'ms_before': 2}, + 'general': {'radius_um': 100, 'ms_after': 2, 'ms_before': 2}, 'job_kwargs': {}, 'localization': {}, 'matching': {}, diff --git a/doc/how_to/handle_drift.rst b/doc/how_to/handle_drift.rst index c0a27ff0a3..7ff98a666b 100644 --- a/doc/how_to/handle_drift.rst +++ b/doc/how_to/handle_drift.rst @@ -118,10 +118,10 @@ to load them later. 'peak_sign': 'neg', 'detect_threshold': 8.0, 'exclude_sweep_ms': 0.1, - 'local_radius_um': 50}, + 'radius_um': 50}, 'select_kwargs': None, 'localize_peaks_kwargs': {'method': 'grid_convolution', - 'local_radius_um': 30.0, + 'radius_um': 30.0, 'upsampling_um': 3.0, 'sigma_um': array([ 5. , 12.5, 20. ]), 'sigma_ms': 0.25, diff --git a/doc/modules/motion_correction.rst b/doc/modules/motion_correction.rst index 6dc949625d..62c0d6b8d4 100644 --- a/doc/modules/motion_correction.rst +++ b/doc/modules/motion_correction.rst @@ -159,7 +159,7 @@ The high-level :py:func:`~spikeinterface.preprocessing.correct_motion()` is inte peaks = detect_peaks(rec, method="locally_exclusive", detect_threshold=8.0, **job_kwargs) # (optional) sub-select some peaks to speed up the localization peaks = select_peaks(peaks, ...) 
- peak_locations = localize_peaks(rec, peaks, method="monopolar_triangulation",local_radius_um=75.0, + peak_locations = localize_peaks(rec, peaks, method="monopolar_triangulation",radius_um=75.0, max_distance_um=150.0, **job_kwargs) # Step 2: motion inference diff --git a/doc/modules/sortingcomponents.rst b/doc/modules/sortingcomponents.rst index b4380fc587..aa62ea5b33 100644 --- a/doc/modules/sortingcomponents.rst +++ b/doc/modules/sortingcomponents.rst @@ -51,7 +51,7 @@ follows: peak_sign='neg', detect_threshold=5, exclude_sweep_ms=0.2, - local_radius_um=100, + radius_um=100, noise_levels=None, random_chunk_kwargs={}, outputs='numpy_compact', @@ -95,7 +95,7 @@ follows: job_kwargs = dict(chunk_duration='1s', n_jobs=8, progress_bar=True) peak_locations = localize_peaks(recording, peaks, method='center_of_mass', - local_radius_um=70., ms_before=0.3, ms_after=0.6, + radius_um=70., ms_before=0.3, ms_after=0.6, **job_kwargs) diff --git a/examples/how_to/analyse_neuropixels.py b/examples/how_to/analyse_neuropixels.py index 637120a591..eed05a0ee5 100644 --- a/examples/how_to/analyse_neuropixels.py +++ b/examples/how_to/analyse_neuropixels.py @@ -170,13 +170,13 @@ job_kwargs = dict(n_jobs=40, chunk_duration='1s', progress_bar=True) peaks = detect_peaks(rec, method='locally_exclusive', noise_levels=noise_levels_int16, - detect_threshold=5, local_radius_um=50., **job_kwargs) + detect_threshold=5, radius_um=50., **job_kwargs) peaks # + from spikeinterface.sortingcomponents.peak_localization import localize_peaks -peak_locations = localize_peaks(rec, peaks, method='center_of_mass', local_radius_um=50., **job_kwargs) +peak_locations = localize_peaks(rec, peaks, method='center_of_mass', radius_um=50., **job_kwargs) # - # ### Check for drift diff --git a/examples/modules_gallery/widgets/plot_4_peaks_gallery.py b/examples/modules_gallery/widgets/plot_4_peaks_gallery.py index df7d9dbf2c..addd87c065 100644 --- a/examples/modules_gallery/widgets/plot_4_peaks_gallery.py +++ b/examples/modules_gallery/widgets/plot_4_peaks_gallery.py @@ -30,7 +30,7 @@ peaks = detect_peaks( rec_filtred, method='locally_exclusive', peak_sign='neg', detect_threshold=6, exclude_sweep_ms=0.3, - local_radius_um=100, + radius_um=100, noise_levels=None, random_chunk_kwargs={}, chunk_memory='10M', n_jobs=1, progress_bar=True) diff --git a/src/spikeinterface/postprocessing/unit_localization.py b/src/spikeinterface/postprocessing/unit_localization.py index 9f303de6e1..740fdd234b 100644 --- a/src/spikeinterface/postprocessing/unit_localization.py +++ b/src/spikeinterface/postprocessing/unit_localization.py @@ -568,7 +568,7 @@ def enforce_decrease_shells_data(wf_data, maxchan, radial_parents, in_place=Fals def get_grid_convolution_templates_and_weights( - contact_locations, local_radius_um=50, upsampling_um=5, sigma_um=np.linspace(10, 50.0, 5), margin_um=50 + contact_locations, radius_um=50, upsampling_um=5, sigma_um=np.linspace(10, 50.0, 5), margin_um=50 ): x_min, x_max = contact_locations[:, 0].min(), contact_locations[:, 0].max() y_min, y_max = contact_locations[:, 1].min(), contact_locations[:, 1].max() @@ -597,7 +597,7 @@ def get_grid_convolution_templates_and_weights( # mask to get nearest template given a channel dist = sklearn.metrics.pairwise_distances(contact_locations, template_positions) - nearest_template_mask = dist < local_radius_um + nearest_template_mask = dist < radius_um weights = np.zeros((len(sigma_um), len(contact_locations), nb_templates), dtype=np.float32) for count, sigma in enumerate(sigma_um): diff --git 
a/src/spikeinterface/preprocessing/motion.py b/src/spikeinterface/preprocessing/motion.py index 56c7e4fa05..8b0c8006d2 100644 --- a/src/spikeinterface/preprocessing/motion.py +++ b/src/spikeinterface/preprocessing/motion.py @@ -18,12 +18,12 @@ peak_sign="neg", detect_threshold=8.0, exclude_sweep_ms=0.1, - local_radius_um=50, + radius_um=50, ), "select_kwargs": None, "localize_peaks_kwargs": dict( method="monopolar_triangulation", - local_radius_um=75.0, + radius_um=75.0, max_distance_um=150.0, optimizer="minimize_with_log_penality", enforce_decrease=True, @@ -81,12 +81,12 @@ peak_sign="neg", detect_threshold=8.0, exclude_sweep_ms=0.1, - local_radius_um=50, + radius_um=50, ), "select_kwargs": None, "localize_peaks_kwargs": dict( method="center_of_mass", - local_radius_um=75.0, + radius_um=75.0, feature="ptp", ), "estimate_motion_kwargs": dict( @@ -109,12 +109,12 @@ peak_sign="neg", detect_threshold=8.0, exclude_sweep_ms=0.1, - local_radius_um=50, + radius_um=50, ), "select_kwargs": None, "localize_peaks_kwargs": dict( method="grid_convolution", - local_radius_um=40.0, + radius_um=40.0, upsampling_um=5.0, sigma_um=np.linspace(5.0, 25.0, 5), sigma_ms=0.25, diff --git a/src/spikeinterface/sorters/internal/spyking_circus2.py b/src/spikeinterface/sorters/internal/spyking_circus2.py index 24c4a7ccfc..9de2762562 100644 --- a/src/spikeinterface/sorters/internal/spyking_circus2.py +++ b/src/spikeinterface/sorters/internal/spyking_circus2.py @@ -21,7 +21,7 @@ class Spykingcircus2Sorter(ComponentsBasedSorter): sorter_name = "spykingcircus2" _default_params = { - "general": {"ms_before": 2, "ms_after": 2, "local_radius_um": 100}, + "general": {"ms_before": 2, "ms_after": 2, "radius_um": 100}, "waveforms": {"max_spikes_per_unit": 200, "overwrite": True}, "filtering": {"dtype": "float32"}, "detection": {"peak_sign": "neg", "detect_threshold": 5}, @@ -75,8 +75,8 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose): ## Then, we are detecting peaks with a locally_exclusive method detection_params = params["detection"].copy() detection_params.update(job_kwargs) - if "local_radius_um" not in detection_params: - detection_params["local_radius_um"] = params["general"]["local_radius_um"] + if "radius_um" not in detection_params: + detection_params["radius_um"] = params["general"]["radius_um"] if "exclude_sweep_ms" not in detection_params: detection_params["exclude_sweep_ms"] = max(params["general"]["ms_before"], params["general"]["ms_after"]) diff --git a/src/spikeinterface/sorters/internal/tridesclous2.py b/src/spikeinterface/sorters/internal/tridesclous2.py index a812d4ce49..42f51d3a77 100644 --- a/src/spikeinterface/sorters/internal/tridesclous2.py +++ b/src/spikeinterface/sorters/internal/tridesclous2.py @@ -12,7 +12,7 @@ class Tridesclous2Sorter(ComponentsBasedSorter): _default_params = { "apply_preprocessing": True, - "general": {"ms_before": 2.5, "ms_after": 3.5, "local_radius_um": 100}, + "general": {"ms_before": 2.5, "ms_after": 3.5, "radius_um": 100}, "filtering": {"freq_min": 300, "freq_max": 8000.0}, "detection": {"peak_sign": "neg", "detect_threshold": 5, "exclude_sweep_ms": 0.4}, "hdbscan_kwargs": { @@ -68,7 +68,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose): # detection detection_params = params["detection"].copy() - detection_params["local_radius_um"] = params["general"]["local_radius_um"] + detection_params["radius_um"] = params["general"]["radius_um"] detection_params["noise_levels"] = noise_levels peaks = detect_peaks(recording, method="locally_exclusive", 
**detection_params, **job_kwargs) @@ -89,7 +89,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose): # localization localization_params = params["localization"].copy() - localization_params["local_radius_um"] = params["general"]["local_radius_um"] + localization_params["radius_um"] = params["general"]["radius_um"] peak_locations = localize_peaks( recording, some_peaks, method="monopolar_triangulation", **localization_params, **job_kwargs ) @@ -127,7 +127,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose): matching_params["noise_levels"] = noise_levels matching_params["peak_sign"] = params["detection"]["peak_sign"] matching_params["detect_threshold"] = params["detection"]["detect_threshold"] - matching_params["local_radius_um"] = params["general"]["local_radius_um"] + matching_params["radius_um"] = params["general"]["radius_um"] # TODO: route that params # ~ 'num_closest' : 5, diff --git a/src/spikeinterface/sortingcomponents/clustering/circus.py b/src/spikeinterface/sortingcomponents/clustering/circus.py index a6185f5193..46aba7e96f 100644 --- a/src/spikeinterface/sortingcomponents/clustering/circus.py +++ b/src/spikeinterface/sortingcomponents/clustering/circus.py @@ -37,7 +37,7 @@ class CircusClustering: }, "cleaning_kwargs": {}, "tmp_folder": None, - "local_radius_um": 100, + "radius_um": 100, "n_pca": 10, "max_spikes_per_unit": 200, "ms_before": 1.5, @@ -104,7 +104,7 @@ def main_function(cls, recording, peaks, params): chan_distances = get_channel_distances(recording) for main_chan in unit_inds: - (closest_chans,) = np.nonzero(chan_distances[main_chan, :] <= params["local_radius_um"]) + (closest_chans,) = np.nonzero(chan_distances[main_chan, :] <= params["radius_um"]) sparsity_mask[main_chan, closest_chans] = True if params["waveform_mode"] == "shared_memory": diff --git a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py index 082d2dc0ba..8d21041599 100644 --- a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py +++ b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py @@ -35,7 +35,7 @@ class PositionAndFeaturesClustering: "cluster_selection_method": "leaf", }, "cleaning_kwargs": {}, - "local_radius_um": 100, + "radius_um": 100, "max_spikes_per_unit": 200, "selection_method": "random", "ms_before": 1.5, @@ -69,9 +69,9 @@ def main_function(cls, recording, peaks, params): features_list = [position_method, "ptp", "energy"] features_params = { - position_method: {"local_radius_um": params["local_radius_um"]}, - "ptp": {"all_channels": False, "local_radius_um": params["local_radius_um"]}, - "energy": {"local_radius_um": params["local_radius_um"]}, + position_method: {"radius_um": params["radius_um"]}, + "ptp": {"all_channels": False, "radius_um": params["radius_um"]}, + "energy": {"radius_um": params["radius_um"]}, } features_data = compute_features_from_peaks( diff --git a/src/spikeinterface/sortingcomponents/clustering/random_projections.py b/src/spikeinterface/sortingcomponents/clustering/random_projections.py index 02247dd288..fcbcac097f 100644 --- a/src/spikeinterface/sortingcomponents/clustering/random_projections.py +++ b/src/spikeinterface/sortingcomponents/clustering/random_projections.py @@ -34,7 +34,7 @@ class RandomProjectionClustering: "cluster_selection_method": "leaf", }, "cleaning_kwargs": {}, - "local_radius_um": 100, + "radius_um": 100, "max_spikes_per_unit": 200, "selection_method": 
"closest_to_centroid", "nb_projections": {"ptp": 8, "energy": 2}, @@ -106,7 +106,7 @@ def main_function(cls, recording, peaks, params): projections = np.random.randn(num_chans, d["nb_projections"][proj_type]) features_params[f"random_projections_{proj_type}"] = { - "local_radius_um": params["local_radius_um"], + "radius_um": params["radius_um"], "projections": projections, "min_values": min_values, } diff --git a/src/spikeinterface/sortingcomponents/features_from_peaks.py b/src/spikeinterface/sortingcomponents/features_from_peaks.py index c075e8e7c1..adc025e829 100644 --- a/src/spikeinterface/sortingcomponents/features_from_peaks.py +++ b/src/spikeinterface/sortingcomponents/features_from_peaks.py @@ -105,15 +105,15 @@ def compute(self, traces, peaks, waveforms): class PeakToPeakFeature(PipelineNode): def __init__( - self, recording, name="ptp_feature", return_output=True, parents=None, local_radius_um=150.0, all_channels=True + self, recording, name="ptp_feature", return_output=True, parents=None, radius_um=150.0, all_channels=True ): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um self.all_channels = all_channels - self._kwargs.update(dict(local_radius_um=local_radius_um, all_channels=all_channels)) + self._kwargs.update(dict(radius_um=radius_um, all_channels=all_channels)) self._dtype = recording.get_dtype() def get_dtype(self): @@ -139,19 +139,19 @@ def __init__( name="ptp_lag_feature", return_output=True, parents=None, - local_radius_um=150.0, + radius_um=150.0, all_channels=True, ): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.all_channels = all_channels - self.local_radius_um = local_radius_um + self.radius_um = radius_um self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(local_radius_um=local_radius_um, all_channels=all_channels)) + self._kwargs.update(dict(radius_um=radius_um, all_channels=all_channels)) self._dtype = recording.get_dtype() def get_dtype(self): @@ -184,20 +184,20 @@ def __init__( return_output=True, parents=None, projections=None, - local_radius_um=150.0, + radius_um=150.0, min_values=None, ): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.projections = projections - self.local_radius_um = local_radius_um + self.radius_um = radius_um self.min_values = min_values self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(projections=projections, local_radius_um=local_radius_um, min_values=min_values)) + self._kwargs.update(dict(projections=projections, radius_um=radius_um, min_values=min_values)) self._dtype = recording.get_dtype() @@ -230,19 +230,19 @@ def __init__( return_output=True, parents=None, projections=None, - local_radius_um=150.0, + radius_um=150.0, min_values=None, ): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = 
recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um self.projections = projections self.min_values = min_values - self.local_radius_um = local_radius_um - self._kwargs.update(dict(projections=projections, min_values=min_values, local_radius_um=local_radius_um)) + self.radius_um = radius_um + self._kwargs.update(dict(projections=projections, min_values=min_values, radius_um=radius_um)) self._dtype = recording.get_dtype() def get_dtype(self): @@ -267,14 +267,14 @@ def compute(self, traces, peaks, waveforms): class StdPeakToPeakFeature(PipelineNode): - def __init__(self, recording, name="std_ptp_feature", return_output=True, parents=None, local_radius_um=150.0): + def __init__(self, recording, name="std_ptp_feature", return_output=True, parents=None, radius_um=150.0): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(local_radius_um=local_radius_um)) + self._kwargs.update(dict(radius_um=radius_um)) self._dtype = recording.get_dtype() @@ -292,14 +292,14 @@ def compute(self, traces, peaks, waveforms): class GlobalPeakToPeakFeature(PipelineNode): - def __init__(self, recording, name="global_ptp_feature", return_output=True, parents=None, local_radius_um=150.0): + def __init__(self, recording, name="global_ptp_feature", return_output=True, parents=None, radius_um=150.0): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(local_radius_um=local_radius_um)) + self._kwargs.update(dict(radius_um=radius_um)) self._dtype = recording.get_dtype() @@ -317,14 +317,14 @@ def compute(self, traces, peaks, waveforms): class KurtosisPeakToPeakFeature(PipelineNode): - def __init__(self, recording, name="kurtosis_ptp_feature", return_output=True, parents=None, local_radius_um=150.0): + def __init__(self, recording, name="kurtosis_ptp_feature", return_output=True, parents=None, radius_um=150.0): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(local_radius_um=local_radius_um)) + self._kwargs.update(dict(radius_um=radius_um)) self._dtype = recording.get_dtype() @@ -344,14 +344,14 @@ def compute(self, traces, peaks, waveforms): class EnergyFeature(PipelineNode): - def __init__(self, recording, name="energy_feature", return_output=True, parents=None, local_radius_um=50.0): + def __init__(self, recording, name="energy_feature", return_output=True, parents=None, radius_um=50.0): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) self.contact_locations = recording.get_channel_locations() self.channel_distance = 
get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um - self._kwargs.update(dict(local_radius_um=local_radius_um)) + self._kwargs.update(dict(radius_um=radius_um)) def get_dtype(self): return np.dtype("float32") diff --git a/src/spikeinterface/sortingcomponents/matching/naive.py b/src/spikeinterface/sortingcomponents/matching/naive.py index ba4c0e93f3..4e2625acec 100644 --- a/src/spikeinterface/sortingcomponents/matching/naive.py +++ b/src/spikeinterface/sortingcomponents/matching/naive.py @@ -35,7 +35,7 @@ class NaiveMatching(BaseTemplateMatchingEngine): "exclude_sweep_ms": 0.1, "detect_threshold": 5, "noise_levels": None, - "local_radius_um": 100, + "radius_um": 100, "random_chunk_kwargs": {}, } @@ -54,7 +54,7 @@ def initialize_and_check_kwargs(cls, recording, kwargs): d["abs_threholds"] = d["noise_levels"] * d["detect_threshold"] channel_distance = get_channel_distances(recording) - d["neighbours_mask"] = channel_distance < d["local_radius_um"] + d["neighbours_mask"] = channel_distance < d["radius_um"] d["nbefore"] = we.nbefore d["nafter"] = we.nafter diff --git a/src/spikeinterface/sortingcomponents/matching/tdc.py b/src/spikeinterface/sortingcomponents/matching/tdc.py index 5fbe1b94f3..7d6d707ea2 100644 --- a/src/spikeinterface/sortingcomponents/matching/tdc.py +++ b/src/spikeinterface/sortingcomponents/matching/tdc.py @@ -50,7 +50,7 @@ class TridesclousPeeler(BaseTemplateMatchingEngine): "peak_shift_ms": 0.2, "detect_threshold": 5, "noise_levels": None, - "local_radius_um": 100, + "radius_um": 100, "num_closest": 5, "sample_shift": 3, "ms_before": 0.8, @@ -103,7 +103,7 @@ def initialize_and_check_kwargs(cls, recording, kwargs): d["abs_threholds"] = d["noise_levels"] * d["detect_threshold"] channel_distance = get_channel_distances(recording) - d["neighbours_mask"] = channel_distance < d["local_radius_um"] + d["neighbours_mask"] = channel_distance < d["radius_um"] sparsity = compute_sparsity(we, method="snr", peak_sign=d["peak_sign"], threshold=d["detect_threshold"]) template_sparsity_inds = sparsity.unit_id_to_channel_indices @@ -154,7 +154,7 @@ def initialize_and_check_kwargs(cls, recording, kwargs): # distance channel from unit distances = scipy.spatial.distance.cdist(channel_locations, unit_locations, metric="euclidean") - near_cluster_mask = distances < d["local_radius_um"] + near_cluster_mask = distances < d["radius_um"] # nearby cluster for each channel possible_clusters_by_channel = [] diff --git a/src/spikeinterface/sortingcomponents/peak_detection.py b/src/spikeinterface/sortingcomponents/peak_detection.py index df3374b39d..4fd7611bb7 100644 --- a/src/spikeinterface/sortingcomponents/peak_detection.py +++ b/src/spikeinterface/sortingcomponents/peak_detection.py @@ -504,7 +504,7 @@ class DetectPeakLocallyExclusive(PeakDetectorWrapper): params_doc = ( DetectPeakByChannel.params_doc + """ - local_radius_um: float + radius_um: float The radius to use to select neighbour channels for locally exclusive detection. 
""" ) @@ -516,7 +516,7 @@ def check_params( peak_sign="neg", detect_threshold=5, exclude_sweep_ms=0.1, - local_radius_um=50, + radius_um=50, noise_levels=None, random_chunk_kwargs={}, ): @@ -533,7 +533,7 @@ def check_params( ) channel_distance = get_channel_distances(recording) - neighbours_mask = channel_distance < local_radius_um + neighbours_mask = channel_distance < radius_um return args + (neighbours_mask,) @classmethod @@ -580,7 +580,7 @@ class DetectPeakLocallyExclusiveTorch(PeakDetectorWrapper): params_doc = ( DetectPeakByChannel.params_doc + """ - local_radius_um: float + radius_um: float The radius to use to select neighbour channels for locally exclusive detection. """ ) @@ -594,7 +594,7 @@ def check_params( exclude_sweep_ms=0.1, noise_levels=None, device=None, - local_radius_um=50, + radius_um=50, return_tensor=False, random_chunk_kwargs={}, ): @@ -615,7 +615,7 @@ def check_params( neighbour_indices_by_chan = [] num_channels = recording.get_num_channels() for chan in range(num_channels): - neighbour_indices_by_chan.append(np.nonzero(channel_distance[chan] < local_radius_um)[0]) + neighbour_indices_by_chan.append(np.nonzero(channel_distance[chan] < radius_um)[0]) max_neighbs = np.max([len(neigh) for neigh in neighbour_indices_by_chan]) neighbours_idxs = num_channels * np.ones((num_channels, max_neighbs), dtype=int) for i, neigh in enumerate(neighbour_indices_by_chan): @@ -836,7 +836,7 @@ def check_params( peak_sign="neg", detect_threshold=5, exclude_sweep_ms=0.1, - local_radius_um=50, + radius_um=50, noise_levels=None, random_chunk_kwargs={}, ): @@ -847,7 +847,7 @@ def check_params( abs_threholds = noise_levels * detect_threshold exclude_sweep_size = int(exclude_sweep_ms * recording.get_sampling_frequency() / 1000.0) channel_distance = get_channel_distances(recording) - neighbours_mask = channel_distance < local_radius_um + neighbours_mask = channel_distance < radius_um executor = OpenCLDetectPeakExecutor(abs_threholds, exclude_sweep_size, neighbours_mask, peak_sign) diff --git a/src/spikeinterface/sortingcomponents/peak_localization.py b/src/spikeinterface/sortingcomponents/peak_localization.py index d1df720624..bd793b3f53 100644 --- a/src/spikeinterface/sortingcomponents/peak_localization.py +++ b/src/spikeinterface/sortingcomponents/peak_localization.py @@ -101,14 +101,14 @@ def localize_peaks(recording, peaks, method="center_of_mass", ms_before=0.5, ms_ class LocalizeBase(PipelineNode): - def __init__(self, recording, return_output=True, parents=None, local_radius_um=75.0): + def __init__(self, recording, return_output=True, parents=None, radius_um=75.0): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) - self.local_radius_um = local_radius_um + self.radius_um = radius_um self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um - self._kwargs["local_radius_um"] = local_radius_um + self.neighbours_mask = self.channel_distance < radius_um + self._kwargs["radius_um"] = radius_um def get_dtype(self): return self._dtype @@ -152,18 +152,14 @@ class LocalizeCenterOfMass(LocalizeBase): need_waveforms = True name = "center_of_mass" params_doc = """ - local_radius_um: float + radius_um: float Radius in um for channel sparsity. feature: str ['ptp', 'mean', 'energy', 'peak_voltage'] Feature to consider for computation. 
Default is 'ptp' """ - def __init__( - self, recording, return_output=True, parents=["extract_waveforms"], local_radius_um=75.0, feature="ptp" - ): - LocalizeBase.__init__( - self, recording, return_output=return_output, parents=parents, local_radius_um=local_radius_um - ) + def __init__(self, recording, return_output=True, parents=["extract_waveforms"], radius_um=75.0, feature="ptp"): + LocalizeBase.__init__(self, recording, return_output=return_output, parents=parents, radius_um=radius_um) self._dtype = np.dtype(dtype_localize_by_method["center_of_mass"]) assert feature in ["ptp", "mean", "energy", "peak_voltage"], f"{feature} is not a valid feature" @@ -216,7 +212,7 @@ class LocalizeMonopolarTriangulation(PipelineNode): need_waveforms = False name = "monopolar_triangulation" params_doc = """ - local_radius_um: float + radius_um: float For channel sparsity. max_distance_um: float, default: 1000 Boundary for distance estimation. @@ -234,15 +230,13 @@ def __init__( recording, return_output=True, parents=["extract_waveforms"], - local_radius_um=75.0, + radius_um=75.0, max_distance_um=150.0, optimizer="minimize_with_log_penality", enforce_decrease=True, feature="ptp", ): - LocalizeBase.__init__( - self, recording, return_output=return_output, parents=parents, local_radius_um=local_radius_um - ) + LocalizeBase.__init__(self, recording, return_output=return_output, parents=parents, radius_um=radius_um) assert feature in ["ptp", "energy", "peak_voltage"], f"{feature} is not a valid feature" self.max_distance_um = max_distance_um @@ -309,7 +303,7 @@ class LocalizeGridConvolution(PipelineNode): need_waveforms = True name = "grid_convolution" params_doc = """ - local_radius_um: float + radius_um: float Radius in um for channel sparsity. upsampling_um: float Upsampling resolution for the grid of templates @@ -333,7 +327,7 @@ def __init__( recording, return_output=True, parents=["extract_waveforms"], - local_radius_um=40.0, + radius_um=40.0, upsampling_um=5.0, sigma_um=np.linspace(5.0, 25.0, 5), sigma_ms=0.25, @@ -344,7 +338,7 @@ def __init__( ): PipelineNode.__init__(self, recording, return_output=return_output, parents=parents) - self.local_radius_um = local_radius_um + self.radius_um = radius_um self.sigma_um = sigma_um self.margin_um = margin_um self.upsampling_um = upsampling_um @@ -371,7 +365,7 @@ def __init__( self.prototype = self.prototype[:, np.newaxis] self.template_positions, self.weights, self.nearest_template_mask = get_grid_convolution_templates_and_weights( - contact_locations, self.local_radius_um, self.upsampling_um, self.sigma_um, self.margin_um + contact_locations, self.radius_um, self.upsampling_um, self.sigma_um, self.margin_um ) self.weights_sparsity_mask = self.weights > self.sparsity_threshold @@ -379,7 +373,7 @@ def __init__( self._dtype = np.dtype(dtype_localize_by_method["grid_convolution"]) self._kwargs.update( dict( - local_radius_um=self.local_radius_um, + radius_um=self.radius_um, prototype=self.prototype, template_positions=self.template_positions, nearest_template_mask=self.nearest_template_mask, diff --git a/src/spikeinterface/sortingcomponents/peak_pipeline.py b/src/spikeinterface/sortingcomponents/peak_pipeline.py index 9e43fd2d78..6f0f26201f 100644 --- a/src/spikeinterface/sortingcomponents/peak_pipeline.py +++ b/src/spikeinterface/sortingcomponents/peak_pipeline.py @@ -223,7 +223,7 @@ def __init__( ms_after: float, parents: Optional[List[PipelineNode]] = None, return_output: bool = False, - local_radius_um: float = 100.0, + radius_um: float = 100.0, ): """ 
Extract sparse waveforms from a recording. The strategy in this specific node is to reshape the waveforms @@ -260,10 +260,10 @@ def __init__( return_output=return_output, ) - self.local_radius_um = local_radius_um + self.radius_um = radius_um self.contact_locations = recording.get_channel_locations() self.channel_distance = get_channel_distances(recording) - self.neighbours_mask = self.channel_distance < local_radius_um + self.neighbours_mask = self.channel_distance < radius_um self.max_num_chans = np.max(np.sum(self.neighbours_mask, axis=1)) def get_trace_margin(self): diff --git a/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py b/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py index e46d037c9e..b3b5f656cb 100644 --- a/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py +++ b/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py @@ -34,8 +34,8 @@ def test_features_from_peaks(): feature_params = { "amplitude": {"all_channels": False, "peak_sign": "neg"}, "ptp": {"all_channels": False}, - "center_of_mass": {"local_radius_um": 120.0}, - "energy": {"local_radius_um": 160.0}, + "center_of_mass": {"radius_um": 120.0}, + "energy": {"radius_um": 160.0}, } features = compute_features_from_peaks(recording, peaks, feature_list, feature_params=feature_params, **job_kwargs) diff --git a/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py b/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py index 9860275739..0558c16cca 100644 --- a/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py +++ b/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py @@ -45,7 +45,7 @@ def setup_module(): extract_dense_waveforms = ExtractDenseWaveforms(recording, ms_before=0.1, ms_after=0.3, return_output=False) pipeline_nodes = [ extract_dense_waveforms, - LocalizeCenterOfMass(recording, parents=[extract_dense_waveforms], local_radius_um=60.0), + LocalizeCenterOfMass(recording, parents=[extract_dense_waveforms], radius_um=60.0), ] peaks, peak_locations = detect_peaks( recording, diff --git a/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py b/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py index 380bd67a94..f3ca8bf96d 100644 --- a/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py +++ b/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py @@ -139,7 +139,7 @@ def peak_detector_kwargs(recording): exclude_sweep_ms=1.0, peak_sign="both", detect_threshold=5, - local_radius_um=50, + radius_um=50, ) return peak_detector_keyword_arguments @@ -194,12 +194,12 @@ def test_iterative_peak_detection_sparse(recording, job_kwargs, pca_model_folder ms_before = 1.0 ms_after = 1.0 - local_radius_um = 40 + radius_um = 40 waveform_extraction_node = ExtractSparseWaveforms( recording=recording, ms_before=ms_before, ms_after=ms_after, - local_radius_um=local_radius_um, + radius_um=radius_um, ) waveform_denoising_node = TemporalPCADenoising( @@ -368,7 +368,7 @@ def test_peak_detection_with_pipeline(recording, job_kwargs, torch_job_kwargs): pipeline_nodes = [ extract_dense_waveforms, PeakToPeakFeature(recording, all_channels=False, parents=[extract_dense_waveforms]), - LocalizeCenterOfMass(recording, local_radius_um=50.0, parents=[extract_dense_waveforms]), + LocalizeCenterOfMass(recording, radius_um=50.0, parents=[extract_dense_waveforms]), ] peaks, ptp, peak_locations = detect_peaks( recording, diff --git 
a/src/spikeinterface/sortingcomponents/tests/test_waveforms/test_temporal_pca.py b/src/spikeinterface/sortingcomponents/tests/test_waveforms/test_temporal_pca.py index c4192c5fcf..34bc93fbfa 100644 --- a/src/spikeinterface/sortingcomponents/tests/test_waveforms/test_temporal_pca.py +++ b/src/spikeinterface/sortingcomponents/tests/test_waveforms/test_temporal_pca.py @@ -83,7 +83,7 @@ def test_pca_denoising_sparse(mearec_recording, detected_peaks, model_path_of_tr peaks = detected_peaks # Parameters - local_radius_um = 40 + radius_um = 40 ms_before = 1.0 ms_after = 1.0 @@ -94,7 +94,7 @@ def test_pca_denoising_sparse(mearec_recording, detected_peaks, model_path_of_tr parents=[peak_retriever], ms_before=ms_before, ms_after=ms_after, - local_radius_um=local_radius_um, + radius_um=radius_um, return_output=True, ) pca_denoising = TemporalPCADenoising( @@ -143,7 +143,7 @@ def test_pca_projection_sparsity(mearec_recording, detected_peaks, model_path_of peaks = detected_peaks # Parameters - local_radius_um = 40 + radius_um = 40 ms_before = 1.0 ms_after = 1.0 @@ -154,7 +154,7 @@ def test_pca_projection_sparsity(mearec_recording, detected_peaks, model_path_of parents=[peak_retriever], ms_before=ms_before, ms_after=ms_after, - local_radius_um=local_radius_um, + radius_um=radius_um, return_output=True, ) temporal_pca = TemporalPCAProjection( @@ -181,7 +181,7 @@ def test_initialization_with_wrong_parents_failure(mearec_recording, model_path_ model_folder_path = model_path_of_trained_pca dummy_parent = PipelineNode(recording=recording) extract_waveforms = ExtractSparseWaveforms( - recording=recording, ms_before=1, ms_after=1, local_radius_um=40, return_output=True + recording=recording, ms_before=1, ms_after=1, radius_um=40, return_output=True ) match_error = f"TemporalPCA should have a single {WaveformsNode.__name__} in its parents" diff --git a/src/spikeinterface/sortingcomponents/tools.py b/src/spikeinterface/sortingcomponents/tools.py index 5283fd0f99..14b66fc847 100644 --- a/src/spikeinterface/sortingcomponents/tools.py +++ b/src/spikeinterface/sortingcomponents/tools.py @@ -29,7 +29,7 @@ def get_prototype_spike(recording, peaks, job_kwargs, nb_peaks=1000, ms_before=0 ms_before=ms_before, ms_after=ms_after, return_output=True, - local_radius_um=5, + radius_um=5, ) nbefore = sparse_waveforms.nbefore diff --git a/src/spikeinterface/sortingcomponents/waveforms/temporal_pca.py b/src/spikeinterface/sortingcomponents/waveforms/temporal_pca.py index de96fe445a..28cf8a3be0 100644 --- a/src/spikeinterface/sortingcomponents/waveforms/temporal_pca.py +++ b/src/spikeinterface/sortingcomponents/waveforms/temporal_pca.py @@ -93,7 +93,7 @@ def fit( ms_before: float = 1.0, ms_after: float = 1.0, whiten: bool = True, - local_radius_um: float = None, + radius_um: float = None, ) -> IncrementalPCA: """ Train a pca model using the data in the recording object and the parameters provided. @@ -114,7 +114,7 @@ def fit( The parameters for peak selection. whiten : bool, optional Whether to whiten the data, by default True. - local_radius_um : float, optional + radius_um : float, optional The radius (in micrometers) to use for definint sparsity, by default None. ms_before : float, optional The number of milliseconds to include before the peak of the spike, by default 1. 
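Because this patch renames the keyword outright rather than keeping ``local_radius_um`` as an alias, downstream code that still builds parameter dictionaries with the old name (in the way the sorter wrappers above assemble their ``detection`` and ``localization`` params) will typically fail with an unexpected-keyword ``TypeError`` after upgrading. The shim below is a minimal, purely hypothetical migration helper — not part of SpikeInterface or of this patch — that translates old-style kwargs before they are forwarded to the renamed APIs::

    def translate_radius_kwargs(kwargs):
        """Return a copy of ``kwargs`` with the legacy ``local_radius_um`` renamed to ``radius_um``."""
        kwargs = dict(kwargs)
        if "local_radius_um" in kwargs:
            value = kwargs.pop("local_radius_um")
            # an explicit radius_um, if also present, takes precedence over the legacy key
            kwargs.setdefault("radius_um", value)
        return kwargs

    old_style = {"detect_threshold": 5, "local_radius_um": 50.0}
    assert translate_radius_kwargs(old_style) == {"detect_threshold": 5, "radius_um": 50.0}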
@@ -148,7 +148,7 @@ def fit( ) # compute PCA by_channel_global (with sparsity) - sparsity = ChannelSparsity.from_radius(we, radius_um=local_radius_um) if local_radius_um else None + sparsity = ChannelSparsity.from_radius(we, radius_um=radius_um) if radius_um else None pc = compute_principal_components( we, n_components=n_components, mode="by_channel_global", sparsity=sparsity, whiten=whiten )
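Taken together, every public entry point touched above now spells the spatial sparsity argument ``radius_um``. The sketch below exercises the renamed keyword end to end; it is illustrative only and not part of the patch. The toy recording from ``generate_recording``, the job settings, and the parameter values are assumptions chosen to keep the snippet self-contained (the ``locally_exclusive`` detector additionally requires ``numba``)::

    from spikeinterface.core import generate_recording
    from spikeinterface.sortingcomponents.peak_detection import detect_peaks
    from spikeinterface.sortingcomponents.peak_localization import localize_peaks

    # small synthetic recording with a dummy probe, just to make the example runnable
    rec = generate_recording(num_channels=32, sampling_frequency=30000.0, durations=[10.0])

    job_kwargs = dict(n_jobs=1, chunk_duration="1s", progress_bar=True)

    # same calls as in doc/how_to/analyse_neuropixels.rst, with the renamed keyword
    peaks = detect_peaks(rec, method="locally_exclusive",
                         detect_threshold=5, radius_um=50.0, **job_kwargs)
    peak_locations = localize_peaks(rec, peaks, method="center_of_mass",
                                    radius_um=50.0, ms_before=0.3, ms_after=0.6, **job_kwargs)

The same name now also carries through the sorter-level defaults (``params["general"]["radius_um"]`` in ``spykingcircus2`` and ``tridesclous2``), so one value can be threaded consistently from detection through localization, clustering and template matching.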