From fdb84668137ba71b1ca36787032551da52764842 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 28 Sep 2023 08:21:36 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../sortingcomponents/clustering/random_projections.py  | 4 +++-
 src/spikeinterface/sortingcomponents/matching/circus.py | 8 +++++---
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/spikeinterface/sortingcomponents/clustering/random_projections.py b/src/spikeinterface/sortingcomponents/clustering/random_projections.py
index df9290a1f5..864548e7d4 100644
--- a/src/spikeinterface/sortingcomponents/clustering/random_projections.py
+++ b/src/spikeinterface/sortingcomponents/clustering/random_projections.py
@@ -127,7 +127,9 @@ def sigmoid(x, L, x0, k, b):
 
         pipeline_nodes = [node0, node1, node2, node3]
 
-        hdbscan_data = run_node_pipeline(recording, pipeline_nodes, params["job_kwargs"], job_name="extracting features")
+        hdbscan_data = run_node_pipeline(
+            recording, pipeline_nodes, params["job_kwargs"], job_name="extracting features"
+        )
 
         import sklearn
 
diff --git a/src/spikeinterface/sortingcomponents/matching/circus.py b/src/spikeinterface/sortingcomponents/matching/circus.py
index b963447ba2..358691cd25 100644
--- a/src/spikeinterface/sortingcomponents/matching/circus.py
+++ b/src/spikeinterface/sortingcomponents/matching/circus.py
@@ -687,16 +687,18 @@ def main_function(cls, traces, d):
         # Filter using overlap-and-add convolution
         if len(ignored_ids) > 0:
             mask = ~np.isin(np.arange(num_templates), ignored_ids)
-            spatially_filtered_data = np.matmul(d["spatial"][:, mask, :], traces.T[np.newaxis, :, :])
+            spatially_filtered_data = np.matmul(d["spatial"][:, mask, :], traces.T[np.newaxis, :, :])
             scaled_filtered_data = spatially_filtered_data * d["singular"][:, mask, :]
-            objective_by_rank = scipy.signal.oaconvolve(scaled_filtered_data, d["temporal"][:, mask, :], axes=2, mode="valid")
+            objective_by_rank = scipy.signal.oaconvolve(
+                scaled_filtered_data, d["temporal"][:, mask, :], axes=2, mode="valid"
+            )
             scalar_products[mask] += np.sum(objective_by_rank, axis=0)
             scalar_products[ignored_ids] = -np.inf
         else:
             spatially_filtered_data = np.matmul(d["spatial"], traces.T[np.newaxis, :, :])
             scaled_filtered_data = spatially_filtered_data * d["singular"]
             objective_by_rank = scipy.signal.oaconvolve(scaled_filtered_data, d["temporal"], axes=2, mode="valid")
-            scalar_products += np.sum(objective_by_rank, axis=0)
+            scalar_products += np.sum(objective_by_rank, axis=0)
 
         num_spikes = 0
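
Note (not part of the patch): the circus.py hunk above only re-wraps the call to scipy.signal.oaconvolve; the matching step itself is unchanged. As a rough illustration of that step, the sketch below reproduces the same pattern with made-up array shapes: project the traces onto per-template spatial components, scale by singular values, convolve with the temporal components along the time axis via overlap-and-add, and sum over the rank axis. All names and dimensions here are illustrative assumptions, not SpikeInterface's actual data layout.

# Illustrative sketch only -- shapes and variable names are assumptions,
# not SpikeInterface's internals.
import numpy as np
import scipy.signal

rank, num_templates, num_channels = 3, 5, 16
num_samples, num_timesteps = 2000, 31

rng = np.random.default_rng(0)
traces = rng.standard_normal((num_samples, num_channels))            # recording chunk
spatial = rng.standard_normal((rank, num_templates, num_channels))   # spatial SVD components
singular = rng.standard_normal((rank, num_templates, 1))             # singular values
temporal = rng.standard_normal((rank, num_templates, num_timesteps)) # temporal SVD components

# (rank, template, channel) @ (1, channel, sample) -> (rank, template, sample)
spatially_filtered_data = np.matmul(spatial, traces.T[np.newaxis, :, :])
scaled_filtered_data = spatially_filtered_data * singular
# overlap-and-add convolution along the time axis (the call the patch re-wraps)
objective_by_rank = scipy.signal.oaconvolve(scaled_filtered_data, temporal, axes=2, mode="valid")
scalar_products = np.sum(objective_by_rank, axis=0)  # (template, sample - timestep + 1)
print(scalar_products.shape)  # -> (5, 1970)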