Commit a3ead81
Some more optimizations, to reduce memory after merges
yger committed Jul 8, 2024
1 parent 8a25138 commit a3ead81
Showing 2 changed files with 4 additions and 3 deletions.
src/spikeinterface/core/analyzer_extension_core.py (2 additions, 2 deletions)
@@ -255,6 +255,7 @@ def _merge_extension_data(
 
         if sparsity is not None:
 
+            max_num_chans = np.sum(new_sorting_analyzer.sparsity.mask, 1).max()
             new_data["waveforms"] = waveforms.copy()
             for to_be_merged, unit_id in zip(units_to_merge, new_unit_ids):
                 new_channel_ids = sparsity.unit_id_to_channel_ids[unit_id]
@@ -263,8 +264,7 @@
                 )
                 num_chans = new_waveforms.shape[2]
                 new_data["waveforms"][spike_indices, :, :num_chans] = new_waveforms
-                new_data["waveforms"][spike_indices, :, num_chans:] = 0
-
+            new_data["waveforms"] = new_data["waveforms"][:, :, :max_num_chans]
         else:
             new_data["waveforms"] = waveforms
 
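The change above swaps per-unit zero-padding of the trailing channel columns for a single trim of the whole merged waveforms array to max_num_chans, the widest channel count in the merged analyzer's sparsity. A minimal NumPy sketch of that idea, detached from the actual extension code and using made-up shapes and a hypothetical sparsity_mask, might look like:

import numpy as np

# Illustrative sizes only: 1000 random spikes, 90 samples per waveform, 64 channels in total.
n_spikes, n_samples, n_channels = 1000, 90, 64
rng = np.random.default_rng(0)
waveforms = rng.standard_normal((n_spikes, n_samples, n_channels), dtype="float32")

# Hypothetical boolean sparsity mask of the merged analyzer (n_units x n_channels).
sparsity_mask = np.zeros((5, n_channels), dtype=bool)
for row in sparsity_mask:
    row[rng.choice(n_channels, size=12, replace=False)] = True

# The widest unit after the merge bounds how many channel columns are actually needed.
max_num_chans = np.sum(sparsity_mask, 1).max()

# Old behaviour: keep all 64 columns and zero the unused tail per unit.
# New behaviour: drop the tail once for the whole array.
trimmed = waveforms[:, :, :max_num_chans].copy()  # a bare slice is only a view; copying
                                                  # (or saving the slice) realizes the saving
print(f"{waveforms.nbytes / 1e6:.1f} MB -> {trimmed.nbytes / 1e6:.1f} MB")  # ~23.0 MB -> ~4.3 MB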
src/spikeinterface/postprocessing/principal_component.py (2 additions, 1 deletion)
@@ -118,6 +118,7 @@ def _merge_extension_data(
 
         if sparsity is not None:
 
+            max_num_chans = np.sum(new_sorting_analyzer.sparsity.mask, 1).max()
             new_data["pca_projection"] = pca_projections.copy()
             for to_be_merge, unit_id in zip(units_to_merge, new_unit_ids):
                 new_channel_ids = sparsity.unit_id_to_channel_ids[unit_id]
@@ -126,7 +127,7 @@
                 )
                 num_chans = new_projections.shape[2]
                 new_data["pca_projection"][spike_indices, :, :num_chans] = new_projections
-                new_data["pca_projection"][spike_indices, :, num_chans:] = 0
+            new_data["pca_projection"] = new_data["pca_projection"][:, :, :max_num_chans]
         else:
             new_data["pca_projection"] = pca_projections
 
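The principal_component.py hunks follow the same fill-then-trim pattern as the waveforms change. As a rough, self-contained sketch of that loop (every name and shape below is invented for illustration; the real method takes spike indices and per-unit channel counts from the analyzer's sparsity):

import numpy as np

n_spikes, n_components, n_channels = 200, 5, 32
rng = np.random.default_rng(1)
pca_projection = rng.standard_normal((n_spikes, n_components, n_channels), dtype="float32")

# Hypothetical merged units: which spikes belong to each and how many channels each keeps.
spike_indices_per_unit = {"unit_ab": np.arange(0, 100), "unit_cd": np.arange(100, 200)}
num_chans_per_unit = {"unit_ab": 10, "unit_cd": 14}
max_num_chans = max(num_chans_per_unit.values())  # from the new sparsity mask in the real code

for unit_id, spike_indices in spike_indices_per_unit.items():
    num_chans = num_chans_per_unit[unit_id]
    new_projections = rng.standard_normal((len(spike_indices), n_components, num_chans), dtype="float32")
    # Fill only the leading channel columns this merged unit actually uses;
    # the removed line used to zero the trailing columns here on every iteration.
    pca_projection[spike_indices, :, :num_chans] = new_projections

# One trim at the end drops the unused trailing channels for the whole array.
pca_projection = pca_projection[:, :, :max_num_chans]
print(pca_projection.shape)  # (200, 5, 14)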