diff --git a/src/spikeinterface/core/job_tools.py b/src/spikeinterface/core/job_tools.py
index fa79a8ce01..9cf22563d7 100644
--- a/src/spikeinterface/core/job_tools.py
+++ b/src/spikeinterface/core/job_tools.py
@@ -47,7 +47,6 @@
     "chunk_duration",
     "progress_bar",
     "mp_context",
-    "verbose",
     "max_threads_per_process",
 )
 
diff --git a/src/spikeinterface/core/recording_tools.py b/src/spikeinterface/core/recording_tools.py
index e698302ee1..8f9e67c954 100644
--- a/src/spikeinterface/core/recording_tools.py
+++ b/src/spikeinterface/core/recording_tools.py
@@ -73,6 +73,7 @@ def write_binary_recording(
     add_file_extension: bool = True,
     byte_offset: int = 0,
     auto_cast_uint: bool = True,
+    verbose: bool = True,
     **job_kwargs,
 ):
     """
@@ -98,6 +99,8 @@
     auto_cast_uint: bool, default: True
         If True, unsigned integers are automatically cast to int if the specified dtype is signed
         .. deprecated:: 0.103, use the `unsigned_to_signed` function instead.
+    verbose: bool
+        If True, output is verbose
     {}
     """
     job_kwargs = fix_job_kwargs(job_kwargs)
@@ -138,7 +141,7 @@
     init_func = _init_binary_worker
     init_args = (recording, file_path_dict, dtype, byte_offset, cast_unsigned)
     executor = ChunkRecordingExecutor(
-        recording, func, init_func, init_args, job_name="write_binary_recording", **job_kwargs
+        recording, func, init_func, init_args, job_name="write_binary_recording", verbose=verbose, **job_kwargs
     )
     executor.run()
 
diff --git a/src/spikeinterface/core/tests/test_recording_tools.py b/src/spikeinterface/core/tests/test_recording_tools.py
index 5e0b77a151..d83e4d76fc 100644
--- a/src/spikeinterface/core/tests/test_recording_tools.py
+++ b/src/spikeinterface/core/tests/test_recording_tools.py
@@ -37,8 +37,8 @@ def test_write_binary_recording(tmp_path):
     file_paths = [tmp_path / "binary01.raw"]
 
     # Write binary recording
-    job_kwargs = dict(verbose=False, n_jobs=1)
-    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, **job_kwargs)
+    job_kwargs = dict(n_jobs=1)
+    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, verbose=False, **job_kwargs)
 
     # Check if written data matches original data
     recorder_binary = BinaryRecordingExtractor(
@@ -64,9 +64,11 @@ def test_write_binary_recording_offset(tmp_path):
     file_paths = [tmp_path / "binary01.raw"]
 
     # Write binary recording
-    job_kwargs = dict(verbose=False, n_jobs=1)
+    job_kwargs = dict(n_jobs=1)
     byte_offset = 125
-    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, byte_offset=byte_offset, **job_kwargs)
+    write_binary_recording(
+        recording, file_paths=file_paths, dtype=dtype, byte_offset=byte_offset, verbose=False, **job_kwargs
+    )
 
     # Check if written data matches original data
     recorder_binary = BinaryRecordingExtractor(
@@ -97,8 +99,8 @@ def test_write_binary_recording_parallel(tmp_path):
     file_paths = [tmp_path / "binary01.raw", tmp_path / "binary02.raw"]
 
     # Write binary recording
-    job_kwargs = dict(verbose=False, n_jobs=2, chunk_memory="100k", mp_context="spawn")
-    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, **job_kwargs)
+    job_kwargs = dict(n_jobs=2, chunk_memory="100k", mp_context="spawn")
+    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, verbose=False, **job_kwargs)
 
     # Check if written data matches original data
     recorder_binary = BinaryRecordingExtractor(
@@ -127,8 +129,8 @@ def test_write_binary_recording_multiple_segment(tmp_path):
     file_paths = [tmp_path / "binary01.raw", tmp_path / "binary02.raw"]
 
     # Write binary recording
-    job_kwargs = dict(verbose=False, n_jobs=2, chunk_memory="100k", mp_context="spawn")
-    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, **job_kwargs)
+    job_kwargs = dict(n_jobs=2, chunk_memory="100k", mp_context="spawn")
+    write_binary_recording(recording, file_paths=file_paths, dtype=dtype, verbose=False, **job_kwargs)
 
     # Check if written data matches original data
     recorder_binary = BinaryRecordingExtractor(
diff --git a/src/spikeinterface/sorters/internal/simplesorter.py b/src/spikeinterface/sorters/internal/simplesorter.py
index 314c552d6d..199352ab73 100644
--- a/src/spikeinterface/sorters/internal/simplesorter.py
+++ b/src/spikeinterface/sorters/internal/simplesorter.py
@@ -71,7 +71,7 @@ def get_sorter_version(cls):
     def _run_from_folder(cls, sorter_output_folder, params, verbose):
         job_kwargs = params["job_kwargs"]
         job_kwargs = fix_job_kwargs(job_kwargs)
-        job_kwargs.update({"verbose": verbose, "progress_bar": verbose})
+        job_kwargs.update({"progress_bar": verbose})
 
         from spikeinterface.sortingcomponents.peak_detection import detect_peaks
         from spikeinterface.sortingcomponents.tools import extract_waveform_at_max_channel
diff --git a/src/spikeinterface/sorters/internal/spyking_circus2.py b/src/spikeinterface/sorters/internal/spyking_circus2.py
index 2af28fb179..c1021e787a 100644
--- a/src/spikeinterface/sorters/internal/spyking_circus2.py
+++ b/src/spikeinterface/sorters/internal/spyking_circus2.py
@@ -113,7 +113,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
 
         job_kwargs = params["job_kwargs"]
         job_kwargs = fix_job_kwargs(job_kwargs)
-        job_kwargs.update({"verbose": verbose, "progress_bar": verbose})
+        job_kwargs.update({"progress_bar": verbose})
 
         recording = cls.load_recording_from_folder(sorter_output_folder.parent, with_warnings=False)
 
diff --git a/src/spikeinterface/sortingcomponents/clustering/circus.py b/src/spikeinterface/sortingcomponents/clustering/circus.py
index 06d2f1f6db..c9aaee1329 100644
--- a/src/spikeinterface/sortingcomponents/clustering/circus.py
+++ b/src/spikeinterface/sortingcomponents/clustering/circus.py
@@ -63,6 +63,7 @@ class CircusClustering:
         "noise_levels": None,
         "tmp_folder": None,
         "job_kwargs": {},
+        "verbose": True,
     }
 
     @classmethod
@@ -72,7 +73,7 @@ def main_function(cls, recording, peaks, params):
 
         job_kwargs = fix_job_kwargs(params["job_kwargs"])
         d = params
-        verbose = job_kwargs.get("verbose", True)
+        verbose = d["verbose"]
 
         fs = recording.get_sampling_frequency()
         ms_before = params["ms_before"]
@@ -250,7 +251,6 @@ def main_function(cls, recording, peaks, params):
             cleaning_matching_params.pop(value)
         cleaning_matching_params["chunk_duration"] = "100ms"
         cleaning_matching_params["n_jobs"] = 1
-        cleaning_matching_params["verbose"] = False
         cleaning_matching_params["progress_bar"] = False
 
         cleaning_params = params["cleaning_kwargs"].copy()
diff --git a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
index a07a6140e1..d23eb26239 100644
--- a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
+++ b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
@@ -42,7 +42,7 @@ class PositionAndFeaturesClustering:
         "ms_before": 1.5,
         "ms_after": 1.5,
         "cleaning_method": "dip",
-        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "verbose": True, "progress_bar": True},
+        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "progress_bar": True},
     }
 
     @classmethod
diff --git a/src/spikeinterface/sortingcomponents/clustering/position_and_pca.py b/src/spikeinterface/sortingcomponents/clustering/position_and_pca.py
index 0b1b8cc742..4dfe3c960c 100644
--- a/src/spikeinterface/sortingcomponents/clustering/position_and_pca.py
+++ b/src/spikeinterface/sortingcomponents/clustering/position_and_pca.py
@@ -38,7 +38,7 @@ class PositionAndPCAClustering:
         "ms_after": 2.5,
         "n_components_by_channel": 3,
         "n_components": 5,
-        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "verbose": True, "progress_bar": True},
+        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "progress_bar": True},
         "hdbscan_global_kwargs": {"min_cluster_size": 20, "allow_single_cluster": True, "core_dist_n_jobs": -1},
         "hdbscan_local_kwargs": {"min_cluster_size": 20, "allow_single_cluster": True, "core_dist_n_jobs": -1},
         "waveform_mode": "shared_memory",
diff --git a/src/spikeinterface/sortingcomponents/clustering/position_ptp_scaled.py b/src/spikeinterface/sortingcomponents/clustering/position_ptp_scaled.py
index 2195362543..788addf1e6 100644
--- a/src/spikeinterface/sortingcomponents/clustering/position_ptp_scaled.py
+++ b/src/spikeinterface/sortingcomponents/clustering/position_ptp_scaled.py
@@ -26,7 +26,7 @@ class PositionPTPScaledClustering:
         "ptps": None,
         "scales": (1, 1, 10),
         "peak_localization_kwargs": {"method": "center_of_mass"},
-        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "verbose": True, "progress_bar": True},
+        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "progress_bar": True},
         "hdbscan_kwargs": {
             "min_cluster_size": 20,
             "min_samples": 20,
diff --git a/src/spikeinterface/sortingcomponents/clustering/random_projections.py b/src/spikeinterface/sortingcomponents/clustering/random_projections.py
index 6c1ad75383..77d47aec16 100644
--- a/src/spikeinterface/sortingcomponents/clustering/random_projections.py
+++ b/src/spikeinterface/sortingcomponents/clustering/random_projections.py
@@ -56,6 +56,7 @@ class RandomProjectionClustering:
         "smoothing_kwargs": {"window_length_ms": 0.25},
         "tmp_folder": None,
         "job_kwargs": {},
+        "verbose": True,
     }
 
     @classmethod
@@ -65,7 +66,7 @@ def main_function(cls, recording, peaks, params):
 
         job_kwargs = fix_job_kwargs(params["job_kwargs"])
         d = params
-        verbose = job_kwargs.get("verbose", True)
+        verbose = d["verbose"]
 
         fs = recording.get_sampling_frequency()
         radius_um = params["radius_um"]
@@ -161,7 +162,6 @@ def main_function(cls, recording, peaks, params):
             cleaning_matching_params[value] = None
         cleaning_matching_params["chunk_duration"] = "100ms"
         cleaning_matching_params["n_jobs"] = 1
-        cleaning_matching_params["verbose"] = False
         cleaning_matching_params["progress_bar"] = False
 
         cleaning_params = params["cleaning_kwargs"].copy()
diff --git a/src/spikeinterface/sortingcomponents/clustering/sliding_hdbscan.py b/src/spikeinterface/sortingcomponents/clustering/sliding_hdbscan.py
index 2ae22ce07d..8b9acbc92d 100644
--- a/src/spikeinterface/sortingcomponents/clustering/sliding_hdbscan.py
+++ b/src/spikeinterface/sortingcomponents/clustering/sliding_hdbscan.py
@@ -55,7 +55,7 @@ class SlidingHdbscanClustering:
         "auto_merge_quantile_limit": 0.8,
         "ratio_num_channel_intersect": 0.5,
         # ~ 'auto_trash_misalignment_shift' : 4,
-        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "verbose": True, "progress_bar": True},
+        "job_kwargs": {"n_jobs": -1, "chunk_memory": "10M", "progress_bar": True},
     }
 
     @classmethod
diff --git a/src/spikeinterface/sortingcomponents/tests/test_clustering.py b/src/spikeinterface/sortingcomponents/tests/test_clustering.py
index 427d120c5d..76a9d8a85e 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_clustering.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_clustering.py
@@ -13,7 +13,7 @@
 
 
 def job_kwargs():
-    return dict(n_jobs=1, chunk_size=10000, progress_bar=True, verbose=True, mp_context="spawn")
+    return dict(n_jobs=1, chunk_size=10000, progress_bar=True, mp_context="spawn")
 
 
 @pytest.fixture(name="job_kwargs", scope="module")
@@ -78,6 +78,7 @@ def test_find_cluster_from_peaks(clustering_method, recording, peaks, peak_locat
     peak_locations = run_peak_locations(recording, peaks, job_kwargs)
     # method = "position_and_pca"
     # method = "circus"
-    method = "tdc_clustering"
+    # method = "tdc_clustering"
+    method = "random_projections"
 
     test_find_cluster_from_peaks(method, recording, peaks, peak_locations)
diff --git a/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py b/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py
index 36d2d34f4d..36f623ebf8 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_motion_estimation.py
@@ -47,7 +47,6 @@
         detect_threshold=5,
         exclude_sweep_ms=0.1,
         chunk_size=10000,
-        verbose=1,
         progress_bar=True,
         pipeline_nodes=pipeline_nodes,
     )
@@ -156,12 +155,14 @@
             bin_um=10.0,
             margin_um=5,
             output_extra_check=True,
-            progress_bar=False,
-            verbose=False,
         )
         kwargs.update(cases_kwargs)
 
-        motion, temporal_bins, spatial_bins, extra_check = estimate_motion(recording, peaks, peak_locations, **kwargs)
+        job_kwargs = dict(progress_bar=False)
+
+        motion, temporal_bins, spatial_bins, extra_check = estimate_motion(
+            recording, peaks, peak_locations, **kwargs, **job_kwargs
+        )
 
         motions[name] = motion
 
diff --git a/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py b/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py
index d36c59dc69..2ecccb421c 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_peak_detection.py
@@ -60,7 +60,7 @@ def sorting(dataset):
 
 
 def job_kwargs():
-    return dict(n_jobs=1, chunk_size=10000, progress_bar=True, verbose=True, mp_context="spawn")
+    return dict(n_jobs=1, chunk_size=10000, progress_bar=True, mp_context="spawn")
 
 
 @pytest.fixture(name="job_kwargs", scope="module")
diff --git a/src/spikeinterface/sortingcomponents/tests/test_peak_localization.py b/src/spikeinterface/sortingcomponents/tests/test_peak_localization.py
index 33d45af6c4..a10a81ec80 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_peak_localization.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_peak_localization.py
@@ -10,8 +10,8 @@
 def test_localize_peaks():
     recording, _ = make_dataset()
 
-    # job_kwargs = dict(n_jobs=2, chunk_size=10000, verbose=False, progress_bar=True)
-    job_kwargs = dict(n_jobs=1, chunk_size=10000, verbose=False, progress_bar=True)
+    # job_kwargs = dict(n_jobs=2, chunk_size=10000, progress_bar=True)
+    job_kwargs = dict(n_jobs=1, chunk_size=10000, progress_bar=True)
 
     peaks = detect_peaks(
         recording, method="locally_exclusive", peak_sign="neg", detect_threshold=5, exclude_sweep_ms=0.1, **job_kwargs
diff --git a/src/spikeinterface/sortingcomponents/tests/test_peak_selection.py b/src/spikeinterface/sortingcomponents/tests/test_peak_selection.py
index 4326f21512..d133a0f9d2 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_peak_selection.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_peak_selection.py
@@ -23,13 +23,12 @@ def test_select_peaks():
         detect_threshold=5,
         exclude_sweep_ms=0.1,
         chunk_size=10000,
-        verbose=1,
         progress_bar=False,
         noise_levels=noise_levels,
     )
 
     peak_locations = localize_peaks(
-        recording, peaks, method="center_of_mass", n_jobs=2, chunk_size=10000, verbose=True, progress_bar=True
+        recording, peaks, method="center_of_mass", n_jobs=2, chunk_size=10000, progress_bar=True
     )
 
     n_peaks = 100
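
Usage note: a minimal sketch of the calling convention this patch moves to. The toy recording from `generate_recording` and the output path are illustrative, not part of the patch; the point is that `verbose` becomes an explicit argument of `write_binary_recording`, while `job_kwargs` carries only parallelization options.

# Sketch, not part of the patch: "verbose" is passed explicitly, since it is
# no longer one of the recognized job_keys in job_tools.py.
from spikeinterface.core import generate_recording
from spikeinterface.core.recording_tools import write_binary_recording

recording = generate_recording(durations=[2.0], num_channels=4)

# Parallelization options only; "verbose" would no longer be accepted here.
job_kwargs = dict(n_jobs=2, chunk_memory="100k", mp_context="spawn")

write_binary_recording(
    recording,
    file_paths=["traces.raw"],
    dtype="float32",
    verbose=False,  # explicit keyword argument, not a job kwarg
    **job_kwargs,
)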