Skip to content

Commit

Permalink
Add unsaved files
Browse files Browse the repository at this point in the history
  • Loading branch information
alejoe91 committed Oct 27, 2023
1 parent d6e1e0d commit c15d6d3
Show file tree
Hide file tree
Showing 7 changed files with 18 additions and 18 deletions.
4 changes: 2 additions & 2 deletions src/spikeinterface/core/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,8 +269,8 @@ def copy_metadata(
If True, only the main annotations/properties are copied.
ids: list
List of ids to copy the metadata to. If None, all ids are copied.
skip_properties: list
List of properties to skip. Default is None.
skip_properties: list, default: None
List of properties to skip
"""

if ids is None:
Expand Down
2 changes: 1 addition & 1 deletion src/spikeinterface/extractors/neoextractors/openephys.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ def read_openephys(folder_path, **kwargs):

def read_openephys_event(folder_path, block_index=None):
"""
Read Open Ephys events from 'binary' format.
Read Open Ephys events from "binary" format.
Parameters
----------
Expand Down
2 changes: 1 addition & 1 deletion src/spikeinterface/extractors/neuropixels_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def synchronize_neuropixel_streams(recording_ref, recording_other):
Method used :
1. detect pulse times on both streams.
2. make a linear regression from 'other' to 'ref'.
2. make a linear regression from "other" to "ref".
The slope is close to 1 and corresponds to the sample rate correction
The intercept is close to 0 and corresponds to the delta time start
Expand Down
2 changes: 1 addition & 1 deletion src/spikeinterface/sorters/launcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,7 @@ def run_sorters(
* "raise" : raise error if subfolder exists
* "overwrite" : delete and force recompute
* "keep" : do not compute again if subfolder exists and log is OK
engine: "loop" | "joblib" | "dask', default: "loop"
engine: "loop" | "joblib" | "dask", default: "loop"
Which engine to use to run sorter.
engine_kwargs: dict
This contains kwargs specific to the launcher engine:
Expand Down
10 changes: 5 additions & 5 deletions src/spikeinterface/sortingcomponents/motion_estimation.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,10 +226,10 @@ class DecentralizedRegistration:
error_sigma: float, default: 0.2
In case weight_scale="exp" this controls the sigma of the exponential.
conv_engine: "numpy" or "torch" or None, default: None
In case of pairwise_displacement_method="conv', what library to use to compute
In case of pairwise_displacement_method="conv", what library to use to compute
the underlying correlation
torch_device=None
In case of conv_engine='torch', you can control which device (cpu or gpu)
In case of conv_engine="torch", you can control which device (cpu or gpu)
batch_size: int
Size of batch for the convolution. Increasing this will speed things up dramatically
on GPUs and sometimes on CPU as well.
Expand All @@ -241,10 +241,10 @@ class DecentralizedRegistration:
When not None the pairwise displacement matrix is computed in a small time horizon.
In short, only pairs of bins close in time.
So the pairwise matrix is super sparse and has values only on the diagonal.
convergence_method: 'lsmr', 'lsqr_robust', 'gradient_descent'
convergence_method: "lsmr" | "lsqr_robust" | "gradient_descent", default: "lsqr_robust"
Which method to use to compute the global displacement vector from the pairwise matrix.
robust_regression_sigma: float
Use for convergence_method='lsqr_robust' for iterative selection of the regression.
Use for convergence_method="lsqr_robust" for iterative selection of the regression.
temporal_prior : bool, default: True
Ensures continuity across time, unless there is evidence in the recording for jumps.
spatial_prior : bool, default: False
Expand All @@ -259,7 +259,7 @@ class DecentralizedRegistration:
- "time" : the displacement at a given time (in seconds) is subtracted
- "mode_search" : an attempt is made to guess the mode. needs work.
lsqr_robust_n_iter: int
Number of iteration for convergence_method='lsqr_robust'.
Number of iteration for convergence_method="lsqr_robust".
"""

@classmethod
Expand Down
12 changes: 6 additions & 6 deletions src/spikeinterface/sortingcomponents/peak_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ def detect_peaks(
):
"""Peak detection based on threshold crossing in terms of k x MAD.
In 'by_channel' : peak are detected in each channel independently
In 'locally_exclusive' : a single best peak is taken from a set of neighboring channels
In "by_channel" : peak are detected in each channel independently
In "locally_exclusive" : a single best peak is taken from a set of neighboring channels
Parameters
----------
Expand Down Expand Up @@ -357,7 +357,7 @@ def compute(self, traces, start_frame, end_frame, segment_index, max_margin):


class DetectPeakByChannel(PeakDetectorWrapper):
"""Detect peaks using the 'by channel' method."""
"""Detect peaks using the "by channel" method."""

name = "by_channel"
engine = "numpy"
Expand Down Expand Up @@ -439,7 +439,7 @@ def detect_peaks(cls, traces, peak_sign, abs_threholds, exclude_sweep_size):


class DetectPeakByChannelTorch(PeakDetectorWrapper):
"""Detect peaks using the 'by channel' method with pytorch."""
"""Detect peaks using the "by channel" method with pytorch."""

name = "by_channel_torch"
engine = "torch"
Expand Down Expand Up @@ -505,7 +505,7 @@ def detect_peaks(cls, traces, peak_sign, abs_threholds, exclude_sweep_size, devi


class DetectPeakLocallyExclusive(PeakDetectorWrapper):
"""Detect peaks using the 'locally exclusive' method."""
"""Detect peaks using the "locally exclusive" method."""

name = "locally_exclusive"
engine = "numba"
Expand Down Expand Up @@ -581,7 +581,7 @@ def detect_peaks(cls, traces, peak_sign, abs_threholds, exclude_sweep_size, neig


class DetectPeakLocallyExclusiveTorch(PeakDetectorWrapper):
"""Detect peaks using the 'locally exclusive' method with pytorch."""
"""Detect peaks using the "locally exclusive" method with pytorch."""

name = "locally_exclusive_torch"
engine = "torch"
Expand Down
4 changes: 2 additions & 2 deletions src/spikeinterface/widgets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

def get_default_plotter_backend():
"""Return the default backend for spikeinterface widgets.
The default backend is 'matplotlib' at init.
The default backend is "matplotlib" at init.
It can be globally set with `set_default_plotter_backend(backend)`
"""

Expand Down Expand Up @@ -123,7 +123,7 @@ def __init__(self, d):
Helper function that transform a dict into
an object where attributes are the keys of the dict
d = {'a': 1, 'b': 'yep'}
d = {"a": 1, "b": "yep"}
o = to_attr(d)
print(o.a, o.b)
"""
Expand Down

0 comments on commit c15d6d3

Please sign in to comment.