Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Port SpikeInterface update for tutorial generation #917

Draft
wants to merge 11 commits into
base: main
Choose a base branch
from
5 changes: 1 addition & 4 deletions environments/environment-Linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,7 @@ dependencies:
- flask-cors == 4.0.0
- flask_restx == 1.1.0
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
- spikeinterface == 0.100.5
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.5
- scikit-learn == 1.4.0 # Tutorial data generation
- tqdm_publisher >= 0.0.1 # Progress bars
- tzlocal >= 5.2 # Frontend timezone handling
7 changes: 1 addition & 6 deletions environments/environment-MAC-apple-silicon.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,7 @@ dependencies:
- flask-cors == 4.0.0
- flask_restx == 1.1.0
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
# NOTE: the NeuroConv wheel on PyPI includes sonpy which is not compatible with arm64, so build and install
# NeuroConv from GitHub, which will remove the sonpy dependency when building from Mac arm64
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
- spikeinterface == 0.100.5
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.5
- scikit-learn == 1.4.0 # Tutorial data generation
- tqdm_publisher >= 0.0.1 # Progress bars
- tzlocal >= 5.2 # Frontend timezone handling
5 changes: 1 addition & 4 deletions environments/environment-MAC-intel.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,7 @@ dependencies:
- flask-cors == 4.0.0
- flask_restx == 1.1.0
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
- spikeinterface == 0.100.5
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.5
- scikit-learn == 1.4.0 # Tutorial data generation
- tqdm_publisher >= 0.0.1 # Progress bars
- tzlocal >= 5.2 # Frontend timezone handling
5 changes: 1 addition & 4 deletions environments/environment-Windows.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,7 @@ dependencies:
- flask-cors === 3.0.10
- flask_restx == 1.1.0
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
- spikeinterface == 0.100.5
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.5
- scikit-learn == 1.4.0 # Tutorial data generation
- tqdm_publisher >= 0.0.1 # Progress bars
- tzlocal >= 5.2 # Frontend timezone handling
35 changes: 22 additions & 13 deletions src/pyflask/manageNeuroconv/manage_neuroconv.py
Original file line number Diff line number Diff line change
Expand Up @@ -1668,11 +1668,11 @@ def generate_test_data(output_path: str):
"""
Autogenerate the data formats needed for the tutorial pipeline.

Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy spiking data.
Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy sorting data.
"""
import spikeinterface
from spikeinterface.exporters import export_to_phy
from spikeinterface.preprocessing import bandpass_filter, resample, scale
import spikeinterface.exporters
import spikeinterface.preprocessing

base_path = Path(output_path)
spikeglx_output_folder = base_path / "spikeglx"
Expand All @@ -1687,8 +1687,8 @@ def generate_test_data(output_path: str):
lf_sampling_frequency = 2_500.0
downsample_factor = int(ap_sampling_frequency / lf_sampling_frequency)

# Generate synthetic spiking and voltage traces with waveforms around them
artificial_ap_band_in_uV, spiking = spikeinterface.generate_ground_truth_recording(
# Generate synthetic sorting and voltage traces with waveforms around them
artificial_ap_band_in_uV, sorting = spikeinterface.generate_ground_truth_recording(
durations=[duration_in_s],
sampling_frequency=ap_sampling_frequency,
num_channels=number_of_channels,
Expand All @@ -1697,12 +1697,18 @@ def generate_test_data(output_path: str):
seed=0, # Fixed seed for reproducibility
)

unscaled_artificial_ap_band = scale(recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV)
unscaled_artificial_ap_band = spikeinterface.preprocessing.scale(
recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV
)
int16_artificial_ap_band = unscaled_artificial_ap_band.astype(dtype="int16")
int16_artificial_ap_band.set_channel_gains(conversion_factor_to_uV)

unscaled_artificial_lf_filter = bandpass_filter(recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000)
unscaled_artificial_lf_band = resample(recording=unscaled_artificial_lf_filter, resample_rate=2_500)
unscaled_artificial_lf_filter = spikeinterface.preprocessing.bandpass_filter(
recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000
)
unscaled_artificial_lf_band = spikeinterface.preprocessing.decimate(
recording=unscaled_artificial_lf_filter, decimation_factor=downsample_factor
)
int16_artificial_lf_band = unscaled_artificial_lf_band.astype(dtype="int16")
int16_artificial_lf_band.set_channel_gains(conversion_factor_to_uV)

Expand All @@ -1725,13 +1731,16 @@ def generate_test_data(output_path: str):
with open(file=lf_meta_file_path, mode="w") as io:
io.write(lf_meta_content)

# Make Phy folder
waveform_extractor = spikeinterface.extract_waveforms(
recording=artificial_ap_band_in_uV, sorting=spiking, mode="memory"
# Make Phy folder - see https://spikeinterface.readthedocs.io/en/latest/modules/exporters.html
sorting_analyzer = spikeinterface.create_sorting_analyzer(
sorting=sorting, recording=artificial_ap_band_in_uV, mode="memory", sparse=False
)
sorting_analyzer.compute(["random_spikes", "waveforms", "templates", "noise_levels"])
sorting_analyzer.compute("spike_amplitudes")
sorting_analyzer.compute("principal_components", n_components=5, mode="by_channel_local")

export_to_phy(
waveform_extractor=waveform_extractor, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
spikeinterface.exporters.export_to_phy(
sorting_analyzer=sorting_analyzer, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
)


Expand Down
Loading