-
Notifications
You must be signed in to change notification settings - Fork 0
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
can not use docker in jupyter #2
Comments
While trying to fix that issue, another issue occurred unexpectedly — the same code ran smoothly before I requested the fix above. Code: /home/zhangdaohan20h/dredge-main/notebook/prepro.py
import numpy as np
# --- Imports: spikeinterface submodules plus plotting/numerics ---
import matplotlib.pyplot as plt
import spikeinterface.full as si # import core only
import spikeinterface.extractors as se
import spikeinterface.preprocessing as spre
import spikeinterface.sorters as ss
import spikeinterface.postprocessing as spost
import spikeinterface.qualitymetrics as sqm
import spikeinterface.comparison as sc
import spikeinterface.exporters as sexp
import spikeinterface.curation as scur
import spikeinterface.widgets as sw
import os
# --- Environment setup attempts (disabled) ---
# NOTE(review): os.system() runs in a child shell, so these exports would not
# affect this Python process anyway; set env vars before launching the kernel.
#os.system('newgrp docker')
#os.system('export KILOSORT2_5_PATH=/home/zhangdaohan20h/Kilosort-kilosort25/')
#os.system('export KILOSORT3_PATH=/home/zhangdaohan20h/Kilosort-kilosort3/')
#Kilosort3Sorter.set_kilosort3_path('/home/zhangdaohan20h/Kilosort-kilosort3/')
#os.system('export SPIKEINTERFACE_DEV_PATH=/home/zhangdaohan20h/spikeinterface-main/')
#os.system('export SPIKEINTERFACE_DEV_PATH=~/.conda/envs/kilosort4/lib/python3.9/site-packages/')
# --- DREDge imports (disabled; functionality now lives in spikeinterface) ---
#from spikeinterface.sortingcomponents.motion_interpolation import interpolate_motion
# these imports will move around as things evolve
# this is where the main online registration lives now
#from dredge.dredge_lfp import register_online_lfp
# this has some helpers for plotting
#import dredge.motion_util as mu
print('#')  # progress marker
# AP registration lives here for now
#from dredge.dredge_ap import register
# spikeinterface peak detection + localization
#from spikeinterface.sortingcomponents.peak_detection import detect_peaks
#from spikeinterface.sortingcomponents.peak_localization import localize_peaks
print('#')  # progress marker
# --- LFP stream: load and preprocess for DREDge LFP-based motion estimation ---
# Read the SpikeGLX LFP stream (imec0.lf) from the Pt01 session folder.
lfpraw = se.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/Pt01/", load_sync_channel=False,
                          stream_id="imec0.lf")
## NOTE(review): this comment used to reference Pt02.imec0.ap.bin/.meta files,
## but the path above loads the Pt01 folder's .lf stream — confirm which
## session is intended.
lfpraw  # bare expression: displays the recording repr in a notebook cell
# convert to floating point
lfprec = si.astype(lfpraw, np.float32)
# ensure depth order
lfprec = si.depth_order(lfprec)
# optional: remove channels outside the brain
# you could use similar logic to extract a single column
# or trim both ends of the probe, whatever you like
cutoff_um = 8000  # depth cutoff in micrometers; channels deeper than this are dropped
if cutoff_um is not None:
    geom = lfprec.get_channel_locations()
    # geom[:, 1] is the depth (y) coordinate of each channel
    lfprec = lfprec.remove_channels(lfprec.channel_ids[geom[:, 1] > cutoff_um])
# bandpass filter
# we do an aggressive one since we plan to downsample
lfprec = si.bandpass_filter(
    lfprec,
    freq_min=0.5,
    freq_max=250,
    margin_ms=1000,
    filter_order=3,
    dtype="float32",
    add_reflect_padding=True,
)
# fancy bad channels detection and removal from the International Brain Lab
bad_chans, labels = si.detect_bad_channels(lfprec, psd_hf_threshold=1.4, num_random_chunks=100, seed=0)
print("Found bad channels", bad_chans)
lfprec = lfprec.remove_channels(bad_chans)
# correct for ADC sample shifts
lfprec = si.phase_shift(lfprec)
# common median reference
lfprec = si.common_reference(lfprec)
# downsample to 250Hz
lfprec = si.resample(lfprec, 250, margin_ms=1000)
# spatial filters: second derivative and averageing same-depth channels
lfprec = si.directional_derivative(lfprec, order=2, edge_order=1)
lfprec = si.average_across_direction(lfprec)
# Estimate probe motion from the preprocessed LFP with the DREDge LFP method;
# non-rigid, allowing displacements up to 1000 um.
from spikeinterface.sortingcomponents.motion import estimate_motion
motion = estimate_motion(lfprec, method='dredge_lfp', rigid=False, progress_bar=True, max_disp_um=1000)
# Load the recording
#from spikeinterface.core import BinaryFolderRecording
#rec = si.read_zarr('recording.zarr')
rec = se.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/Pt01",
load_sync_channel=False, stream_id="imec0.ap")
from spikeinterface.sortingcomponents.motion import interpolate_motion
rec = si.astype(rec, np.float32)
print(motion.dim)
rec = interpolate_motion(rec, motion, border_mode='remove_channels',
spatial_interpolation_method='kriging', sigma_um=20.0, p=1,
num_closest=3, interpolation_time_bin_centers_s=None,
interpolation_time_bin_size_s=None, dtype=None)
rec = si.bandpass_filter(rec)
rec = si.common_reference(rec)
'''
common median reference (CMR)中使用的"median"或"average"操作符以及"global"、"local"或"single"参考方式的区别如下:
操作符:
"median"使用通道信号的中值作为参考信号。中值相比平均值更不容易受到异常值的影响,因此更鲁棒。
"average"使用通道信号的平均值作为参考信号。平均值计算更快,但相比中值更容易受到异常值的影响。
参考方式:
"global"使用所有通道的中值/平均值作为全局参考信号。
"local"使用邻近通道的中值/平均值作为局部参考信号。这可以更好地去除局部噪声。
"single"使用单个通道的中值/平均值作为参考信号。这种方式最简单,但可能无法很好地去除共同噪声。
'''
rec
# optional
# Save the recording
rec.save(folder='recording', format='zarr',overwrite=True, engine='joblib', engine_kwargs={"n_jobs": 20},) #binary_folder
# --- Spike sorting (Kilosort 2.5 in Docker) and analyzer creation ---
# NOTE(review): importing docker here only verifies the Docker SDK is
# installed; spikeinterface launches the container itself via docker_image.
import docker

# Run Kilosort 2.5 inside the given Docker image.
# NOTE(review): installation_mode='dev' expects SPIKEINTERFACE_DEV_PATH to be
# set in the environment that launched this process (see the disabled exports
# near the top of the file) — confirm it is exported before running.
sorting_KS2 = ss.run_sorter(sorter_name="kilosort2_5", recording=rec,
                            docker_image='docker.mrxn.net/spikeinterface/kilosort2_5-compiled-base:latest',
                            n_jobs=20, verbose=True, installation_mode='dev')
print('finish')

# BUG FIX: this save was previously dead code (commented out inside a
# triple-quoted string), yet the line below reloads the sorting from the
# 'sorting' folder — which then fails unless a previous run left one behind.
# Persist the sorting first so the reload is always valid.
sorting_KS2.save(folder='sorting', format='npz_folder')

# Round-trip the sorting through disk so the rest of the pipeline does not
# depend on the in-memory sorter object.
from spikeinterface.core import NpzFolderSorting
sorting = NpzFolderSorting('sorting')

# Pair the sorting with the preprocessed recording in a SortingAnalyzer,
# persisted to `folder` in binary format.
folder = "analyzer_folder"
from spikeinterface.core import create_sorting_analyzer, load_sorting_analyzer
analyzer = create_sorting_analyzer(sorting=sorting,
                                   recording=rec,
                                   overwrite=True,
                                   format="binary_folder",
                                   return_scaled=True,  # default: attempt to return scaled traces
                                   folder=folder,
                                   n_jobs=20)
print(analyzer)
|
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
notebook: /home/zhangdaohan20h/dredge-main/notebook/wulab_DREDge_ks2.ipynb
envs: kilosort4 in zhangdaohan20h
The text was updated successfully, but these errors were encountered: