-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path06-iEEG_decoding.py
172 lines (147 loc) · 7.47 KB
/
06-iEEG_decoding.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
import os
import json
import pickle
import mne
import time
from joblib import Parallel, delayed
from tqdm import tqdm
from pathlib import Path
import numpy as np
import argparse
from sklearn.pipeline import make_pipeline
from sklearn import svm
from sklearn.preprocessing import StandardScaler
from mne.decoding import SlidingEstimator
import environment_variables as ev
from helper_function.helper_general import create_super_subject, get_roi_channels, \
decoding, moving_average
# Set list of views:
# NOTE(review): `views` appears unused in this script — presumably shared
# boilerplate from sibling plotting scripts; confirm before removing.
views = ['lateral', 'medial', 'rostral', 'caudal', 'ventral', 'dorsal']
def decoding_pipeline(parameters_file, subjects, data_root, analysis_name="decoding", task_conditions=None):
    """
    Perform time-resolved decoding analysis on iEEG data, one ROI at a time.

    For each ROI, channels are pooled across subjects into a "super subject",
    a linear SVM with a sliding estimator is scored across time, and a null
    distribution is built by label shuffling. Results are pickled per ROI and
    once more for all ROIs combined.

    :param parameters_file: (str) Path to the parameters file in JSON format.
        Used as a fallback when --config is not supplied on the command line.
    :param subjects: (list) List of subjects.
    :param data_root: (str) Root directory for data.
    :param analysis_name: (str) Name of the analysis.
    :param task_conditions: (dict or None) Mapping of short condition keys to
        condition names; defaults to target/relevant/irrelevant conditions.
    :return: None
    :raises ValueError: if the --roi argument is not a single string.
    """
    # Parse command line inputs:
    parser = argparse.ArgumentParser(
        description="Implements analysis of EDFs for experiment1")
    parser.add_argument('--config', type=str, default=None,
                        help="Config file for analysis parameters (file name + path)")
    parser.add_argument('--roi', type=str, default=None,
                        help="Name of the ROI on which to run the analysis")
    args = parser.parse_args()
    if task_conditions is None:
        task_conditions = {"tr": "Relevant non-target", "ti": "Irrelevant", "targets": "Target"}
    # Fall back to the parameters_file argument when --config is absent;
    # previously the parameter was ignored and open(None) crashed when the
    # script was run without CLI arguments.
    config_path = args.config if args.config is not None else parameters_file
    with open(config_path) as json_file:
        param = json.load(json_file)
    # Use only the config file's base name (without extension) for the save
    # directory; splitting the full path on ".json" created invalid nested
    # directories when an absolute config path was passed.
    save_dir = Path(ev.bids_root, "derivatives", analysis_name, param["task"], Path(config_path).stem)
    os.makedirs(save_dir, exist_ok=True)
    roi_results = {}
    # Determine which ROIs to analyze. NOTE: args.roi defaults to None, so the
    # previous len(args.roi) check raised a TypeError; truthiness covers both
    # None and the empty string.
    if not args.roi:
        # No ROI specified: analyze every cortical label of the Destrieux atlas.
        labels = mne.read_labels_from_annot("fsaverage", parc='aparc.a2009s', hemi='both', surf_name='pial',
                                            subjects_dir=ev.fs_directory, sort=True)
        # Collapse left/right hemisphere labels into a single ROI name:
        roi_names = list(set(lbl.name.replace("-lh", "").replace("-rh", "")
                             for lbl in labels if "unknown" not in lbl.name))
    elif isinstance(args.roi, str):
        roi_names = [args.roi]
    else:
        raise ValueError("The ROI must be a single string!")
    for roi in roi_names:
        print("=========================================")
        print("ROI")
        print(roi)
        subjects_epochs = {}
        roi_results[roi] = {}
        # Convert to volumetric labels (one per hemisphere):
        vol_roi = ["ctx_lh_" + roi, "ctx_rh_" + roi]
        print(vol_roi)
        for sub in subjects:
            # Single-quoted keys inside the f-strings keep this valid on
            # Python < 3.12 (nested double quotes require PEP 701 / 3.12+).
            epochs_file = Path(data_root, "derivatives", "preprocessing", f"sub-{sub}", f"ses-{param['session']}",
                               param["data_type"], param["preprocessing_folder"], param["signal"],
                               f"sub-{sub}_ses-{param['session']}_task-{param['task']}_desc-epoching_ieeg-epo.fif")
            epochs = mne.read_epochs(epochs_file)
            # Extract the conditions of interest:
            epochs = epochs[param["conditions"]]
            # Crop the data:
            epochs.crop(tmin=param["crop"][0], tmax=param["crop"][1])
            # Get the channels within this ROI:
            picks = get_roi_channels(data_root, sub, param["session"], param["atlas"], vol_roi)
            # Skip if no channels in that ROI for this subject
            if not picks:
                print(f"sub-{sub} has no electrodes in {roi}")
                continue
            # Append to the rest:
            subjects_epochs[sub] = epochs.pick(picks)
        # Skip the ROI entirely if no subject contributed channels:
        if not subjects_epochs:
            continue
        # 1. Create the model:
        # ============================
        clf = make_pipeline(StandardScaler(), svm.SVC(kernel='linear', class_weight='balanced'))
        time_res = SlidingEstimator(clf, n_jobs=1, scoring=param["metric"], verbose="ERROR")
        # Loop through each task condition:
        for tsk in task_conditions:
            # 2. Create the super subject:
            # ============================
            tsk_epochs = {sub: subjects_epochs[sub][task_conditions[tsk]] for sub in subjects_epochs}
            # All subjects share the same time axis after cropping; take the first.
            times = next(iter(tsk_epochs.values())).times
            data, labels = create_super_subject(tsk_epochs, param["labels_col"], n_trials=param["min_ntrials"])
            n_channels = data.shape[1]
            # 3. Moving average:
            # ============================
            # Convert the window length from milliseconds to samples:
            window_size = int((param["mvavg_window_ms"] * 0.001) / (times[1] - times[0]))
            print(f"Window size: {window_size}")
            data = moving_average(data, window_size, axis=-1, overlapping=False)
            times = moving_average(times, window_size, axis=-1, overlapping=False)
            # 4. Apply decoding (pseudotrials happen inside)
            # ============================
            # Repeat the decoding n_iter times to average over pseudotrial draws:
            scores = []
            for _ in range(param["n_iter"]):
                scr = decoding(time_res, data, labels,
                               n_pseudotrials=param["pseudotrials"],
                               kfolds=param["kfold"],
                               verbose=True,
                               label_shuffle=False)
                scores.append(scr)
            # Compute the null distribution (shuffled labels, in parallel):
            scores_shuffle = Parallel(n_jobs=param["n_jobs"])(delayed(decoding)(time_res,
                                                                                data,
                                                                                labels,
                                                                                n_pseudotrials=param["pseudotrials"],
                                                                                kfolds=param["kfold"],
                                                                                verbose=False,
                                                                                label_shuffle=True
                                                                                ) for i in
                                                              tqdm(range(int(param["n_perm"]))))
            # Average across iterations:
            scores = np.mean(np.stack(scores, axis=2), axis=-1)
            scores_shuffle = np.mean(np.stack(scores_shuffle, axis=0), axis=1)
            # Package the results:
            roi_results[roi].update({
                f"scores_{tsk}": scores,
                f"scores_shuffle_{tsk}": scores_shuffle,
                "n_channels": n_channels,
                "times": times
            })
        # Save this ROI's results to file:
        with open(Path(save_dir, f'results-{roi}.pkl'), 'wb') as f:
            pickle.dump(roi_results[roi], f)
    # Save the combined results to file:
    with open(Path(save_dir, 'results-all_roi.pkl'), 'wb') as f:
        pickle.dump(roi_results, f)
if __name__ == "__main__":
    # Hard-coded defaults for running this script directly (no CLI args).
    parameters = (
        r"C:\Users\alexander.lepauvre\Documents\GitHub\Reconstructed_time_analysis"
        r"\06-iEEG_decoding_parameters_all-dur.json"
    )
    bids_root = "/hpc/users/alexander.lepauvre/ReconstructedTime/bids-curate"
    conditions = {"tr": "Relevant non-target", "ti": "Irrelevant"}
    decoding_pipeline(
        parameters,
        ev.subjects_lists_ecog["dur"],
        bids_root,
        analysis_name="decoding",
        task_conditions=conditions,
    )