os.environ["CUDA_VISIBLE_DEVICES"] = "0" # specify GPU to use
from run_nn_models import run_nn_models
from transfer_learn_nn import transfer_learn_nn
- from model_utils import unseen_modality_test, diff_specs
+ from model_utils import unseen_modality_test, diff_specs, ntrain_combine_df, frac_combine_df
from transfer_learn_nn_eeg import transfer_learn_nn_eeg

t_start = time.time()
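
# Trains HTNet ('eegnet_hilb') and baseline decoders: tailored (within participant),
# same modality (across participants), and unseen modality (ECoG-trained models fine-tuned on EEG)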
##################USER-DEFINED PARAMETERS##################
- data_lp = '.../' # data load path
-
# Where data will be saved: rootpath + dataset + '/'
rootpath = '.../'
dataset = 'move_rest_ecog'

+ # Data load paths
+ ecog_lp = rootpath + 'ecog_dataset/' # path to ECoG xarray data
+ ecog_roi_proj_lp = ecog_lp + 'proj_mat/' # path to ECoG projection matrix
+
### Tailored decoder params (within participant) ###
n_folds_tail = 3 # number of folds (per participant)
- spec_meas_tail = ['power'] # 'power', 'power_log', 'relative_power', 'phase', 'freqslide'
+ spec_meas_tail = ['power', 'power_log', 'relative_power', 'phase', 'freqslide']
hyps_tail = {'F1': 20, 'dropoutRate': 0.693, 'kernLength': 64,
             'kernLength_sep': 56, 'dropoutType': 'SpatialDropout2D',
             'D': 2, 'n_estimators': 240, 'max_depth': 9}
hyps_tail['F2'] = hyps_tail['F1'] * hyps_tail['D'] # F2 = F1 * D
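+ # max training epochs and early-stopping patience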
+ epochs_tail = 300
+ patience_tail = 30

### Same modality decoder params (across participants) ###
n_folds_same = 36 # number of total folds
- spec_meas_same = ['power'] # 'power', 'power_log', 'relative_power', 'phase', 'freqslide'
+ spec_meas_same = ['power', 'power_log', 'relative_power', 'phase', 'freqslide']
hyps_same = {'F1': 19, 'dropoutRate': 0.342, 'kernLength': 24,
             'kernLength_sep': 88, 'dropoutType': 'Dropout',
             'D': 2, 'n_estimators': 240, 'max_depth': 6}
hyps_same['F2'] = hyps_same['F1'] * hyps_same['D'] # F2 = F1 * D
+ epochs_same = 300
+ patience_same = 20

### Unseen modality testing params (across participants) ###
- eeg_lp = '.../' # path to EEG xarray data
- eeg_roi_proj_lp = '.../' # path to EEG projection matrix
+ eeg_lp = rootpath + 'eeg_dataset/' # path to EEG xarray data
+ eeg_roi_proj_lp = eeg_lp + 'proj_mat/' # path to EEG projection matrix

### Fine-tune same modality decoders ###
model_type_finetune = 'eegnet_hilb' # NN model type to fine-tune (must be either 'eegnet_hilb' or 'eegnet')
...
sp_finetune = [rootpath + dataset + '/tf_all_per/',
               rootpath + dataset + '/tf_per_1dconv/',
               rootpath + dataset + '/tf_depth_per/',
-                rootpath + dataset + '/tf_sep_per/',
-                rootpath + dataset + '/tf_single_sub/'] # where to save output (should match layers_to_finetune)
+                rootpath + dataset + '/tf_sep_per/'] # where to save output (should match layers_to_finetune)

# How much train/val data to use, either by number of trials or percentage of available data
use_per_vals = True # if True, use percentage values (otherwise, use number of trials)
...
n_val_parts = 1 # number of validation participants to use
##################USER-DEFINED PARAMETERS##################

-
#### Tailored decoder training ####
for s, val in enumerate(spec_meas_tail):
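    # 'power_log' is decoded as 'power' with a log transform applied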
    do_log = True if val == 'power_log' else False
    compute_val = 'power' if val == 'power_log' else val
    single_sp = rootpath + dataset + '/single_sbjs_' + val + '/'
    combined_sbjs = False
    if not os.path.exists(single_sp):
-         os.mkdirs(single_sp)
+         os.makedirs(single_sp)
    if s == 0:
        models = ['eegnet_hilb', 'eegnet', 'rf', 'riemann'] # fit all decoder types
    else:
        models = ['eegnet_hilb'] # avoid fitting non-HTNet models again
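        # (only HTNet's output depends on the spectral measure, so the other models need just one fit)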
-     run_nn_models(single_sp, n_folds_tail, combined_sbjs, lp=data_lp, test_day='last', do_log=do_log,
-                   epochs=300, patience=30, models=models, compute_val=compute_val,
+     run_nn_models(single_sp, n_folds_tail, combined_sbjs, ecog_lp, ecog_roi_proj_lp, test_day='last', do_log=do_log,
+                   epochs=epochs_tail, patience=patience_tail, models=models, compute_val=compute_val,
                  F1=hyps_tail['F1'], dropoutRate=hyps_tail['dropoutRate'], kernLength=hyps_tail['kernLength'],
                  kernLength_sep=hyps_tail['kernLength_sep'], dropoutType=hyps_tail['dropoutType'],
                  D=hyps_tail['D'], F2=hyps_tail['F2'], n_estimators=hyps_tail['n_estimators'], max_depth=hyps_tail['max_depth'])
...
    compute_val = 'power' if val == 'power_log' else val
    multi_sp = rootpath + dataset + '/combined_sbjs_' + val + '/'
    if not os.path.exists(multi_sp):
-         os.mkdirs(multi_sp)
+         os.makedirs(multi_sp)
    combined_sbjs = True
    if s == 0:
        models = ['eegnet_hilb', 'eegnet', 'rf', 'riemann'] # fit all decoder types
    else:
        models = ['eegnet_hilb'] # avoid fitting non-HTNet models again
-     run_nn_models(multi_sp, n_folds_same, combined_sbjs, lp=data_lp, test_day='last', do_log=do_log,
-                   epochs=300, patience=20, models=models, compute_val=compute_val,
+     run_nn_models(multi_sp, n_folds_same, combined_sbjs, ecog_lp, ecog_roi_proj_lp, test_day='last', do_log=do_log,
+                   epochs=epochs_same, patience=patience_same, models=models, compute_val=compute_val,
                  F1=hyps_same['F1'], dropoutRate=hyps_same['dropoutRate'], kernLength=hyps_same['kernLength'],
                  kernLength_sep=hyps_same['kernLength_sep'], dropoutType=hyps_same['dropoutType'],
                  D=hyps_same['D'], F2=hyps_same['F2'], n_estimators=hyps_same['n_estimators'], max_depth=hyps_same['max_depth'])
...
    lp_finetune = rootpath + dataset + '/combined_sbjs_' + spec_meas + '/'
    if use_per_vals:
        for i in range(len(per_train_trials)):
-             transfer_learn_nn(lp_finetune, sp_finetune[j], eeg_lp,
+             transfer_learn_nn(lp_finetune, sp_finetune[j],
                              model_type=model_type_finetune, layers_to_finetune=curr_layer,
                              use_per_vals=use_per_vals, per_train_trials=per_train_trials[i],
-                               per_val_trials=per_val_trials[i], single_sub=single_sub, epochs=300, patience=20)
+                               per_val_trials=per_val_trials[i], single_sub=single_sub, epochs=epochs_same, patience=patience_same)
    else:
        for i in range(len(n_train_trials)):
-             transfer_learn_nn(lp_finetune, sp_finetune[j], eeg_lp,
+             transfer_learn_nn(lp_finetune, sp_finetune[j],
                              model_type=model_type_finetune, layers_to_finetune=curr_layer,
                              use_per_vals=use_per_vals, n_train_trials=n_train_trials[i],
-                               n_val_trials=n_val_trials[i], single_sub=single_sub, epochs=300, patience=20)
+                               n_val_trials=n_val_trials[i], single_sub=single_sub, epochs=epochs_same, patience=patience_same)

#### Unseen modality fine-tuning ####
spec_meas = 'relative_power'
for j, curr_layer in enumerate(layers_to_finetune):
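+     # save EEG fine-tuning results to their own '_eeg' folders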
+     sp_finetune_eeg = sp_finetune[j][:-1] + '_eeg/'
    # Create save directory if it does not already exist
-     if not os.path.exists(sp_finetune[j]):
-         os.makedirs(sp_finetune[j])
+     if not os.path.exists(sp_finetune_eeg):
+         os.makedirs(sp_finetune_eeg)

    # Fine-tune with each amount of train/val data
    if curr_layer == layers_to_finetune[-1]:
...
        lp_finetune = rootpath + dataset + '/combined_sbjs_' + spec_meas + '/'
        if use_per_vals:
            for i in range(len(per_train_trials)):
-                 transfer_learn_nn_eeg(lp_finetune, sp_finetune[j][:-1] + '_eeg/',
+                 transfer_learn_nn_eeg(lp_finetune, sp_finetune_eeg, eeg_lp,
                                      model_type=model_type_finetune, layers_to_finetune=curr_layer,
                                      use_per_vals=use_per_vals, per_train_trials=per_train_trials[i],
-                                       per_val_trials=per_val_trials[i], single_sub=single_sub, epochs=300, patience=20)
+                                       per_val_trials=per_val_trials[i], single_sub=single_sub, epochs=epochs_same, patience=patience_same)
        else:
            for i in range(len(n_train_trials)):
-                 transfer_learn_nn_eeg(lp_finetune, sp_finetune[j][:-1] + '_eeg/',
+                 transfer_learn_nn_eeg(lp_finetune, sp_finetune_eeg, eeg_lp,
                                      model_type=model_type_finetune, layers_to_finetune=curr_layer,
                                      use_per_vals=use_per_vals, n_train_trials=n_train_trials[i],
-                                       n_val_trials=n_val_trials[i], single_sub=single_sub, epochs=300, patience=20)
+                                       n_val_trials=n_val_trials[i], single_sub=single_sub, epochs=epochs_same, patience=patience_same)


#### Training same modality decoders with different numbers of training participants ####
for i in range(max_train_parts):
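    # one output folder per number of training participants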
    sp_curr = rootpath + dataset + '/combined_sbjs_ntra' + str(i + 1) + '/'
    combined_sbjs = True
    if not os.path.exists(sp_curr):
-         os.mkdirs(sp_curr)
-     run_nn_models(sp_curr, n_folds_same, combined_sbjs, test_day='last', do_log=False,
-                   epochs=300, patience=20, models=['eegnet_hilb', 'eegnet', 'rf', 'riemann'], compute_val='power',
+         os.makedirs(sp_curr)
+     run_nn_models(sp_curr, n_folds_same, combined_sbjs, ecog_lp, ecog_roi_proj_lp, test_day='last', do_log=False,
+                   epochs=epochs_same, patience=patience_same, models=['eegnet_hilb', 'eegnet', 'rf', 'riemann'], compute_val='power',
                  n_val=n_val_parts, n_train=i + 1, F1=hyps_same['F1'], dropoutRate=hyps_same['dropoutRate'],
                  kernLength=hyps_same['kernLength'], kernLength_sep=hyps_same['kernLength_sep'], dropoutType=hyps_same['dropoutType'],
                  D=hyps_same['D'], F2=hyps_same['F2'], n_estimators=hyps_same['n_estimators'], max_depth=hyps_same['max_depth'])
-
+ # Combine results into dataframes
+ ntrain_combine_df(rootpath + dataset)
+ frac_combine_df(rootpath + dataset, ecog_roi_proj_lp)
+
+
#### Pre-compute difference spectrograms for ECoG and EEG datasets ####
- diff_specs(rootpath + dataset + '/combined_sbjs/', data_lp, ecog=True)
- diff_specs(rootpath + dataset + '/combined_sbjs/', eeg_lp, ecog=False)
+ diff_specs(rootpath + dataset + '/combined_sbjs_power/', ecog_lp, ecog=True)
+ diff_specs(rootpath + dataset + '/combined_sbjs_power/', eeg_lp, ecog=False)

print('Elapsed time: ' + str(time.time() - t_start))