@@ -180,18 +180,18 @@ def automate_training(config, param, fixed_split, all_combin, n_iterations=1, ru

     # Split dataset if not already done
     if fixed_split and (initial_config.get("split_path") is None):
-        train_lst, valid_lst, test_lst = imed_loader_utils.split_dataset(path_folder=initial_config["bids_path"],
-                                                                         center_test_lst=initial_config["center_test"],
-                                                                         split_method=initial_config["split_method"],
-                                                                         random_seed=initial_config["random_seed"],
-                                                                         train_frac=initial_config["train_fraction"],
-                                                                         test_frac=initial_config["test_fraction"])
+        train_lst, valid_lst, test_lst = imed_loader_utils.split_dataset(path_folder=initial_config["loader_parameters"]["bids_path"],
+                                                                         center_test_lst=initial_config["split_dataset"]["center_test"],
+                                                                         split_method=initial_config["split_dataset"]["split_method"],
+                                                                         random_seed=initial_config["split_dataset"]["random_seed"],
+                                                                         train_frac=initial_config["split_dataset"]["train_fraction"],
+                                                                         test_frac=initial_config["split_dataset"]["test_fraction"])

         # save the subject distribution
         split_dct = {'train': train_lst, 'valid': valid_lst, 'test': test_lst}
         split_path = "./" + "common_split_datasets.joblib"
         joblib.dump(split_dct, split_path)
-        initial_config["split_path"] = split_path
+        initial_config["split_dataset"]["fname_split"] = split_path

     config_list = []
     # Test all combinations (change multiple parameters for each test)
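For reference, a minimal sketch of the nested configuration layout that the updated lookups assume. Only the key names and the initial_config name come from the hunk above; every value below is an illustrative placeholder, not a project default.

# Hedged sketch of the nested config implied by initial_config["loader_parameters"][...]
# and initial_config["split_dataset"][...]; all values are placeholders.
initial_config = {
    "loader_parameters": {
        "bids_path": "path/to/bids_dataset"
    },
    "split_dataset": {
        "fname_split": None,           # later set to the dumped joblib path
        "center_test": [],             # placeholder value
        "split_method": "per_patient", # placeholder value
        "random_seed": 1313,           # placeholder value
        "train_fraction": 0.6,
        "test_fraction": 0.2
    }
}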