# -*- coding: utf-8 -*-
"""
Created on Sun Mar 15 21:10:29 2020

@author: mauro
"""
from snakeClass import run
# Assumption: snakeClass also provides define_parameters(); it is used in the
# __main__ block below to build the params dictionary passed to run().
from snakeClass import define_parameters
from GPyOpt.methods import BayesianOptimization
import datetime

################################################
#   Set parameters for Bayesian Optimization   #
################################################

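# BayesianOptimizer wraps GPyOpt's BayesianOptimization to tune the snake
# agent's hyperparameters (learning rate, layer sizes, epsilon decay) by
# repeatedly calling snakeClass.run and maximizing the score it returns.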
class BayesianOptimizer():
    def __init__(self, params):
        self.params = params

    def optimize_RL(self):
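        # Objective function evaluated by GPyOpt: each call trains and runs a
        # full agent with the candidate hyperparameters and returns its score.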
        def optimize(inputs):
            print("INPUT", inputs)
            inputs = inputs[0]

            # Variables to optimize
            self.params["learning_rate"] = inputs[0]
            lr_string = '{:.8f}'.format(self.params["learning_rate"])[2:]
            self.params["first_layer_size"] = int(inputs[1])
            self.params["second_layer_size"] = int(inputs[2])
            self.params["third_layer_size"] = int(inputs[3])
            self.params["epsilon_decay_linear"] = int(inputs[4])

            self.params['name_scenario'] = 'snake_lr{}_struct{}_{}_{}_eps{}'.format(
                lr_string,
                self.params['first_layer_size'],
                self.params['second_layer_size'],
                self.params['third_layer_size'],
                self.params['epsilon_decay_linear'])

            self.params['weights_path'] = 'weights/weights_' + self.params['name_scenario'] + '.h5'
            self.params['load_weights'] = False
            self.params['train'] = True
            print(self.params)
            score, mean, stdev = run(self.params)
            print('Total score: {} Mean: {} Std dev: {}'.format(score, mean, stdev))
            with open(self.params['log_path'], 'a') as f:
                f.write(str(self.params['name_scenario']) + '\n')
                f.write('Params: ' + str(self.params) + '\n')
            return score

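        # Search space for GPyOpt: the learning rate is continuous, the layer
        # sizes and the epsilon decay horizon are discrete choices.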
        optim_params = [
            {"name": "learning_rate", "type": "continuous", "domain": (0.00005, 0.001)},
            {"name": "first_layer_size", "type": "discrete", "domain": (20, 50, 100, 200)},
            {"name": "second_layer_size", "type": "discrete", "domain": (20, 50, 100, 200)},
            {"name": "third_layer_size", "type": "discrete", "domain": (20, 50, 100, 200)},
            {"name": "epsilon_decay_linear", "type": "discrete", "domain": (self.params['episodes'] * 0.2,
                                                                            self.params['episodes'] * 0.4,
                                                                            self.params['episodes'] * 0.6,
                                                                            self.params['episodes'] * 0.8,
                                                                            self.params['episodes'] * 1)}
        ]

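        # EI acquisition with 6 random initial evaluations; maximize=True
        # because the objective is the total game score returned by run().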
        bayes_optimizer = BayesianOptimization(f=optimize,
                                               domain=optim_params,
                                               initial_design_numdata=6,
                                               acquisition_type="EI",
                                               exact_feval=True,
                                               maximize=True)

        bayes_optimizer.run_optimization(max_iter=20)
        print('Optimized learning rate: ', bayes_optimizer.x_opt[0])
        print('Optimized first layer: ', bayes_optimizer.x_opt[1])
        print('Optimized second layer: ', bayes_optimizer.x_opt[2])
        print('Optimized third layer: ', bayes_optimizer.x_opt[3])
        print('Optimized epsilon linear decay: ', bayes_optimizer.x_opt[4])
        return self.params


##################
#      Main      #
##################
if __name__ == '__main__':
    # Define optimizer
    # Assumption: snakeClass.define_parameters() builds the params dictionary
    # used by run(); it must include at least 'episodes' and 'log_path'.
    params = define_parameters()
    bayesOpt = BayesianOptimizer(params)
    bayesOpt.optimize_RL()