@@ -31,7 +31,7 @@ def add_layer(inputs, in_size, out_size, layer_name, activation_function=None, )
         outputs = Wx_plus_b
     else:
         outputs = activation_function(Wx_plus_b, )
-    tf.histogram_summary(layer_name + '/outputs', outputs)
+    tf.summary.histogram(layer_name + '/outputs', outputs)
     return outputs

@@ -47,14 +47,14 @@ def add_layer(inputs, in_size, out_size, layer_name, activation_function=None, )
 # the loss between prediction and real data
 cross_entropy = tf.reduce_mean(-tf.reduce_sum(ys * tf.log(prediction),
                                               reduction_indices=[1]))  # loss
-tf.scalar_summary('loss', cross_entropy)
+tf.summary.scalar('loss', cross_entropy)
 train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

 sess = tf.Session()
-merged = tf.merge_all_summaries()
+merged = tf.summary.merge_all()
 # summary writer goes in here
-train_writer = tf.train.SummaryWriter("logs/train", sess.graph)
-test_writer = tf.train.SummaryWriter("logs/test", sess.graph)
+train_writer = tf.summary.FileWriter("logs/train", sess.graph)
+test_writer = tf.summary.FileWriter("logs/test", sess.graph)

 # tf.initialize_all_variables() is no longer valid from
 # 2017-03-02 if using tensorflow >= 0.12
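
For orientation only, and not part of the diff itself: a minimal sketch of how the renamed summary op and the two FileWriters above are typically driven during training under TensorFlow >= 0.12. The data arrays X_train/y_train/X_test/y_test and the step counts are assumptions for illustration, not taken from this change.

sess.run(tf.global_variables_initializer())  # replacement for the deprecated tf.initialize_all_variables()
for i in range(1000):
    # one optimization step on the (assumed) training data
    sess.run(train_step, feed_dict={xs: X_train, ys: y_train})
    if i % 50 == 0:
        # evaluate the merged summary op and write the serialized results to each log dir
        train_result = sess.run(merged, feed_dict={xs: X_train, ys: y_train})
        test_result = sess.run(merged, feed_dict={xs: X_test, ys: y_test})
        train_writer.add_summary(train_result, i)
        test_writer.add_summary(test_result, i)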