Skip to content

Commit a9fc3ee

Browse files
committed
tf BN
1 parent 4c76458 commit a9fc3ee

File tree

2 files changed

+8
-158
lines changed

2 files changed

+8
-158
lines changed

tensorflowTUT/tf23_BN/test.py

-153
This file was deleted.

tensorflowTUT/tf23_BN/tf23_BN.py

+8-5
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@ def fix_seed(seed=1):
2828

2929
def plot_his(inputs, inputs_norm):
3030
# plot histogram for the inputs of every layer
31-
3231
for j, all_inputs in enumerate([inputs, inputs_norm]):
3332
for i, input in enumerate(all_inputs):
3433
plt.subplot(2, len(all_inputs), j*len(all_inputs)+(i+1))
@@ -59,6 +58,7 @@ def add_layer(inputs, in_size, out_size, activation_function=None, norm=False):
5958
# normalize fully connected product
6059
if norm:
6160
# Batch Normalize
61+
# when testing, you should fix fc_mean, fc_var instead of using tf.nn.moments!
6262
fc_mean, fc_var = tf.nn.moments(
6363
Wx_plus_b,
6464
axes=[0], # the dimension you wanna normalize, here [0] for batch
@@ -67,9 +67,10 @@ def add_layer(inputs, in_size, out_size, activation_function=None, norm=False):
6767
scale = tf.Variable(tf.ones([out_size]))
6868
shift = tf.Variable(tf.zeros([out_size]))
6969
epsilon = 0.001
70-
# similar to this:
71-
# Wx_plus_b = (Wx_plus_b - fc_mean) / tf.sqrt(fc_var + 0.001)
7270
Wx_plus_b = tf.nn.batch_normalization(Wx_plus_b, fc_mean, fc_var, shift, scale, epsilon)
71+
# similar to these two steps:
72+
# Wx_plus_b = (Wx_plus_b - fc_mean) / tf.sqrt(fc_var + 0.001)
73+
# Wx_plus_b = Wx_plus_b * scale + shift
7374

7475
# activation
7576
if activation_function is None:
@@ -111,8 +112,8 @@ def add_layer(inputs, in_size, out_size, activation_function=None, norm=False):
111112
y_data = np.square(x_data) - 5 + noise
112113

113114
# plot input data
114-
# plt.scatter(x_data, y_data)
115-
# plt.show()
115+
plt.scatter(x_data, y_data)
116+
plt.show()
116117

117118
xs = tf.placeholder(tf.float32, [None, 1]) # [num_samples, num_features]
118119
ys = tf.placeholder(tf.float32, [None, 1])
@@ -150,4 +151,6 @@ def add_layer(inputs, in_size, out_size, activation_function=None, norm=False):
150151
plt.legend()
151152
plt.show()
152153

154+
# when testing, you should fix fc_mean, fc_var instead of using tf.nn.moments!
155+
153156

0 commit comments

Comments
 (0)