forked from MorvanZhou/Tensorflow-Tutorial
Commit 71d2124 (1 parent: f33cfd5)
Showing 12 changed files with 316 additions and 53 deletions.
File renamed without changes.
@@ -4,8 +4,6 @@
 Dependencies:
 tensorflow: 1.1.0
-matplotlib
-numpy
 """
 import tensorflow as tf
@@ -0,0 +1,20 @@
"""
Know more, visit my Python tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
tensorflow: 1.1.0
"""
import tensorflow as tf

var = tf.Variable(0)    # our first variable in the "global_variable" set

add_operation = tf.add(var, 1)
update_operation = tf.assign(var, add_operation)

with tf.Session() as sess:
    # once variables are defined, you have to initialize them like this
    sess.run(tf.global_variables_initializer())
    for _ in range(3):
        sess.run(update_operation)
        print(sess.run(var))    # prints 1, 2, 3
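The add-then-assign pair above can also be fused into a single op. A minimal sketch (my addition, not part of this commit; same TF 1.x API family) using tf.assign_add:

counter = tf.Variable(0)
update = tf.assign_add(counter, 1)      # add and assign in one op; returns the updated value
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(3):
        print(sess.run(update))         # prints 1, 2, 3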
@@ -0,0 +1,48 @@
"""
Know more, visit my Python tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
tensorflow: 1.1.0
matplotlib
"""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# fake data
x = np.linspace(-5, 5, 200)     # x data, shape=(200,)

# following are popular activation functions
y_relu = tf.nn.relu(x)
y_sigmoid = tf.nn.sigmoid(x)
y_tanh = tf.nn.tanh(x)
y_softplus = tf.nn.softplus(x)
# y_softmax = tf.nn.softmax(x)  # softmax is a special kind of activation function: it outputs probabilities

sess = tf.Session()
y_relu, y_sigmoid, y_tanh, y_softplus = sess.run([y_relu, y_sigmoid, y_tanh, y_softplus])

# plot these activation functions
plt.figure(1, figsize=(8, 6))
plt.subplot(221)
plt.plot(x, y_relu, c='red', label='relu')
plt.ylim((-1, 5))
plt.legend(loc='best')

plt.subplot(222)
plt.plot(x, y_sigmoid, c='red', label='sigmoid')
plt.ylim((-0.2, 1.2))
plt.legend(loc='best')

plt.subplot(223)
plt.plot(x, y_tanh, c='red', label='tanh')
plt.ylim((-1.2, 1.2))
plt.legend(loc='best')

plt.subplot(224)
plt.plot(x, y_softplus, c='red', label='softplus')
plt.ylim((-0.2, 6))
plt.legend(loc='best')

plt.show()
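A note on why softmax stays commented out above: unlike the elementwise activations, it couples all inputs into a probability distribution, so it has no meaningful per-point curve. A minimal sketch (my addition, not in this commit; TF 1.x) showing its outputs sum to 1:

logits = tf.constant([1.0, 2.0, 3.0])
probs = tf.nn.softmax(logits)               # exp(logits) normalized to sum to 1
with tf.Session() as sess:
    print(sess.run(probs))                  # approx [0.09, 0.245, 0.665]
    print(sess.run(tf.reduce_sum(probs)))   # 1.0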
@@ -0,0 +1,80 @@
"""
Know more, visit my Python tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
tensorflow: 1.1.0
matplotlib
numpy
"""
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np

tf.set_random_seed(1)
np.random.seed(1)

# fake data
x = np.linspace(-1, 1, 100)[:, np.newaxis]          # shape (100, 1)
noise = np.random.normal(0, 0.1, size=x.shape)
y = np.power(x, 2) + noise                          # shape (100, 1) + some noise


def save():
    print('This is save')
    # build neural network
    tf_x = tf.placeholder(tf.float32, x.shape)      # input x
    tf_y = tf.placeholder(tf.float32, y.shape)      # input y
    l = tf.layers.dense(tf_x, 10, tf.nn.relu)       # hidden layer
    o = tf.layers.dense(l, 1)                       # output layer
    loss = tf.losses.mean_squared_error(tf_y, o)    # compute cost
    train_op = tf.train.GradientDescentOptimizer(learning_rate=0.5).minimize(loss)

    sess = tf.Session()
    sess.run(tf.global_variables_initializer())     # initialize var in graph

    saver = tf.train.Saver()                        # define a saver for saving and restoring

    for step in range(100):                         # train
        sess.run(train_op, {tf_x: x, tf_y: y})

    saver.save(sess, 'params', write_meta_graph=False)  # meta_graph is not recommended

    # plotting
    pred, l = sess.run([o, loss], {tf_x: x, tf_y: y})
    plt.figure(1, figsize=(10, 5))
    plt.subplot(121)
    plt.scatter(x, y)
    plt.plot(x, pred, 'r-', lw=5)
    plt.text(-1, 1.2, 'Save Loss=%.4f' % l, fontdict={'size': 15, 'color': 'red'})


def reload():
    print('This is reload')
    # build the entire network again, then restore the trained variables into it
    tf_x = tf.placeholder(tf.float32, x.shape)      # input x
    tf_y = tf.placeholder(tf.float32, y.shape)      # input y
    l_ = tf.layers.dense(tf_x, 10, tf.nn.relu)      # hidden layer
    o_ = tf.layers.dense(l_, 1)                     # output layer
    loss_ = tf.losses.mean_squared_error(tf_y, o_)  # compute cost

    sess = tf.Session()
    # no need to initialize variables; we are just restoring trained ones
    saver = tf.train.Saver()                        # define a saver for saving and restoring
    saver.restore(sess, 'params')

    # plotting
    pred, l = sess.run([o_, loss_], {tf_x: x, tf_y: y})
    plt.subplot(122)
    plt.scatter(x, y)
    plt.plot(x, pred, 'r-', lw=5)
    plt.text(-1, 1.2, 'Reload Loss=%.4f' % l, fontdict={'size': 15, 'color': 'red'})
    plt.show()


save()

# destroy previous net
tf.reset_default_graph()

reload()
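If restore() fails with a "not found in checkpoint" error, the rebuilt graph's variable names usually differ from the saved ones. A minimal sketch (my addition, assuming the 'params' checkpoint written by save() above; TF 1.x) for listing what the checkpoint actually contains:

reader = tf.train.NewCheckpointReader('params')
for name, shape in reader.get_variable_to_shape_map().items():
    print(name, shape)      # e.g. dense/kernel [1, 10]; exact names depend on graph construction order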
@@ -0,0 +1,69 @@
"""
Know more, visit my Python tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
tensorflow: 1.1.0
matplotlib
numpy
"""
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np

tf.set_random_seed(1)
np.random.seed(1)

LR = 0.01
BATCH_SIZE = 32

# fake data
x = np.linspace(-1, 1, 100)[:, np.newaxis]          # shape (100, 1)
noise = np.random.normal(0, 0.1, size=x.shape)
y = np.power(x, 2) + noise                          # shape (100, 1) + some noise

# plot dataset
plt.scatter(x, y)
plt.show()


# default network
class Net:
    def __init__(self, opt, **kwargs):
        self.x = tf.placeholder(tf.float32, [None, 1])
        self.y = tf.placeholder(tf.float32, [None, 1])
        l = tf.layers.dense(self.x, 20, tf.nn.relu)
        out = tf.layers.dense(l, 1)
        self.loss = tf.losses.mean_squared_error(self.y, out)
        self.train = opt(LR, **kwargs).minimize(self.loss)

# different nets, one per optimizer
net_SGD = Net(tf.train.GradientDescentOptimizer)
net_Momentum = Net(tf.train.MomentumOptimizer, momentum=0.9)
net_RMSprop = Net(tf.train.RMSPropOptimizer)
net_Adam = Net(tf.train.AdamOptimizer)
nets = [net_SGD, net_Momentum, net_RMSprop, net_Adam]

sess = tf.Session()
sess.run(tf.global_variables_initializer())

losses_his = [[], [], [], []]   # record loss for each net

# training
for step in range(300):         # for each training step
    index = np.random.randint(0, x.shape[0], BATCH_SIZE)   # random mini-batch indices
    b_x = x[index]
    b_y = y[index]

    for net, l_his in zip(nets, losses_his):
        _, l = sess.run([net.train, net.loss], {net.x: b_x, net.y: b_y})
        l_his.append(l)         # loss recorder

# plot loss history
labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']
for i, l_his in enumerate(losses_his):
    plt.plot(l_his, label=labels[i])
plt.legend(loc='best')
plt.xlabel('Steps')
plt.ylabel('Loss')
plt.ylim((0, 0.2))
plt.show()
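For reference, the opt(LR, **kwargs) call inside Net.__init__ is just an ordinary optimizer constructor. A minimal sketch (my addition) of what the four Net(...) lines above construct; hyperparameters other than momentum fall back to the library defaults:

tf.train.GradientDescentOptimizer(0.01)             # plain SGD, LR = 0.01
tf.train.MomentumOptimizer(0.01, momentum=0.9)      # SGD plus momentum
tf.train.RMSPropOptimizer(0.01)                     # default decay/epsilon
tf.train.AdamOptimizer(0.01)                        # default beta1=0.9, beta2=0.999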