# https://deeplearningcourses.com/c/deep-learning-gans-and-variational-autoencoders
# https://www.udemy.com/deep-learning-gans-and-variational-autoencoders
from __future__ import print_function, division
from builtins import range, input
# Note: you may need to update your version of future
# sudo pip install -U future
import util
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
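
# Note: this script uses the TensorFlow 1.x API (placeholders, sessions).
# Under TensorFlow 2.x it will not run as-is; one workaround is to import
# `tensorflow.compat.v1 as tf` and call `tf.disable_eager_execution()` first.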

class Autoencoder:
  def __init__(self, D, M):
    # represents a batch of training data
    self.X = tf.placeholder(tf.float32, shape=(None, D))

    # input -> hidden
    # He initialization: scale by sqrt(2 / fan_in), i.e. sqrt(2 / D) here
    self.W = tf.Variable(tf.random_normal(shape=(D, M)) * np.sqrt(2.0 / D))
    self.b = tf.Variable(np.zeros(M).astype(np.float32))

    # hidden -> output (fan_in is now M)
    self.V = tf.Variable(tf.random_normal(shape=(M, D)) * np.sqrt(2.0 / M))
    self.c = tf.Variable(np.zeros(D).astype(np.float32))

    # construct the reconstruction
    self.Z = tf.nn.relu(tf.matmul(self.X, self.W) + self.b)
    logits = tf.matmul(self.Z, self.V) + self.c
    self.X_hat = tf.nn.sigmoid(logits)

    # compute the cost
    self.cost = tf.reduce_sum(
      tf.nn.sigmoid_cross_entropy_with_logits(
        labels=self.X,
        logits=logits
      )
    )
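    # (sigmoid_cross_entropy_with_logits applies the sigmoid internally,
    # which is why the raw logits, not self.X_hat, are passed in; each
    # pixel in [0, 1] is treated as a Bernoulli target, and the per-pixel
    # cross-entropies are summed over both pixels and batch samples)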

    # make the trainer
    self.train_op = tf.train.RMSPropOptimizer(learning_rate=0.001).minimize(self.cost)

    # set up session and variables for later
    self.init_op = tf.global_variables_initializer()
    self.sess = tf.InteractiveSession()
    self.sess.run(self.init_op)
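    # (an InteractiveSession installs itself as the default session; since
    # self.sess is always used explicitly here, a plain tf.Session() would
    # work just as well)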

  def fit(self, X, epochs=30, batch_sz=64):
    costs = []
    n_batches = len(X) // batch_sz
    print("n_batches:", n_batches)
    for i in range(epochs):
      print("epoch:", i)
      np.random.shuffle(X)
      for j in range(n_batches):
        batch = X[j*batch_sz:(j+1)*batch_sz]
        _, c = self.sess.run((self.train_op, self.cost), feed_dict={self.X: batch})
        c /= batch_sz  # average cost per sample, for readable logging
        costs.append(c)
        if j % 100 == 0:
          print("iter: %d, cost: %.3f" % (j, c))
    plt.plot(costs)
    plt.show()

  def predict(self, X):
    return self.sess.run(self.X_hat, feed_dict={self.X: X})
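

# usage note: Autoencoder.predict expects a 2-D array of shape (N, D);
# main() below wraps a single flattened image as [x] accordingly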

def main():
  # util.get_mnist() is the course's helper for loading MNIST; the pixel
  # values are assumed to be scaled to [0, 1], as the cross-entropy cost
  # requires
  X, Y = util.get_mnist()

  model = Autoencoder(784, 300)
  model.fit(X)

  # plot reconstructions of randomly chosen inputs
  done = False
  while not done:
    i = np.random.choice(len(X))
    x = X[i]
    im = model.predict([x]).reshape(28, 28)
    plt.subplot(1, 2, 1)
    plt.imshow(x.reshape(28, 28), cmap='gray')
    plt.title("Original")
    plt.subplot(1, 2, 2)
    plt.imshow(im, cmap='gray')
    plt.title("Reconstruction")
    plt.show()

    ans = input("Generate another?")
    if ans and ans[0] in ('n', 'N'):
      done = True


if __name__ == '__main__':
  main()