
initialize_all_variables -> global_variables_initializer
pkmital committed Jan 4, 2017
1 parent b95c1ee commit b13cadb
Showing 11 changed files with 15 additions and 15 deletions.
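The change in every file is the same one-line rename: `tf.initialize_all_variables` was deprecated (around the TensorFlow 0.12 release) and `tf.global_variables_initializer` is its drop-in replacement, returning the same initialization op. A minimal sketch of the migration under the TF 1.x graph/session API (the variable `x` is only for illustration, not from the course code):

```python
import tensorflow as tf

x = tf.Variable(1.0)
y = x * 5

sess = tf.Session()
# Deprecated spelling replaced throughout this commit:
#   sess.run(tf.initialize_all_variables())
sess.run(tf.global_variables_initializer())
print(sess.run(y))  # 5.0
```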
2 changes: 1 addition & 1 deletion GLOSSARY.md
@@ -617,7 +617,7 @@ y = x * 5
 g = tf.gradients(y, x)

 sess = tf.InteractiveSession()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 g[0].eval()
 # prints 5
 ```
4 changes: 2 additions & 2 deletions session-3/libs/vae.py
@@ -328,7 +328,7 @@ def train_vae(files,
 # We create a session to use the graph
 sess = tf.Session()
 saver = tf.train.Saver()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # This will handle our threaded image pipeline
 coord = tf.train.Coordinator()
@@ -434,7 +434,7 @@ def test_mnist():

 # We create a session to use the graph
 sess = tf.Session()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # Fit all training data
 t_i = 0
2 changes: 1 addition & 1 deletion session-4/libs/deepdream.py
@@ -397,7 +397,7 @@ def guided_dream(input_img,
 l2_loss = l2_loss_weight * tf.reduce_mean(tf.nn.l2_loss(x))

 ascent = tf.gradients(feature_loss + softmax_loss + tv_loss + l2_loss, x)[0]
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 imgs = []
 for it_i in range(n_iterations):
     this_res, this_feature_loss, this_softmax_loss, this_tv_loss, this_l2_loss = sess.run(
2 changes: 1 addition & 1 deletion session-4/libs/stylenet.py
@@ -142,7 +142,7 @@ def stylize(content_img, style_img, base_img=None, saveto=None, gif_step=5,
 loss = content_weight * content_loss + style_weight * style_loss
 optimizer = tf.train.AdamOptimizer(0.01).minimize(loss)

-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 imgs = []
 for it_i in range(n_iterations):
     _, this_loss, synth = sess.run(
4 changes: 2 additions & 2 deletions session-4/libs/vae.py
@@ -328,7 +328,7 @@ def train_vae(files,
 # We create a session to use the graph
 sess = tf.Session()
 saver = tf.train.Saver()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # This will handle our threaded image pipeline
 coord = tf.train.Coordinator()
@@ -434,7 +434,7 @@ def test_mnist():

 # We create a session to use the graph
 sess = tf.Session()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # Fit all training data
 t_i = 0
4 changes: 2 additions & 2 deletions session-5/libs/charrnn.py
@@ -107,7 +107,7 @@ def train(txt, batch_size=100, sequence_length=150, n_cells=100, n_layers=3,
 gradient_clip=gradient_clip,
 learning_rate=learning_rate)

-init_op = tf.initialize_all_variables()
+init_op = tf.global_variables_initializer()
 saver = tf.train.Saver()
 sess.run(init_op)
 if os.path.exists(ckpt_name):
@@ -186,7 +186,7 @@ def infer(txt, ckpt_name, n_iterations, n_cells=512, n_layers=3,
 gradient_clip=gradient_clip,
 learning_rate=learning_rate)

-init_op = tf.initialize_all_variables()
+init_op = tf.global_variables_initializer()
 saver = tf.train.Saver()
 sess.run(init_op)
 if os.path.exists(ckpt_name):
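Both charrnn.py hunks follow the same TF 1.x idiom: run the (now renamed) initializer so every variable has a value, then overwrite those values from a checkpoint when one exists. A hedged sketch of that pattern, with the model construction elided, a toy variable standing in for the RNN's parameters, and a hypothetical `ckpt_name` path:

```python
import os
import tensorflow as tf

weights = tf.Variable(tf.zeros([10]), name='weights')  # stand-in for the model
ckpt_name = 'charrnn.ckpt'  # hypothetical checkpoint path

sess = tf.Session()
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()

# Give every variable a value first...
sess.run(init_op)
# ...then restore on top of the initialization if a checkpoint already exists.
if os.path.exists(ckpt_name):
    saver.restore(sess, ckpt_name)
```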
2 changes: 1 addition & 1 deletion session-5/libs/deepdream.py
@@ -397,7 +397,7 @@ def guided_dream(input_img,
 l2_loss = l2_loss_weight * tf.reduce_mean(tf.nn.l2_loss(x))

 ascent = tf.gradients(feature_loss + softmax_loss + tv_loss + l2_loss, x)[0]
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 imgs = []
 for it_i in range(n_iterations):
     this_res, this_feature_loss, this_softmax_loss, this_tv_loss, this_l2_loss = sess.run(
2 changes: 1 addition & 1 deletion session-5/libs/gan.py
@@ -436,7 +436,7 @@ def train_ds():
 # %%
 # We create a session to use the graph
 sess = tf.Session()
-init_op = tf.initialize_all_variables()
+init_op = tf.global_variables_initializer()

 saver = tf.train.Saver()
 sums = gan['sums']
2 changes: 1 addition & 1 deletion session-5/libs/stylenet.py
@@ -142,7 +142,7 @@ def stylize(content_img, style_img, base_img=None, saveto=None, gif_step=5,
 loss = content_weight * content_loss + style_weight * style_loss
 optimizer = tf.train.AdamOptimizer(0.01).minimize(loss)

-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 imgs = []
 for it_i in range(n_iterations):
     _, this_loss, synth = sess.run(
4 changes: 2 additions & 2 deletions session-5/libs/vae.py
@@ -328,7 +328,7 @@ def train_vae(files,
 # We create a session to use the graph
 sess = tf.Session()
 saver = tf.train.Saver()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # This will handle our threaded image pipeline
 coord = tf.train.Coordinator()
@@ -434,7 +434,7 @@ def test_mnist():

 # We create a session to use the graph
 sess = tf.Session()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())

 # Fit all training data
 t_i = 0
2 changes: 1 addition & 1 deletion session-5/libs/vaegan.py
@@ -531,7 +531,7 @@ def train_vaegan(files,

 sess = tf.Session()
 saver = tf.train.Saver()
-sess.run(tf.initialize_all_variables())
+sess.run(tf.global_variables_initializer())
 coord = tf.train.Coordinator()
 tf.get_default_graph().finalize()
 threads = tf.train.start_queue_runners(sess=sess, coord=coord)
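In the pipeline scripts (vae.py, vaegan.py) the renamed initializer sits at a fixed point in the queue-runner lifecycle: initialize the variables, then start the Coordinator and reader threads, and shut them down through the coordinator when training ends. A minimal sketch of that ordering, using a toy filename queue in place of the course's threaded image pipeline:

```python
import tensorflow as tf

# Toy filename queue standing in for the threaded image pipeline.
filenames = tf.train.string_input_producer(['a.jpg', 'b.jpg'])
next_file = filenames.dequeue()
step = tf.Variable(0, name='step')  # stand-in for model variables

sess = tf.Session()
sess.run(tf.global_variables_initializer())  # variables first

coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)  # then pipeline threads
try:
    for _ in range(4):
        print(sess.run(next_file))
finally:
    coord.request_stop()  # signal the reader threads to stop
    coord.join(threads)   # and wait for them to finish
sess.close()
```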
