Replace deprecated get_or_create_global_step
tf.contrib.framework.get_or_create_global_step -> tf.train.get_or_create_global_step
martinkersner committed Nov 15, 2017
1 parent 4364390 · commit a2b2088
Showing 7 changed files with 9 additions and 8 deletions.
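
The rename is mechanical: tf.contrib.framework.get_or_create_global_step had been deprecated in favor of tf.train.get_or_create_global_step, which is the supported TF 1.x home of the same helper. As a rough illustration of the call sites being updated, here is a minimal sketch for a TF 1.x runtime; the loss and optimizer are placeholder examples, not code from this commit:

import tensorflow as tf

# Before (deprecated spelling, removed along with tf.contrib in TF 2.x):
#   global_step = tf.contrib.framework.get_or_create_global_step()
# After (canonical TF 1.x spelling used throughout this commit):
global_step = tf.train.get_or_create_global_step()

# The variable is usually handed to the optimizer so every update
# increments it, as in the ptb_word_lm.py hunk below:
w = tf.get_variable('w', shape=[], initializer=tf.ones_initializer())
loss = tf.square(w)
train_op = tf.train.GradientDescentOptimizer(0.1).minimize(
    loss, global_step=global_step)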
research/adversarial_text/graphs.py (2 changes: 1 addition & 1 deletion)

@@ -116,7 +116,7 @@ class VatxtModel(object):
   """
 
   def __init__(self, cl_logits_input_dim=None):
-    self.global_step = tf.contrib.framework.get_or_create_global_step()
+    self.global_step = tf.train.get_or_create_global_step()
     self.vocab_freqs = _get_vocab_freqs()
 
     # Cache VatxtInput objects
research/learning_to_remember_rare_events/model.py (2 changes: 1 addition & 1 deletion)

@@ -137,7 +137,7 @@ def __init__(self, input_dim, output_dim, rep_dim, memory_size, vocab_size,
     self.memory = self.get_memory()
     self.classifier = self.get_classifier()
 
-    self.global_step = tf.contrib.framework.get_or_create_global_step()
+    self.global_step = tf.train.get_or_create_global_step()
 
   def get_embedder(self):
     return LeNet(int(self.input_dim ** 0.5), 1, self.rep_dim)
research/pcl_rl/trainer.py (4 changes: 2 additions & 2 deletions)

@@ -364,7 +364,7 @@ def init_fn(sess, saver):
 
     if FLAGS.supervisor:
       with tf.device(tf.ReplicaDeviceSetter(FLAGS.ps_tasks, merge_devices=True)):
-        self.global_step = tf.contrib.framework.get_or_create_global_step()
+        self.global_step = tf.train.get_or_create_global_step()
         tf.set_random_seed(FLAGS.tf_seed)
         self.controller = self.get_controller()
         self.model = self.controller.model
@@ -382,7 +382,7 @@ def init_fn(sess, saver):
       sess = sv.PrepareSession(FLAGS.master)
     else:
       tf.set_random_seed(FLAGS.tf_seed)
-      self.global_step = tf.contrib.framework.get_or_create_global_step()
+      self.global_step = tf.train.get_or_create_global_step()
       self.controller = self.get_controller()
       self.model = self.controller.model
       self.controller.setup()
research/resnet/resnet_model.py (2 changes: 1 addition & 1 deletion)

@@ -56,7 +56,7 @@ def __init__(self, hps, images, labels, mode):
 
   def build_graph(self):
     """Build a whole graph for the model."""
-    self.global_step = tf.contrib.framework.get_or_create_global_step()
+    self.global_step = tf.train.get_or_create_global_step()
     self._build_model()
     if self.mode == 'train':
       self._build_train_op()
research/slim/nets/nasnet/nasnet_utils.py (3 changes: 2 additions & 1 deletion)

@@ -411,8 +411,9 @@ def _apply_drop_path(self, net):
       tf.summary.scalar('layer_ratio', layer_ratio)
       drop_path_keep_prob = 1 - layer_ratio * (1 - drop_path_keep_prob)
       # Decrease the keep probability over time
-      current_step = tf.cast(tf.contrib.framework.get_or_create_global_step(),
+      current_step = tf.cast(tf.train.get_or_create_global_step(),
                              tf.float32)
+      print("HERE")
       drop_path_burn_in_steps = self._total_training_steps
       current_ratio = (
           current_step / drop_path_burn_in_steps)
tutorials/image/cifar10/cifar10_train.py (2 changes: 1 addition & 1 deletion)

@@ -61,7 +61,7 @@
 def train():
   """Train CIFAR-10 for a number of steps."""
   with tf.Graph().as_default():
-    global_step = tf.contrib.framework.get_or_create_global_step()
+    global_step = tf.train.get_or_create_global_step()
 
     # Get images and labels for CIFAR-10.
     # Force input pipeline to CPU:0 to avoid operations sometimes ending up on
tutorials/rnn/ptb/ptb_word_lm.py (2 changes: 1 addition & 1 deletion)

@@ -162,7 +162,7 @@ def __init__(self, is_training, config, input_):
     optimizer = tf.train.GradientDescentOptimizer(self._lr)
     self._train_op = optimizer.apply_gradients(
         zip(grads, tvars),
-        global_step=tf.contrib.framework.get_or_create_global_step())
+        global_step=tf.train.get_or_create_global_step())
 
     self._new_lr = tf.placeholder(
         tf.float32, shape=[], name="new_learning_rate")
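
Because get_or_create_global_step returns the graph's existing global step when one is already registered, the rename should not change behavior at any of these call sites. A quick sanity check one could run on a TF 1.x build where both symbols are still present (an illustrative snippet, not part of the commit):

import tensorflow as tf

with tf.Graph().as_default():
  old = tf.contrib.framework.get_or_create_global_step()  # deprecated alias
  new = tf.train.get_or_create_global_step()
  # Both calls look up the same 'global_step' variable in the graph's
  # collection, so they return the identical object.
  assert old is new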
