From 8605e58d7a599dab01b28ade049a172a3a254349 Mon Sep 17 00:00:00 2001
From: YellowStar5
Date: Mon, 17 Jul 2017 21:50:11 +0800
Subject: [PATCH] =?UTF-8?q?Update=202.=20=E6=94=B9=E9=80=A0=E5=90=8E?=
 =?UTF-8?q?=E7=9A=84mnist=5Ftrain.ipynb?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ...71\351\200\240\345\220\216\347\232\204mnist_train.ipynb" | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git "a/Deep_Learning_with_TensorFlow/1.0.0/Chapter09/2. \346\224\271\351\200\240\345\220\216\347\232\204mnist_train.ipynb" "b/Deep_Learning_with_TensorFlow/1.0.0/Chapter09/2. \346\224\271\351\200\240\345\220\216\347\232\204mnist_train.ipynb"
index 1594c97a..35db9866 100644
--- "a/Deep_Learning_with_TensorFlow/1.0.0/Chapter09/2. \346\224\271\351\200\240\345\220\216\347\232\204mnist_train.ipynb"
+++ "b/Deep_Learning_with_TensorFlow/1.0.0/Chapter09/2. \346\224\271\351\200\240\345\220\216\347\232\204mnist_train.ipynb"
@@ -87,6 +87,7 @@
     "    # 训练模型。\n",
     "    with tf.Session() as sess:\n",
     "        tf.global_variables_initializer().run()\n",
+    "        writer = tf.summary.FileWriter(\"/log/modified_mnist_train.log\", tf.get_default_graph())\n",
     "        for i in range(TRAINING_STEPS):\n",
     "            xs, ys = mnist.train.next_batch(BATCH_SIZE)\n",
     "\n",
@@ -98,13 +99,14 @@
     "                _, loss_value, step = sess.run(\n",
     "                    [train_op, loss, global_step], feed_dict={x: xs, y_: ys},\n",
     "                    options=run_options, run_metadata=run_metadata)\n",
+    "                writer.add_run_metadata(run_metadata=run_metadata, tag=(\"tag%d\"%i), global_step=i)\n",
     "                print(\"After %d training step(s), loss on training batch is %g.\" % (step, loss_value))\n",
     "            else:\n",
     "                _, loss_value, step = sess.run([train_op, loss, global_step], feed_dict={x: xs, y_: ys})\n",
     "\n",
     "\n",
-    "    writer = tf.summary.FileWriter(\"/log/modified_mnist_train.log\", tf.get_default_graph())\n",
-    "    writer.close()"
+    "\n",
+    "    writer.close()"
    ]
   },
   {
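
The hunks above do two things: they create the tf.summary.FileWriter at the start of the training session (previously it was constructed only after the loop and closed immediately, so it captured the graph but nothing else), and they call writer.add_run_metadata for each traced step so TensorBoard can display per-node compute time and memory usage for those runs. Below is a minimal, self-contained TF 1.x sketch of that pattern; the toy matmul graph and the "log/run_metadata_demo" path are illustrative assumptions, not part of the patched notebook.

import tensorflow as tf  # assumes TensorFlow 1.x

# A toy graph standing in for the notebook's MNIST training graph.
a = tf.random_normal([1000, 1000], name="a")
b = tf.random_normal([1000, 1000], name="b")
product = tf.matmul(a, b, name="product")

with tf.Session() as sess:
    # Create the writer before any traced runs, as the patch does, so the
    # graph and all run metadata land in the same event file.
    writer = tf.summary.FileWriter("log/run_metadata_demo", tf.get_default_graph())

    # Ask the runtime to trace timing and memory for this particular run.
    run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()
    sess.run(product, options=run_options, run_metadata=run_metadata)

    # Each traced run needs a unique tag; the notebook uses "tag%d" % i.
    writer.add_run_metadata(run_metadata, tag="step0", global_step=0)
    writer.close()

Creating the writer ahead of the loop is what makes the add_run_metadata calls possible: the old code only constructed the writer after training had finished, at which point no per-step metadata could have been attached.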