From aa66e8f6b2e1f2c025214e0fa4eb338eeba153f8 Mon Sep 17 00:00:00 2001 From: Jongmin Date: Fri, 3 Mar 2017 03:56:04 +0900 Subject: [PATCH] Update lab-13-3-mnist_save_restore.py: comment out the "import matplotlib.pyplot as plt" line, fix the file-description comment on line 1, remove the unused "as scope" bindings from the tf.variable_scope() lines, and fix the "Savor" -> "Saver" typo on line 113. --- lab-13-3-mnist_save_restore.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lab-13-3-mnist_save_restore.py b/lab-13-3-mnist_save_restore.py index a40ae2fe..cfde24e2 100644 --- a/lab-13-3-mnist_save_restore.py +++ b/lab-13-3-mnist_save_restore.py @@ -1,7 +1,7 @@ -# Lab 7 Learning rate and Evaluation +# Lab 13 Saver and Restore import tensorflow as tf import random -import matplotlib.pyplot as plt +# import matplotlib.pyplot as plt import os from tensorflow.examples.tutorials.mnist import input_data @@ -33,7 +33,7 @@ # weights & bias for nn layers # http://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow -with tf.variable_scope('layer1') as scope: +with tf.variable_scope('layer1'): W1 = tf.get_variable("W", shape=[784, 512], initializer=tf.contrib.layers.xavier_initializer()) b1 = tf.Variable(tf.random_normal([512])) @@ -45,7 +45,7 @@ tf.summary.histogram("bias", b1) tf.summary.histogram("layer", L1) -with tf.variable_scope('layer2') as scope: +with tf.variable_scope('layer2'): W2 = tf.get_variable("W", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer()) b2 = tf.Variable(tf.random_normal([512])) @@ -56,7 +56,7 @@ tf.summary.histogram("bias", b2) tf.summary.histogram("layer", L2) -with tf.variable_scope('layer3') as scope: +with tf.variable_scope('layer3'): W3 = tf.get_variable("W", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer()) b3 = tf.Variable(tf.random_normal([512])) @@ -67,7 +67,7 @@ tf.summary.histogram("bias", b3) tf.summary.histogram("layer", L3) -with 
tf.variable_scope('layer4') as scope: +with tf.variable_scope('layer4'): W4 = tf.get_variable("W", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer()) b4 = tf.Variable(tf.random_normal([512])) @@ -78,7 +78,7 @@ tf.summary.histogram("bias", b4) tf.summary.histogram("layer", L4) -with tf.variable_scope('layer5') as scope: +with tf.variable_scope('layer5'): W5 = tf.get_variable("W", shape=[512, 10], initializer=tf.contrib.layers.xavier_initializer()) b5 = tf.Variable(tf.random_normal([10])) @@ -110,7 +110,7 @@ writer.add_graph(sess.graph) global_step = 0 -# Savor and Restore +# Saver and Restore saver = tf.train.Saver() checkpoint = tf.train.get_checkpoint_state(CHECK_POINT_DIR)