forked from hunkim/DeepLearningZeroToAll
Commit
* Added XOR Tensorboard
* Updated .gitignore file to ignore logs directory
* Renamed lab-09-7 to lab-09-3
1 parent 255fac7 · commit be1b26c
Showing 2 changed files with 78 additions and 0 deletions.
.gitignore

@@ -9,3 +9,4 @@ tb
 Untitled*.ipynb
 .ropeproject
 xlab*
+logs
New file (77 additions):

@@ -0,0 +1,77 @@
# Lab 9 XOR
# XOR with a 2-layer sigmoid network, logging summaries for TensorBoard
import tensorflow as tf
import numpy as np
tf.set_random_seed(777)  # for reproducibility

x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

X = tf.placeholder(tf.float32, [None, 2], name='x-input')
Y = tf.placeholder(tf.float32, [None, 1], name='y-input')

W1 = tf.Variable(tf.random_normal([2, 2]), name='weight1')
b1 = tf.Variable(tf.random_normal([2]), name='bias1')

with tf.name_scope("layer2") as scope:
    layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)

W2 = tf.Variable(tf.random_normal([2, 1]), name='weight2')
b2 = tf.Variable(tf.random_normal([1]), name='bias2')

with tf.name_scope("layer3") as scope:
    hypothesis = tf.sigmoid(tf.matmul(layer1, W2) + b2)

# Histogram summaries of weights, biases, and labels for TensorBoard
w1_hist = tf.summary.histogram("weights1", W1)
w2_hist = tf.summary.histogram("weights2", W2)
b1_hist = tf.summary.histogram("biases1", b1)
b2_hist = tf.summary.histogram("biases2", b2)
y_hist = tf.summary.histogram("y", Y)

# cost/loss function (binary cross-entropy)
with tf.name_scope("cost") as scope:
    cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) *
                           tf.log(1 - hypothesis))
    cost_summ = tf.summary.scalar("cost", cost)

with tf.name_scope("train") as scope:
    train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)

# Accuracy computation
# True if hypothesis > 0.5 else False
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))

# Launch graph
with tf.Session() as sess:
    # tensorboard --logdir=./logs/xor_logs
    merged = tf.summary.merge_all()
    writer = tf.summary.FileWriter("./logs/xor_logs", sess.graph)

    # Initialize TensorFlow variables
    sess.run(tf.global_variables_initializer())

    for step in range(10001):
        summary, _ = sess.run([merged, train], feed_dict={X: x_data, Y: y_data})
        if step % 100 == 0:
            print(step, sess.run(cost, feed_dict={
                X: x_data, Y: y_data}), sess.run([W1, W2]))
        writer.add_summary(summary, step)

    # Accuracy report
    h, c, a = sess.run([hypothesis, predicted, accuracy],
                       feed_dict={X: x_data, Y: y_data})
    print("\nHypothesis: ", h, "\nCorrect: ", c, "\nAccuracy: ", a)


'''
Hypothesis: [[ 0.01338218]
 [ 0.98166394]
 [ 0.98809403]
 [ 0.01135799]]
Correct: [[ 0.]
 [ 1.]
 [ 1.]
 [ 0.]]
Accuracy: 1.0
'''
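
Note: the script above uses TensorFlow 1.x graph-mode APIs (tf.placeholder, tf.Session, tf.summary.FileWriter), which are not available as-is in TensorFlow 2.x. As a minimal sketch, and not part of this commit, the same XOR network and TensorBoard logging could be ported to TensorFlow 2.x with Keras roughly as below; the layer sizes, learning rate, epoch count, and log directory mirror the script, while the callback-based logging and everything else is an assumption about how one would port it.

# Hypothetical TensorFlow 2.x port of the script above (not part of this commit).
import numpy as np
import tensorflow as tf

x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

# Same architecture: 2-unit sigmoid hidden layer, 1-unit sigmoid output.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(2, activation='sigmoid', input_shape=(2,)),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.1),
              loss='binary_crossentropy', metrics=['accuracy'])

# Logs loss/accuracy scalars and weight histograms to ./logs/xor_logs;
# view with: tensorboard --logdir=./logs/xor_logs
tb = tf.keras.callbacks.TensorBoard(log_dir='./logs/xor_logs', histogram_freq=100)
model.fit(x_data, y_data, epochs=10001, verbose=0, callbacks=[tb])

print(model.predict(x_data))  # should approach [[0], [1], [1], [0]]

Since Keras initializes weights differently than tf.random_normal with a fixed seed, the equivalence is approximate and the exact converged values will differ from the output shown above.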