Update lab-09-1-xor.py (hunkim#236)
* Update lab-09-1-xor.py

Removed unnecessary code.

* Update lab-09-1-xor.py

Add numpy again.

* Update lab-09-1-xor.py

Commit suggestion.
qoocrab authored and kkweon committed Jan 14, 2019
1 parent db9125b commit d1461d4
Showing 1 changed file with 13 additions and 22 deletions.
lab-09-1-xor.py (35 changes: 13 additions, 22 deletions)
@@ -3,33 +3,22 @@
 import numpy as np
 
 tf.set_random_seed(777)  # for reproducibility
-learning_rate = 0.1
 
-x_data = [[0, 0],
-          [0, 1],
-          [1, 0],
-          [1, 1]]
-y_data = [[0],
-          [1],
-          [1],
-          [0]]
-x_data = np.array(x_data, dtype=np.float32)
-y_data = np.array(y_data, dtype=np.float32)
+x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
+y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
 
 X = tf.placeholder(tf.float32, [None, 2])
 Y = tf.placeholder(tf.float32, [None, 1])
 
-W = tf.Variable(tf.random_normal([2, 1]), name='weight')
-b = tf.Variable(tf.random_normal([1]), name='bias')
+W = tf.Variable(tf.random_normal([2, 1]), name="weight")
+b = tf.Variable(tf.random_normal([1]), name="bias")
 
 # Hypothesis using sigmoid: tf.div(1., 1. + tf.exp(tf.matmul(X, W)))
 hypothesis = tf.sigmoid(tf.matmul(X, W) + b)
 
 # cost/loss function
-cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) *
-                       tf.log(1 - hypothesis))
-
-train = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
+cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
+train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)
 
 # Accuracy computation
 # True if hypothesis>0.5 else False
@@ -42,14 +31,16 @@
     sess.run(tf.global_variables_initializer())
 
     for step in range(10001):
-        sess.run(train, feed_dict={X: x_data, Y: y_data})
+        _, cost_val, w_val = sess.run(
+            [train, cost, W], feed_dict={X: x_data, Y: y_data}
+        )
         if step % 100 == 0:
-            print(step, sess.run(cost, feed_dict={
-                  X: x_data, Y: y_data}), sess.run(W))
+            print(step, cost_val, w_val)
 
     # Accuracy report
-    h, c, a = sess.run([hypothesis, predicted, accuracy],
-                       feed_dict={X: x_data, Y: y_data})
+    h, c, a = sess.run(
+        [hypothesis, predicted, accuracy], feed_dict={X: x_data, Y: y_data}
+    )
     print("\nHypothesis: ", h, "\nCorrect: ", c, "\nAccuracy: ", a)
 
 '''
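Note on the training-loop change: the refactored loop fetches the train op, cost, and W in a single sess.run call, so the graph is executed once per step, whereas the old loop ran it again every 100 steps just to print cost and W. A minimal, self-contained sketch of this fetch-list pattern (assuming TensorFlow 1.x; the toy loss and variable names here are illustrative, not taken from the file):

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 1])
w = tf.Variable(tf.zeros([1, 1]))
loss = tf.reduce_mean(tf.square(tf.matmul(x, w) - 1.0))
train_op = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # One graph execution returns the op's result (None) and both tensor values.
    _, loss_val, w_val = sess.run(
        [train_op, loss, w], feed_dict={x: [[1.0], [2.0]]}
    )
    print(loss_val, w_val)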
