Modify MultiRNNCell problems (hunkim#164)
* Modify MultiRNNCell problems

* Modify duplicated code
IamSSam authored and kkweon committed May 19, 2017
1 parent 35e12d4 commit 566d851
Showing 1 changed file with 8 additions and 3 deletions.
11 changes: 8 additions & 3 deletions lab-12-4-rnn_long_char.py
@@ -3,6 +3,7 @@
import tensorflow as tf
import numpy as np
from tensorflow.contrib import rnn

tf.set_random_seed(777) # reproducibility

sentence = ("if you want to build a ship, don't drum up people together to "
@@ -40,12 +41,16 @@
X_one_hot = tf.one_hot(X, num_classes)
print(X_one_hot) # check out the shape


# Make a lstm cell with hidden_size (each unit output vector size)
-cell = rnn.BasicLSTMCell(hidden_size, state_is_tuple=True)
-cell = rnn.MultiRNNCell([cell] * 2, state_is_tuple=True)
+def lstm_cell():
+    cell = rnn.BasicLSTMCell(hidden_size, state_is_tuple=True)
+    return cell
+
+multi_cells = rnn.MultiRNNCell([lstm_cell() for _ in range(2)], state_is_tuple=True)

# outputs: unfolding size x hidden size, state = hidden size
-outputs, _states = tf.nn.dynamic_rnn(cell, X_one_hot, dtype=tf.float32)
+outputs, _states = tf.nn.dynamic_rnn(multi_cells, X_one_hot, dtype=tf.float32)

# FC layer
X_for_fc = tf.reshape(outputs, [-1, hidden_size])
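For context on the fix: rnn.MultiRNNCell([cell] * 2) stacks the same BasicLSTMCell object twice, so both layers point at one cell and its variables, while the new lstm_cell() factory builds an independent cell per layer. Below is a minimal, self-contained sketch of the corrected pattern, assuming TensorFlow 1.x with tf.contrib.rnn (as in this file); the shape constants are hypothetical stand-ins for values the real script derives from the training sentence.

import tensorflow as tf
from tensorflow.contrib import rnn

hidden_size = 25   # hypothetical; the script sets this from its character set
seq_length = 10    # hypothetical sequence length
num_classes = 25   # hypothetical one-hot depth

# Input shaped like X_one_hot in the script: (batch, seq_length, num_classes)
X_one_hot = tf.placeholder(tf.float32, [None, seq_length, num_classes])

# Old, problematic pattern: [cell] * 2 repeats ONE cell object, so both
# layers of the MultiRNNCell share that single cell.
# cell = rnn.BasicLSTMCell(hidden_size, state_is_tuple=True)
# stacked = rnn.MultiRNNCell([cell] * 2, state_is_tuple=True)

# Fixed pattern from this commit: a factory returns a fresh cell per layer,
# so each layer owns its own weights.
def lstm_cell():
    return rnn.BasicLSTMCell(hidden_size, state_is_tuple=True)

multi_cells = rnn.MultiRNNCell([lstm_cell() for _ in range(2)],
                               state_is_tuple=True)

outputs, _states = tf.nn.dynamic_rnn(multi_cells, X_one_hot, dtype=tf.float32)
print(outputs)  # Tensor with shape (?, seq_length, hidden_size)

Creating one cell per layer lets each layer keep separate weights, which the [cell] * 2 construction could not guarantee.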
