Update lab-12-0-rnn_basics.ipynb
1. tf.contrib.rnn.BasicRNNCell will be removed in a future version, so it was changed to tf.keras.layers.SimpleRNNCell.
2. tf.contrib.rnn.BasicLSTMCell will be removed in a future version, so it was changed to tf.nn.rnn_cell.LSTMCell (see the sketch after this list).
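
A minimal before/after sketch of the two replacements, assuming TensorFlow 1.x (where tf.contrib still exists but is deprecated); hidden_size is just an illustrative value:

import tensorflow as tf

hidden_size = 2  # small illustrative size, as in the notebook

# Before (deprecated, scheduled for removal along with tf.contrib):
#   cell = tf.contrib.rnn.BasicRNNCell(num_units=hidden_size)
#   cell = tf.contrib.rnn.BasicLSTMCell(num_units=hidden_size, state_is_tuple=True)

# After (the replacements this commit switches to):
rnn_cell = tf.keras.layers.SimpleRNNCell(units=hidden_size)  # note: keyword is `units`, not `num_units`
lstm_cell = tf.nn.rnn_cell.LSTMCell(num_units=hidden_size, state_is_tuple=True)

print(rnn_cell.state_size)   # 2
print(lstm_cell.state_size)  # LSTMStateTuple(c=2, h=2)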
qoocrab authored and kkweon committed Jan 17, 2019
1 parent edf393e commit dbbe397
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions lab-12-0-rnn_basics.ipynb
@@ -63,7 +63,7 @@
"with tf.variable_scope('one_cell') as scope:\n",
" # One cell RNN input_dim (4) -> output_dim (2)\n",
" hidden_size = 2\n",
" cell = tf.contrib.rnn.BasicRNNCell(num_units=hidden_size)\n",
" cell = tf.keras.layers.SimpleRNNCell(units=hidden_size)\n",
" print(cell.output_size, cell.state_size)\n",
"\n",
" x_data = np.array([[h]], dtype=np.float32) # x_data = [[[1,0,0,0]]]\n",
@@ -110,7 +110,7 @@
"with tf.variable_scope('two_sequances') as scope:\n",
" # One cell RNN input_dim (4) -> output_dim (2). sequence: 5\n",
" hidden_size = 2\n",
" cell = tf.contrib.rnn.BasicRNNCell(num_units=hidden_size)\n",
" cell = tf.keras.layers.SimpleRNNCell(units=hidden_size)\n",
" x_data = np.array([[h, e, l, l, o]], dtype=np.float32)\n",
" print(x_data.shape)\n",
" pp.pprint(x_data)\n",
@@ -184,7 +184,7 @@
" pp.pprint(x_data)\n",
" \n",
" hidden_size = 2\n",
" cell = rnn.BasicLSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" outputs, _states = tf.nn.dynamic_rnn(\n",
" cell, x_data, dtype=tf.float32)\n",
" sess.run(tf.global_variables_initializer())\n",
@@ -249,7 +249,7 @@
" pp.pprint(x_data)\n",
" \n",
" hidden_size = 2\n",
" cell = rnn.BasicLSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" outputs, _states = tf.nn.dynamic_rnn(\n",
" cell, x_data, sequence_length=[5,3,4], dtype=tf.float32)\n",
" sess.run(tf.global_variables_initializer())\n",
@@ -314,7 +314,7 @@
" \n",
" # One cell RNN input_dim (4) -> output_dim (5). sequence: 5, batch: 3\n",
" hidden_size=2\n",
" cell = rnn.BasicLSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=hidden_size, state_is_tuple=True)\n",
" initial_state = cell.zero_state(batch_size, tf.float32)\n",
" outputs, _states = tf.nn.dynamic_rnn(cell, x_data,\n",
" initial_state=initial_state, dtype=tf.float32)\n",
@@ -412,7 +412,7 @@
"source": [
"with tf.variable_scope('generated_data') as scope:\n",
" # One cell RNN input_dim (3) -> output_dim (5). sequence: 5, batch: 3\n",
" cell = rnn.BasicLSTMCell(num_units=5, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=5, state_is_tuple=True)\n",
" initial_state = cell.zero_state(batch_size, tf.float32)\n",
" outputs, _states = tf.nn.dynamic_rnn(cell, x_data,\n",
" initial_state=initial_state, dtype=tf.float32)\n",
@@ -470,7 +470,7 @@
"source": [
"with tf.variable_scope('MultiRNNCell') as scope:\n",
" # Make rnn\n",
" cell = rnn.BasicLSTMCell(num_units=5, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=5, state_is_tuple=True)\n",
" cell = rnn.MultiRNNCell([cell] * 3, state_is_tuple=True) # 3 layers\n",
"\n",
" # rnn in/out\n",
@@ -529,7 +529,7 @@
],
"source": [
"with tf.variable_scope('dynamic_rnn') as scope:\n",
" cell = rnn.BasicLSTMCell(num_units=5, state_is_tuple=True)\n",
" cell = tf.nn.rnn_cell.LSTMCell(num_units=5, state_is_tuple=True)\n",
" outputs, _states = tf.nn.dynamic_rnn(cell, x_data, dtype=tf.float32,\n",
" sequence_length=[1, 3, 2])\n",
" # lentgh 1 for batch 1, lentgh 2 for batch 2\n",
@@ -642,8 +642,8 @@
"source": [
"with tf.variable_scope('bi-directional') as scope:\n",
" # bi-directional rnn\n",
" cell_fw = rnn.BasicLSTMCell(num_units=5, state_is_tuple=True)\n",
" cell_bw = rnn.BasicLSTMCell(num_units=5, state_is_tuple=True)\n",
" cell_fw = tf.nn.rnn_cell.LSTMCell(num_units=5, state_is_tuple=True)\n",
" cell_bw = tf.nn.rnn_cell.LSTMCell(num_units=5, state_is_tuple=True)\n",
"\n",
" outputs, states = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw, x_data,\n",
" sequence_length=[2, 3, 1],\n",
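
For reference, a minimal end-to-end sketch showing the replacement LSTM cell driving tf.nn.dynamic_rnn, mirroring the notebook's one-hot "hello" setup (assumes TensorFlow 1.x graph mode; the scope name 'sketch' is illustrative and not part of the notebook):

import numpy as np
import tensorflow as tf

# One-hot encodings for the characters h, e, l, o (input_dim = 4)
h = [1, 0, 0, 0]
e = [0, 1, 0, 0]
l = [0, 0, 1, 0]
o = [0, 0, 0, 1]
x_data = np.array([[h, e, l, l, o]], dtype=np.float32)  # shape (1, 5, 4)

with tf.variable_scope('sketch'):
    cell = tf.nn.rnn_cell.LSTMCell(num_units=2, state_is_tuple=True)
    outputs, _states = tf.nn.dynamic_rnn(cell, x_data, dtype=tf.float32)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(outputs).shape)  # (1, 5, 2): batch 1, sequence 5, hidden_size 2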
