Commit

Update lab-11-5-mnist_cnn_ensemble_layers.py
dropout rate = 1 - keep_prob
wizardbc authored and kkweon committed May 4, 2018
1 parent 976102e commit 29646ec
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions lab-11-5-mnist_cnn_ensemble_layers.py
@@ -44,23 +44,23 @@ def _build_net(self):
     pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2],
                                     padding="SAME", strides=2)
     dropout1 = tf.layers.dropout(inputs=pool1,
-                                 rate=0.7, training=self.training)
+                                 rate=0.3, training=self.training)

     # Convolutional Layer #2 and Pooling Layer #2
     conv2 = tf.layers.conv2d(inputs=dropout1, filters=64, kernel_size=[3, 3],
                              padding="SAME", activation=tf.nn.relu)
     pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2],
                                     padding="SAME", strides=2)
     dropout2 = tf.layers.dropout(inputs=pool2,
-                                 rate=0.7, training=self.training)
+                                 rate=0.3, training=self.training)

     # Convolutional Layer #3 and Pooling Layer #3
     conv3 = tf.layers.conv2d(inputs=dropout2, filters=128, kernel_size=[3, 3],
                              padding="SAME", activation=tf.nn.relu)
     pool3 = tf.layers.max_pooling2d(inputs=conv3, pool_size=[2, 2],
                                     padding="SAME", strides=2)
     dropout3 = tf.layers.dropout(inputs=pool3,
-                                 rate=0.7, training=self.training)
+                                 rate=0.3, training=self.training)

     # Dense Layer with Relu
     flat = tf.reshape(dropout3, [-1, 128 * 4 * 4])
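Why the change: tf.layers.dropout takes the probability of DROPPING a unit, while the older tf.nn.dropout takes the probability of KEEPING one. The lab's intent was keep_prob = 0.7, so the correct layers-API argument is rate = 1 - 0.7 = 0.3; passing rate=0.7 silently dropped 70% of the activations. A minimal sketch (not part of the commit; variable names are illustrative and it assumes TensorFlow 1.x, matching the lab code) showing the two APIs side by side:

    import tensorflow as tf  # TensorFlow 1.x, as used by the lab scripts

    x = tf.placeholder(tf.float32, [None, 10])
    keep_prob = 0.7  # intent: keep 70% of activations during training

    # Old API: the argument is the KEEP probability.
    old_style = tf.nn.dropout(x, keep_prob=keep_prob)

    # Layers API: the argument is the DROP probability, hence 1 - keep_prob = 0.3.
    new_style = tf.layers.dropout(x, rate=1 - keep_prob, training=True)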
