Skip to content

Commit 88e6626

Browse files
committed
changed file name for backprop
1 parent 9853fce commit 88e6626

File tree

1 file changed: +22 −21 lines changed

lab-09_8_mnist_back_prop.ipynb renamed to lab-08_4_mnist_back_prop.ipynb

+22-21
Original file line numberDiff line numberDiff line change
@@ -88,8 +88,9 @@
8888
{
8989
"data": {
9090
"text/plain": [
91-
"tensor([ 1.8153, 0.1568, 0.0348, 0.0334, 0.9967, 0.3957, 1.0805, 0.0302,\n",
92-
" -0.4433, -0.0206], device='cuda:0', grad_fn=<CopyBackwards>)"
91+
"Parameter containing:\n",
92+
"tensor([ 0.3078, -1.9857, 1.0512, 1.5122, -1.0199, -0.7402, -1.3111, 0.6142,\n",
93+
" -0.6474, 0.1758], requires_grad=True)"
9394
]
9495
},
9596
"execution_count": 7,
@@ -110,7 +111,7 @@
110111
"metadata": {},
111112
"outputs": [],
112113
"source": [
113-
"def sigma(x):\n",
114+
"def sigmoid(x):\n",
114115
" # sigmoid function\n",
115116
" return 1.0 / (1.0 + torch.exp(-x))\n",
116117
" # return torch.div(torch.tensor(1), torch.add(torch.tensor(1.0), torch.exp(-x)))"
@@ -122,9 +123,9 @@
122123
"metadata": {},
123124
"outputs": [],
124125
"source": [
125-
"def sigma_prime(x):\n",
126+
"def sigmoid_prime(x):\n",
126127
" # derivative of the sigmoid function\n",
127-
" return sigma(x) * (1 - sigma(x))"
128+
" return sigmoid(x) * (1 - sigmoid(x))"
128129
]
129130
},
130131
{
@@ -136,16 +137,16 @@
136137
"name": "stdout",
137138
"output_type": "stream",
138139
"text": [
139-
"829\n",
140-
"846\n",
141-
"870\n",
142-
"876\n",
140+
"736\n",
141+
"862\n",
142+
"860\n",
143143
"881\n",
144-
"892\n",
145-
"894\n",
146-
"894\n",
147-
"891\n",
148-
"898\n"
144+
"874\n",
145+
"890\n",
146+
"904\n",
147+
"923\n",
148+
"916\n",
149+
"920\n"
149150
]
150151
}
151152
],
@@ -161,19 +162,19 @@
161162
" X = X.view(-1, 28 * 28).to(device)\n",
162163
" Y = torch.zeros((batch_size, 10)).scatter_(1, Y.unsqueeze(1), 1).to(device) # one-hot\n",
163164
" l1 = torch.add(torch.matmul(X, w1), b1)\n",
164-
" a1 = sigma(l1)\n",
165+
" a1 = sigmoid(l1)\n",
165166
" l2 = torch.add(torch.matmul(a1, w2), b2)\n",
166-
" y_pred = sigma(l2)\n",
167+
" y_pred = sigmoid(l2)\n",
167168
"\n",
168169
" diff = y_pred - Y\n",
169170
"\n",
170171
" # Back prop (chain rule)\n",
171-
" d_l2 = diff * sigma_prime(l2)\n",
172+
" d_l2 = diff * sigmoid_prime(l2)\n",
172173
" d_b2 = d_l2\n",
173174
" d_w2 = torch.matmul(torch.transpose(a1, 0, 1), d_l2)\n",
174175
"\n",
175176
" d_a1 = torch.matmul(d_l2, torch.transpose(w2, 0, 1))\n",
176-
" d_l1 = d_a1 * sigma_prime(l1)\n",
177+
" d_l1 = d_a1 * sigmoid_prime(l1)\n",
177178
" d_b1 = d_l1\n",
178179
" d_w1 = torch.matmul(torch.transpose(X, 0, 1), d_l1)\n",
179180
"\n",
@@ -184,9 +185,9 @@
184185
"\n",
185186
" if i % 1000 == 0:\n",
186187
" l1 = torch.add(torch.matmul(X_test, w1), b1)\n",
187-
" a1 = sigma(l1)\n",
188+
" a1 = sigmoid(l1)\n",
188189
" l2 = torch.add(torch.matmul(a1, w2), b2)\n",
189-
" y_pred = sigma(l2)\n",
190+
" y_pred = sigmoid(l2)\n",
190191
" acct_mat = torch.argmax(y_pred, 1) == Y_test\n",
191192
" acct_res = acct_mat.sum()\n",
192193
" print(acct_res.item())\n",
@@ -212,7 +213,7 @@
212213
"name": "python",
213214
"nbconvert_exporter": "python",
214215
"pygments_lexer": "ipython3",
215-
"version": "3.5.3"
216+
"version": "3.6.8"
216217
}
217218
},
218219
"nbformat": 4,

0 commit comments

Comments (0)