#!/usr/bin/env python
# coding: utf-8
#%%
import torch
x = torch.tensor(2., requires_grad=True)  # scalar leaf tensor tracked by autograd
#%%
y = x + 2
z = y * y  # z = (x + 2)**2
print(z)
#%%
z.backward()
print(x.grad)
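#%%
# A minimal sanity check (added): z = (x + 2)**2, so dz/dx = 2*(x + 2),
# which is 8 at x = 2 and should match the x.grad printed above.
assert x.grad.item() == 2 * (x.item() + 2)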
#%%
x = torch.ones(2, 2, requires_grad=True)  # set the flag when gradients need to be computed
print(x)
#%%
y = x + 2
print(y)
#%%
print(y.grad_fn)  # y was created by an addition, so this shows an AddBackward0 node
#%%
z = y * y * 3  # z = 3 * (x + 2)**2 elementwise
# Note: z is a 2x2 tensor, so z.backward() with no argument would raise an
# error (backward() needs a scalar output); the scalar out in the next cell
# is used for backpropagation instead.
#%%
out = z.mean()  # scalar: out = (1/4) * sum of 3 * (x + 2)**2
print(z, out)
# %%
a = torch.randn(2, 2)
a = ((a * 3) / (a - 1))
print(a.requires_grad)  # defaults to False
a.requires_grad_(True)  # requires_grad_() changes the tensor's requires_grad flag in place
print(a.requires_grad)
b = (a * a).sum()
print(b.grad_fn)
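#%%
# Note (added): a is still a leaf tensor even though it was produced by ops,
# because requires_grad was off when those ops ran; enabling it in place
# afterwards keeps a a leaf with grad_fn = None.
print(a.is_leaf, a.grad_fn)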
#%%
out.backward()  # backpropagation
# since out is a scalar, no gradient argument is needed and x.grad can be read directly
#%%
print(z.grad)  # None: z is a non-leaf tensor, so its grad is not retained
print(y.grad)  # None: same for y (call retain_grad() on them to keep these)
print(x.grad)  # d(out)/dx = 4.5 for every element
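#%%
# Analytic check (added): out = (1/4) * sum(3 * (x + 2)**2), so
# d(out)/dx_i = 6 * (x_i + 2) / 4 = 4.5 at x_i = 1.
assert torch.allclose(x.grad, torch.full((2, 2), 4.5))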
#%%
x = torch.randn(3, requires_grad=True)
y = x
print(y)
i = 0
while y.data.norm() < 1000:  # keep doubling until the norm exceeds 1000
    y = y * 2
    i += 1
print(y)
print(i)
# %%
v = torch.tensor([1, .1, .1], dtype=torch.float)
y.backward(v)  # vector-Jacobian product: x.grad = J^T v
print(x.grad)
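# %%
# Check (added): the loop computes y = (2**i) * x elementwise, so the
# Jacobian is (2**i) * I and the vector-Jacobian product gives
# x.grad = (2**i) * v.
assert torch.allclose(x.grad, (2 ** i) * v)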
# %%
2 ** 9  # scratch cell: e.g. the scale factor 2**i if the loop above ran 9 times
# %%