activations.go
package keras2go

import (
	"math"
)

/**
 * Linear activation function.
 * y = x
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_linear(x []float64) {
	// y = x is the identity, so the input already holds the output; nothing to do.
}

/**
 * Exponential activation function.
 * y = exp(x)
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_exponential(x []float64) {
	for idx, value := range x {
		x[idx] = math.Exp(value)
	}
}

/**
 * ReLU activation function.
 * y = max(x, 0)
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_relu(x []float64) {
	for idx, value := range x {
		if value <= 0 {
			x[idx] = 0
		}
	}
}

/**
 * Hard sigmoid activation function.
 * y = {1           if x >  2.5}
 *     {0.2*x + 0.5 if -2.5 < x < 2.5}
 *     {0           if x < -2.5}
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_hard_sigmoid(x []float64) {
	for idx, value := range x {
		if value <= -2.5 {
			x[idx] = 0
		} else if value >= 2.5 {
			x[idx] = 1
		} else {
			x[idx] = 0.2*value + 0.5
		}
	}
}

/**
 * Tanh activation function.
 * y = tanh(x)
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_tanh(x []float64) {
	for idx, value := range x {
		x[idx] = math.Tanh(value)
	}
}

/**
 * Sigmoid activation function.
 * y = 1/(1+exp(-x))
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_sigmoid(x []float64) {
	for idx, value := range x {
		x[idx] = 1 / (1 + math.Exp(-value))
	}
}

/**
 * Softmax activation function.
 * z[i] = exp(x[i] - max(x))
 * y = z/sum(z)
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_softmax(x []float64) {
	// Subtract the maximum before exponentiating for numerical stability.
	xmax := x[0]
	var sum float64
	for _, value := range x {
		if value > xmax {
			xmax = value
		}
	}
	for idx, value := range x {
		x[idx] = math.Exp(value - xmax)
	}
	for _, value := range x {
		sum += value
	}
	sum = 1 / sum
	for idx, value := range x {
		x[idx] = value * sum
	}
}

/**
 * Softplus activation function.
 * y = ln(1 + exp(x))
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_softplus(x []float64) {
	for idx, value := range x {
		x[idx] = math.Log1p(math.Exp(value))
	}
}

/**
 * Softsign activation function.
 * y = x/(1 + |x|)
 *
 * :param x: Array of input values. Gets overwritten by output.
 */
func K2c_softsign(x []float64) {
	for idx, value := range x {
		x[idx] = value / (1 + math.Abs(value))
	}
}

/**
 * Leaky version of a Rectified Linear Unit.
 * It allows a small gradient when the unit is not active:
 * y = {alpha*x if x < 0}
 *     {x       if x >= 0}
 *
 * :param x: Array of input values. Gets overwritten by output.
 * :param alpha: slope of negative portion of activation curve.
 */
func k2c_LeakyReLU(x []float64, alpha float64) {
	for idx, value := range x {
		if value < 0 {
			x[idx] = alpha * value
		}
	}
}

/**
 * Parametric Rectified Linear Unit.
 * It allows a small gradient when the unit is not active:
 * y = {alpha*x if x < 0}
 *     {x       if x >= 0}
 * where alpha is a learned array with the same shape as x.
 *
 * :param x: Array of input values. Gets overwritten by output.
 * :param alpha: slope of negative portion of activation curve for each unit.
 */
func k2c_PReLU(x []float64, alpha []float64) {
	for idx := range x {
		if x[idx] < 0 {
			x[idx] = x[idx] * alpha[idx]
		}
	}
}

/**
 * Exponential Linear Unit activation (ELU).
 * y = {alpha*(exp(x) - 1) if x < 0}
 *     {x                  if x >= 0}
 *
 * :param x: Array of input values. Gets overwritten by output.
 * :param alpha: slope of negative portion of activation curve.
 */
func k2c_ELU(x []float64, alpha float64) {
	for idx, value := range x {
		if value < 0 {
			x[idx] = alpha * math.Expm1(value)
		}
	}
}

/**
 * Thresholded Rectified Linear Unit.
 * y = {x if x >  theta}
 *     {0 if x <= theta}
 *
 * :param x: Array of input values. Gets overwritten by output.
 * :param theta: threshold for activation.
 */
func k2c_ThresholdedReLU(x []float64, theta float64) {
	for idx, value := range x {
		// Zero out values at or below the threshold, matching the piecewise
		// definition above (the original used a strict <, leaving x == theta unchanged).
		if value <= theta {
			x[idx] = 0
		}
	}
}

/**
 * Rectified Linear Unit activation function.
 * y = {max_value       if x >= max_value}
 *     {x               if theta <= x < max_value}
 *     {alpha*(x-theta) if x < theta}
 *
 * :param x: Array of input values. Gets overwritten by output.
 * :param max_value: maximum value for activated x.
 * :param alpha: slope of negative portion of activation curve.
 * :param theta: threshold for activation.
 */
func k2c_ReLU(x []float64, max_value float64, alpha float64, theta float64) {
	for idx, value := range x {
		if value >= max_value {
			x[idx] = max_value
		} else if value < theta {
			x[idx] = alpha * (value - theta)
		}
	}
}
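
// Illustrative usage sketch (not part of the original file): it shows how the
// in-place activations above might be called. The function name and the sample
// slices "logits" and "hidden" are hypothetical; each activation overwrites the
// slice passed to it.
func exampleActivationUsage() {
	// Convert raw scores into probabilities that sum to 1.
	logits := []float64{1.0, 2.0, 3.0}
	K2c_softmax(logits)

	// Clamp negative pre-activations to zero in place.
	hidden := []float64{-1.5, 0.0, 2.5}
	K2c_relu(hidden)
}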