@@ -19,7 +19,7 @@ def swish(x, inplace: bool = False):

class Swish(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(Swish, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
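For context on the change applied throughout this patch: in Python 3, the zero-argument form of super() inside a method resolves exactly as the explicit two-argument form does, so each deleted/added pair is behaviorally equivalent. A minimal sketch of the equivalence (the Act class here is illustrative, not from the patched file):

import torch.nn as nn

class Act(nn.Module):  # illustrative stand-in for the classes in this patch
    def __init__(self, inplace: bool = False):
        # Old spelling: super(Act, self).__init__()
        # The zero-argument form below is equivalent in Python 3 and
        # keeps working if the class is later renamed.
        super().__init__()
        self.inplace = inplace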
@@ -37,7 +37,7 @@ class Mish(nn.Module):
    """Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681
    """
    def __init__(self, inplace: bool = False):
-        super(Mish, self).__init__()
+        super().__init__()

    def forward(self, x):
        return mish(x)
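For reference, the mish function wrapped by this module computes x * tanh(softplus(x)), per the paper linked in the docstring. A minimal sketch of that definition (the repo's actual helper may differ in detail):

import torch.nn.functional as F

def mish(x, inplace: bool = False):
    # Mish(x) = x * tanh(softplus(x)); the inplace flag is accepted only
    # for interface consistency and is not used here.
    return x.mul(F.softplus(x).tanh())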
@@ -50,7 +50,7 @@ def sigmoid(x, inplace: bool = False):
# PyTorch has this, but not with a consistent inplace argument interface
class Sigmoid(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(Sigmoid, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
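The comment in the hunk above is the rationale for these thin wrappers: torch.nn.Sigmoid takes no inplace argument, so giving every activation a uniform (inplace: bool) constructor lets callers instantiate any of them the same way. A minimal sketch of that usage pattern (the act_layers registry and create_act helper are illustrative, not this repo's actual factory; Sigmoid refers to the wrapper class above):

import torch.nn as nn

# Illustrative registry; every entry accepts an `inplace` keyword,
# even when it is a no-op dummy, so construction is uniform.
act_layers = {
    'relu': nn.ReLU,      # real inplace support
    'sigmoid': Sigmoid,   # dummy inplace arg via the wrapper above
}

def create_act(name: str, inplace: bool = False) -> nn.Module:
    return act_layers[name](inplace=inplace)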
@@ -64,7 +64,7 @@ def tanh(x, inplace: bool = False):
# PyTorch has this, but not with a consistent inplace argument interface
class Tanh(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(Tanh, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
@@ -78,7 +78,7 @@ def hard_swish(x, inplace: bool = False):

class HardSwish(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(HardSwish, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
@@ -94,7 +94,7 @@ def hard_sigmoid(x, inplace: bool = False):

class HardSigmoid(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(HardSigmoid, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
@@ -114,7 +114,7 @@ def hard_mish(x, inplace: bool = False):

class HardMish(nn.Module):
    def __init__(self, inplace: bool = False):
-        super(HardMish, self).__init__()
+        super().__init__()
        self.inplace = inplace

    def forward(self, x):
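For reference, the hard activations wrapped in the three hunks above are typically the piecewise-linear approximations below. This is a sketch of the common definitions, not necessarily this repo's exact helpers (the hard_mish form in particular is an assumption based on common usage):

import torch.nn.functional as F

def hard_sigmoid(x, inplace: bool = False):
    # Piecewise-linear sigmoid approximation from MobileNetV3.
    return F.relu6(x + 3.0) / 6.0

def hard_swish(x, inplace: bool = False):
    # x * hard_sigmoid(x), also from MobileNetV3.
    return x * F.relu6(x + 3.0) / 6.0

def hard_mish(x, inplace: bool = False):
    # A commonly used piecewise approximation of mish.
    return 0.5 * x * (x + 2.0).clamp(min=0.0, max=2.0)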
@@ -125,7 +125,7 @@ class PReLU(nn.PReLU):
    """Applies PReLU (w/ dummy inplace arg)
    """
    def __init__(self, num_parameters: int = 1, init: float = 0.25, inplace: bool = False) -> None:
-        super(PReLU, self).__init__(num_parameters=num_parameters, init=init)
+        super().__init__(num_parameters=num_parameters, init=init)

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        return F.prelu(input, self.weight)
@@ -139,7 +139,7 @@ class GELU(nn.Module):
    """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
    """
    def __init__(self, inplace: bool = False):
-        super(GELU, self).__init__()
+        super().__init__()

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        return F.gelu(input)
@@ -153,7 +153,7 @@ class GELUTanh(nn.Module):
    """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
    """
    def __init__(self, inplace: bool = False):
-        super(GELUTanh, self).__init__()
+        super().__init__()

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        return F.gelu(input, approximate='tanh')
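F.gelu(input, approximate='tanh') in the hunk above computes PyTorch's documented tanh approximation of GELU; spelled out as a sketch:

import math
import torch

def gelu_tanh(x: torch.Tensor) -> torch.Tensor:
    # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
    return 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x.pow(3))))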
@@ -167,7 +167,7 @@ class QuickGELU(nn.Module):
    """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
    """
    def __init__(self, inplace: bool = False):
-        super(QuickGELU, self).__init__()
+        super().__init__()

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        return quick_gelu(input)
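The quick_gelu helper called in the final hunk is, by the usual definition popularized by OpenAI's CLIP code, a sigmoid-based GELU approximation. A minimal sketch, assuming the repo follows that convention:

import torch

def quick_gelu(x: torch.Tensor) -> torch.Tensor:
    # Fast approximation of GELU: x * sigmoid(1.702 * x)
    return x * torch.sigmoid(1.702 * x)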