Fixed relu to lrelu in fuse_conv_batchnorm()
AlexeyAB committed Mar 26, 2020
1 parent d6181c6 commit 2614a23
Showing 1 changed file with 8 additions and 2 deletions.
src/network.c
@@ -1092,6 +1092,12 @@ static float relu(float src) {
     return 0;
 }
 
+static float lrelu(float src) {
+    const float eps = 0.001;
+    if (src > eps) return src;
+    return eps;
+}
+
 void fuse_conv_batchnorm(network net)
 {
     int j;
@@ -1160,14 +1166,14 @@ void fuse_conv_batchnorm(network net)
                 for (i = 0; i < (l->n + 1); ++i) {
                     int w_index = chan + i * layer_step;
                     float w = l->weights[w_index];
-                    if (l->weights_normalizion == RELU_NORMALIZATION) sum += relu(w);
+                    if (l->weights_normalizion == RELU_NORMALIZATION) sum += lrelu(w);
                     else if (l->weights_normalizion == SOFTMAX_NORMALIZATION) sum += expf(w - max_val);
                 }
 
                 for (i = 0; i < (l->n + 1); ++i) {
                     int w_index = chan + i * layer_step;
                     float w = l->weights[w_index];
-                    if (l->weights_normalizion == RELU_NORMALIZATION) w = relu(w) / sum;
+                    if (l->weights_normalizion == RELU_NORMALIZATION) w = lrelu(w) / sum;
                     else if (l->weights_normalizion == SOFTMAX_NORMALIZATION) w = expf(w - max_val) / sum;
                     l->weights[w_index] = w;
                 }
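A note on the change (reasoning inferred from the hunk; the commit message does not spell it out): in the RELU_NORMALIZATION branch each shortcut weight is divided by sum, the accumulated rectified weights of its channel. The initialization of sum lies outside this hunk, but with the plain relu() a channel whose weights are all non-positive contributes nothing to it, so the divisor can be zero or vanishingly small and relu(w) / sum degenerates to NaN or a huge value. The new lrelu() clamps every term to at least eps = 0.001, so the n + 1 terms keep the denominator bounded away from zero. A minimal standalone sketch of the failure mode, using hypothetical weight values not taken from the repository:

#include <stdio.h>

/* Plain rectifier, as in the pre-commit code. */
static float relu(float src) {
    if (src > 0) return src;
    return 0;
}

/* The replacement from this commit: clamp to a small positive floor. */
static float lrelu(float src) {
    const float eps = 0.001;
    if (src > eps) return src;
    return eps;
}

int main(void) {
    /* Hypothetical shortcut weights for one channel, all non-positive. */
    float w[3] = { -0.5f, -0.1f, 0.0f };
    /* Sums start at 0 here for illustration; the real initialization of
       sum is outside the diff shown above. */
    float sum_relu = 0.0f, sum_lrelu = 0.0f;
    int i;
    for (i = 0; i < 3; ++i) {
        sum_relu += relu(w[i]);
        sum_lrelu += lrelu(w[i]);
    }
    /* Old path: sum_relu is 0, so the division yields NaN (0.0f / 0.0f). */
    printf("relu:  %f / %f = %f\n", relu(w[0]), sum_relu, relu(w[0]) / sum_relu);
    /* New path: sum_lrelu is 3 * eps = 0.003, so the result stays finite. */
    printf("lrelu: %f / %f = %f\n", lrelu(w[0]), sum_lrelu, lrelu(w[0]) / sum_lrelu);
    return 0;
}

Despite its name, this lrelu() is not the conventional leaky ReLU (which returns alpha * src for negative inputs); it is a clamp to a positive floor, effectively max(src, eps), which is exactly what guarantees a nonzero normalization sum.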
