uppercase instead of underscore
aamini committed Dec 9, 2020
1 parent 845cd5c commit f9cb50c
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion evidential_deep_learning/losses/continuous.py
@@ -62,7 +62,7 @@ def NIG_Reg(y, gamma, v, alpha, beta, omega=0.01, reduce=True, kl=False):

     return tf.reduce_mean(reg) if reduce else reg

-def evidential_regression(y_true, evidential_output, coeff=1.0):
+def EvidentialRegression(y_true, evidential_output, coeff=1.0):
     gamma, v, alpha, beta = tf.split(evidential_output, 4, axis=-1)
     loss_nll = NIG_NLL(y_true, gamma, v, alpha, beta)
     loss_reg = NIG_Reg(y_true, gamma, v, alpha, beta)
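(The hunk above is truncated before the function's return. A minimal sketch of how the renamed function presumably reads after this commit, in the context of the same module; the final return line, weighting the regularizer by coeff, is an assumption and is not part of the shown diff.)

def EvidentialRegression(y_true, evidential_output, coeff=1.0):
    # Split the 4-channel evidential output into the NIG parameters.
    gamma, v, alpha, beta = tf.split(evidential_output, 4, axis=-1)
    loss_nll = NIG_NLL(y_true, gamma, v, alpha, beta)
    loss_reg = NIG_Reg(y_true, gamma, v, alpha, beta)
    # Assumed: the total loss adds the coeff-weighted regularizer to the NLL.
    return loss_nll + coeff * loss_reg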
6 changes: 3 additions & 3 deletions hello_world.py
@@ -19,13 +19,13 @@ def main():
     ])

     # Custom loss function to handle the custom regularizer coefficient
-    def evidential_regression_loss(true, pred):
-        return edl.losses.evidential_regression(true, pred, coeff=1e-2)
+    def EvidentialRegressionLoss(true, pred):
+        return edl.losses.EvidentialRegression(true, pred, coeff=1e-2)

     # Compile and fit the model!
     model.compile(
         optimizer=tf.keras.optimizers.Adam(5e-4),
-        loss=evidential_regression_loss)
+        loss=EvidentialRegressionLoss)
     model.fit(x_train, y_train, batch_size=100, epochs=500)

     # Predict and plot using the trained model
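(The hunk ends just before the prediction step. A brief sketch of how the evidential output could be unpacked after training, using the renamed 4-channel output; x_test and the epistemic-uncertainty formula from the evidential regression paper are assumptions, not part of this diff.)

    # Assumed continuation: predict, then derive mean and uncertainty from the NIG parameters.
    y_pred = model(x_test)
    gamma, v, alpha, beta = tf.split(y_pred, 4, axis=-1)
    mu = gamma                            # predicted mean
    epistemic = beta / (v * (alpha - 1))  # epistemic (model) uncertainty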
