From f9cb50cfc30cbe4cf478ea2471137a9acef412ce Mon Sep 17 00:00:00 2001
From: Alexander Amini
Date: Wed, 9 Dec 2020 02:36:51 -0500
Subject: [PATCH] uppercase instead of underscore

---
 evidential_deep_learning/losses/continuous.py | 2 +-
 hello_world.py                                | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/evidential_deep_learning/losses/continuous.py b/evidential_deep_learning/losses/continuous.py
index a975920..f9cc4ce 100644
--- a/evidential_deep_learning/losses/continuous.py
+++ b/evidential_deep_learning/losses/continuous.py
@@ -62,7 +62,7 @@ def NIG_Reg(y, gamma, v, alpha, beta, omega=0.01, reduce=True, kl=False):
     return tf.reduce_mean(reg) if reduce else reg
 
 
-def evidential_regression(y_true, evidential_output, coeff=1.0):
+def EvidentialRegression(y_true, evidential_output, coeff=1.0):
     gamma, v, alpha, beta = tf.split(evidential_output, 4, axis=-1)
     loss_nll = NIG_NLL(y_true, gamma, v, alpha, beta)
     loss_reg = NIG_Reg(y_true, gamma, v, alpha, beta)
diff --git a/hello_world.py b/hello_world.py
index 36b31ec..9181bf6 100644
--- a/hello_world.py
+++ b/hello_world.py
@@ -19,13 +19,13 @@ def main():
     ])
 
     # Custom loss function to handle the custom regularizer coefficient
-    def evidential_regression_loss(true, pred):
-        return edl.losses.evidential_regression(true, pred, coeff=1e-2)
+    def EvidentialRegressionLoss(true, pred):
+        return edl.losses.EvidentialRegression(true, pred, coeff=1e-2)
 
     # Compile and fit the model!
     model.compile(
         optimizer=tf.keras.optimizers.Adam(5e-4),
-        loss=evidential_regression_loss)
+        loss=EvidentialRegressionLoss)
     model.fit(x_train, y_train, batch_size=100, epochs=500)
 
     # Predict and plot using the trained model
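---
Usage note (not part of the patch): below is a minimal end-to-end sketch of
training with the renamed loss. It assumes the package imports as `edl` and
that `edl.layers.DenseNormalGamma` provides the 4-parameter evidential output
head used by hello_world.py; the model architecture and toy data here are
placeholders, not taken from the patch.

    import numpy as np
    import tensorflow as tf
    import evidential_deep_learning as edl

    # Toy 1-D regression data (placeholder, not from the patch).
    x_train = np.linspace(-3.0, 3.0, 1000, dtype=np.float32).reshape(-1, 1)
    y_train = x_train ** 3 / 10.0

    # DenseNormalGamma(1) emits 4 values (gamma, v, alpha, beta) per target,
    # matching the tf.split(evidential_output, 4, axis=-1) inside the loss.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(64, activation="relu"),
        tf.keras.layers.Dense(64, activation="relu"),
        edl.layers.DenseNormalGamma(1),
    ])

    # Keras losses only receive (y_true, y_pred), so the extra `coeff`
    # argument is bound through a wrapper, as in hello_world.py above.
    def EvidentialRegressionLoss(true, pred):
        return edl.losses.EvidentialRegression(true, pred, coeff=1e-2)

    model.compile(
        optimizer=tf.keras.optimizers.Adam(5e-4),
        loss=EvidentialRegressionLoss)
    model.fit(x_train, y_train, batch_size=100, epochs=500)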