From b006d22a6b9050b6d43346e5498d2159ad66fc3a Mon Sep 17 00:00:00 2001
From: Kanav
Date: Mon, 26 Dec 2016 18:56:39 +0530
Subject: [PATCH 1/2] Corrected log-likelihood loss function

---
 neuralnet.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/neuralnet.py b/neuralnet.py
index f7d86d1..330b55f 100644
--- a/neuralnet.py
+++ b/neuralnet.py
@@ -77,7 +77,7 @@ def cross_entropy_loss(a, y):
     return np.sum(np.nan_to_num(-y*np.log(a)-(1-y)*np.log(1-a)))
 
 def log_likelihood_loss(a, y):
-    return -np.dot(y, softmax(a).transpose())
+    return -np.log(np.dot(y, softmax(a).transpose()))
 
 def delta(a, y):
     """ delta for both activations works out to be the same"""
@@ -97,4 +97,4 @@ def derivative(z, fn):
         f = sigmoid
     elif fn == SOFTMAX:
         f = softmax
-    return f(z)*(1-f(z))
\ No newline at end of file
+    return f(z)*(1-f(z))

From 353e52f5d91f976b7213a991abceaab2c086b6e0 Mon Sep 17 00:00:00 2001
From: Kanav
Date: Mon, 26 Dec 2016 20:10:04 +0530
Subject: [PATCH 2/2] Added nan_to_num in log likelihood loss function

---
 neuralnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/neuralnet.py b/neuralnet.py
index 330b55f..f222f11 100644
--- a/neuralnet.py
+++ b/neuralnet.py
@@ -77,7 +77,7 @@ def cross_entropy_loss(a, y):
     return np.sum(np.nan_to_num(-y*np.log(a)-(1-y)*np.log(1-a)))
 
 def log_likelihood_loss(a, y):
-    return -np.log(np.dot(y, softmax(a).transpose()))
+    return -np.nan_to_num(np.log(np.dot(y, softmax(a).transpose())))
 
 def delta(a, y):
     """ delta for both activations works out to be the same"""
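
For context only (not part of the patches above): a minimal, self-contained sketch of how log_likelihood_loss behaves after PATCH 2/2, assuming a one-hot target y and a stand-in softmax helper; the actual neuralnet.py may define softmax differently.

    # Sketch only: illustrates the loss as patched in PATCH 2/2.
    import numpy as np

    def softmax(z):
        # numerically stable softmax over a 1-D activation vector (assumption,
        # not necessarily the repo's own implementation)
        e = np.exp(z - np.max(z))
        return e / np.sum(e)

    def log_likelihood_loss(a, y):
        # negative log-probability of the true class;
        # nan_to_num keeps a log(0) from propagating -inf/nan
        return -np.nan_to_num(np.log(np.dot(y, softmax(a).transpose())))

    y = np.array([0, 1, 0])           # one-hot target
    a = np.array([0.1, 2.0, -1.0])    # raw output-layer activations
    print(log_likelihood_loss(a, y))  # ~0.18 for this input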