diff --git a/neuralnet.py b/neuralnet.py
index f7d86d1..f222f11 100644
--- a/neuralnet.py
+++ b/neuralnet.py
@@ -77,7 +77,7 @@ def cross_entropy_loss(a, y):
     return np.sum(np.nan_to_num(-y*np.log(a)-(1-y)*np.log(1-a)))
 
 def log_likelihood_loss(a, y):
-    return -np.dot(y, softmax(a).transpose())
+    return -np.nan_to_num(np.log(np.dot(y, softmax(a).transpose())))
 
 def delta(a, y):
     """ delta for both activations works out to be the same"""
@@ -97,4 +97,4 @@ def derivative(z, fn):
         f = sigmoid
     elif fn == SOFTMAX:
         f = softmax
-    return f(z)*(1-f(z))
\ No newline at end of file
+    return f(z)*(1-f(z))
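
A minimal standalone sketch of the patched loss (not part of the diff), assuming a one-hot target and a 1-D activation vector; softmax is re-declared here only so the check runs on its own and may differ from the repo's version:

    import numpy as np

    def softmax(z):
        # Shift by the max for numerical stability before exponentiating.
        e = np.exp(z - np.max(z))
        return e / e.sum()

    def log_likelihood_loss(a, y):
        # Matches the patched line: negative log of the probability
        # assigned to the true class; nan_to_num guards against log(0).
        return -np.nan_to_num(np.log(np.dot(y, softmax(a).transpose())))

    a = np.array([2.0, 1.0, 0.1])     # raw activations
    y = np.array([1.0, 0.0, 0.0])     # one-hot target
    print(log_likelihood_loss(a, y))  # ~0.417, i.e. -log(softmax(a)[0])

The old expression returned -y·softmax(a), a probability rather than a log-probability; the patched line takes the log before negating, which is what a negative log-likelihood loss requires.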