class SparseCategoricalCrossEntropy():
    """Sparse categorical cross-entropy loss for multi-class classification.

    Targets are supplied as integer class indices (sparse labels), not as
    one-hot vectors; the one-hot encoding is implicit in the indexing.
    """

    @staticmethod
    def loss(X, Y, W, n):
        """Return the total (summed) cross-entropy loss over all samples.

        Parameters
        ----------
        X : ndarray of shape (M, d)
            Input feature matrix.
        Y : array-like of integer class labels
            Any shape that flattens to length M (the original code
            iterated a 2-D, e.g. (M, 1), array of labels).
        W : ndarray of shape (d, n)
            Weight matrix mapping features to per-class scores.
        n : int
            Total number of classes. Kept for interface compatibility;
            the vectorized implementation infers the class count from W.

        Returns
        -------
        numpy scalar
            sum_i -log(softmax(X @ W)[i, Y[i]])
        """
        labels = np.asarray(Y, dtype=int).reshape(-1)
        scores = np.dot(X, W)
        # Numerically stable softmax over each row: shift by the row max
        # before exponentiating so large scores cannot overflow exp().
        # (Standard softmax — assumed equivalent to MLlib's softmax,
        # which the original applied row-by-row in a Python loop.)
        scores = scores - np.max(scores, axis=1, keepdims=True)
        exp_scores = np.exp(scores)
        probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
        # Pick each sample's probability of its true class via integer
        # indexing and sum the negative log-likelihoods. This replaces
        # the original O(M*n) Python loops that built explicit one-hot
        # rows and scanned them for the 1 entry.
        return -np.sum(np.log(probs[np.arange(labels.size), labels]))