In [1]:
import numpy as np
import matplotlib.pyplot as plt
import pyprind
%matplotlib inline

In [136]:
class SVMClassifier:
    """Multi-class linear SVM trained with mini-batch gradient descent.

    The loss is the multi-class hinge loss with margin 1,
    sum_j max(0, s_j - s_y + 1) for j != y, averaged over samples.
    `forwardPropagation` caches the intermediate values f1..f8 that
    `backPropagation` reads, so a backward pass must always be preceded
    by a forward pass on the same (X, vectY).
    """

    def __init__(self, epochs=300, learningRate=0.00005, batchSize=250):
        # Hyperparameters.
        self.epochs = epochs
        self.learningRate = learningRate
        self.batchSize = batchSize
        # Loss histories are per-instance; the original class-level
        # `losses = []` would have been shared by every instance.
        self.losses = []
        self.batchLosses = []

    def forwardPropagation(self, W, X, vectY):
        """Compute the average multi-class hinge loss.

        Parameters
        ----------
        W : (C, D) array — one weight row per class.
        X : (N, D) array — N samples with D features.
        vectY : (C, N) array — one-hot labels, vectY[c, i] == 1 iff y_i == c.

        Returns the scalar loss and caches f1..f8 for backPropagation.
        """
        # 1: class scores, shape (C, N).
        self.f1 = W.dot(X.T)
        # 2-3: pick out the correct-class score per sample, shape (N,).
        self.f2 = self.f1 * vectY
        self.f3 = np.sum(self.f2, axis=0)
        # 4: margins s_j - s_y + 1, broadcast over the class axis.
        self.f4 = self.f1 - self.f3 + 1
        # 5: zero the correct-class entries (their margin is always 1).
        self.f5 = self.f4 * (1 - vectY)
        # 6-7: hinge, then total over all classes and samples.
        self.f6 = np.maximum(self.f5, 0)
        self.f7 = np.sum(self.f6)
        # 8: average over samples. f7 is already a scalar, so f8 IS the
        # loss — the original `np.sum(self.f8)` was a no-op.
        self.f8 = self.f7 / len(X)
        return self.f8

    def backPropagation(self, X, vectY):
        """Gradient of the loss w.r.t. W via reverse-mode differentiation.

        Requires a preceding forwardPropagation on the same (X, vectY):
        reads the cached f3, f5 and f6.  Returns dW with W's shape.
        """
        # 8: d loss / d f7 — the 1/N averaging factor.
        df7 = 1 / len(X)
        # 7: a sum broadcasts its upstream gradient to every entry.
        df6 = np.ones(shape=self.f6.shape) * df7
        # 6: max(., 0) passes gradient only where the hinge is active.
        df5 = np.array(self.f5 > 0, dtype=np.float32) * df6
        # 5: the (1 - vectY) mask.
        df4 = df5 * (1 - vectY)
        # 4 -> 3: f3 was subtracted from every class row, so its gradient
        # is minus the column sums.
        df3 = -1 * np.ones(shape=self.f3.shape) * np.sum(df4, axis=0)
        # 3+2 -> 1: build df1 out-of-place; the original `df1 = df4;
        # df1 += ...` silently mutated df4 through aliasing.
        df1 = df4 + df3 * vectY
        # 1: chain into the weights.
        dW = df1.dot(X)
        return dW

    def _oneHot(self, labels, n):
        """(C, n) one-hot encoding of integer labels (vectorized)."""
        vect = np.zeros(shape=(self.labelsCardinality, n))
        vect[labels, np.arange(n)] = 1
        return vect

    def fit(self, X, y):
        """Train with mini-batch gradient descent.

        X : (N, D) samples; y : (N,) integer labels in [0, C).
        Records the full-dataset loss once per epoch in self.losses and
        every mini-batch loss in self.batchLosses.
        """
        self.losses = []
        self.batchLosses = []
        self.labelsCardinality = len(np.unique(y))
        self.W = np.random.randn(self.labelsCardinality, X.shape[1])
        vectY = self._oneHot(y, X.shape[0])

        # Ceil division: the original float `epochs*len(X)/batchSize`
        # under-counted batches when len(X) % batchSize != 0.
        batchesPerEpoch = (len(X) + self.batchSize - 1) // self.batchSize
        bar = pyprind.ProgBar(self.epochs * batchesPerEpoch, bar_char='█', width=100)
        for epoch in range(self.epochs):
            # Full-dataset loss, measured before this epoch's updates.
            self.losses.append(self.forwardPropagation(self.W, X, vectY))
            indexes = np.arange(len(X))
            np.random.shuffle(indexes)
            trainX = X[indexes]
            trainY = y[indexes]
            # NOTE: the inner index no longer shadows the epoch counter
            # (the original reused `i` for both loops).
            for start in range(0, len(X), self.batchSize):
                batchX = trainX[start:start + self.batchSize]
                batchY = trainY[start:start + self.batchSize]
                batchVectY = self._oneHot(batchY, batchX.shape[0])
                self.batchLosses.append(self.forwardPropagation(self.W, batchX, batchVectY))
                grad = self.backPropagation(batchX, batchVectY)
                self.W -= self.learningRate * grad
                bar.update()

    def predict(self, X):
        """Predicted class index per sample: argmax of the class scores."""
        return np.argmax(self.W.dot(X.T), axis=0)

In [4]:
# Toy 2-class dataset: four samples with three features each,
# centred on the global mean of all entries.
X = np.array([
    [0.1, 0.2, 0.3],
    [0.5, 0.7, 0.8],
    [0.15, 0.18, 0.34],
    [0.57, 0.65, 0.89],
])
X = X - np.mean(X)
y = np.array([0, 1, 0, 1])

# One-hot label matrix, shape (classes, samples), built with fancy
# indexing instead of an explicit loop.
vectY = np.zeros(shape=(2, X.shape[0]))
vectY[y, np.arange(X.shape[0])] = 1

# Random initial weights, one row per class.
W = np.random.randn(np.unique(y).shape[0], X.shape[1])

In [116]:
# Train on the toy dataset; the learning rate is much larger than the
# class default so the loss visibly decreases over 200 epochs.
svm = SVMClassifier(epochs=200, learningRate=0.1)
svm.fit(X, y)
# Per-epoch losses as the cell output — expected to decrease toward 0.
svm.losses


0%  100%
[███] | ETA: 23:59:59
Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00

Total time elapsed: 00:00:00
Out[116]:
[0.088167239422726612,
 0.086981770672726638,
 0.085796301922726637,
 0.084610833172726635,
 0.083425364422726633,
 0.082239895672726632,
 0.08105442692272663,
 0.079868958172726628,
 0.078683489422726627,
 0.077498020672726597,
 0.076312551922726596,
 0.075127083172726594,
 0.073941614422726593,
 0.072756145672726591,
 0.071570676922726589,
 0.070385208172726588,
 0.069199739422726586,
 0.068014270672726584,
 0.066828801922726583,
 0.065643333172726581,
 0.064457864422726552,
 0.06327239567272655,
 0.062086926922726549,
 0.060901458172726547,
 0.059715989422726545,
 0.058530520672726544,
 0.05734505192272657,
 0.05615958317272654,
 0.054974114422726539,
 0.053788645672726537,
 0.052603176922726536,
 0.051417708172726506,
 0.050232239422726505,
 0.049046770672726503,
 0.047861301922726501,
 0.0466758331727265,
 0.045490364422726498,
 0.044304895672726496,
 0.043119426922726495,
 0.041933958172726493,
 0.040748489422726464,
 0.03956302067272649,
 0.038377551922726461,
 0.037192083172726459,
 0.036006614422726457,
 0.034821145672726428,
 0.033635676922726426,
 0.032450208172726452,
 0.031264739422726451,
 0.030079270672726421,
 0.02889380192272642,
 0.027708333172726418,
 0.026522864422726417,
 0.025337395672726415,
 0.024151926922726386,
 0.022966458172726384,
 0.021780989422726382,
 0.020595520672726381,
 0.019410051922726379,
 0.018224583172726377,
 0.017039114422726376,
 0.015853645672726374,
 0.014668176922726373,
 0.013482708172726343,
 0.012297239422726342,
 0.01111177067272634,
 0.0099263019227263383,
 0.0087408331727263366,
 0.0075553644227263073,
 0.0063698956727263334,
 0.0051844269227263318,
 0.0039989581727263301,
 0.0028134894227263008,
 0.0016280206727262991,
 0.00044255192272629751,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0,
 0.0]

In [139]:
# Numerical gradient check for a single weight, W[1, 1]: finite-difference
# approximation of d(loss)/dW[1,1], to be compared against the analytic
# gradient from backPropagation.
svm = SVMClassifier(epochs=20, learningRate=0.001)
dw = 0.001
perturbation = np.zeros_like(W)
perturbation[1, 1] = dw
(svm.forwardPropagation(W + perturbation, X, vectY) - svm.forwardPropagation(W, X, vectY)) / dw


Out[139]:
-0.24250000000003435

In [138]:
# Analytic gradient: backPropagation reads the intermediates cached by
# forwardPropagation, so the forward pass must run first on the same data.
# The entry at [1, 1] should match the finite-difference estimate
# (-0.2425 in the recorded run).
svm.forwardPropagation(W, X, vectY)
svm.backPropagation(X, vectY)


Out[138]:
array([[ 0.205 ,  0.2425,  0.2625],
       [-0.205 , -0.2425, -0.2625]])

In [113]:
# Inspect cached intermediates from the last forward pass: f4 holds the
# raw margins, f5 the margins with the correct-class entries zeroed.
svm.f5, svm.f4


Out[113]:
(array([[ 0.        ,  0.56384532,  0.        ,  0.64626926],
        [ 0.39269223,  0.        ,  0.36900049,  0.        ]]),
 array([[ 1.        ,  0.56384532,  1.        ,  0.64626926],
        [ 0.39269223,  1.        ,  0.36900049,  1.        ]]))