In [1]:
import sys
import dlib
import openface
from skimage import io
import numpy as np

In [2]:
#Reading the image data into numpy
#Reading the image data into numpy
def rgb2gray(rgb):
    """Convert an (H, W, C) RGB or RGBA image to grayscale.

    Uses the ITU-R 601 luma weights (0.299 R + 0.587 G + 0.114 B).
    Only the first three channels are used, so RGBA input works too
    (the original `[:,:,:]` slice broke on 4-channel images).

    Returns a float (H, W) array.
    """
    return np.dot(rgb[:, :, :3], [0.299, 0.587, 0.114])

In [7]:
def load_data():
    """Load train/test CSVs of (image_path, label) rows and extract
    facial-landmark features for every image.

    Images where get_landmarks() finds zero or multiple faces are skipped.

    Returns:
        X_tr, X_ts : (n, 136) arrays of centred landmark coordinates
        Y_tr, Y_ts : label arrays aligned row-for-row with X_tr / X_ts
    """
    import pandas as pd
    import numpy as np
    from PIL import Image
    from termcolor import colored

    # NOTE(review): hard-coded absolute local paths — consider parameterizing.
    train = pd.read_csv('/home/mckc/All Data/train.csv')
    test = pd.read_csv('/home/mckc/All Data/test.csv')
    print('the training data shape is %s' % (train.shape,))
    print('the test data shape is %s' % (test.shape,))

    def extract(frame):
        # Accumulate rows in Python lists and stack once at the end:
        # repeated np.vstack inside the loop copies the whole array each
        # iteration (O(n^2)); this also removes the dummy zero first row.
        rows, labels = [], []
        n = frame.shape[0]
        for i in range(n):
            image = np.array(Image.open(frame.values[i, 0]))
            landmarks = get_landmarks(image)
            if landmarks is not None:
                rows.append(landmarks.reshape(136))
                labels.append(frame.values[i, 1])
            if i % 50 == 0:
                print(colored((float(i) / n * 100, ' Percentage complete'), 'green'))
        return np.array(rows), np.array(labels)

    X_tr, Y_tr = extract(train)
    X_ts, Y_ts = extract(test)

    print('the training file shape %s %s' % (X_tr.shape, Y_tr.shape))
    print('the testing file shape %s %s' % (X_ts.shape, Y_ts.shape))

    return X_tr, X_ts, Y_tr, Y_ts

In [4]:
def simulate(X, Y):
    """Augment a stack of 96x96 images 14x each: the original, six
    rotations (+-5, +-10, +-15 degrees), the horizontal mirror, and six
    rotations of the mirror. Labels are replicated to stay aligned.

    Args:
        X: (n, 96, 96) image array.
        Y: length-n label sequence.
    Returns:
        (complete, Y_complete): a (14*n, 96, 96) array and matching labels.
    """
    import scipy.ndimage

    # Angle orders preserve the original augmentation row order exactly.
    base_angles = (5, 10, 15, -5, -15, -10)
    mirror_angles = (5, 10, 15, -5, -10, -15)

    frames = []        # collect then stack once; vstack-in-loop is O(n^2)
    Y_complete = []
    n = len(X)
    for i in range(n):
        base = X[i, :, :]
        mirrored = np.fliplr(base)
        variants = [base]
        variants += [scipy.ndimage.rotate(base, angle=a, reshape=False, cval=1)
                     for a in base_angles]
        variants += [scipy.ndimage.rotate(mirrored, angle=a, reshape=False, cval=1)
                     for a in mirror_angles]
        variants.append(mirrored)
        frames.extend(v.reshape(1, 96, 96) for v in variants)
        Y_complete = np.append(Y_complete, [Y[i]] * 14)
        if i % 10 == 0:
            # Plain print: `colored` was a NameError here on a fresh kernel
            # (it was only imported inside load_data's scope).
            print('%s percent complete' % (float(i) / n * 100))
    complete = np.vstack(frames)
    return complete, Y_complete

In [136]:
def get_landmarks(im):
    """Detect a single face in `im` and return its 68 dlib landmarks,
    centred on their centroid, flattened to a (1, 136) array.

    Returns None (printing the reason) when zero or multiple faces are found.
    """
    predictor_model = "/home/mckc/Downloads/shape_predictor_68_face_landmarks.dat"

    # Building the detector and loading the large predictor model from disk
    # is expensive; the original did this on EVERY call (once per image in
    # load_data). Cache both on the function object instead.
    if not hasattr(get_landmarks, '_detector'):
        get_landmarks._detector = dlib.get_frontal_face_detector()
        get_landmarks._predictor = dlib.shape_predictor(predictor_model)
    detector = get_landmarks._detector
    predictor = get_landmarks._predictor

    rects = detector(im, 1)

    if len(rects) > 1:
        print('TooManyFaces')
        return None
    if len(rects) == 0:
        print('NoFaces')
        return None
    coords = np.array([[p.x, p.y] for p in predictor(im, rects[0]).parts()])
    # Centre on the centroid so the features are translation-invariant.
    centroid = coords.mean(axis=0)
    return (coords - centroid).reshape(1, 136)

In [137]:
# Build landmark feature matrices and labels for the train and test splits.
X_tr,X_tst,Y_tr,Y_tst = load_data()


the training data shape is  (586, 2)
the test data shape is  (195, 2)
(0.0, ' Percentage complete')
NoFaces
(8.532423208191126, ' Percentage complete')
(17.064846416382252, ' Percentage complete')
(25.597269624573375, ' Percentage complete')
(34.129692832764505, ' Percentage complete')
(42.66211604095563, ' Percentage complete')
(51.19453924914675, ' Percentage complete')
NoFaces
(59.72696245733788, ' Percentage complete')
(68.25938566552901, ' Percentage complete')
(76.79180887372013, ' Percentage complete')
(85.32423208191126, ' Percentage complete')
(93.85665529010238, ' Percentage complete')
(0.0, ' Percentage complete')
(25.64102564102564, ' Percentage complete')
(51.28205128205128, ' Percentage complete')
(76.92307692307693, ' Percentage complete')
the training file shape (584, 136) (584,)
the testing file shape (195, 136) (195,)

In [138]:
def standard(X):
    """Scale X by centring on its mean and dividing by its raw maximum.

    Note: this is mean-centring plus max-scaling, not z-scoring — the
    divisor is the maximum of the ORIGINAL values, taken before centring.
    """
    centred = X - X.mean()
    return centred / X.max()

# NOTE(review): each split is scaled with its OWN mean/max — conventionally
# the test set should reuse the TRAINING statistics; confirm this is intended.
X_tst = standard(X_tst)
X_tr = standard(X_tr)

In [139]:
# Sanity check: features and labels are row-aligned.
X_tr.shape,Y_tr.shape


Out[139]:
((584, 136), (584,))

In [140]:
# Inspect one scaled 136-d landmark feature vector.
X_tr[1,:]


Out[140]:
array([-0.71612952, -0.35313884, -0.70228094, -0.15752762, -0.66939056,
        0.03115931, -0.62611374,  0.21292195, -0.56206405,  0.38602922,
       -0.44954432,  0.52970826, -0.30759635,  0.63703477, -0.13102693,
        0.7114709 ,  0.06285322,  0.72185734,  0.25154015,  0.68723588,
        0.40560562,  0.59548903,  0.5215875 ,  0.46912072,  0.60121684,
        0.31332417,  0.63237615,  0.13675475,  0.64276259, -0.04500789,
        0.64622473, -0.23196375,  0.63237615, -0.41718853, -0.63996232,
       -0.4829693 , -0.55513976, -0.58164045, -0.41838501, -0.59029581,
       -0.28336134, -0.55567435, -0.15179981, -0.5020111 ,  0.08189502,
       -0.49681788,  0.20307011, -0.54875006,  0.32943842, -0.59029581,
        0.45926888, -0.58856474,  0.54409144, -0.50547324, -0.02370042,
       -0.39295352, -0.00812077, -0.27177842,  0.00745889, -0.15579655,
        0.02476961, -0.03289038, -0.14487552,  0.0761672 , -0.0635151 ,
        0.09347793,  0.02130747,  0.11251973,  0.09401252,  0.08482257,
        0.16498651,  0.05366326, -0.50320758, -0.36352528, -0.42011608,
       -0.41199532, -0.32317601, -0.40507102, -0.22450486, -0.34448348,
       -0.32663815, -0.33409704, -0.42704037, -0.33582811,  0.15806222,
       -0.35660099,  0.24461585, -0.41545746,  0.33982486, -0.41891961,
        0.42637849, -0.37910493,  0.34848022, -0.35486991,  0.25154015,
       -0.35140777, -0.26605061,  0.32544168, -0.16564839,  0.25619877,
       -0.05832188,  0.21811517,  0.02650069,  0.23542589,  0.10613003,
        0.2094598 ,  0.20826333,  0.23196375,  0.30693447,  0.28389593,
        0.22211191,  0.36179421,  0.12863398,  0.40160888,  0.04208034,
        0.41372639, -0.04620437,  0.41545746, -0.15526195,  0.39122244,
       -0.22104272,  0.32544168, -0.0565908 ,  0.29601344,  0.02996283,
        0.29601344,  0.11132325,  0.28216486,  0.26538873,  0.28735808,
        0.11824754,  0.28735808,  0.03515605,  0.30120666, -0.04966651,
        0.29947559])

In [141]:
# Encode string labels as integers; `map` holds the sorted unique label names
# used to decode predictions later.
# NOTE(review): `map` shadows the Python builtin — later cells index into it,
# so renaming (e.g. to label_names) must be done across all of them together.
map, Y_number = np.unique(Y_tr, return_inverse=True)
# NOTE(review): this encodes test labels via the TEST set's own unique
# ordering, which can disagree with the train encoding; it appears unused
# below, but verify before relying on it.
Y_test_number = np.unique(Y_tst, return_inverse=True)[1]

In [142]:
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score

# Multinomial logistic regression on the 136-d landmark features.
clf = LogisticRegression(verbose=0,n_jobs=-1,multi_class='multinomial',solver='lbfgs',max_iter=500,warm_start=True)
clf.fit(X_tr,Y_number)
Y_logictic= clf.predict(X_tst)
# Decode integer predictions back to label names via the np.unique mapping.
Y_log_vales = map[Y_logictic]

print 'Accuracy of the model is ',accuracy_score(Y_tst,Y_log_vales)
confusion_matrix(Y_tst,Y_log_vales)


Accuracy of the model is  0.887179487179
Out[142]:
array([[24,  0,  0,  0,  0,  0,  1],
       [ 0, 18,  0,  0,  1,  2,  4],
       [ 0,  0, 20,  4,  0,  1,  1],
       [ 0,  0,  0, 25,  0,  0,  0],
       [ 0,  0,  0,  0, 26,  0,  1],
       [ 0,  0,  1,  0,  0, 20,  4],
       [ 0,  0,  1,  0,  1,  0, 40]])

In [145]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score

# 500-tree random forest; oob_score gives a free out-of-bag accuracy estimate,
# max_features=20 (of 136) considered per split.
recognizer = RandomForestClassifier(500,verbose=0,oob_score=True,n_jobs=-1,max_features=20)
recognizer.fit(X_tr,Y_number)
Y_rf= recognizer.predict(X_tst)
# Decode integer predictions back to label names.
Y_rf_vales = map[Y_rf]

print 'Accuracy of the model is ',accuracy_score(Y_tst,Y_rf_vales)
confusion_matrix(Y_tst,Y_rf_vales)


Accuracy of the model is  0.938461538462
Out[145]:
array([[25,  0,  0,  0,  0,  0,  0],
       [ 0, 23,  0,  0,  1,  1,  0],
       [ 0,  0, 24,  2,  0,  0,  0],
       [ 0,  0,  0, 25,  0,  0,  0],
       [ 0,  0,  0,  0, 24,  0,  3],
       [ 0,  0,  0,  0,  0, 22,  3],
       [ 0,  0,  1,  0,  1,  0, 40]])

In [143]:
from keras.models import Sequential
from keras.layers import Dense, Activation,LSTM
from keras import backend as K
from keras.optimizers import Adam,SGD
from keras.utils import np_utils
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=10)
from keras.layers import Merge  # NOTE(review): Merge is Keras 1.x API, removed in Keras 2

# Two-branch model over the SAME 136-d input: a wide ReLU branch and a
# small sigmoid branch, concatenated before the 7-way softmax head.
left_branch = Sequential()
left_branch.add(Dense(1000, input_dim=136,activation='relu'))

right_branch = Sequential()
right_branch.add(Dense(50, input_dim=136,activation='sigmoid'))

merged = Merge([left_branch, right_branch], mode='concat')

final_model = Sequential()
final_model.add(merged)
final_model.add(Dense(7,activation='softmax'))

# NOTE(review): `lr` is not a compile() argument, so rmsprop's default learning
# rate is used; pass a configured optimizer instance to control the rate.
final_model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'],lr=0.0001)
# NOTE(review): Y_Keras is defined in a LATER cell (In[116]) — this cell only
# works when run out of order; reorder so Restart & Run All succeeds.
final_model.fit([X_tr,X_tr], Y_Keras,nb_epoch=100, batch_size=1,verbose=1,
          validation_split=0.2, callbacks=[early_stopping])
# Both branches are fed the same test feature matrix.
y_keras = map[final_model.predict_classes([X_tst,X_tst])]

print '/n Accuracy of the model is ',accuracy_score(Y_tst,y_keras)  # NOTE(review): '/n' looks like a typo for '\n'
confusion_matrix(Y_tst,y_keras)


Train on 467 samples, validate on 117 samples
Epoch 1/100
467/467 [==============================] - 1s - loss: 1.9404 - acc: 0.2227 - val_loss: 1.6014 - val_acc: 0.4701
Epoch 2/100
467/467 [==============================] - 1s - loss: 1.6298 - acc: 0.3640 - val_loss: 1.4222 - val_acc: 0.3846
Epoch 3/100
467/467 [==============================] - 1s - loss: 1.4212 - acc: 0.4604 - val_loss: 1.2187 - val_acc: 0.4188
Epoch 4/100
467/467 [==============================] - 1s - loss: 1.3118 - acc: 0.5139 - val_loss: 1.1476 - val_acc: 0.4701
Epoch 5/100
467/467 [==============================] - 1s - loss: 1.2386 - acc: 0.5418 - val_loss: 1.1854 - val_acc: 0.5043
Epoch 6/100
467/467 [==============================] - 1s - loss: 1.1668 - acc: 0.5760 - val_loss: 1.1895 - val_acc: 0.4701
Epoch 7/100
467/467 [==============================] - 1s - loss: 1.1156 - acc: 0.5910 - val_loss: 0.8784 - val_acc: 0.7350
Epoch 8/100
467/467 [==============================] - 1s - loss: 1.1053 - acc: 0.5996 - val_loss: 0.9566 - val_acc: 0.6752
Epoch 9/100
467/467 [==============================] - 1s - loss: 1.0553 - acc: 0.6274 - val_loss: 0.7811 - val_acc: 0.7265
Epoch 10/100
467/467 [==============================] - 1s - loss: 1.0380 - acc: 0.6360 - val_loss: 0.9381 - val_acc: 0.5897
Epoch 11/100
467/467 [==============================] - 1s - loss: 0.9800 - acc: 0.6467 - val_loss: 0.8343 - val_acc: 0.6752
Epoch 12/100
467/467 [==============================] - 1s - loss: 0.9505 - acc: 0.6660 - val_loss: 0.9179 - val_acc: 0.6239
Epoch 13/100
467/467 [==============================] - 1s - loss: 0.9172 - acc: 0.7024 - val_loss: 0.8093 - val_acc: 0.7094
Epoch 14/100
467/467 [==============================] - 1s - loss: 0.9045 - acc: 0.6981 - val_loss: 0.9738 - val_acc: 0.6239
Epoch 15/100
467/467 [==============================] - 1s - loss: 0.8554 - acc: 0.7024 - val_loss: 0.6021 - val_acc: 0.7949
Epoch 16/100
467/467 [==============================] - 1s - loss: 0.8487 - acc: 0.6938 - val_loss: 0.6832 - val_acc: 0.7094
Epoch 17/100
467/467 [==============================] - 1s - loss: 0.8121 - acc: 0.6959 - val_loss: 0.7644 - val_acc: 0.7009
Epoch 18/100
467/467 [==============================] - 1s - loss: 0.8227 - acc: 0.6981 - val_loss: 0.6293 - val_acc: 0.8120
Epoch 19/100
467/467 [==============================] - 1s - loss: 0.7550 - acc: 0.7281 - val_loss: 0.5292 - val_acc: 0.8718
Epoch 20/100
467/467 [==============================] - 1s - loss: 0.7562 - acc: 0.7366 - val_loss: 0.8494 - val_acc: 0.6496
Epoch 21/100
467/467 [==============================] - 1s - loss: 0.7422 - acc: 0.7366 - val_loss: 0.4771 - val_acc: 0.8632
Epoch 22/100
467/467 [==============================] - 1s - loss: 0.7110 - acc: 0.7623 - val_loss: 0.7017 - val_acc: 0.7265
Epoch 23/100
467/467 [==============================] - 1s - loss: 0.6847 - acc: 0.7709 - val_loss: 0.7409 - val_acc: 0.7949
Epoch 24/100
467/467 [==============================] - 1s - loss: 0.6717 - acc: 0.7602 - val_loss: 0.4758 - val_acc: 0.8462
Epoch 25/100
467/467 [==============================] - 1s - loss: 0.6329 - acc: 0.7816 - val_loss: 0.6224 - val_acc: 0.8034
Epoch 26/100
467/467 [==============================] - 1s - loss: 0.6254 - acc: 0.7709 - val_loss: 0.3746 - val_acc: 0.8718
Epoch 27/100
467/467 [==============================] - 1s - loss: 0.6101 - acc: 0.7923 - val_loss: 0.3932 - val_acc: 0.8974
Epoch 28/100
467/467 [==============================] - 1s - loss: 0.5728 - acc: 0.8073 - val_loss: 0.8563 - val_acc: 0.7009
Epoch 29/100
467/467 [==============================] - 1s - loss: 0.5940 - acc: 0.7923 - val_loss: 0.3911 - val_acc: 0.9060
Epoch 30/100
467/467 [==============================] - 1s - loss: 0.5839 - acc: 0.8030 - val_loss: 0.6296 - val_acc: 0.7692
Epoch 31/100
467/467 [==============================] - 1s - loss: 0.5404 - acc: 0.7880 - val_loss: 0.3828 - val_acc: 0.8889
Epoch 32/100
467/467 [==============================] - 1s - loss: 0.5395 - acc: 0.8158 - val_loss: 0.4417 - val_acc: 0.8889
Epoch 33/100
467/467 [==============================] - 1s - loss: 0.5126 - acc: 0.8201 - val_loss: 0.3552 - val_acc: 0.8718
Epoch 34/100
467/467 [==============================] - 1s - loss: 0.5131 - acc: 0.8266 - val_loss: 0.4491 - val_acc: 0.8718
Epoch 35/100
467/467 [==============================] - 1s - loss: 0.5008 - acc: 0.8308 - val_loss: 0.3436 - val_acc: 0.9316
Epoch 36/100
467/467 [==============================] - 1s - loss: 0.5114 - acc: 0.8522 - val_loss: 0.5877 - val_acc: 0.7778
Epoch 37/100
467/467 [==============================] - 1s - loss: 0.4822 - acc: 0.8458 - val_loss: 0.5867 - val_acc: 0.8803
Epoch 38/100
467/467 [==============================] - 1s - loss: 0.4882 - acc: 0.8458 - val_loss: 0.3629 - val_acc: 0.9060
Epoch 39/100
467/467 [==============================] - 1s - loss: 0.4864 - acc: 0.8373 - val_loss: 0.3999 - val_acc: 0.9231
Epoch 40/100
467/467 [==============================] - 1s - loss: 0.5118 - acc: 0.8330 - val_loss: 0.2757 - val_acc: 0.9060
Epoch 41/100
467/467 [==============================] - 1s - loss: 0.4703 - acc: 0.8351 - val_loss: 0.4686 - val_acc: 0.8462
Epoch 42/100
467/467 [==============================] - 1s - loss: 0.4608 - acc: 0.8415 - val_loss: 0.2993 - val_acc: 0.9060
Epoch 43/100
467/467 [==============================] - 1s - loss: 0.4624 - acc: 0.8437 - val_loss: 0.2572 - val_acc: 0.8974
Epoch 44/100
467/467 [==============================] - 1s - loss: 0.4488 - acc: 0.8587 - val_loss: 0.4998 - val_acc: 0.8034
Epoch 45/100
467/467 [==============================] - 1s - loss: 0.4426 - acc: 0.8608 - val_loss: 0.3136 - val_acc: 0.9145
Epoch 46/100
467/467 [==============================] - 1s - loss: 0.4229 - acc: 0.8565 - val_loss: 0.2760 - val_acc: 0.9316
Epoch 47/100
467/467 [==============================] - 1s - loss: 0.4201 - acc: 0.8480 - val_loss: 0.4551 - val_acc: 0.8547
Epoch 48/100
467/467 [==============================] - 1s - loss: 0.3883 - acc: 0.8651 - val_loss: 0.2108 - val_acc: 0.9402
Epoch 49/100
467/467 [==============================] - 1s - loss: 0.4530 - acc: 0.8522 - val_loss: 0.2512 - val_acc: 0.9145
Epoch 50/100
467/467 [==============================] - 1s - loss: 0.4248 - acc: 0.8565 - val_loss: 0.2623 - val_acc: 0.9060
Epoch 51/100
467/467 [==============================] - 1s - loss: 0.4161 - acc: 0.8630 - val_loss: 0.2881 - val_acc: 0.9060
Epoch 52/100
467/467 [==============================] - 1s - loss: 0.3993 - acc: 0.8630 - val_loss: 0.3727 - val_acc: 0.9316
Epoch 53/100
467/467 [==============================] - 1s - loss: 0.4268 - acc: 0.8801 - val_loss: 0.3975 - val_acc: 0.9060
Epoch 54/100
467/467 [==============================] - 1s - loss: 0.4156 - acc: 0.8608 - val_loss: 0.3672 - val_acc: 0.9145
Epoch 55/100
467/467 [==============================] - 1s - loss: 0.3865 - acc: 0.8672 - val_loss: 0.4980 - val_acc: 0.7863
Epoch 56/100
467/467 [==============================] - 1s - loss: 0.3605 - acc: 0.8929 - val_loss: 1.0063 - val_acc: 0.6581
Epoch 57/100
467/467 [==============================] - 1s - loss: 0.3798 - acc: 0.8779 - val_loss: 0.3551 - val_acc: 0.9402
Epoch 58/100
467/467 [==============================] - 1s - loss: 0.3862 - acc: 0.8887 - val_loss: 0.3100 - val_acc: 0.9145
Epoch 59/100
467/467 [==============================] - 1s - loss: 0.3908 - acc: 0.8758 - val_loss: 0.2382 - val_acc: 0.9231
 32/195 [===>..........................] - ETA: 0s/n Accuracy of the model is  0.938461538462
Out[143]:
array([[25,  0,  0,  0,  0,  0,  0],
       [ 0, 20,  0,  0,  1,  3,  1],
       [ 1,  0, 23,  0,  0,  2,  0],
       [ 0,  0,  0, 25,  0,  0,  0],
       [ 0,  0,  0,  0, 27,  0,  0],
       [ 0,  0,  0,  0,  0, 25,  0],
       [ 0,  0,  1,  0,  3,  0, 38]])

In [116]:
from keras.models import Sequential
from keras.layers import Dense, Activation,LSTM
from keras import backend as K
from keras.optimizers import Adam,SGD
from keras.utils import np_utils
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=10)


# One-hot encode the 7 integer classes for the softmax / cross-entropy head.
Y_Keras = np_utils.to_categorical(Y_number, 7)
# Create first network with Keras
from keras.models import Sequential
from keras.layers import Dense, Activation,Dropout
# Simple MLP: 136 -> 1000 (relu) -> dropout -> 5000 (relu) -> 7 (softmax).
model = Sequential()
#model.add(LSTM(1000, input_dim=136,activation='relu'))
model.add(Dense(1000, input_dim=136,activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(5000,activation='relu'))
#model.add(Dense(1000,activation='relu'))
model.add(Dense(7,activation='softmax'))
# NOTE(review): `sgd` is built but never used — compile() below uses 'rmsprop'.
sgd = SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)

# Compile model
model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

import time  # NOTE(review): unused import
# nb_epoch is the Keras 1.x spelling (renamed `epochs` in Keras 2).
model.fit(X_tr, Y_Keras, nb_epoch=100, batch_size=5,verbose=1,
          validation_split=0.2, callbacks=[early_stopping])

# Decode integer class predictions back to label names.
y_keras = map[model.predict_classes(X_tst)]

print 'Accuracy of the model is ',accuracy_score(Y_tst,y_keras)
confusion_matrix(Y_tst,y_keras)


Train on 467 samples, validate on 117 samples
Epoch 1/100
467/467 [==============================] - 5s - loss: 1.8632 - acc: 0.2698 - val_loss: 2.1192 - val_acc: 0.1282
Epoch 2/100
467/467 [==============================] - 5s - loss: 1.5615 - acc: 0.3683 - val_loss: 1.2656 - val_acc: 0.5043
Epoch 3/100
467/467 [==============================] - 4s - loss: 1.4058 - acc: 0.4433 - val_loss: 1.0340 - val_acc: 0.6239
Epoch 4/100
467/467 [==============================] - 5s - loss: 1.3048 - acc: 0.4818 - val_loss: 1.3190 - val_acc: 0.3675
Epoch 5/100
467/467 [==============================] - 5s - loss: 1.2756 - acc: 0.4946 - val_loss: 0.9859 - val_acc: 0.6154
Epoch 6/100
467/467 [==============================] - 5s - loss: 1.2067 - acc: 0.5075 - val_loss: 1.1030 - val_acc: 0.4701
Epoch 7/100
467/467 [==============================] - 5s - loss: 1.1470 - acc: 0.5439 - val_loss: 0.9779 - val_acc: 0.7009
Epoch 8/100
467/467 [==============================] - 5s - loss: 1.0658 - acc: 0.6039 - val_loss: 0.8841 - val_acc: 0.6239
Epoch 9/100
467/467 [==============================] - 6s - loss: 1.0813 - acc: 0.5996 - val_loss: 0.9401 - val_acc: 0.5983
Epoch 10/100
467/467 [==============================] - 5s - loss: 1.0155 - acc: 0.6188 - val_loss: 0.9027 - val_acc: 0.7009
Epoch 11/100
467/467 [==============================] - 5s - loss: 0.9529 - acc: 0.6510 - val_loss: 0.7454 - val_acc: 0.7350
Epoch 12/100
467/467 [==============================] - 6s - loss: 0.9472 - acc: 0.6360 - val_loss: 0.8493 - val_acc: 0.7179
Epoch 13/100
467/467 [==============================] - 6s - loss: 0.8559 - acc: 0.6552 - val_loss: 0.6079 - val_acc: 0.7009
Epoch 14/100
467/467 [==============================] - 6s - loss: 0.8981 - acc: 0.6424 - val_loss: 1.0489 - val_acc: 0.5043
Epoch 15/100
467/467 [==============================] - 6s - loss: 0.8707 - acc: 0.6617 - val_loss: 1.4144 - val_acc: 0.5470
Epoch 16/100
467/467 [==============================] - 6s - loss: 0.8500 - acc: 0.6767 - val_loss: 0.9326 - val_acc: 0.6068
Epoch 17/100
467/467 [==============================] - 6s - loss: 0.8555 - acc: 0.6852 - val_loss: 0.6388 - val_acc: 0.8034
Epoch 18/100
467/467 [==============================] - 6s - loss: 0.8154 - acc: 0.6938 - val_loss: 0.7147 - val_acc: 0.7607
Epoch 19/100
467/467 [==============================] - 6s - loss: 0.7852 - acc: 0.7302 - val_loss: 0.9067 - val_acc: 0.6239
Epoch 20/100
467/467 [==============================] - 6s - loss: 0.7687 - acc: 0.7045 - val_loss: 0.4466 - val_acc: 0.8718
Epoch 21/100
467/467 [==============================] - 6s - loss: 0.6945 - acc: 0.7409 - val_loss: 0.6543 - val_acc: 0.7350
Epoch 22/100
467/467 [==============================] - 6s - loss: 0.6873 - acc: 0.7580 - val_loss: 0.9189 - val_acc: 0.7094
Epoch 23/100
467/467 [==============================] - 6s - loss: 0.7138 - acc: 0.7281 - val_loss: 0.4408 - val_acc: 0.8291
Epoch 24/100
467/467 [==============================] - 6s - loss: 0.7062 - acc: 0.7452 - val_loss: 0.8008 - val_acc: 0.6752
Epoch 25/100
467/467 [==============================] - 6s - loss: 0.7104 - acc: 0.7259 - val_loss: 1.9008 - val_acc: 0.4274
Epoch 26/100
467/467 [==============================] - 6s - loss: 0.6560 - acc: 0.7388 - val_loss: 0.8609 - val_acc: 0.6496
Epoch 27/100
467/467 [==============================] - 5s - loss: 0.7054 - acc: 0.7388 - val_loss: 0.4394 - val_acc: 0.8718
Epoch 28/100
467/467 [==============================] - 6s - loss: 0.6254 - acc: 0.7709 - val_loss: 0.4460 - val_acc: 0.8547
Epoch 29/100
467/467 [==============================] - 6s - loss: 0.6545 - acc: 0.7559 - val_loss: 0.8739 - val_acc: 0.6068
Epoch 30/100
467/467 [==============================] - 6s - loss: 0.6410 - acc: 0.7495 - val_loss: 1.3339 - val_acc: 0.5812
Epoch 31/100
467/467 [==============================] - 6s - loss: 0.6742 - acc: 0.7794 - val_loss: 0.5435 - val_acc: 0.7949
Epoch 32/100
467/467 [==============================] - 6s - loss: 0.6540 - acc: 0.7880 - val_loss: 1.0086 - val_acc: 0.6496
Epoch 33/100
467/467 [==============================] - 6s - loss: 0.5907 - acc: 0.7966 - val_loss: 0.6121 - val_acc: 0.7607
Epoch 34/100
467/467 [==============================] - 6s - loss: 0.6572 - acc: 0.7901 - val_loss: 1.1642 - val_acc: 0.6154
Epoch 35/100
467/467 [==============================] - 6s - loss: 0.6361 - acc: 0.8009 - val_loss: 0.2513 - val_acc: 0.9402
Epoch 36/100
467/467 [==============================] - 6s - loss: 0.6261 - acc: 0.7987 - val_loss: 1.1552 - val_acc: 0.5897
Epoch 37/100
467/467 [==============================] - 6s - loss: 0.5721 - acc: 0.7901 - val_loss: 0.7357 - val_acc: 0.7521
Epoch 38/100
467/467 [==============================] - 6s - loss: 0.5399 - acc: 0.7987 - val_loss: 0.5377 - val_acc: 0.8034
Epoch 39/100
467/467 [==============================] - 6s - loss: 0.5488 - acc: 0.8137 - val_loss: 0.3162 - val_acc: 0.8632
Epoch 40/100
467/467 [==============================] - 6s - loss: 0.5526 - acc: 0.8073 - val_loss: 0.5885 - val_acc: 0.7607
Epoch 41/100
467/467 [==============================] - 6s - loss: 0.5619 - acc: 0.8223 - val_loss: 2.1444 - val_acc: 0.4444
Epoch 42/100
467/467 [==============================] - 6s - loss: 0.5793 - acc: 0.8094 - val_loss: 0.6088 - val_acc: 0.7521
Epoch 43/100
467/467 [==============================] - 6s - loss: 0.6110 - acc: 0.8094 - val_loss: 0.7048 - val_acc: 0.7179
Epoch 44/100
467/467 [==============================] - 6s - loss: 0.5295 - acc: 0.8244 - val_loss: 0.4704 - val_acc: 0.8889
Epoch 45/100
467/467 [==============================] - 6s - loss: 0.5877 - acc: 0.8051 - val_loss: 0.3152 - val_acc: 0.9402
Epoch 46/100
467/467 [==============================] - 6s - loss: 0.5609 - acc: 0.8137 - val_loss: 0.5406 - val_acc: 0.8547
192/195 [============================>.] - ETA: 0sAccuracy of the model is  0.897435897436
Out[116]:
array([[19,  0,  0,  5,  1,  0,  0],
       [ 0, 20,  0,  0,  0,  4,  1],
       [ 0,  0, 23,  2,  1,  0,  0],
       [ 0,  0,  0, 24,  0,  0,  1],
       [ 0,  0,  0,  0, 27,  0,  0],
       [ 0,  1,  0,  0,  0, 23,  1],
       [ 0,  1,  1,  0,  1,  0, 39]])

In [ ]:
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD

def VGG_16(weights_path=None):
    """Build a VGG-16-style convnet (Keras 1.x API) for 3x224x224 inputs
    with a 7-way softmax head.

    Args:
        weights_path: optional path to an HDF5 weights file to load.
    Returns:
        The (uncompiled) Sequential model.
    """
    model = Sequential()
    # First stage carries the input_shape; kept explicit for clarity.
    model.add(ZeroPadding2D((1,1),input_shape=(3,224,224)))
    model.add(Convolution2D(64, 3, 3, activation='relu'))
    model.add(ZeroPadding2D((1,1)))
    model.add(Convolution2D(64, 3, 3, activation='relu'))
    model.add(MaxPooling2D((2,2), strides=(2,2)))

    # Remaining conv stages, config-driven instead of copy-pasted:
    # (number of conv layers in the stage, filters per layer).
    for n_convs, filters in [(2, 128), (3, 256), (3, 512), (3, 512)]:
        for _ in range(n_convs):
            model.add(ZeroPadding2D((1,1)))
            model.add(Convolution2D(filters, 3, 3, activation='relu'))
        model.add(MaxPooling2D((2,2), strides=(2,2)))

    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(7, activation='softmax'))

    if weights_path:
        # NOTE(review): the 7-way head differs from stock VGG-16 (1000-way),
        # so ImageNet weight files will not load as-is.
        model.load_weights(weights_path)

    return model

model = VGG_16()
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
# NOTE(review): compiled without metrics and never fit in this notebook;
# lr=0.1 is very high for VGG-scale training — confirm intended.
model.compile(optimizer=sgd, loss='categorical_crossentropy')

In [14]:
from PIL import Image
# Ad-hoc landmark check on a single local image.
image = np.array(Image.open('/home/mckc/Downloads/1.jpg'))
predictor_model = "/home/mckc/Downloads/shape_predictor_68_face_landmarks.dat"

detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(predictor_model)
# Second argument upsamples the image once to help detect smaller faces.
rects = detector(image, 1)
    
if len(rects) > 1:
    print 'TooManyFaces'
if len(rects) == 0:
    print 'NoFaces'
# NOTE(review): unlike get_landmarks(), this does not stop on the warnings
# above — rects[0] raises IndexError when no face was found.
coords = np.array([[p.x, p.y] for p in predictor(image, rects[0]).parts()])
centroid = coords.mean(axis=0)

In [38]:
# NOTE(review): converts the dlib rectangle to an array; `a` is not used
# elsewhere in this notebook — confirm this scratch cell is still needed.
a = np.array(rects[0])

In [26]:
import matplotlib.pyplot as plt
%matplotlib inline
from skimage.transform import resize

# Crop the face region (rows 44:95, cols 72:124 — hand-tuned for this
# image) and resize it to 96x96 for display.
plt.imshow(resize(image[44:95,72:124],(96,96)))


Out[26]:
<matplotlib.image.AxesImage at 0x7fc3a78ca110>

In [ ]: