In [1]:
import csv
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from matplotlib import cm
%matplotlib inline
os.chdir('/home/mckc/Downloads/data bases/emotions/')

In [2]:
labels = {0:'Angry', 1:'Disgust', 2:'Fear', 3:'Happy', 4:'Sad', 5:'Surprise', 6:'Neutral'}
labels[0]


Out[2]:
'Angry'

In [3]:
df = pd.read_csv('fer2013.csv')
df['pixels'] = df['pixels'].apply(lambda im: np.fromstring(im, sep=' '))
df.shape


Out[3]:
(35887, 3)
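
Each row's pixels string is now a flat float vector; a quick sanity check (sketch) that every vector really holds 48*48 = 2304 values, plus an equivalent parse that avoids np.fromstring:

assert (df['pixels'].apply(len) == 48 * 48).all()
# equivalent, split-based parse if np.fromstring is ever unavailable:
# df['pixels'] = df['pixels'].apply(lambda im: np.array(im.split(), dtype=np.float32))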

In [4]:
train = (df.loc[df.Usage=='Training']).values[:,:2]
test = (df.loc[df.Usage!='Training']).values[:,:2]
print train.shape,test.shape,type(train)


(28709, 2) (7178, 2) <type 'numpy.ndarray'>
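
The Usage column in fer2013.csv actually takes three values (Training, PublicTest, PrivateTest); the test set above merges the last two. If the two official test splits are needed separately, a sketch:

public_test = (df.loc[df.Usage=='PublicTest']).values[:,:2]
private_test = (df.loc[df.Usage=='PrivateTest']).values[:,:2]
print public_test.shape, private_test.shape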

In [5]:
train[1,1].reshape(48,48)
print(labels[train[1,0]])
plt.imshow(train[1,1].reshape(48,48),cmap=cm.Greys_r)


Angry
Out[5]:
<matplotlib.image.AxesImage at 0x7f4a6d2c2a50>

In [6]:
X_tr= np.array([train[i,1] for i in range(train.shape[0])])
X_ts= np.array([test[i,1] for i in range(test.shape[0])])

In [7]:
X_normal = X_tr.reshape(-1,2304)
X_test_normal = X_ts.reshape(-1,2304)
Y_number = train[:,0].astype("|S6")
Y_test_number = test[:,0].astype("|S6")

In [8]:
Y_number.dtype


Out[8]:
dtype('S6')

In [9]:
unique, counts = np.unique(Y_number, return_counts=True)
dict(zip(unique, counts))


Out[9]:
{'0': 3995, '1': 436, '2': 4097, '3': 7215, '4': 4830, '5': 3171, '6': 4965}
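
The counts show a strong imbalance (Disgust has only 436 training faces against 7,215 for Happy). One low-effort mitigation is class weighting; a sketch using sklearn's helper (the same effect can be had by passing class_weight='balanced' to the classifiers below):

from sklearn.utils.class_weight import compute_class_weight
class_weights = compute_class_weight('balanced', unique, Y_number)
dict(zip(unique, class_weights))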

In [10]:
from sklearn.linear_model import LogisticRegression
from sklearn import cross_validation
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score

clf = LogisticRegression(verbose=0,n_jobs=-1,multi_class='multinomial',solver='lbfgs',max_iter=100,warm_start=True)


/home/mckc/anaconda/lib/python2.7/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)

In [11]:
clf.fit(X_tr,Y_number)
Y_logistic = clf.predict(X_test_normal)

print 'Accuracy of the model is ',accuracy_score(Y_test_number,Y_logistic)
confusion_matrix(Y_test_number,Y_logistic)


Accuracy of the model is  0.372248537197
Out[11]:
array([[ 150,    0,   73,  358,  142,   90,  145],
       [  15,    0,   16,   39,   13,    8,   20],
       [  83,    0,  136,  326,  155,  143,  181],
       [  81,    0,   65, 1264,  141,   77,  146],
       [ 127,    2,  101,  349,  279,  133,  256],
       [  34,    0,   73,  158,   90,  376,  100],
       [  86,    0,   64,  377,  149,   90,  467]])
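
The multinomial/lbfgs fit above runs on raw 0-255 pixel values; lbfgs generally behaves better on standardized features, so a hedged variant worth trying (X_tr_scaled, X_ts_scaled and clf_scaled are names introduced here, not from the notebook):

from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
X_tr_scaled = scaler.fit_transform(X_normal)
X_ts_scaled = scaler.transform(X_test_normal)
clf_scaled = LogisticRegression(multi_class='multinomial', solver='lbfgs', n_jobs=-1)
clf_scaled.fit(X_tr_scaled, Y_number)
print 'Accuracy with scaled pixels ', accuracy_score(Y_test_number, clf_scaled.predict(X_ts_scaled))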

In [12]:
from sklearn.ensemble import RandomForestClassifier
recognizer = RandomForestClassifier(200,verbose=0,oob_score=True,n_jobs=-1,warm_start=True)
#scores = cross_validation.cross_val_score(recognizer, X_normal, Y_number, cv=5)
#print scores
#print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))

In [13]:
recognizer.fit(X_tr,Y_number)
Y_rf= recognizer.predict(X_test_normal)

print 'Accuracy of the model is ',accuracy_score(Y_test_number,Y_rf)
confusion_matrix(Y_test_number,Y_rf)


Accuracy of the model is  0.473251602118
Out[13]:
array([[ 188,    0,   60,  367,  168,   39,  136],
       [   5,   33,    5,   44,   10,    4,   10],
       [  42,    0,  280,  299,  167,   85,  151],
       [  28,    0,   32, 1416,  139,   46,  113],
       [  48,    0,   78,  424,  450,   18,  229],
       [  12,    0,   50,  142,   35,  515,   77],
       [  23,    0,   46,  443,  181,   25,  515]])
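
The single accuracy number hides how the minority classes fare; a per-class breakdown is cheap to add (sketch, reusing the labels map from In [2] and the forest's out-of-bag estimate):

from sklearn.metrics import classification_report
print 'Out-of-bag estimate ', recognizer.oob_score_
print classification_report(Y_test_number, Y_rf, target_names=[labels[i] for i in range(7)])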

In [ ]:
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras import backend as K
from keras.optimizers import Adam,SGD,RMSprop,Adadelta,Adagrad,Nadam,Adamax
from keras.utils import np_utils
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=10)


Y_Keras = np_utils.to_categorical(Y_number, 7)
Y_Keras_test = np_utils.to_categorical(Y_test_number,7)
# Create first network with Keras
from keras.models import Sequential
from keras.layers import Dense, Activation,Dropout
model = Sequential()
model.add(Dense(1000, input_dim=2304,activation='relu'))
model.add(Dense(1000,activation='relu'))
#model.add(Dropout(0.5))
#model.add(Dense(1000,activation='relu'))
model.add(Dense(7,activation='softmax'))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
rms = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08)
adagrad = Adagrad(lr=0.0001, epsilon=1e-08)
adadelta = Adadelta(lr=.1, rho=0.95, epsilon=1e-08)
adam = Adam(lr=0.00000001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
adamax= Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
nadam = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, schedule_decay=0.004)


# Compile model
model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])

model.fit(X_tr, Y_Keras, nb_epoch=4000, batch_size=200,verbose=1,shuffle=True,
          validation_data=(X_ts,Y_Keras_test))

# predict() returns class probabilities; use predict_classes so the labels can be scored directly
Y_ke = model.predict_classes(X_test_normal)

print 'Accuracy of the model is ',accuracy_score(Y_test_number.astype(int),Y_ke)
confusion_matrix(Y_test_number.astype(int),Y_ke)
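
Note that early_stopping is constructed above but never handed to fit, so the 4000-epoch run would only stop when it finishes. A sketch of wiring the callback in (with a more conventional Adam learning rate than 1e-8):

model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.001), metrics=['accuracy'])
model.fit(X_tr, Y_Keras, nb_epoch=4000, batch_size=200, verbose=1, shuffle=True,
          validation_data=(X_ts, Y_Keras_test), callbacks=[early_stopping])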

In [14]:
import lasagne
#from lasagne.layers.cuda_convnet import Conv2DCCLayer as Conv2DLayer
#from lasagne.layers.cuda_convnet import MaxPool2DCCLayer as MaxPool2DLayer
from lasagne import layers
from lasagne.objectives import categorical_crossentropy
from lasagne.updates import nesterov_momentum,adadelta
from nolearn.lasagne import BatchIterator,visualize,NeuralNet
Conv2DLayer = layers.Conv2DLayer
MaxPool2DLayer = layers.MaxPool2DLayer

net = NeuralNet(
    layers=[
        ('input', layers.InputLayer),
        ('conv1', Conv2DLayer),
        ('pool1', MaxPool2DLayer),
        ('dropout1', layers.DropoutLayer),
        ('conv2', Conv2DLayer),
        ('pool2', MaxPool2DLayer),
        ('dropout2', layers.DropoutLayer),
        ('conv3', Conv2DLayer),
        ('pool3', MaxPool2DLayer),
        ('dropout3', layers.DropoutLayer),
        ('hidden4', layers.DenseLayer),
        ('dropout4', layers.DropoutLayer),
        ('hidden5', layers.DenseLayer),
        ('output', layers.DenseLayer),
        ],
    input_shape=(None, 1, 48, 48),
    conv1_num_filters=32, conv1_filter_size=(3, 3), pool1_pool_size=(2, 2),
    dropout1_p=0.1,
    conv2_num_filters=64, conv2_filter_size=(2, 2), pool2_pool_size=(2, 2),
    dropout2_p=0.2,
    conv3_num_filters=128, conv3_filter_size=(2, 2), pool3_pool_size=(2, 2),
    dropout3_p=0.3,
    hidden4_num_units=1000,
    dropout4_p=0.5,
    hidden5_num_units=3000,
    output_nonlinearity=lasagne.nonlinearities.softmax,
    output_num_units=7,
    
    update = adadelta,
    update_learning_rate=0.01,
#    update_momentum=0.9,
    max_epochs=500,
    verbose=1
)


Using gpu device 0: Quadro M2000M (CNMeM is enabled with initial size: 95.0% of memory, cuDNN 5005)
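
One thing to note before fitting: the pixels go in as raw 0-255 values (only cast to float32 below). Conv nets usually train more smoothly on inputs scaled to unit range, so a hedged preprocessing sketch (X_tr_cnn and X_ts_cnn are illustrative names) that could be fed to net.fit instead:

X_tr_cnn = (X_tr.reshape(-1,1,48,48) / 255.0).astype(np.float32)
X_ts_cnn = (X_ts.reshape(-1,1,48,48) / 255.0).astype(np.float32)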

In [15]:
net.fit(X_tr.reshape(-1,1,48,48).astype(np.float32), Y_number.astype(np.uint8))


/home/mckc/anaconda/lib/python2.7/site-packages/lasagne/layers/conv.py:489: UserWarning: The `image_shape` keyword argument to `tensor.nnet.conv2d` is deprecated, it has been renamed to `input_shape`.
  border_mode=border_mode)
# Neural Network with 6266479 learnable parameters

## Layer information

  #  name      size
---  --------  ---------
  0  input     1x48x48
  1  conv1     32x46x46
  2  pool1     32x23x23
  3  dropout1  32x23x23
  4  conv2     64x22x22
  5  pool2     64x11x11
  6  dropout2  64x11x11
  7  conv3     128x10x10
  8  pool3     128x5x5
  9  dropout3  128x5x5
 10  hidden4   1000
 11  dropout4  1000
 12  hidden5   3000
 13  output    7

  epoch    trn loss    val loss    trn/val    valid acc  dur
-------  ----------  ----------  ---------  -----------  -----
      1    10.45755     2.02761    5.15756      0.18576  8.64s
      2     3.12853     1.87660    1.66713      0.19342  8.64s
      3     2.09592     1.90245    1.10169      0.23259  8.63s
      4     1.90503     1.90761    0.99865      0.24739  8.63s
      5     1.87111     1.91446    0.97735      0.25226  8.64s
      6     1.85629     1.91350    0.97011      0.25157  8.64s
      7     1.84850     1.91204    0.96677      0.25122  8.64s
      8     1.84424     1.91189    0.96462      0.25139  8.63s
      9     1.83883     1.90640    0.96456      0.25192  8.63s
     10     1.83821     1.90563    0.96462      0.25157  8.64s
     11     1.83583     1.89951    0.96648      0.25139  8.63s
     12     1.83420     1.89136    0.96978      0.25122  8.64s
     13     1.83311     1.86500    0.98290      0.25139  8.63s
     14     1.83007     1.86422    0.98168      0.25174  8.62s
     15     1.82845     1.87192    0.97678      0.25139  8.62s
     16     1.82680     1.86587    0.97906      0.25157  8.63s
     17     1.82538     1.85458    0.98426      0.25157  8.62s
     18     1.82241     1.85751    0.98110      0.25157  8.64s
     19     1.81898     1.85320    0.98154      0.25139  8.64s
     20     1.82264     1.85980    0.98002      0.25226  8.63s
     21     1.81789     1.86155    0.97655      0.25139  8.63s
     22     1.82021     1.86204    0.97753      0.25192  8.63s
     23     1.81533     1.85241    0.97998      0.25157  8.63s
     24     1.81317     1.84917    0.98053      0.25226  8.64s
     25     1.81186     1.84149    0.98391      0.25331  8.64s
     26     1.80874     1.83090    0.98790      0.25348  8.64s
     27     1.80627     1.83150    0.98622      0.25383  8.64s
     28     1.80262     1.82214    0.98928      0.25662  8.64s
     29     1.79783     1.81132    0.99255      0.26149  8.64s
     30     1.79187     1.80231    0.99421      0.27019  8.64s
     31     1.78611     1.78760    0.99917      0.27942  8.64s
     32     1.77855     1.77619    1.00133      0.28343  8.64s
     33     1.77128     1.76002    1.00640      0.30188  8.64s
     34     1.76092     1.74789    1.00745      0.32051  8.64s
     35     1.75096     1.73236    1.01073      0.32886  8.63s
     36     1.73797     1.71172    1.01533      0.34244  8.63s
     37     1.72750     1.70522    1.01307      0.34384  8.63s
     38     1.71594     1.68831    1.01636      0.34993  8.64s
     39     1.70644     1.67728    1.01738      0.35794  8.64s
     40     1.69337     1.66232    1.01868      0.36386  8.64s
     41     1.68767     1.65772    1.01806      0.37674  8.63s
     42     1.68468     1.65256    1.01944      0.37047  8.64s
     43     1.67274     1.63684    1.02193      0.38370  8.63s
     44     1.66239     1.62831    1.02093      0.39136  8.64s
     45     1.65079     1.60921    1.02584      0.38962  8.63s
     46     1.63915     1.60280    1.02268      0.39728  8.64s
     47     1.63382     1.59567    1.02391      0.39972  8.63s
     48     1.62270     1.58432    1.02423      0.40024  8.63s
     49     1.62070     1.58038    1.02552      0.39903  8.64s
     50     1.60896     1.57361    1.02246      0.40268  8.64s
     51     1.60513     1.56574    1.02516      0.40529  8.64s
     52     1.59460     1.55622    1.02467      0.41400  8.63s
     53     1.59177     1.55117    1.02618      0.41313  8.63s
     54     1.57704     1.54206    1.02268      0.42009  8.64s
     55     1.57645     1.53431    1.02746      0.41835  8.64s
     56     1.56589     1.52738    1.02521      0.42253  8.64s
     57     1.55890     1.52134    1.02469      0.42531  8.63s
     58     1.55382     1.51454    1.02594      0.42897  8.63s
     59     1.54098     1.50600    1.02322      0.43245  8.63s
     60     1.53299     1.50062    1.02157      0.43419  8.63s
     61     1.53166     1.48873    1.02883      0.43942  8.64s
     62     1.52341     1.48338    1.02699      0.43924  8.64s
     63     1.51672     1.47715    1.02679      0.44690  8.63s
     64     1.51099     1.47083    1.02730      0.44620  8.64s
     65     1.50449     1.46439    1.02739      0.45508  8.63s
     66     1.50342     1.46301    1.02762      0.45073  8.63s
     67     1.48714     1.45542    1.02179      0.45456  8.64s
     68     1.49339     1.44916    1.03052      0.45961  8.63s
     69     1.48360     1.44411    1.02734      0.45769  8.63s
     70     1.47728     1.44456    1.02265      0.45700  8.63s
     71     1.47324     1.43426    1.02717      0.46153  8.63s
     72     1.46166     1.42645    1.02469      0.46448  8.63s
     73     1.46705     1.42588    1.02887      0.46414  8.63s
     74     1.45751     1.41879    1.02730      0.46797  8.63s
     75     1.44711     1.41491    1.02276      0.47023  8.63s
     76     1.44583     1.40971    1.02562      0.46866  8.64s
     77     1.44252     1.40181    1.02904      0.47389  8.63s
     78     1.42794     1.39833    1.02117      0.47180  8.63s
     79     1.42436     1.39316    1.02240      0.47528  8.63s
     80     1.42288     1.39049    1.02330      0.47319  8.63s
     81     1.41745     1.38282    1.02505      0.48015  8.62s
     82     1.41282     1.38255    1.02189      0.47841  8.63s
     83     1.40922     1.37698    1.02341      0.47754  8.63s
     84     1.40372     1.37115    1.02376      0.48416  8.63s
     85     1.39443     1.36830    1.01909      0.48259  8.63s
     86     1.39674     1.36397    1.02403      0.48694  8.63s
     87     1.38559     1.36275    1.01676      0.48590  8.63s
     88     1.38394     1.35969    1.01784      0.48503  8.63s
     89     1.38377     1.35650    1.02010      0.48799  8.63s
     90     1.37866     1.35516    1.01734      0.48990  8.63s
     91     1.37057     1.34788    1.01683      0.49164  8.63s
     92     1.37094     1.34744    1.01743      0.49617  8.63s
     93     1.36962     1.33997    1.02212      0.49843  8.63s
     94     1.36133     1.33534    1.01946      0.49634  8.64s
     95     1.36240     1.33686    1.01911      0.49356  8.63s
     96     1.34862     1.33014    1.01389      0.49495  8.63s
     97     1.34790     1.32776    1.01516      0.50087  8.63s
     98     1.34084     1.32515    1.01184      0.50244  8.64s
     99     1.33349     1.32140    1.00915      0.50366  8.63s
    100     1.33513     1.32307    1.00911      0.50035  8.64s
    101     1.33542     1.32095    1.01095      0.50174  8.64s
    102     1.32649     1.31541    1.00842      0.50714  8.64s
    103     1.31776     1.30974    1.00613      0.50992  8.65s
    104     1.32126     1.30945    1.00902      0.50679  8.64s
    105     1.31960     1.30487    1.01129      0.50958  8.65s
    106     1.31564     1.30552    1.00775      0.50731  8.64s
    107     1.31030     1.30174    1.00658      0.50975  8.64s
    108     1.30791     1.29753    1.00799      0.51201  8.64s
    109     1.29935     1.29824    1.00086      0.51462  8.64s
    110     1.29815     1.29617    1.00153      0.51219  8.62s
    111     1.29801     1.29093    1.00549      0.51671  8.63s
    112     1.28890     1.28896    0.99996      0.51654  8.63s
    113     1.28785     1.28559    1.00176      0.51341  8.63s
    114     1.28001     1.28581    0.99549      0.51549  8.64s
    115     1.28208     1.28316    0.99916      0.51776  8.64s
    116     1.28202     1.27948    1.00198      0.51845  8.64s
    117     1.27074     1.27584    0.99600      0.51915  8.64s
    118     1.27299     1.27602    0.99763      0.52072  8.63s
    119     1.26950     1.27312    0.99716      0.52368  8.63s
    120     1.26549     1.27200    0.99489      0.52176  8.63s
    121     1.25875     1.26868    0.99217      0.52315  8.64s
    122     1.25536     1.26536    0.99209      0.52524  8.64s
    123     1.25186     1.26572    0.98906      0.52716  8.63s
    124     1.25023     1.26274    0.99009      0.52594  8.63s
    125     1.25156     1.25721    0.99550      0.52960  8.63s
    126     1.24482     1.25792    0.98959      0.52925  8.63s
    127     1.24803     1.26253    0.98852      0.52315  8.63s
    128     1.24111     1.25477    0.98911      0.52542  8.62s
    129     1.23606     1.25404    0.98566      0.52681  8.62s
    130     1.22797     1.25215    0.98069      0.53169  8.62s
    131     1.23167     1.24759    0.98724      0.53412  8.64s
    132     1.23092     1.24863    0.98582      0.53064  8.63s
    133     1.22347     1.24518    0.98256      0.53499  8.63s
    134     1.22042     1.24661    0.97900      0.52890  8.63s
    135     1.21821     1.24806    0.97608      0.52925  8.62s
    136     1.21888     1.24097    0.98220      0.53256  8.62s
    137     1.21097     1.24087    0.97591      0.53534  8.62s
    138     1.20126     1.23766    0.97059      0.53569  8.62s
    139     1.20443     1.23273    0.97705      0.53639  8.63s
    140     1.19887     1.23272    0.97254      0.53900  8.62s
    141     1.20234     1.23240    0.97561      0.53586  8.62s
    142     1.19586     1.22903    0.97301      0.53447  8.62s
    143     1.19297     1.22857    0.97102      0.53447  8.62s
    144     1.18637     1.22341    0.96972      0.53795  8.61s
    145     1.18515     1.22362    0.96856      0.53691  8.62s
    146     1.17753     1.22090    0.96448      0.54039  8.63s
    147     1.17780     1.21829    0.96676      0.54300  8.62s
    148     1.17938     1.21854    0.96786      0.54109  8.62s
    149     1.17448     1.21904    0.96345      0.53987  8.63s
    150     1.16952     1.21749    0.96060      0.54422  8.63s
    151     1.16503     1.21637    0.95779      0.54126  8.63s
    152     1.16109     1.21744    0.95371      0.53865  8.62s
    153     1.16067     1.21375    0.95627      0.54300  8.62s
    154     1.16090     1.21284    0.95718      0.54178  8.62s
    155     1.15362     1.20965    0.95368      0.54387  8.61s
    156     1.14954     1.20894    0.95087      0.54178  8.62s
    157     1.15175     1.20900    0.95265      0.54439  8.63s
    158     1.15332     1.20449    0.95751      0.54701  8.62s
    159     1.15033     1.20727    0.95284      0.54474  8.62s
    160     1.14499     1.20092    0.95343      0.54997  8.62s
    161     1.13972     1.20603    0.94502      0.54318  8.62s
    162     1.14124     1.19866    0.95210      0.54735  8.62s
    163     1.12569     1.19654    0.94078      0.54979  8.62s
    164     1.12845     1.19550    0.94392      0.54770  8.63s
    165     1.13066     1.19220    0.94839      0.55049  8.62s
    166     1.11404     1.19613    0.93137      0.54579  8.63s
    167     1.12458     1.19205    0.94340      0.55049  8.63s
    168     1.11299     1.19182    0.93386      0.54997  8.62s
    169     1.11417     1.19287    0.93402      0.54927  8.63s
    170     1.11614     1.19049    0.93754      0.54909  8.62s
    171     1.10939     1.18573    0.93562      0.55240  8.64s
    172     1.11198     1.18377    0.93935      0.55327  8.63s
    173     1.09909     1.18312    0.92898      0.55519  8.63s
    174     1.09820     1.18308    0.92826      0.55797  8.63s
    175     1.09669     1.18239    0.92752      0.55536  8.64s
    176     1.09545     1.18361    0.92552      0.55432  8.64s
    177     1.09549     1.18206    0.92677      0.56024  8.63s
    178     1.09559     1.17969    0.92872      0.55797  8.63s
    179     1.09233     1.17596    0.92889      0.55832  8.62s
    180     1.08358     1.17879    0.91923      0.55710  8.62s
    181     1.07993     1.17947    0.91561      0.55919  8.62s
    182     1.08323     1.17587    0.92121      0.55867  8.62s
    183     1.07295     1.17907    0.91000      0.55571  8.63s
    184     1.07141     1.17656    0.91063      0.55832  8.62s
    185     1.07171     1.17422    0.91270      0.55606  8.64s
    186     1.06396     1.16912    0.91006      0.56250  8.63s
    187     1.07001     1.17225    0.91279      0.55919  8.62s
    188     1.06370     1.17446    0.90570      0.55780  8.61s
    189     1.05354     1.17425    0.89721      0.55623  8.62s
    190     1.05354     1.17106    0.89964      0.55919  8.64s
    191     1.05181     1.16561    0.90237      0.55971  8.63s
    192     1.04465     1.16444    0.89713      0.56111  8.64s
    193     1.05003     1.16634    0.90027      0.55919  8.63s
    194     1.04738     1.16830    0.89650      0.55902  8.63s
    195     1.04798     1.16590    0.89886      0.56041  8.62s
    196     1.03846     1.16571    0.89084      0.56302  8.63s
    197     1.03244     1.16854    0.88353      0.56128  8.63s
    198     1.03386     1.16104    0.89047      0.55902  8.63s
    199     1.02608     1.15853    0.88567      0.56546  8.63s
    200     1.02780     1.15796    0.88759      0.56302  8.62s
    201     1.02896     1.15980    0.88719      0.56146  8.62s
    202     1.02468     1.15810    0.88480      0.56111  8.62s
    203     1.01526     1.15933    0.87573      0.56337  8.62s
    204     1.01444     1.15699    0.87680      0.56511  8.62s
    205     1.01534     1.15560    0.87862      0.56146  8.62s
    206     1.01104     1.15449    0.87575      0.56581  8.62s
    207     1.00655     1.15674    0.87016      0.56703  8.63s
    208     1.00494     1.15225    0.87216      0.56685  8.63s
    209     1.00317     1.15965    0.86506      0.56529  8.62s
    210     1.00331     1.15211    0.87084      0.56877  8.63s
    211     0.99754     1.14984    0.86755      0.56633  8.62s
    212     0.99094     1.15098    0.86095      0.56720  8.62s
    213     0.98354     1.14884    0.85612      0.56650  8.63s
    214     0.98494     1.15438    0.85322      0.56633  8.62s
    215     0.98395     1.14965    0.85587      0.56372  8.63s
    216     0.97786     1.14927    0.85086      0.56285  8.63s
    217     0.98243     1.15003    0.85426      0.56825  8.62s
    218     0.97811     1.14905    0.85123      0.56511  8.63s
    219     0.96974     1.15277    0.84122      0.56529  8.63s
    220     0.96886     1.14954    0.84282      0.56320  8.62s
    221     0.96699     1.14713    0.84297      0.56755  8.63s
    222     0.96802     1.14486    0.84554      0.56964  8.62s
    223     0.96599     1.14286    0.84524      0.56964  8.62s
    224     0.96389     1.14675    0.84054      0.56616  8.62s
    225     0.95523     1.14327    0.83553      0.56859  8.62s
    226     0.95801     1.14292    0.83822      0.57068  8.63s
    227     0.94970     1.14294    0.83092      0.56772  8.62s
    228     0.94540     1.14339    0.82683      0.56825  8.62s
    229     0.94448     1.14394    0.82564      0.56668  8.62s
    230     0.94331     1.14148    0.82639      0.56946  8.62s
    231     0.93538     1.14124    0.81962      0.56720  8.62s
    232     0.93456     1.14340    0.81735      0.56929  8.62s
    233     0.93543     1.13866    0.82152      0.56650  8.63s
    234     0.93810     1.13589    0.82588      0.57347  8.63s
    235     0.92207     1.13860    0.80983      0.57295  8.63s
    236     0.92488     1.13932    0.81178      0.57068  8.63s
    237     0.91987     1.14066    0.80644      0.57068  8.63s
    238     0.92676     1.14428    0.80990      0.56720  8.63s
    239     0.91283     1.13745    0.80252      0.56894  8.63s
    240     0.91294     1.14848    0.79492      0.56650  8.63s
    241     0.90808     1.14297    0.79449      0.56772  8.62s
    242     0.90549     1.13794    0.79572      0.56859  8.63s
    243     0.90901     1.14045    0.79706      0.57225  8.63s
    244     0.90406     1.14241    0.79136      0.56842  8.63s
    245     0.89379     1.13623    0.78662      0.57103  8.63s
    246     0.90129     1.13857    0.79161      0.57155  8.63s
    247     0.90193     1.13872    0.79205      0.57190  8.63s
    248     0.89466     1.13726    0.78668      0.56894  8.62s
    249     0.89815     1.13954    0.78817      0.57312  8.62s
    250     0.89538     1.13751    0.78714      0.57277  8.62s
    251     0.88877     1.13575    0.78253      0.57451  8.62s
    252     0.88863     1.13750    0.78122      0.57451  8.63s
    253     0.88238     1.13942    0.77441      0.56859  8.62s
    254     0.87807     1.13490    0.77370      0.57086  8.62s
    255     0.87199     1.13370    0.76916      0.57033  8.62s
    256     0.86890     1.13159    0.76786      0.57625  8.62s
    257     0.87358     1.13809    0.76759      0.57208  8.62s
    258     0.86438     1.13784    0.75967      0.57416  8.63s
    259     0.86437     1.13438    0.76197      0.57503  8.63s
    260     0.86604     1.13605    0.76233      0.57312  8.62s
    261     0.85532     1.13126    0.75608      0.57503  8.62s
    262     0.84739     1.13545    0.74631      0.57399  8.63s
    263     0.85491     1.13340    0.75429      0.57225  8.62s
    264     0.85010     1.13349    0.74999      0.57660  8.62s
    265     0.84656     1.13397    0.74655      0.57608  8.63s
    266     0.84474     1.13796    0.74233      0.57295  8.62s
    267     0.84060     1.13487    0.74070      0.57260  8.62s
    268     0.83761     1.13526    0.73782      0.57556  8.62s
    269     0.83980     1.13483    0.74002      0.57503  8.62s
    270     0.83685     1.13407    0.73792      0.57712  8.63s
    271     0.83218     1.13178    0.73528      0.57782  8.63s
    272     0.82568     1.13653    0.72649      0.57382  8.63s
    273     0.83663     1.13551    0.73679      0.57538  8.63s
    274     0.82665     1.13091    0.73096      0.57573  8.62s
    275     0.82756     1.12754    0.73395      0.57799  8.64s
    276     0.82240     1.13322    0.72572      0.57625  8.63s
    277     0.81768     1.12913    0.72417      0.57521  8.64s
    278     0.81437     1.13365    0.71836      0.57678  8.63s
    279     0.81240     1.13190    0.71773      0.57852  8.63s
    280     0.80629     1.13385    0.71111      0.57242  8.63s
    281     0.81203     1.13443    0.71580      0.57886  8.63s
    282     0.79809     1.13497    0.70318      0.58026  8.63s
    283     0.79481     1.13576    0.69981      0.57869  8.63s
    284     0.80088     1.13363    0.70647      0.58165  8.63s
    285     0.79249     1.13169    0.70027      0.58095  8.63s
    286     0.79929     1.13615    0.70351      0.57573  8.62s
    287     0.78496     1.13298    0.69283      0.57921  8.62s
    288     0.78663     1.13123    0.69537      0.58217  8.62s
    289     0.78829     1.13225    0.69621      0.57974  8.62s
    290     0.78421     1.13276    0.69230      0.58235  8.63s
    291     0.78091     1.13573    0.68758      0.58513  8.62s
    292     0.78852     1.13569    0.69431      0.58148  8.62s
    293     0.77374     1.13896    0.67934      0.58008  8.63s
    294     0.77318     1.13316    0.68232      0.58409  8.63s
    295     0.77620     1.14092    0.68033      0.57747  8.63s
    296     0.76588     1.14024    0.67169      0.57817  8.62s
    297     0.76573     1.13809    0.67282      0.57974  8.62s
    298     0.76113     1.14120    0.66696      0.58252  8.63s
    299     0.75940     1.14170    0.66515      0.58113  8.63s
    300     0.75864     1.13945    0.66579      0.58113  8.62s
    301     0.75292     1.13620    0.66266      0.58061  8.62s
    302     0.75350     1.13704    0.66269      0.58304  8.63s
    303     0.74894     1.13720    0.65858      0.57991  8.63s
    304     0.75100     1.13197    0.66344      0.58374  8.63s
    305     0.74281     1.14197    0.65046      0.58705  8.63s
    306     0.74681     1.13860    0.65591      0.58444  8.63s
    307     0.74491     1.13772    0.65474      0.58304  8.63s
    308     0.74076     1.13722    0.65137      0.58200  8.63s
    309     0.74364     1.14100    0.65175      0.58409  8.63s
    310     0.73872     1.14104    0.64740      0.58095  8.63s
    311     0.72889     1.14176    0.63839      0.58461  8.62s
    312     0.73296     1.13592    0.64526      0.58409  8.63s
    313     0.71944     1.13987    0.63116      0.58740  8.63s
    314     0.72484     1.14346    0.63390      0.58722  8.63s
    315     0.72719     1.14194    0.63680      0.58304  8.62s
    316     0.72179     1.14158    0.63227      0.58496  8.63s
    317     0.71949     1.13947    0.63142      0.58740  8.62s
    318     0.71593     1.13591    0.63027      0.59001  8.63s
    319     0.72191     1.14101    0.63270      0.58565  8.62s
    320     0.70918     1.13956    0.62233      0.58391  8.63s
    321     0.71160     1.14240    0.62290      0.58653  8.63s
    322     0.71111     1.14169    0.62286      0.58409  8.62s
    323     0.70323     1.14810    0.61252      0.58844  8.62s
    324     0.69703     1.13941    0.61175      0.59088  8.63s
    325     0.69493     1.14323    0.60787      0.58827  8.62s
    326     0.70225     1.14075    0.61561      0.59192  8.63s
    327     0.69629     1.14384    0.60873      0.59210  8.63s
    328     0.68640     1.14252    0.60078      0.58948  8.63s
    329     0.68819     1.14265    0.60227      0.59244  8.62s
    330     0.68002     1.14691    0.59291      0.58827  8.64s
    331     0.67941     1.15548    0.58799      0.58687  8.73s
    332     0.68540     1.14842    0.59682      0.58722  8.63s
    333     0.68146     1.13983    0.59787      0.58896  8.63s
    334     0.67635     1.14845    0.58893      0.58861  8.62s
    335     0.67489     1.14694    0.58842      0.58827  8.62s
    336     0.67406     1.15154    0.58536      0.58948  8.63s
    337     0.67396     1.14854    0.58680      0.58705  8.62s
    338     0.66818     1.15187    0.58009      0.59036  8.62s
    339     0.66298     1.15528    0.57387      0.58983  8.63s
    340     0.66557     1.15024    0.57863      0.58670  8.62s
    341     0.66193     1.14883    0.57618      0.58966  8.62s
    342     0.66577     1.14543    0.58124      0.58879  8.62s
    343     0.65146     1.15023    0.56637      0.59036  8.62s
    344     0.65477     1.15391    0.56744      0.59053  8.62s
    345     0.65531     1.15202    0.56884      0.59088  8.63s
    346     0.65054     1.15700    0.56227      0.58931  8.62s
    347     0.64651     1.16105    0.55683      0.58565  8.62s
    348     0.64671     1.15761    0.55866      0.58792  8.62s
    349     0.64441     1.15568    0.55760      0.58844  8.63s
    350     0.64193     1.16107    0.55288      0.58896  8.63s
    351     0.63739     1.15541    0.55165      0.59558  8.62s
    352     0.63747     1.15567    0.55160      0.58966  8.62s
    353     0.63534     1.15438    0.55038      0.59314  8.62s
    354     0.62944     1.15885    0.54316      0.59036  8.63s
    355     0.63333     1.15088    0.55030      0.59384  8.63s
    356     0.63720     1.15498    0.55170      0.59210  8.63s
    357     0.62732     1.15522    0.54304      0.59244  8.62s
    358     0.61887     1.16110    0.53300      0.59349  8.63s
    359     0.61506     1.16615    0.52743      0.59053  8.63s
    360     0.61884     1.16686    0.53035      0.59210  8.62s
    361     0.62419     1.16048    0.53787      0.59088  8.63s
    362     0.61362     1.17070    0.52415      0.59175  8.63s
    363     0.61542     1.16223    0.52952      0.58983  8.62s
    364     0.61407     1.16441    0.52736      0.59436  8.62s
    365     0.60801     1.16387    0.52241      0.59123  8.62s
    366     0.60923     1.17030    0.52058      0.59001  8.63s
    367     0.60834     1.16420    0.52254      0.58983  8.63s
    368     0.61265     1.16856    0.52428      0.59053  8.63s
    369     0.59938     1.16631    0.51391      0.59419  8.63s
    370     0.60357     1.17395    0.51414      0.58496  8.62s
    371     0.59641     1.16484    0.51201      0.59453  8.62s
    372     0.59913     1.17214    0.51114      0.59401  8.62s
    373     0.59316     1.17253    0.50588      0.59297  8.63s
    374     0.58954     1.17284    0.50266      0.59488  8.62s
    375     0.59656     1.17072    0.50956      0.59453  8.62s
    376     0.58289     1.16919    0.49854      0.59314  8.63s
    377     0.58144     1.17156    0.49630      0.59366  8.63s
    378     0.58278     1.17267    0.49697      0.59366  8.62s
    379     0.58923     1.17594    0.50108      0.59540  8.62s
    380     0.57897     1.17240    0.49383      0.59314  8.62s
    381     0.57804     1.17564    0.49168      0.59558  8.63s
    382     0.57347     1.17652    0.48743      0.59244  8.63s
    383     0.57323     1.18243    0.48479      0.59627  8.63s
    384     0.57628     1.17655    0.48981      0.59244  8.62s
    385     0.57537     1.18773    0.48443      0.59401  8.63s
    386     0.57551     1.17176    0.49115      0.59506  8.63s
    387     0.56889     1.17807    0.48290      0.59366  8.62s
    388     0.56586     1.18141    0.47897      0.59488  8.63s
    389     0.56027     1.17521    0.47674      0.59645  8.63s
    390     0.56224     1.17548    0.47830      0.59575  8.63s
    391     0.56192     1.18748    0.47320      0.59384  8.62s
    392     0.55641     1.18455    0.46973      0.59384  8.63s
    393     0.55617     1.19045    0.46719      0.59540  8.62s
    394     0.55536     1.18176    0.46994      0.59645  8.62s
    395     0.55315     1.18514    0.46674      0.59680  8.62s
    396     0.54617     1.19391    0.45746      0.59331  8.63s
    397     0.55047     1.18559    0.46430      0.59384  8.63s
    398     0.55575     1.19043    0.46685      0.59767  8.63s
    399     0.55147     1.18890    0.46384      0.59279  8.63s
    400     0.53894     1.19382    0.45144      0.59593  8.63s
    401     0.54755     1.19147    0.45956      0.59558  8.63s
    402     0.54127     1.19977    0.45114      0.59314  8.65s
    403     0.53909     1.18991    0.45305      0.59540  8.63s
    404     0.52958     1.19148    0.44447      0.59610  8.63s
    405     0.53577     1.18845    0.45081      0.59419  8.62s
    406     0.53252     1.18738    0.44848      0.59836  8.62s
    407     0.53457     1.19046    0.44905      0.59419  8.62s
    408     0.52389     1.20157    0.43600      0.59401  8.64s
    409     0.52919     1.19554    0.44264      0.59471  8.62s
    410     0.52755     1.19331    0.44209      0.59558  8.65s
    411     0.51642     1.20890    0.42718      0.59662  8.62s
    412     0.52161     1.20211    0.43391      0.59384  8.63s
    413     0.51718     1.19230    0.43377      0.59958  8.63s
    414     0.52551     1.19900    0.43829      0.59488  8.62s
    415     0.52445     1.18944    0.44093      0.59889  8.63s
    416     0.51963     1.19454    0.43500      0.59767  8.64s
    417     0.51140     1.20052    0.42598      0.59575  8.63s
    418     0.51136     1.21220    0.42184      0.59436  8.63s
    419     0.50757     1.20736    0.42040      0.59540  8.62s
    420     0.50041     1.20777    0.41432      0.59575  8.63s
    421     0.50155     1.20316    0.41686      0.59593  8.63s
    422     0.50636     1.20859    0.41897      0.59662  8.63s
    423     0.49863     1.21293    0.41109      0.59923  8.63s
    424     0.49829     1.21499    0.41012      0.59714  8.63s
    425     0.49884     1.21571    0.41033      0.59906  8.63s
    426     0.48340     1.22711    0.39393      0.59314  8.63s
    427     0.49722     1.21865    0.40800      0.59471  8.62s
    428     0.49173     1.21593    0.40441      0.59819  8.63s
    429     0.49849     1.22055    0.40842      0.59471  8.62s
    430     0.49165     1.22537    0.40122      0.59227  8.62s
    431     0.48512     1.22062    0.39744      0.59575  8.63s
    432     0.49344     1.20888    0.40818      0.59941  8.62s
    433     0.48616     1.21498    0.40014      0.59366  8.63s
    434     0.49186     1.20983    0.40655      0.59958  8.62s
    435     0.48770     1.22251    0.39893      0.59488  8.62s
    436     0.48273     1.22248    0.39488      0.59575  8.63s
    437     0.47688     1.21843    0.39139      0.60202  8.63s
    438     0.47187     1.21985    0.38683      0.59436  8.63s
    439     0.46973     1.22461    0.38358      0.59331  8.62s
    440     0.47694     1.21647    0.39207      0.59854  8.62s
    441     0.47393     1.22267    0.38762      0.59645  8.63s
    442     0.47427     1.21566    0.39014      0.59732  8.63s
    443     0.46906     1.22136    0.38405      0.59836  8.63s
    444     0.46999     1.22991    0.38213      0.59558  8.63s
    445     0.46542     1.21996    0.38151      0.59941  8.63s
    446     0.46533     1.22573    0.37964      0.59645  8.64s
    447     0.46950     1.22691    0.38267      0.59906  8.62s
    448     0.46549     1.22595    0.37970      0.59732  8.63s
    449     0.46540     1.22705    0.37929      0.59854  8.62s
    450     0.45135     1.22359    0.36887      0.59802  8.62s
    451     0.46087     1.23376    0.37355      0.59767  8.62s
    452     0.45565     1.23476    0.36902      0.59366  8.62s
    453     0.45453     1.23458    0.36817      0.59401  8.63s
    454     0.45498     1.23425    0.36863      0.59645  8.62s
    455     0.45668     1.22972    0.37137      0.59714  8.63s
    456     0.45251     1.23039    0.36778      0.59923  8.62s
    457     0.45089     1.23372    0.36547      0.59784  8.63s
    458     0.45345     1.23666    0.36668      0.59593  8.63s
    459     0.44625     1.23960    0.36000      0.59401  8.62s
    460     0.45278     1.23100    0.36782      0.59680  8.64s
    461     0.44472     1.23367    0.36049      0.59732  8.62s
    462     0.44362     1.24209    0.35716      0.59575  8.63s
    463     0.44424     1.23848    0.35870      0.59819  8.63s
    464     0.43592     1.24891    0.34904      0.59575  8.63s
    465     0.44108     1.24783    0.35348      0.59819  8.63s
    466     0.43743     1.23663    0.35373      0.59941  8.63s
    467     0.42730     1.26121    0.33880      0.59384  8.63s
    468     0.44195     1.24206    0.35582      0.59593  8.62s
    469     0.43509     1.24727    0.34884      0.59384  8.63s
    470     0.43723     1.24474    0.35126      0.59906  8.63s
    471     0.42469     1.24641    0.34073      0.59593  8.64s
    472     0.42442     1.25203    0.33899      0.59680  8.62s
    473     0.42201     1.26422    0.33381      0.59488  8.63s
    474     0.42448     1.25530    0.33815      0.59714  8.64s
    475     0.41632     1.26491    0.32913      0.59558  8.62s
    476     0.42514     1.25756    0.33807      0.59314  8.63s
    477     0.41546     1.25960    0.32983      0.59645  8.63s
    478     0.42148     1.26874    0.33220      0.59784  8.63s
    479     0.42030     1.26773    0.33154      0.59941  8.63s
    480     0.41367     1.25556    0.32947      0.59732  8.62s
    481     0.41197     1.26252    0.32631      0.59819  8.63s
    482     0.40755     1.26658    0.32177      0.59889  8.62s
    483     0.42209     1.26426    0.33387      0.59575  8.63s
    484     0.40500     1.27697    0.31716      0.59523  8.63s
    485     0.40805     1.27345    0.32043      0.59384  8.63s
    486     0.40746     1.26113    0.32309      0.59854  8.64s
    487     0.40950     1.28519    0.31863      0.59244  8.63s
    488     0.41259     1.28032    0.32226      0.59262  8.62s
    489     0.41700     1.26698    0.32913      0.59645  8.65s
    490     0.40666     1.27787    0.31823      0.59540  8.64s
    491     0.40691     1.26723    0.32110      0.59680  8.64s
    492     0.40295     1.26165    0.31938      0.59854  8.64s
    493     0.40685     1.27692    0.31862      0.59680  8.63s
    494     0.40016     1.27177    0.31465      0.59714  8.63s
    495     0.40828     1.26743    0.32213      0.59645  8.63s
    496     0.39815     1.27027    0.31344      0.59941  8.63s
    497     0.39065     1.27155    0.30723      0.59906  8.63s
    498     0.39684     1.27407    0.31148      0.59593  8.62s
    499     0.39219     1.26662    0.30963      0.59941  8.63s
    500     0.39417     1.26904    0.31060      0.60306  8.62s
Out[15]:
NeuralNet(X_tensor_type=None,
     batch_iterator_test=<nolearn.lasagne.base.BatchIterator object at 0x7f4235eb8690>,
     batch_iterator_train=<nolearn.lasagne.base.BatchIterator object at 0x7f4235eb8590>,
     check_input=True, conv1_filter_size=(3, 3), conv1_num_filters=32,
     conv2_filter_size=(2, 2), conv2_num_filters=64,
     conv3_filter_size=(2, 2), conv3_num_filters=128, custom_scores=None,
     dropout1_p=0.1, dropout2_p=0.2, dropout3_p=0.3, dropout4_p=0.5,
     hidden4_num_units=1000, hidden5_num_units=3000,
     input_shape=(None, 1, 48, 48),
     layers=[('input', <class 'lasagne.layers.input.InputLayer'>), ('conv1', <class 'lasagne.layers.conv.Conv2DLayer'>), ('pool1', <class 'lasagne.layers.pool.MaxPool2DLayer'>), ('dropout1', <class 'lasagne.layers.noise.DropoutLayer'>), ('conv2', <class 'lasagne.layers.conv.Conv2DLayer'>), ('pool2', <cla..., <class 'lasagne.layers.dense.DenseLayer'>), ('output', <class 'lasagne.layers.dense.DenseLayer'>)],
     loss=None, max_epochs=500, more_params={},
     objective=<function objective at 0x7f4235ebb668>,
     objective_loss_function=<function categorical_crossentropy at 0x7f4235f365f0>,
     on_batch_finished=[],
     on_epoch_finished=[<nolearn.lasagne.handlers.PrintLog instance at 0x7f4235e8df80>],
     on_training_finished=[],
     on_training_started=[<nolearn.lasagne.handlers.PrintLayerInfo instance at 0x7f4235e8dfc8>],
     output_nonlinearity=<function softmax at 0x7f42363885f0>,
     output_num_units=7, pool1_pool_size=(2, 2), pool2_pool_size=(2, 2),
     pool3_pool_size=(2, 2), regression=False, scores_train=[],
     scores_valid=[],
     train_split=<nolearn.lasagne.base.TrainSplit object at 0x7f4235eb86d0>,
     update=<function adadelta at 0x7f4235f09488>,
     update_learning_rate=0.01, use_label_encoder=False, verbose=1,
     y_tensor_type=TensorType(int32, vector))
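
The nolearn net is trained above but never scored against the other models; a sketch of evaluating it on the same held-out split (nolearn's predict returns class labels directly for a classification net):

Y_cnn = net.predict(X_ts.reshape(-1,1,48,48).astype(np.float32))
print 'Accuracy of the model is ', accuracy_score(Y_test_number.astype(np.uint8), Y_cnn)
confusion_matrix(Y_test_number.astype(np.uint8), Y_cnn)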

In [25]:
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten,AveragePooling2D
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.optimizers import Adam,SGD,Adadelta,Adagrad
from keras import backend as K
from keras.models import load_model

Y_Keras = np_utils.to_categorical(Y_number, 7)
Y_Keras_test = np_utils.to_categorical(Y_test_number,7)

model = Sequential()
model.add(Convolution2D(32, 3, 3,border_mode='same',input_shape=( 1, 48, 48),activation='relu'))
model.add(Convolution2D(64, 3, 3,border_mode='same',activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Convolution2D(64, 3, 3,border_mode='same',activation='relu'))
model.add(Convolution2D(128, 3, 3,border_mode='same',activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Convolution2D(96, 3, 3,border_mode='same',activation='relu'))
model.add(Convolution2D(192, 3, 3,border_mode='same',activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Convolution2D(128, 3, 3,border_mode='same',activation='relu'))
model.add(Convolution2D(256, 3, 3,border_mode='same',activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Convolution2D(160, 3, 3,border_mode='same',activation='relu'))
model.add(Convolution2D(320, 3, 3,border_mode='same',activation='relu'))
model.add(AveragePooling2D(pool_size=(2, 2), strides=(1,1)))

model.add(Flatten())
model.add(Dense(1000,activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(1000,activation='relu'))
model.add(Dense(7,activation='softmax'))


adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)

model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
model.load_weights("my_model.h5")


model.fit(X_tr.reshape(-1,1,48,48), Y_Keras, nb_epoch=1, batch_size=500,verbose=1,shuffle=True,
          validation_data=(X_ts.reshape(-1,1,48,48),Y_Keras_test))
model.save('my_model.h5')

Y_ke=  model.predict_classes(X_test_normal.reshape(-1,1,48,48))

print 'Accuracy of the model is ',accuracy_score(Y_test_number.astype(int),Y_ke)
confusion_matrix(Y_test_number.astype(int),Y_ke)


Train on 28709 samples, validate on 7178 samples
Epoch 1/1
28709/28709 [==============================] - 76s - loss: 1.4814 - acc: 0.4057 - val_loss: 1.4275 - val_acc: 0.4341
7168/7178 [============================>.] - ETA: 0s
Accuracy of the model is  0.4341042073
Out[25]:
array([[  48,    0,   47,  143,  556,   62,  102],
       [   5,    0,    9,   12,   66,    3,   16],
       [  16,    0,  102,  182,  448,  181,   95],
       [   2,    0,   17, 1416,  269,   47,   23],
       [  30,    0,   58,  226,  780,   48,  105],
       [   2,    0,   85,   80,   82,  555,   27],
       [  34,    0,   66,  210,  675,   33,  215]])
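
Because the weights are reloaded from my_model.h5 at the top of this cell, the unconditional model.save can overwrite a good checkpoint with a worse one. A hedged alternative using Keras' ModelCheckpoint callback (the filename my_model_best.h5 is made up for illustration):

from keras.callbacks import ModelCheckpoint
checkpoint = ModelCheckpoint('my_model_best.h5', monitor='val_loss', save_best_only=True, verbose=1)
model.fit(X_tr.reshape(-1,1,48,48), Y_Keras, nb_epoch=10, batch_size=500, verbose=1, shuffle=True,
          validation_data=(X_ts.reshape(-1,1,48,48), Y_Keras_test), callbacks=[checkpoint])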

In [16]:
from keras.preprocessing.image import ImageDataGenerator
datagen = ImageDataGenerator(
    featurewise_center=False,  # set input mean to 0 over the dataset
    samplewise_center=False,  # set each sample mean to 0
    featurewise_std_normalization=False,  # divide inputs by std of the dataset
    samplewise_std_normalization=False,  # divide each input by its std
    zca_whitening=False,  # apply ZCA whitening
    rotation_range=40,  # randomly rotate images in the range (degrees, 0 to 180)
    width_shift_range=0.2,  # randomly shift images horizontally (fraction of total width)
    height_shift_range=0.2,  # randomly shift images vertically (fraction of total height)
    horizontal_flip=True,  # randomly flip images
    vertical_flip=False)  # randomly flip images

datagen.fit(X_tr.reshape(-1,1,48,48))
model.fit_generator(datagen.flow(X_tr.reshape(-1,1,48,48), Y_Keras,
                    batch_size=300),
                    samples_per_epoch=80000,
                    nb_epoch=2,
                    validation_data=(X_ts.reshape(-1,1,48,48),Y_Keras_test))
model.save('my_model.h5')
#40000 best was 6.15 at 150
#80000 best was 6.67 at 150
#180000 best was 6.68 at 50


Epoch 1/2
53009/80000 [==================>...........] - ETA: 68s - loss: 0.7263 - acc: 0.7341
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-16-2bb73ad61e05> in <module>()
     18                     samples_per_epoch=80000,
     19                     nb_epoch=2,
---> 20                     validation_data=(X_ts.reshape(-1,1,48,48),Y_Keras_test))
     21 model.save('my_model.h5')
     22 #40000 best was 6.15 at 150

/home/mckc/anaconda/lib/python2.7/site-packages/Keras-1.0.8-py2.7.egg/keras/models.pyc in fit_generator(self, generator, samples_per_epoch, nb_epoch, verbose, callbacks, validation_data, nb_val_samples, class_weight, max_q_size, nb_worker, pickle_safe, **kwargs)
    872                                         max_q_size=max_q_size,
    873                                         nb_worker=nb_worker,
--> 874                                         pickle_safe=pickle_safe)
    875 
    876     def evaluate_generator(self, generator, val_samples, max_q_size=10, nb_worker=1, pickle_safe=False, **kwargs):

/home/mckc/anaconda/lib/python2.7/site-packages/Keras-1.0.8-py2.7.egg/keras/engine/training.pyc in fit_generator(self, generator, samples_per_epoch, nb_epoch, verbose, callbacks, validation_data, nb_val_samples, class_weight, max_q_size, nb_worker, pickle_safe)
   1439                     outs = self.train_on_batch(x, y,
   1440                                                sample_weight=sample_weight,
-> 1441                                                class_weight=class_weight)
   1442                 except:
   1443                     _stop.set()

/home/mckc/anaconda/lib/python2.7/site-packages/Keras-1.0.8-py2.7.egg/keras/engine/training.pyc in train_on_batch(self, x, y, sample_weight, class_weight)
   1217             ins = x + y + sample_weights
   1218         self._make_train_function()
-> 1219         outputs = self.train_function(ins)
   1220         if len(outputs) == 1:
   1221             return outputs[0]

/home/mckc/anaconda/lib/python2.7/site-packages/Keras-1.0.8-py2.7.egg/keras/backend/theano_backend.pyc in __call__(self, inputs)
    715     def __call__(self, inputs):
    716         assert type(inputs) in {list, tuple}
--> 717         return self.function(*inputs)
    718 
    719 

/home/mckc/Downloads/Theano-master/theano/compile/function_module.pyc in __call__(self, *args, **kwargs)
    864         try:
    865             outputs =\
--> 866                 self.fn() if output_subset is None else\
    867                 self.fn(output_subset=output_subset)
    868         except Exception:

KeyboardInterrupt: 
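
Before committing to another long fit_generator run, the augmentation settings can be eyeballed by pulling a single augmented batch from the generator defined above (sketch):

aug_X, aug_Y = next(datagen.flow(X_tr.reshape(-1,1,48,48), Y_Keras, batch_size=9))
for i in range(9):
    plt.subplot(3, 3, i + 1)
    plt.imshow(aug_X[i, 0], cmap=cm.Greys_r)
    plt.axis('off')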

In [ ]:
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.normalization import local_response_normalization
from tflearn.layers.estimator import regression

network = input_data(shape=[None, 48, 48, 1], name='input')
network = conv_2d(network, 32, 3, activation='relu', regularizer="L2")
network = max_pool_2d(network, 2)
network = local_response_normalization(network)
network = conv_2d(network, 64, 3, activation='relu', regularizer="L2")
network = max_pool_2d(network, 2)
network = local_response_normalization(network)
network = fully_connected(network, 128, activation='tanh')
network = dropout(network, 0.8)
network = fully_connected(network, 256, activation='tanh')
network = dropout(network, 0.8)
network = fully_connected(network, 7, activation='softmax')
network = regression(network, optimizer='adam', learning_rate=0.01,
                     loss='categorical_crossentropy', name='target')

# Training
model = tflearn.DNN(network, tensorboard_verbose=0)
# note: input_data above is NHWC (None, 48, 48, 1), and categorical_crossentropy needs
# one-hot targets, so reuse the reshaped images and the Y_Keras encodings from earlier cells
model.fit({'input': X_tr.reshape(-1,48,48,1)}, {'target': Y_Keras}, n_epoch=20,
          validation_set=({'input': X_ts.reshape(-1,48,48,1)}, {'target': Y_Keras_test}),
          snapshot_step=100, show_metric=True, run_id='convnet_fer2013')
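
Once training finishes, the tflearn model can be scored on the held-out split with the same reshaped inputs and one-hot targets (sketch; DNN.evaluate returns the accuracy as a list):

print model.evaluate(X_ts.reshape(-1,48,48,1), Y_Keras_test)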

In [42]:
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.layers import SimpleRNN
from keras.initializations import normal, identity
from keras.optimizers import RMSprop, Adam
from keras.utils import np_utils

max_features = 20000
maxlen = 80  # cut texts after this number of words (among top max_features most common words)
batch_size = 32

model = Sequential()
model.add(SimpleRNN(output_dim=100,
                    init=lambda shape, name: normal(shape, scale=0.001, name=name),
                    inner_init=lambda shape, name: identity(shape, scale=1.0, name=name),
                    activation='relu',
                    input_shape=(X_tr.reshape(-1,48,48)).shape[1:]))
model.add(Dense(7))
model.add(Activation('softmax'))

adam = Adam(lr=0.000001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)

model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
#model.load_weights("model.h5")

model.fit(X_tr.reshape(-1,48,48), Y_Keras, nb_epoch=40, batch_size=200,verbose=1,shuffle=True,
          validation_data=(X_ts.reshape(-1,48,48),Y_Keras_test))

Y_ke= model.predict(X_ts.reshape(-1,48,48))

print 'Accuracy of the model is ',accuracy_score(Y_test_number,Y_ke)
confusion_matrix(Y_test_number,Y_ke)

scores = model.evaluate(X_test_normal, Y_test_number, verbose=0)
print('IRNN test score:', scores[0])
print('IRNN test accuracy:', scores[1])


Train on 28709 samples, validate on 7178 samples
Epoch 1/40
28709/28709 [==============================] - 2s - loss: 7.6460 - acc: 0.1690 - val_loss: 5.1133 - val_acc: 0.1722
Epoch 2/40
28709/28709 [==============================] - 2s - loss: 3.9249 - acc: 0.1931 - val_loss: 3.4450 - val_acc: 0.2021
Epoch 3/40
28709/28709 [==============================] - 2s - loss: 3.2373 - acc: 0.2000 - val_loss: 3.0967 - val_acc: 0.2027
Epoch 4/40
28709/28709 [==============================] - 2s - loss: 2.9779 - acc: 0.2047 - val_loss: 2.9192 - val_acc: 0.1995
Epoch 5/40
28709/28709 [==============================] - 2s - loss: 2.8003 - acc: 0.2098 - val_loss: 2.7254 - val_acc: 0.2165
Epoch 6/40
28709/28709 [==============================] - 2s - loss: 2.6866 - acc: 0.2125 - val_loss: 2.6156 - val_acc: 0.2106
Epoch 7/40
28709/28709 [==============================] - 2s - loss: 2.6085 - acc: 0.2136 - val_loss: 2.5814 - val_acc: 0.1925
Epoch 8/40
28709/28709 [==============================] - 2s - loss: 2.5411 - acc: 0.2164 - val_loss: 2.5038 - val_acc: 0.2154
Epoch 9/40
28709/28709 [==============================] - 2s - loss: 2.4765 - acc: 0.2171 - val_loss: 2.4444 - val_acc: 0.2232
Epoch 10/40
28709/28709 [==============================] - 2s - loss: 2.4410 - acc: 0.2224 - val_loss: 2.4290 - val_acc: 0.2251
Epoch 11/40
28709/28709 [==============================] - 2s - loss: 2.4011 - acc: 0.2248 - val_loss: 2.3895 - val_acc: 0.2318
Epoch 12/40
28709/28709 [==============================] - 2s - loss: 2.3661 - acc: 0.2262 - val_loss: 2.3564 - val_acc: 0.2127
Epoch 13/40
28709/28709 [==============================] - 2s - loss: 2.3436 - acc: 0.2270 - val_loss: 2.3349 - val_acc: 0.2288
Epoch 14/40
28709/28709 [==============================] - 2s - loss: 2.3137 - acc: 0.2315 - val_loss: 2.3180 - val_acc: 0.2462
Epoch 15/40
28709/28709 [==============================] - 2s - loss: 2.2903 - acc: 0.2335 - val_loss: 2.2911 - val_acc: 0.2420
Epoch 16/40
28709/28709 [==============================] - 2s - loss: 2.2729 - acc: 0.2320 - val_loss: 2.2692 - val_acc: 0.2173
Epoch 17/40
28709/28709 [==============================] - 2s - loss: 2.2626 - acc: 0.2334 - val_loss: 2.2433 - val_acc: 0.2474
Epoch 18/40
28709/28709 [==============================] - 2s - loss: 2.2372 - acc: 0.2374 - val_loss: 2.2387 - val_acc: 0.2488
Epoch 19/40
28709/28709 [==============================] - 2s - loss: 2.2215 - acc: 0.2395 - val_loss: 2.2192 - val_acc: 0.2416
Epoch 20/40
28709/28709 [==============================] - 2s - loss: 2.2021 - acc: 0.2418 - val_loss: 2.2013 - val_acc: 0.2412
Epoch 21/40
28709/28709 [==============================] - 2s - loss: 2.1936 - acc: 0.2406 - val_loss: 2.2069 - val_acc: 0.2194
Epoch 22/40
28709/28709 [==============================] - 2s - loss: 2.1836 - acc: 0.2414 - val_loss: 2.1728 - val_acc: 0.2466
Epoch 23/40
28709/28709 [==============================] - 2s - loss: 2.1814 - acc: 0.2435 - val_loss: 2.2242 - val_acc: 0.2297
Epoch 24/40
28709/28709 [==============================] - 2s - loss: 2.1695 - acc: 0.2435 - val_loss: 2.2372 - val_acc: 0.2697
Epoch 25/40
28709/28709 [==============================] - 2s - loss: 2.1531 - acc: 0.2482 - val_loss: 2.1667 - val_acc: 0.2545
Epoch 26/40
28709/28709 [==============================] - 2s - loss: 2.1475 - acc: 0.2478 - val_loss: 2.1775 - val_acc: 0.2336
Epoch 27/40
28709/28709 [==============================] - 2s - loss: 2.1385 - acc: 0.2522 - val_loss: 2.1802 - val_acc: 0.2389
Epoch 28/40
28709/28709 [==============================] - 2s - loss: 2.1261 - acc: 0.2541 - val_loss: 2.1300 - val_acc: 0.2403
Epoch 29/40
28709/28709 [==============================] - 2s - loss: 2.1237 - acc: 0.2510 - val_loss: 2.1387 - val_acc: 0.2689
Epoch 30/40
28709/28709 [==============================] - 2s - loss: 2.1147 - acc: 0.2516 - val_loss: 2.1087 - val_acc: 0.2494
Epoch 31/40
28709/28709 [==============================] - 2s - loss: 2.1067 - acc: 0.2551 - val_loss: 2.1380 - val_acc: 0.2715
Epoch 32/40
28709/28709 [==============================] - 2s - loss: 2.1053 - acc: 0.2546 - val_loss: 2.0981 - val_acc: 0.2664
Epoch 33/40
28709/28709 [==============================] - 2s - loss: 2.0905 - acc: 0.2589 - val_loss: 2.1118 - val_acc: 0.2389
Epoch 34/40
28709/28709 [==============================] - 2s - loss: 2.0866 - acc: 0.2587 - val_loss: 2.0896 - val_acc: 0.2744
Epoch 35/40
28709/28709 [==============================] - 2s - loss: 2.0827 - acc: 0.2593 - val_loss: 2.0856 - val_acc: 0.2510
Epoch 36/40
28709/28709 [==============================] - 2s - loss: 2.0745 - acc: 0.2588 - val_loss: 2.0858 - val_acc: 0.2458
Epoch 37/40
28709/28709 [==============================] - 2s - loss: 2.0795 - acc: 0.2569 - val_loss: 2.1154 - val_acc: 0.2687
Epoch 38/40
28709/28709 [==============================] - 2s - loss: 2.0685 - acc: 0.2611 - val_loss: 2.0657 - val_acc: 0.2671
Epoch 39/40
28709/28709 [==============================] - 2s - loss: 2.0620 - acc: 0.2594 - val_loss: 2.0812 - val_acc: 0.2469
Epoch 40/40
28709/28709 [==============================] - 2s - loss: 2.0625 - acc: 0.2614 - val_loss: 2.0967 - val_acc: 0.2735
Accuracy of the model is 
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-42-883590d05502> in <module>()
     29 Y_ke= model.predict(X_ts.reshape(-1,48,48))
     30 
---> 31 print 'Accuracy of the model is ',accuracy_score(Y_test_number,Y_ke)
     32 confusion_matrix(Y_test_number,Y_ke)
     33 

/home/mckc/anaconda/lib/python2.7/site-packages/sklearn/metrics/classification.pyc in accuracy_score(y_true, y_pred, normalize, sample_weight)
    170 
    171     # Compute accuracy for each possible representation
--> 172     y_type, y_true, y_pred = _check_targets(y_true, y_pred)
    173     if y_type.startswith('multilabel'):
    174         differing_labels = count_nonzero(y_true - y_pred, axis=1)

/home/mckc/anaconda/lib/python2.7/site-packages/sklearn/metrics/classification.pyc in _check_targets(y_true, y_pred)
     80     if len(y_type) > 1:
     81         raise ValueError("Can't handle mix of {0} and {1}"
---> 82                          "".format(type_true, type_pred))
     83 
     84     # We can't have more than one value on y_type => The set is no more needed

ValueError: Can't handle mix of multiclass and continuous-multioutput

In [47]:
unique, counts = np.unique(Y_ke.astype(int), return_counts=True)
dict(zip(unique, counts))


Out[47]:
{0: 50246}
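
The 50,246 zeros are simply the 7,178 x 7 probability matrix flattened and truncated to zero by astype(int), which is also why accuracy_score raised the multiclass/continuous error above. A sketch of the usual fix, taking the argmax per row before scoring:

Y_ke_labels = np.argmax(Y_ke, axis=1)
print 'Accuracy of the model is ', accuracy_score(Y_test_number.astype(int), Y_ke_labels)
confusion_matrix(Y_test_number.astype(int), Y_ke_labels)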

In [ ]: