In [2]:
from keras.layers import Convolution2D, MaxPooling2D, Flatten
from keras.layers.core import Dense, Dropout, Activation
from sklearn.cross_validation import train_test_split
from keras.callbacks import History, Callback
from sklearn.metrics import classification_report
from sklearn.cross_validation import StratifiedKFold, cross_val_score
from keras.models import model_from_config
from keras.models import Sequential
from keras.optimizers import SGD
from keras.utils import np_utils
import matplotlib.pyplot as plt  # used for the training curves below
import numpy as np               # used throughout but missing from the original imports
import theano
import keras
import pickle


Using Theano backend.
Using gpu device 0: GeForce 940M (CNMeM is disabled, CuDNN not available)
C:\Users\Back_jud\Anaconda2\lib\site-packages\theano\tensor\signal\downsample.py:6: UserWarning:
downsample module has been moved to the theano.tensor.signal.pool module.

In [3]:
def cat2lab(cat):
    '''Convert a binary one-hot matrix back to integer labels.
    
    #Args:
    
    cat : array of one-hot rows, e.g. the output of np_utils.to_categorical
    '''
    return np.array([0 if s[0] else 1 for s in cat])

1. Formatting

1-1. Load data from pickle


In [4]:
lab_pickle = open('d://labels_new.p', 'rb')
img_pickle = open('d://images_new.p', 'rb')
labels = np.array(pickle.load(lab_pickle))
imgs = np.array(pickle.load(img_pickle))

1-2. Normalize images and categorize labels


In [5]:
reg_imgs = imgs / 255.0  # float division: rescale pixel values to [0, 1]
cat_labels = np_utils.to_categorical(labels, nb_classes=2)
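
As a quick sanity check, to_categorical maps label k to a one-hot row and cat2lab inverts it; a minimal sketch, assuming labels holds 0/1 integers:

In [ ]:
# sketch: to_categorical and cat2lab are inverses for binary labels
demo = np_utils.to_categorical(np.array([0, 1, 1, 0]), nb_classes=2)
print(demo)           # one-hot rows: label 0 -> [1, 0], label 1 -> [0, 1]
print(cat2lab(demo))  # [0 1 1 0]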

1-3. Reshape images to 2D and 3D to match each classifier's input shape


In [6]:
reg_imgs_2d = []
for img in reg_imgs:
    reg_imgs_2d.append(np.reshape(img, (50, 50)))
reg_imgs_2d = np.array(reg_imgs_2d)

In [7]:
# reshape to (1, 50, 50) for the CNN input
reg_imgs_3d = []
for img in reg_imgs:
    reg_imgs_3d.append(np.reshape(img, (1, 50, 50)))
reg_imgs_3d = np.array(reg_imgs_3d)
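
The two loops above can be collapsed into single vectorized reshapes; a sketch that gives the same arrays, assuming reg_imgs has shape (n, 2500):

In [ ]:
# vectorized alternative to the loops above (same result)
reg_imgs_2d = reg_imgs.reshape(-1, 50, 50)
reg_imgs_3d = reg_imgs.reshape(-1, 1, 50, 50)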

1-4. Split the data for each input shape


In [8]:
x_tr1, x_te1, y_tr1, y_te1 = train_test_split(reg_imgs, cat_labels, test_size=0.2, random_state=123)
x_tr2, x_te2, y_tr2, y_te2 = train_test_split(reg_imgs_2d, cat_labels, test_size=0.2, random_state=123)
x_tr3, x_te3, y_tr3, y_te3 = train_test_split(reg_imgs_3d, cat_labels, test_size=0.2, random_state=123)
x_trn1, x_ten1, y_trn1, y_ten1 = train_test_split(reg_imgs, labels, test_size=0.2, random_state=123)
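
Because each call uses the same random_state on arrays of equal length, the four splits stay row-aligned; a quick sanity check (sketch):

In [ ]:
# the shared random_state keeps all splits aligned sample-for-sample
assert np.array_equal(x_te1, x_te2.reshape(len(x_te2), -1))
assert np.array_equal(y_te1, y_te3)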

2. Modeling

2.1. Simple neural network

2.1.1. Specification of model


In [9]:
model1 = Sequential()
model1.add(Dense(2500, input_dim=2500, init='uniform'))
model1.add(Activation('relu'))
model1.add(Dense(2, activation='softmax'))
model1.compile(loss='categorical_crossentropy',
               optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.1, nesterov=True))

In [10]:
%time hist1 = model1.fit(x_tr1, y_tr1, nb_epoch=300, validation_split=0.2, batch_size=50, show_accuracy=True, verbose=0)


Wall time: 24.5 s

In [11]:
model1.summary()


--------------------------------------------------------------------------------
Initial input shape: (None, 2500)
--------------------------------------------------------------------------------
Layer (name)                  Output Shape                  Param #             
--------------------------------------------------------------------------------
Dense (dense)                 (None, 2500)                  6252500             
Activation (activation)       (None, 2500)                  0                   
Dense (dense)                 (None, 2)                     5002                
--------------------------------------------------------------------------------
Total params: 6257502
--------------------------------------------------------------------------------

2.1.2. Graph of accuracy and loss during training


In [12]:
plt.plot(hist1.history['acc'], label='Accuracy on training set')
plt.plot(hist1.history['loss'], label='Loss on training set')
plt.plot(hist1.history['val_acc'], '--', label='Accuracy on validation set')
plt.plot(hist1.history['val_loss'], '--', label='Loss on validation set')
plt.grid(False)  # note: plt.grid('off') would actually turn the grid on
plt.legend()


Out[12]:
<matplotlib.legend.Legend at 0x2d03f7b8>

2.1.3. Predict and evaluate using test set


In [13]:
model1.evaluate(x_te1,y_te1,batch_size=50,show_accuracy=True)


29/29 [==============================] - 0s
Out[13]:
[0.7418333888053894, 0.75862068965517238]

In [14]:
y_pred1 = model1.predict_classes(x_te1,20)
y_pred1


29/29 [==============================] - 0s     
Out[14]:
array([1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1], dtype=int64)

In [15]:
y_ten1 = cat2lab(y_te1)  # recover integer labels; identical to the y_ten1 from In [8]
y_ten1


Out[15]:
array([0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1])

In [16]:
print(classification_report(y_ten1,y_pred1))


             precision    recall  f1-score   support

          0       0.67      0.73      0.70        11
          1       0.82      0.78      0.80        18

avg / total       0.76      0.76      0.76        29
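
A confusion matrix complements the report above by showing where the errors land; a minimal sketch using sklearn.metrics.confusion_matrix:

In [ ]:
from sklearn.metrics import confusion_matrix
# rows are true classes, columns are predicted classes
print(confusion_matrix(y_ten1, y_pred1))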


2.2. Convolutional neural network

2.2.1. Specification of model


In [17]:
model2 = Sequential()
model2.add(Convolution2D(10, 10, 10, border_mode='same', input_shape=(1, 50, 50)))
model2.add(Activation('relu'))
# model2.add(Convolution2D(50, 5, 5, init='uniform'))
# model2.add(Activation('relu'))
model2.add(MaxPooling2D(pool_size=(2, 2)))
model2.add(Dropout(0.5))

model2.add(Convolution2D(10, 10, 10, init='uniform', border_mode='same'))
model2.add(Activation('relu'))
# model2.add(Convolution2D(100, 5, 5, init='uniform'))
# model2.add(Activation('relu'))
model2.add(MaxPooling2D(pool_size=(2, 2)))
model2.add(Dropout(0.3))

model2.add(Flatten())
model2.add(Dense(1250, init='uniform'))
model2.add(Activation('relu'))
model2.add(Dense(2, activation='softmax'))
model2.compile(loss='categorical_crossentropy',
               optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.5, nesterov=True))
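
For reference, the Flatten size in model2.summary() below follows from the layer arithmetic: border_mode='same' preserves the 50x50 input, and each 2x2 pooling floors the spatial size, so 50 -> 25 -> 12 and Flatten sees 10 x 12 x 12 = 1440 units. The same calculation as a sketch:

In [ ]:
# spatial size after two 'same' convolutions and two 2x2 poolings
size = 50 // 2 // 2      # 50 -> 25 -> 12 (floor division)
print(10 * size * size)  # 1440, the Flatten output in model2.summary()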

In [18]:
%time hist2 = model2.fit(x_tr3, y_tr3, nb_epoch=300, batch_size=50, validation_split=0.2, show_accuracy=True, shuffle=True, verbose=1)


Train on 90 samples, validate on 23 samples
Epoch 1/300
90/90 [==============================] - 0s - loss: 0.8102 - acc: 0.4333 - val_loss: 0.6507 - val_acc: 0.6087
Epoch 2/300
90/90 [==============================] - 0s - loss: 0.9208 - acc: 0.5222 - val_loss: 0.6829 - val_acc: 0.6087
Epoch 3/300
90/90 [==============================] - 0s - loss: 0.6768 - acc: 0.5333 - val_loss: 0.6918 - val_acc: 0.4783
Epoch 4/300
90/90 [==============================] - 0s - loss: 0.6732 - acc: 0.5778 - val_loss: 0.6891 - val_acc: 0.5217
Epoch 5/300
90/90 [==============================] - 0s - loss: 0.6791 - acc: 0.6111 - val_loss: 0.6808 - val_acc: 0.6522
Epoch 6/300
90/90 [==============================] - 0s - loss: 0.6546 - acc: 0.6333 - val_loss: 0.6814 - val_acc: 0.6087
Epoch 7/300
90/90 [==============================] - 0s - loss: 0.6625 - acc: 0.6222 - val_loss: 0.6674 - val_acc: 0.8261
Epoch 8/300
90/90 [==============================] - 0s - loss: 0.6400 - acc: 0.6444 - val_loss: 0.6542 - val_acc: 0.7391
Epoch 9/300
90/90 [==============================] - 0s - loss: 0.6353 - acc: 0.6667 - val_loss: 0.6450 - val_acc: 0.8696
Epoch 10/300
90/90 [==============================] - 0s - loss: 0.6682 - acc: 0.6111 - val_loss: 0.6341 - val_acc: 0.6957
Epoch 11/300
90/90 [==============================] - 0s - loss: 0.5949 - acc: 0.7000 - val_loss: 0.6280 - val_acc: 0.7391
Epoch 12/300
90/90 [==============================] - 0s - loss: 0.6114 - acc: 0.6333 - val_loss: 0.5991 - val_acc: 0.7391
Epoch 13/300
90/90 [==============================] - 0s - loss: 0.6160 - acc: 0.6667 - val_loss: 0.5765 - val_acc: 0.6957
Epoch 14/300
90/90 [==============================] - 0s - loss: 0.5813 - acc: 0.7111 - val_loss: 0.5745 - val_acc: 0.7391
Epoch 15/300
90/90 [==============================] - 0s - loss: 0.7967 - acc: 0.4667 - val_loss: 0.6392 - val_acc: 0.8261
Epoch 16/300
90/90 [==============================] - 0s - loss: 0.5807 - acc: 0.7778 - val_loss: 0.6165 - val_acc: 0.9565
Epoch 17/300
90/90 [==============================] - 0s - loss: 0.5812 - acc: 0.7333 - val_loss: 0.5900 - val_acc: 0.8696
Epoch 18/300
90/90 [==============================] - 0s - loss: 0.5462 - acc: 0.7333 - val_loss: 0.5523 - val_acc: 0.7391
Epoch 19/300
90/90 [==============================] - 0s - loss: 0.5343 - acc: 0.7444 - val_loss: 0.5610 - val_acc: 0.6087
Epoch 20/300
90/90 [==============================] - 0s - loss: 0.5274 - acc: 0.7222 - val_loss: 0.4996 - val_acc: 0.8261
Epoch 21/300
90/90 [==============================] - 0s - loss: 0.4665 - acc: 0.8111 - val_loss: 0.4847 - val_acc: 0.7391
Epoch 22/300
90/90 [==============================] - 0s - loss: 0.6152 - acc: 0.6778 - val_loss: 0.6108 - val_acc: 0.6087
Epoch 23/300
90/90 [==============================] - 0s - loss: 0.7740 - acc: 0.5333 - val_loss: 0.6219 - val_acc: 0.8696
Epoch 24/300
90/90 [==============================] - 0s - loss: 0.5786 - acc: 0.7444 - val_loss: 0.5986 - val_acc: 0.9565
Epoch 25/300
90/90 [==============================] - 0s - loss: 0.5349 - acc: 0.8000 - val_loss: 0.5682 - val_acc: 0.9130
Epoch 26/300
90/90 [==============================] - 0s - loss: 0.5210 - acc: 0.7222 - val_loss: 0.4868 - val_acc: 0.8261
Epoch 27/300
90/90 [==============================] - 0s - loss: 0.5014 - acc: 0.7889 - val_loss: 0.4593 - val_acc: 0.8261
Epoch 28/300
90/90 [==============================] - 0s - loss: 0.4683 - acc: 0.8000 - val_loss: 0.4231 - val_acc: 0.7826
Epoch 29/300
90/90 [==============================] - 0s - loss: 0.6146 - acc: 0.6556 - val_loss: 0.5837 - val_acc: 0.6087
Epoch 30/300
90/90 [==============================] - 0s - loss: 0.6889 - acc: 0.6000 - val_loss: 0.5985 - val_acc: 0.8696
Epoch 31/300
90/90 [==============================] - 0s - loss: 0.4928 - acc: 0.8333 - val_loss: 0.5399 - val_acc: 0.8696
Epoch 32/300
90/90 [==============================] - 0s - loss: 0.4378 - acc: 0.8667 - val_loss: 0.4680 - val_acc: 0.8696
Epoch 33/300
90/90 [==============================] - 0s - loss: 0.3503 - acc: 0.9222 - val_loss: 0.3947 - val_acc: 0.8261
Epoch 34/300
90/90 [==============================] - 0s - loss: 0.3691 - acc: 0.8667 - val_loss: 0.3474 - val_acc: 0.8696
Epoch 35/300
90/90 [==============================] - 0s - loss: 0.3865 - acc: 0.8000 - val_loss: 0.5034 - val_acc: 0.6957
Epoch 36/300
90/90 [==============================] - 0s - loss: 0.4883 - acc: 0.7778 - val_loss: 0.3427 - val_acc: 0.8696
Epoch 37/300
90/90 [==============================] - 0s - loss: 0.2891 - acc: 0.9000 - val_loss: 0.3402 - val_acc: 0.8261
Epoch 38/300
90/90 [==============================] - 0s - loss: 0.3145 - acc: 0.8667 - val_loss: 0.3090 - val_acc: 0.8261
Epoch 39/300
90/90 [==============================] - 0s - loss: 0.2656 - acc: 0.9111 - val_loss: 0.2868 - val_acc: 0.8261
Epoch 40/300
90/90 [==============================] - 0s - loss: 0.2644 - acc: 0.9000 - val_loss: 0.2462 - val_acc: 1.0000
Epoch 41/300
90/90 [==============================] - 0s - loss: 0.2536 - acc: 0.8667 - val_loss: 0.2966 - val_acc: 0.8261
Epoch 42/300
90/90 [==============================] - 0s - loss: 0.2141 - acc: 0.9000 - val_loss: 0.2758 - val_acc: 0.9565
Epoch 43/300
90/90 [==============================] - 0s - loss: 0.2928 - acc: 0.8667 - val_loss: 0.2236 - val_acc: 0.9565
Epoch 44/300
90/90 [==============================] - 0s - loss: 0.3633 - acc: 0.8111 - val_loss: 0.2822 - val_acc: 0.9565
Epoch 45/300
90/90 [==============================] - 0s - loss: 0.4342 - acc: 0.7667 - val_loss: 0.2965 - val_acc: 0.9565
Epoch 46/300
90/90 [==============================] - 0s - loss: 0.2483 - acc: 0.9111 - val_loss: 0.2806 - val_acc: 0.9130
Epoch 47/300
90/90 [==============================] - 0s - loss: 0.2101 - acc: 0.9556 - val_loss: 0.2180 - val_acc: 0.9565
Epoch 48/300
90/90 [==============================] - 0s - loss: 0.1801 - acc: 0.9333 - val_loss: 0.2048 - val_acc: 0.9565
Epoch 49/300
90/90 [==============================] - 0s - loss: 0.2032 - acc: 0.9333 - val_loss: 0.2060 - val_acc: 0.9130
Epoch 50/300
90/90 [==============================] - 0s - loss: 0.1925 - acc: 0.9000 - val_loss: 0.1608 - val_acc: 0.9565
Epoch 51/300
90/90 [==============================] - 0s - loss: 0.1583 - acc: 0.9222 - val_loss: 0.1594 - val_acc: 0.9565
Epoch 52/300
90/90 [==============================] - 0s - loss: 0.1507 - acc: 0.9556 - val_loss: 0.1688 - val_acc: 0.9565
Epoch 53/300
90/90 [==============================] - 0s - loss: 0.1528 - acc: 0.9667 - val_loss: 0.1381 - val_acc: 0.9565
Epoch 54/300
90/90 [==============================] - 0s - loss: 0.1685 - acc: 0.9556 - val_loss: 0.1300 - val_acc: 1.0000
Epoch 55/300
90/90 [==============================] - 0s - loss: 0.1925 - acc: 0.9222 - val_loss: 0.1362 - val_acc: 1.0000
Epoch 56/300
90/90 [==============================] - 0s - loss: 0.1454 - acc: 0.9222 - val_loss: 0.2532 - val_acc: 0.8261
Epoch 57/300
90/90 [==============================] - 0s - loss: 0.1935 - acc: 0.9111 - val_loss: 0.1565 - val_acc: 0.9565
Epoch 58/300
90/90 [==============================] - 0s - loss: 0.1917 - acc: 0.9222 - val_loss: 0.1399 - val_acc: 1.0000
Epoch 59/300
90/90 [==============================] - 0s - loss: 0.1885 - acc: 0.8889 - val_loss: 0.1362 - val_acc: 0.9565
Epoch 60/300
90/90 [==============================] - 0s - loss: 0.1328 - acc: 0.9667 - val_loss: 0.1326 - val_acc: 1.0000
Epoch 61/300
90/90 [==============================] - 0s - loss: 0.0956 - acc: 0.9889 - val_loss: 0.1226 - val_acc: 1.0000
Epoch 62/300
90/90 [==============================] - 0s - loss: 0.1435 - acc: 0.9556 - val_loss: 0.1417 - val_acc: 0.9565
Epoch 63/300
90/90 [==============================] - 0s - loss: 0.2216 - acc: 0.9111 - val_loss: 0.1216 - val_acc: 1.0000
Epoch 64/300
90/90 [==============================] - 0s - loss: 0.1181 - acc: 0.9667 - val_loss: 0.1399 - val_acc: 0.9565
Epoch 65/300
90/90 [==============================] - 0s - loss: 0.0899 - acc: 1.0000 - val_loss: 0.1145 - val_acc: 1.0000
Epoch 66/300
90/90 [==============================] - 0s - loss: 0.1016 - acc: 0.9778 - val_loss: 0.1098 - val_acc: 1.0000
Epoch 67/300
90/90 [==============================] - 0s - loss: 0.1125 - acc: 0.9778 - val_loss: 0.1251 - val_acc: 1.0000
Epoch 68/300
90/90 [==============================] - 0s - loss: 0.1299 - acc: 0.9667 - val_loss: 0.1224 - val_acc: 1.0000
Epoch 69/300
90/90 [==============================] - 0s - loss: 0.0745 - acc: 0.9778 - val_loss: 0.1003 - val_acc: 1.0000
Epoch 70/300
90/90 [==============================] - 0s - loss: 0.0738 - acc: 0.9889 - val_loss: 0.1392 - val_acc: 0.9130
Epoch 71/300
90/90 [==============================] - 0s - loss: 0.1204 - acc: 0.9667 - val_loss: 0.1976 - val_acc: 0.8696
Epoch 72/300
90/90 [==============================] - 0s - loss: 0.1040 - acc: 0.9444 - val_loss: 0.1260 - val_acc: 0.9565
Epoch 73/300
90/90 [==============================] - 0s - loss: 0.1400 - acc: 0.9444 - val_loss: 0.1023 - val_acc: 1.0000
Epoch 74/300
90/90 [==============================] - 0s - loss: 0.0890 - acc: 0.9778 - val_loss: 0.1266 - val_acc: 0.9130
Epoch 75/300
90/90 [==============================] - 0s - loss: 0.0573 - acc: 1.0000 - val_loss: 0.1054 - val_acc: 1.0000
Epoch 76/300
90/90 [==============================] - 0s - loss: 0.1080 - acc: 0.9667 - val_loss: 0.2425 - val_acc: 0.8261
Epoch 77/300
90/90 [==============================] - 0s - loss: 0.0793 - acc: 0.9778 - val_loss: 0.1138 - val_acc: 1.0000
Epoch 78/300
90/90 [==============================] - 0s - loss: 0.0734 - acc: 0.9778 - val_loss: 0.1596 - val_acc: 0.8696
Epoch 79/300
90/90 [==============================] - 0s - loss: 0.0726 - acc: 0.9778 - val_loss: 0.1188 - val_acc: 0.9565
Epoch 80/300
90/90 [==============================] - 0s - loss: 0.0709 - acc: 0.9889 - val_loss: 0.0998 - val_acc: 1.0000
Epoch 81/300
90/90 [==============================] - 0s - loss: 0.0690 - acc: 0.9889 - val_loss: 0.1398 - val_acc: 0.9130
Epoch 82/300
90/90 [==============================] - 0s - loss: 0.1033 - acc: 0.9667 - val_loss: 0.1312 - val_acc: 0.9130
Epoch 83/300
90/90 [==============================] - 0s - loss: 0.1168 - acc: 0.9444 - val_loss: 0.1128 - val_acc: 1.0000
Epoch 84/300
90/90 [==============================] - 0s - loss: 0.0495 - acc: 0.9889 - val_loss: 0.1405 - val_acc: 0.9565
Epoch 85/300
90/90 [==============================] - 0s - loss: 0.0828 - acc: 0.9667 - val_loss: 0.0985 - val_acc: 0.9565
Epoch 86/300
90/90 [==============================] - 0s - loss: 0.0579 - acc: 0.9889 - val_loss: 0.1520 - val_acc: 0.9130
Epoch 87/300
90/90 [==============================] - 0s - loss: 0.0846 - acc: 0.9556 - val_loss: 0.1502 - val_acc: 0.9565
Epoch 88/300
90/90 [==============================] - 0s - loss: 0.2348 - acc: 0.8778 - val_loss: 0.1528 - val_acc: 0.9565
Epoch 89/300
90/90 [==============================] - 0s - loss: 0.1792 - acc: 0.9111 - val_loss: 0.1127 - val_acc: 0.9565
Epoch 90/300
90/90 [==============================] - 0s - loss: 0.0641 - acc: 0.9889 - val_loss: 0.1203 - val_acc: 1.0000
Epoch 91/300
90/90 [==============================] - 0s - loss: 0.0859 - acc: 0.9667 - val_loss: 0.1137 - val_acc: 1.0000
Epoch 92/300
90/90 [==============================] - 0s - loss: 0.0518 - acc: 0.9889 - val_loss: 0.1497 - val_acc: 0.9130
Epoch 93/300
90/90 [==============================] - 0s - loss: 0.0782 - acc: 0.9778 - val_loss: 0.1067 - val_acc: 0.9565
Epoch 94/300
90/90 [==============================] - 0s - loss: 0.0727 - acc: 0.9778 - val_loss: 0.1145 - val_acc: 0.9565
Epoch 95/300
90/90 [==============================] - 0s - loss: 0.0588 - acc: 1.0000 - val_loss: 0.1064 - val_acc: 1.0000
Epoch 96/300
90/90 [==============================] - 0s - loss: 0.0586 - acc: 0.9889 - val_loss: 0.1053 - val_acc: 0.9565
Epoch 97/300
90/90 [==============================] - 0s - loss: 0.0622 - acc: 0.9889 - val_loss: 0.1248 - val_acc: 0.9565
Epoch 98/300
90/90 [==============================] - 0s - loss: 0.0399 - acc: 1.0000 - val_loss: 0.0968 - val_acc: 1.0000
Epoch 99/300
90/90 [==============================] - 0s - loss: 0.0496 - acc: 0.9889 - val_loss: 0.1098 - val_acc: 1.0000
Epoch 100/300
90/90 [==============================] - 0s - loss: 0.0637 - acc: 0.9556 - val_loss: 0.1103 - val_acc: 1.0000
Epoch 101/300
90/90 [==============================] - 0s - loss: 0.0417 - acc: 0.9889 - val_loss: 0.1154 - val_acc: 0.9565
Epoch 102/300
90/90 [==============================] - 0s - loss: 0.0601 - acc: 0.9889 - val_loss: 0.1077 - val_acc: 0.9565
Epoch 103/300
90/90 [==============================] - 0s - loss: 0.0354 - acc: 1.0000 - val_loss: 0.1042 - val_acc: 0.9565
Epoch 104/300
90/90 [==============================] - 0s - loss: 0.0406 - acc: 1.0000 - val_loss: 0.1080 - val_acc: 0.9565
Epoch 105/300
90/90 [==============================] - 0s - loss: 0.0287 - acc: 1.0000 - val_loss: 0.1234 - val_acc: 0.9565
Epoch 106/300
90/90 [==============================] - 0s - loss: 0.0747 - acc: 0.9667 - val_loss: 0.2351 - val_acc: 0.8261
Epoch 107/300
90/90 [==============================] - 0s - loss: 0.0612 - acc: 0.9778 - val_loss: 0.0971 - val_acc: 0.9565
Epoch 108/300
90/90 [==============================] - 0s - loss: 0.0500 - acc: 0.9889 - val_loss: 0.1458 - val_acc: 0.9565
Epoch 109/300
90/90 [==============================] - 0s - loss: 0.0463 - acc: 0.9889 - val_loss: 0.1215 - val_acc: 0.9565
Epoch 110/300
90/90 [==============================] - 0s - loss: 0.0441 - acc: 0.9778 - val_loss: 0.1091 - val_acc: 0.9565
Epoch 111/300
90/90 [==============================] - 0s - loss: 0.0263 - acc: 1.0000 - val_loss: 0.0922 - val_acc: 0.9565
Epoch 112/300
90/90 [==============================] - 0s - loss: 0.0552 - acc: 0.9778 - val_loss: 0.1264 - val_acc: 0.9565
Epoch 113/300
90/90 [==============================] - 0s - loss: 0.0409 - acc: 0.9778 - val_loss: 0.0973 - val_acc: 0.9565
Epoch 114/300
90/90 [==============================] - 0s - loss: 0.0482 - acc: 0.9889 - val_loss: 0.1229 - val_acc: 0.9565
Epoch 115/300
90/90 [==============================] - 0s - loss: 0.0336 - acc: 0.9889 - val_loss: 0.0927 - val_acc: 0.9565
Epoch 116/300
90/90 [==============================] - 0s - loss: 0.0346 - acc: 0.9889 - val_loss: 0.0929 - val_acc: 0.9565
Epoch 117/300
90/90 [==============================] - 0s - loss: 0.0796 - acc: 0.9778 - val_loss: 0.1377 - val_acc: 0.9130
Epoch 118/300
90/90 [==============================] - 0s - loss: 0.0433 - acc: 0.9889 - val_loss: 0.0928 - val_acc: 0.9565
Epoch 119/300
90/90 [==============================] - 0s - loss: 0.0258 - acc: 1.0000 - val_loss: 0.0946 - val_acc: 0.9565
Epoch 120/300
90/90 [==============================] - 0s - loss: 0.0431 - acc: 0.9889 - val_loss: 0.0981 - val_acc: 1.0000
Epoch 121/300
90/90 [==============================] - 0s - loss: 0.0347 - acc: 1.0000 - val_loss: 0.0894 - val_acc: 0.9565
Epoch 122/300
90/90 [==============================] - 0s - loss: 0.0665 - acc: 0.9556 - val_loss: 0.0858 - val_acc: 0.9565
Epoch 123/300
90/90 [==============================] - 0s - loss: 0.0397 - acc: 0.9889 - val_loss: 0.0798 - val_acc: 0.9565
Epoch 124/300
90/90 [==============================] - 0s - loss: 0.0426 - acc: 0.9889 - val_loss: 0.1081 - val_acc: 1.0000
Epoch 125/300
90/90 [==============================] - 0s - loss: 0.0354 - acc: 0.9889 - val_loss: 0.1164 - val_acc: 0.9565
Epoch 126/300
90/90 [==============================] - 0s - loss: 0.0563 - acc: 0.9889 - val_loss: 0.0968 - val_acc: 0.9565
Epoch 127/300
90/90 [==============================] - 0s - loss: 0.0273 - acc: 1.0000 - val_loss: 0.1160 - val_acc: 1.0000
Epoch 128/300
90/90 [==============================] - 0s - loss: 0.0260 - acc: 1.0000 - val_loss: 0.0978 - val_acc: 0.9565
Epoch 129/300
90/90 [==============================] - 0s - loss: 0.0132 - acc: 1.0000 - val_loss: 0.1019 - val_acc: 0.9565
Epoch 130/300
90/90 [==============================] - 0s - loss: 0.0254 - acc: 1.0000 - val_loss: 0.0961 - val_acc: 0.9565
Epoch 131/300
90/90 [==============================] - 0s - loss: 0.0429 - acc: 1.0000 - val_loss: 0.0916 - val_acc: 0.9565
Epoch 132/300
90/90 [==============================] - 0s - loss: 0.0423 - acc: 0.9889 - val_loss: 0.1053 - val_acc: 1.0000
Epoch 133/300
90/90 [==============================] - 0s - loss: 0.0407 - acc: 0.9889 - val_loss: 0.1035 - val_acc: 1.0000
Epoch 134/300
90/90 [==============================] - 0s - loss: 0.0340 - acc: 1.0000 - val_loss: 0.0895 - val_acc: 0.9565
Epoch 135/300
90/90 [==============================] - 0s - loss: 0.0175 - acc: 1.0000 - val_loss: 0.1056 - val_acc: 0.9565
Epoch 136/300
90/90 [==============================] - 0s - loss: 0.0206 - acc: 1.0000 - val_loss: 0.1028 - val_acc: 0.9565
Epoch 137/300
90/90 [==============================] - 0s - loss: 0.0443 - acc: 0.9889 - val_loss: 0.1900 - val_acc: 0.8261
Epoch 138/300
90/90 [==============================] - 0s - loss: 0.0274 - acc: 1.0000 - val_loss: 0.0951 - val_acc: 0.9565
Epoch 139/300
90/90 [==============================] - 0s - loss: 0.0227 - acc: 1.0000 - val_loss: 0.1157 - val_acc: 0.9565
Epoch 140/300
90/90 [==============================] - 0s - loss: 0.0512 - acc: 0.9889 - val_loss: 0.1268 - val_acc: 0.9565
Epoch 141/300
90/90 [==============================] - 0s - loss: 0.0418 - acc: 0.9778 - val_loss: 0.1048 - val_acc: 0.9565
Epoch 142/300
90/90 [==============================] - 0s - loss: 0.0282 - acc: 0.9889 - val_loss: 0.1045 - val_acc: 0.9565
Epoch 143/300
90/90 [==============================] - 0s - loss: 0.0211 - acc: 1.0000 - val_loss: 0.0991 - val_acc: 0.9565
Epoch 144/300
90/90 [==============================] - 0s - loss: 0.0498 - acc: 0.9778 - val_loss: 0.1392 - val_acc: 0.8696
Epoch 145/300
90/90 [==============================] - 0s - loss: 0.0358 - acc: 1.0000 - val_loss: 0.1200 - val_acc: 0.9565
Epoch 146/300
90/90 [==============================] - 0s - loss: 0.0194 - acc: 1.0000 - val_loss: 0.1133 - val_acc: 0.9565
Epoch 147/300
90/90 [==============================] - 0s - loss: 0.0337 - acc: 0.9889 - val_loss: 0.2250 - val_acc: 0.9565
Epoch 148/300
90/90 [==============================] - 0s - loss: 0.1059 - acc: 0.9778 - val_loss: 0.1774 - val_acc: 0.9565
Epoch 149/300
90/90 [==============================] - 0s - loss: 0.0397 - acc: 0.9889 - val_loss: 0.1339 - val_acc: 0.9565
Epoch 150/300
90/90 [==============================] - 0s - loss: 0.0322 - acc: 0.9889 - val_loss: 0.1741 - val_acc: 0.8261
Epoch 151/300
90/90 [==============================] - 0s - loss: 0.0431 - acc: 0.9889 - val_loss: 0.1090 - val_acc: 0.9565
Epoch 152/300
90/90 [==============================] - 0s - loss: 0.0261 - acc: 1.0000 - val_loss: 0.1124 - val_acc: 0.9565
Epoch 153/300
90/90 [==============================] - 0s - loss: 0.0202 - acc: 1.0000 - val_loss: 0.1115 - val_acc: 1.0000
Epoch 154/300
90/90 [==============================] - 0s - loss: 0.0344 - acc: 0.9889 - val_loss: 0.0948 - val_acc: 0.9565
Epoch 155/300
90/90 [==============================] - 0s - loss: 0.0181 - acc: 1.0000 - val_loss: 0.1200 - val_acc: 1.0000
Epoch 156/300
90/90 [==============================] - 0s - loss: 0.0312 - acc: 0.9889 - val_loss: 0.0989 - val_acc: 0.9565
Epoch 157/300
90/90 [==============================] - 0s - loss: 0.0199 - acc: 1.0000 - val_loss: 0.0893 - val_acc: 1.0000
Epoch 158/300
90/90 [==============================] - 0s - loss: 0.0163 - acc: 1.0000 - val_loss: 0.0886 - val_acc: 1.0000
Epoch 159/300
90/90 [==============================] - 0s - loss: 0.0684 - acc: 0.9778 - val_loss: 0.0865 - val_acc: 0.9565
Epoch 160/300
90/90 [==============================] - 0s - loss: 0.0145 - acc: 1.0000 - val_loss: 0.0939 - val_acc: 1.0000
Epoch 161/300
90/90 [==============================] - 0s - loss: 0.0243 - acc: 0.9889 - val_loss: 0.1052 - val_acc: 0.9565
Epoch 162/300
90/90 [==============================] - 0s - loss: 0.0350 - acc: 0.9778 - val_loss: 0.1013 - val_acc: 0.9565
Epoch 163/300
90/90 [==============================] - 0s - loss: 0.0156 - acc: 1.0000 - val_loss: 0.0967 - val_acc: 0.9565
Epoch 164/300
90/90 [==============================] - 0s - loss: 0.0163 - acc: 1.0000 - val_loss: 0.1028 - val_acc: 0.9565
Epoch 165/300
90/90 [==============================] - 0s - loss: 0.0304 - acc: 1.0000 - val_loss: 0.0924 - val_acc: 0.9565
Epoch 166/300
90/90 [==============================] - 0s - loss: 0.0269 - acc: 0.9889 - val_loss: 0.1191 - val_acc: 0.9565
Epoch 167/300
90/90 [==============================] - 0s - loss: 0.0168 - acc: 1.0000 - val_loss: 0.0889 - val_acc: 0.9565
Epoch 168/300
90/90 [==============================] - 0s - loss: 0.0128 - acc: 1.0000 - val_loss: 0.0849 - val_acc: 1.0000
Epoch 169/300
90/90 [==============================] - 0s - loss: 0.0172 - acc: 1.0000 - val_loss: 0.0825 - val_acc: 0.9565
Epoch 170/300
90/90 [==============================] - 0s - loss: 0.0281 - acc: 0.9889 - val_loss: 0.0854 - val_acc: 1.0000
Epoch 171/300
90/90 [==============================] - 0s - loss: 0.0117 - acc: 1.0000 - val_loss: 0.0844 - val_acc: 0.9565
Epoch 172/300
90/90 [==============================] - 0s - loss: 0.0086 - acc: 1.0000 - val_loss: 0.0833 - val_acc: 0.9565
Epoch 173/300
90/90 [==============================] - 0s - loss: 0.0147 - acc: 1.0000 - val_loss: 0.0968 - val_acc: 0.9565
Epoch 174/300
90/90 [==============================] - 0s - loss: 0.0101 - acc: 1.0000 - val_loss: 0.0894 - val_acc: 0.9565
Epoch 175/300
90/90 [==============================] - 0s - loss: 0.0734 - acc: 0.9667 - val_loss: 0.2510 - val_acc: 0.9565
Epoch 176/300
90/90 [==============================] - 0s - loss: 0.0302 - acc: 0.9889 - val_loss: 0.1132 - val_acc: 0.9565
Epoch 177/300
90/90 [==============================] - 0s - loss: 0.0251 - acc: 0.9889 - val_loss: 0.1056 - val_acc: 1.0000
Epoch 178/300
90/90 [==============================] - 0s - loss: 0.0185 - acc: 0.9889 - val_loss: 0.1188 - val_acc: 0.9565
Epoch 179/300
90/90 [==============================] - 0s - loss: 0.0185 - acc: 1.0000 - val_loss: 0.1149 - val_acc: 0.9565
Epoch 180/300
90/90 [==============================] - 0s - loss: 0.0140 - acc: 1.0000 - val_loss: 0.1034 - val_acc: 0.9565
Epoch 181/300
90/90 [==============================] - 0s - loss: 0.0128 - acc: 1.0000 - val_loss: 0.1041 - val_acc: 0.9565
Epoch 182/300
90/90 [==============================] - 0s - loss: 0.0224 - acc: 0.9889 - val_loss: 0.0997 - val_acc: 0.9565
Epoch 183/300
90/90 [==============================] - 0s - loss: 0.0210 - acc: 1.0000 - val_loss: 0.1191 - val_acc: 0.9565
Epoch 184/300
90/90 [==============================] - 0s - loss: 0.0231 - acc: 1.0000 - val_loss: 0.0968 - val_acc: 0.9565
Epoch 185/300
90/90 [==============================] - 0s - loss: 0.0136 - acc: 1.0000 - val_loss: 0.0995 - val_acc: 0.9565
Epoch 186/300
90/90 [==============================] - 0s - loss: 0.0351 - acc: 0.9889 - val_loss: 0.1576 - val_acc: 0.9565
Epoch 187/300
90/90 [==============================] - 0s - loss: 0.0151 - acc: 1.0000 - val_loss: 0.1109 - val_acc: 0.9565
Epoch 188/300
90/90 [==============================] - 0s - loss: 0.0111 - acc: 1.0000 - val_loss: 0.0988 - val_acc: 0.9565
Epoch 189/300
90/90 [==============================] - 0s - loss: 0.0603 - acc: 0.9778 - val_loss: 0.0996 - val_acc: 0.9565
Epoch 190/300
90/90 [==============================] - 0s - loss: 0.0115 - acc: 1.0000 - val_loss: 0.1029 - val_acc: 0.9565
Epoch 191/300
90/90 [==============================] - 0s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.1040 - val_acc: 0.9565
Epoch 192/300
90/90 [==============================] - 0s - loss: 0.0084 - acc: 1.0000 - val_loss: 0.1035 - val_acc: 0.9565
Epoch 193/300
90/90 [==============================] - 0s - loss: 0.0116 - acc: 1.0000 - val_loss: 0.0996 - val_acc: 0.9565
Epoch 194/300
90/90 [==============================] - 0s - loss: 0.0524 - acc: 0.9778 - val_loss: 0.1546 - val_acc: 0.9565
Epoch 195/300
90/90 [==============================] - 0s - loss: 0.0126 - acc: 1.0000 - val_loss: 0.1282 - val_acc: 0.9565
Epoch 196/300
90/90 [==============================] - 0s - loss: 0.0147 - acc: 1.0000 - val_loss: 0.1072 - val_acc: 0.9565
Epoch 197/300
90/90 [==============================] - 0s - loss: 0.0095 - acc: 1.0000 - val_loss: 0.1047 - val_acc: 0.9565
Epoch 198/300
90/90 [==============================] - 0s - loss: 0.0112 - acc: 1.0000 - val_loss: 0.1140 - val_acc: 0.9565
Epoch 199/300
90/90 [==============================] - 0s - loss: 0.0118 - acc: 1.0000 - val_loss: 0.1002 - val_acc: 0.9565
Epoch 200/300
90/90 [==============================] - 0s - loss: 0.0153 - acc: 1.0000 - val_loss: 0.0969 - val_acc: 0.9565
Epoch 201/300
90/90 [==============================] - 0s - loss: 0.0069 - acc: 1.0000 - val_loss: 0.0996 - val_acc: 0.9565
Epoch 202/300
90/90 [==============================] - 0s - loss: 0.0344 - acc: 0.9889 - val_loss: 0.1382 - val_acc: 0.9565
Epoch 203/300
90/90 [==============================] - 0s - loss: 0.0112 - acc: 1.0000 - val_loss: 0.1289 - val_acc: 0.9565
Epoch 204/300
90/90 [==============================] - 0s - loss: 0.0047 - acc: 1.0000 - val_loss: 0.1295 - val_acc: 0.9565
Epoch 205/300
90/90 [==============================] - 0s - loss: 0.0257 - acc: 1.0000 - val_loss: 0.1461 - val_acc: 0.9565
Epoch 206/300
90/90 [==============================] - 0s - loss: 0.0160 - acc: 1.0000 - val_loss: 0.1065 - val_acc: 0.9565
Epoch 207/300
90/90 [==============================] - 0s - loss: 0.0056 - acc: 1.0000 - val_loss: 0.1054 - val_acc: 0.9565
Epoch 208/300
90/90 [==============================] - 0s - loss: 0.0128 - acc: 1.0000 - val_loss: 0.1140 - val_acc: 0.9565
Epoch 209/300
90/90 [==============================] - 0s - loss: 0.0115 - acc: 1.0000 - val_loss: 0.1118 - val_acc: 0.9565
Epoch 210/300
90/90 [==============================] - 0s - loss: 0.0257 - acc: 1.0000 - val_loss: 0.1113 - val_acc: 0.9565
Epoch 211/300
90/90 [==============================] - 0s - loss: 0.0053 - acc: 1.0000 - val_loss: 0.1062 - val_acc: 0.9565
Epoch 212/300
90/90 [==============================] - 0s - loss: 0.0094 - acc: 1.0000 - val_loss: 0.1166 - val_acc: 0.9565
Epoch 213/300
90/90 [==============================] - 0s - loss: 0.0158 - acc: 1.0000 - val_loss: 0.1006 - val_acc: 0.9565
Epoch 214/300
90/90 [==============================] - 0s - loss: 0.0433 - acc: 0.9889 - val_loss: 0.1057 - val_acc: 0.9565
Epoch 215/300
90/90 [==============================] - 0s - loss: 0.0489 - acc: 0.9778 - val_loss: 0.1646 - val_acc: 0.9565
Epoch 216/300
90/90 [==============================] - 0s - loss: 0.0260 - acc: 0.9889 - val_loss: 0.1300 - val_acc: 0.9565
Epoch 217/300
90/90 [==============================] - 0s - loss: 0.0258 - acc: 0.9889 - val_loss: 0.1104 - val_acc: 0.9565
Epoch 218/300
90/90 [==============================] - 0s - loss: 0.0056 - acc: 1.0000 - val_loss: 0.1129 - val_acc: 0.9565
Epoch 219/300
90/90 [==============================] - 0s - loss: 0.0165 - acc: 1.0000 - val_loss: 0.0997 - val_acc: 0.9565
Epoch 220/300
90/90 [==============================] - 0s - loss: 0.0059 - acc: 1.0000 - val_loss: 0.1043 - val_acc: 0.9565
Epoch 221/300
90/90 [==============================] - 0s - loss: 0.0034 - acc: 1.0000 - val_loss: 0.1047 - val_acc: 0.9565
Epoch 222/300
90/90 [==============================] - 0s - loss: 0.0071 - acc: 1.0000 - val_loss: 0.1136 - val_acc: 0.9565
Epoch 223/300
90/90 [==============================] - 0s - loss: 0.0086 - acc: 1.0000 - val_loss: 0.1045 - val_acc: 0.9565
Epoch 224/300
90/90 [==============================] - 0s - loss: 0.0157 - acc: 0.9889 - val_loss: 0.1176 - val_acc: 1.0000
Epoch 225/300
90/90 [==============================] - 0s - loss: 0.0191 - acc: 1.0000 - val_loss: 0.1151 - val_acc: 0.9565
Epoch 226/300
90/90 [==============================] - 0s - loss: 0.0215 - acc: 0.9889 - val_loss: 0.1059 - val_acc: 1.0000
Epoch 227/300
90/90 [==============================] - 0s - loss: 0.0243 - acc: 0.9889 - val_loss: 0.1418 - val_acc: 0.9565
Epoch 228/300
90/90 [==============================] - 0s - loss: 0.0073 - acc: 1.0000 - val_loss: 0.1479 - val_acc: 0.9565
Epoch 229/300
90/90 [==============================] - 0s - loss: 0.0119 - acc: 1.0000 - val_loss: 0.1048 - val_acc: 0.9565
Epoch 230/300
90/90 [==============================] - 0s - loss: 0.0064 - acc: 1.0000 - val_loss: 0.0954 - val_acc: 0.9565
Epoch 231/300
90/90 [==============================] - 0s - loss: 0.0129 - acc: 1.0000 - val_loss: 0.1013 - val_acc: 0.9565
Epoch 232/300
90/90 [==============================] - 0s - loss: 0.0173 - acc: 1.0000 - val_loss: 0.1224 - val_acc: 0.9565
Epoch 233/300
90/90 [==============================] - 0s - loss: 0.0052 - acc: 1.0000 - val_loss: 0.1178 - val_acc: 0.9565
Epoch 234/300
90/90 [==============================] - 0s - loss: 0.0232 - acc: 0.9889 - val_loss: 0.1031 - val_acc: 0.9565
Epoch 235/300
90/90 [==============================] - 0s - loss: 0.0277 - acc: 0.9889 - val_loss: 0.1494 - val_acc: 0.9565
Epoch 236/300
90/90 [==============================] - 0s - loss: 0.0188 - acc: 0.9889 - val_loss: 0.1120 - val_acc: 0.9565
Epoch 237/300
90/90 [==============================] - 0s - loss: 0.0192 - acc: 0.9889 - val_loss: 0.1022 - val_acc: 0.9565
Epoch 238/300
90/90 [==============================] - 0s - loss: 0.0059 - acc: 1.0000 - val_loss: 0.1199 - val_acc: 0.9565
Epoch 239/300
90/90 [==============================] - 0s - loss: 0.0108 - acc: 1.0000 - val_loss: 0.1474 - val_acc: 0.9565
Epoch 240/300
90/90 [==============================] - 0s - loss: 0.0174 - acc: 1.0000 - val_loss: 0.1324 - val_acc: 0.9565
Epoch 241/300
90/90 [==============================] - 0s - loss: 0.0178 - acc: 1.0000 - val_loss: 0.0979 - val_acc: 0.9565
Epoch 242/300
90/90 [==============================] - 0s - loss: 0.0120 - acc: 1.0000 - val_loss: 0.0881 - val_acc: 1.0000
Epoch 243/300
90/90 [==============================] - 0s - loss: 0.0232 - acc: 0.9889 - val_loss: 0.1134 - val_acc: 0.9565
Epoch 244/300
90/90 [==============================] - 0s - loss: 0.0094 - acc: 1.0000 - val_loss: 0.1024 - val_acc: 0.9565
Epoch 245/300
90/90 [==============================] - 0s - loss: 0.0077 - acc: 1.0000 - val_loss: 0.0840 - val_acc: 0.9565
Epoch 246/300
90/90 [==============================] - 0s - loss: 0.0051 - acc: 1.0000 - val_loss: 0.0851 - val_acc: 0.9565
Epoch 247/300
90/90 [==============================] - 0s - loss: 0.0093 - acc: 1.0000 - val_loss: 0.1089 - val_acc: 0.9565
Epoch 248/300
90/90 [==============================] - 0s - loss: 0.0035 - acc: 1.0000 - val_loss: 0.1169 - val_acc: 0.9565
Epoch 249/300
90/90 [==============================] - 0s - loss: 0.0030 - acc: 1.0000 - val_loss: 0.1095 - val_acc: 0.9565
Epoch 250/300
90/90 [==============================] - 0s - loss: 0.0192 - acc: 1.0000 - val_loss: 0.0797 - val_acc: 1.0000
Epoch 251/300
90/90 [==============================] - 0s - loss: 0.0048 - acc: 1.0000 - val_loss: 0.0767 - val_acc: 0.9565
Epoch 252/300
90/90 [==============================] - 0s - loss: 0.0111 - acc: 1.0000 - val_loss: 0.1139 - val_acc: 0.9565
Epoch 253/300
90/90 [==============================] - 0s - loss: 0.0040 - acc: 1.0000 - val_loss: 0.0939 - val_acc: 0.9565
Epoch 254/300
90/90 [==============================] - 0s - loss: 0.0042 - acc: 1.0000 - val_loss: 0.0901 - val_acc: 0.9565
Epoch 255/300
90/90 [==============================] - 0s - loss: 0.0242 - acc: 0.9889 - val_loss: 0.0851 - val_acc: 1.0000
Epoch 256/300
90/90 [==============================] - 0s - loss: 0.0085 - acc: 1.0000 - val_loss: 0.0695 - val_acc: 1.0000
Epoch 257/300
90/90 [==============================] - 0s - loss: 0.0086 - acc: 1.0000 - val_loss: 0.0704 - val_acc: 1.0000
Epoch 258/300
90/90 [==============================] - 0s - loss: 0.0051 - acc: 1.0000 - val_loss: 0.0752 - val_acc: 0.9565
Epoch 259/300
90/90 [==============================] - 0s - loss: 0.0033 - acc: 1.0000 - val_loss: 0.0749 - val_acc: 0.9565
Epoch 260/300
90/90 [==============================] - 0s - loss: 0.0034 - acc: 1.0000 - val_loss: 0.0820 - val_acc: 0.9565
Epoch 261/300
90/90 [==============================] - 0s - loss: 0.0049 - acc: 1.0000 - val_loss: 0.0991 - val_acc: 0.9565
Epoch 262/300
90/90 [==============================] - 0s - loss: 0.0053 - acc: 1.0000 - val_loss: 0.1383 - val_acc: 0.9565
Epoch 263/300
90/90 [==============================] - 0s - loss: 0.0179 - acc: 1.0000 - val_loss: 0.1299 - val_acc: 0.9565
Epoch 264/300
90/90 [==============================] - 0s - loss: 0.0061 - acc: 1.0000 - val_loss: 0.0902 - val_acc: 0.9565
Epoch 265/300
90/90 [==============================] - 0s - loss: 0.0041 - acc: 1.0000 - val_loss: 0.0689 - val_acc: 0.9565
Epoch 266/300
90/90 [==============================] - 0s - loss: 0.0065 - acc: 1.0000 - val_loss: 0.0718 - val_acc: 0.9565
Epoch 267/300
90/90 [==============================] - 0s - loss: 0.0104 - acc: 0.9889 - val_loss: 0.0579 - val_acc: 1.0000
Epoch 268/300
90/90 [==============================] - 0s - loss: 0.0092 - acc: 1.0000 - val_loss: 0.0581 - val_acc: 1.0000
Epoch 269/300
90/90 [==============================] - 0s - loss: 0.0055 - acc: 1.0000 - val_loss: 0.0600 - val_acc: 0.9565
Epoch 270/300
90/90 [==============================] - 0s - loss: 0.0082 - acc: 1.0000 - val_loss: 0.0637 - val_acc: 1.0000
Epoch 271/300
90/90 [==============================] - 0s - loss: 0.0065 - acc: 1.0000 - val_loss: 0.0875 - val_acc: 0.9565
Epoch 272/300
90/90 [==============================] - 0s - loss: 0.0101 - acc: 1.0000 - val_loss: 0.0682 - val_acc: 0.9565
Epoch 273/300
90/90 [==============================] - 0s - loss: 0.0045 - acc: 1.0000 - val_loss: 0.0824 - val_acc: 0.9565
Epoch 274/300
90/90 [==============================] - 0s - loss: 0.0083 - acc: 1.0000 - val_loss: 0.0866 - val_acc: 0.9565
Epoch 275/300
90/90 [==============================] - 0s - loss: 0.0055 - acc: 1.0000 - val_loss: 0.1013 - val_acc: 0.9565
Epoch 276/300
90/90 [==============================] - 0s - loss: 0.0051 - acc: 1.0000 - val_loss: 0.0874 - val_acc: 0.9565
Epoch 277/300
90/90 [==============================] - 0s - loss: 0.0103 - acc: 1.0000 - val_loss: 0.1262 - val_acc: 0.9565
Epoch 278/300
90/90 [==============================] - 0s - loss: 0.0090 - acc: 1.0000 - val_loss: 0.0880 - val_acc: 0.9565
Epoch 279/300
90/90 [==============================] - 0s - loss: 0.0088 - acc: 1.0000 - val_loss: 0.1016 - val_acc: 0.9565
Epoch 280/300
90/90 [==============================] - 0s - loss: 0.0027 - acc: 1.0000 - val_loss: 0.1038 - val_acc: 0.9565
Epoch 281/300
90/90 [==============================] - 0s - loss: 0.0488 - acc: 0.9778 - val_loss: 0.0793 - val_acc: 1.0000
Epoch 282/300
90/90 [==============================] - 0s - loss: 0.0229 - acc: 0.9889 - val_loss: 0.1187 - val_acc: 0.9565
Epoch 283/300
90/90 [==============================] - 0s - loss: 0.0052 - acc: 1.0000 - val_loss: 0.1143 - val_acc: 0.9565
Epoch 284/300
90/90 [==============================] - 0s - loss: 0.0055 - acc: 1.0000 - val_loss: 0.0857 - val_acc: 0.9565
Epoch 285/300
90/90 [==============================] - 0s - loss: 0.0098 - acc: 1.0000 - val_loss: 0.1394 - val_acc: 0.9565
Epoch 286/300
90/90 [==============================] - 0s - loss: 0.0158 - acc: 0.9889 - val_loss: 0.0845 - val_acc: 0.9565
Epoch 287/300
90/90 [==============================] - 0s - loss: 0.0609 - acc: 0.9889 - val_loss: 0.1877 - val_acc: 0.9565
Epoch 288/300
90/90 [==============================] - 0s - loss: 0.0025 - acc: 1.0000 - val_loss: 0.1738 - val_acc: 0.9565
Epoch 289/300
90/90 [==============================] - 0s - loss: 0.0181 - acc: 0.9889 - val_loss: 0.1314 - val_acc: 0.9565
Epoch 290/300
90/90 [==============================] - 0s - loss: 0.0087 - acc: 1.0000 - val_loss: 0.1312 - val_acc: 0.9565
Epoch 291/300
90/90 [==============================] - 0s - loss: 0.0110 - acc: 1.0000 - val_loss: 0.1398 - val_acc: 0.9565
Epoch 292/300
90/90 [==============================] - 0s - loss: 0.0042 - acc: 1.0000 - val_loss: 0.1577 - val_acc: 0.9565
Epoch 293/300
90/90 [==============================] - 0s - loss: 0.0034 - acc: 1.0000 - val_loss: 0.1533 - val_acc: 0.9565
Epoch 294/300
90/90 [==============================] - 0s - loss: 0.0144 - acc: 0.9889 - val_loss: 0.1159 - val_acc: 0.9565
Epoch 295/300
90/90 [==============================] - 0s - loss: 0.0085 - acc: 1.0000 - val_loss: 0.1169 - val_acc: 0.9565
Epoch 296/300
90/90 [==============================] - 0s - loss: 0.0067 - acc: 1.0000 - val_loss: 0.1220 - val_acc: 0.9565
Epoch 297/300
90/90 [==============================] - 0s - loss: 0.0232 - acc: 0.9889 - val_loss: 0.1262 - val_acc: 0.9565
Epoch 298/300
90/90 [==============================] - 0s - loss: 0.0082 - acc: 1.0000 - val_loss: 0.1188 - val_acc: 0.9565
Epoch 299/300
90/90 [==============================] - 0s - loss: 0.0247 - acc: 0.9778 - val_loss: 0.2582 - val_acc: 0.9565
Epoch 300/300
90/90 [==============================] - 0s - loss: 0.0082 - acc: 1.0000 - val_loss: 0.2163 - val_acc: 0.9565
Wall time: 2min 44s
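
The validation loss plateaus long before epoch 300, so an EarlyStopping callback (available in this Keras version) could cut the 2min 44s wall time considerably; a sketch, with the patience value chosen arbitrarily:

In [ ]:
from keras.callbacks import EarlyStopping
# stop once val_loss has not improved for 20 consecutive epochs (patience is a guess)
stopper = EarlyStopping(monitor='val_loss', patience=20)
# model2.fit(x_tr3, y_tr3, nb_epoch=300, batch_size=50,
#            validation_split=0.2, callbacks=[stopper], verbose=0)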

2.2.2. Graph of accuracy and loss during training


In [19]:
plt.figure(figsize=(25, 15))
plt.plot(hist2.history['acc'], label='acc')
plt.plot(hist2.history['loss'], label='loss')
plt.plot(hist2.history['val_acc'], '--', label='val_acc')
plt.plot(hist2.history['val_loss'], '--', label='val_loss')
plt.legend()
plt.ylim(0, max(hist2.history['acc']) + 0.05)
plt.grid(False)  # note: plt.grid('off') would actually turn the grid on



In [20]:
model2.summary()


--------------------------------------------------------------------------------
Initial input shape: (None, 1, 50, 50)
--------------------------------------------------------------------------------
Layer (name)                  Output Shape                  Param #             
--------------------------------------------------------------------------------
Convolution2D (convolution2d) (None, 10, 50, 50)            1010                
Activation (activation)       (None, 10, 50, 50)            0                   
MaxPooling2D (maxpooling2d)   (None, 10, 25, 25)            0                   
Dropout (dropout)             (None, 10, 25, 25)            0                   
Convolution2D (convolution2d) (None, 10, 25, 25)            10010               
Activation (activation)       (None, 10, 25, 25)            0                   
MaxPooling2D (maxpooling2d)   (None, 10, 12, 12)            0                   
Dropout (dropout)             (None, 10, 12, 12)            0                   
Flatten (flatten)             (None, 1440)                  0                   
Dense (dense)                 (None, 1250)                  1801250             
Activation (activation)       (None, 1250)                  0                   
Dense (dense)                 (None, 2)                     2502                
--------------------------------------------------------------------------------
Total params: 1814772
--------------------------------------------------------------------------------

2.2.3. Evaluate on the test set


In [21]:
model2.evaluate(x_te3, y_te3, batch_size=50, show_accuracy=True, verbose=1)


29/29 [==============================] - 0s
Out[21]:
[0.39729949831962585, 0.93103448275862066]

2.2.4. Visualize the trained weights of the first convolution layer


In [22]:
def plot_weights(model):
    '''
    Plot the filters of the first convolution layer as images.
    
    #Args
    model : fitted model; assumes Theano dim ordering, so
            get_weights()[0] has shape (nb_filter, stack, rows, cols)
    '''
    wegh_arr = model.get_weights()
    num = len(wegh_arr[0])
    side = int(np.sqrt(num))
    if side * side == num:   # perfect square: use a square grid
        col = side           # (np.sqrt never returns a Python int,
        row = side           #  so the original `type(...) is int` test never fired)
    else:
        col = int(num / 2)
        row = int(num / col)
        
    fig, axes = plt.subplots(row, col, subplot_kw={'xticks': [], 'yticks': []})
    plt.subplots_adjust(hspace=0.02, wspace=0.05)
    
    for i, ax in zip(xrange(num), axes.flat):
        ax.imshow(wegh_arr[0][i][0])
        ax.grid(False)
    plt.show()

In [23]:
plot_weights(model2)


2.2.5. Predict classes and report per-class metrics


In [24]:
y_pred2 = model2.predict_classes(x_te3)
y_pred2


29/29 [==============================] - 0s
Out[24]:
array([1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1], dtype=int64)

In [25]:
y_ten2 = cat2lab(y_te3)
y_ten2


Out[25]:
array([0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1])

In [26]:
print(classification_report(y_ten2,y_pred2))


             precision    recall  f1-score   support

          0       1.00      0.82      0.90        11
          1       0.90      1.00      0.95        18

avg / total       0.94      0.93      0.93        29

2.3. Support vector machine


In [31]:
from sklearn.grid_search import GridSearchCV
from sklearn.svm import SVC

2.3.1. Make cross-validation set


In [32]:
cv = StratifiedKFold(labels, n_folds=10, shuffle=True)

2.3.2. Define the parameter grid


In [33]:
params = {'C': [1e1, 1e2, 1e3, 1e4, 1e5],
          'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01]}
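
This grid has 5 x 5 = 25 candidate settings, so the 10-fold search below trains 250 models in total; a sketch that counts the candidates (ParameterGrid lives in sklearn.grid_search in this sklearn version):

In [ ]:
from sklearn.grid_search import ParameterGrid
print(len(ParameterGrid(params)))  # 25 candidates x 10 folds = 250 fits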

2.3.3. Find best parameter


In [34]:
clf_grid = GridSearchCV(SVC(kernel='rbf'), params, cv=cv)

2.3.4. Demonstrate with the best parameters


In [35]:
model3 = clf_grid.fit(reg_imgs,labels)

In [36]:
model3.best_score_ , model3.best_params_


Out[36]:
(0.85915492957746475, {'C': 10.0, 'gamma': 0.01})

In [37]:
# demonstrate the GridSearchCV procedure above with manual folds
svc_rslt = []
for x, y in cv:
    clf = SVC(kernel='rbf', C=10.0, gamma=0.005)
    clf.fit(reg_imgs[x], labels[x])
    svc_rslt.append(clf.score(reg_imgs[y], labels[y]))
svc_rslt = np.array(svc_rslt)
svc_rslt


Out[37]:
array([ 1.        ,  0.73333333,  0.93333333,  0.93333333,  0.93333333,
        0.64285714,  0.78571429,  0.76923077,  0.76923077,  0.84615385])

In [38]:
print('cross-validated SVC score is ', svc_rslt.mean())


cross-validated SVC score is  0.834652014652
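
The fold scores range from about 0.64 to 1.0, so the spread is worth reporting alongside the mean; a sketch:

In [ ]:
print('cross-validated SVC score: %.3f +/- %.3f' % (svc_rslt.mean(), svc_rslt.std()))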

2.4. Ensemble (just a trial; additional preprocessing is needed)


In [39]:
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier

2.4.1. Make various models


In [40]:
ens1 = RandomForestClassifier(n_estimators=250, max_depth=None, verbose=1)

ens2 = AdaBoostClassifier(SVC(kernel='rbf', gamma=0.005, C=10.0),
                          algorithm="SAMME",
                          n_estimators=100,
                          learning_rate=0.01)

ens3 = AdaBoostClassifier(DecisionTreeClassifier(max_depth=None),
                          algorithm="SAMME",
                          n_estimators=100,
                          learning_rate=0.01)

In [41]:
ens1.fit(x_trn1, y_trn1)
ens2.fit(x_trn1, y_trn1)
ens3.fit(x_trn1, y_trn1)


[Parallel(n_jobs=1)]: Done  49 tasks       | elapsed:    0.0s
[Parallel(n_jobs=1)]: Done 199 tasks       | elapsed:    0.1s
[Parallel(n_jobs=1)]: Done 250 out of 250 | elapsed:    0.1s finished
Out[41]:
AdaBoostClassifier(algorithm='SAMME',
          base_estimator=DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,
            max_features=None, max_leaf_nodes=None, min_samples_leaf=1,
            min_samples_split=2, min_weight_fraction_leaf=0.0,
            presort=False, random_state=None, splitter='best'),
          learning_rate=0.01, n_estimators=100, random_state=None)

2.4.2. Test on test set


In [42]:
ens1.score(x_ten1,y_ten1)


[Parallel(n_jobs=1)]: Done  49 tasks       | elapsed:    0.0s
[Parallel(n_jobs=1)]: Done 199 tasks       | elapsed:    0.0s
[Parallel(n_jobs=1)]: Done 250 out of 250 | elapsed:    0.0s finished
Out[42]:
0.7931034482758621

In [43]:
ens2.score(x_ten1,y_ten1)


Out[43]:
0.62068965517241381

In [44]:
ens3.score(x_ten1,y_ten1)


Out[44]:
0.48275862068965519
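
Since all three classifiers are already fitted, one natural follow-up is to combine them by majority vote; a hypothetical sketch, not part of the original experiment, assuming binary 0/1 labels:

In [ ]:
# hypothetical majority vote over the three fitted ensembles
votes = np.array([ens1.predict(x_ten1),
                  ens2.predict(x_ten1),
                  ens3.predict(x_ten1)])
majority = (votes.sum(axis=0) >= 2).astype(int)
print((majority == y_ten1).mean())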

3. TODO (additional trials)

3.1. (temp) Haar-like features

from cv2 import CascadeClassifier

3.2. (temp) Wavelet transform

import pywt

3.3. (temp) Hough transform

from cv2 import HoughLines, HoughLinesP, HoughCircles
from os import listdir
import cv2