In [1]:
from keras.layers import Convolution2D, MaxPooling2D, Flatten
from keras.layers.core import Dense, Dropout, Activation
from sklearn.cross_validation import train_test_split
from keras.callbacks import History, Callback
from sklearn.metrics import classification_report
from sklearn.cross_validation import StratifiedKFold, cross_val_score
from keras.models import model_from_config
from keras.models import Sequential
from keras.optimizers import SGD
from keras.utils import np_utils
from keras.regularizers import l2
from keras import backend as K
import theano.tensor as T
import theano
import keras
import pickle
import numpy as np                 # used throughout but missing from the original cell
import matplotlib.pyplot as plt    # needed by the plotting cells below
Using Theano backend.
Using gpu device 0: GeForce 940M (CNMeM is disabled, CuDNN not available)
C:\Users\Back_jud\Anaconda2\lib\site-packages\theano\tensor\signal\downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.
In [2]:
# convert binary one-hot vectors back to integer labels
def cat2lab(x):
    '''Inverse of to_categorical; for the binary case only.'''
    return np.array([0 if s[0] else 1 for s in x])
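A quick check of what cat2lab does (illustrative snippet, not an executed cell): it inverts np_utils.to_categorical for the two-class case.
cat2lab(np.array([[1, 0], [0, 1], [1, 0]]))   # -> array([0, 1, 0])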
In [3]:
# pickled files must be opened in binary mode
with open('d://labels_new.p', 'rb') as a:
    labels = np.array(pickle.load(a))
with open('d://images_new.p', 'rb') as b:
    imgs = np.array(pickle.load(b))
imgsr = imgs / 255.0                            # scale pixels to [0, 1]; 255.0 forces float division under Python 2
labels = np_utils.to_categorical(labels, nb_classes=2)
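Before training, it is worth confirming the preprocessing (a small sanity-check sketch; the variable names match the cell above):
print(imgsr.min(), imgsr.max())   # pixels should now lie in [0, 1]
print(labels.shape)               # (n_samples, 2) one-hot rows
print(labels.sum(axis=0))         # per-class counts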
In [4]:
orimgs = []
for img in imgsr:
    orimgs.append(np.reshape(img, (50, 50)))
orimgs = np.array(orimgs)
In [5]:
x_tr, x_te, y_tr, y_te = train_test_split(orimgs, labels, test_size=0.2, random_state=123)
In [6]:
x_tr1, x_te1, y_tr1, y_te1 = train_test_split(imgsr, labels, test_size=0.2, random_state=123)
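With only ~140 images in total (the fits below train on 90 samples, validate on 23, and test on 29), a single random split is noisy. StratifiedKFold is already imported above and could be used instead; a sketch, assuming the pre-0.18 sklearn.cross_validation API that these imports target:
int_labels = cat2lab(labels)                       # StratifiedKFold wants integer labels
skf = StratifiedKFold(int_labels, n_folds=5, shuffle=True, random_state=123)
for train_idx, test_idx in skf:
    x_tr_cv, y_tr_cv = imgsr[train_idx], labels[train_idx]
    x_te_cv, y_te_cv = imgsr[test_idx], labels[test_idx]
    # ... compile and fit a fresh model per fold ...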
In [7]:
model1 = Sequential()
model1.add(Dense(2500, input_dim=2500, init='uniform'))
model1.add(Activation('relu'))
model1.add(Dense(2, activation='softmax'))
model1.compile(loss='categorical_crossentropy',
               optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.1, nesterov=True))
In [8]:
%time hist1 = model1.fit(x_tr1, y_tr1, nb_epoch=300, validation_split=0.2, batch_size=50, show_accuracy=True, verbose=0)
Wall time: 33.4 s
In [9]:
plt.plot(hist1.history['acc'], label='acc')
plt.plot(hist1.history['loss'], label='loss')
plt.plot(hist1.history['val_acc'], '--', label='val_acc')
plt.plot(hist1.history['val_loss'], '--', label='val_loss')
plt.grid(False)
plt.legend()
Out[9]:
<matplotlib.legend.Legend at 0x2c9f9e80>
In [10]:
model1.summary()
--------------------------------------------------------------------------------
Initial input shape: (None, 2500)
--------------------------------------------------------------------------------
Layer (name)                  Output Shape                  Param #
--------------------------------------------------------------------------------
Dense (dense)                 (None, 2500)                  6252500
Activation (activation)       (None, 2500)                  0
Dense (dense)                 (None, 2)                     5002
--------------------------------------------------------------------------------
Total params: 6257502
--------------------------------------------------------------------------------
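As a sanity check on these numbers: the hidden layer has 2500 × 2500 weights plus 2500 biases = 6,252,500 parameters, and the softmax layer 2500 × 2 + 2 = 5,002, which together give the 6,257,502 total.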
In [11]:
model1.evaluate(x_te1, y_te1, batch_size=50, show_accuracy=True)
29/29 [==============================] - 0s
Out[11]:
[0.69461101293563843, 0.75862068965517238]
In [12]:
y_pred1 = model1.predict_classes(x_te1, 20)
y_pred1
29/29 [==============================] - 0s
Out[12]:
array([1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1], dtype=int64)
In [13]:
y_ten1 = cat2lab(y_te1)
y_ten1
Out[13]:
array([0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1])
In [14]:
print(classification_report(y_ten1, y_pred1))
             precision    recall  f1-score   support

          0       0.67      0.73      0.70        11
          1       0.82      0.78      0.80        18

avg / total       0.76      0.76      0.76        29
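A confusion matrix makes the error pattern behind these averages explicit (a minimal sketch using sklearn.metrics; not an executed cell):
from sklearn.metrics import confusion_matrix
print(confusion_matrix(y_ten1, y_pred1))   # rows: true class, columns: predicted class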
In [15]:
for i in model1.get_weights():
    print(np.shape(i))
(2500L, 2500L)
(2500L,)
(2500L, 2L)
(2L,)
In [16]:
# reshape each image to (1, 50, 50) for the CNN (channel, height, width)
imgs2d = []
for img in imgsr:
    imgs2d.append(np.reshape(img, (1, 50, 50)))
imgs2d = np.array(imgs2d)
In [17]:
x_tr2, x_te2, y_tr2, y_te2 = train_test_split(imgs2d, labels, test_size=0.2, random_state=123)
In [18]:
model2 = Sequential()
model2.add(Convolution2D(20, 10, 10, border_mode='same', input_shape=(1, 50, 50)))  # 20 filters, matching the summary below
model2.add(Activation('relu'))
# model2.add(Convolution2D(50, 5, 5, init='uniform'))
# model2.add(Activation('relu'))
model2.add(MaxPooling2D(pool_size=(2, 2)))
model2.add(Dropout(0.5))
model2.add(Convolution2D(10, 10, 10, init='uniform', border_mode='same'))
model2.add(Activation('relu'))
# model2.add(Convolution2D(100, 5, 5, init='uniform'))
# model2.add(Activation('relu'))
model2.add(MaxPooling2D(pool_size=(2, 2)))
model2.add(Dropout(0.3))
model2.add(Flatten())
model2.add(Dense(1250, init='uniform'))
model2.add(Activation('relu'))
model2.add(Dense(2, activation='softmax'))
model2.compile(loss='categorical_crossentropy',
               optimizer=SGD(lr=0.01, decay=1e-6, momentum=0.5, nesterov=True))
In [19]:
%time hist2 = model2.fit(x_tr2, y_tr2, nb_epoch=300, batch_size=50, validation_split=0.2, show_accuracy=True, shuffle=True)
Train on 90 samples, validate on 23 samples
Epoch 1/300
90/90 [==============================] - 1s - loss: 1.0399 - acc: 0.4667 - val_loss: 0.7806 - val_acc: 0.6087
Epoch 2/300
90/90 [==============================] - 1s - loss: 1.3866 - acc: 0.4778 - val_loss: 0.6906 - val_acc: 0.6522
Epoch 3/300
90/90 [==============================] - 1s - loss: 0.6967 - acc: 0.5556 - val_loss: 0.6911 - val_acc: 0.4348
Epoch 4/300
90/90 [==============================] - 1s - loss: 0.6830 - acc: 0.5333 - val_loss: 0.6906 - val_acc: 0.4783
Epoch 5/300
90/90 [==============================] - 1s - loss: 0.6687 - acc: 0.6111 - val_loss: 0.6908 - val_acc: 0.4783
Epoch 6/300
90/90 [==============================] - 1s - loss: 0.6750 - acc: 0.5444 - val_loss: 0.6916 - val_acc: 0.4783
Epoch 7/300
90/90 [==============================] - 0s - loss: 0.6753 - acc: 0.5667 - val_loss: 0.6898 - val_acc: 0.5217
Epoch 8/300
90/90 [==============================] - 0s - loss: 0.6466 - acc: 0.6222 - val_loss: 0.6854 - val_acc: 0.6957
Epoch 9/300
90/90 [==============================] - 0s - loss: 0.6499 - acc: 0.5889 - val_loss: 0.6777 - val_acc: 0.7391
Epoch 10/300
90/90 [==============================] - 0s - loss: 0.6237 - acc: 0.7111 - val_loss: 0.6681 - val_acc: 0.7826
Epoch 11/300
90/90 [==============================] - 0s - loss: 0.6371 - acc: 0.6333 - val_loss: 0.6642 - val_acc: 0.7826
Epoch 12/300
90/90 [==============================] - 0s - loss: 0.6597 - acc: 0.6222 - val_loss: 0.6525 - val_acc: 0.7826
Epoch 13/300
90/90 [==============================] - 1s - loss: 0.6056 - acc: 0.7556 - val_loss: 0.6309 - val_acc: 0.7826
Epoch 14/300
90/90 [==============================] - 1s - loss: 0.6026 - acc: 0.7222 - val_loss: 0.6152 - val_acc: 0.7826
Epoch 15/300
90/90 [==============================] - 1s - loss: 0.5782 - acc: 0.7778 - val_loss: 0.5809 - val_acc: 0.7391
Epoch 16/300
90/90 [==============================] - 1s - loss: 0.5816 - acc: 0.6667 - val_loss: 0.5601 - val_acc: 0.6957
Epoch 17/300
90/90 [==============================] - 1s - loss: 0.5470 - acc: 0.7667 - val_loss: 0.5020 - val_acc: 0.7826
Epoch 18/300
90/90 [==============================] - 1s - loss: 0.5153 - acc: 0.8222 - val_loss: 0.4770 - val_acc: 0.9130
Epoch 19/300
90/90 [==============================] - 1s - loss: 0.5370 - acc: 0.6889 - val_loss: 0.4638 - val_acc: 0.9130
Epoch 20/300
90/90 [==============================] - 1s - loss: 0.4568 - acc: 0.8333 - val_loss: 0.4466 - val_acc: 0.7826
Epoch 21/300
90/90 [==============================] - 1s - loss: 0.4291 - acc: 0.8556 - val_loss: 0.3728 - val_acc: 0.7826
Epoch 22/300
90/90 [==============================] - 0s - loss: 0.4393 - acc: 0.8111 - val_loss: 1.0505 - val_acc: 0.3913
Epoch 23/300
90/90 [==============================] - 0s - loss: 0.9342 - acc: 0.5556 - val_loss: 0.5069 - val_acc: 0.7826
Epoch 24/300
90/90 [==============================] - 1s - loss: 0.4402 - acc: 0.8889 - val_loss: 0.4452 - val_acc: 0.9565
Epoch 25/300
90/90 [==============================] - 1s - loss: 0.3901 - acc: 0.9000 - val_loss: 0.3840 - val_acc: 0.8261
Epoch 26/300
90/90 [==============================] - 0s - loss: 0.3522 - acc: 0.8889 - val_loss: 0.3237 - val_acc: 0.8696
Epoch 27/300
90/90 [==============================] - 1s - loss: 0.3036 - acc: 0.9222 - val_loss: 0.3891 - val_acc: 0.8261
Epoch 28/300
90/90 [==============================] - 1s - loss: 0.3815 - acc: 0.8333 - val_loss: 0.3657 - val_acc: 0.8261
Epoch 29/300
90/90 [==============================] - 1s - loss: 0.3303 - acc: 0.8667 - val_loss: 0.2777 - val_acc: 0.8696
Epoch 30/300
90/90 [==============================] - 1s - loss: 0.2911 - acc: 0.9111 - val_loss: 0.2709 - val_acc: 0.8261
Epoch 31/300
90/90 [==============================] - 0s - loss: 0.3467 - acc: 0.8556 - val_loss: 0.5713 - val_acc: 0.6957
Epoch 32/300
90/90 [==============================] - 0s - loss: 0.8462 - acc: 0.5778 - val_loss: 0.3907 - val_acc: 1.0000
Epoch 33/300
90/90 [==============================] - 1s - loss: 0.3429 - acc: 0.9333 - val_loss: 0.3468 - val_acc: 1.0000
Epoch 34/300
90/90 [==============================] - 1s - loss: 0.3139 - acc: 0.9111 - val_loss: 0.2882 - val_acc: 0.9130
Epoch 35/300
90/90 [==============================] - 1s - loss: 0.3005 - acc: 0.9222 - val_loss: 0.2923 - val_acc: 0.8696
Epoch 36/300
90/90 [==============================] - 1s - loss: 0.2867 - acc: 0.8778 - val_loss: 0.2827 - val_acc: 0.8696
Epoch 37/300
90/90 [==============================] - 1s - loss: 0.2752 - acc: 0.8778 - val_loss: 0.2134 - val_acc: 0.9565
Epoch 38/300
90/90 [==============================] - 1s - loss: 0.2586 - acc: 0.9000 - val_loss: 0.2611 - val_acc: 0.8696
Epoch 39/300
90/90 [==============================] - 0s - loss: 0.2219 - acc: 0.9333 - val_loss: 0.2027 - val_acc: 0.9565
Epoch 40/300
90/90 [==============================] - 0s - loss: 0.2230 - acc: 0.9000 - val_loss: 0.1872 - val_acc: 0.9565
Epoch 41/300
90/90 [==============================] - 1s - loss: 0.1963 - acc: 0.9333 - val_loss: 0.2668 - val_acc: 0.8261
Epoch 42/300
90/90 [==============================] - 1s - loss: 0.2910 - acc: 0.8667 - val_loss: 0.3676 - val_acc: 0.7826
Epoch 43/300
90/90 [==============================] - 1s - loss: 0.3219 - acc: 0.8333 - val_loss: 0.2622 - val_acc: 0.8261
Epoch 44/300
90/90 [==============================] - 1s - loss: 0.2513 - acc: 0.8889 - val_loss: 0.2616 - val_acc: 0.8696
Epoch 45/300
90/90 [==============================] - 1s - loss: 0.2036 - acc: 0.9000 - val_loss: 0.1765 - val_acc: 0.9565
Epoch 46/300
90/90 [==============================] - 0s - loss: 0.1907 - acc: 0.9333 - val_loss: 0.1622 - val_acc: 1.0000
Epoch 47/300
90/90 [==============================] - 0s - loss: 0.2019 - acc: 0.9111 - val_loss: 0.4110 - val_acc: 0.7826
Epoch 48/300
90/90 [==============================] - 0s - loss: 0.3092 - acc: 0.8444 - val_loss: 0.2420 - val_acc: 0.8696
Epoch 49/300
90/90 [==============================] - 0s - loss: 0.1941 - acc: 0.9111 - val_loss: 0.1727 - val_acc: 0.9565
Epoch 50/300
90/90 [==============================] - 1s - loss: 0.1590 - acc: 0.9556 - val_loss: 0.1548 - val_acc: 0.9565
Epoch 51/300
90/90 [==============================] - 0s - loss: 0.1482 - acc: 0.9444 - val_loss: 0.2383 - val_acc: 0.8696
Epoch 52/300
90/90 [==============================] - 0s - loss: 0.1574 - acc: 0.9333 - val_loss: 0.1787 - val_acc: 0.8696
Epoch 53/300
90/90 [==============================] - 1s - loss: 0.1303 - acc: 0.9778 - val_loss: 0.1769 - val_acc: 0.8696
Epoch 54/300
90/90 [==============================] - 0s - loss: 0.1911 - acc: 0.9444 - val_loss: 0.2677 - val_acc: 0.8696
Epoch 55/300
90/90 [==============================] - 0s - loss: 0.1820 - acc: 0.9556 - val_loss: 0.1796 - val_acc: 0.8696
Epoch 56/300
90/90 [==============================] - 1s - loss: 0.1631 - acc: 0.9333 - val_loss: 0.1271 - val_acc: 1.0000
Epoch 57/300
90/90 [==============================] - 0s - loss: 0.2230 - acc: 0.8778 - val_loss: 0.2511 - val_acc: 0.8696
Epoch 58/300
90/90 [==============================] - 1s - loss: 0.3554 - acc: 0.8000 - val_loss: 0.1581 - val_acc: 0.9565
Epoch 59/300
90/90 [==============================] - 0s - loss: 0.1889 - acc: 0.9222 - val_loss: 0.1505 - val_acc: 0.9565
Epoch 60/300
90/90 [==============================] - 0s - loss: 0.1425 - acc: 0.9444 - val_loss: 0.1605 - val_acc: 0.9130
Epoch 61/300
90/90 [==============================] - 0s - loss: 0.1091 - acc: 0.9667 - val_loss: 0.1666 - val_acc: 0.9130
Epoch 62/300
90/90 [==============================] - 0s - loss: 0.1312 - acc: 0.9667 - val_loss: 0.1943 - val_acc: 0.8696
Epoch 63/300
90/90 [==============================] - 0s - loss: 0.1101 - acc: 0.9667 - val_loss: 0.1231 - val_acc: 0.9565
Epoch 64/300
90/90 [==============================] - 0s - loss: 0.0906 - acc: 0.9667 - val_loss: 0.1399 - val_acc: 0.9130
Epoch 65/300
90/90 [==============================] - 0s - loss: 0.1493 - acc: 0.9333 - val_loss: 0.2983 - val_acc: 0.8261
Epoch 66/300
90/90 [==============================] - 0s - loss: 0.1549 - acc: 0.9556 - val_loss: 0.1205 - val_acc: 0.9565
Epoch 67/300
90/90 [==============================] - 0s - loss: 0.1105 - acc: 0.9667 - val_loss: 0.2095 - val_acc: 0.8696
Epoch 68/300
90/90 [==============================] - 1s - loss: 0.0899 - acc: 1.0000 - val_loss: 0.1466 - val_acc: 0.9130
Epoch 69/300
90/90 [==============================] - 1s - loss: 0.1203 - acc: 0.9667 - val_loss: 0.2227 - val_acc: 0.8696
Epoch 70/300
90/90 [==============================] - 1s - loss: 0.0937 - acc: 0.9778 - val_loss: 0.1123 - val_acc: 0.9130
Epoch 71/300
90/90 [==============================] - 1s - loss: 0.0782 - acc: 0.9889 - val_loss: 0.1077 - val_acc: 0.9565
Epoch 72/300
90/90 [==============================] - 1s - loss: 0.0846 - acc: 0.9667 - val_loss: 0.1087 - val_acc: 0.9565
Epoch 73/300
90/90 [==============================] - 1s - loss: 0.1006 - acc: 0.9556 - val_loss: 0.1115 - val_acc: 0.9565
Epoch 74/300
90/90 [==============================] - 0s - loss: 0.0964 - acc: 0.9778 - val_loss: 0.1320 - val_acc: 0.9130
Epoch 75/300
90/90 [==============================] - 1s - loss: 0.0507 - acc: 1.0000 - val_loss: 0.1225 - val_acc: 0.9130
Epoch 76/300
90/90 [==============================] - 0s - loss: 0.0694 - acc: 0.9889 - val_loss: 0.2367 - val_acc: 0.8696
Epoch 77/300
90/90 [==============================] - 0s - loss: 0.1103 - acc: 0.9556 - val_loss: 0.1502 - val_acc: 0.9130
Epoch 78/300
90/90 [==============================] - 0s - loss: 0.0589 - acc: 0.9889 - val_loss: 0.0920 - val_acc: 0.9565
Epoch 79/300
90/90 [==============================] - 0s - loss: 0.0941 - acc: 0.9667 - val_loss: 0.1236 - val_acc: 0.9130
Epoch 80/300
90/90 [==============================] - 0s - loss: 0.1139 - acc: 0.9667 - val_loss: 0.2630 - val_acc: 0.9130
Epoch 81/300
90/90 [==============================] - 0s - loss: 0.3434 - acc: 0.8111 - val_loss: 0.0892 - val_acc: 1.0000
Epoch 82/300
90/90 [==============================] - 0s - loss: 0.1221 - acc: 0.9556 - val_loss: 0.1174 - val_acc: 0.9130
Epoch 83/300
90/90 [==============================] - 0s - loss: 0.0974 - acc: 0.9667 - val_loss: 0.1204 - val_acc: 0.9130
Epoch 84/300
90/90 [==============================] - 1s - loss: 0.0585 - acc: 0.9889 - val_loss: 0.1238 - val_acc: 0.9130
Epoch 85/300
90/90 [==============================] - 1s - loss: 0.0637 - acc: 1.0000 - val_loss: 0.0910 - val_acc: 1.0000
Epoch 86/300
90/90 [==============================] - 1s - loss: 0.0916 - acc: 0.9667 - val_loss: 0.1088 - val_acc: 0.9565
Epoch 87/300
90/90 [==============================] - 0s - loss: 0.0800 - acc: 0.9778 - val_loss: 0.1313 - val_acc: 0.9130
Epoch 88/300
90/90 [==============================] - 1s - loss: 0.0471 - acc: 1.0000 - val_loss: 0.1224 - val_acc: 0.9130
Epoch 89/300
90/90 [==============================] - 1s - loss: 0.0530 - acc: 1.0000 - val_loss: 0.1650 - val_acc: 0.9130
Epoch 90/300
90/90 [==============================] - 0s - loss: 0.0699 - acc: 0.9778 - val_loss: 0.0840 - val_acc: 1.0000
Epoch 91/300
90/90 [==============================] - 1s - loss: 0.0825 - acc: 0.9667 - val_loss: 0.0979 - val_acc: 0.9565
Epoch 92/300
90/90 [==============================] - 0s - loss: 0.0903 - acc: 0.9778 - val_loss: 0.1078 - val_acc: 0.9565
Epoch 93/300
90/90 [==============================] - 0s - loss: 0.1183 - acc: 0.9444 - val_loss: 0.1363 - val_acc: 0.9130
Epoch 94/300
90/90 [==============================] - 0s - loss: 0.0533 - acc: 0.9889 - val_loss: 0.1034 - val_acc: 0.9565
Epoch 95/300
90/90 [==============================] - 0s - loss: 0.0429 - acc: 1.0000 - val_loss: 0.1242 - val_acc: 0.9130
Epoch 96/300
90/90 [==============================] - 0s - loss: 0.0636 - acc: 0.9778 - val_loss: 0.0809 - val_acc: 0.9565
Epoch 97/300
90/90 [==============================] - 0s - loss: 0.0360 - acc: 1.0000 - val_loss: 0.1855 - val_acc: 0.9130
Epoch 98/300
90/90 [==============================] - 0s - loss: 0.0463 - acc: 0.9889 - val_loss: 0.0901 - val_acc: 0.9565
Epoch 99/300
90/90 [==============================] - 0s - loss: 0.0439 - acc: 1.0000 - val_loss: 0.0723 - val_acc: 1.0000
Epoch 100/300
90/90 [==============================] - 0s - loss: 0.0435 - acc: 0.9889 - val_loss: 0.1123 - val_acc: 0.9130
Epoch 101/300
90/90 [==============================] - 0s - loss: 0.0491 - acc: 0.9889 - val_loss: 0.0809 - val_acc: 1.0000
Epoch 102/300
90/90 [==============================] - 0s - loss: 0.0585 - acc: 0.9667 - val_loss: 0.1034 - val_acc: 0.9130
Epoch 103/300
90/90 [==============================] - 0s - loss: 0.0326 - acc: 1.0000 - val_loss: 0.0889 - val_acc: 0.9565
Epoch 104/300
90/90 [==============================] - 0s - loss: 0.0565 - acc: 0.9778 - val_loss: 0.0983 - val_acc: 0.9565
Epoch 105/300
90/90 [==============================] - 0s - loss: 0.0190 - acc: 1.0000 - val_loss: 0.1080 - val_acc: 0.9565
Epoch 106/300
90/90 [==============================] - 0s - loss: 0.0575 - acc: 0.9889 - val_loss: 0.2302 - val_acc: 0.8696
Epoch 107/300
90/90 [==============================] - 0s - loss: 0.0728 - acc: 0.9667 - val_loss: 0.0974 - val_acc: 0.9565
Epoch 108/300
90/90 [==============================] - 0s - loss: 0.0343 - acc: 0.9778 - val_loss: 0.1325 - val_acc: 0.9130
Epoch 109/300
90/90 [==============================] - 1s - loss: 0.0325 - acc: 0.9889 - val_loss: 0.1171 - val_acc: 0.9130
Epoch 110/300
90/90 [==============================] - 0s - loss: 0.0277 - acc: 0.9889 - val_loss: 0.1497 - val_acc: 0.9130
Epoch 111/300
90/90 [==============================] - 0s - loss: 0.0193 - acc: 1.0000 - val_loss: 0.1124 - val_acc: 0.9130
Epoch 112/300
90/90 [==============================] - 0s - loss: 0.0293 - acc: 1.0000 - val_loss: 0.0956 - val_acc: 0.9565
Epoch 113/300
90/90 [==============================] - 0s - loss: 0.0289 - acc: 1.0000 - val_loss: 0.0892 - val_acc: 0.9565
Epoch 114/300
90/90 [==============================] - 1s - loss: 0.0594 - acc: 0.9778 - val_loss: 0.0804 - val_acc: 0.9565
Epoch 115/300
90/90 [==============================] - 1s - loss: 0.0236 - acc: 1.0000 - val_loss: 0.1528 - val_acc: 0.9130
Epoch 116/300
90/90 [==============================] - 1s - loss: 0.0894 - acc: 0.9778 - val_loss: 0.1723 - val_acc: 0.9130
Epoch 117/300
90/90 [==============================] - 0s - loss: 0.0273 - acc: 0.9889 - val_loss: 0.0988 - val_acc: 0.9130
Epoch 118/300
90/90 [==============================] - 1s - loss: 0.0370 - acc: 1.0000 - val_loss: 0.1103 - val_acc: 0.9130
Epoch 119/300
90/90 [==============================] - 1s - loss: 0.0445 - acc: 0.9889 - val_loss: 0.0650 - val_acc: 0.9565
Epoch 120/300
90/90 [==============================] - 1s - loss: 0.0491 - acc: 0.9889 - val_loss: 0.2139 - val_acc: 0.9130
Epoch 121/300
90/90 [==============================] - 1s - loss: 0.0355 - acc: 0.9889 - val_loss: 0.1004 - val_acc: 0.9565
Epoch 122/300
90/90 [==============================] - 1s - loss: 0.0433 - acc: 0.9778 - val_loss: 0.2555 - val_acc: 0.8696
Epoch 123/300
90/90 [==============================] - 0s - loss: 0.0360 - acc: 0.9889 - val_loss: 0.1175 - val_acc: 0.9565
Epoch 124/300
90/90 [==============================] - 0s - loss: 0.0296 - acc: 0.9889 - val_loss: 0.1121 - val_acc: 0.9565
Epoch 125/300
90/90 [==============================] - 1s - loss: 0.0352 - acc: 0.9889 - val_loss: 0.1950 - val_acc: 0.9130
Epoch 126/300
90/90 [==============================] - 1s - loss: 0.0262 - acc: 1.0000 - val_loss: 0.1009 - val_acc: 0.9565
Epoch 127/300
90/90 [==============================] - 0s - loss: 0.0348 - acc: 0.9889 - val_loss: 0.0796 - val_acc: 0.9565
Epoch 128/300
90/90 [==============================] - 0s - loss: 0.1109 - acc: 0.9444 - val_loss: 0.0895 - val_acc: 0.9565
Epoch 129/300
90/90 [==============================] - 0s - loss: 0.0468 - acc: 0.9889 - val_loss: 0.1531 - val_acc: 0.9130
Epoch 130/300
90/90 [==============================] - 1s - loss: 0.0248 - acc: 1.0000 - val_loss: 0.0972 - val_acc: 0.9565
Epoch 131/300
90/90 [==============================] - 1s - loss: 0.0405 - acc: 0.9889 - val_loss: 0.1586 - val_acc: 0.9130
Epoch 132/300
90/90 [==============================] - 1s - loss: 0.0304 - acc: 0.9889 - val_loss: 0.0720 - val_acc: 1.0000
Epoch 133/300
90/90 [==============================] - 0s - loss: 0.0336 - acc: 1.0000 - val_loss: 0.1022 - val_acc: 0.9130
Epoch 134/300
90/90 [==============================] - 1s - loss: 0.0211 - acc: 1.0000 - val_loss: 0.1164 - val_acc: 0.9130
Epoch 135/300
90/90 [==============================] - 1s - loss: 0.0096 - acc: 1.0000 - val_loss: 0.1074 - val_acc: 0.9130
Epoch 136/300
90/90 [==============================] - 0s - loss: 0.0300 - acc: 1.0000 - val_loss: 0.1440 - val_acc: 0.9130
Epoch 137/300
90/90 [==============================] - 0s - loss: 0.0547 - acc: 0.9778 - val_loss: 0.1235 - val_acc: 0.9130
Epoch 138/300
90/90 [==============================] - 1s - loss: 0.0297 - acc: 1.0000 - val_loss: 0.1186 - val_acc: 0.9130
Epoch 139/300
90/90 [==============================] - 1s - loss: 0.0229 - acc: 1.0000 - val_loss: 0.1132 - val_acc: 0.9130
Epoch 140/300
90/90 [==============================] - 1s - loss: 0.0335 - acc: 0.9889 - val_loss: 0.0974 - val_acc: 0.9130
Epoch 141/300
90/90 [==============================] - 0s - loss: 0.0170 - acc: 1.0000 - val_loss: 0.1076 - val_acc: 0.9130
Epoch 142/300
90/90 [==============================] - 0s - loss: 0.0328 - acc: 0.9889 - val_loss: 0.0997 - val_acc: 0.9565
Epoch 143/300
90/90 [==============================] - 0s - loss: 0.0207 - acc: 1.0000 - val_loss: 0.0917 - val_acc: 0.9565
Epoch 144/300
90/90 [==============================] - 0s - loss: 0.0175 - acc: 0.9889 - val_loss: 0.1152 - val_acc: 0.9130
Epoch 145/300
90/90 [==============================] - 1s - loss: 0.0216 - acc: 1.0000 - val_loss: 0.0747 - val_acc: 1.0000
Epoch 146/300
90/90 [==============================] - 1s - loss: 0.0171 - acc: 1.0000 - val_loss: 0.0850 - val_acc: 0.9565
Epoch 147/300
90/90 [==============================] - 1s - loss: 0.0378 - acc: 0.9889 - val_loss: 0.1627 - val_acc: 0.9130
Epoch 148/300
90/90 [==============================] - 1s - loss: 0.0321 - acc: 0.9889 - val_loss: 0.0840 - val_acc: 0.9565
Epoch 149/300
90/90 [==============================] - 0s - loss: 0.0291 - acc: 1.0000 - val_loss: 0.0758 - val_acc: 1.0000
Epoch 150/300
90/90 [==============================] - 0s - loss: 0.0206 - acc: 1.0000 - val_loss: 0.1039 - val_acc: 0.9130
Epoch 151/300
90/90 [==============================] - 0s - loss: 0.0159 - acc: 1.0000 - val_loss: 0.1360 - val_acc: 0.9130
Epoch 152/300
90/90 [==============================] - 0s - loss: 0.0169 - acc: 1.0000 - val_loss: 0.0791 - val_acc: 0.9565
Epoch 153/300
90/90 [==============================] - 0s - loss: 0.0184 - acc: 1.0000 - val_loss: 0.1268 - val_acc: 0.9565
Epoch 154/300
90/90 [==============================] - 1s - loss: 0.0229 - acc: 1.0000 - val_loss: 0.1319 - val_acc: 0.9565
Epoch 155/300
90/90 [==============================] - 1s - loss: 0.0112 - acc: 1.0000 - val_loss: 0.1080 - val_acc: 0.9565
Epoch 156/300
90/90 [==============================] - 1s - loss: 0.0080 - acc: 1.0000 - val_loss: 0.1089 - val_acc: 0.9565
Epoch 157/300
90/90 [==============================] - 1s - loss: 0.0115 - acc: 1.0000 - val_loss: 0.1102 - val_acc: 0.9565
Epoch 158/300
90/90 [==============================] - 0s - loss: 0.0297 - acc: 1.0000 - val_loss: 0.1371 - val_acc: 0.9130
Epoch 159/300
90/90 [==============================] - 0s - loss: 0.0323 - acc: 0.9778 - val_loss: 0.2731 - val_acc: 0.9130
Epoch 160/300
90/90 [==============================] - 0s - loss: 0.0681 - acc: 0.9778 - val_loss: 0.1236 - val_acc: 0.9130
Epoch 161/300
90/90 [==============================] - 0s - loss: 0.0110 - acc: 1.0000 - val_loss: 0.1396 - val_acc: 0.9130
Epoch 162/300
90/90 [==============================] - 1s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.1090 - val_acc: 0.9565
Epoch 163/300
90/90 [==============================] - 1s - loss: 0.0097 - acc: 1.0000 - val_loss: 0.1054 - val_acc: 0.9565
Epoch 164/300
90/90 [==============================] - 1s - loss: 0.0122 - acc: 1.0000 - val_loss: 0.1334 - val_acc: 0.9130
Epoch 165/300
90/90 [==============================] - 0s - loss: 0.0139 - acc: 1.0000 - val_loss: 0.1088 - val_acc: 0.9565
Epoch 166/300
90/90 [==============================] - 0s - loss: 0.0177 - acc: 0.9889 - val_loss: 0.0811 - val_acc: 0.9565
Epoch 167/300
90/90 [==============================] - 1s - loss: 0.0231 - acc: 1.0000 - val_loss: 0.1379 - val_acc: 0.9565
Epoch 168/300
90/90 [==============================] - 0s - loss: 0.0171 - acc: 0.9889 - val_loss: 0.0871 - val_acc: 0.9130
Epoch 169/300
90/90 [==============================] - 1s - loss: 0.0137 - acc: 1.0000 - val_loss: 0.1199 - val_acc: 0.9565
Epoch 170/300
90/90 [==============================] - 1s - loss: 0.0336 - acc: 0.9889 - val_loss: 0.1733 - val_acc: 0.9130
Epoch 171/300
90/90 [==============================] - 0s - loss: 0.0417 - acc: 0.9778 - val_loss: 0.1402 - val_acc: 0.9565
Epoch 172/300
90/90 [==============================] - 1s - loss: 0.0256 - acc: 0.9778 - val_loss: 0.1422 - val_acc: 0.9565
Epoch 173/300
90/90 [==============================] - 0s - loss: 0.0306 - acc: 0.9889 - val_loss: 0.1464 - val_acc: 0.9565
Epoch 174/300
90/90 [==============================] - 1s - loss: 0.0105 - acc: 1.0000 - val_loss: 0.0995 - val_acc: 0.9565
Epoch 175/300
90/90 [==============================] - 0s - loss: 0.0097 - acc: 1.0000 - val_loss: 0.1273 - val_acc: 0.9565
Epoch 176/300
90/90 [==============================] - 0s - loss: 0.0087 - acc: 1.0000 - val_loss: 0.1212 - val_acc: 0.9565
Epoch 177/300
90/90 [==============================] - 0s - loss: 0.0164 - acc: 0.9889 - val_loss: 0.1164 - val_acc: 0.9565
Epoch 178/300
90/90 [==============================] - 1s - loss: 0.0117 - acc: 1.0000 - val_loss: 0.0887 - val_acc: 0.9130
Epoch 179/300
90/90 [==============================] - 0s - loss: 0.0106 - acc: 1.0000 - val_loss: 0.1232 - val_acc: 0.9565
Epoch 180/300
90/90 [==============================] - 0s - loss: 0.0072 - acc: 1.0000 - val_loss: 0.1177 - val_acc: 0.9565
Epoch 181/300
90/90 [==============================] - 0s - loss: 0.0057 - acc: 1.0000 - val_loss: 0.1270 - val_acc: 0.9565
Epoch 182/300
90/90 [==============================] - 0s - loss: 0.0068 - acc: 1.0000 - val_loss: 0.1109 - val_acc: 0.9565
Epoch 183/300
90/90 [==============================] - 1s - loss: 0.0086 - acc: 1.0000 - val_loss: 0.1193 - val_acc: 0.9565
Epoch 184/300
90/90 [==============================] - 1s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.0905 - val_acc: 0.9565
Epoch 185/300
90/90 [==============================] - 0s - loss: 0.0094 - acc: 1.0000 - val_loss: 0.0876 - val_acc: 0.9130
Epoch 186/300
90/90 [==============================] - 0s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.1277 - val_acc: 0.9565
Epoch 187/300
90/90 [==============================] - 0s - loss: 0.0074 - acc: 1.0000 - val_loss: 0.1292 - val_acc: 0.9565
Epoch 188/300
90/90 [==============================] - 0s - loss: 0.0063 - acc: 1.0000 - val_loss: 0.0926 - val_acc: 0.9565
Epoch 189/300
90/90 [==============================] - 0s - loss: 0.0216 - acc: 1.0000 - val_loss: 0.0919 - val_acc: 0.9565
Epoch 190/300
90/90 [==============================] - 1s - loss: 0.0230 - acc: 1.0000 - val_loss: 0.0950 - val_acc: 0.9565
Epoch 191/300
90/90 [==============================] - 0s - loss: 0.0050 - acc: 1.0000 - val_loss: 0.0949 - val_acc: 0.9130
Epoch 192/300
90/90 [==============================] - 0s - loss: 0.0111 - acc: 1.0000 - val_loss: 0.1243 - val_acc: 0.9565
Epoch 193/300
90/90 [==============================] - 0s - loss: 0.0581 - acc: 0.9778 - val_loss: 0.1805 - val_acc: 0.9565
Epoch 194/300
90/90 [==============================] - 0s - loss: 0.0513 - acc: 0.9889 - val_loss: 0.1249 - val_acc: 0.9130
Epoch 195/300
90/90 [==============================] - 0s - loss: 0.0134 - acc: 1.0000 - val_loss: 0.1310 - val_acc: 0.9130
Epoch 196/300
90/90 [==============================] - 1s - loss: 0.0074 - acc: 1.0000 - val_loss: 0.1177 - val_acc: 0.9130
Epoch 197/300
90/90 [==============================] - 0s - loss: 0.0070 - acc: 1.0000 - val_loss: 0.1186 - val_acc: 0.9130
Epoch 198/300
90/90 [==============================] - 0s - loss: 0.0376 - acc: 0.9889 - val_loss: 0.1205 - val_acc: 0.9130
Epoch 199/300
90/90 [==============================] - 0s - loss: 0.0172 - acc: 1.0000 - val_loss: 0.1694 - val_acc: 0.9130
Epoch 200/300
90/90 [==============================] - 0s - loss: 0.0208 - acc: 1.0000 - val_loss: 0.1018 - val_acc: 0.9130
Epoch 201/300
90/90 [==============================] - 0s - loss: 0.0201 - acc: 0.9889 - val_loss: 0.0970 - val_acc: 0.9130
Epoch 202/300
90/90 [==============================] - 0s - loss: 0.0119 - acc: 1.0000 - val_loss: 0.1239 - val_acc: 0.9565
Epoch 203/300
90/90 [==============================] - 0s - loss: 0.0058 - acc: 1.0000 - val_loss: 0.1197 - val_acc: 0.9565
Epoch 204/300
90/90 [==============================] - 0s - loss: 0.0118 - acc: 1.0000 - val_loss: 0.0985 - val_acc: 0.9130
Epoch 205/300
90/90 [==============================] - 0s - loss: 0.0071 - acc: 1.0000 - val_loss: 0.0979 - val_acc: 0.9130
Epoch 206/300
90/90 [==============================] - 0s - loss: 0.0186 - acc: 1.0000 - val_loss: 0.0914 - val_acc: 0.9130
Epoch 207/300
90/90 [==============================] - 0s - loss: 0.0103 - acc: 1.0000 - val_loss: 0.1213 - val_acc: 0.9565
Epoch 208/300
90/90 [==============================] - 0s - loss: 0.0046 - acc: 1.0000 - val_loss: 0.1220 - val_acc: 0.9565
Epoch 209/300
90/90 [==============================] - 0s - loss: 0.0058 - acc: 1.0000 - val_loss: 0.1125 - val_acc: 0.9565
Epoch 210/300
90/90 [==============================] - 0s - loss: 0.0116 - acc: 1.0000 - val_loss: 0.0976 - val_acc: 0.9565
Epoch 211/300
90/90 [==============================] - 0s - loss: 0.0116 - acc: 1.0000 - val_loss: 0.1024 - val_acc: 0.9130
Epoch 212/300
90/90 [==============================] - 0s - loss: 0.0155 - acc: 1.0000 - val_loss: 0.0942 - val_acc: 0.9130
Epoch 213/300
90/90 [==============================] - 1s - loss: 0.0189 - acc: 1.0000 - val_loss: 0.1291 - val_acc: 0.9565
Epoch 214/300
90/90 [==============================] - 1s - loss: 0.0094 - acc: 1.0000 - val_loss: 0.1052 - val_acc: 0.9565
Epoch 215/300
90/90 [==============================] - 1s - loss: 0.0113 - acc: 1.0000 - val_loss: 0.1050 - val_acc: 0.9565
Epoch 216/300
90/90 [==============================] - 1s - loss: 0.0024 - acc: 1.0000 - val_loss: 0.1054 - val_acc: 0.9565
Epoch 217/300
90/90 [==============================] - 1s - loss: 0.0078 - acc: 1.0000 - val_loss: 0.1559 - val_acc: 0.9130
Epoch 218/300
90/90 [==============================] - 0s - loss: 0.0150 - acc: 0.9889 - val_loss: 0.0952 - val_acc: 0.9565
Epoch 219/300
90/90 [==============================] - 0s - loss: 0.0156 - acc: 0.9889 - val_loss: 0.0926 - val_acc: 0.9565
Epoch 220/300
90/90 [==============================] - 0s - loss: 0.0156 - acc: 1.0000 - val_loss: 0.1104 - val_acc: 0.9565
Epoch 221/300
90/90 [==============================] - 0s - loss: 0.0085 - acc: 1.0000 - val_loss: 0.1062 - val_acc: 0.9565
Epoch 222/300
90/90 [==============================] - 0s - loss: 0.0118 - acc: 1.0000 - val_loss: 0.1462 - val_acc: 0.9130
Epoch 223/300
90/90 [==============================] - 0s - loss: 0.0075 - acc: 1.0000 - val_loss: 0.1268 - val_acc: 0.9565
Epoch 224/300
90/90 [==============================] - 1s - loss: 0.0063 - acc: 1.0000 - val_loss: 0.1001 - val_acc: 0.9565
Epoch 225/300
90/90 [==============================] - 1s - loss: 0.0026 - acc: 1.0000 - val_loss: 0.0983 - val_acc: 0.9565
Epoch 226/300
90/90 [==============================] - 0s - loss: 0.0073 - acc: 1.0000 - val_loss: 0.1049 - val_acc: 0.9565
Epoch 227/300
90/90 [==============================] - 0s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.0840 - val_acc: 0.9130
Epoch 228/300
90/90 [==============================] - 0s - loss: 0.0157 - acc: 0.9889 - val_loss: 0.1114 - val_acc: 0.9565
Epoch 229/300
90/90 [==============================] - 0s - loss: 0.0122 - acc: 1.0000 - val_loss: 0.0686 - val_acc: 0.9565
Epoch 230/300
90/90 [==============================] - 0s - loss: 0.0093 - acc: 1.0000 - val_loss: 0.0700 - val_acc: 0.9565
Epoch 231/300
90/90 [==============================] - 1s - loss: 0.0099 - acc: 1.0000 - val_loss: 0.1028 - val_acc: 0.9565
Epoch 232/300
90/90 [==============================] - 1s - loss: 0.0064 - acc: 1.0000 - val_loss: 0.1025 - val_acc: 0.9565
Epoch 233/300
90/90 [==============================] - 0s - loss: 0.0074 - acc: 1.0000 - val_loss: 0.0926 - val_acc: 0.9565
Epoch 234/300
90/90 [==============================] - 0s - loss: 0.0052 - acc: 1.0000 - val_loss: 0.0959 - val_acc: 0.9565
Epoch 235/300
90/90 [==============================] - 0s - loss: 0.0084 - acc: 1.0000 - val_loss: 0.0849 - val_acc: 0.9565
Epoch 236/300
90/90 [==============================] - 0s - loss: 0.0142 - acc: 0.9889 - val_loss: 0.0851 - val_acc: 0.9565
Epoch 237/300
90/90 [==============================] - 1s - loss: 0.0188 - acc: 0.9889 - val_loss: 0.1226 - val_acc: 0.9565
Epoch 238/300
90/90 [==============================] - 0s - loss: 0.0084 - acc: 1.0000 - val_loss: 0.0801 - val_acc: 0.9565
Epoch 239/300
90/90 [==============================] - 0s - loss: 0.0038 - acc: 1.0000 - val_loss: 0.0891 - val_acc: 0.9565
Epoch 240/300
90/90 [==============================] - 1s - loss: 0.0067 - acc: 1.0000 - val_loss: 0.0719 - val_acc: 0.9565
Epoch 241/300
90/90 [==============================] - 0s - loss: 0.0148 - acc: 1.0000 - val_loss: 0.0950 - val_acc: 0.9565
Epoch 242/300
90/90 [==============================] - 1s - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0893 - val_acc: 0.9565
Epoch 243/300
90/90 [==============================] - 0s - loss: 0.0044 - acc: 1.0000 - val_loss: 0.0908 - val_acc: 0.9565
Epoch 244/300
90/90 [==============================] - 1s - loss: 0.0048 - acc: 1.0000 - val_loss: 0.0920 - val_acc: 0.9565
Epoch 245/300
90/90 [==============================] - 1s - loss: 0.0027 - acc: 1.0000 - val_loss: 0.0913 - val_acc: 0.9565
Epoch 246/300
90/90 [==============================] - 1s - loss: 0.0036 - acc: 1.0000 - val_loss: 0.1076 - val_acc: 0.9565
Epoch 247/300
90/90 [==============================] - 1s - loss: 0.0092 - acc: 1.0000 - val_loss: 0.1066 - val_acc: 0.9565
Epoch 248/300
90/90 [==============================] - 1s - loss: 0.0039 - acc: 1.0000 - val_loss: 0.0955 - val_acc: 0.9565
Epoch 249/300
90/90 [==============================] - 0s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.1633 - val_acc: 0.9130
Epoch 250/300
90/90 [==============================] - 0s - loss: 0.0125 - acc: 1.0000 - val_loss: 0.1188 - val_acc: 0.9565
Epoch 251/300
90/90 [==============================] - 0s - loss: 0.0142 - acc: 0.9889 - val_loss: 0.0771 - val_acc: 0.9130
Epoch 252/300
90/90 [==============================] - 0s - loss: 0.0114 - acc: 1.0000 - val_loss: 0.0702 - val_acc: 0.9565
Epoch 253/300
90/90 [==============================] - 1s - loss: 0.0113 - acc: 1.0000 - val_loss: 0.0801 - val_acc: 0.9565
Epoch 254/300
90/90 [==============================] - 1s - loss: 0.0043 - acc: 1.0000 - val_loss: 0.0929 - val_acc: 0.9565
Epoch 255/300
90/90 [==============================] - 1s - loss: 0.0106 - acc: 1.0000 - val_loss: 0.1069 - val_acc: 0.9565
Epoch 256/300
90/90 [==============================] - 0s - loss: 0.0073 - acc: 1.0000 - val_loss: 0.0679 - val_acc: 0.9565
Epoch 257/300
90/90 [==============================] - 0s - loss: 0.0047 - acc: 1.0000 - val_loss: 0.0780 - val_acc: 0.9565
Epoch 258/300
90/90 [==============================] - 0s - loss: 0.0059 - acc: 1.0000 - val_loss: 0.1025 - val_acc: 0.9565
Epoch 259/300
90/90 [==============================] - 0s - loss: 0.0031 - acc: 1.0000 - val_loss: 0.0988 - val_acc: 0.9565
Epoch 260/300
90/90 [==============================] - 0s - loss: 0.0079 - acc: 1.0000 - val_loss: 0.1362 - val_acc: 0.9565
Epoch 261/300
90/90 [==============================] - 0s - loss: 0.0097 - acc: 1.0000 - val_loss: 0.1144 - val_acc: 0.9565
Epoch 262/300
90/90 [==============================] - 0s - loss: 0.0032 - acc: 1.0000 - val_loss: 0.1109 - val_acc: 0.9565
Epoch 263/300
90/90 [==============================] - 0s - loss: 0.0041 - acc: 1.0000 - val_loss: 0.0958 - val_acc: 0.9565
Epoch 264/300
90/90 [==============================] - 0s - loss: 0.0105 - acc: 1.0000 - val_loss: 0.0787 - val_acc: 0.9565
Epoch 265/300
90/90 [==============================] - 0s - loss: 0.0060 - acc: 1.0000 - val_loss: 0.0792 - val_acc: 0.9565
Epoch 266/300
90/90 [==============================] - 1s - loss: 0.0048 - acc: 1.0000 - val_loss: 0.1111 - val_acc: 0.9565
Epoch 267/300
90/90 [==============================] - 0s - loss: 0.0062 - acc: 1.0000 - val_loss: 0.1125 - val_acc: 0.9565
Epoch 268/300
90/90 [==============================] - 0s - loss: 0.0043 - acc: 1.0000 - val_loss: 0.1085 - val_acc: 0.9565
Epoch 269/300
90/90 [==============================] - 0s - loss: 0.0028 - acc: 1.0000 - val_loss: 0.1072 - val_acc: 0.9565
Epoch 270/300
90/90 [==============================] - 0s - loss: 0.0093 - acc: 1.0000 - val_loss: 0.0868 - val_acc: 0.9565
Epoch 271/300
90/90 [==============================] - 0s - loss: 0.0085 - acc: 1.0000 - val_loss: 0.1240 - val_acc: 0.9565
Epoch 272/300
90/90 [==============================] - 0s - loss: 0.0031 - acc: 1.0000 - val_loss: 0.1353 - val_acc: 0.9565
Epoch 273/300
90/90 [==============================] - 0s - loss: 0.0119 - acc: 0.9889 - val_loss: 0.0767 - val_acc: 0.9565
Epoch 274/300
90/90 [==============================] - 0s - loss: 0.0056 - acc: 1.0000 - val_loss: 0.0722 - val_acc: 0.9130
Epoch 275/300
90/90 [==============================] - 0s - loss: 0.0082 - acc: 1.0000 - val_loss: 0.1111 - val_acc: 0.9565
Epoch 276/300
90/90 [==============================] - 0s - loss: 0.0041 - acc: 1.0000 - val_loss: 0.0826 - val_acc: 0.9565
Epoch 277/300
90/90 [==============================] - 1s - loss: 0.0051 - acc: 1.0000 - val_loss: 0.1086 - val_acc: 0.9565
Epoch 278/300
90/90 [==============================] - 1s - loss: 0.0030 - acc: 1.0000 - val_loss: 0.1004 - val_acc: 0.9565
Epoch 279/300
90/90 [==============================] - 0s - loss: 0.0036 - acc: 1.0000 - val_loss: 0.0816 - val_acc: 0.9565
Epoch 280/300
90/90 [==============================] - 0s - loss: 0.0229 - acc: 0.9889 - val_loss: 0.0761 - val_acc: 0.9130
Epoch 281/300
90/90 [==============================] - 0s - loss: 0.0066 - acc: 1.0000 - val_loss: 0.0879 - val_acc: 0.9565
Epoch 282/300
90/90 [==============================] - 0s - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0870 - val_acc: 0.9565
Epoch 283/300
90/90 [==============================] - 0s - loss: 0.0058 - acc: 1.0000 - val_loss: 0.0758 - val_acc: 0.9565
Epoch 284/300
90/90 [==============================] - 0s - loss: 0.0034 - acc: 1.0000 - val_loss: 0.0777 - val_acc: 0.9130
Epoch 285/300
90/90 [==============================] - 1s - loss: 0.0046 - acc: 1.0000 - val_loss: 0.0844 - val_acc: 0.9565
Epoch 286/300
90/90 [==============================] - 0s - loss: 0.0198 - acc: 0.9889 - val_loss: 0.1136 - val_acc: 0.9130
Epoch 287/300
90/90 [==============================] - 0s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.1121 - val_acc: 0.9130
Epoch 288/300
90/90 [==============================] - 0s - loss: 0.0080 - acc: 1.0000 - val_loss: 0.1443 - val_acc: 0.9565
Epoch 289/300
90/90 [==============================] - 0s - loss: 0.0048 - acc: 1.0000 - val_loss: 0.1185 - val_acc: 0.9130
Epoch 290/300
90/90 [==============================] - 0s - loss: 0.0071 - acc: 1.0000 - val_loss: 0.1343 - val_acc: 0.9565
Epoch 291/300
90/90 [==============================] - 1s - loss: 0.0041 - acc: 1.0000 - val_loss: 0.1142 - val_acc: 0.9130
Epoch 292/300
90/90 [==============================] - 0s - loss: 0.0032 - acc: 1.0000 - val_loss: 0.1091 - val_acc: 0.9130
Epoch 293/300
90/90 [==============================] - 1s - loss: 0.0108 - acc: 1.0000 - val_loss: 0.1949 - val_acc: 0.9565
Epoch 294/300
90/90 [==============================] - 1s - loss: 0.0121 - acc: 1.0000 - val_loss: 0.1314 - val_acc: 0.9130
Epoch 295/300
90/90 [==============================] - 1s - loss: 0.0101 - acc: 1.0000 - val_loss: 0.1855 - val_acc: 0.9565
Epoch 296/300
90/90 [==============================] - 0s - loss: 0.0038 - acc: 1.0000 - val_loss: 0.1862 - val_acc: 0.9565
Epoch 297/300
90/90 [==============================] - 1s - loss: 0.0262 - acc: 0.9889 - val_loss: 0.1130 - val_acc: 0.9565
Epoch 298/300
90/90 [==============================] - 0s - loss: 0.0090 - acc: 1.0000 - val_loss: 0.1036 - val_acc: 0.9565
Epoch 299/300
90/90 [==============================] - 1s - loss: 0.0025 - acc: 1.0000 - val_loss: 0.1118 - val_acc: 0.9565
Epoch 300/300
90/90 [==============================] - 0s - loss: 0.0016 - acc: 1.0000 - val_loss: 0.1133 - val_acc: 0.9565
Wall time: 4min 57s
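val_loss plateaus long before epoch 300, so most of this five-minute run is wasted. A hedged sketch of the same fit with Keras's EarlyStopping callback (the patience value is an illustrative choice):
from keras.callbacks import EarlyStopping
stopper = EarlyStopping(monitor='val_loss', patience=20, verbose=1)   # stop after 20 epochs without improvement
hist2 = model2.fit(x_tr2, y_tr2, nb_epoch=300, batch_size=50, validation_split=0.2,
                   show_accuracy=True, shuffle=True, callbacks=[stopper])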
In [20]:
plt.figure(figsize=(25, 15))
plt.plot(hist2.history['acc'], label='acc')
plt.plot(hist2.history['loss'], label='loss')
plt.plot(hist2.history['val_acc'], '--', label='val_acc')
plt.plot(hist2.history['val_loss'], '--', label='val_loss')
plt.legend()
plt.ylim(0, max(hist2.history['acc']) + 0.05)
plt.grid(False)
In [21]:
model2.evaluate(x_te2, y_te2, batch_size=50, show_accuracy=True, verbose=1)
29/29 [==============================] - 0s
Out[21]:
[0.33899489045143127, 0.89655172413793105]
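The CNN's 0.897 test accuracy improves on the MLP's 0.759 from cell 11. The same per-class report as cell 14 can be produced for it (a sketch mirroring the earlier cells):
y_pred2 = model2.predict_classes(x_te2, 20)
print(classification_report(cat2lab(y_te2), y_pred2))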
In [22]:
model2.summary()
--------------------------------------------------------------------------------
Initial input shape: (None, 1, 50, 50)
--------------------------------------------------------------------------------
Layer (name)                  Output Shape                  Param #
--------------------------------------------------------------------------------
Convolution2D (convolution2d) (None, 20, 50, 50)            2020
Activation (activation)       (None, 20, 50, 50)            0
MaxPooling2D (maxpooling2d)   (None, 20, 25, 25)            0
Dropout (dropout)             (None, 20, 25, 25)            0
Convolution2D (convolution2d) (None, 10, 25, 25)            20010
Activation (activation)       (None, 10, 25, 25)            0
MaxPooling2D (maxpooling2d)   (None, 10, 12, 12)            0
Dropout (dropout)             (None, 10, 12, 12)            0
Flatten (flatten)             (None, 1440)                  0
Dense (dense)                 (None, 1250)                  1801250
Activation (activation)       (None, 1250)                  0
Dense (dense)                 (None, 2)                     2502
--------------------------------------------------------------------------------
Total params: 1825782
--------------------------------------------------------------------------------
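The parameter counts follow directly from the layer shapes: the first convolution has 20 × (1 × 10 × 10 + 1) = 2,020 parameters (which is why the first layer must have 20 filters, not 15), the second 10 × (20 × 10 × 10 + 1) = 20,010, the hidden dense layer 1440 × 1250 + 1250 = 1,801,250, and the output layer 1250 × 2 + 2 = 2,502, totalling 1,825,782.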
In [23]:
def plot_wegh(model):
    '''
    Plot the first-layer convolution filters of a fitted model.
    # Args
        model: fitted Sequential model whose first layer is a Convolution2D
    '''
    wegh_arr = model.get_weights()
    num = len(wegh_arr[0])            # number of filters in the first layer
    root = int(np.sqrt(num))
    if root * root == num:            # perfect square: use a square grid
        col, row = root, root
    else:
        col = int(num / 2)
        row = int(num / col)
    fig, axes = plt.subplots(row, col, subplot_kw={'xticks': [], 'yticks': []})
    plt.subplots_adjust(hspace=0.02, wspace=0.05)
    for i, ax in zip(xrange(num), axes.flat):
        ax.imshow(wegh_arr[0][i][0])  # first input channel of filter i
        ax.grid(False)
    plt.show()
In [24]:
plot_wegh(model2)
In [25]:
m2_wegh = model2.get_weights()
for a in m2_wegh:
    print(np.shape(a))
(20L, 1L, 10L, 10L)
(20L,)
(10L, 20L, 10L, 10L)
(10L,)
(1440L, 1250L)
(1250L,)
(1250L, 2L)
(2L,)
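These weights can be persisted for later reuse with this Keras version's save_weights/load_weights (HDF5 format, requires h5py); a sketch with an illustrative filename:
model2.save_weights('d://model2_weights.h5', overwrite=True)   # needs h5py installed
# later, after rebuilding the same architecture:
# model2.load_weights('d://model2_weights.h5')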
In [26]:
# running sum over the 20 first-layer filters (each a[0] is a 10x10 kernel);
# the cumulative sum is printed after each addition
s = 0
for a in m2_wegh[0]:
    s += a[0]
    print(s)
[[ 0.00620134 -0.13845733 0.17591509 0.0215441 -0.13001302 -0.10961664
0.09710694 0.11488244 0.06559778 -0.14923584]
[-0.04241746 -0.19932838 -0.07904346 -0.07725573 0.12621395 -0.04883246
-0.00058707 0.01919791 0.03127079 0.10166034]
[ 0.14732262 -0.17069477 0.21938354 0.05472885 0.1744183 -0.16910024
-0.19700043 0.05240116 0.05536261 -0.02083936]
[-0.13052641 0.13355359 -0.02104438 -0.16838482 0.11542881 0.1082248
-0.0273373 0.02824887 0.01728136 0.00762939]
[ 0.19872969 0.09865101 0.03856189 0.13712491 -0.22385548 0.07265813
-0.20964214 0.12037341 0.21269387 -0.11606394]
[-0.13645858 0.00979582 -0.07854454 -0.16572605 0.02083321 -0.19112387
-0.10770136 0.00489368 -0.16099103 0.05978655]
[-0.19209585 -0.00059606 0.16589729 0.04225431 -0.12321249 -0.05634013
0.11261923 0.06198092 0.19332975 -0.01244372]
[-0.20585516 -0.19776061 0.02017435 -0.17258973 0.06694929 0.1213682
-0.01612765 -0.10077311 -0.18276682 0.11446702]
[-0.03165622 -0.03147631 0.1930169 -0.11984684 -0.01795018 -0.04037216
-0.21662343 0.22861683 0.21385407 -0.18548965]
[-0.05713855 -0.05520208 0.20014976 -0.12693036 0.09283825 -0.17254964
0.06280977 0.17607276 -0.1813834 0.00360906]]
[[ 0.12648733 -0.1715658 -0.04274493 0.00793372 -0.24602479 0.00053056
-0.06071877 -0.07959089 0.22240415 -0.17983669]
[-0.19616255 -0.2151514 0.12246741 -0.08920176 0.16007861 -0.1307243
-0.16665187 0.20214635 -0.01182121 0.09466124]
[-0.043963 -0.25230983 0.4158285 -0.12699297 -0.00279468 -0.25509575
-0.08157963 -0.17974241 0.12664855 0.00085032]
[-0.17001972 0.16463968 -0.18536471 -0.26581374 -0.03860645 0.33363527
-0.22697277 -0.06540903 -0.09372692 0.23346052]
[ 0.17904067 -0.0960282 -0.11639885 0.34692776 -0.4238773 0.03165868
-0.00897957 0.24738108 0.28606576 -0.01286658]
[-0.05731665 0.1650535 -0.21493904 -0.08247419 -0.11216547 -0.08843334
0.12803112 -0.10655545 -0.0254654 -0.05333728]
[-0.10637771 0.11436314 0.04907041 -0.15163708 -0.03625487 -0.16616949
0.29388204 -0.03004359 0.22026137 0.19494225]
[ 0.01569472 -0.07173236 0.13258889 -0.12881748 0.09683006 -0.0088532
0.07296193 0.11191168 -0.05380903 -0.05973295]
[-0.08699402 -0.17840023 0.31796443 -0.27603453 0.08220148 0.06792994
-0.09506997 0.34675333 0.23438676 -0.35152513]
[-0.03214089 -0.26128608 0.17031342 -0.24370655 0.05389659 -0.19857863
-0.01159791 0.25335765 0.00444444 -0.20781748]]
[[ 0.18562226 -0.08513047 -0.12810802 0.19169435 -0.02311353 0.00232128
0.00097791 -0.19683085 0.17008844 -0.15230738]
[-0.14756706 -0.10669463 0.16707271 -0.11366589 0.1137715 -0.01624795
-0.18048538 0.28420568 -0.18649526 0.25994501]
[-0.1589946 -0.3915568 0.19513196 0.06912644 0.10562368 -0.44977534
-0.12415126 -0.23835555 0.31550902 -0.1659136 ]
[-0.06384127 0.0465009 -0.35501307 -0.14039882 -0.06738956 0.16103803
-0.11929356 -0.26072609 0.07758689 0.21493801]
[ 0.29653144 -0.18157239 -0.28148788 0.55699474 -0.35193354 0.03860567
-0.21675633 0.06967835 0.09666331 0.10369477]
[-0.14306471 0.19889714 -0.30127457 0.11275619 0.10032026 -0.26437414
-0.00684933 0.02738911 -0.03862541 -0.01286658]
[-0.19530523 0.27415708 0.26968873 -0.27586973 0.08438636 -0.30612043
0.06951942 -0.08954588 0.14012417 0.09323205]
[ 0.0522916 -0.28789136 -0.02484636 -0.11756767 0.00700226 0.13442239
0.1769557 0.17248754 -0.13246688 -0.09218913]
[-0.06744661 -0.2928896 0.5110718 -0.36931378 0.20367074 0.10483879
-0.15321599 0.19525988 0.03090553 -0.28026614]
[-0.00695284 -0.45201561 0.13557684 -0.20052455 0.27172059 -0.18263046
0.10703527 0.08658603 0.04700967 -0.29751369]]
[[ 0.28340718 -0.19286425 -0.05114582 0.10456552 -0.16517672 -0.15968129
-0.12480658 -0.17365061 0.39992416 -0.29322118]
[-0.30813146 0.03809217 0.31537399 -0.0223982 -0.09934226 0.06948723
-0.20215429 0.4588168 -0.06709426 0.40387702]
[-0.04107838 -0.43818507 0.37561971 0.24827801 0.16986895 -0.60612965
-0.12357545 -0.11703541 0.36064258 -0.32318974]
[ 0.11864139 -0.092846 -0.38567647 -0.29469296 0.01336909 0.1953612
-0.21540818 -0.17310216 -0.11224973 0.36073768]
[ 0.5083397 0.01705785 -0.49528211 0.33540374 -0.33199 -0.01330065
-0.22056755 -0.04056732 0.17152828 -0.02421512]
[ 0.06340586 0.19710557 -0.35553232 0.13425235 -0.02171556 -0.42552674
0.1755109 -0.0878676 -0.06269419 -0.11063024]
[-0.14759395 0.28707522 0.08420457 -0.25096107 0.15038246 -0.50246572
0.23114505 -0.21450657 -0.07346945 0.10953709]
[ 0.2569333 -0.45602414 -0.13456181 -0.09043232 0.23797148 0.002607
0.11595342 0.23079154 0.07574891 -0.15565553]
[-0.26735973 -0.14475133 0.45971128 -0.16363396 0.32525653 0.32341224
-0.31325957 0.28257424 -0.06163114 -0.35279903]
[-0.16796744 -0.42578799 -0.00872254 -0.41724569 0.11644605 -0.05607525
0.11542758 0.28823537 -0.07875925 -0.49572706]]
[[ 8.76594037e-02 -2.42696971e-01 -1.49890900e-01 -6.81910217e-02
-2.95480311e-01 -3.52128685e-01 9.37055200e-02 -1.68551311e-01
2.23009646e-01 -1.40827551e-01]
[ -4.26561862e-01 5.03713377e-02 3.26109231e-01 -1.94370091e-01
-6.18611164e-02 4.29804325e-02 -3.73864293e-01 3.93558890e-01
7.06496015e-02 3.71135712e-01]
[ -1.66660577e-01 -5.86801708e-01 5.03344417e-01 1.12893209e-01
1.38772547e-01 -6.02529883e-01 -1.57554984e-01 6.57737255e-05
3.00744355e-01 -2.35161841e-01]
[ 1.94991425e-01 1.20559908e-01 -5.32344818e-01 -6.51702434e-02
1.10182017e-01 2.69103885e-01 -2.76382267e-03 -1.70321643e-01
-9.59272981e-02 2.46165335e-01]
[ 4.25509483e-01 1.40535697e-01 -5.18843591e-01 2.74979860e-01
-3.33375722e-01 9.39309299e-02 -9.20031816e-02 -2.28980333e-01
-1.23248994e-03 -2.83675659e-02]
[ -1.03373304e-01 1.39666602e-01 -2.75396854e-01 -2.19141841e-02
-7.36469477e-02 -6.17929459e-01 -1.08088553e-02 1.10924080e-01
-1.96088612e-01 -2.37643763e-01]
[ 6.04506582e-02 1.77078635e-01 2.28704989e-01 -3.67386609e-01
9.29197669e-03 -7.08830535e-01 1.99937537e-01 -1.89229354e-01
-1.82209194e-01 1.19197629e-01]
[ 3.42064857e-01 -2.74914533e-01 -1.28539279e-02 -1.44846469e-01
4.30547595e-01 2.60478742e-02 -9.95185822e-02 3.73519361e-01
2.04709381e-01 -1.89848095e-02]
[ -2.61788636e-01 -8.70234817e-02 3.67680490e-01 -2.52800137e-02
5.68395317e-01 1.64903387e-01 -3.31988901e-01 3.68068784e-01
-1.58735111e-01 -2.86162704e-01]
[ -3.72397900e-01 -3.57755333e-01 5.37241735e-02 -4.88235831e-01
3.49351764e-01 -2.47651748e-02 5.52407652e-02 1.06565550e-01
7.80692250e-02 -3.75956774e-01]]
[[ 2.58296013e-01 -4.67082143e-01 -1.52094290e-01 -9.41370130e-02
-1.26250044e-01 -5.20529866e-01 -1.36371017e-01 -2.66301721e-01
2.85199374e-01 -2.04369426e-04]
[ -5.51717281e-01 2.06435509e-02 4.73811835e-01 1.52239203e-03
-1.10471755e-01 -1.05890080e-01 -1.88189492e-01 3.17949712e-01
2.05268860e-01 1.68877050e-01]
[ -2.33404726e-01 -6.86743617e-01 4.43055183e-01 2.88371742e-02
3.50753576e-01 -5.44105053e-01 1.25138611e-02 -1.18965723e-01
4.96612430e-01 -1.92947239e-02]
[ 6.23218715e-02 2.72176340e-02 -7.54490733e-01 -2.52781749e-01
1.41429976e-02 9.12838578e-02 1.15851136e-02 -3.18388313e-01
-1.57440662e-01 3.35901380e-01]
[ 3.79683763e-01 2.62754858e-01 -4.80448425e-01 2.69293845e-01
-3.62224936e-01 3.03980559e-02 -1.76141247e-01 -3.13455611e-01
-5.28708883e-02 -2.38484323e-01]
[ -3.26330245e-01 2.27800116e-01 -3.48040581e-01 1.07628241e-01
6.68284446e-02 -7.17232287e-01 1.28747329e-01 1.39977112e-01
-2.06307307e-01 -1.33891463e-01]
[ -3.81574929e-02 -4.56383675e-02 2.23413259e-01 -5.34640849e-01
2.52070665e-01 -5.35041332e-01 1.81569755e-02 -4.14593607e-01
-1.10920787e-01 1.12099946e-02]
[ 3.32016200e-01 -3.91522288e-01 -2.16805339e-01 5.58544993e-02
2.59089649e-01 4.48840261e-02 4.71194983e-02 3.52848023e-01
3.20214480e-01 -2.53077187e-02]
[ -1.48254514e-01 -1.31366491e-01 1.54760376e-01 3.52718607e-02
7.41757989e-01 5.14307618e-02 -2.69179344e-01 2.49191955e-01
6.33592010e-02 -6.26852214e-02]
[ -4.16433424e-01 -4.65382785e-01 -4.64087240e-02 -4.05216217e-01
5.37876487e-01 1.69157356e-01 -1.17834911e-01 4.95944135e-02
4.78990674e-02 -3.83739054e-01]]
[[ 0.23209864 -0.57481939 -0.25807863 -0.2112454 -0.2534377 -0.5267843
-0.19969372 -0.06088404 0.29967865 0.14230354]
[-0.58743852 0.24088328 0.48735365 -0.15932055 -0.04356671 0.12906358
-0.07143593 0.41722268 0.32398865 0.07885726]
[-0.24077696 -0.90721428 0.61362696 -0.16669926 0.13907172 -0.57472259
0.1163123 -0.04274409 0.46024516 0.18513758]
[ 0.04206129 0.14565951 -0.89354873 -0.29226339 0.19543862 -0.12080996
0.08365273 -0.52482885 -0.03744765 0.31431609]
[ 0.41721478 0.13926362 -0.30206585 0.23879725 -0.24257594 -0.03789833
-0.05452046 -0.31618851 -0.08964458 -0.22079962]
[-0.5333879 0.03498273 -0.46003684 0.07186163 -0.00465576 -0.93361533
0.14929995 0.3125681 -0.38661814 -0.31496438]
[ 0.06590697 -0.05428525 0.13525626 -0.52988279 0.21041624 -0.33449692
-0.21311742 -0.28055155 -0.28872341 -0.13762185]
[ 0.41455013 -0.38107175 -0.19910285 0.05620683 0.12065817 0.03082978
-0.05180799 0.33290297 0.25825974 0.04553296]
[-0.01150724 0.03648724 0.38946319 0.03330551 0.79848969 0.05701005
-0.4708131 0.18375984 0.05805115 -0.01206008]
[-0.45033947 -0.38116962 0.11589871 -0.34968099 0.44823337 0.38083783
-0.2145886 0.190274 0.26654348 -0.49381626]]
[output truncated: successive snapshots of a 10x10 weight matrix, apparently recorded during training; the raw array dumps are omitted here for brevity]
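The block above appears to be a per-epoch log of one layer's weights. For reference, a minimal sketch of how such snapshots can be collected with a Keras Callback (the `WeightLogger` class and the `x_tr2`/`y_tr2` names are illustrative assumptions, not the notebook's original code):
In [ ]:
# illustrative helper: store a copy of every parameter array at each epoch end
class WeightLogger(Callback):
    def on_train_begin(self, logs={}):
        self.snapshots = []
    def on_epoch_end(self, epoch, logs={}):
        # get_weights() returns one numpy array per parameter tensor
        self.snapshots.append([w.copy() for w in self.model.get_weights()])

# usage (assuming the model2 training split used earlier):
# wl = WeightLogger()
# model2.fit(x_tr2, y_tr2, nb_epoch=300, batch_size=50, callbacks=[wl], verbose=0)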
In [27]:
y_pred2 = model2.predict_classes(np.array(x_te2))
y_pred2
29/29 [==============================] - 0s
Out[27]:
array([1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 0, 1, 1], dtype=int64)
In [28]:
y_ten2 = cat2lab(y_te2)
y_ten2
Out[28]:
array([0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1])
In [29]:
print(classification_report(y_ten2,y_pred2))
             precision    recall  f1-score   support

          0       0.90      0.82      0.86        11
          1       0.89      0.94      0.92        18

avg / total       0.90      0.90      0.90        29
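The recalls imply three residual errors: two class-0 images predicted as 1 and one class-1 image predicted as 0. A confusion matrix localises them directly; a minimal sketch:
In [ ]:
# rows = true class, columns = predicted class
from sklearn.metrics import confusion_matrix
print(confusion_matrix(y_ten2, y_pred2))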
In [30]:
cv = StratifiedKFold(cat2lab(labels), n_folds=10, shuffle=True)  # superseded by the 8-fold split below
In [31]:
from sklearn.svm import SVC
from sklearn.grid_search import GridSearchCV
In [64]:
cv = StratifiedKFold(cat2lab(labels), n_folds=8, shuffle=True)
In [65]:
params = {'C': [1e1, 1e2, 1e3, 1e4, 1e5],
          'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01]}
In [66]:
clf_grid = GridSearchCV(SVC(kernel='rbf'), params, cv=cv)
In [67]:
y_trn4 = cat2lab(y_tr1)  # unused; the grid search below is fit on the full data set
model4 = clf_grid.fit(imgsr,cat2lab(labels))
In [68]:
model4.best_score_ ,model4.best_params_
Out[68]:
(0.87323943661971826, {'C': 10.0, 'gamma': 0.01})
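Beyond best_score_, the fitted object keeps the cross-validated result for every one of the 25 C/gamma combinations. A minimal sketch of ranking them via the grid_scores_ attribute of the (now deprecated) sklearn.grid_search API used here:
In [ ]:
# each grid_scores_ entry holds (parameters, mean_validation_score, cv_validation_scores)
for gs in sorted(model4.grid_scores_, key=lambda g: -g.mean_validation_score)[:5]:
    print(gs.parameters, gs.mean_validation_score)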
In [69]:
# manual re-run of the GridSearchCV winner, scoring each fold by hand
svc_rslt = []
for tr_idx, te_idx in cv:
    clf = SVC(kernel='rbf', C=10.0, gamma=0.01)
    clf.fit(imgsr[tr_idx], cat2lab(labels)[tr_idx])
    svc_rslt.append(clf.score(imgsr[te_idx], cat2lab(labels)[te_idx]))
svc_rslt = np.array(svc_rslt)
svc_rslt
Out[69]:
array([ 1. , 0.86666667, 0.8 , 0.93333333, 0.86666667,
0.78571429, 0.78571429, 0.92307692, 0.92307692, 0.84615385])
In [70]:
print('cross validated SVC score is', svc_rslt.mean())
cross validated SVC score is 0.87304029304
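Since the StratifiedKFold object fixes its folds at construction, the same per-fold scores should be reproducible in one call with cross_val_score, which is already imported at the top of the notebook; a minimal sketch:
In [ ]:
# one-line equivalent of the manual fold loop above
scores = cross_val_score(SVC(kernel='rbf', C=10.0, gamma=0.01),
                         imgsr, cat2lab(labels), cv=cv)
print(scores.mean())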
In [71]:
test_acc = clf.score(x_te1, cat2lab(y_te1))  # fixed: the original cell was missing its closing parenthesis; note this clf was fit on the last CV fold only
In [39]:
import pywt
In [40]:
Ca, Cd = pywt.dwt2(x_tr[25], 'haar')  # Ca: approximation; Cd: (horizontal, vertical, diagonal) detail tuple
In [41]:
np.shape(Ca)
Out[41]:
(25L, 25L)
In [42]:
plt.imshow(Cd[1])  # vertical detail coefficients
Out[42]:
<matplotlib.image.AxesImage at 0x3d964198>
In [43]:
plt.imshow(pywt.threshold(Cd[1], 0.3))  # soft threshold (pywt's default mode)
Out[43]:
<matplotlib.image.AxesImage at 0x3be1b748>
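Thresholding the detail bands and inverting the transform is the usual wavelet denoising recipe; a minimal sketch with pywt.idwt2 (the 0.3 threshold is carried over from the cell above and is purely illustrative):
In [ ]:
# soft-threshold all three detail bands, then reconstruct the 50x50 image
Cd_t = tuple(pywt.threshold(d, 0.3) for d in Cd)
rec = pywt.idwt2((Ca, Cd_t), 'haar')
plt.imshow(rec)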
In [44]:
from cv2 import HoughLines
from cv2 import HoughLinesP
from os import listdir
import cv2
In [45]:
fnames = listdir('d://nor/')  # file names of the raw images
In [46]:
ima = cv2.imread('d://nor/' + fnames[1], 1)  # flag 1: load as a 3-channel colour image
In [47]:
imb = cv2.Canny(ima, 100, 250)  # Canny edge map with hysteresis thresholds 100/250
In [48]:
plt.imshow(imb)
Out[48]:
<matplotlib.image.AxesImage at 0x37946048>
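HoughLines and HoughLinesP were imported above but never called; a minimal sketch of running the probabilistic Hough transform on this edge map (all parameter values are illustrative guesses, not tuned settings from the notebook):
In [ ]:
# detect line segments, returned as (x1, y1, x2, y2), in the Canny edge image
lines = cv2.HoughLinesP(imb, rho=1, theta=np.pi / 180, threshold=30,
                        minLineLength=10, maxLineGap=5)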
In [49]:
from cv2 import CascadeClassifier
In [50]:
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
In [51]:
ens1 = RandomForestClassifier(n_estimators=250, max_depth=None, verbose=1)
# discrete SAMME boosting works with base estimators that lack predict_proba, e.g. a plain SVC
ens2 = AdaBoostClassifier(SVC(kernel='rbf', gamma=0.005, C=10.0),
                          algorithm="SAMME",
                          n_estimators=100,
                          learning_rate=0.01)
ens3 = AdaBoostClassifier(DecisionTreeClassifier(max_depth=None),
                          algorithm="SAMME",
                          n_estimators=100,
                          learning_rate=0.01)
ens1.fit(x_tr1, cat2lab(y_tr1))
ens2.fit(x_tr1, cat2lab(y_tr1))
ens3.fit(x_tr1, cat2lab(y_tr1))
[Parallel(n_jobs=1)]: Done 49 tasks | elapsed: 0.0s
[Parallel(n_jobs=1)]: Done 199 tasks | elapsed: 0.1s
[Parallel(n_jobs=1)]: Done 250 out of 250 | elapsed: 0.2s finished
Out[51]:
AdaBoostClassifier(algorithm='SAMME',
base_estimator=DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,
max_features=None, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
presort=False, random_state=None, splitter='best'),
learning_rate=0.01, n_estimators=100, random_state=None)
In [52]:
ens1.score(x_te1,cat2lab(y_te1))
[Parallel(n_jobs=1)]: Done 49 tasks | elapsed: 0.0s
[Parallel(n_jobs=1)]: Done 199 tasks | elapsed: 0.0s
[Parallel(n_jobs=1)]: Done 250 out of 250 | elapsed: 0.0s finished
Out[52]:
0.68965517241379315
In [53]:
ens2.score(x_te1,cat2lab(y_te1))
Out[53]:
0.62068965517241381
In [54]:
ens3.score(x_te1,cat2lab(y_te1))
Out[54]:
0.48275862068965519
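None of the three ensembles reaches the tuned SVC's ~0.87 cross-validated accuracy on this split. A standard follow-up is a majority vote over them; a minimal sketch with VotingClassifier (assumes scikit-learn >= 0.17, where it was introduced; not part of the original notebook):
In [ ]:
from sklearn.ensemble import VotingClassifier
# hard majority vote over the three ensembles defined above
vote = VotingClassifier(estimators=[('rf', ens1), ('ada_svc', ens2), ('ada_tree', ens3)],
                        voting='hard')
vote.fit(x_tr1, cat2lab(y_tr1))
vote.score(x_te1, cat2lab(y_te1))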
Content source: Jesse-Back/mach_image_proc