Training our VGG style network on augmented data


In [1]:
import warnings
warnings.filterwarnings('ignore')

In [2]:
%matplotlib inline
%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [3]:
import matplotlib.pylab as plt
import numpy as np

In [4]:
from distutils.version import StrictVersion

In [5]:
import sklearn
print(sklearn.__version__)

assert StrictVersion(sklearn.__version__ ) >= StrictVersion('0.18.1')


0.18.1

In [6]:
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
print(tf.__version__)

assert StrictVersion(tf.__version__) >= StrictVersion('1.1.0')


1.2.1

In [7]:
import keras
print(keras.__version__)

assert StrictVersion(keras.__version__) >= StrictVersion('2.0.0')


Using TensorFlow backend.
2.0.6

In [8]:
import pandas as pd
print(pd.__version__)

assert StrictVersion(pd.__version__) >= StrictVersion('0.19.0')


0.20.1

Loading and mixing augmented and original data


In [9]:
!curl -O https://raw.githubusercontent.com/DJCordhose/speed-limit-signs/master/data/speed-limit-signs.zip
!curl -O https://raw.githubusercontent.com/DJCordhose/speed-limit-signs/master/data/augmented-signs.zip


  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100 1810k  100 1810k    0     0  5267k      0 --:--:-- --:--:-- --:--:-- 6033k
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100 16.7M  100 16.7M    0     0  21.6M      0 --:--:-- --:--:-- --:--:-- 21.8M

In [10]:
# https://docs.python.org/3/library/zipfile.html
from zipfile import ZipFile
zip = ZipFile('speed-limit-signs.zip')
zip.extractall('.')
zip = ZipFile('augmented-signs.zip')
zip.extractall('.')

In [12]:
!ls -lh


total 20M
drwxrwxr-x 8 ubuntu ubuntu 4.0K Jul 21 16:58 augmented-signs
-rw-rw-r-- 1 ubuntu ubuntu  17M Aug 30 11:48 augmented-signs.zip
-rw-rw-r-- 1 ubuntu ubuntu  32K Aug 30 11:46 CNN-Full.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 121K Aug 30 11:46 CNN.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 110K Aug 30 11:46 CNN-Mixed.ipynb
-rw-rw-r-- 1 ubuntu ubuntu  98K Aug 30 11:46 CNN-Original.ipynb
-rw-rw-r-- 1 ubuntu ubuntu  22K Aug 30 11:46 cnn-train-augmented.ipynb
drwxrwxr-x 3 ubuntu ubuntu 4.0K Jul 21 16:50 __MACOSX
drwxrwxr-x 2 ubuntu ubuntu 4.0K Aug  9 07:12 models
-rw------- 1 ubuntu ubuntu 109K Aug 30 11:48 nohup.out
-rw-rw-r-- 1 ubuntu ubuntu   36 Aug  8 09:49 sample_iris.json
drwxrwxr-x 8 ubuntu ubuntu 4.0K Jul 21 16:58 speed-limit-signs
-rw-rw-r-- 1 ubuntu ubuntu 1.8M Aug 30 11:48 speed-limit-signs.zip

In [13]:
import os
import skimage.data
import skimage.transform
from keras.utils.np_utils import to_categorical
import numpy as np

def load_data(data_dir, type=".ppm"):
    """Load a labelled image dataset from a directory tree.

    Each subdirectory of ``data_dir`` is named after an integer class
    label (0..5) and contains the images for that class.

    Parameters
    ----------
    data_dir : str
        Root directory containing one subdirectory per label.
    type : str
        File extension of the images to load (e.g. ".ppm" or ".png").
        NOTE: the name shadows the built-in ``type``; kept for backward
        compatibility with existing callers that pass it by keyword.

    Returns
    -------
    X : np.ndarray
        Images resized to 64x64, shape (n_samples, 64, 64, 3).
    y : np.ndarray
        One-hot encoded labels, shape (n_samples, 6).
    """
    num_categories = 6

    # Sort both directory and file listings: os.listdir order is
    # platform-arbitrary, which made the dataset order (and anything
    # derived from it) nondeterministic across runs.
    directories = sorted(d for d in os.listdir(data_dir)
                         if os.path.isdir(os.path.join(data_dir, d)))
    # Loop through the label directories and collect the data in
    # two lists, labels and images.
    labels = []
    images = []
    for d in directories:
        label_dir = os.path.join(data_dir, d)
        file_names = sorted(os.path.join(label_dir, f)
                            for f in os.listdir(label_dir)
                            if f.endswith(type))
        # For each label, load its images and add them to the images list,
        # recording the label number (i.e. directory name) alongside.
        for f in file_names:
            images.append(skimage.data.imread(f))
            labels.append(int(d))
    # Resize to a uniform 64x64 so the images stack into a single array.
    images64 = [skimage.transform.resize(image, (64, 64)) for image in images]
    y = np.array(labels)
    y = to_categorical(y, num_categories)
    X = np.array(images64)
    return X, y

In [14]:
# Load datasets.
ROOT_PATH = "./"

In [15]:
original_dir = os.path.join(ROOT_PATH, "speed-limit-signs")
original_images, original_labels = load_data(original_dir, type=".ppm")

In [16]:
data_dir = os.path.join(ROOT_PATH, "augmented-signs")
augmented_images, augmented_labels = load_data(data_dir, type=".png")

In [17]:
all_images = np.vstack((original_images, augmented_images))

In [18]:
all_labels = np.vstack((original_labels, augmented_labels))

In [19]:
# https://stackoverflow.com/a/4602224
# Use the explicit `np` alias imported above; the bare `numpy` name only
# worked because of the `%pylab inline` star import, which is fragile.
# NOTE: no seed is set here, so the shuffle differs between runs.
p = np.random.permutation(len(all_labels))

In [20]:
shuffled_images = all_images[p]

In [21]:
shuffled_labels = all_labels[p]

In [22]:
# Turn this around if you want the large training set using augmented data or the original one

# X, y = original_images, original_labels
# X, y = augmented_images, augmented_labels
X, y = shuffled_images, shuffled_labels

In [23]:
from sklearn.model_selection import train_test_split

In [24]:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, stratify=y)

In [25]:
X_train.shape, y_train.shape


Out[25]:
((3335, 64, 64, 3), (3335, 6))

In [26]:
from keras.models import Model
from keras.layers import Dense, Dropout, Activation, Flatten, Input
from keras.layers import Convolution2D, MaxPooling2D

# Dropout rate applied after every pooling block and the dense layer.
# This is important for regularization — try and vary between .4 and .75.
drop_out = 0.7

# input tensor for a 3-channel 64x64 image
inputs = Input(shape=(64, 64, 3))

# VGG-style block 1: three 3x3 convolutions with 64 filters each,
# 'same' padding keeps the 64x64 spatial size; pooling halves it to 32x32.
x = Convolution2D(64, 3, activation='relu', padding='same')(inputs)
x = Convolution2D(64, 3, activation='relu', padding='same')(x)
x = Convolution2D(64, 3, activation='relu', padding='same')(x)
x = MaxPooling2D(pool_size=(2, 2))(x)
x = Dropout(drop_out)(x)

# block 2: two 3x3 convolutions with 128 filters; pooling -> 16x16
x = Convolution2D(128, 3, activation='relu', padding='same')(x)
x = Convolution2D(128, 3, activation='relu', padding='same')(x)
x = MaxPooling2D(pool_size=(2, 2))(x)
x = Dropout(drop_out)(x)

# block 3: one 3x3 convolution with 256 filters; pooling -> 8x8
x = Convolution2D(256, 3, activation='relu', padding='same')(x)
x = MaxPooling2D(pool_size=(2, 2))(x)
x = Dropout(drop_out)(x)

# classifier head: flatten the 8x8x256 feature map into a dense layer
x = Flatten()(x)
x = Dense(256, activation='relu')(x)
x = Dropout(drop_out)(x)

# softmax activation over the 6 speed-limit categories
predictions = Dense(6, activation='softmax')(x)

In [27]:
# Keras 2 functional API uses the keyword arguments `inputs`/`outputs`;
# `input`/`output` are deprecated Keras 1 aliases and emit warnings.
model = Model(inputs=inputs, outputs=predictions)
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, 64, 64, 3)         0         
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 64, 64, 64)        1792      
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 64, 64, 64)        36928     
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 64, 64, 64)        36928     
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 32, 32, 64)        0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 32, 32, 64)        0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 32, 32, 128)       73856     
_________________________________________________________________
conv2d_5 (Conv2D)            (None, 32, 32, 128)       147584    
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 16, 16, 128)       0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 16, 16, 128)       0         
_________________________________________________________________
conv2d_6 (Conv2D)            (None, 16, 16, 256)       295168    
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 8, 8, 256)         0         
_________________________________________________________________
dropout_3 (Dropout)          (None, 8, 8, 256)         0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 16384)             0         
_________________________________________________________________
dense_1 (Dense)              (None, 256)               4194560   
_________________________________________________________________
dropout_4 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 6)                 1542      
=================================================================
Total params: 4,788,358
Trainable params: 4,788,358
Non-trainable params: 0
_________________________________________________________________

In [28]:
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

In [29]:
# https://keras.io/callbacks/#tensorboard
tb_callback = keras.callbacks.TensorBoard(log_dir='./tf_log')
# To start tensorboard
# tensorboard --logdir=/mnt/c/Users/olive/Development/ml/tf_log
# open http://localhost:6006

In [30]:
early_stopping_callback = keras.callbacks.EarlyStopping(monitor='val_loss', patience=100, verbose=1)

In [31]:
checkpoint_callback = keras.callbacks.ModelCheckpoint('./model-checkpoints/weights.epoch-{epoch:02d}-val_loss-{val_loss:.2f}.hdf5');

In [32]:
!rm -r tf_log
!rm -r model-checkpoints
!mkdir model-checkpoints


rm: cannot remove 'tf_log': No such file or directory
rm: cannot remove 'model-checkpoints': No such file or directory

In [ ]:
# Depends on hardware GPU architecture, set as high as possible (this works well on K80)
BATCH_SIZE = 1000
# %time model.fit(X_train, y_train, epochs=500, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback, early_stopping_callback])
%time model.fit(X_train, y_train, epochs=1500, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback])
# %time model.fit(X_train, y_train, epochs=500, batch_size=BATCH_SIZE, validation_split=0.2)


Train on 2668 samples, validate on 667 samples
Epoch 1/1500
2668/2668 [==============================] - 31s - loss: 1.8513 - acc: 0.1960 - val_loss: 1.7897 - val_acc: 0.2474
Epoch 2/1500
2668/2668 [==============================] - 10s - loss: 1.7620 - acc: 0.2103 - val_loss: 1.7868 - val_acc: 0.2459
Epoch 3/1500
2668/2668 [==============================] - 10s - loss: 1.7510 - acc: 0.2208 - val_loss: 1.7893 - val_acc: 0.2624
Epoch 4/1500
2668/2668 [==============================] - 11s - loss: 1.7332 - acc: 0.2339 - val_loss: 1.7903 - val_acc: 0.2144
Epoch 5/1500
2668/2668 [==============================] - 11s - loss: 1.7259 - acc: 0.2339 - val_loss: 1.7854 - val_acc: 0.2654
Epoch 6/1500
2668/2668 [==============================] - 11s - loss: 1.7198 - acc: 0.2429 - val_loss: 1.7912 - val_acc: 0.1634
Epoch 7/1500
2668/2668 [==============================] - 11s - loss: 1.7148 - acc: 0.2350 - val_loss: 1.7906 - val_acc: 0.1769
Epoch 8/1500
2668/2668 [==============================] - 11s - loss: 1.7015 - acc: 0.2646 - val_loss: 1.7903 - val_acc: 0.1889
Epoch 9/1500
2668/2668 [==============================] - 11s - loss: 1.6957 - acc: 0.2717 - val_loss: 1.7914 - val_acc: 0.1544
Epoch 10/1500
2668/2668 [==============================] - 11s - loss: 1.6887 - acc: 0.2766 - val_loss: 1.7911 - val_acc: 0.1664
Epoch 11/1500
2668/2668 [==============================] - 11s - loss: 1.6796 - acc: 0.2924 - val_loss: 1.7893 - val_acc: 0.1784
Epoch 12/1500
2668/2668 [==============================] - 11s - loss: 1.6906 - acc: 0.2965 - val_loss: 1.7855 - val_acc: 0.2069
Epoch 13/1500
2668/2668 [==============================] - 11s - loss: 1.6771 - acc: 0.3010 - val_loss: 1.7912 - val_acc: 0.1619
Epoch 14/1500
2668/2668 [==============================] - 11s - loss: 1.6720 - acc: 0.3073 - val_loss: 1.7871 - val_acc: 0.1949
Epoch 15/1500
2668/2668 [==============================] - 11s - loss: 1.6713 - acc: 0.3152 - val_loss: 1.7813 - val_acc: 0.2144
Epoch 16/1500
2668/2668 [==============================] - 11s - loss: 1.6737 - acc: 0.3096 - val_loss: 1.7894 - val_acc: 0.1829
Epoch 17/1500
2668/2668 [==============================] - 11s - loss: 1.6555 - acc: 0.2957 - val_loss: 1.7838 - val_acc: 0.2099
Epoch 18/1500
2668/2668 [==============================] - 11s - loss: 1.6545 - acc: 0.3088 - val_loss: 1.7826 - val_acc: 0.2129
Epoch 19/1500
2668/2668 [==============================] - 11s - loss: 1.6431 - acc: 0.3122 - val_loss: 1.7872 - val_acc: 0.1889
Epoch 20/1500
2668/2668 [==============================] - 11s - loss: 1.6495 - acc: 0.3081 - val_loss: 1.7866 - val_acc: 0.1904
Epoch 21/1500
2668/2668 [==============================] - 11s - loss: 1.6410 - acc: 0.3126 - val_loss: 1.7783 - val_acc: 0.2159
Epoch 22/1500
2668/2668 [==============================] - 11s - loss: 1.6293 - acc: 0.3163 - val_loss: 1.7881 - val_acc: 0.1769
Epoch 23/1500
2668/2668 [==============================] - 11s - loss: 1.6400 - acc: 0.3088 - val_loss: 1.7825 - val_acc: 0.2069
Epoch 24/1500
2668/2668 [==============================] - 11s - loss: 1.6329 - acc: 0.3216 - val_loss: 1.7525 - val_acc: 0.2669
Epoch 25/1500
2668/2668 [==============================] - 11s - loss: 1.6333 - acc: 0.3156 - val_loss: 1.7690 - val_acc: 0.2249
Epoch 26/1500
2668/2668 [==============================] - 11s - loss: 1.6139 - acc: 0.3145 - val_loss: 1.7784 - val_acc: 0.2084
Epoch 27/1500
2668/2668 [==============================] - 11s - loss: 1.6194 - acc: 0.3268 - val_loss: 1.7618 - val_acc: 0.2399
Epoch 28/1500
2668/2668 [==============================] - 11s - loss: 1.6060 - acc: 0.3358 - val_loss: 1.7378 - val_acc: 0.2819
Epoch 29/1500
2668/2668 [==============================] - 11s - loss: 1.6030 - acc: 0.3328 - val_loss: 1.7696 - val_acc: 0.2189
Epoch 30/1500
2668/2668 [==============================] - 11s - loss: 1.5985 - acc: 0.3460 - val_loss: 1.7640 - val_acc: 0.2279
Epoch 31/1500
2668/2668 [==============================] - 11s - loss: 1.5821 - acc: 0.3347 - val_loss: 1.7333 - val_acc: 0.2639
Epoch 32/1500
2668/2668 [==============================] - 11s - loss: 1.5888 - acc: 0.3415 - val_loss: 1.7402 - val_acc: 0.2429
Epoch 33/1500
2668/2668 [==============================] - 11s - loss: 1.5712 - acc: 0.3519 - val_loss: 1.7632 - val_acc: 0.2009
Epoch 34/1500
2668/2668 [==============================] - 11s - loss: 1.5598 - acc: 0.3598 - val_loss: 1.7584 - val_acc: 0.2039
Epoch 35/1500
2668/2668 [==============================] - 11s - loss: 1.5624 - acc: 0.3452 - val_loss: 1.6937 - val_acc: 0.2594
Epoch 36/1500
2668/2668 [==============================] - 11s - loss: 1.5455 - acc: 0.3576 - val_loss: 1.6698 - val_acc: 0.2819
Epoch 37/1500
2668/2668 [==============================] - 11s - loss: 1.5547 - acc: 0.3512 - val_loss: 1.7283 - val_acc: 0.2234
Epoch 38/1500
2668/2668 [==============================] - 11s - loss: 1.5276 - acc: 0.3598 - val_loss: 1.7135 - val_acc: 0.2399
Epoch 39/1500
2668/2668 [==============================] - 11s - loss: 1.5259 - acc: 0.3433 - val_loss: 1.6569 - val_acc: 0.3238
Epoch 40/1500
2668/2668 [==============================] - 11s - loss: 1.5181 - acc: 0.3617 - val_loss: 1.6474 - val_acc: 0.3118
Epoch 41/1500
2668/2668 [==============================] - 11s - loss: 1.5054 - acc: 0.3789 - val_loss: 1.6566 - val_acc: 0.3148
Epoch 42/1500
2668/2668 [==============================] - 11s - loss: 1.4862 - acc: 0.3823 - val_loss: 1.6205 - val_acc: 0.3418
Epoch 43/1500
2668/2668 [==============================] - 11s - loss: 1.4836 - acc: 0.3876 - val_loss: 1.6146 - val_acc: 0.3463
Epoch 44/1500
2668/2668 [==============================] - 11s - loss: 1.4911 - acc: 0.3827 - val_loss: 1.5992 - val_acc: 0.3433
Epoch 45/1500
2668/2668 [==============================] - 11s - loss: 1.4783 - acc: 0.3831 - val_loss: 1.6649 - val_acc: 0.2864
Epoch 46/1500
2668/2668 [==============================] - 11s - loss: 1.4754 - acc: 0.3992 - val_loss: 1.7816 - val_acc: 0.2174
Epoch 47/1500
2668/2668 [==============================] - 11s - loss: 1.5174 - acc: 0.3624 - val_loss: 1.6318 - val_acc: 0.3463
Epoch 48/1500
2668/2668 [==============================] - 11s - loss: 1.4953 - acc: 0.3831 - val_loss: 1.6022 - val_acc: 0.3343
Epoch 49/1500
2668/2668 [==============================] - 11s - loss: 1.4460 - acc: 0.4052 - val_loss: 1.6693 - val_acc: 0.2819
Epoch 50/1500
2668/2668 [==============================] - 11s - loss: 1.4413 - acc: 0.4093 - val_loss: 1.6239 - val_acc: 0.3178
Epoch 51/1500
2668/2668 [==============================] - 11s - loss: 1.4408 - acc: 0.3992 - val_loss: 1.6142 - val_acc: 0.3418
Epoch 52/1500
2668/2668 [==============================] - 11s - loss: 1.4156 - acc: 0.4284 - val_loss: 1.5953 - val_acc: 0.3478
Epoch 53/1500
2668/2668 [==============================] - 11s - loss: 1.4175 - acc: 0.4224 - val_loss: 1.5783 - val_acc: 0.3613
Epoch 54/1500
2668/2668 [==============================] - 11s - loss: 1.3869 - acc: 0.4247 - val_loss: 1.6096 - val_acc: 0.3298
Epoch 55/1500
2668/2668 [==============================] - 11s - loss: 1.3741 - acc: 0.4408 - val_loss: 1.6617 - val_acc: 0.2954
Epoch 56/1500
2668/2668 [==============================] - 11s - loss: 1.3760 - acc: 0.4370 - val_loss: 1.5749 - val_acc: 0.3568
Epoch 57/1500
2668/2668 [==============================] - 11s - loss: 1.3956 - acc: 0.4322 - val_loss: 1.5502 - val_acc: 0.3748
Epoch 58/1500
2668/2668 [==============================] - 11s - loss: 1.3505 - acc: 0.4460 - val_loss: 1.5304 - val_acc: 0.3793
Epoch 59/1500
2668/2668 [==============================] - 11s - loss: 1.3596 - acc: 0.4606 - val_loss: 1.5412 - val_acc: 0.3718
Epoch 60/1500
2668/2668 [==============================] - 11s - loss: 1.3731 - acc: 0.4475 - val_loss: 1.6072 - val_acc: 0.3388
Epoch 61/1500
2668/2668 [==============================] - 11s - loss: 1.3434 - acc: 0.4550 - val_loss: 1.6151 - val_acc: 0.3163
Epoch 62/1500
2668/2668 [==============================] - 11s - loss: 1.3339 - acc: 0.4685 - val_loss: 1.5811 - val_acc: 0.3373
Epoch 63/1500
2668/2668 [==============================] - 11s - loss: 1.3438 - acc: 0.4670 - val_loss: 1.5492 - val_acc: 0.3703
Epoch 64/1500
2668/2668 [==============================] - 11s - loss: 1.3504 - acc: 0.4584 - val_loss: 1.5340 - val_acc: 0.3718
Epoch 65/1500
2668/2668 [==============================] - 11s - loss: 1.3081 - acc: 0.4809 - val_loss: 1.5402 - val_acc: 0.3778
Epoch 66/1500
2668/2668 [==============================] - 11s - loss: 1.3160 - acc: 0.4659 - val_loss: 1.6309 - val_acc: 0.3178
Epoch 67/1500
2668/2668 [==============================] - 11s - loss: 1.3338 - acc: 0.4790 - val_loss: 1.6969 - val_acc: 0.2954
Epoch 68/1500
2668/2668 [==============================] - 11s - loss: 1.3404 - acc: 0.4708 - val_loss: 1.4793 - val_acc: 0.4018
Epoch 69/1500
2668/2668 [==============================] - 11s - loss: 1.3248 - acc: 0.4846 - val_loss: 1.5607 - val_acc: 0.3598
Epoch 70/1500
2668/2668 [==============================] - 11s - loss: 1.3080 - acc: 0.4794 - val_loss: 1.6056 - val_acc: 0.3553
Epoch 71/1500
2668/2668 [==============================] - 11s - loss: 1.3047 - acc: 0.4843 - val_loss: 1.4453 - val_acc: 0.4243
Epoch 72/1500
2668/2668 [==============================] - 11s - loss: 1.2821 - acc: 0.5011 - val_loss: 1.5031 - val_acc: 0.4048
Epoch 73/1500
2668/2668 [==============================] - 11s - loss: 1.2460 - acc: 0.5026 - val_loss: 1.5126 - val_acc: 0.3868
Epoch 74/1500
2668/2668 [==============================] - 11s - loss: 1.2045 - acc: 0.5289 - val_loss: 1.4715 - val_acc: 0.3973
Epoch 75/1500
2668/2668 [==============================] - 11s - loss: 1.2112 - acc: 0.5184 - val_loss: 1.4011 - val_acc: 0.4423
Epoch 76/1500
2668/2668 [==============================] - 11s - loss: 1.2362 - acc: 0.5071 - val_loss: 1.4549 - val_acc: 0.4108
Epoch 77/1500
2668/2668 [==============================] - 11s - loss: 1.2025 - acc: 0.5349 - val_loss: 1.5353 - val_acc: 0.3613
Epoch 78/1500
2668/2668 [==============================] - 11s - loss: 1.1978 - acc: 0.5439 - val_loss: 1.3850 - val_acc: 0.4528
Epoch 79/1500
2668/2668 [==============================] - 11s - loss: 1.2034 - acc: 0.5304 - val_loss: 1.4265 - val_acc: 0.4258
Epoch 80/1500
2668/2668 [==============================] - 11s - loss: 1.1932 - acc: 0.5157 - val_loss: 1.4864 - val_acc: 0.3823
Epoch 81/1500
2668/2668 [==============================] - 11s - loss: 1.1597 - acc: 0.5555 - val_loss: 1.4867 - val_acc: 0.4018
Epoch 82/1500
2668/2668 [==============================] - 11s - loss: 1.1703 - acc: 0.5521 - val_loss: 1.3941 - val_acc: 0.4453
Epoch 83/1500
2668/2668 [==============================] - 11s - loss: 1.1392 - acc: 0.5469 - val_loss: 1.4089 - val_acc: 0.4438
Epoch 84/1500
2668/2668 [==============================] - 11s - loss: 1.1141 - acc: 0.5746 - val_loss: 1.4188 - val_acc: 0.4213
Epoch 85/1500
2668/2668 [==============================] - 11s - loss: 1.1021 - acc: 0.5836 - val_loss: 1.3434 - val_acc: 0.4723
Epoch 86/1500
2668/2668 [==============================] - 11s - loss: 1.1420 - acc: 0.5469 - val_loss: 1.3372 - val_acc: 0.4678
Epoch 87/1500
2668/2668 [==============================] - 11s - loss: 1.0874 - acc: 0.5791 - val_loss: 1.3474 - val_acc: 0.4798
Epoch 88/1500
2668/2668 [==============================] - 11s - loss: 1.0844 - acc: 0.5705 - val_loss: 1.3417 - val_acc: 0.4573
Epoch 89/1500
2668/2668 [==============================] - 11s - loss: 1.0976 - acc: 0.5686 - val_loss: 1.3952 - val_acc: 0.4438
Epoch 90/1500
2668/2668 [==============================] - 11s - loss: 1.0809 - acc: 0.5712 - val_loss: 1.4447 - val_acc: 0.3928
Epoch 91/1500
2668/2668 [==============================] - 11s - loss: 1.0851 - acc: 0.5697 - val_loss: 1.4780 - val_acc: 0.4048
Epoch 92/1500
2668/2668 [==============================] - 11s - loss: 1.0651 - acc: 0.5873 - val_loss: 1.3461 - val_acc: 0.4813
Epoch 93/1500
2668/2668 [==============================] - 11s - loss: 1.0458 - acc: 0.5960 - val_loss: 1.2711 - val_acc: 0.5067
Epoch 94/1500
2668/2668 [==============================] - 11s - loss: 1.0501 - acc: 0.5851 - val_loss: 1.3735 - val_acc: 0.4303
Epoch 95/1500
2668/2668 [==============================] - 11s - loss: 1.0193 - acc: 0.6049 - val_loss: 1.4002 - val_acc: 0.4198
Epoch 96/1500
2668/2668 [==============================] - 11s - loss: 1.0164 - acc: 0.5956 - val_loss: 1.2460 - val_acc: 0.5187
Epoch 97/1500
2668/2668 [==============================] - 11s - loss: 0.9967 - acc: 0.6023 - val_loss: 1.2775 - val_acc: 0.5172
Epoch 98/1500
2668/2668 [==============================] - 11s - loss: 0.9837 - acc: 0.6289 - val_loss: 1.2290 - val_acc: 0.5292
Epoch 99/1500
2668/2668 [==============================] - 11s - loss: 1.0077 - acc: 0.6121 - val_loss: 1.2253 - val_acc: 0.5142
Epoch 100/1500
2668/2668 [==============================] - 11s - loss: 1.0219 - acc: 0.5952 - val_loss: 1.3205 - val_acc: 0.4588
Epoch 101/1500
2668/2668 [==============================] - 11s - loss: 0.9997 - acc: 0.6222 - val_loss: 1.3376 - val_acc: 0.4588
Epoch 102/1500
2668/2668 [==============================] - 11s - loss: 0.9673 - acc: 0.6334 - val_loss: 1.2642 - val_acc: 0.5022
Epoch 103/1500
2668/2668 [==============================] - 11s - loss: 0.9681 - acc: 0.6274 - val_loss: 1.1818 - val_acc: 0.5607
Epoch 104/1500
2668/2668 [==============================] - 11s - loss: 0.9132 - acc: 0.6409 - val_loss: 1.3182 - val_acc: 0.4693
Epoch 105/1500
2668/2668 [==============================] - 11s - loss: 0.9219 - acc: 0.6368 - val_loss: 1.1871 - val_acc: 0.5607
Epoch 106/1500
2668/2668 [==============================] - 11s - loss: 0.9080 - acc: 0.6413 - val_loss: 1.1742 - val_acc: 0.5412
Epoch 107/1500
2668/2668 [==============================] - 11s - loss: 0.9231 - acc: 0.6417 - val_loss: 1.1599 - val_acc: 0.5442
Epoch 108/1500
2668/2668 [==============================] - 11s - loss: 0.8766 - acc: 0.6597 - val_loss: 1.2844 - val_acc: 0.4708
Epoch 109/1500
2668/2668 [==============================] - 11s - loss: 0.9114 - acc: 0.6533 - val_loss: 1.1584 - val_acc: 0.5457
Epoch 110/1500
2668/2668 [==============================] - 11s - loss: 0.9196 - acc: 0.6503 - val_loss: 1.0956 - val_acc: 0.5862
Epoch 111/1500
2668/2668 [==============================] - 11s - loss: 0.9517 - acc: 0.6327 - val_loss: 1.1411 - val_acc: 0.5652
Epoch 112/1500
2668/2668 [==============================] - 11s - loss: 0.8872 - acc: 0.6484 - val_loss: 1.2764 - val_acc: 0.4723
Epoch 113/1500
2668/2668 [==============================] - 11s - loss: 0.9021 - acc: 0.6466 - val_loss: 1.1096 - val_acc: 0.5667
Epoch 114/1500
2668/2668 [==============================] - 11s - loss: 0.8580 - acc: 0.6570 - val_loss: 1.1039 - val_acc: 0.5547
Epoch 115/1500
2668/2668 [==============================] - 11s - loss: 0.8527 - acc: 0.6728 - val_loss: 1.1709 - val_acc: 0.5292
Epoch 116/1500
2668/2668 [==============================] - 11s - loss: 0.8653 - acc: 0.6612 - val_loss: 1.1698 - val_acc: 0.5577
Epoch 117/1500
2668/2668 [==============================] - 11s - loss: 0.8733 - acc: 0.6762 - val_loss: 1.1598 - val_acc: 0.5757
Epoch 118/1500
2668/2668 [==============================] - 11s - loss: 0.8412 - acc: 0.6735 - val_loss: 1.2033 - val_acc: 0.5097
Epoch 119/1500
2668/2668 [==============================] - 11s - loss: 0.8410 - acc: 0.6900 - val_loss: 1.1656 - val_acc: 0.5412
Epoch 120/1500
2668/2668 [==============================] - 11s - loss: 0.8094 - acc: 0.6912 - val_loss: 1.0966 - val_acc: 0.5877
Epoch 121/1500
2668/2668 [==============================] - 11s - loss: 0.7767 - acc: 0.7013 - val_loss: 1.1025 - val_acc: 0.5577
Epoch 122/1500
2668/2668 [==============================] - 11s - loss: 0.7983 - acc: 0.6889 - val_loss: 1.0293 - val_acc: 0.6087
Epoch 123/1500
2668/2668 [==============================] - 11s - loss: 0.7836 - acc: 0.6964 - val_loss: 1.0574 - val_acc: 0.5802
Epoch 124/1500
2668/2668 [==============================] - 11s - loss: 0.7468 - acc: 0.7043 - val_loss: 1.0088 - val_acc: 0.6192
Epoch 125/1500
2668/2668 [==============================] - 11s - loss: 0.7537 - acc: 0.7069 - val_loss: 1.0633 - val_acc: 0.5757
Epoch 126/1500
2668/2668 [==============================] - 11s - loss: 0.7223 - acc: 0.7181 - val_loss: 1.0976 - val_acc: 0.5712
Epoch 127/1500
2668/2668 [==============================] - 11s - loss: 0.7205 - acc: 0.7178 - val_loss: 1.0197 - val_acc: 0.6072
Epoch 128/1500
2668/2668 [==============================] - 11s - loss: 0.7631 - acc: 0.6990 - val_loss: 0.9675 - val_acc: 0.6267
Epoch 129/1500
2668/2668 [==============================] - 11s - loss: 0.7543 - acc: 0.7166 - val_loss: 1.1539 - val_acc: 0.5367
Epoch 130/1500
2668/2668 [==============================] - 11s - loss: 0.7788 - acc: 0.7080 - val_loss: 1.0937 - val_acc: 0.5472
Epoch 131/1500
2668/2668 [==============================] - 11s - loss: 0.7451 - acc: 0.7106 - val_loss: 1.0231 - val_acc: 0.6117
Epoch 132/1500
2668/2668 [==============================] - 11s - loss: 0.7310 - acc: 0.7148 - val_loss: 1.0283 - val_acc: 0.5982
Epoch 133/1500
2668/2668 [==============================] - 11s - loss: 0.7189 - acc: 0.7283 - val_loss: 0.9538 - val_acc: 0.6357
Epoch 134/1500
2668/2668 [==============================] - 11s - loss: 0.7196 - acc: 0.7234 - val_loss: 1.0262 - val_acc: 0.6087
Epoch 135/1500
2668/2668 [==============================] - 11s - loss: 0.7021 - acc: 0.7268 - val_loss: 1.1371 - val_acc: 0.5352
Epoch 136/1500
2668/2668 [==============================] - 11s - loss: 0.6829 - acc: 0.7331 - val_loss: 0.9689 - val_acc: 0.6402
Epoch 137/1500
2668/2668 [==============================] - 11s - loss: 0.7318 - acc: 0.7215 - val_loss: 0.9638 - val_acc: 0.6372
Epoch 138/1500
2668/2668 [==============================] - 11s - loss: 0.7038 - acc: 0.7230 - val_loss: 0.9443 - val_acc: 0.6462
Epoch 139/1500
2668/2668 [==============================] - 11s - loss: 0.6893 - acc: 0.7305 - val_loss: 0.9560 - val_acc: 0.6372
Epoch 140/1500
2668/2668 [==============================] - 11s - loss: 0.7163 - acc: 0.7238 - val_loss: 0.9929 - val_acc: 0.6117
Epoch 141/1500
2668/2668 [==============================] - 11s - loss: 0.6860 - acc: 0.7395 - val_loss: 1.0288 - val_acc: 0.6087
Epoch 142/1500
2668/2668 [==============================] - 11s - loss: 0.6939 - acc: 0.7301 - val_loss: 0.9984 - val_acc: 0.6102
Epoch 143/1500
2668/2668 [==============================] - 11s - loss: 0.6881 - acc: 0.7328 - val_loss: 0.9113 - val_acc: 0.6507
Epoch 144/1500
2668/2668 [==============================] - 11s - loss: 0.6432 - acc: 0.7474 - val_loss: 1.0308 - val_acc: 0.6087
Epoch 145/1500
2668/2668 [==============================] - 11s - loss: 0.6530 - acc: 0.7564 - val_loss: 1.0896 - val_acc: 0.5667
Epoch 146/1500
2668/2668 [==============================] - 11s - loss: 0.6647 - acc: 0.7507 - val_loss: 0.8796 - val_acc: 0.6582
Epoch 147/1500
2668/2668 [==============================] - 11s - loss: 0.6590 - acc: 0.7567 - val_loss: 0.9030 - val_acc: 0.6447
Epoch 148/1500
2668/2668 [==============================] - 11s - loss: 0.6399 - acc: 0.7571 - val_loss: 0.9972 - val_acc: 0.6222
Epoch 149/1500
2668/2668 [==============================] - 11s - loss: 0.6271 - acc: 0.7590 - val_loss: 0.9862 - val_acc: 0.6162
Epoch 150/1500
2668/2668 [==============================] - 11s - loss: 0.6238 - acc: 0.7594 - val_loss: 0.9226 - val_acc: 0.6327
Epoch 151/1500
2668/2668 [==============================] - 11s - loss: 0.5983 - acc: 0.7796 - val_loss: 0.8900 - val_acc: 0.6627
Epoch 152/1500
2668/2668 [==============================] - 11s - loss: 0.6327 - acc: 0.7657 - val_loss: 1.0310 - val_acc: 0.6072
Epoch 153/1500
2668/2668 [==============================] - 11s - loss: 0.5947 - acc: 0.7665 - val_loss: 0.9036 - val_acc: 0.6747
Epoch 154/1500
2668/2668 [==============================] - 11s - loss: 0.5902 - acc: 0.7785 - val_loss: 0.9538 - val_acc: 0.6282
Epoch 155/1500
2668/2668 [==============================] - 11s - loss: 0.5804 - acc: 0.7834 - val_loss: 0.8908 - val_acc: 0.6642
Epoch 156/1500
2668/2668 [==============================] - 11s - loss: 0.5614 - acc: 0.7901 - val_loss: 0.8497 - val_acc: 0.6717
Epoch 157/1500
2668/2668 [==============================] - 11s - loss: 0.5636 - acc: 0.7871 - val_loss: 0.8547 - val_acc: 0.6792
Epoch 158/1500
2668/2668 [==============================] - 11s - loss: 0.5582 - acc: 0.7819 - val_loss: 0.8760 - val_acc: 0.6627
Epoch 159/1500
2668/2668 [==============================] - 11s - loss: 0.5463 - acc: 0.7901 - val_loss: 0.8751 - val_acc: 0.6627
Epoch 160/1500
2668/2668 [==============================] - 11s - loss: 0.5560 - acc: 0.7905 - val_loss: 0.8678 - val_acc: 0.6717
Epoch 161/1500
2668/2668 [==============================] - 11s - loss: 0.5482 - acc: 0.7897 - val_loss: 0.9307 - val_acc: 0.6357
Epoch 162/1500
2668/2668 [==============================] - 11s - loss: 0.5335 - acc: 0.7920 - val_loss: 0.8454 - val_acc: 0.6912
Epoch 163/1500
2668/2668 [==============================] - 11s - loss: 0.5517 - acc: 0.7916 - val_loss: 0.8515 - val_acc: 0.6762
Epoch 164/1500
2668/2668 [==============================] - 11s - loss: 0.5559 - acc: 0.7834 - val_loss: 1.0944 - val_acc: 0.5922
Epoch 165/1500
2668/2668 [==============================] - 11s - loss: 0.5799 - acc: 0.7886 - val_loss: 0.9726 - val_acc: 0.6207
Epoch 166/1500
2668/2668 [==============================] - 11s - loss: 0.5387 - acc: 0.7991 - val_loss: 0.8536 - val_acc: 0.6777
Epoch 167/1500
2668/2668 [==============================] - 11s - loss: 0.5603 - acc: 0.7800 - val_loss: 0.8661 - val_acc: 0.6702
Epoch 168/1500
2668/2668 [==============================] - 11s - loss: 0.5310 - acc: 0.8006 - val_loss: 0.8428 - val_acc: 0.6987
Epoch 169/1500
2668/2668 [==============================] - 11s - loss: 0.5149 - acc: 0.8028 - val_loss: 0.9412 - val_acc: 0.6537
Epoch 170/1500
2668/2668 [==============================] - 11s - loss: 0.5203 - acc: 0.8062 - val_loss: 0.9591 - val_acc: 0.6612
Epoch 171/1500
2668/2668 [==============================] - 11s - loss: 0.5375 - acc: 0.7999 - val_loss: 0.8972 - val_acc: 0.6642
Epoch 172/1500
2668/2668 [==============================] - 11s - loss: 0.5027 - acc: 0.8107 - val_loss: 0.7903 - val_acc: 0.7031
Epoch 173/1500
2668/2668 [==============================] - 11s - loss: 0.5281 - acc: 0.7957 - val_loss: 0.8623 - val_acc: 0.6867
Epoch 174/1500
2668/2668 [==============================] - 11s - loss: 0.5396 - acc: 0.7999 - val_loss: 1.0186 - val_acc: 0.6267
Epoch 175/1500
2668/2668 [==============================] - 11s - loss: 0.5505 - acc: 0.7890 - val_loss: 0.8358 - val_acc: 0.6972
Epoch 176/1500
2668/2668 [==============================] - 11s - loss: 0.5600 - acc: 0.7901 - val_loss: 0.8454 - val_acc: 0.7061
Epoch 177/1500
2668/2668 [==============================] - 11s - loss: 0.5527 - acc: 0.7927 - val_loss: 0.9452 - val_acc: 0.6342
Epoch 178/1500
2668/2668 [==============================] - 11s - loss: 0.5319 - acc: 0.7984 - val_loss: 0.8766 - val_acc: 0.6777
Epoch 179/1500
2668/2668 [==============================] - 11s - loss: 0.5048 - acc: 0.8043 - val_loss: 0.8612 - val_acc: 0.7016
Epoch 180/1500
2668/2668 [==============================] - 11s - loss: 0.5141 - acc: 0.8043 - val_loss: 0.7954 - val_acc: 0.7001
Epoch 181/1500
2668/2668 [==============================] - 11s - loss: 0.5223 - acc: 0.8032 - val_loss: 0.8068 - val_acc: 0.6987
Epoch 182/1500
2668/2668 [==============================] - 11s - loss: 0.5199 - acc: 0.8013 - val_loss: 0.7532 - val_acc: 0.7301
Epoch 183/1500
2668/2668 [==============================] - 11s - loss: 0.4802 - acc: 0.8201 - val_loss: 0.9165 - val_acc: 0.6582
Epoch 184/1500
2668/2668 [==============================] - 11s - loss: 0.4873 - acc: 0.8145 - val_loss: 0.7961 - val_acc: 0.7106
Epoch 185/1500
2668/2668 [==============================] - 11s - loss: 0.4378 - acc: 0.8392 - val_loss: 0.8021 - val_acc: 0.6912
Epoch 186/1500
2668/2668 [==============================] - 11s - loss: 0.4686 - acc: 0.8325 - val_loss: 0.8047 - val_acc: 0.7091
Epoch 187/1500
2668/2668 [==============================] - 11s - loss: 0.4618 - acc: 0.8227 - val_loss: 0.7577 - val_acc: 0.7376
Epoch 188/1500
2668/2668 [==============================] - 11s - loss: 0.4147 - acc: 0.8407 - val_loss: 0.8075 - val_acc: 0.6957
Epoch 189/1500
2668/2668 [==============================] - 11s - loss: 0.4349 - acc: 0.8351 - val_loss: 0.8473 - val_acc: 0.6732
Epoch 190/1500
2668/2668 [==============================] - 11s - loss: 0.4379 - acc: 0.8362 - val_loss: 0.9252 - val_acc: 0.6642
Epoch 191/1500
2668/2668 [==============================] - 11s - loss: 0.4603 - acc: 0.8325 - val_loss: 0.7915 - val_acc: 0.7121
Epoch 192/1500
2668/2668 [==============================] - 11s - loss: 0.4597 - acc: 0.8201 - val_loss: 0.8237 - val_acc: 0.6867
Epoch 193/1500
2668/2668 [==============================] - 11s - loss: 0.4444 - acc: 0.8298 - val_loss: 0.8006 - val_acc: 0.6927
Epoch 194/1500
2668/2668 [==============================] - 11s - loss: 0.4256 - acc: 0.8445 - val_loss: 0.8420 - val_acc: 0.6657
Epoch 195/1500
2668/2668 [==============================] - 11s - loss: 0.4204 - acc: 0.8482 - val_loss: 0.7236 - val_acc: 0.7271
Epoch 196/1500
2668/2668 [==============================] - 11s - loss: 0.4138 - acc: 0.8501 - val_loss: 0.8609 - val_acc: 0.6642
Epoch 197/1500
2668/2668 [==============================] - 11s - loss: 0.4043 - acc: 0.8504 - val_loss: 0.7771 - val_acc: 0.7031
Epoch 198/1500
2668/2668 [==============================] - 11s - loss: 0.4078 - acc: 0.8418 - val_loss: 0.8461 - val_acc: 0.6732
Epoch 199/1500
2668/2668 [==============================] - 11s - loss: 0.4282 - acc: 0.8351 - val_loss: 0.7749 - val_acc: 0.7316
Epoch 200/1500
2668/2668 [==============================] - 11s - loss: 0.4095 - acc: 0.8433 - val_loss: 0.7939 - val_acc: 0.6987
Epoch 201/1500
2668/2668 [==============================] - 11s - loss: 0.4180 - acc: 0.8490 - val_loss: 0.7412 - val_acc: 0.7346
Epoch 202/1500
2668/2668 [==============================] - 11s - loss: 0.3998 - acc: 0.8407 - val_loss: 0.8622 - val_acc: 0.6882
Epoch 203/1500
2668/2668 [==============================] - 11s - loss: 0.3710 - acc: 0.8669 - val_loss: 0.7597 - val_acc: 0.7121
Epoch 204/1500
2668/2668 [==============================] - 11s - loss: 0.3911 - acc: 0.8475 - val_loss: 0.8508 - val_acc: 0.6897
Epoch 205/1500
2668/2668 [==============================] - 11s - loss: 0.3918 - acc: 0.8579 - val_loss: 0.7784 - val_acc: 0.7136
Epoch 206/1500
2668/2668 [==============================] - 11s - loss: 0.3870 - acc: 0.8523 - val_loss: 0.7977 - val_acc: 0.7061
Epoch 207/1500
2668/2668 [==============================] - 11s - loss: 0.3859 - acc: 0.8519 - val_loss: 0.7683 - val_acc: 0.7196
Epoch 208/1500
2668/2668 [==============================] - 11s - loss: 0.3667 - acc: 0.8632 - val_loss: 0.8045 - val_acc: 0.7121
Epoch 209/1500
2668/2668 [==============================] - 11s - loss: 0.3701 - acc: 0.8591 - val_loss: 0.7682 - val_acc: 0.7316
Epoch 210/1500
2668/2668 [==============================] - 11s - loss: 0.3790 - acc: 0.8594 - val_loss: 0.7830 - val_acc: 0.7091
Epoch 211/1500
2668/2668 [==============================] - 11s - loss: 0.3785 - acc: 0.8587 - val_loss: 0.7469 - val_acc: 0.7286
Epoch 212/1500
2668/2668 [==============================] - 11s - loss: 0.3577 - acc: 0.8669 - val_loss: 0.7581 - val_acc: 0.7076
Epoch 213/1500
2668/2668 [==============================] - 11s - loss: 0.3404 - acc: 0.8651 - val_loss: 0.7210 - val_acc: 0.7496
Epoch 214/1500
2668/2668 [==============================] - 11s - loss: 0.3794 - acc: 0.8643 - val_loss: 0.8283 - val_acc: 0.6987
Epoch 215/1500
2668/2668 [==============================] - 11s - loss: 0.3791 - acc: 0.8628 - val_loss: 0.7720 - val_acc: 0.7226
Epoch 216/1500
2668/2668 [==============================] - 11s - loss: 0.3534 - acc: 0.8613 - val_loss: 0.7749 - val_acc: 0.7121
Epoch 217/1500
2668/2668 [==============================] - 11s - loss: 0.3483 - acc: 0.8624 - val_loss: 0.7399 - val_acc: 0.7286
Epoch 218/1500
2668/2668 [==============================] - 11s - loss: 0.3627 - acc: 0.8714 - val_loss: 0.7599 - val_acc: 0.7256
Epoch 219/1500
2668/2668 [==============================] - 11s - loss: 0.3655 - acc: 0.8564 - val_loss: 0.7725 - val_acc: 0.7346
Epoch 220/1500
2668/2668 [==============================] - 11s - loss: 0.3625 - acc: 0.8673 - val_loss: 0.6983 - val_acc: 0.7451
Epoch 221/1500
2668/2668 [==============================] - 11s - loss: 0.3496 - acc: 0.8684 - val_loss: 0.7118 - val_acc: 0.7346
Epoch 222/1500
2668/2668 [==============================] - 11s - loss: 0.3264 - acc: 0.8797 - val_loss: 0.7738 - val_acc: 0.7331
Epoch 223/1500
2668/2668 [==============================] - 11s - loss: 0.3568 - acc: 0.8741 - val_loss: 0.9335 - val_acc: 0.6837
Epoch 224/1500
2668/2668 [==============================] - 11s - loss: 0.3469 - acc: 0.8654 - val_loss: 0.6998 - val_acc: 0.7646
Epoch 225/1500
2668/2668 [==============================] - 11s - loss: 0.3328 - acc: 0.8703 - val_loss: 0.7837 - val_acc: 0.7196
Epoch 226/1500
2668/2668 [==============================] - 11s - loss: 0.3164 - acc: 0.8744 - val_loss: 0.7758 - val_acc: 0.7391
Epoch 227/1500
2668/2668 [==============================] - 11s - loss: 0.3506 - acc: 0.8722 - val_loss: 0.8490 - val_acc: 0.6987
Epoch 228/1500
2668/2668 [==============================] - 11s - loss: 0.3477 - acc: 0.8677 - val_loss: 0.7034 - val_acc: 0.7586
Epoch 229/1500
2668/2668 [==============================] - 11s - loss: 0.3279 - acc: 0.8804 - val_loss: 0.8200 - val_acc: 0.7106
Epoch 230/1500
2668/2668 [==============================] - 11s - loss: 0.3240 - acc: 0.8759 - val_loss: 0.7720 - val_acc: 0.7196
Epoch 231/1500
2668/2668 [==============================] - 11s - loss: 0.3103 - acc: 0.8857 - val_loss: 0.5903 - val_acc: 0.7856
Epoch 232/1500
2668/2668 [==============================] - 11s - loss: 0.3330 - acc: 0.8744 - val_loss: 0.8000 - val_acc: 0.7196
Epoch 233/1500
2668/2668 [==============================] - 11s - loss: 0.3430 - acc: 0.8733 - val_loss: 0.7794 - val_acc: 0.7331
Epoch 234/1500
2668/2668 [==============================] - 11s - loss: 0.3407 - acc: 0.8703 - val_loss: 0.6834 - val_acc: 0.7526
Epoch 235/1500
2668/2668 [==============================] - 11s - loss: 0.3370 - acc: 0.8767 - val_loss: 0.6659 - val_acc: 0.7616
Epoch 236/1500
2668/2668 [==============================] - 11s - loss: 0.3064 - acc: 0.8801 - val_loss: 0.8597 - val_acc: 0.7136
Epoch 237/1500
2668/2668 [==============================] - 11s - loss: 0.3428 - acc: 0.8771 - val_loss: 0.7185 - val_acc: 0.7571
Epoch 238/1500
2668/2668 [==============================] - 11s - loss: 0.3367 - acc: 0.8752 - val_loss: 0.6870 - val_acc: 0.7691
Epoch 239/1500
2668/2668 [==============================] - 11s - loss: 0.3305 - acc: 0.8741 - val_loss: 0.8373 - val_acc: 0.7016
Epoch 240/1500
2668/2668 [==============================] - 11s - loss: 0.3389 - acc: 0.8793 - val_loss: 0.6913 - val_acc: 0.7556
Epoch 241/1500
2668/2668 [==============================] - 11s - loss: 0.3252 - acc: 0.8789 - val_loss: 0.7630 - val_acc: 0.7301
Epoch 242/1500
2668/2668 [==============================] - 11s - loss: 0.3094 - acc: 0.8842 - val_loss: 0.7775 - val_acc: 0.7301
Epoch 243/1500
2668/2668 [==============================] - 11s - loss: 0.3130 - acc: 0.8756 - val_loss: 0.6289 - val_acc: 0.7811
Epoch 244/1500
2668/2668 [==============================] - 11s - loss: 0.3191 - acc: 0.8771 - val_loss: 0.7131 - val_acc: 0.7466
Epoch 245/1500
2668/2668 [==============================] - 11s - loss: 0.3115 - acc: 0.8864 - val_loss: 0.7326 - val_acc: 0.7646
Epoch 246/1500
2668/2668 [==============================] - 11s - loss: 0.2911 - acc: 0.8921 - val_loss: 0.7004 - val_acc: 0.7661
Epoch 247/1500
2668/2668 [==============================] - 11s - loss: 0.3180 - acc: 0.8793 - val_loss: 0.8372 - val_acc: 0.7181
Epoch 248/1500
2668/2668 [==============================] - 11s - loss: 0.2919 - acc: 0.8891 - val_loss: 0.6517 - val_acc: 0.7661
Epoch 249/1500
2668/2668 [==============================] - 11s - loss: 0.3065 - acc: 0.8864 - val_loss: 0.8930 - val_acc: 0.7016
Epoch 250/1500
2668/2668 [==============================] - 11s - loss: 0.2804 - acc: 0.8932 - val_loss: 0.7589 - val_acc: 0.7526
Epoch 251/1500
2668/2668 [==============================] - 11s - loss: 0.2721 - acc: 0.8973 - val_loss: 0.6077 - val_acc: 0.7856
Epoch 252/1500
2668/2668 [==============================] - 11s - loss: 0.2923 - acc: 0.9003 - val_loss: 0.7691 - val_acc: 0.7481
Epoch 253/1500
2668/2668 [==============================] - 11s - loss: 0.2938 - acc: 0.8831 - val_loss: 0.7582 - val_acc: 0.7421
Epoch 254/1500
2668/2668 [==============================] - 11s - loss: 0.2831 - acc: 0.8917 - val_loss: 0.6676 - val_acc: 0.7646
Epoch 255/1500
2668/2668 [==============================] - 11s - loss: 0.2731 - acc: 0.8936 - val_loss: 0.8485 - val_acc: 0.7166
Epoch 256/1500
2668/2668 [==============================] - 11s - loss: 0.2714 - acc: 0.9029 - val_loss: 0.7182 - val_acc: 0.7571
Epoch 257/1500
2668/2668 [==============================] - 11s - loss: 0.2756 - acc: 0.8977 - val_loss: 0.7188 - val_acc: 0.7511
Epoch 258/1500
2668/2668 [==============================] - 11s - loss: 0.2688 - acc: 0.9048 - val_loss: 0.7098 - val_acc: 0.7511
Epoch 259/1500
2668/2668 [==============================] - 11s - loss: 0.2656 - acc: 0.9033 - val_loss: 0.8005 - val_acc: 0.7211
Epoch 260/1500
2668/2668 [==============================] - 11s - loss: 0.3028 - acc: 0.8879 - val_loss: 0.7689 - val_acc: 0.7331
Epoch 261/1500
2668/2668 [==============================] - 11s - loss: 0.2799 - acc: 0.8992 - val_loss: 0.6879 - val_acc: 0.7631
Epoch 262/1500
2668/2668 [==============================] - 11s - loss: 0.2778 - acc: 0.8988 - val_loss: 0.6914 - val_acc: 0.7646
Epoch 263/1500
2668/2668 [==============================] - 11s - loss: 0.2645 - acc: 0.9037 - val_loss: 0.5647 - val_acc: 0.8021
Epoch 264/1500
2668/2668 [==============================] - 11s - loss: 0.2652 - acc: 0.8992 - val_loss: 0.7010 - val_acc: 0.7436
Epoch 265/1500
2668/2668 [==============================] - 11s - loss: 0.2678 - acc: 0.9007 - val_loss: 0.7595 - val_acc: 0.7631
Epoch 266/1500
2668/2668 [==============================] - 11s - loss: 0.2807 - acc: 0.8947 - val_loss: 0.6734 - val_acc: 0.7556
Epoch 267/1500
2668/2668 [==============================] - 11s - loss: 0.2723 - acc: 0.8981 - val_loss: 0.7173 - val_acc: 0.7751
Epoch 268/1500
2668/2668 [==============================] - 11s - loss: 0.2844 - acc: 0.8988 - val_loss: 0.7416 - val_acc: 0.7571
Epoch 269/1500
2668/2668 [==============================] - 11s - loss: 0.2429 - acc: 0.9160 - val_loss: 0.6626 - val_acc: 0.7736
Epoch 270/1500
2668/2668 [==============================] - 11s - loss: 0.2341 - acc: 0.9093 - val_loss: 0.7102 - val_acc: 0.7556
Epoch 271/1500
2668/2668 [==============================] - 11s - loss: 0.2441 - acc: 0.9138 - val_loss: 0.5804 - val_acc: 0.7916
Epoch 272/1500
2668/2668 [==============================] - 11s - loss: 0.2669 - acc: 0.8988 - val_loss: 0.6551 - val_acc: 0.7856
Epoch 273/1500
2668/2668 [==============================] - 11s - loss: 0.2364 - acc: 0.9104 - val_loss: 0.5992 - val_acc: 0.7961
Epoch 274/1500
2668/2668 [==============================] - 11s - loss: 0.2460 - acc: 0.9022 - val_loss: 0.6275 - val_acc: 0.7721
Epoch 275/1500
2668/2668 [==============================] - 11s - loss: 0.2333 - acc: 0.9059 - val_loss: 0.6652 - val_acc: 0.7781
Epoch 276/1500
2668/2668 [==============================] - 11s - loss: 0.2418 - acc: 0.9100 - val_loss: 0.6058 - val_acc: 0.7961
Epoch 277/1500
2668/2668 [==============================] - 11s - loss: 0.2331 - acc: 0.9172 - val_loss: 0.8155 - val_acc: 0.7181
Epoch 278/1500
2668/2668 [==============================] - 11s - loss: 0.2436 - acc: 0.9145 - val_loss: 0.7339 - val_acc: 0.7466
Epoch 279/1500
2668/2668 [==============================] - 11s - loss: 0.2477 - acc: 0.9082 - val_loss: 0.8218 - val_acc: 0.7361
Epoch 280/1500
2668/2668 [==============================] - 11s - loss: 0.2905 - acc: 0.8984 - val_loss: 0.6880 - val_acc: 0.7751
Epoch 281/1500
2668/2668 [==============================] - 11s - loss: 0.2523 - acc: 0.9085 - val_loss: 0.7202 - val_acc: 0.7496
Epoch 282/1500
2668/2668 [==============================] - 11s - loss: 0.2733 - acc: 0.8977 - val_loss: 0.7673 - val_acc: 0.7331
Epoch 283/1500
2668/2668 [==============================] - 11s - loss: 0.2533 - acc: 0.9067 - val_loss: 0.6670 - val_acc: 0.7511
Epoch 284/1500
2668/2668 [==============================] - 11s - loss: 0.2452 - acc: 0.9029 - val_loss: 0.6289 - val_acc: 0.7826
Epoch 285/1500
2668/2668 [==============================] - 11s - loss: 0.2550 - acc: 0.9157 - val_loss: 0.7057 - val_acc: 0.7661
Epoch 286/1500
2668/2668 [==============================] - 11s - loss: 0.2319 - acc: 0.9153 - val_loss: 0.5845 - val_acc: 0.7931
Epoch 287/1500
2668/2668 [==============================] - 11s - loss: 0.2543 - acc: 0.9100 - val_loss: 0.6523 - val_acc: 0.7796
Epoch 288/1500
2668/2668 [==============================] - 11s - loss: 0.2543 - acc: 0.9130 - val_loss: 0.5600 - val_acc: 0.8021
Epoch 289/1500
2668/2668 [==============================] - 11s - loss: 0.2443 - acc: 0.9145 - val_loss: 0.7226 - val_acc: 0.7451
Epoch 290/1500
2668/2668 [==============================] - 11s - loss: 0.2157 - acc: 0.9157 - val_loss: 0.5745 - val_acc: 0.8066
Epoch 291/1500
2668/2668 [==============================] - 11s - loss: 0.2157 - acc: 0.9284 - val_loss: 0.7019 - val_acc: 0.7661
Epoch 292/1500
2668/2668 [==============================] - 11s - loss: 0.2367 - acc: 0.9123 - val_loss: 0.6629 - val_acc: 0.7871
Epoch 293/1500
2668/2668 [==============================] - 11s - loss: 0.2201 - acc: 0.9250 - val_loss: 0.6139 - val_acc: 0.7931
Epoch 294/1500
2668/2668 [==============================] - 11s - loss: 0.2146 - acc: 0.9205 - val_loss: 0.5751 - val_acc: 0.7946
Epoch 295/1500
2668/2668 [==============================] - 11s - loss: 0.2107 - acc: 0.9194 - val_loss: 0.6864 - val_acc: 0.7811
Epoch 296/1500
2668/2668 [==============================] - 11s - loss: 0.2149 - acc: 0.9160 - val_loss: 0.6058 - val_acc: 0.7901
Epoch 297/1500
2668/2668 [==============================] - 11s - loss: 0.2018 - acc: 0.9314 - val_loss: 0.5792 - val_acc: 0.8066
Epoch 298/1500
2668/2668 [==============================] - 11s - loss: 0.2035 - acc: 0.9209 - val_loss: 0.7194 - val_acc: 0.7691
Epoch 299/1500
2668/2668 [==============================] - 11s - loss: 0.2100 - acc: 0.9243 - val_loss: 0.6106 - val_acc: 0.7916
Epoch 300/1500
2668/2668 [==============================] - 11s - loss: 0.2025 - acc: 0.9284 - val_loss: 0.5365 - val_acc: 0.8276
Epoch 301/1500
2668/2668 [==============================] - 11s - loss: 0.1828 - acc: 0.9303 - val_loss: 0.6098 - val_acc: 0.7946
Epoch 302/1500
2668/2668 [==============================] - 11s - loss: 0.2097 - acc: 0.9217 - val_loss: 0.5611 - val_acc: 0.8141
Epoch 303/1500
2668/2668 [==============================] - 11s - loss: 0.1946 - acc: 0.9288 - val_loss: 0.6082 - val_acc: 0.7901
Epoch 304/1500
2668/2668 [==============================] - 11s - loss: 0.2006 - acc: 0.9277 - val_loss: 0.6935 - val_acc: 0.7751
Epoch 305/1500
2668/2668 [==============================] - 11s - loss: 0.1931 - acc: 0.9303 - val_loss: 0.6871 - val_acc: 0.7691
Epoch 306/1500
2668/2668 [==============================] - 11s - loss: 0.2093 - acc: 0.9258 - val_loss: 0.6153 - val_acc: 0.8111
Epoch 307/1500
2668/2668 [==============================] - 11s - loss: 0.1973 - acc: 0.9265 - val_loss: 0.7946 - val_acc: 0.7376
Epoch 308/1500
2668/2668 [==============================] - 11s - loss: 0.2126 - acc: 0.9198 - val_loss: 0.5302 - val_acc: 0.8171
Epoch 309/1500
2668/2668 [==============================] - 11s - loss: 0.2359 - acc: 0.9205 - val_loss: 0.5685 - val_acc: 0.7961
Epoch 310/1500
2668/2668 [==============================] - 11s - loss: 0.1967 - acc: 0.9228 - val_loss: 0.7242 - val_acc: 0.7616
Epoch 311/1500
2668/2668 [==============================] - 11s - loss: 0.2224 - acc: 0.9157 - val_loss: 0.5367 - val_acc: 0.8231
Epoch 312/1500
2668/2668 [==============================] - 11s - loss: 0.2180 - acc: 0.9209 - val_loss: 0.6710 - val_acc: 0.7691
Epoch 313/1500
2668/2668 [==============================] - 11s - loss: 0.2051 - acc: 0.9269 - val_loss: 0.5618 - val_acc: 0.8171
Epoch 314/1500
2668/2668 [==============================] - 11s - loss: 0.1994 - acc: 0.9280 - val_loss: 0.6298 - val_acc: 0.7961
Epoch 315/1500
2668/2668 [==============================] - 11s - loss: 0.1958 - acc: 0.9295 - val_loss: 0.5900 - val_acc: 0.8036
Epoch 316/1500
2668/2668 [==============================] - 11s - loss: 0.2009 - acc: 0.9269 - val_loss: 0.6115 - val_acc: 0.8006
Epoch 317/1500
2668/2668 [==============================] - 11s - loss: 0.1790 - acc: 0.9329 - val_loss: 0.5927 - val_acc: 0.7901
Epoch 318/1500
2668/2668 [==============================] - 11s - loss: 0.1979 - acc: 0.9310 - val_loss: 0.6008 - val_acc: 0.8111
Epoch 319/1500
2668/2668 [==============================] - 11s - loss: 0.1806 - acc: 0.9333 - val_loss: 0.6514 - val_acc: 0.7871
Epoch 320/1500
2668/2668 [==============================] - 11s - loss: 0.1868 - acc: 0.9337 - val_loss: 0.5518 - val_acc: 0.8246
Epoch 321/1500
2668/2668 [==============================] - 11s - loss: 0.1787 - acc: 0.9359 - val_loss: 0.7503 - val_acc: 0.7646
Epoch 322/1500
2668/2668 [==============================] - 11s - loss: 0.1777 - acc: 0.9344 - val_loss: 0.5662 - val_acc: 0.8231
Epoch 323/1500
2668/2668 [==============================] - 11s - loss: 0.1933 - acc: 0.9318 - val_loss: 0.5913 - val_acc: 0.8006
Epoch 324/1500
2668/2668 [==============================] - 11s - loss: 0.1994 - acc: 0.9269 - val_loss: 0.6209 - val_acc: 0.7946
Epoch 325/1500
2668/2668 [==============================] - 11s - loss: 0.2064 - acc: 0.9205 - val_loss: 0.5517 - val_acc: 0.8141
Epoch 326/1500
2668/2668 [==============================] - 11s - loss: 0.1822 - acc: 0.9367 - val_loss: 0.7436 - val_acc: 0.7586
Epoch 327/1500
2668/2668 [==============================] - 11s - loss: 0.2013 - acc: 0.9213 - val_loss: 0.5702 - val_acc: 0.8111
Epoch 328/1500
2668/2668 [==============================] - 11s - loss: 0.1660 - acc: 0.9412 - val_loss: 0.7086 - val_acc: 0.7721
Epoch 329/1500
2668/2668 [==============================] - 11s - loss: 0.1941 - acc: 0.9314 - val_loss: 0.6699 - val_acc: 0.7751
Epoch 330/1500
2668/2668 [==============================] - 11s - loss: 0.1642 - acc: 0.9329 - val_loss: 0.5341 - val_acc: 0.8036
Epoch 331/1500
2668/2668 [==============================] - 11s - loss: 0.1817 - acc: 0.9329 - val_loss: 0.7543 - val_acc: 0.7796
Epoch 332/1500
2668/2668 [==============================] - 11s - loss: 0.1865 - acc: 0.9322 - val_loss: 0.5782 - val_acc: 0.7976
Epoch 333/1500
2668/2668 [==============================] - 11s - loss: 0.1634 - acc: 0.9438 - val_loss: 0.6071 - val_acc: 0.7931
Epoch 334/1500
2668/2668 [==============================] - 11s - loss: 0.1775 - acc: 0.9378 - val_loss: 0.5176 - val_acc: 0.8186
Epoch 335/1500
2668/2668 [==============================] - 11s - loss: 0.1778 - acc: 0.9329 - val_loss: 0.6784 - val_acc: 0.7736
Epoch 336/1500
2668/2668 [==============================] - 11s - loss: 0.1734 - acc: 0.9340 - val_loss: 0.5547 - val_acc: 0.8261
Epoch 337/1500
2668/2668 [==============================] - 11s - loss: 0.1635 - acc: 0.9430 - val_loss: 0.5935 - val_acc: 0.8036
Epoch 338/1500
2668/2668 [==============================] - 11s - loss: 0.1659 - acc: 0.9434 - val_loss: 0.6088 - val_acc: 0.7991
Epoch 339/1500
2668/2668 [==============================] - 11s - loss: 0.1909 - acc: 0.9284 - val_loss: 0.6655 - val_acc: 0.7811
Epoch 340/1500
2668/2668 [==============================] - 11s - loss: 0.1723 - acc: 0.9352 - val_loss: 0.5504 - val_acc: 0.8171
Epoch 341/1500
2668/2668 [==============================] - 11s - loss: 0.1678 - acc: 0.9415 - val_loss: 0.7336 - val_acc: 0.7706
Epoch 342/1500
2668/2668 [==============================] - 11s - loss: 0.1684 - acc: 0.9374 - val_loss: 0.6123 - val_acc: 0.8096
Epoch 343/1500
2668/2668 [==============================] - 11s - loss: 0.1614 - acc: 0.9427 - val_loss: 0.6245 - val_acc: 0.8006
Epoch 344/1500
2668/2668 [==============================] - 11s - loss: 0.1704 - acc: 0.9307 - val_loss: 0.6432 - val_acc: 0.7976
Epoch 345/1500
2668/2668 [==============================] - 11s - loss: 0.1716 - acc: 0.9378 - val_loss: 0.6027 - val_acc: 0.8021
Epoch 346/1500
2668/2668 [==============================] - 11s - loss: 0.1853 - acc: 0.9303 - val_loss: 0.6435 - val_acc: 0.8156
Epoch 347/1500
2668/2668 [==============================] - 11s - loss: 0.1758 - acc: 0.9415 - val_loss: 0.7001 - val_acc: 0.7676
Epoch 348/1500
2668/2668 [==============================] - 11s - loss: 0.1773 - acc: 0.9352 - val_loss: 0.5956 - val_acc: 0.7976
Epoch 349/1500
2668/2668 [==============================] - 11s - loss: 0.1675 - acc: 0.9367 - val_loss: 0.5878 - val_acc: 0.8066
Epoch 350/1500
2668/2668 [==============================] - 11s - loss: 0.1987 - acc: 0.9314 - val_loss: 0.6030 - val_acc: 0.7886
Epoch 351/1500
2668/2668 [==============================] - 11s - loss: 0.1711 - acc: 0.9397 - val_loss: 0.4815 - val_acc: 0.8381
Epoch 352/1500
2668/2668 [==============================] - 11s - loss: 0.1745 - acc: 0.9280 - val_loss: 0.5092 - val_acc: 0.8246
Epoch 353/1500
2668/2668 [==============================] - 11s - loss: 0.1698 - acc: 0.9468 - val_loss: 0.7470 - val_acc: 0.7631
Epoch 354/1500
2668/2668 [==============================] - 11s - loss: 0.1477 - acc: 0.9468 - val_loss: 0.4949 - val_acc: 0.8321
Epoch 355/1500
2668/2668 [==============================] - 11s - loss: 0.1721 - acc: 0.9400 - val_loss: 0.6829 - val_acc: 0.7796
Epoch 356/1500
2668/2668 [==============================] - 11s - loss: 0.1795 - acc: 0.9404 - val_loss: 0.5196 - val_acc: 0.8231
Epoch 357/1500
2668/2668 [==============================] - 11s - loss: 0.1678 - acc: 0.9352 - val_loss: 0.5672 - val_acc: 0.8171
Epoch 358/1500
2668/2668 [==============================] - 11s - loss: 0.1575 - acc: 0.9445 - val_loss: 0.6075 - val_acc: 0.7991
Epoch 359/1500
2668/2668 [==============================] - 11s - loss: 0.1744 - acc: 0.9404 - val_loss: 0.4943 - val_acc: 0.8276
Epoch 360/1500
2668/2668 [==============================] - 11s - loss: 0.1623 - acc: 0.9419 - val_loss: 0.6758 - val_acc: 0.7871
Epoch 361/1500
2668/2668 [==============================] - 11s - loss: 0.1705 - acc: 0.9397 - val_loss: 0.5198 - val_acc: 0.8411
Epoch 362/1500
2668/2668 [==============================] - 11s - loss: 0.1399 - acc: 0.9483 - val_loss: 0.6133 - val_acc: 0.8111
Epoch 363/1500
2668/2668 [==============================] - 11s - loss: 0.1578 - acc: 0.9427 - val_loss: 0.5280 - val_acc: 0.8186
Epoch 364/1500
2668/2668 [==============================] - 11s - loss: 0.1551 - acc: 0.9457 - val_loss: 0.6297 - val_acc: 0.8051
Epoch 365/1500
2668/2668 [==============================] - 11s - loss: 0.1629 - acc: 0.9363 - val_loss: 0.5105 - val_acc: 0.8231
Epoch 366/1500
2668/2668 [==============================] - 11s - loss: 0.1606 - acc: 0.9475 - val_loss: 0.6394 - val_acc: 0.7856
Epoch 367/1500
2668/2668 [==============================] - 11s - loss: 0.1606 - acc: 0.9475 - val_loss: 0.6134 - val_acc: 0.8006
Epoch 368/1500
2668/2668 [==============================] - 11s - loss: 0.1413 - acc: 0.9475 - val_loss: 0.4376 - val_acc: 0.8441
Epoch 369/1500
2668/2668 [==============================] - 11s - loss: 0.1495 - acc: 0.9449 - val_loss: 0.5967 - val_acc: 0.7916
Epoch 370/1500
2668/2668 [==============================] - 11s - loss: 0.1401 - acc: 0.9494 - val_loss: 0.4275 - val_acc: 0.8591
Epoch 371/1500
2668/2668 [==============================] - 11s - loss: 0.1542 - acc: 0.9412 - val_loss: 0.6258 - val_acc: 0.7931
Epoch 372/1500
2668/2668 [==============================] - 11s - loss: 0.1587 - acc: 0.9434 - val_loss: 0.5544 - val_acc: 0.8186
Epoch 373/1500
2668/2668 [==============================] - 11s - loss: 0.1356 - acc: 0.9528 - val_loss: 0.5206 - val_acc: 0.8381
Epoch 374/1500
2668/2668 [==============================] - 11s - loss: 0.1425 - acc: 0.9487 - val_loss: 0.6087 - val_acc: 0.7901
Epoch 375/1500
2668/2668 [==============================] - 11s - loss: 0.1330 - acc: 0.9550 - val_loss: 0.5116 - val_acc: 0.8291
Epoch 376/1500
2668/2668 [==============================] - 11s - loss: 0.1217 - acc: 0.9614 - val_loss: 0.5390 - val_acc: 0.8396
Epoch 377/1500
2668/2668 [==============================] - 11s - loss: 0.1327 - acc: 0.9490 - val_loss: 0.5535 - val_acc: 0.8186
Epoch 378/1500
2668/2668 [==============================] - 11s - loss: 0.1328 - acc: 0.9535 - val_loss: 0.5231 - val_acc: 0.8306
Epoch 379/1500
2668/2668 [==============================] - 11s - loss: 0.1359 - acc: 0.9483 - val_loss: 0.5198 - val_acc: 0.8246
Epoch 380/1500
2668/2668 [==============================] - 11s - loss: 0.1370 - acc: 0.9475 - val_loss: 0.6502 - val_acc: 0.7991
Epoch 381/1500
2668/2668 [==============================] - 11s - loss: 0.1457 - acc: 0.9528 - val_loss: 0.4266 - val_acc: 0.8531
Epoch 382/1500
2668/2668 [==============================] - 11s - loss: 0.1456 - acc: 0.9468 - val_loss: 0.7333 - val_acc: 0.7616
Epoch 383/1500
2668/2668 [==============================] - 11s - loss: 0.1516 - acc: 0.9464 - val_loss: 0.4511 - val_acc: 0.8636
Epoch 384/1500
2668/2668 [==============================] - 11s - loss: 0.1528 - acc: 0.9490 - val_loss: 0.6997 - val_acc: 0.7901
Epoch 385/1500
2668/2668 [==============================] - 11s - loss: 0.1461 - acc: 0.9442 - val_loss: 0.5259 - val_acc: 0.8126
Epoch 386/1500
2668/2668 [==============================] - 11s - loss: 0.1410 - acc: 0.9501 - val_loss: 0.5825 - val_acc: 0.8066
Epoch 387/1500
2668/2668 [==============================] - 11s - loss: 0.1587 - acc: 0.9430 - val_loss: 0.5521 - val_acc: 0.8201
Epoch 388/1500
2668/2668 [==============================] - 11s - loss: 0.1594 - acc: 0.9438 - val_loss: 0.5179 - val_acc: 0.8141
Epoch 389/1500
2668/2668 [==============================] - 11s - loss: 0.1420 - acc: 0.9501 - val_loss: 0.6516 - val_acc: 0.7841
Epoch 390/1500
2668/2668 [==============================] - 11s - loss: 0.1554 - acc: 0.9445 - val_loss: 0.4419 - val_acc: 0.8456
Epoch 391/1500
2668/2668 [==============================] - 11s - loss: 0.1638 - acc: 0.9415 - val_loss: 0.6159 - val_acc: 0.7721
Epoch 392/1500
2668/2668 [==============================] - 11s - loss: 0.1328 - acc: 0.9554 - val_loss: 0.5620 - val_acc: 0.8126
Epoch 393/1500
2668/2668 [==============================] - 11s - loss: 0.1535 - acc: 0.9520 - val_loss: 0.6159 - val_acc: 0.7976
Epoch 394/1500
2668/2668 [==============================] - 11s - loss: 0.1366 - acc: 0.9550 - val_loss: 0.6074 - val_acc: 0.8006
Epoch 395/1500
2668/2668 [==============================] - 11s - loss: 0.1379 - acc: 0.9520 - val_loss: 0.6075 - val_acc: 0.7811
Epoch 396/1500
2668/2668 [==============================] - 11s - loss: 0.1355 - acc: 0.9520 - val_loss: 0.5170 - val_acc: 0.8216
Epoch 397/1500
2668/2668 [==============================] - 11s - loss: 0.1489 - acc: 0.9479 - val_loss: 0.6568 - val_acc: 0.7901
Epoch 398/1500
2668/2668 [==============================] - 11s - loss: 0.1406 - acc: 0.9501 - val_loss: 0.5747 - val_acc: 0.8201
Epoch 399/1500
2668/2668 [==============================] - 11s - loss: 0.1648 - acc: 0.9460 - val_loss: 0.5234 - val_acc: 0.8336
Epoch 400/1500
2668/2668 [==============================] - 11s - loss: 0.1275 - acc: 0.9546 - val_loss: 0.4579 - val_acc: 0.8471
Epoch 401/1500
2668/2668 [==============================] - 11s - loss: 0.1516 - acc: 0.9490 - val_loss: 0.6263 - val_acc: 0.7886
Epoch 402/1500
2668/2668 [==============================] - 11s - loss: 0.1556 - acc: 0.9442 - val_loss: 0.4533 - val_acc: 0.8336
Epoch 403/1500
2668/2668 [==============================] - 11s - loss: 0.1415 - acc: 0.9483 - val_loss: 0.5523 - val_acc: 0.8081
Epoch 404/1500
2668/2668 [==============================] - 11s - loss: 0.1463 - acc: 0.9442 - val_loss: 0.4998 - val_acc: 0.8291
Epoch 405/1500
2668/2668 [==============================] - 11s - loss: 0.1154 - acc: 0.9550 - val_loss: 0.6723 - val_acc: 0.7976
Epoch 406/1500
2668/2668 [==============================] - 11s - loss: 0.1476 - acc: 0.9442 - val_loss: 0.4894 - val_acc: 0.8396
Epoch 407/1500
2668/2668 [==============================] - 11s - loss: 0.1279 - acc: 0.9494 - val_loss: 0.6517 - val_acc: 0.7916
Epoch 408/1500
2668/2668 [==============================] - 11s - loss: 0.1290 - acc: 0.9569 - val_loss: 0.5403 - val_acc: 0.8276
Epoch 409/1500
2668/2668 [==============================] - 11s - loss: 0.1247 - acc: 0.9550 - val_loss: 0.5218 - val_acc: 0.8306
Epoch 410/1500
2668/2668 [==============================] - 11s - loss: 0.1113 - acc: 0.9636 - val_loss: 0.5112 - val_acc: 0.8306
Epoch 411/1500
2668/2668 [==============================] - 11s - loss: 0.1177 - acc: 0.9576 - val_loss: 0.6667 - val_acc: 0.7901
Epoch 412/1500
2668/2668 [==============================] - 11s - loss: 0.1262 - acc: 0.9614 - val_loss: 0.4987 - val_acc: 0.8171
Epoch 413/1500
2668/2668 [==============================] - 11s - loss: 0.1044 - acc: 0.9625 - val_loss: 0.6984 - val_acc: 0.7781
Epoch 414/1500
2668/2668 [==============================] - 11s - loss: 0.1145 - acc: 0.9599 - val_loss: 0.5238 - val_acc: 0.8141
Epoch 415/1500
2668/2668 [==============================] - 11s - loss: 0.0989 - acc: 0.9625 - val_loss: 0.5841 - val_acc: 0.8096
Epoch 416/1500
2668/2668 [==============================] - 11s - loss: 0.1191 - acc: 0.9610 - val_loss: 0.4659 - val_acc: 0.8426
Epoch 417/1500
2668/2668 [==============================] - 11s - loss: 0.1060 - acc: 0.9576 - val_loss: 0.7397 - val_acc: 0.7751
Epoch 418/1500
2668/2668 [==============================] - 11s - loss: 0.1189 - acc: 0.9621 - val_loss: 0.4621 - val_acc: 0.8471
Epoch 419/1500
2668/2668 [==============================] - 11s - loss: 0.1709 - acc: 0.9434 - val_loss: 0.5984 - val_acc: 0.8096
Epoch 420/1500
2668/2668 [==============================] - 11s - loss: 0.1344 - acc: 0.9573 - val_loss: 0.6171 - val_acc: 0.8171
Epoch 421/1500
2668/2668 [==============================] - 11s - loss: 0.1240 - acc: 0.9584 - val_loss: 0.5247 - val_acc: 0.8156
Epoch 422/1500
2668/2668 [==============================] - 11s - loss: 0.1370 - acc: 0.9490 - val_loss: 0.5507 - val_acc: 0.8276
Epoch 423/1500
2668/2668 [==============================] - 11s - loss: 0.1005 - acc: 0.9644 - val_loss: 0.5094 - val_acc: 0.8126
Epoch 424/1500
2668/2668 [==============================] - 11s - loss: 0.1341 - acc: 0.9487 - val_loss: 0.4907 - val_acc: 0.8321
Epoch 425/1500
2668/2668 [==============================] - 11s - loss: 0.1101 - acc: 0.9606 - val_loss: 0.5237 - val_acc: 0.8291
Epoch 426/1500
2668/2668 [==============================] - 11s - loss: 0.1231 - acc: 0.9554 - val_loss: 0.5679 - val_acc: 0.8231
Epoch 427/1500
2668/2668 [==============================] - 11s - loss: 0.1276 - acc: 0.9520 - val_loss: 0.4847 - val_acc: 0.8441
Epoch 428/1500
2668/2668 [==============================] - 11s - loss: 0.1173 - acc: 0.9625 - val_loss: 0.4557 - val_acc: 0.8531
Epoch 429/1500
2668/2668 [==============================] - 11s - loss: 0.1172 - acc: 0.9546 - val_loss: 0.4633 - val_acc: 0.8456
Epoch 444/1500
2668/2668 [==============================] - 11s - loss: 0.1064 - acc: 0.9636 - val_loss: 0.6630 - val_acc: 0.7796
Epoch 445/1500
2668/2668 [==============================] - 11s - loss: 0.1109 - acc: 0.9621 - val_loss: 0.6264 - val_acc: 0.7961
Epoch 446/1500
2668/2668 [==============================] - 11s - loss: 0.1153 - acc: 0.9573 - val_loss: 0.5204 - val_acc: 0.8411
Epoch 447/1500
2668/2668 [==============================] - 11s - loss: 0.1133 - acc: 0.9636 - val_loss: 0.7393 - val_acc: 0.7526
Epoch 448/1500
2668/2668 [==============================] - 11s - loss: 0.1136 - acc: 0.9603 - val_loss: 0.4373 - val_acc: 0.8636
Epoch 449/1500
2668/2668 [==============================] - 11s - loss: 0.1114 - acc: 0.9614 - val_loss: 0.6558 - val_acc: 0.7886
Epoch 450/1500
2668/2668 [==============================] - 11s - loss: 0.1180 - acc: 0.9576 - val_loss: 0.3917 - val_acc: 0.8621
Epoch 451/1500
2668/2668 [==============================] - 11s - loss: 0.1042 - acc: 0.9618 - val_loss: 0.6205 - val_acc: 0.7886
Epoch 452/1500
2668/2668 [==============================] - 11s - loss: 0.1038 - acc: 0.9644 - val_loss: 0.4712 - val_acc: 0.8441
Epoch 453/1500
2668/2668 [==============================] - 11s - loss: 0.1109 - acc: 0.9599 - val_loss: 0.5421 - val_acc: 0.8201
Epoch 454/1500
2668/2668 [==============================] - 10s - loss: 0.0318 - acc: 0.9854 - val_loss: 0.3951 - val_acc: 0.8846
Epoch 983/1500
2668/2668 [==============================] - 10s - loss: 0.0380 - acc: 0.9861 - val_loss: 0.5176 - val_acc: 0.8531
Epoch 984/1500
2668/2668 [==============================] - 10s - loss: 0.0508 - acc: 0.9828 - val_loss: 0.3096 - val_acc: 0.9130
Epoch 985/1500
2668/2668 [==============================] - 10s - loss: 0.0543 - acc: 0.9850 - val_loss: 0.6218 - val_acc: 0.8291
Epoch 986/1500
2668/2668 [==============================] - 10s - loss: 0.0413 - acc: 0.9880 - val_loss: 0.4324 - val_acc: 0.8816
Epoch 987/1500
2000/2668 [=====================>........] - ETA: 2s - loss: 0.0713 - acc: 0.9820

Training takes 4.5 h on K80

Still does not seem to be fully trained

Saving our model


In [34]:
# Persist the trained network (architecture + weights + optimizer state) as HDF5
# so it can be reloaded later with keras.models.load_model()
model.save('conv-vgg-augmented.hdf5')

In [35]:
!ls -lh


total 75M
drwxrwxr-x 8 ubuntu ubuntu 4.0K Jul 21 16:58 augmented-signs
-rw-rw-r-- 1 ubuntu ubuntu  17M Aug 30 11:48 augmented-signs.zip
-rw-rw-r-- 1 ubuntu ubuntu  32K Aug 30 11:46 CNN-Full.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 121K Aug 30 11:46 CNN.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 110K Aug 30 11:46 CNN-Mixed.ipynb
-rw-rw-r-- 1 ubuntu ubuntu  98K Aug 30 11:46 CNN-Original.ipynb
-rw-rw-r-- 1 ubuntu ubuntu  89K Aug 30 16:32 cnn-train-augmented.ipynb
-rw-rw-r-- 1 ubuntu ubuntu  55M Aug 30 16:34 conv-vgg-augmented.hdf5
drwxrwxr-x 3 ubuntu ubuntu 4.0K Jul 21 16:50 __MACOSX
drwxrwxr-x 2 ubuntu ubuntu 4.0K Aug 30 11:49 model-checkpoints
drwxrwxr-x 2 ubuntu ubuntu 4.0K Aug  9 07:12 models
-rw------- 1 ubuntu ubuntu 132K Aug 30 16:32 nohup.out
-rw-rw-r-- 1 ubuntu ubuntu   36 Aug  8 09:49 sample_iris.json
drwxrwxr-x 8 ubuntu ubuntu 4.0K Jul 21 16:58 speed-limit-signs
-rw-rw-r-- 1 ubuntu ubuntu 1.8M Aug 30 11:48 speed-limit-signs.zip
drwxr-xr-x 2 ubuntu ubuntu 4.0K Aug 30 11:50 tf_log

In [38]:
# https://transfer.sh/
# Kept for 14 days
!curl --upload-file conv-vgg-augmented.hdf5 https://transfer.sh

# Pre-trained model
# https://transfer.sh/Cvcar/conv-vgg-augmented.hdf5


https://transfer.sh/Cvcar/conv-vgg-augmented.hdf5

Evaluation


In [37]:
# Evaluate on the training data (3,335 samples per the progress bar below)
# to gauge how well the model fits what it was trained on.
# NOTE(review): X_train/y_train/BATCH_SIZE are defined in earlier cells not shown here.
train_loss, train_accuracy = model.evaluate(X_train, y_train, batch_size=BATCH_SIZE)
# Bare expression as last line so the notebook displays the (loss, accuracy) tuple
train_loss, train_accuracy


3335/3335 [==============================] - 5s     
Out[37]:
(0.13158953055806424, 0.96131937605687701)

In [39]:
# Evaluate on the held-out test set; comparing with the training accuracy above
# (~0.96 train vs ~0.90 test per the Out[] cells) shows a moderate generalization gap.
test_loss, test_accuracy = model.evaluate(X_test, y_test, batch_size=BATCH_SIZE)
# Bare expression as last line so the notebook displays the (loss, accuracy) tuple
test_loss, test_accuracy


834/834 [==============================] - 4s
Out[39]:
(0.46287021040916443, 0.8956834077835083)

Again, trying it on random test data


In [46]:
import random

# Pick 10 random images for test data set
random.seed(4) # to make this deterministic
sample_indexes = random.sample(range(len(X_test)), 10)
sample_images = [X_test[i] for i in sample_indexes]
sample_labels = [y_test[i] for i in sample_indexes]

# Labels appear to be one-hot encoded (argmax over axis=1 recovers the class id)
# — TODO confirm against the label-preparation cell earlier in the notebook
ground_truth = np.argmax(sample_labels, axis=1)

# Stack the sampled images into one batch so predict() is called only once
X_sample = np.array(sample_images)
prediction = model.predict(X_sample)
# Predicted class = index of the highest softmax score per sample
predicted_categories = np.argmax(prediction, axis=1)
# Display the predicted class ids (consumed again by display_prediction below)
predicted_categories


Out[46]:
array([3, 1, 1, 2, 2, 2, 0, 3, 5, 4])

In [47]:
# Display the predictions and the ground truth visually.
def display_prediction(images, true_labels, predicted_labels):
    """Plot each sample image annotated with its true and predicted label.

    Correct predictions are labeled in green, incorrect ones in red.

    Args:
        images: sequence of image arrays accepted by plt.imshow.
        true_labels: sequence of ground-truth class ids.
        predicted_labels: sequence of predicted class ids (same length).

    Note: the original hard-coded a 5x2 subplot grid, which overflows
    for more than 10 samples; the grid is now sized to fit the input.
    """
    n = len(true_labels)
    n_cols = 2
    # Ceil division so every image gets a grid cell, even for odd counts
    n_rows = (n + n_cols - 1) // n_cols
    plt.figure(figsize=(10, 10))
    for i in range(n):
        truth = true_labels[i]
        prediction = predicted_labels[i]
        plt.subplot(n_rows, n_cols, 1 + i)
        plt.axis('off')
        # Color encodes correctness at a glance
        color = 'green' if truth == prediction else 'red'
        plt.text(80, 10, "Truth:        {0}\nPrediction: {1}".format(truth, prediction),
                 fontsize=12, color=color)
        plt.imshow(images[i])

In [48]:
display_prediction(sample_images, ground_truth, predicted_categories)



In [ ]: