In [1]:
import warnings
warnings.filterwarnings('ignore')

In [2]:
%matplotlib inline
%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [3]:
import matplotlib.pyplot as plt
import numpy as np

In [4]:
from distutils.version import StrictVersion

In [5]:
import sklearn
print(sklearn.__version__)

assert StrictVersion(sklearn.__version__) >= StrictVersion('0.18.1')


0.18.1

In [6]:
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
print(tf.__version__)

assert StrictVersion(tf.__version__) >= StrictVersion('1.1.0')


1.2.1

In [7]:
import keras
print(keras.__version__)

assert StrictVersion(keras.__version__) >= StrictVersion('2.0.0')


Using TensorFlow backend.
2.0.8

In [8]:
import pandas as pd
print(pd.__version__)

assert StrictVersion(pd.__version__) >= StrictVersion('0.20.0')


0.20.1

Preparation


In [9]:
# the larger the value, the longer training takes; be sure to also adapt the input layer size of the VGG network to this value

INPUT_SHAPE = (64, 64)
# INPUT_SHAPE = (128, 128)
# INPUT_SHAPE = (256, 256)

In [10]:
EPOCHS = 50

In [11]:
# Depends on hardware GPU architecture, set as high as possible (this works well on a K80)
BATCH_SIZE = 100

In [12]:
!rm -rf ./tf_log
# https://keras.io/callbacks/#tensorboard
tb_callback = keras.callbacks.TensorBoard(log_dir='./tf_log')
# To start tensorboard
# tensorboard --logdir=./tf_log
# open http://localhost:6006

In [13]:
!ls -lh


total 317M
-rw-rw-r-- 1 ubuntu ubuntu  44K Oct  1 08:04 440px-Beagle_Upsy.jpg
drwxrwxr-x 8 ubuntu ubuntu 4.0K Oct  1 08:10 augmented-signs
-rw-rw-r-- 1 ubuntu ubuntu  17M Oct  1 08:10 augmented-signs.zip
-rw-rw-r-- 1 ubuntu ubuntu 303K Sep 27 15:22 Black_New_York_stuy_town_squirrel_amanda_ernlund.jpeg
-rw-rw-r-- 1 ubuntu ubuntu 844K Oct  1 08:04 cat-bonkers.png
-rw-rw-r-- 1 ubuntu ubuntu 140K Sep 27 15:22 cnn-augmentation.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 1.6M Oct  1 08:04 cnn-comparing-all-models.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 484K Oct  1 09:57 cnn-imagenet-retrain.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 158K Oct  1 08:04 cnn-intro.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 1.3M Oct  1 08:04 cnn-prediction.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 158K Oct  1 08:04 cnn-standard-architectures.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 198K Oct  1 08:04 cnn-train-augmented.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 495K Sep 27 15:22 london.jpg
drwxrwxr-x 3 ubuntu ubuntu 4.0K Sep 27 15:25 __MACOSX
-rw-rw-r-- 1 ubuntu ubuntu 127K Sep 27 15:22 Michigan-MSU-raschka.jpg
-rw-rw-r-- 1 ubuntu ubuntu 519K Oct  1 08:04 ml-intro.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 286K Oct  1 08:21 nn-intro.ipynb
-rw-rw-r-- 1 ubuntu ubuntu   63 Sep 27 15:22 README.html
-rw-rw-r-- 1 ubuntu ubuntu   36 Oct  1 08:04 sample_iris.json
drwxrwxr-x 8 ubuntu ubuntu 4.0K Sep 27 15:25 speed-limit-signs
-rw-rw-r-- 1 ubuntu ubuntu 344K Oct  1 08:04 speed-limit-signs.ipynb
-rw-rw-r-- 1 ubuntu ubuntu 1.8M Oct  1 08:09 speed-limit-signs.zip
-rw-rw-r-- 1 ubuntu ubuntu  88M Oct  1 09:56 vgg16-augmented-retrained-fine-tuned.hdf5
-rw-rw-r-- 1 ubuntu ubuntu  59M Oct  1 08:52 vgg16-augmented-retrained.hdf5
-rw-rw-r-- 1 ubuntu ubuntu  88M Oct  1 09:56 vgg16-retrained-fine-tuned.hdf5
-rw-rw-r-- 1 ubuntu ubuntu  59M Oct  1 09:26 vgg16-retrained.hdf5
-rw-rw-r-- 1 ubuntu ubuntu  14K Oct  1 08:11 workshop.ipynb

In [14]:
import os
import skimage.data
import skimage.transform
from keras.utils.np_utils import to_categorical
import numpy as np

def load_data(data_dir, ext=".ppm"):
    num_categories = 6

    # Get all subdirectories of data_dir. Each represents a label.
    directories = [d for d in os.listdir(data_dir) 
                   if os.path.isdir(os.path.join(data_dir, d))]
    # Loop through the label directories and collect the data in
    # two lists, labels and images.
    labels = []
    images = []
    for d in directories:
        label_dir = os.path.join(data_dir, d)
        file_names = [os.path.join(label_dir, f) for f in os.listdir(label_dir) if f.endswith(ext)]
        # For each label, load its images and add them to the images list.
        # And add the label number (i.e. directory name) to the labels list.
        for f in file_names:
            images.append(skimage.data.imread(f))
            labels.append(int(d))
    images64 = [skimage.transform.resize(image, INPUT_SHAPE) for image in images]
    y = np.array(labels)
    y = to_categorical(y, num_categories)
    X = np.array(images64)
    return X, y

In [15]:
# Load datasets.
ROOT_PATH = "./"
original_dir = os.path.join(ROOT_PATH, "speed-limit-signs")
original_images, original_labels = load_data(original_dir, ext=".ppm")

In [16]:
X, y = original_images, original_labels

Uncomment the next cell to download the augmented image set if you want to train on it (the two cells after it load and merge it)

Otherwise overfitting cannot be avoided, because the original image set is simply too small
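
For reference, a minimal sketch of how such an augmented set could be generated with Keras' ImageDataGenerator; the parameter values below are illustrative assumptions, not the settings that produced augmented-signs.zip:

# illustrative sketch only: these augmentation parameters are assumptions
from keras.preprocessing.image import ImageDataGenerator

datagen = ImageDataGenerator(
    rotation_range=15,       # rotate by up to 15 degrees
    width_shift_range=0.1,   # shift horizontally by up to 10%
    height_shift_range=0.1,  # shift vertically by up to 10%
    zoom_range=0.2)          # zoom in/out by up to 20%

# X, y as loaded above; draw one augmented copy per original image
augmented_X, augmented_y = next(datagen.flow(X, y, batch_size=len(X), shuffle=False))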


In [17]:
# !curl -O https://raw.githubusercontent.com/DJCordhose/speed-limit-signs/master/data/augmented-signs.zip
# from zipfile import ZipFile
# zip = ZipFile('augmented-signs.zip')
# zip.extractall('.')

In [18]:
data_dir = os.path.join(ROOT_PATH, "augmented-signs")
augmented_images, augmented_labels = load_data(data_dir, ext=".png")

In [19]:
# merge both data sets

all_images = np.vstack((X, augmented_images))
all_labels = np.vstack((y, augmented_labels))

# shuffle
# https://stackoverflow.com/a/4602224

p = np.random.permutation(len(all_labels))
shuffled_images = all_images[p]
shuffled_labels = all_labels[p]
X, y = shuffled_images, shuffled_labels

Split the data into 80% train and 20% test


In [20]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, stratify=y)
X_train.shape, y_train.shape


Out[20]:
((3335, 64, 64, 3), (3335, 6))

First Step: Load VGG16 pretrained on ImageNet and remove the classifier

Hope: feature extraction will also work well for speed limit signs

ImageNet

http://image-net.org/about-stats

High level category      # synsets (subcategories)   Avg # images per synset   Total # images
amphibian                        94                          591                      56K
animal                         3822                          732                    2799K
appliance                        51                         1164                      59K
bird                            856                          949                     812K
covering                        946                          819                     774K
device                         2385                          675                    1610K
fabric                          262                          690                     181K
fish                            566                          494                     280K
flower                          462                          735                     339K
food                           1495                          670                    1001K
fruit                           309                          607                     188K
fungus                          303                          453                     137K
furniture                       187                         1043                     195K
geological formation            151                          838                     127K
invertebrate                    728                          573                     417K
mammal                         1138                          821                     934K
musical instrument              157                          891                     140K
plant                          1666                          600                     999K
reptile                         268                          707                     190K
sport                           166                         1207                     200K
structure                      1239                          763                     946K
tool                            316                          551                     174K
tree                            993                          568                     564K
utensil                          86                          912                      78K
vegetable                       176                          764                     135K
vehicle                         481                          778                     374K
person                         2035                          468                     952K

ImageNet might be more suitable for cats and dogs than for traffic signs, but it is the best we have right now


In [21]:
from keras import applications
# applications.VGG16?
vgg_model = applications.VGG16(include_top=False, weights='imagenet', input_shape=(64, 64, 3))
# vgg_model = applications.VGG16(include_top=False, weights='imagenet', input_shape=(128, 128, 3))
# vgg_model = applications.VGG16(include_top=False, weights='imagenet', input_shape=(256, 256, 3))

All convolutional blocks keep their fully trained ImageNet weights; we only removed the classifier part
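
If we wanted to train end to end on top of the pretrained base instead of precomputing features, we could freeze these blocks first; a minimal sketch, not part of this run:

# hypothetical alternative: keep the pretrained convolutional weights fixed
for layer in vgg_model.layers:
    layer.trainable = False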


In [22]:
vgg_model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, 64, 64, 3)         0         
_________________________________________________________________
block1_conv1 (Conv2D)        (None, 64, 64, 64)        1792      
_________________________________________________________________
block1_conv2 (Conv2D)        (None, 64, 64, 64)        36928     
_________________________________________________________________
block1_pool (MaxPooling2D)   (None, 32, 32, 64)        0         
_________________________________________________________________
block2_conv1 (Conv2D)        (None, 32, 32, 128)       73856     
_________________________________________________________________
block2_conv2 (Conv2D)        (None, 32, 32, 128)       147584    
_________________________________________________________________
block2_pool (MaxPooling2D)   (None, 16, 16, 128)       0         
_________________________________________________________________
block3_conv1 (Conv2D)        (None, 16, 16, 256)       295168    
_________________________________________________________________
block3_conv2 (Conv2D)        (None, 16, 16, 256)       590080    
_________________________________________________________________
block3_conv3 (Conv2D)        (None, 16, 16, 256)       590080    
_________________________________________________________________
block3_pool (MaxPooling2D)   (None, 8, 8, 256)         0         
_________________________________________________________________
block4_conv1 (Conv2D)        (None, 8, 8, 512)         1180160   
_________________________________________________________________
block4_conv2 (Conv2D)        (None, 8, 8, 512)         2359808   
_________________________________________________________________
block4_conv3 (Conv2D)        (None, 8, 8, 512)         2359808   
_________________________________________________________________
block4_pool (MaxPooling2D)   (None, 4, 4, 512)         0         
_________________________________________________________________
block5_conv1 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
block5_conv2 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
block5_conv3 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
block5_pool (MaxPooling2D)   (None, 2, 2, 512)         0         
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
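
The parameter counts above follow directly from the kernel shapes; a quick sanity check:

# Conv2D parameters = kernel_h * kernel_w * in_channels * filters + filters (biases)
assert 3 * 3 * 3 * 64 + 64 == 1792      # block1_conv1
assert 3 * 3 * 64 * 64 + 64 == 36928    # block1_conv2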

The next step is to push all our signs through the net just once and record the bottleneck features it outputs

Don't get confused: this is not training yet; we just record the predictions so that we do not have to repeat this expensive step over and over again when we train the classifier later


In [23]:
# will take a while, but not really long, depending on the size and number of input images

%time bottleneck_features_train = vgg_model.predict(X_train)


CPU times: user 7.03 s, sys: 1.34 s, total: 8.36 s
Wall time: 7.86 s
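
Since this predict pass is the expensive part, it can be worth caching its result to disk; an optional aside, not part of the original run:

# optional: cache the bottleneck features so a kernel restart does not
# force us to recompute the predict pass
np.save('bottleneck_features_train.npy', bottleneck_features_train)
# later: bottleneck_features_train = np.load('bottleneck_features_train.npy')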

In [24]:
bottleneck_features_train.shape


Out[24]:
(3335, 2, 2, 512)

What does this mean?

  • 303 predictions for 303 images, or 3335 predictions for 3335 images when using the augmented data set
  • 512 bottleneck features per prediction
  • each bottleneck feature has a size of 2x2, more or less just a blob
  • the bottleneck features get larger when we increase the size of the input images (which might be a good idea), as the quick check below shows
    • 4x4 when using 128x128 as input
    • 8x8 when using 256x256 as input
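
The spatial size follows from VGG16's five pooling stages, each halving the resolution:

# each of the five max-pooling layers halves the spatial resolution
for side in (64, 128, 256):
    print(side, '->', side // 2**5)   # prints 2, 4 and 8 respectively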

In [25]:
first_bottleneck_feature = bottleneck_features_train[0,:,:, 0]

In [26]:
first_bottleneck_feature


Out[26]:
array([[ 0.        ,  0.        ],
       [ 1.00562787,  0.80484837]], dtype=float32)

Now we create a new classifier and train it on this output together with the ground-truth labels

The classifier is copied from our first VGG-style network


In [27]:
input_shape = bottleneck_features_train.shape[1:]

In [28]:
from keras.models import Model
from keras.layers import Dense, Dropout, Flatten, Input

# try values between 0.4 and 0.75
drop_out = 0.50

inputs = Input(shape=input_shape)

x = Flatten()(inputs)

# this is an additional dropout to compensate for the missing one after bottleneck features
x = Dropout(drop_out)(x)

x = Dense(256, activation='relu')(x)
x = Dropout(drop_out)(x)

# softmax activation, 6 categories
predictions = Dense(6, activation='softmax')(x)

In [29]:
classifier_model = Model(inputs=inputs, outputs=predictions)
classifier_model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_2 (InputLayer)         (None, 2, 2, 512)         0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 2048)              0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 2048)              0         
_________________________________________________________________
dense_1 (Dense)              (None, 256)               524544    
_________________________________________________________________
dropout_2 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 6)                 1542      
=================================================================
Total params: 526,086
Trainable params: 526,086
Non-trainable params: 0
_________________________________________________________________
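
Again, the parameter counts are easy to verify:

# Dense parameters = inputs * units + biases
assert 2048 * 256 + 256 == 524544   # dense_1
assert 256 * 6 + 6 == 1542          # dense_2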

In [30]:
classifier_model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

In [31]:
!rm -rf ./tf_log
# https://keras.io/callbacks/#tensorboard
tb_callback = keras.callbacks.TensorBoard(log_dir='./tf_log')
# To start tensorboard
# tensorboard --logdir=./tf_log
# open http://localhost:6006

This is a very simple architecture and should train pretty fast

  • it still overfits by quite a bit; one possible mitigation is sketched below
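
One way to rein in the overfitting (not used for the run below) would be Keras' EarlyStopping callback; a minimal sketch:

# hypothetical variant, not used below: stop once val_loss has not
# improved for 50 epochs
early_stopping = keras.callbacks.EarlyStopping(monitor='val_loss', patience=50)
# then pass callbacks=[tb_callback, early_stopping] to fit()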

In [32]:
%time history = classifier_model.fit(bottleneck_features_train, y_train, epochs=500, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback])
# more epochs might be needed for original data
# %time history = classifier_model.fit(bottleneck_features_train, y_train, epochs=2000, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback])


Train on 2668 samples, validate on 667 samples
Epoch 1/500
2668/2668 [==============================] - 0s - loss: 1.9313 - acc: 0.2631 - val_loss: 1.5613 - val_acc: 0.3808
Epoch 2/500
2668/2668 [==============================] - 0s - loss: 1.5772 - acc: 0.3388 - val_loss: 1.4332 - val_acc: 0.4438
Epoch 3/500
2668/2668 [==============================] - 0s - loss: 1.4668 - acc: 0.3842 - val_loss: 1.2979 - val_acc: 0.5367
Epoch 4/500
2668/2668 [==============================] - 0s - loss: 1.3743 - acc: 0.4460 - val_loss: 1.2098 - val_acc: 0.5412
Epoch 5/500
2668/2668 [==============================] - 0s - loss: 1.3106 - acc: 0.4588 - val_loss: 1.1373 - val_acc: 0.6012
Epoch 6/500
2668/2668 [==============================] - 0s - loss: 1.2661 - acc: 0.4925 - val_loss: 1.0884 - val_acc: 0.6087
Epoch 7/500
2668/2668 [==============================] - 0s - loss: 1.2029 - acc: 0.5097 - val_loss: 1.0473 - val_acc: 0.6192
Epoch 8/500
2668/2668 [==============================] - 0s - loss: 1.1595 - acc: 0.5277 - val_loss: 0.9929 - val_acc: 0.6432
Epoch 9/500
2668/2668 [==============================] - 0s - loss: 1.1214 - acc: 0.5588 - val_loss: 0.9836 - val_acc: 0.6342
Epoch 10/500
2668/2668 [==============================] - 0s - loss: 1.0987 - acc: 0.5667 - val_loss: 0.9488 - val_acc: 0.6522
Epoch 11/500
2668/2668 [==============================] - 0s - loss: 1.0737 - acc: 0.5772 - val_loss: 0.9073 - val_acc: 0.6837
Epoch 12/500
2668/2668 [==============================] - 0s - loss: 1.0563 - acc: 0.5753 - val_loss: 0.9015 - val_acc: 0.6747
Epoch 13/500
2668/2668 [==============================] - 0s - loss: 1.0037 - acc: 0.5933 - val_loss: 0.8568 - val_acc: 0.7091
Epoch 14/500
2668/2668 [==============================] - 0s - loss: 0.9666 - acc: 0.6147 - val_loss: 0.8590 - val_acc: 0.6897
Epoch 15/500
2668/2668 [==============================] - 0s - loss: 0.9726 - acc: 0.6173 - val_loss: 0.8313 - val_acc: 0.7166
Epoch 16/500
2668/2668 [==============================] - 0s - loss: 0.9065 - acc: 0.6451 - val_loss: 0.8082 - val_acc: 0.6852
Epoch 17/500
2668/2668 [==============================] - 0s - loss: 0.9164 - acc: 0.6421 - val_loss: 0.8006 - val_acc: 0.7166
Epoch 18/500
2668/2668 [==============================] - 0s - loss: 0.9330 - acc: 0.6364 - val_loss: 0.7842 - val_acc: 0.7151
Epoch 19/500
2668/2668 [==============================] - 0s - loss: 0.8751 - acc: 0.6615 - val_loss: 0.7639 - val_acc: 0.7151
Epoch 20/500
2668/2668 [==============================] - 0s - loss: 0.8550 - acc: 0.6623 - val_loss: 0.7529 - val_acc: 0.7316
Epoch 21/500
2668/2668 [==============================] - 0s - loss: 0.8536 - acc: 0.6642 - val_loss: 0.7481 - val_acc: 0.7211
Epoch 22/500
2668/2668 [==============================] - 0s - loss: 0.8495 - acc: 0.6758 - val_loss: 0.7315 - val_acc: 0.7466
Epoch 23/500
2668/2668 [==============================] - 0s - loss: 0.8167 - acc: 0.6769 - val_loss: 0.7213 - val_acc: 0.7511
Epoch 24/500
2668/2668 [==============================] - 0s - loss: 0.8052 - acc: 0.6949 - val_loss: 0.7193 - val_acc: 0.7466
Epoch 25/500
2668/2668 [==============================] - 0s - loss: 0.7895 - acc: 0.6979 - val_loss: 0.7020 - val_acc: 0.7436
Epoch 26/500
2668/2668 [==============================] - 0s - loss: 0.7852 - acc: 0.6934 - val_loss: 0.6860 - val_acc: 0.7451
Epoch 27/500
2668/2668 [==============================] - 0s - loss: 0.7583 - acc: 0.7080 - val_loss: 0.6751 - val_acc: 0.7691
Epoch 28/500
2668/2668 [==============================] - 0s - loss: 0.7584 - acc: 0.7009 - val_loss: 0.6640 - val_acc: 0.7616
Epoch 29/500
2668/2668 [==============================] - 0s - loss: 0.7340 - acc: 0.7155 - val_loss: 0.6643 - val_acc: 0.7646
Epoch 30/500
2668/2668 [==============================] - 0s - loss: 0.7375 - acc: 0.7144 - val_loss: 0.6574 - val_acc: 0.7736
Epoch 31/500
2668/2668 [==============================] - 0s - loss: 0.7439 - acc: 0.7050 - val_loss: 0.6569 - val_acc: 0.7676
Epoch 32/500
2668/2668 [==============================] - 0s - loss: 0.6879 - acc: 0.7354 - val_loss: 0.6299 - val_acc: 0.7796
Epoch 33/500
2668/2668 [==============================] - 0s - loss: 0.7195 - acc: 0.7268 - val_loss: 0.6813 - val_acc: 0.7481
Epoch 34/500
2668/2668 [==============================] - 0s - loss: 0.6878 - acc: 0.7354 - val_loss: 0.6233 - val_acc: 0.7766
Epoch 35/500
2668/2668 [==============================] - 0s - loss: 0.6733 - acc: 0.7496 - val_loss: 0.6158 - val_acc: 0.7736
Epoch 36/500
2668/2668 [==============================] - 0s - loss: 0.6905 - acc: 0.7223 - val_loss: 0.6416 - val_acc: 0.7841
Epoch 37/500
2668/2668 [==============================] - 0s - loss: 0.6609 - acc: 0.7504 - val_loss: 0.6151 - val_acc: 0.7751
Epoch 38/500
2668/2668 [==============================] - 0s - loss: 0.6599 - acc: 0.7455 - val_loss: 0.6182 - val_acc: 0.7871
Epoch 39/500
2668/2668 [==============================] - 0s - loss: 0.6517 - acc: 0.7545 - val_loss: 0.6096 - val_acc: 0.8066
Epoch 40/500
2668/2668 [==============================] - 0s - loss: 0.6665 - acc: 0.7455 - val_loss: 0.6061 - val_acc: 0.7916
Epoch 41/500
2668/2668 [==============================] - 0s - loss: 0.6447 - acc: 0.7579 - val_loss: 0.5925 - val_acc: 0.7946
Epoch 42/500
2668/2668 [==============================] - 0s - loss: 0.6477 - acc: 0.7541 - val_loss: 0.6073 - val_acc: 0.7871
Epoch 43/500
2668/2668 [==============================] - 0s - loss: 0.6454 - acc: 0.7459 - val_loss: 0.5997 - val_acc: 0.7886
Epoch 44/500
2668/2668 [==============================] - 0s - loss: 0.6139 - acc: 0.7631 - val_loss: 0.5697 - val_acc: 0.7931
Epoch 45/500
2668/2668 [==============================] - 0s - loss: 0.5915 - acc: 0.7781 - val_loss: 0.5601 - val_acc: 0.8006
Epoch 46/500
2668/2668 [==============================] - 0s - loss: 0.6074 - acc: 0.7762 - val_loss: 0.5760 - val_acc: 0.8066
Epoch 47/500
2668/2668 [==============================] - 0s - loss: 0.5941 - acc: 0.7717 - val_loss: 0.5672 - val_acc: 0.8051
Epoch 48/500
2668/2668 [==============================] - 0s - loss: 0.5830 - acc: 0.7830 - val_loss: 0.5488 - val_acc: 0.8096
Epoch 49/500
2668/2668 [==============================] - 0s - loss: 0.5888 - acc: 0.7725 - val_loss: 0.5589 - val_acc: 0.8006
Epoch 50/500
2668/2668 [==============================] - 0s - loss: 0.5962 - acc: 0.7747 - val_loss: 0.5640 - val_acc: 0.8066
Epoch 51/500
2668/2668 [==============================] - 0s - loss: 0.5572 - acc: 0.7886 - val_loss: 0.5446 - val_acc: 0.8261
Epoch 52/500
2668/2668 [==============================] - 0s - loss: 0.5707 - acc: 0.7740 - val_loss: 0.5570 - val_acc: 0.8156
Epoch 53/500
2668/2668 [==============================] - 0s - loss: 0.5722 - acc: 0.7631 - val_loss: 0.5637 - val_acc: 0.8021
Epoch 54/500
2668/2668 [==============================] - 0s - loss: 0.5496 - acc: 0.7950 - val_loss: 0.5424 - val_acc: 0.7931
Epoch 55/500
2668/2668 [==============================] - 0s - loss: 0.5640 - acc: 0.7894 - val_loss: 0.5475 - val_acc: 0.8081
Epoch 56/500
2668/2668 [==============================] - 0s - loss: 0.5547 - acc: 0.7976 - val_loss: 0.5548 - val_acc: 0.7856
Epoch 57/500
2668/2668 [==============================] - 0s - loss: 0.5329 - acc: 0.7987 - val_loss: 0.5383 - val_acc: 0.8066
Epoch 58/500
2668/2668 [==============================] - 0s - loss: 0.5366 - acc: 0.7954 - val_loss: 0.5436 - val_acc: 0.8141
Epoch 59/500
2668/2668 [==============================] - 0s - loss: 0.5059 - acc: 0.8055 - val_loss: 0.5276 - val_acc: 0.8291
Epoch 60/500
2668/2668 [==============================] - 0s - loss: 0.5041 - acc: 0.8186 - val_loss: 0.5294 - val_acc: 0.8201
Epoch 61/500
2668/2668 [==============================] - 0s - loss: 0.5084 - acc: 0.8058 - val_loss: 0.5104 - val_acc: 0.8261
Epoch 62/500
2668/2668 [==============================] - 0s - loss: 0.5176 - acc: 0.8043 - val_loss: 0.5101 - val_acc: 0.8291
Epoch 63/500
2668/2668 [==============================] - 0s - loss: 0.5012 - acc: 0.8208 - val_loss: 0.5073 - val_acc: 0.8396
Epoch 64/500
2668/2668 [==============================] - 0s - loss: 0.5150 - acc: 0.8058 - val_loss: 0.5018 - val_acc: 0.8156
Epoch 65/500
2668/2668 [==============================] - 0s - loss: 0.4987 - acc: 0.8130 - val_loss: 0.5124 - val_acc: 0.8351
Epoch 66/500
2668/2668 [==============================] - 0s - loss: 0.5013 - acc: 0.8088 - val_loss: 0.5054 - val_acc: 0.8321
Epoch 67/500
2668/2668 [==============================] - 0s - loss: 0.4937 - acc: 0.8126 - val_loss: 0.5063 - val_acc: 0.8306
Epoch 68/500
2668/2668 [==============================] - 0s - loss: 0.4558 - acc: 0.8310 - val_loss: 0.4895 - val_acc: 0.8231
Epoch 69/500
2668/2668 [==============================] - 0s - loss: 0.4691 - acc: 0.8283 - val_loss: 0.5029 - val_acc: 0.8156
Epoch 70/500
2668/2668 [==============================] - 0s - loss: 0.4634 - acc: 0.8268 - val_loss: 0.5117 - val_acc: 0.8201
Epoch 71/500
2668/2668 [==============================] - 0s - loss: 0.4389 - acc: 0.8392 - val_loss: 0.4875 - val_acc: 0.8366
Epoch 72/500
2668/2668 [==============================] - 0s - loss: 0.4729 - acc: 0.8201 - val_loss: 0.4909 - val_acc: 0.8291
Epoch 73/500
2668/2668 [==============================] - 0s - loss: 0.4605 - acc: 0.8238 - val_loss: 0.4796 - val_acc: 0.8441
Epoch 74/500
2668/2668 [==============================] - 0s - loss: 0.4427 - acc: 0.8336 - val_loss: 0.4853 - val_acc: 0.8336
Epoch 75/500
2668/2668 [==============================] - 0s - loss: 0.4400 - acc: 0.8396 - val_loss: 0.4937 - val_acc: 0.8261
Epoch 76/500
2668/2668 [==============================] - 0s - loss: 0.4518 - acc: 0.8332 - val_loss: 0.4833 - val_acc: 0.8351
Epoch 77/500
2668/2668 [==============================] - 0s - loss: 0.4405 - acc: 0.8411 - val_loss: 0.4807 - val_acc: 0.8366
Epoch 78/500
2668/2668 [==============================] - 0s - loss: 0.4442 - acc: 0.8328 - val_loss: 0.4898 - val_acc: 0.8066
Epoch 79/500
2668/2668 [==============================] - 0s - loss: 0.4227 - acc: 0.8411 - val_loss: 0.4726 - val_acc: 0.8411
Epoch 80/500
2668/2668 [==============================] - 0s - loss: 0.4207 - acc: 0.8396 - val_loss: 0.5007 - val_acc: 0.8291
Epoch 81/500
2668/2668 [==============================] - 0s - loss: 0.4313 - acc: 0.8385 - val_loss: 0.4958 - val_acc: 0.8396
Epoch 82/500
2668/2668 [==============================] - 0s - loss: 0.4470 - acc: 0.8336 - val_loss: 0.4608 - val_acc: 0.8591
Epoch 83/500
2668/2668 [==============================] - 0s - loss: 0.4272 - acc: 0.8403 - val_loss: 0.4699 - val_acc: 0.8396
Epoch 84/500
2668/2668 [==============================] - 0s - loss: 0.4483 - acc: 0.8321 - val_loss: 0.4527 - val_acc: 0.8411
Epoch 85/500
2668/2668 [==============================] - 0s - loss: 0.4146 - acc: 0.8478 - val_loss: 0.4711 - val_acc: 0.8561
Epoch 86/500
2668/2668 [==============================] - 0s - loss: 0.4132 - acc: 0.8486 - val_loss: 0.4737 - val_acc: 0.8411
Epoch 87/500
2668/2668 [==============================] - 0s - loss: 0.4181 - acc: 0.8504 - val_loss: 0.4764 - val_acc: 0.8261
Epoch 88/500
2668/2668 [==============================] - 0s - loss: 0.4014 - acc: 0.8534 - val_loss: 0.4710 - val_acc: 0.8426
Epoch 89/500
2668/2668 [==============================] - 0s - loss: 0.3951 - acc: 0.8553 - val_loss: 0.4632 - val_acc: 0.8231
Epoch 90/500
2668/2668 [==============================] - 0s - loss: 0.4056 - acc: 0.8534 - val_loss: 0.4615 - val_acc: 0.8306
Epoch 91/500
2668/2668 [==============================] - 0s - loss: 0.3894 - acc: 0.8594 - val_loss: 0.4454 - val_acc: 0.8486
Epoch 92/500
2668/2668 [==============================] - 0s - loss: 0.4104 - acc: 0.8493 - val_loss: 0.4572 - val_acc: 0.8396
Epoch 93/500
2668/2668 [==============================] - 0s - loss: 0.3762 - acc: 0.8591 - val_loss: 0.4711 - val_acc: 0.8396
Epoch 94/500
2668/2668 [==============================] - 0s - loss: 0.3900 - acc: 0.8651 - val_loss: 0.4596 - val_acc: 0.8531
Epoch 95/500
2668/2668 [==============================] - 0s - loss: 0.3715 - acc: 0.8583 - val_loss: 0.4593 - val_acc: 0.8441
Epoch 96/500
2668/2668 [==============================] - 0s - loss: 0.3784 - acc: 0.8591 - val_loss: 0.4763 - val_acc: 0.8351
Epoch 97/500
2668/2668 [==============================] - 0s - loss: 0.3915 - acc: 0.8557 - val_loss: 0.4621 - val_acc: 0.8456
Epoch 98/500
2668/2668 [==============================] - 0s - loss: 0.3892 - acc: 0.8538 - val_loss: 0.4654 - val_acc: 0.8336
Epoch 99/500
2668/2668 [==============================] - 0s - loss: 0.3902 - acc: 0.8534 - val_loss: 0.4590 - val_acc: 0.8336
Epoch 100/500
2668/2668 [==============================] - 0s - loss: 0.3732 - acc: 0.8602 - val_loss: 0.4571 - val_acc: 0.8591
Epoch 101/500
2668/2668 [==============================] - 0s - loss: 0.4147 - acc: 0.8403 - val_loss: 0.4680 - val_acc: 0.8471
Epoch 102/500
2668/2668 [==============================] - 0s - loss: 0.3664 - acc: 0.8729 - val_loss: 0.4693 - val_acc: 0.8456
Epoch 103/500
2668/2668 [==============================] - 0s - loss: 0.3672 - acc: 0.8606 - val_loss: 0.4464 - val_acc: 0.8546
Epoch 104/500
2668/2668 [==============================] - 0s - loss: 0.3441 - acc: 0.8726 - val_loss: 0.4656 - val_acc: 0.8516
Epoch 105/500
2668/2668 [==============================] - 0s - loss: 0.3490 - acc: 0.8714 - val_loss: 0.4662 - val_acc: 0.8381
Epoch 106/500
2668/2668 [==============================] - 0s - loss: 0.3921 - acc: 0.8572 - val_loss: 0.4589 - val_acc: 0.8396
Epoch 107/500
2668/2668 [==============================] - 0s - loss: 0.3628 - acc: 0.8729 - val_loss: 0.4405 - val_acc: 0.8666
Epoch 108/500
2668/2668 [==============================] - 0s - loss: 0.3545 - acc: 0.8729 - val_loss: 0.4538 - val_acc: 0.8396
Epoch 109/500
2668/2668 [==============================] - 0s - loss: 0.3383 - acc: 0.8759 - val_loss: 0.4775 - val_acc: 0.8381
Epoch 110/500
2668/2668 [==============================] - 0s - loss: 0.3675 - acc: 0.8591 - val_loss: 0.4671 - val_acc: 0.8336
Epoch 111/500
2668/2668 [==============================] - 0s - loss: 0.3423 - acc: 0.8797 - val_loss: 0.4582 - val_acc: 0.8396
Epoch 112/500
2668/2668 [==============================] - 0s - loss: 0.3298 - acc: 0.8808 - val_loss: 0.4703 - val_acc: 0.8351
Epoch 113/500
2668/2668 [==============================] - 0s - loss: 0.3793 - acc: 0.8643 - val_loss: 0.4592 - val_acc: 0.8486
Epoch 114/500
2668/2668 [==============================] - 0s - loss: 0.3319 - acc: 0.8819 - val_loss: 0.4537 - val_acc: 0.8441
Epoch 115/500
2668/2668 [==============================] - 0s - loss: 0.3291 - acc: 0.8782 - val_loss: 0.4589 - val_acc: 0.8426
Epoch 116/500
2668/2668 [==============================] - 0s - loss: 0.3545 - acc: 0.8744 - val_loss: 0.4630 - val_acc: 0.8411
Epoch 117/500
2668/2668 [==============================] - 0s - loss: 0.3323 - acc: 0.8741 - val_loss: 0.4496 - val_acc: 0.8606
Epoch 118/500
2668/2668 [==============================] - 0s - loss: 0.3498 - acc: 0.8729 - val_loss: 0.4611 - val_acc: 0.8501
Epoch 119/500
2668/2668 [==============================] - 0s - loss: 0.3491 - acc: 0.8662 - val_loss: 0.4557 - val_acc: 0.8456
Epoch 120/500
2668/2668 [==============================] - 0s - loss: 0.3566 - acc: 0.8696 - val_loss: 0.4530 - val_acc: 0.8381
Epoch 121/500
2668/2668 [==============================] - 0s - loss: 0.3331 - acc: 0.8793 - val_loss: 0.4549 - val_acc: 0.8441
Epoch 122/500
2668/2668 [==============================] - 0s - loss: 0.3617 - acc: 0.8624 - val_loss: 0.4668 - val_acc: 0.8591
Epoch 123/500
2668/2668 [==============================] - 0s - loss: 0.3297 - acc: 0.8789 - val_loss: 0.4799 - val_acc: 0.8486
Epoch 124/500
2668/2668 [==============================] - 0s - loss: 0.3434 - acc: 0.8722 - val_loss: 0.4656 - val_acc: 0.8396
Epoch 125/500
2668/2668 [==============================] - 0s - loss: 0.3217 - acc: 0.8842 - val_loss: 0.4568 - val_acc: 0.8591
Epoch 126/500
2668/2668 [==============================] - 0s - loss: 0.3320 - acc: 0.8744 - val_loss: 0.4713 - val_acc: 0.8561
Epoch 127/500
2668/2668 [==============================] - 0s - loss: 0.3189 - acc: 0.8771 - val_loss: 0.4580 - val_acc: 0.8561
Epoch 128/500
2668/2668 [==============================] - 0s - loss: 0.3003 - acc: 0.8864 - val_loss: 0.4511 - val_acc: 0.8441
Epoch 129/500
2668/2668 [==============================] - 0s - loss: 0.3178 - acc: 0.8793 - val_loss: 0.4603 - val_acc: 0.8336
Epoch 130/500
2668/2668 [==============================] - 0s - loss: 0.3056 - acc: 0.8894 - val_loss: 0.4625 - val_acc: 0.8426
Epoch 131/500
2668/2668 [==============================] - 0s - loss: 0.3118 - acc: 0.8774 - val_loss: 0.4730 - val_acc: 0.8366
Epoch 132/500
2668/2668 [==============================] - 0s - loss: 0.3319 - acc: 0.8759 - val_loss: 0.4636 - val_acc: 0.8561
Epoch 133/500
2668/2668 [==============================] - 0s - loss: 0.3333 - acc: 0.8722 - val_loss: 0.4463 - val_acc: 0.8561
Epoch 134/500
2668/2668 [==============================] - 0s - loss: 0.2976 - acc: 0.8913 - val_loss: 0.4490 - val_acc: 0.8396
Epoch 135/500
2668/2668 [==============================] - 0s - loss: 0.3217 - acc: 0.8864 - val_loss: 0.4655 - val_acc: 0.8381
Epoch 136/500
2668/2668 [==============================] - 0s - loss: 0.3215 - acc: 0.8812 - val_loss: 0.4544 - val_acc: 0.8366
Epoch 137/500
2668/2668 [==============================] - 0s - loss: 0.2873 - acc: 0.8943 - val_loss: 0.4787 - val_acc: 0.8321
Epoch 138/500
2668/2668 [==============================] - 0s - loss: 0.3102 - acc: 0.8823 - val_loss: 0.4602 - val_acc: 0.8396
Epoch 139/500
2668/2668 [==============================] - 0s - loss: 0.3074 - acc: 0.8842 - val_loss: 0.4523 - val_acc: 0.8426
Epoch 140/500
2668/2668 [==============================] - 0s - loss: 0.3086 - acc: 0.8827 - val_loss: 0.4596 - val_acc: 0.8426
Epoch 141/500
2668/2668 [==============================] - 0s - loss: 0.3094 - acc: 0.8808 - val_loss: 0.4439 - val_acc: 0.8531
Epoch 142/500
2668/2668 [==============================] - 0s - loss: 0.3034 - acc: 0.8857 - val_loss: 0.4396 - val_acc: 0.8486
Epoch 143/500
2668/2668 [==============================] - 0s - loss: 0.2941 - acc: 0.8951 - val_loss: 0.4701 - val_acc: 0.8411
Epoch 144/500
2668/2668 [==============================] - 0s - loss: 0.3054 - acc: 0.8868 - val_loss: 0.4696 - val_acc: 0.8381
Epoch 145/500
2668/2668 [==============================] - 0s - loss: 0.2765 - acc: 0.8962 - val_loss: 0.4540 - val_acc: 0.8471
Epoch 146/500
2668/2668 [==============================] - 0s - loss: 0.3036 - acc: 0.8868 - val_loss: 0.4789 - val_acc: 0.8426
Epoch 147/500
2668/2668 [==============================] - 0s - loss: 0.2886 - acc: 0.8992 - val_loss: 0.4646 - val_acc: 0.8441
Epoch 148/500
2668/2668 [==============================] - 0s - loss: 0.2986 - acc: 0.8846 - val_loss: 0.4476 - val_acc: 0.8531
Epoch 149/500
2668/2668 [==============================] - 0s - loss: 0.2998 - acc: 0.8864 - val_loss: 0.4564 - val_acc: 0.8336
Epoch 150/500
2668/2668 [==============================] - 0s - loss: 0.2869 - acc: 0.8984 - val_loss: 0.4543 - val_acc: 0.8321
Epoch 151/500
2668/2668 [==============================] - 0s - loss: 0.2894 - acc: 0.8898 - val_loss: 0.4625 - val_acc: 0.8441
Epoch 152/500
2668/2668 [==============================] - 0s - loss: 0.2834 - acc: 0.8954 - val_loss: 0.4413 - val_acc: 0.8531
Epoch 153/500
2668/2668 [==============================] - 0s - loss: 0.2953 - acc: 0.8894 - val_loss: 0.4453 - val_acc: 0.8471
Epoch 154/500
2668/2668 [==============================] - 0s - loss: 0.2911 - acc: 0.8913 - val_loss: 0.4530 - val_acc: 0.8456
Epoch 155/500
2668/2668 [==============================] - 0s - loss: 0.2968 - acc: 0.8996 - val_loss: 0.4351 - val_acc: 0.8606
Epoch 156/500
2668/2668 [==============================] - 0s - loss: 0.2995 - acc: 0.8932 - val_loss: 0.4990 - val_acc: 0.8291
Epoch 157/500
2668/2668 [==============================] - 0s - loss: 0.2820 - acc: 0.8969 - val_loss: 0.4321 - val_acc: 0.8516
Epoch 158/500
2668/2668 [==============================] - 0s - loss: 0.2905 - acc: 0.8883 - val_loss: 0.4520 - val_acc: 0.8396
Epoch 159/500
2668/2668 [==============================] - 0s - loss: 0.2591 - acc: 0.9097 - val_loss: 0.4597 - val_acc: 0.8486
Epoch 160/500
2668/2668 [==============================] - 0s - loss: 0.2837 - acc: 0.8894 - val_loss: 0.4555 - val_acc: 0.8561
Epoch 161/500
2668/2668 [==============================] - 0s - loss: 0.2804 - acc: 0.8928 - val_loss: 0.4711 - val_acc: 0.8456
Epoch 162/500
2668/2668 [==============================] - 0s - loss: 0.2886 - acc: 0.8846 - val_loss: 0.4681 - val_acc: 0.8471
Epoch 163/500
2668/2668 [==============================] - 0s - loss: 0.2747 - acc: 0.8981 - val_loss: 0.4490 - val_acc: 0.8381
Epoch 164/500
2668/2668 [==============================] - 0s - loss: 0.2684 - acc: 0.8973 - val_loss: 0.4547 - val_acc: 0.8606
Epoch 165/500
2668/2668 [==============================] - 0s - loss: 0.2797 - acc: 0.8984 - val_loss: 0.4516 - val_acc: 0.8516
Epoch 166/500
2668/2668 [==============================] - 0s - loss: 0.2791 - acc: 0.8966 - val_loss: 0.4606 - val_acc: 0.8411
Epoch 167/500
2668/2668 [==============================] - 0s - loss: 0.3036 - acc: 0.8936 - val_loss: 0.4628 - val_acc: 0.8516
Epoch 168/500
2668/2668 [==============================] - 0s - loss: 0.2660 - acc: 0.9040 - val_loss: 0.4735 - val_acc: 0.8471
Epoch 169/500
2668/2668 [==============================] - 0s - loss: 0.2669 - acc: 0.8951 - val_loss: 0.4515 - val_acc: 0.8591
Epoch 170/500
2668/2668 [==============================] - 0s - loss: 0.2647 - acc: 0.9029 - val_loss: 0.4556 - val_acc: 0.8456
Epoch 171/500
2668/2668 [==============================] - 0s - loss: 0.2645 - acc: 0.9078 - val_loss: 0.4668 - val_acc: 0.8426
Epoch 172/500
2668/2668 [==============================] - 0s - loss: 0.2628 - acc: 0.9063 - val_loss: 0.4582 - val_acc: 0.8501
Epoch 173/500
2668/2668 [==============================] - 0s - loss: 0.2731 - acc: 0.8999 - val_loss: 0.4559 - val_acc: 0.8471
Epoch 174/500
2668/2668 [==============================] - 0s - loss: 0.2907 - acc: 0.8954 - val_loss: 0.4475 - val_acc: 0.8456
Epoch 175/500
2668/2668 [==============================] - 0s - loss: 0.2447 - acc: 0.9063 - val_loss: 0.4594 - val_acc: 0.8501
Epoch 176/500
2668/2668 [==============================] - 0s - loss: 0.2743 - acc: 0.8981 - val_loss: 0.4739 - val_acc: 0.8381
Epoch 177/500
2668/2668 [==============================] - 0s - loss: 0.2688 - acc: 0.9055 - val_loss: 0.4571 - val_acc: 0.8501
Epoch 178/500
2668/2668 [==============================] - 0s - loss: 0.2442 - acc: 0.9130 - val_loss: 0.4632 - val_acc: 0.8561
Epoch 179/500
2668/2668 [==============================] - 0s - loss: 0.2640 - acc: 0.9085 - val_loss: 0.4453 - val_acc: 0.8576
Epoch 180/500
2668/2668 [==============================] - 0s - loss: 0.2727 - acc: 0.8999 - val_loss: 0.4728 - val_acc: 0.8546
Epoch 181/500
2668/2668 [==============================] - 0s - loss: 0.2456 - acc: 0.9037 - val_loss: 0.4574 - val_acc: 0.8501
Epoch 182/500
2668/2668 [==============================] - 0s - loss: 0.2768 - acc: 0.9003 - val_loss: 0.4542 - val_acc: 0.8591
Epoch 183/500
2668/2668 [==============================] - 0s - loss: 0.2656 - acc: 0.9063 - val_loss: 0.4728 - val_acc: 0.8351
Epoch 184/500
2668/2668 [==============================] - 0s - loss: 0.2365 - acc: 0.9097 - val_loss: 0.4656 - val_acc: 0.8471
Epoch 185/500
2668/2668 [==============================] - 0s - loss: 0.2770 - acc: 0.8932 - val_loss: 0.4551 - val_acc: 0.8561
Epoch 186/500
2668/2668 [==============================] - 0s - loss: 0.2484 - acc: 0.9074 - val_loss: 0.4428 - val_acc: 0.8516
Epoch 187/500
2668/2668 [==============================] - 0s - loss: 0.2472 - acc: 0.9108 - val_loss: 0.4555 - val_acc: 0.8486
Epoch 188/500
2668/2668 [==============================] - 0s - loss: 0.2666 - acc: 0.9003 - val_loss: 0.4676 - val_acc: 0.8486
Epoch 189/500
2668/2668 [==============================] - 0s - loss: 0.2529 - acc: 0.9108 - val_loss: 0.4437 - val_acc: 0.8576
Epoch 190/500
2668/2668 [==============================] - 0s - loss: 0.2561 - acc: 0.9067 - val_loss: 0.4771 - val_acc: 0.8651
Epoch 191/500
2668/2668 [==============================] - 0s - loss: 0.2484 - acc: 0.9055 - val_loss: 0.4836 - val_acc: 0.8456
Epoch 192/500
2668/2668 [==============================] - 0s - loss: 0.2591 - acc: 0.9082 - val_loss: 0.4728 - val_acc: 0.8441
Epoch 193/500
2668/2668 [==============================] - 0s - loss: 0.2755 - acc: 0.9022 - val_loss: 0.4722 - val_acc: 0.8426
Epoch 194/500
2668/2668 [==============================] - 0s - loss: 0.2361 - acc: 0.9149 - val_loss: 0.4731 - val_acc: 0.8441
Epoch 195/500
2668/2668 [==============================] - 0s - loss: 0.2538 - acc: 0.9078 - val_loss: 0.4631 - val_acc: 0.8486
Epoch 196/500
2668/2668 [==============================] - 0s - loss: 0.2552 - acc: 0.9089 - val_loss: 0.4698 - val_acc: 0.8426
Epoch 197/500
2668/2668 [==============================] - 0s - loss: 0.2459 - acc: 0.9070 - val_loss: 0.4549 - val_acc: 0.8576
Epoch 198/500
2668/2668 [==============================] - 0s - loss: 0.2413 - acc: 0.9112 - val_loss: 0.4619 - val_acc: 0.8651
Epoch 199/500
2668/2668 [==============================] - 0s - loss: 0.2634 - acc: 0.9018 - val_loss: 0.4414 - val_acc: 0.8546
Epoch 200/500
2668/2668 [==============================] - 0s - loss: 0.2436 - acc: 0.9168 - val_loss: 0.4530 - val_acc: 0.8561
Epoch 201/500
2668/2668 [==============================] - 0s - loss: 0.2402 - acc: 0.9123 - val_loss: 0.4429 - val_acc: 0.8531
Epoch 202/500
2668/2668 [==============================] - 0s - loss: 0.2342 - acc: 0.9157 - val_loss: 0.4724 - val_acc: 0.8336
Epoch 203/500
2668/2668 [==============================] - 0s - loss: 0.2473 - acc: 0.9018 - val_loss: 0.4668 - val_acc: 0.8456
Epoch 204/500
2668/2668 [==============================] - 0s - loss: 0.2336 - acc: 0.9168 - val_loss: 0.4594 - val_acc: 0.8606
Epoch 205/500
2668/2668 [==============================] - 0s - loss: 0.2460 - acc: 0.9063 - val_loss: 0.4703 - val_acc: 0.8426
Epoch 206/500
2668/2668 [==============================] - 0s - loss: 0.2490 - acc: 0.9115 - val_loss: 0.4625 - val_acc: 0.8576
Epoch 207/500
2668/2668 [==============================] - 0s - loss: 0.2442 - acc: 0.9070 - val_loss: 0.4741 - val_acc: 0.8501
Epoch 208/500
2668/2668 [==============================] - 0s - loss: 0.2419 - acc: 0.9074 - val_loss: 0.4482 - val_acc: 0.8561
Epoch 209/500
2668/2668 [==============================] - 0s - loss: 0.2509 - acc: 0.9029 - val_loss: 0.4728 - val_acc: 0.8501
Epoch 210/500
2668/2668 [==============================] - 0s - loss: 0.2567 - acc: 0.9037 - val_loss: 0.4823 - val_acc: 0.8471
Epoch 211/500
2668/2668 [==============================] - 0s - loss: 0.2403 - acc: 0.9112 - val_loss: 0.4599 - val_acc: 0.8606
Epoch 212/500
2668/2668 [==============================] - 0s - loss: 0.2585 - acc: 0.9037 - val_loss: 0.4645 - val_acc: 0.8396
Epoch 213/500
2668/2668 [==============================] - 0s - loss: 0.2488 - acc: 0.9130 - val_loss: 0.4350 - val_acc: 0.8531
Epoch 214/500
2668/2668 [==============================] - 0s - loss: 0.2336 - acc: 0.9172 - val_loss: 0.4467 - val_acc: 0.8576
Epoch 215/500
2668/2668 [==============================] - 0s - loss: 0.2375 - acc: 0.9100 - val_loss: 0.4456 - val_acc: 0.8516
Epoch 216/500
2668/2668 [==============================] - 0s - loss: 0.2271 - acc: 0.9183 - val_loss: 0.4310 - val_acc: 0.8576
Epoch 217/500
2668/2668 [==============================] - 0s - loss: 0.2436 - acc: 0.9082 - val_loss: 0.4308 - val_acc: 0.8441
Epoch 218/500
2668/2668 [==============================] - 0s - loss: 0.2327 - acc: 0.9194 - val_loss: 0.4291 - val_acc: 0.8546
Epoch 219/500
2668/2668 [==============================] - 0s - loss: 0.2346 - acc: 0.9130 - val_loss: 0.4312 - val_acc: 0.8666
Epoch 220/500
2668/2668 [==============================] - 0s - loss: 0.2411 - acc: 0.9130 - val_loss: 0.4619 - val_acc: 0.8471
Epoch 221/500
2668/2668 [==============================] - 0s - loss: 0.2296 - acc: 0.9202 - val_loss: 0.4510 - val_acc: 0.8531
Epoch 222/500
2668/2668 [==============================] - 0s - loss: 0.2151 - acc: 0.9232 - val_loss: 0.4435 - val_acc: 0.8516
Epoch 223/500
2668/2668 [==============================] - 0s - loss: 0.2169 - acc: 0.9149 - val_loss: 0.4466 - val_acc: 0.8501
Epoch 224/500
2668/2668 [==============================] - 0s - loss: 0.2184 - acc: 0.9220 - val_loss: 0.4510 - val_acc: 0.8546
Epoch 225/500
2668/2668 [==============================] - 0s - loss: 0.2209 - acc: 0.9232 - val_loss: 0.4737 - val_acc: 0.8531
Epoch 226/500
2668/2668 [==============================] - 0s - loss: 0.2359 - acc: 0.9097 - val_loss: 0.4702 - val_acc: 0.8441
Epoch 227/500
2668/2668 [==============================] - 0s - loss: 0.2402 - acc: 0.9127 - val_loss: 0.4613 - val_acc: 0.8561
Epoch 228/500
2668/2668 [==============================] - 0s - loss: 0.2313 - acc: 0.9145 - val_loss: 0.4468 - val_acc: 0.8576
Epoch 229/500
2668/2668 [==============================] - 0s - loss: 0.2336 - acc: 0.9127 - val_loss: 0.4462 - val_acc: 0.8516
Epoch 230/500
2668/2668 [==============================] - 0s - loss: 0.2404 - acc: 0.9067 - val_loss: 0.4453 - val_acc: 0.8591
Epoch 231/500
2668/2668 [==============================] - 0s - loss: 0.2233 - acc: 0.9168 - val_loss: 0.4589 - val_acc: 0.8576
Epoch 232/500
2668/2668 [==============================] - 0s - loss: 0.2334 - acc: 0.9138 - val_loss: 0.4539 - val_acc: 0.8606
Epoch 233/500
2668/2668 [==============================] - 0s - loss: 0.2350 - acc: 0.9153 - val_loss: 0.4575 - val_acc: 0.8456
Epoch 234/500
2668/2668 [==============================] - 0s - loss: 0.2333 - acc: 0.9149 - val_loss: 0.4731 - val_acc: 0.8606
Epoch 235/500
2668/2668 [==============================] - 0s - loss: 0.2311 - acc: 0.9198 - val_loss: 0.4806 - val_acc: 0.8471
Epoch 236/500
2668/2668 [==============================] - 0s - loss: 0.2352 - acc: 0.9104 - val_loss: 0.5067 - val_acc: 0.8306
Epoch 237/500
2668/2668 [==============================] - 0s - loss: 0.2321 - acc: 0.9089 - val_loss: 0.4740 - val_acc: 0.8546
Epoch 238/500
2668/2668 [==============================] - 0s - loss: 0.2213 - acc: 0.9220 - val_loss: 0.4665 - val_acc: 0.8426
Epoch 239/500
2668/2668 [==============================] - 0s - loss: 0.2244 - acc: 0.9194 - val_loss: 0.4427 - val_acc: 0.8441
Epoch 240/500
2668/2668 [==============================] - 0s - loss: 0.2212 - acc: 0.9187 - val_loss: 0.4403 - val_acc: 0.8576
Epoch 241/500
2668/2668 [==============================] - 0s - loss: 0.2128 - acc: 0.9224 - val_loss: 0.4644 - val_acc: 0.8411
Epoch 242/500
2668/2668 [==============================] - 0s - loss: 0.2286 - acc: 0.9115 - val_loss: 0.4526 - val_acc: 0.8621
Epoch 243/500
2668/2668 [==============================] - 0s - loss: 0.2226 - acc: 0.9179 - val_loss: 0.4806 - val_acc: 0.8351
Epoch 244/500
2668/2668 [==============================] - 0s - loss: 0.2117 - acc: 0.9243 - val_loss: 0.4541 - val_acc: 0.8441
Epoch 245/500
2668/2668 [==============================] - 0s - loss: 0.2332 - acc: 0.9142 - val_loss: 0.4713 - val_acc: 0.8516
Epoch 246/500
2668/2668 [==============================] - 0s - loss: 0.2171 - acc: 0.9164 - val_loss: 0.4563 - val_acc: 0.8561
Epoch 247/500
2668/2668 [==============================] - 0s - loss: 0.2241 - acc: 0.9183 - val_loss: 0.4669 - val_acc: 0.8381
Epoch 248/500
2668/2668 [==============================] - 0s - loss: 0.2261 - acc: 0.9183 - val_loss: 0.4415 - val_acc: 0.8516
Epoch 249/500
2668/2668 [==============================] - 0s - loss: 0.2145 - acc: 0.9239 - val_loss: 0.4410 - val_acc: 0.8546
Epoch 250/500
2668/2668 [==============================] - 0s - loss: 0.2250 - acc: 0.9205 - val_loss: 0.4397 - val_acc: 0.8516
Epoch 251/500
2668/2668 [==============================] - 0s - loss: 0.2273 - acc: 0.9160 - val_loss: 0.4540 - val_acc: 0.8546
Epoch 252/500
2668/2668 [==============================] - 0s - loss: 0.2415 - acc: 0.9097 - val_loss: 0.4676 - val_acc: 0.8486
Epoch 253/500
2668/2668 [==============================] - 0s - loss: 0.2006 - acc: 0.9243 - val_loss: 0.4766 - val_acc: 0.8456
Epoch 254/500
2668/2668 [==============================] - 0s - loss: 0.2075 - acc: 0.9250 - val_loss: 0.4861 - val_acc: 0.8411
Epoch 255/500
2668/2668 [==============================] - 0s - loss: 0.2273 - acc: 0.9179 - val_loss: 0.4564 - val_acc: 0.8501
Epoch 256/500
2668/2668 [==============================] - 0s - loss: 0.2213 - acc: 0.9179 - val_loss: 0.4545 - val_acc: 0.8606
Epoch 257/500
2668/2668 [==============================] - 0s - loss: 0.2212 - acc: 0.9205 - val_loss: 0.4561 - val_acc: 0.8561
Epoch 258/500
2668/2668 [==============================] - 0s - loss: 0.2230 - acc: 0.9190 - val_loss: 0.4524 - val_acc: 0.8531
Epoch 259/500
2668/2668 [==============================] - 0s - loss: 0.2145 - acc: 0.9190 - val_loss: 0.4659 - val_acc: 0.8486
Epoch 260/500
2668/2668 [==============================] - 0s - loss: 0.1977 - acc: 0.9262 - val_loss: 0.4559 - val_acc: 0.8561
Epoch 261/500
2668/2668 [==============================] - 0s - loss: 0.2191 - acc: 0.9243 - val_loss: 0.4554 - val_acc: 0.8501
Epoch 262/500
2668/2668 [==============================] - 0s - loss: 0.2101 - acc: 0.9239 - val_loss: 0.4529 - val_acc: 0.8546
Epoch 263/500
2668/2668 [==============================] - 0s - loss: 0.2253 - acc: 0.9149 - val_loss: 0.4514 - val_acc: 0.8516
Epoch 264/500
2668/2668 [==============================] - 0s - loss: 0.2021 - acc: 0.9265 - val_loss: 0.4513 - val_acc: 0.8606
Epoch 265/500
2668/2668 [==============================] - 0s - loss: 0.2100 - acc: 0.9277 - val_loss: 0.4515 - val_acc: 0.8516
Epoch 266/500
2668/2668 [==============================] - 0s - loss: 0.2086 - acc: 0.9224 - val_loss: 0.4460 - val_acc: 0.8471
Epoch 267/500
2668/2668 [==============================] - 0s - loss: 0.2110 - acc: 0.9262 - val_loss: 0.4588 - val_acc: 0.8471
Epoch 268/500
2668/2668 [==============================] - 0s - loss: 0.2188 - acc: 0.9239 - val_loss: 0.4524 - val_acc: 0.8591
Epoch 269/500
2668/2668 [==============================] - 0s - loss: 0.2179 - acc: 0.9187 - val_loss: 0.4941 - val_acc: 0.8366
Epoch 270/500
2668/2668 [==============================] - 0s - loss: 0.1846 - acc: 0.9325 - val_loss: 0.4721 - val_acc: 0.8426
Epoch 271/500
2668/2668 [==============================] - 0s - loss: 0.2034 - acc: 0.9303 - val_loss: 0.4950 - val_acc: 0.8366
Epoch 272/500
2668/2668 [==============================] - 0s - loss: 0.2010 - acc: 0.9292 - val_loss: 0.4770 - val_acc: 0.8591
Epoch 273/500
2668/2668 [==============================] - 0s - loss: 0.2068 - acc: 0.9235 - val_loss: 0.4666 - val_acc: 0.8531
Epoch 274/500
2668/2668 [==============================] - 0s - loss: 0.2089 - acc: 0.9198 - val_loss: 0.4692 - val_acc: 0.8591
Epoch 275/500
2668/2668 [==============================] - 0s - loss: 0.2259 - acc: 0.9202 - val_loss: 0.4676 - val_acc: 0.8471
Epoch 276/500
2668/2668 [==============================] - 0s - loss: 0.2150 - acc: 0.9209 - val_loss: 0.4523 - val_acc: 0.8531
Epoch 277/500
2668/2668 [==============================] - 0s - loss: 0.2120 - acc: 0.9243 - val_loss: 0.4520 - val_acc: 0.8531
Epoch 278/500
2668/2668 [==============================] - 0s - loss: 0.2012 - acc: 0.9235 - val_loss: 0.4674 - val_acc: 0.8576
Epoch 279/500
2668/2668 [==============================] - 0s - loss: 0.2034 - acc: 0.9232 - val_loss: 0.4671 - val_acc: 0.8591
Epoch 280/500
2668/2668 [==============================] - 0s - loss: 0.2185 - acc: 0.9153 - val_loss: 0.5021 - val_acc: 0.8471
Epoch 281/500
2668/2668 [==============================] - 0s - loss: 0.1969 - acc: 0.9280 - val_loss: 0.4823 - val_acc: 0.8486
Epoch 282/500
2668/2668 [==============================] - 0s - loss: 0.1933 - acc: 0.9303 - val_loss: 0.4704 - val_acc: 0.8516
Epoch 283/500
2668/2668 [==============================] - 0s - loss: 0.1820 - acc: 0.9280 - val_loss: 0.4626 - val_acc: 0.8636
Epoch 284/500
2668/2668 [==============================] - 0s - loss: 0.2146 - acc: 0.9187 - val_loss: 0.4675 - val_acc: 0.8561
Epoch 285/500
2668/2668 [==============================] - 0s - loss: 0.2109 - acc: 0.9228 - val_loss: 0.4597 - val_acc: 0.8561
Epoch 286/500
2668/2668 [==============================] - 0s - loss: 0.2072 - acc: 0.9265 - val_loss: 0.4704 - val_acc: 0.8591
Epoch 287/500
2668/2668 [==============================] - 0s - loss: 0.1871 - acc: 0.9325 - val_loss: 0.4763 - val_acc: 0.8591
Epoch 288/500
2668/2668 [==============================] - 0s - loss: 0.2014 - acc: 0.9247 - val_loss: 0.4642 - val_acc: 0.8621
Epoch 289/500
2668/2668 [==============================] - 0s - loss: 0.1826 - acc: 0.9363 - val_loss: 0.4995 - val_acc: 0.8426
Epoch 290/500
2668/2668 [==============================] - 0s - loss: 0.2199 - acc: 0.9160 - val_loss: 0.4911 - val_acc: 0.8321
Epoch 291/500
2668/2668 [==============================] - 0s - loss: 0.1996 - acc: 0.9198 - val_loss: 0.4775 - val_acc: 0.8516
Epoch 292/500
2668/2668 [==============================] - 0s - loss: 0.2055 - acc: 0.9250 - val_loss: 0.4840 - val_acc: 0.8621
Epoch 293/500
2668/2668 [==============================] - 0s - loss: 0.1978 - acc: 0.9273 - val_loss: 0.4948 - val_acc: 0.8486
Epoch 294/500
2668/2668 [==============================] - 0s - loss: 0.2023 - acc: 0.9228 - val_loss: 0.4772 - val_acc: 0.8531
Epoch 295/500
2668/2668 [==============================] - 0s - loss: 0.1912 - acc: 0.9228 - val_loss: 0.4689 - val_acc: 0.8501
Epoch 296/500
2668/2668 [==============================] - 0s - loss: 0.1872 - acc: 0.9284 - val_loss: 0.4942 - val_acc: 0.8561
Epoch 297/500
2668/2668 [==============================] - 0s - loss: 0.2163 - acc: 0.9164 - val_loss: 0.5162 - val_acc: 0.8366
Epoch 298/500
2668/2668 [==============================] - 0s - loss: 0.2081 - acc: 0.9258 - val_loss: 0.4809 - val_acc: 0.8486
Epoch 299/500
2668/2668 [==============================] - 0s - loss: 0.2046 - acc: 0.9205 - val_loss: 0.4974 - val_acc: 0.8486
Epoch 300/500
2668/2668 [==============================] - 0s - loss: 0.1885 - acc: 0.9273 - val_loss: 0.4747 - val_acc: 0.8486
Epoch 301/500
2668/2668 [==============================] - 0s - loss: 0.1904 - acc: 0.9269 - val_loss: 0.4863 - val_acc: 0.8456
Epoch 302/500
2668/2668 [==============================] - 0s - loss: 0.1975 - acc: 0.9269 - val_loss: 0.4718 - val_acc: 0.8576
Epoch 303/500
2668/2668 [==============================] - 0s - loss: 0.1907 - acc: 0.9310 - val_loss: 0.4823 - val_acc: 0.8606
Epoch 304/500
2668/2668 [==============================] - 0s - loss: 0.1786 - acc: 0.9333 - val_loss: 0.4723 - val_acc: 0.8561
Epoch 305/500
2668/2668 [==============================] - 0s - loss: 0.2125 - acc: 0.9254 - val_loss: 0.4823 - val_acc: 0.8396
Epoch 306/500
2668/2668 [==============================] - 0s - loss: 0.1930 - acc: 0.9310 - val_loss: 0.4961 - val_acc: 0.8591
Epoch 307/500
2668/2668 [==============================] - 0s - loss: 0.2006 - acc: 0.9318 - val_loss: 0.4686 - val_acc: 0.8441
Epoch 308/500
2668/2668 [==============================] - 0s - loss: 0.1999 - acc: 0.9265 - val_loss: 0.4868 - val_acc: 0.8546
Epoch 309/500
2668/2668 [==============================] - 0s - loss: 0.2065 - acc: 0.9247 - val_loss: 0.4697 - val_acc: 0.8561
Epoch 310/500
2668/2668 [==============================] - 0s - loss: 0.2028 - acc: 0.9235 - val_loss: 0.4711 - val_acc: 0.8441
Epoch 311/500
2668/2668 [==============================] - 0s - loss: 0.1983 - acc: 0.9258 - val_loss: 0.4564 - val_acc: 0.8546
Epoch 312/500
2668/2668 [==============================] - 0s - loss: 0.2009 - acc: 0.9243 - val_loss: 0.4633 - val_acc: 0.8591
Epoch 313/500
2668/2668 [==============================] - 0s - loss: 0.1932 - acc: 0.9288 - val_loss: 0.4952 - val_acc: 0.8456
Epoch 314/500
2668/2668 [==============================] - 0s - loss: 0.1953 - acc: 0.9295 - val_loss: 0.4824 - val_acc: 0.8486
Epoch 315/500
2668/2668 [==============================] - 0s - loss: 0.2026 - acc: 0.9239 - val_loss: 0.4832 - val_acc: 0.8531
Epoch 316/500
2668/2668 [==============================] - 0s - loss: 0.2030 - acc: 0.9254 - val_loss: 0.4862 - val_acc: 0.8486
Epoch 317/500
2668/2668 [==============================] - 0s - loss: 0.2013 - acc: 0.9262 - val_loss: 0.4693 - val_acc: 0.8486
Epoch 318/500
2668/2668 [==============================] - 0s - loss: 0.1892 - acc: 0.9292 - val_loss: 0.4803 - val_acc: 0.8396
Epoch 319/500
2668/2668 [==============================] - 0s - loss: 0.2101 - acc: 0.9288 - val_loss: 0.4982 - val_acc: 0.8441
Epoch 320/500
2668/2668 [==============================] - 0s - loss: 0.1994 - acc: 0.9307 - val_loss: 0.4794 - val_acc: 0.8501
Epoch 321/500
2668/2668 [==============================] - 0s - loss: 0.1892 - acc: 0.9322 - val_loss: 0.4429 - val_acc: 0.8441
Epoch 322/500
2668/2668 [==============================] - 0s - loss: 0.1896 - acc: 0.9314 - val_loss: 0.4778 - val_acc: 0.8471
Epoch 323/500
2668/2668 [==============================] - 0s - loss: 0.1862 - acc: 0.9333 - val_loss: 0.4574 - val_acc: 0.8516
Epoch 324/500
2668/2668 [==============================] - 0s - loss: 0.2068 - acc: 0.9273 - val_loss: 0.4596 - val_acc: 0.8531
Epoch 325/500
2668/2668 [==============================] - 0s - loss: 0.1984 - acc: 0.9310 - val_loss: 0.4811 - val_acc: 0.8486
Epoch 326/500
2668/2668 [==============================] - 0s - loss: 0.1837 - acc: 0.9318 - val_loss: 0.4651 - val_acc: 0.8651
Epoch 327/500
2668/2668 [==============================] - 0s - loss: 0.1686 - acc: 0.9415 - val_loss: 0.4923 - val_acc: 0.8456
Epoch 328/500
2668/2668 [==============================] - 0s - loss: 0.2169 - acc: 0.9239 - val_loss: 0.4756 - val_acc: 0.8426
Epoch 329/500
2668/2668 [==============================] - 0s - loss: 0.1935 - acc: 0.9299 - val_loss: 0.4697 - val_acc: 0.8456
Epoch 330/500
2668/2668 [==============================] - 0s - loss: 0.1944 - acc: 0.9295 - val_loss: 0.4705 - val_acc: 0.8546
Epoch 331/500
2668/2668 [==============================] - 0s - loss: 0.1936 - acc: 0.9250 - val_loss: 0.4589 - val_acc: 0.8501
Epoch 332/500
2668/2668 [==============================] - 0s - loss: 0.2002 - acc: 0.9307 - val_loss: 0.4657 - val_acc: 0.8441
Epoch 333/500
2668/2668 [==============================] - 0s - loss: 0.1817 - acc: 0.9352 - val_loss: 0.4822 - val_acc: 0.8501
Epoch 334/500
2668/2668 [==============================] - 0s - loss: 0.2022 - acc: 0.9292 - val_loss: 0.4941 - val_acc: 0.8471
Epoch 335/500
2668/2668 [==============================] - 0s - loss: 0.1870 - acc: 0.9258 - val_loss: 0.4802 - val_acc: 0.8366
Epoch 336/500
2668/2668 [==============================] - 0s - loss: 0.1742 - acc: 0.9404 - val_loss: 0.4938 - val_acc: 0.8306
Epoch 337/500
2668/2668 [==============================] - 0s - loss: 0.2077 - acc: 0.9224 - val_loss: 0.4800 - val_acc: 0.8456
Epoch 338/500
2668/2668 [==============================] - 0s - loss: 0.1764 - acc: 0.9359 - val_loss: 0.4989 - val_acc: 0.8396
Epoch 339/500
2668/2668 [==============================] - 0s - loss: 0.1969 - acc: 0.9235 - val_loss: 0.4943 - val_acc: 0.8426
Epoch 340/500
2668/2668 [==============================] - 0s - loss: 0.2007 - acc: 0.9262 - val_loss: 0.4933 - val_acc: 0.8351
Epoch 341/500
2668/2668 [==============================] - 0s - loss: 0.1704 - acc: 0.9307 - val_loss: 0.4960 - val_acc: 0.8516
Epoch 342/500
2668/2668 [==============================] - 0s - loss: 0.2123 - acc: 0.9243 - val_loss: 0.5032 - val_acc: 0.8456
Epoch 343/500
2668/2668 [==============================] - 0s - loss: 0.1812 - acc: 0.9299 - val_loss: 0.4668 - val_acc: 0.8576
Epoch 344/500
2668/2668 [==============================] - 0s - loss: 0.1750 - acc: 0.9378 - val_loss: 0.4735 - val_acc: 0.8321
Epoch 345/500
2668/2668 [==============================] - 0s - loss: 0.1885 - acc: 0.9254 - val_loss: 0.4881 - val_acc: 0.8486
Epoch 346/500
2668/2668 [==============================] - 0s - loss: 0.1813 - acc: 0.9303 - val_loss: 0.4856 - val_acc: 0.8426
Epoch 347/500
2668/2668 [==============================] - 0s - loss: 0.1781 - acc: 0.9370 - val_loss: 0.4757 - val_acc: 0.8546
Epoch 348/500
2668/2668 [==============================] - 0s - loss: 0.1716 - acc: 0.9438 - val_loss: 0.5113 - val_acc: 0.8426
Epoch 349/500
2668/2668 [==============================] - 0s - loss: 0.1854 - acc: 0.9303 - val_loss: 0.4755 - val_acc: 0.8516
Epoch 350/500
2668/2668 [==============================] - 0s - loss: 0.1716 - acc: 0.9385 - val_loss: 0.4816 - val_acc: 0.8426
Epoch 351/500
2668/2668 [==============================] - 0s - loss: 0.1932 - acc: 0.9314 - val_loss: 0.4928 - val_acc: 0.8501
Epoch 352/500
2668/2668 [==============================] - 0s - loss: 0.1954 - acc: 0.9269 - val_loss: 0.4943 - val_acc: 0.8381
Epoch 353/500
2668/2668 [==============================] - 0s - loss: 0.1721 - acc: 0.9359 - val_loss: 0.4874 - val_acc: 0.8591
Epoch 354/500
2668/2668 [==============================] - 0s - loss: 0.1701 - acc: 0.9374 - val_loss: 0.5045 - val_acc: 0.8426
Epoch 355/500
2668/2668 [==============================] - 0s - loss: 0.1817 - acc: 0.9314 - val_loss: 0.5080 - val_acc: 0.8456
Epoch 356/500
2668/2668 [==============================] - 0s - loss: 0.1734 - acc: 0.9397 - val_loss: 0.4667 - val_acc: 0.8531
Epoch 357/500
2668/2668 [==============================] - 0s - loss: 0.1839 - acc: 0.9325 - val_loss: 0.4843 - val_acc: 0.8426
Epoch 358/500
2668/2668 [==============================] - 0s - loss: 0.1764 - acc: 0.9352 - val_loss: 0.5245 - val_acc: 0.8351
Epoch 359/500
2668/2668 [==============================] - 0s - loss: 0.1822 - acc: 0.9310 - val_loss: 0.4955 - val_acc: 0.8411
Epoch 360/500
2668/2668 [==============================] - 0s - loss: 0.1881 - acc: 0.9355 - val_loss: 0.5252 - val_acc: 0.8471
Epoch 361/500
2668/2668 [==============================] - 0s - loss: 0.1870 - acc: 0.9344 - val_loss: 0.5127 - val_acc: 0.8351
Epoch 362/500
2668/2668 [==============================] - 0s - loss: 0.1805 - acc: 0.9352 - val_loss: 0.4983 - val_acc: 0.8426
Epoch 363/500
2668/2668 [==============================] - 0s - loss: 0.1884 - acc: 0.9310 - val_loss: 0.5095 - val_acc: 0.8336
Epoch 364/500
2668/2668 [==============================] - 0s - loss: 0.1595 - acc: 0.9389 - val_loss: 0.4840 - val_acc: 0.8366
Epoch 365/500
2668/2668 [==============================] - 0s - loss: 0.1726 - acc: 0.9434 - val_loss: 0.4867 - val_acc: 0.8411
Epoch 366/500
2668/2668 [==============================] - 0s - loss: 0.1800 - acc: 0.9348 - val_loss: 0.5002 - val_acc: 0.8381
Epoch 367/500
2668/2668 [==============================] - 0s - loss: 0.1721 - acc: 0.9355 - val_loss: 0.5057 - val_acc: 0.8426
Epoch 368/500
2668/2668 [==============================] - 0s - loss: 0.1979 - acc: 0.9250 - val_loss: 0.5016 - val_acc: 0.8396
Epoch 369/500
2668/2668 [==============================] - 0s - loss: 0.1817 - acc: 0.9318 - val_loss: 0.5061 - val_acc: 0.8396
Epoch 370/500
2668/2668 [==============================] - 0s - loss: 0.1711 - acc: 0.9404 - val_loss: 0.4863 - val_acc: 0.8501
Epoch 371/500
2668/2668 [==============================] - 0s - loss: 0.1635 - acc: 0.9415 - val_loss: 0.5327 - val_acc: 0.8321
Epoch 372/500
2668/2668 [==============================] - 0s - loss: 0.2025 - acc: 0.9247 - val_loss: 0.5031 - val_acc: 0.8366
Epoch 373/500
2668/2668 [==============================] - 0s - loss: 0.1718 - acc: 0.9340 - val_loss: 0.4917 - val_acc: 0.8456
Epoch 374/500
2668/2668 [==============================] - 0s - loss: 0.1739 - acc: 0.9412 - val_loss: 0.4938 - val_acc: 0.8501
Epoch 375/500
2668/2668 [==============================] - 0s - loss: 0.1904 - acc: 0.9292 - val_loss: 0.5191 - val_acc: 0.8426
Epoch 376/500
2668/2668 [==============================] - 0s - loss: 0.1840 - acc: 0.9344 - val_loss: 0.5221 - val_acc: 0.8441
Epoch 377/500
2668/2668 [==============================] - 0s - loss: 0.1630 - acc: 0.9363 - val_loss: 0.5082 - val_acc: 0.8501
Epoch 378/500
2668/2668 [==============================] - 0s - loss: 0.1618 - acc: 0.9412 - val_loss: 0.4899 - val_acc: 0.8576
Epoch 379/500
2668/2668 [==============================] - 0s - loss: 0.1667 - acc: 0.9397 - val_loss: 0.5077 - val_acc: 0.8471
Epoch 380/500
2668/2668 [==============================] - 0s - loss: 0.1599 - acc: 0.9442 - val_loss: 0.4955 - val_acc: 0.8441
Epoch 381/500
2668/2668 [==============================] - 0s - loss: 0.1832 - acc: 0.9329 - val_loss: 0.4845 - val_acc: 0.8486
Epoch 382/500
2668/2668 [==============================] - 0s - loss: 0.1943 - acc: 0.9280 - val_loss: 0.4805 - val_acc: 0.8531
Epoch 383/500
2668/2668 [==============================] - 0s - loss: 0.1815 - acc: 0.9400 - val_loss: 0.4836 - val_acc: 0.8411
Epoch 384/500
2668/2668 [==============================] - 0s - loss: 0.1796 - acc: 0.9363 - val_loss: 0.5226 - val_acc: 0.8486
Epoch 385/500
2668/2668 [==============================] - 0s - loss: 0.1786 - acc: 0.9299 - val_loss: 0.4907 - val_acc: 0.8516
Epoch 386/500
2668/2668 [==============================] - 0s - loss: 0.2183 - acc: 0.9228 - val_loss: 0.5009 - val_acc: 0.8531
Epoch 387/500
2668/2668 [==============================] - 0s - loss: 0.1934 - acc: 0.9280 - val_loss: 0.4994 - val_acc: 0.8516
Epoch 388/500
2668/2668 [==============================] - 0s - loss: 0.1709 - acc: 0.9423 - val_loss: 0.5013 - val_acc: 0.8456
Epoch 389/500
2668/2668 [==============================] - 0s - loss: 0.1690 - acc: 0.9427 - val_loss: 0.4857 - val_acc: 0.8546
Epoch 390/500
2668/2668 [==============================] - 0s - loss: 0.1753 - acc: 0.9400 - val_loss: 0.4827 - val_acc: 0.8501
Epoch 391/500
2668/2668 [==============================] - 0s - loss: 0.1755 - acc: 0.9389 - val_loss: 0.4846 - val_acc: 0.8516
Epoch 392/500
2668/2668 [==============================] - 0s - loss: 0.1823 - acc: 0.9382 - val_loss: 0.5027 - val_acc: 0.8396
Epoch 393/500
2668/2668 [==============================] - 0s - loss: 0.1850 - acc: 0.9303 - val_loss: 0.4990 - val_acc: 0.8381
Epoch 394/500
2668/2668 [==============================] - 0s - loss: 0.1588 - acc: 0.9430 - val_loss: 0.4799 - val_acc: 0.8486
Epoch 395/500
2668/2668 [==============================] - 0s - loss: 0.1848 - acc: 0.9329 - val_loss: 0.4869 - val_acc: 0.8411
Epoch 396/500
2668/2668 [==============================] - 0s - loss: 0.1635 - acc: 0.9423 - val_loss: 0.4850 - val_acc: 0.8516
Epoch 397/500
2668/2668 [==============================] - 0s - loss: 0.1820 - acc: 0.9340 - val_loss: 0.4900 - val_acc: 0.8441
Epoch 398/500
2668/2668 [==============================] - 0s - loss: 0.1897 - acc: 0.9355 - val_loss: 0.4770 - val_acc: 0.8441
Epoch 399/500
2668/2668 [==============================] - 0s - loss: 0.1799 - acc: 0.9397 - val_loss: 0.4963 - val_acc: 0.8381
Epoch 400/500
2668/2668 [==============================] - 0s - loss: 0.1625 - acc: 0.9419 - val_loss: 0.5021 - val_acc: 0.8486
Epoch 401/500
2668/2668 [==============================] - 0s - loss: 0.1702 - acc: 0.9427 - val_loss: 0.4966 - val_acc: 0.8441
Epoch 402/500
2668/2668 [==============================] - 0s - loss: 0.1829 - acc: 0.9329 - val_loss: 0.4800 - val_acc: 0.8456
Epoch 403/500
2668/2668 [==============================] - 0s - loss: 0.1786 - acc: 0.9322 - val_loss: 0.4789 - val_acc: 0.8441
Epoch 404/500
2668/2668 [==============================] - 0s - loss: 0.1676 - acc: 0.9378 - val_loss: 0.5013 - val_acc: 0.8501
Epoch 405/500
2668/2668 [==============================] - 0s - loss: 0.1742 - acc: 0.9367 - val_loss: 0.4845 - val_acc: 0.8456
Epoch 406/500
2668/2668 [==============================] - 0s - loss: 0.1747 - acc: 0.9385 - val_loss: 0.4997 - val_acc: 0.8486
Epoch 407/500
2668/2668 [==============================] - 0s - loss: 0.1648 - acc: 0.9408 - val_loss: 0.4862 - val_acc: 0.8471
Epoch 408/500
2668/2668 [==============================] - 0s - loss: 0.1762 - acc: 0.9337 - val_loss: 0.4792 - val_acc: 0.8471
Epoch 409/500
2668/2668 [==============================] - 0s - loss: 0.1757 - acc: 0.9337 - val_loss: 0.4769 - val_acc: 0.8411
Epoch 410/500
2668/2668 [==============================] - 0s - loss: 0.1731 - acc: 0.9363 - val_loss: 0.4777 - val_acc: 0.8426
Epoch 411/500
2668/2668 [==============================] - 0s - loss: 0.1840 - acc: 0.9329 - val_loss: 0.4922 - val_acc: 0.8381
Epoch 412/500
2668/2668 [==============================] - 0s - loss: 0.1676 - acc: 0.9427 - val_loss: 0.4918 - val_acc: 0.8396
Epoch 413/500
2668/2668 [==============================] - 0s - loss: 0.1708 - acc: 0.9389 - val_loss: 0.5165 - val_acc: 0.8411
Epoch 414/500
2668/2668 [==============================] - 0s - loss: 0.1855 - acc: 0.9337 - val_loss: 0.5192 - val_acc: 0.8486
Epoch 415/500
2668/2668 [==============================] - 0s - loss: 0.1484 - acc: 0.9460 - val_loss: 0.4951 - val_acc: 0.8426
Epoch 416/500
2668/2668 [==============================] - 0s - loss: 0.1762 - acc: 0.9355 - val_loss: 0.4965 - val_acc: 0.8471
Epoch 417/500
2668/2668 [==============================] - 0s - loss: 0.1762 - acc: 0.9337 - val_loss: 0.5085 - val_acc: 0.8531
Epoch 418/500
2668/2668 [==============================] - 0s - loss: 0.1615 - acc: 0.9397 - val_loss: 0.5173 - val_acc: 0.8501
Epoch 419/500
2668/2668 [==============================] - 0s - loss: 0.1605 - acc: 0.9378 - val_loss: 0.4941 - val_acc: 0.8456
Epoch 420/500
2668/2668 [==============================] - 0s - loss: 0.1723 - acc: 0.9374 - val_loss: 0.5119 - val_acc: 0.8441
Epoch 421/500
2668/2668 [==============================] - 0s - loss: 0.1759 - acc: 0.9359 - val_loss: 0.5011 - val_acc: 0.8381
Epoch 422/500
2668/2668 [==============================] - 0s - loss: 0.1701 - acc: 0.9408 - val_loss: 0.4984 - val_acc: 0.8486
Epoch 423/500
2668/2668 [==============================] - 0s - loss: 0.1841 - acc: 0.9333 - val_loss: 0.5202 - val_acc: 0.8426
Epoch 424/500
2668/2668 [==============================] - 0s - loss: 0.1739 - acc: 0.9378 - val_loss: 0.5056 - val_acc: 0.8546
Epoch 425/500
2668/2668 [==============================] - 0s - loss: 0.1496 - acc: 0.9419 - val_loss: 0.5002 - val_acc: 0.8381
Epoch 426/500
2668/2668 [==============================] - 0s - loss: 0.1591 - acc: 0.9412 - val_loss: 0.4919 - val_acc: 0.8441
Epoch 427/500
2668/2668 [==============================] - 0s - loss: 0.1578 - acc: 0.9434 - val_loss: 0.5231 - val_acc: 0.8441
Epoch 428/500
2668/2668 [==============================] - 0s - loss: 0.1713 - acc: 0.9325 - val_loss: 0.5279 - val_acc: 0.8531
Epoch 429/500
2668/2668 [==============================] - 0s - loss: 0.1614 - acc: 0.9434 - val_loss: 0.5215 - val_acc: 0.8471
Epoch 430/500
2668/2668 [==============================] - 0s - loss: 0.1863 - acc: 0.9337 - val_loss: 0.4975 - val_acc: 0.8441
Epoch 431/500
2668/2668 [==============================] - 0s - loss: 0.1406 - acc: 0.9472 - val_loss: 0.5025 - val_acc: 0.8486
Epoch 432/500
2668/2668 [==============================] - 0s - loss: 0.1695 - acc: 0.9382 - val_loss: 0.4942 - val_acc: 0.8561
Epoch 433/500
2668/2668 [==============================] - 0s - loss: 0.1720 - acc: 0.9382 - val_loss: 0.4672 - val_acc: 0.8516
Epoch 434/500
2668/2668 [==============================] - 0s - loss: 0.1418 - acc: 0.9472 - val_loss: 0.5188 - val_acc: 0.8456
Epoch 435/500
2668/2668 [==============================] - 0s - loss: 0.1711 - acc: 0.9374 - val_loss: 0.4808 - val_acc: 0.8591
Epoch 436/500
2668/2668 [==============================] - 0s - loss: 0.1530 - acc: 0.9475 - val_loss: 0.4821 - val_acc: 0.8486
Epoch 437/500
2668/2668 [==============================] - 0s - loss: 0.1735 - acc: 0.9404 - val_loss: 0.5060 - val_acc: 0.8516
Epoch 438/500
2668/2668 [==============================] - 0s - loss: 0.1638 - acc: 0.9389 - val_loss: 0.4898 - val_acc: 0.8561
Epoch 439/500
2668/2668 [==============================] - 0s - loss: 0.1841 - acc: 0.9325 - val_loss: 0.5209 - val_acc: 0.8441
Epoch 440/500
2668/2668 [==============================] - 0s - loss: 0.1726 - acc: 0.9438 - val_loss: 0.5043 - val_acc: 0.8426
Epoch 441/500
2668/2668 [==============================] - 0s - loss: 0.1751 - acc: 0.9367 - val_loss: 0.5137 - val_acc: 0.8351
Epoch 442/500
2668/2668 [==============================] - 0s - loss: 0.1875 - acc: 0.9322 - val_loss: 0.5146 - val_acc: 0.8501
Epoch 443/500
2668/2668 [==============================] - 0s - loss: 0.1660 - acc: 0.9412 - val_loss: 0.4834 - val_acc: 0.8381
Epoch 444/500
2668/2668 [==============================] - 0s - loss: 0.1851 - acc: 0.9295 - val_loss: 0.5125 - val_acc: 0.8486
Epoch 445/500
2668/2668 [==============================] - 0s - loss: 0.1701 - acc: 0.9363 - val_loss: 0.5323 - val_acc: 0.8381
Epoch 446/500
2668/2668 [==============================] - 0s - loss: 0.1702 - acc: 0.9382 - val_loss: 0.4744 - val_acc: 0.8516
Epoch 447/500
2668/2668 [==============================] - 0s - loss: 0.1547 - acc: 0.9449 - val_loss: 0.4812 - val_acc: 0.8621
Epoch 448/500
2668/2668 [==============================] - 0s - loss: 0.1796 - acc: 0.9329 - val_loss: 0.5239 - val_acc: 0.8471
Epoch 449/500
2668/2668 [==============================] - 0s - loss: 0.1588 - acc: 0.9427 - val_loss: 0.5249 - val_acc: 0.8501
Epoch 450/500
2668/2668 [==============================] - 0s - loss: 0.1633 - acc: 0.9408 - val_loss: 0.5178 - val_acc: 0.8456
Epoch 451/500
2668/2668 [==============================] - 0s - loss: 0.1743 - acc: 0.9340 - val_loss: 0.5148 - val_acc: 0.8441
Epoch 452/500
2668/2668 [==============================] - 0s - loss: 0.1564 - acc: 0.9449 - val_loss: 0.5126 - val_acc: 0.8501
Epoch 453/500
2668/2668 [==============================] - 0s - loss: 0.1680 - acc: 0.9408 - val_loss: 0.5336 - val_acc: 0.8426
Epoch 454/500
2668/2668 [==============================] - 0s - loss: 0.1565 - acc: 0.9393 - val_loss: 0.5218 - val_acc: 0.8471
Epoch 455/500
2668/2668 [==============================] - 0s - loss: 0.1757 - acc: 0.9367 - val_loss: 0.5398 - val_acc: 0.8486
Epoch 456/500
2668/2668 [==============================] - 0s - loss: 0.1870 - acc: 0.9329 - val_loss: 0.5316 - val_acc: 0.8486
Epoch 457/500
2668/2668 [==============================] - 0s - loss: 0.1529 - acc: 0.9412 - val_loss: 0.5298 - val_acc: 0.8441
Epoch 458/500
2668/2668 [==============================] - 0s - loss: 0.1605 - acc: 0.9442 - val_loss: 0.5027 - val_acc: 0.8501
Epoch 459/500
2668/2668 [==============================] - 0s - loss: 0.1555 - acc: 0.9427 - val_loss: 0.4978 - val_acc: 0.8471
Epoch 460/500
2668/2668 [==============================] - 0s - loss: 0.1539 - acc: 0.9400 - val_loss: 0.5109 - val_acc: 0.8591
Epoch 461/500
2668/2668 [==============================] - 0s - loss: 0.1620 - acc: 0.9427 - val_loss: 0.5264 - val_acc: 0.8501
Epoch 462/500
2668/2668 [==============================] - 0s - loss: 0.1608 - acc: 0.9412 - val_loss: 0.5194 - val_acc: 0.8546
Epoch 463/500
2668/2668 [==============================] - 0s - loss: 0.2003 - acc: 0.9322 - val_loss: 0.5251 - val_acc: 0.8456
Epoch 464/500
2668/2668 [==============================] - 0s - loss: 0.1627 - acc: 0.9427 - val_loss: 0.5529 - val_acc: 0.8336
Epoch 465/500
2668/2668 [==============================] - 0s - loss: 0.1675 - acc: 0.9457 - val_loss: 0.5391 - val_acc: 0.8411
Epoch 466/500
2668/2668 [==============================] - 0s - loss: 0.1749 - acc: 0.9367 - val_loss: 0.5241 - val_acc: 0.8531
Epoch 467/500
2668/2668 [==============================] - 0s - loss: 0.1769 - acc: 0.9367 - val_loss: 0.5120 - val_acc: 0.8426
Epoch 468/500
2668/2668 [==============================] - 0s - loss: 0.1595 - acc: 0.9370 - val_loss: 0.5088 - val_acc: 0.8561
Epoch 469/500
2668/2668 [==============================] - 0s - loss: 0.1571 - acc: 0.9397 - val_loss: 0.5268 - val_acc: 0.8411
Epoch 470/500
2668/2668 [==============================] - 0s - loss: 0.1555 - acc: 0.9400 - val_loss: 0.5215 - val_acc: 0.8441
Epoch 471/500
2668/2668 [==============================] - 0s - loss: 0.1576 - acc: 0.9453 - val_loss: 0.5064 - val_acc: 0.8501
Epoch 472/500
2668/2668 [==============================] - 0s - loss: 0.1507 - acc: 0.9434 - val_loss: 0.5578 - val_acc: 0.8531
Epoch 473/500
2668/2668 [==============================] - 0s - loss: 0.1775 - acc: 0.9363 - val_loss: 0.5164 - val_acc: 0.8456
Epoch 474/500
2668/2668 [==============================] - 0s - loss: 0.1737 - acc: 0.9415 - val_loss: 0.5407 - val_acc: 0.8411
Epoch 475/500
2668/2668 [==============================] - 0s - loss: 0.1854 - acc: 0.9295 - val_loss: 0.5038 - val_acc: 0.8501
Epoch 476/500
2668/2668 [==============================] - 0s - loss: 0.1713 - acc: 0.9355 - val_loss: 0.5065 - val_acc: 0.8471
Epoch 477/500
2668/2668 [==============================] - 0s - loss: 0.1689 - acc: 0.9434 - val_loss: 0.4947 - val_acc: 0.8531
Epoch 478/500
2668/2668 [==============================] - 0s - loss: 0.1527 - acc: 0.9464 - val_loss: 0.5162 - val_acc: 0.8531
Epoch 479/500
2668/2668 [==============================] - 0s - loss: 0.1642 - acc: 0.9453 - val_loss: 0.5366 - val_acc: 0.8516
Epoch 480/500
2668/2668 [==============================] - 0s - loss: 0.1607 - acc: 0.9453 - val_loss: 0.5167 - val_acc: 0.8501
Epoch 481/500
2668/2668 [==============================] - 0s - loss: 0.1754 - acc: 0.9378 - val_loss: 0.5345 - val_acc: 0.8486
Epoch 482/500
2668/2668 [==============================] - 0s - loss: 0.1787 - acc: 0.9382 - val_loss: 0.5020 - val_acc: 0.8501
Epoch 483/500
2668/2668 [==============================] - 0s - loss: 0.1615 - acc: 0.9374 - val_loss: 0.4997 - val_acc: 0.8441
Epoch 484/500
2668/2668 [==============================] - 0s - loss: 0.1672 - acc: 0.9370 - val_loss: 0.5168 - val_acc: 0.8471
Epoch 485/500
2668/2668 [==============================] - 0s - loss: 0.1802 - acc: 0.9348 - val_loss: 0.5338 - val_acc: 0.8456
Epoch 486/500
2668/2668 [==============================] - 0s - loss: 0.1516 - acc: 0.9501 - val_loss: 0.5289 - val_acc: 0.8486
Epoch 487/500
2668/2668 [==============================] - 0s - loss: 0.1553 - acc: 0.9445 - val_loss: 0.5311 - val_acc: 0.8351
Epoch 488/500
2668/2668 [==============================] - 0s - loss: 0.1465 - acc: 0.9487 - val_loss: 0.5277 - val_acc: 0.8411
Epoch 489/500
2668/2668 [==============================] - 0s - loss: 0.1565 - acc: 0.9434 - val_loss: 0.5005 - val_acc: 0.8576
Epoch 490/500
2668/2668 [==============================] - 0s - loss: 0.1459 - acc: 0.9490 - val_loss: 0.4979 - val_acc: 0.8561
Epoch 491/500
2668/2668 [==============================] - 0s - loss: 0.1585 - acc: 0.9389 - val_loss: 0.5176 - val_acc: 0.8471
Epoch 492/500
2668/2668 [==============================] - 0s - loss: 0.1508 - acc: 0.9468 - val_loss: 0.5219 - val_acc: 0.8546
Epoch 493/500
2668/2668 [==============================] - 0s - loss: 0.1618 - acc: 0.9397 - val_loss: 0.4949 - val_acc: 0.8531
Epoch 494/500
2668/2668 [==============================] - 0s - loss: 0.1437 - acc: 0.9501 - val_loss: 0.5255 - val_acc: 0.8486
Epoch 495/500
2668/2668 [==============================] - 0s - loss: 0.1616 - acc: 0.9442 - val_loss: 0.5159 - val_acc: 0.8516
Epoch 496/500
2668/2668 [==============================] - 0s - loss: 0.1576 - acc: 0.9404 - val_loss: 0.5131 - val_acc: 0.8456
Epoch 497/500
2668/2668 [==============================] - 0s - loss: 0.1623 - acc: 0.9434 - val_loss: 0.5327 - val_acc: 0.8456
Epoch 498/500
2668/2668 [==============================] - 0s - loss: 0.1559 - acc: 0.9472 - val_loss: 0.5274 - val_acc: 0.8486
Epoch 499/500
2668/2668 [==============================] - 0s - loss: 0.1733 - acc: 0.9400 - val_loss: 0.5324 - val_acc: 0.8471
Epoch 500/500
2668/2668 [==============================] - 0s - loss: 0.1536 - acc: 0.9457 - val_loss: 0.5321 - val_acc: 0.8381
CPU times: user 1min 50s, sys: 14.6 s, total: 2min 4s
Wall time: 1min 18s

Issue 1: We now have two separate models

  • How do we evaluate the combined pipeline?
  • How do we save the model for later prediction use / deployment?
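Both are addressed below by stacking the convolutional base and the classifier into a single end-to-end model: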

In [33]:
from keras import models

combined_model = models.Sequential()
combined_model.add(vgg_model)
combined_model.add(classifier_model)

In [34]:
combined_model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
vgg16 (Model)                (None, 2, 2, 512)         14714688  
_________________________________________________________________
model_1 (Model)              (None, 6)                 526086    
=================================================================
Total params: 15,240,774
Trainable params: 15,240,774
Non-trainable params: 0
_________________________________________________________________
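As expected, the total is just the sum of the two stacked sub-models: 14,714,688 (VGG16 base) + 526,086 (classifier) = 15,240,774 parameters, all of them currently trainable.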

In [35]:
combined_model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
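Note that the stacked model needs its own compile call before evaluate can be used on it.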

In [36]:
train_loss, train_accuracy = combined_model.evaluate(X_train, y_train, batch_size=BATCH_SIZE)
train_loss, train_accuracy


3335/3335 [==============================] - 7s     
Out[36]:
(0.11308885638558792, 0.96731634484953077)

In [37]:
test_loss, test_accuracy = combined_model.evaluate(X_test, y_test, batch_size=BATCH_SIZE)
test_loss, test_accuracy


834/834 [==============================] - 2s     
Out[37]:
(0.49626786088486086, 0.85731415182566473)

In [38]:
# the complete set of original, non-augmented speed limit signs
original_loss, original_accuracy = combined_model.evaluate(original_images, original_labels, batch_size=BATCH_SIZE)
original_loss, original_accuracy


379/379 [==============================] - 1s     
Out[38]:
(0.50608633509726508, 0.85488126774891071)

In [39]:
# combined_model.save('vgg16-retrained.hdf5')
combined_model.save('vgg16-augmented-retrained.hdf5')

In [40]:
# !ls -lh vgg16-retrained.hdf5
!ls -lh vgg16-augmented-retrained.hdf5


-rw-rw-r-- 1 ubuntu ubuntu 59M Oct  1 10:00 vgg16-augmented-retrained.hdf5
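This answers the second question from Issue 1: for prediction or deployment the saved file can simply be reloaded in a fresh process. A minimal sketch, assuming new images are preprocessed to the same 64x64 float format as the training data (the zero batch is only a stand-in):

from keras.models import load_model
import numpy as np

restored_model = load_model('vgg16-augmented-retrained.hdf5')

# stand-in for a batch of real, preprocessed 64x64 RGB images
X_new = np.zeros((1, 64, 64, 3))
class_probabilities = restored_model.predict(X_new)
predicted_classes = np.argmax(class_probabilities, axis=1)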

Issue 2: Whatever we do, we overfit; much more than 85% on the test set seems out of reach

  • for non-augmented data it might even be as low as 70%
  • first thing we could try: maybe the 2x2 bottleneck feature maps are too small; we could compensate by scaling the images up to 128x128 or even 256x256 (see the sketch below)
    • this can indeed bring the test score up to 90%
    • however, it would make the model incompatible with the 64x64 input of the other models and complicate deployment, so we keep 64x64
  • maybe the features extracted from ImageNet are too different from speed limit signs?
  • or is the classifier too simple for the complex features?
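As a side note on the scaling idea above, a minimal sketch of what re-instantiating the convolutional base for larger inputs could look like (assuming the base was built with keras.applications; vgg_model_128 is a made-up name):

from keras.applications.vgg16 import VGG16

# rebuild the convolutional base for 128x128 inputs;
# INPUT_SHAPE and the data loading would have to change accordingly
vgg_model_128 = VGG16(weights='imagenet', include_top=False, input_shape=(128, 128, 3))
print(vgg_model_128.output_shape)  # (None, 4, 4, 512): a 4x4 bottleneck instead of 2x2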

Let us try some fine-tuning

First we freeze all but the last convolutional block


In [41]:
len(vgg_model.layers)


Out[41]:
19
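These 19 layers are the input layer plus VGG16's 13 convolutional and 5 max-pooling layers; the fully connected top was left off.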

In [42]:
vgg_model.layers


Out[42]:
[<keras.engine.topology.InputLayer at 0x7f85b65566d8>,
 <keras.layers.convolutional.Conv2D at 0x7f85b6556a58>,
 <keras.layers.convolutional.Conv2D at 0x7f85b6556c18>,
 <keras.layers.pooling.MaxPooling2D at 0x7f8568aaef98>,
 <keras.layers.convolutional.Conv2D at 0x7f85551a2be0>,
 <keras.layers.convolutional.Conv2D at 0x7f85551b6e80>,
 <keras.layers.pooling.MaxPooling2D at 0x7f85551c8518>,
 <keras.layers.convolutional.Conv2D at 0x7f85551717f0>,
 <keras.layers.convolutional.Conv2D at 0x7f8555171048>,
 <keras.layers.convolutional.Conv2D at 0x7f8555196668>,
 <keras.layers.pooling.MaxPooling2D at 0x7f8555141cf8>,
 <keras.layers.convolutional.Conv2D at 0x7f85550ea240>,
 <keras.layers.convolutional.Conv2D at 0x7f85550ead68>,
 <keras.layers.convolutional.Conv2D at 0x7f8555111630>,
 <keras.layers.pooling.MaxPooling2D at 0x7f85550a6320>,
 <keras.layers.convolutional.Conv2D at 0x7f85550cf940>,
 <keras.layers.convolutional.Conv2D at 0x7f85550cf240>,
 <keras.layers.convolutional.Conv2D at 0x7f85550777b8>,
 <keras.layers.pooling.MaxPooling2D at 0x7f855509de48>]
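The list shows VGG16's five convolutional blocks, each closed by a MaxPooling2D layer; indices 15 to 18 form the last block, which we want to keep trainable.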

In [43]:
first_conv_layer = vgg_model.layers[1]

In [44]:
first_conv_layer.trainable


Out[44]:
True

In [45]:
# set the first 15 layers (everything up to the last conv block)
# to non-trainable (their weights will not be updated),
# so the general features are kept and we (hopefully) reduce overfitting
non_trainable_layers = vgg_model.layers[:15]

In [46]:
non_trainable_layers


Out[46]:
[<keras.engine.topology.InputLayer at 0x7f85b65566d8>,
 <keras.layers.convolutional.Conv2D at 0x7f85b6556a58>,
 <keras.layers.convolutional.Conv2D at 0x7f85b6556c18>,
 <keras.layers.pooling.MaxPooling2D at 0x7f8568aaef98>,
 <keras.layers.convolutional.Conv2D at 0x7f85551a2be0>,
 <keras.layers.convolutional.Conv2D at 0x7f85551b6e80>,
 <keras.layers.pooling.MaxPooling2D at 0x7f85551c8518>,
 <keras.layers.convolutional.Conv2D at 0x7f85551717f0>,
 <keras.layers.convolutional.Conv2D at 0x7f8555171048>,
 <keras.layers.convolutional.Conv2D at 0x7f8555196668>,
 <keras.layers.pooling.MaxPooling2D at 0x7f8555141cf8>,
 <keras.layers.convolutional.Conv2D at 0x7f85550ea240>,
 <keras.layers.convolutional.Conv2D at 0x7f85550ead68>,
 <keras.layers.convolutional.Conv2D at 0x7f8555111630>,
 <keras.layers.pooling.MaxPooling2D at 0x7f85550a6320>]

In [47]:
for layer in non_trainable_layers:
    layer.trainable = False

In [48]:
first_conv_layer.trainable


Out[48]:
False

We then tweak the complete model by very slowly re-training the classifier together with the final convolutional block; note that the changed trainable flags only take effect once the model is recompiled below

  • a slow learning rate keeps us from ruining the previous good results
  • everything else stays in place
    • earlier layers hopefully already encode common feature channels
    • less risk of overfitting
      • earlier layers are more general
      • the model has far more capacity than the training data needs and would likely memorize every detail
    • training is also a little bit faster

This may still take quite a while
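If runtime is a concern, the run can also be stopped early once validation loss stops improving; a minimal sketch, assuming the same fit call as below (patience=10 is an arbitrary choice):

early_stopping = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
# then pass it along with the TensorBoard callback:
# combined_model.fit(X_train, y_train, epochs=150, batch_size=BATCH_SIZE,
#                    validation_split=0.2, callbacks=[tb_callback, early_stopping])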


In [49]:
from keras import optimizers

# compile the model with an SGD/momentum optimizer
# and a very slow learning rate:
# updates stay small and non-adaptive, so we do not ruin what was learned before
combined_model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.SGD(lr=1e-4, momentum=0.9),
              metrics=['accuracy'])

In [50]:
!rm -r tf_log

In [51]:
%time combined_model.fit(X_train, y_train, epochs=150, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback])
# non-augmented data is cheap to retrain on, so we could try a few more epochs
# %time combined_model.fit(X_train, y_train, epochs=1000, batch_size=BATCH_SIZE, validation_split=0.2, callbacks=[tb_callback])


Train on 2668 samples, validate on 667 samples
Epoch 1/150
2668/2668 [==============================] - 9s - loss: 0.1467 - acc: 0.9509 - val_loss: 0.5544 - val_acc: 0.8471
Epoch 2/150
2668/2668 [==============================] - 6s - loss: 0.1582 - acc: 0.9460 - val_loss: 0.5605 - val_acc: 0.8561
Epoch 3/150
2668/2668 [==============================] - 6s - loss: 0.1675 - acc: 0.9449 - val_loss: 0.5098 - val_acc: 0.8576
Epoch 4/150
2668/2668 [==============================] - 6s - loss: 0.1700 - acc: 0.9389 - val_loss: 0.5663 - val_acc: 0.8426
Epoch 5/150
2668/2668 [==============================] - 6s - loss: 0.1463 - acc: 0.9483 - val_loss: 0.5123 - val_acc: 0.8591
Epoch 6/150
2668/2668 [==============================] - 6s - loss: 0.1522 - acc: 0.9528 - val_loss: 0.4983 - val_acc: 0.8546
Epoch 7/150
2668/2668 [==============================] - 6s - loss: 0.1399 - acc: 0.9520 - val_loss: 0.5469 - val_acc: 0.8516
Epoch 8/150
2668/2668 [==============================] - 6s - loss: 0.1350 - acc: 0.9509 - val_loss: 0.5881 - val_acc: 0.8471
Epoch 9/150
2668/2668 [==============================] - 6s - loss: 0.1322 - acc: 0.9539 - val_loss: 0.5356 - val_acc: 0.8636
Epoch 10/150
2668/2668 [==============================] - 6s - loss: 0.1092 - acc: 0.9588 - val_loss: 0.5056 - val_acc: 0.8651
Epoch 11/150
2668/2668 [==============================] - 6s - loss: 0.1057 - acc: 0.9648 - val_loss: 0.5250 - val_acc: 0.8456
Epoch 12/150
2668/2668 [==============================] - 6s - loss: 0.1036 - acc: 0.9599 - val_loss: 0.5349 - val_acc: 0.8591
Epoch 13/150
2668/2668 [==============================] - 6s - loss: 0.0829 - acc: 0.9711 - val_loss: 0.5592 - val_acc: 0.8486
Epoch 14/150
2668/2668 [==============================] - 6s - loss: 0.1056 - acc: 0.9636 - val_loss: 0.4869 - val_acc: 0.8651
Epoch 15/150
2668/2668 [==============================] - 6s - loss: 0.0844 - acc: 0.9678 - val_loss: 0.5264 - val_acc: 0.8621
Epoch 16/150
2668/2668 [==============================] - 6s - loss: 0.0901 - acc: 0.9693 - val_loss: 0.4923 - val_acc: 0.8711
Epoch 17/150
2668/2668 [==============================] - 6s - loss: 0.0858 - acc: 0.9670 - val_loss: 0.5351 - val_acc: 0.8546
Epoch 18/150
2668/2668 [==============================] - 6s - loss: 0.0854 - acc: 0.9700 - val_loss: 0.5206 - val_acc: 0.8681
Epoch 19/150
2668/2668 [==============================] - 6s - loss: 0.0706 - acc: 0.9738 - val_loss: 0.5086 - val_acc: 0.8711
Epoch 20/150
2668/2668 [==============================] - 6s - loss: 0.0768 - acc: 0.9734 - val_loss: 0.5779 - val_acc: 0.8621
Epoch 21/150
2668/2668 [==============================] - 6s - loss: 0.0965 - acc: 0.9648 - val_loss: 0.5039 - val_acc: 0.8621
Epoch 22/150
2668/2668 [==============================] - 6s - loss: 0.0736 - acc: 0.9723 - val_loss: 0.4974 - val_acc: 0.8726
Epoch 23/150
2668/2668 [==============================] - 6s - loss: 0.0816 - acc: 0.9719 - val_loss: 0.5183 - val_acc: 0.8711
Epoch 24/150
2668/2668 [==============================] - 6s - loss: 0.0691 - acc: 0.9738 - val_loss: 0.4679 - val_acc: 0.8801
Epoch 25/150
2668/2668 [==============================] - 6s - loss: 0.0847 - acc: 0.9711 - val_loss: 0.5114 - val_acc: 0.8741
Epoch 26/150
2668/2668 [==============================] - 6s - loss: 0.0686 - acc: 0.9790 - val_loss: 0.5096 - val_acc: 0.8681
Epoch 27/150
2668/2668 [==============================] - 6s - loss: 0.0656 - acc: 0.9798 - val_loss: 0.5163 - val_acc: 0.8651
Epoch 28/150
2668/2668 [==============================] - 6s - loss: 0.0588 - acc: 0.9783 - val_loss: 0.5082 - val_acc: 0.8711
Epoch 29/150
2668/2668 [==============================] - 6s - loss: 0.0667 - acc: 0.9749 - val_loss: 0.4632 - val_acc: 0.8816
Epoch 30/150
2668/2668 [==============================] - 6s - loss: 0.0742 - acc: 0.9741 - val_loss: 0.4623 - val_acc: 0.8831
Epoch 31/150
2668/2668 [==============================] - 6s - loss: 0.0619 - acc: 0.9756 - val_loss: 0.4774 - val_acc: 0.8771
Epoch 32/150
2668/2668 [==============================] - 6s - loss: 0.0730 - acc: 0.9719 - val_loss: 0.4973 - val_acc: 0.8786
Epoch 33/150
2668/2668 [==============================] - 6s - loss: 0.0566 - acc: 0.9816 - val_loss: 0.4835 - val_acc: 0.8816
Epoch 34/150
2668/2668 [==============================] - 6s - loss: 0.0677 - acc: 0.9764 - val_loss: 0.5121 - val_acc: 0.8786
Epoch 35/150
2668/2668 [==============================] - 6s - loss: 0.0557 - acc: 0.9809 - val_loss: 0.5178 - val_acc: 0.8816
Epoch 36/150
2668/2668 [==============================] - 6s - loss: 0.0531 - acc: 0.9790 - val_loss: 0.5161 - val_acc: 0.8726
Epoch 37/150
2668/2668 [==============================] - 6s - loss: 0.0609 - acc: 0.9801 - val_loss: 0.4706 - val_acc: 0.8741
Epoch 38/150
2668/2668 [==============================] - 6s - loss: 0.0613 - acc: 0.9809 - val_loss: 0.5163 - val_acc: 0.8846
Epoch 39/150
2668/2668 [==============================] - 6s - loss: 0.0704 - acc: 0.9783 - val_loss: 0.5086 - val_acc: 0.8756
Epoch 40/150
2668/2668 [==============================] - 6s - loss: 0.0505 - acc: 0.9824 - val_loss: 0.4764 - val_acc: 0.8771
Epoch 41/150
2668/2668 [==============================] - 6s - loss: 0.0483 - acc: 0.9828 - val_loss: 0.4875 - val_acc: 0.8801
Epoch 42/150
2668/2668 [==============================] - 6s - loss: 0.0536 - acc: 0.9816 - val_loss: 0.5006 - val_acc: 0.8786
Epoch 43/150
2668/2668 [==============================] - 6s - loss: 0.0483 - acc: 0.9820 - val_loss: 0.4842 - val_acc: 0.8876
Epoch 44/150
2668/2668 [==============================] - 6s - loss: 0.0365 - acc: 0.9861 - val_loss: 0.5087 - val_acc: 0.8906
Epoch 45/150
2668/2668 [==============================] - 6s - loss: 0.0387 - acc: 0.9854 - val_loss: 0.4860 - val_acc: 0.8876
Epoch 46/150
2668/2668 [==============================] - 6s - loss: 0.0441 - acc: 0.9839 - val_loss: 0.4930 - val_acc: 0.8876
Epoch 47/150
2668/2668 [==============================] - 6s - loss: 0.0503 - acc: 0.9843 - val_loss: 0.4876 - val_acc: 0.8816
Epoch 48/150
2668/2668 [==============================] - 6s - loss: 0.0484 - acc: 0.9831 - val_loss: 0.4541 - val_acc: 0.8966
Epoch 49/150
2668/2668 [==============================] - 6s - loss: 0.0419 - acc: 0.9839 - val_loss: 0.4874 - val_acc: 0.8981
Epoch 50/150
2668/2668 [==============================] - 6s - loss: 0.0476 - acc: 0.9839 - val_loss: 0.4921 - val_acc: 0.8921
Epoch 51/150
2668/2668 [==============================] - 6s - loss: 0.0471 - acc: 0.9839 - val_loss: 0.5276 - val_acc: 0.8846
Epoch 52/150
2668/2668 [==============================] - 6s - loss: 0.0466 - acc: 0.9846 - val_loss: 0.5371 - val_acc: 0.8831
Epoch 53/150
2668/2668 [==============================] - 6s - loss: 0.0519 - acc: 0.9831 - val_loss: 0.5136 - val_acc: 0.8876
Epoch 54/150
2668/2668 [==============================] - 6s - loss: 0.0554 - acc: 0.9816 - val_loss: 0.4664 - val_acc: 0.8921
Epoch 55/150
2668/2668 [==============================] - 6s - loss: 0.0438 - acc: 0.9854 - val_loss: 0.4549 - val_acc: 0.8861
Epoch 56/150
2668/2668 [==============================] - 6s - loss: 0.0408 - acc: 0.9880 - val_loss: 0.5001 - val_acc: 0.8831
Epoch 57/150
2668/2668 [==============================] - 6s - loss: 0.0294 - acc: 0.9933 - val_loss: 0.4996 - val_acc: 0.8906
Epoch 58/150
2668/2668 [==============================] - 6s - loss: 0.0421 - acc: 0.9854 - val_loss: 0.4743 - val_acc: 0.8891
Epoch 59/150
2668/2668 [==============================] - 6s - loss: 0.0480 - acc: 0.9835 - val_loss: 0.5070 - val_acc: 0.8816
Epoch 60/150
2668/2668 [==============================] - 6s - loss: 0.0503 - acc: 0.9850 - val_loss: 0.4663 - val_acc: 0.8966
Epoch 61/150
2668/2668 [==============================] - 6s - loss: 0.0369 - acc: 0.9861 - val_loss: 0.4519 - val_acc: 0.8936
Epoch 62/150
2668/2668 [==============================] - 6s - loss: 0.0386 - acc: 0.9876 - val_loss: 0.4938 - val_acc: 0.8906
Epoch 63/150
2668/2668 [==============================] - 6s - loss: 0.0299 - acc: 0.9895 - val_loss: 0.4744 - val_acc: 0.8891
Epoch 64/150
2668/2668 [==============================] - 6s - loss: 0.0290 - acc: 0.9921 - val_loss: 0.4875 - val_acc: 0.8981
Epoch 65/150
2668/2668 [==============================] - 6s - loss: 0.0312 - acc: 0.9895 - val_loss: 0.4937 - val_acc: 0.8906
Epoch 66/150
2668/2668 [==============================] - 6s - loss: 0.0310 - acc: 0.9903 - val_loss: 0.5108 - val_acc: 0.8936
Epoch 67/150
2668/2668 [==============================] - 6s - loss: 0.0419 - acc: 0.9869 - val_loss: 0.5490 - val_acc: 0.8831
Epoch 68/150
2668/2668 [==============================] - 6s - loss: 0.0290 - acc: 0.9895 - val_loss: 0.5083 - val_acc: 0.8831
Epoch 69/150
2668/2668 [==============================] - 6s - loss: 0.0394 - acc: 0.9854 - val_loss: 0.5056 - val_acc: 0.8891
Epoch 70/150
2668/2668 [==============================] - 6s - loss: 0.0300 - acc: 0.9906 - val_loss: 0.5107 - val_acc: 0.8966
Epoch 71/150
2668/2668 [==============================] - 6s - loss: 0.0311 - acc: 0.9891 - val_loss: 0.5062 - val_acc: 0.8876
Epoch 72/150
2668/2668 [==============================] - 6s - loss: 0.0361 - acc: 0.9876 - val_loss: 0.5020 - val_acc: 0.8771
Epoch 73/150
2668/2668 [==============================] - 6s - loss: 0.0301 - acc: 0.9891 - val_loss: 0.4806 - val_acc: 0.8936
Epoch 74/150
2668/2668 [==============================] - 6s - loss: 0.0314 - acc: 0.9899 - val_loss: 0.4917 - val_acc: 0.8996
Epoch 75/150
2668/2668 [==============================] - 6s - loss: 0.0280 - acc: 0.9914 - val_loss: 0.5174 - val_acc: 0.8891
Epoch 76/150
2668/2668 [==============================] - 6s - loss: 0.0335 - acc: 0.9884 - val_loss: 0.5086 - val_acc: 0.8951
Epoch 77/150
2668/2668 [==============================] - 6s - loss: 0.0296 - acc: 0.9903 - val_loss: 0.5079 - val_acc: 0.8921
Epoch 78/150
2668/2668 [==============================] - 6s - loss: 0.0319 - acc: 0.9873 - val_loss: 0.5754 - val_acc: 0.8801
Epoch 79/150
2668/2668 [==============================] - 6s - loss: 0.0347 - acc: 0.9899 - val_loss: 0.4971 - val_acc: 0.8846
Epoch 80/150
2668/2668 [==============================] - 6s - loss: 0.0280 - acc: 0.9903 - val_loss: 0.4813 - val_acc: 0.8876
Epoch 81/150
2668/2668 [==============================] - 6s - loss: 0.0262 - acc: 0.9899 - val_loss: 0.4917 - val_acc: 0.8936
Epoch 82/150
2668/2668 [==============================] - 6s - loss: 0.0318 - acc: 0.9918 - val_loss: 0.5179 - val_acc: 0.8891
Epoch 83/150
2668/2668 [==============================] - 6s - loss: 0.0220 - acc: 0.9933 - val_loss: 0.4852 - val_acc: 0.9070
Epoch 84/150
2668/2668 [==============================] - 6s - loss: 0.0303 - acc: 0.9906 - val_loss: 0.4948 - val_acc: 0.8906
Epoch 85/150
2668/2668 [==============================] - 6s - loss: 0.0301 - acc: 0.9899 - val_loss: 0.5155 - val_acc: 0.8981
Epoch 86/150
2668/2668 [==============================] - 6s - loss: 0.0261 - acc: 0.9918 - val_loss: 0.5212 - val_acc: 0.8906
Epoch 87/150
2668/2668 [==============================] - 6s - loss: 0.0323 - acc: 0.9891 - val_loss: 0.5014 - val_acc: 0.8996
Epoch 88/150
2668/2668 [==============================] - 6s - loss: 0.0223 - acc: 0.9918 - val_loss: 0.5092 - val_acc: 0.8951
Epoch 89/150
2668/2668 [==============================] - 6s - loss: 0.0233 - acc: 0.9944 - val_loss: 0.4958 - val_acc: 0.9025
Epoch 90/150
2668/2668 [==============================] - 6s - loss: 0.0361 - acc: 0.9895 - val_loss: 0.5287 - val_acc: 0.8861
Epoch 91/150
2668/2668 [==============================] - 6s - loss: 0.0347 - acc: 0.9884 - val_loss: 0.4925 - val_acc: 0.8921
Epoch 92/150
2668/2668 [==============================] - 6s - loss: 0.0296 - acc: 0.9903 - val_loss: 0.5766 - val_acc: 0.8861
Epoch 93/150
2668/2668 [==============================] - 6s - loss: 0.0344 - acc: 0.9903 - val_loss: 0.5244 - val_acc: 0.8891
Epoch 94/150
2668/2668 [==============================] - 6s - loss: 0.0225 - acc: 0.9925 - val_loss: 0.5915 - val_acc: 0.8876
Epoch 95/150
2668/2668 [==============================] - 6s - loss: 0.0191 - acc: 0.9940 - val_loss: 0.5426 - val_acc: 0.8936
Epoch 96/150
2668/2668 [==============================] - 6s - loss: 0.0299 - acc: 0.9891 - val_loss: 0.5569 - val_acc: 0.8846
Epoch 97/150
2668/2668 [==============================] - 6s - loss: 0.0258 - acc: 0.9914 - val_loss: 0.5196 - val_acc: 0.8891
Epoch 98/150
2668/2668 [==============================] - 6s - loss: 0.0222 - acc: 0.9921 - val_loss: 0.5077 - val_acc: 0.9025
Epoch 99/150
2668/2668 [==============================] - 7s - loss: 0.0252 - acc: 0.9929 - val_loss: 0.5014 - val_acc: 0.8936
Epoch 100/150
2668/2668 [==============================] - 6s - loss: 0.0279 - acc: 0.9906 - val_loss: 0.5018 - val_acc: 0.8981
Epoch 101/150
2668/2668 [==============================] - 6s - loss: 0.0251 - acc: 0.9925 - val_loss: 0.4950 - val_acc: 0.8936
Epoch 102/150
2668/2668 [==============================] - 6s - loss: 0.0236 - acc: 0.9925 - val_loss: 0.5103 - val_acc: 0.8891
Epoch 103/150
2668/2668 [==============================] - 6s - loss: 0.0244 - acc: 0.9921 - val_loss: 0.4829 - val_acc: 0.9055
Epoch 104/150
2668/2668 [==============================] - 7s - loss: 0.0238 - acc: 0.9940 - val_loss: 0.5210 - val_acc: 0.8951
Epoch 105/150
2668/2668 [==============================] - 7s - loss: 0.0157 - acc: 0.9970 - val_loss: 0.5266 - val_acc: 0.9010
Epoch 106/150
2668/2668 [==============================] - 7s - loss: 0.0263 - acc: 0.9895 - val_loss: 0.5531 - val_acc: 0.8921
Epoch 107/150
2668/2668 [==============================] - 7s - loss: 0.0249 - acc: 0.9918 - val_loss: 0.5276 - val_acc: 0.8921
Epoch 108/150
2668/2668 [==============================] - 7s - loss: 0.0235 - acc: 0.9914 - val_loss: 0.4971 - val_acc: 0.9100
Epoch 109/150
2668/2668 [==============================] - 7s - loss: 0.0275 - acc: 0.9918 - val_loss: 0.4940 - val_acc: 0.8996
Epoch 110/150
2668/2668 [==============================] - 6s - loss: 0.0220 - acc: 0.9929 - val_loss: 0.4882 - val_acc: 0.8936
Epoch 111/150
2668/2668 [==============================] - 6s - loss: 0.0265 - acc: 0.9936 - val_loss: 0.4780 - val_acc: 0.8996
Epoch 112/150
2668/2668 [==============================] - 6s - loss: 0.0204 - acc: 0.9918 - val_loss: 0.4816 - val_acc: 0.9025
Epoch 113/150
2668/2668 [==============================] - 6s - loss: 0.0204 - acc: 0.9925 - val_loss: 0.4731 - val_acc: 0.8996
Epoch 114/150
2668/2668 [==============================] - 6s - loss: 0.0224 - acc: 0.9929 - val_loss: 0.5010 - val_acc: 0.8951
Epoch 115/150
2668/2668 [==============================] - 6s - loss: 0.0178 - acc: 0.9936 - val_loss: 0.4943 - val_acc: 0.8966
Epoch 116/150
2668/2668 [==============================] - 6s - loss: 0.0258 - acc: 0.9918 - val_loss: 0.5080 - val_acc: 0.8936
Epoch 117/150
2668/2668 [==============================] - 6s - loss: 0.0187 - acc: 0.9944 - val_loss: 0.5100 - val_acc: 0.8981
Epoch 118/150
2668/2668 [==============================] - 6s - loss: 0.0223 - acc: 0.9918 - val_loss: 0.4994 - val_acc: 0.9055
Epoch 119/150
2668/2668 [==============================] - 6s - loss: 0.0232 - acc: 0.9914 - val_loss: 0.4865 - val_acc: 0.8996
Epoch 120/150
2668/2668 [==============================] - 6s - loss: 0.0181 - acc: 0.9944 - val_loss: 0.4975 - val_acc: 0.8981
Epoch 121/150
2668/2668 [==============================] - 6s - loss: 0.0184 - acc: 0.9951 - val_loss: 0.4976 - val_acc: 0.9025
Epoch 122/150
2668/2668 [==============================] - 6s - loss: 0.0168 - acc: 0.9948 - val_loss: 0.5160 - val_acc: 0.8966
Epoch 123/150
2668/2668 [==============================] - 6s - loss: 0.0207 - acc: 0.9944 - val_loss: 0.4957 - val_acc: 0.8996
Epoch 124/150
2668/2668 [==============================] - 6s - loss: 0.0162 - acc: 0.9959 - val_loss: 0.4941 - val_acc: 0.9070
Epoch 125/150
2668/2668 [==============================] - 6s - loss: 0.0176 - acc: 0.9936 - val_loss: 0.5374 - val_acc: 0.8996
Epoch 126/150
2668/2668 [==============================] - 6s - loss: 0.0171 - acc: 0.9948 - val_loss: 0.4947 - val_acc: 0.9025
Epoch 127/150
2668/2668 [==============================] - 6s - loss: 0.0174 - acc: 0.9951 - val_loss: 0.4958 - val_acc: 0.8981
Epoch 128/150
2668/2668 [==============================] - 6s - loss: 0.0167 - acc: 0.9948 - val_loss: 0.4835 - val_acc: 0.8951
Epoch 129/150
2668/2668 [==============================] - 6s - loss: 0.0218 - acc: 0.9936 - val_loss: 0.4493 - val_acc: 0.8981
Epoch 130/150
2668/2668 [==============================] - 6s - loss: 0.0248 - acc: 0.9929 - val_loss: 0.4862 - val_acc: 0.8951
Epoch 131/150
2668/2668 [==============================] - 6s - loss: 0.0196 - acc: 0.9929 - val_loss: 0.4992 - val_acc: 0.9025
Epoch 132/150
2668/2668 [==============================] - 6s - loss: 0.0159 - acc: 0.9936 - val_loss: 0.5076 - val_acc: 0.9025
Epoch 133/150
2668/2668 [==============================] - 6s - loss: 0.0255 - acc: 0.9925 - val_loss: 0.5199 - val_acc: 0.9040
Epoch 134/150
2668/2668 [==============================] - 6s - loss: 0.0128 - acc: 0.9959 - val_loss: 0.5309 - val_acc: 0.8951
Epoch 135/150
2668/2668 [==============================] - 6s - loss: 0.0163 - acc: 0.9970 - val_loss: 0.5430 - val_acc: 0.8966
Epoch 136/150
2668/2668 [==============================] - 6s - loss: 0.0151 - acc: 0.9948 - val_loss: 0.5435 - val_acc: 0.8981
Epoch 137/150
2668/2668 [==============================] - 6s - loss: 0.0193 - acc: 0.9918 - val_loss: 0.5547 - val_acc: 0.9010
Epoch 138/150
2668/2668 [==============================] - 6s - loss: 0.0191 - acc: 0.9951 - val_loss: 0.5317 - val_acc: 0.9010
Epoch 139/150
2668/2668 [==============================] - 6s - loss: 0.0176 - acc: 0.9940 - val_loss: 0.4968 - val_acc: 0.9070
Epoch 140/150
2668/2668 [==============================] - 6s - loss: 0.0138 - acc: 0.9966 - val_loss: 0.5141 - val_acc: 0.9085
Epoch 141/150
2668/2668 [==============================] - 6s - loss: 0.0157 - acc: 0.9955 - val_loss: 0.4865 - val_acc: 0.9040
Epoch 142/150
2668/2668 [==============================] - 6s - loss: 0.0179 - acc: 0.9936 - val_loss: 0.5198 - val_acc: 0.8981
Epoch 143/150
2668/2668 [==============================] - 6s - loss: 0.0235 - acc: 0.9918 - val_loss: 0.5158 - val_acc: 0.8981
Epoch 144/150
2668/2668 [==============================] - 6s - loss: 0.0166 - acc: 0.9936 - val_loss: 0.5325 - val_acc: 0.8981
Epoch 145/150
2668/2668 [==============================] - 6s - loss: 0.0187 - acc: 0.9921 - val_loss: 0.4894 - val_acc: 0.8981
Epoch 146/150
2668/2668 [==============================] - 6s - loss: 0.0170 - acc: 0.9936 - val_loss: 0.4978 - val_acc: 0.9025
Epoch 147/150
2668/2668 [==============================] - 6s - loss: 0.0197 - acc: 0.9936 - val_loss: 0.4851 - val_acc: 0.9055
Epoch 148/150
2668/2668 [==============================] - 6s - loss: 0.0130 - acc: 0.9963 - val_loss: 0.4931 - val_acc: 0.9055
Epoch 149/150
2668/2668 [==============================] - 6s - loss: 0.0183 - acc: 0.9944 - val_loss: 0.5061 - val_acc: 0.9025
Epoch 150/150
2668/2668 [==============================] - 6s - loss: 0.0124 - acc: 0.9966 - val_loss: 0.5104 - val_acc: 0.9010
CPU times: user 3min 44s, sys: 34.4 s, total: 4min 18s
Wall time: 17min 25s
Out[51]:
<keras.callbacks.History at 0x7f8518cb8da0>
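Besides TensorBoard, the per-epoch metrics can be read off the History object returned by fit; a minimal sketch, assuming the fit above just ran in this session (Keras also keeps its History callback on the model itself):

# 'acc' / 'val_acc' are the Keras 2 metric names seen in the log above
hist = combined_model.history.history
plt.plot(hist['acc'], label='training accuracy')
plt.plot(hist['val_acc'], label='validation accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()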

Around 90% validation accuracy is quite an improvement; it might increase further if we trained a bit longer

Metrics for Augmented Data

[TensorBoard plots: accuracy and validation accuracy over the fine-tuning run]


In [52]:
train_loss, train_accuracy = combined_model.evaluate(X_train, y_train, batch_size=BATCH_SIZE)
train_loss, train_accuracy


3335/3335 [==============================] - 6s     
Out[52]:
(0.10222922369810358, 0.98020990570445821)

In [53]:
test_loss, test_accuracy = combined_model.evaluate(X_test, y_test, batch_size=BATCH_SIZE)
test_loss, test_accuracy


834/834 [==============================] - 1s     
Out[53]:
(0.42116924984563742, 0.89928059569365681)

In [54]:
# the complete set of original, non-augmented speed limit signs
original_loss, original_accuracy = combined_model.evaluate(original_images, original_labels, batch_size=BATCH_SIZE)
original_loss, original_accuracy


379/379 [==============================] - 0s     
Out[54]:
(0.45832276033694636, 0.90501321683143876)
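Compared to the feature-extraction-only results above, fine-tuning lifts test accuracy from about 85.7% to 89.9% and accuracy on the original non-augmented signs from 85.5% to 90.5%.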

In [55]:
combined_model.save('vgg16-augmented-retrained-fine-tuned.hdf5')
# combined_model.save('vgg16-retrained-fine-tuned.hdf5')

In [56]:
# !ls -lh vgg16-retrained-fine-tuned.hdf5
!ls -lh vgg16-augmented-retrained-fine-tuned.hdf5


-rw-rw-r-- 1 ubuntu ubuntu 88M Oct  1 10:22 vgg16-augmented-retrained-fine-tuned.hdf5

Hands-On: Experiment with all parameters
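A hedged sketch of the main knobs from this notebook and where to turn them, assuming the variables defined in the cells above (the values shown are simply the ones used so far, not recommendations):

# how much of the VGG base stays frozen: [:15] freezes everything up to the
# last conv block; [:11] would also fine-tune block 4
non_trainable_layers = vgg_model.layers[:15]

# fine-tuning optimizer: a lower learning rate is safer but slower
fine_tuning_optimizer = optimizers.SGD(lr=1e-4, momentum=0.9)

# training length, batch size, and input resolution (see Issue 2)
epochs = 150
BATCH_SIZE = 100
INPUT_SHAPE = (64, 64)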