use VGG19 to train on CIFAR-10

import packages


In [1]:
import os
import keras
import numpy as np
import tensorflow as tf
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D, GlobalAveragePooling2D, AveragePooling2D
from keras.initializers import he_normal
from keras import optimizers
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.utils.data_utils import get_file


Using TensorFlow backend.

force the use of a specific GPU and limit its memory usage


In [2]:
os.environ["CUDA_VISIBLE_DEVICES"] = "3"
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 1.0
set_session(tf.Session(config=config))
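
on TensorFlow 2.x the ConfigProto/Session API above no longer exists; a rough equivalent (a sketch, not used in this notebook) is


In [ ]:
import tensorflow as tf

gpus = tf.config.list_physical_devices('GPU')
if gpus:
    tf.config.set_visible_devices(gpus[0], 'GPU')            # pin a single GPU
    tf.config.experimental.set_memory_growth(gpus[0], True)  # allocate memory on demand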

initialize some parameters


In [4]:
num_classes  = 10
batch_size   = 128
epochs       = 170
iterations   = 391   # steps per epoch: ceil(50000 / 128)
dropout      = 0.5
weight_decay = 0.0015
log_filepath = r'./vgg19_retrain_WHE_wd/'

preprocess the images


In [5]:
def color_preprocessing(x_train, x_test):
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # subtract the per-channel ImageNet means used by the original VGG models
    x_train[:, :, :, 0] -= 123.680   # R
    x_train[:, :, :, 1] -= 116.779   # G
    x_train[:, :, :, 2] -= 103.939   # B
    x_test[:, :, :, 0] -= 123.680
    x_test[:, :, :, 1] -= 116.779
    x_test[:, :, :, 2] -= 103.939

    return x_train, x_test
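
a quick check (a sketch): the constants above are the ImageNet channel means, not CIFAR-10's own means, so the centered data keeps small nonzero per-channel means


In [ ]:
(x_tr, _), (x_te, _) = cifar10.load_data()
x_tr, x_te = color_preprocessing(x_tr, x_te)
print(x_tr.mean(axis=(0, 1, 2)))   # small but nonzero per-channel means remain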

set the learning rate schedule


In [6]:
def scheduler(epoch):
    # step decay: 0.1 until epoch 80, 0.01 until epoch 120, 0.001 afterwards
    learning_rate_init = 0.1
    if epoch > 80:
        learning_rate_init = 0.01
    if epoch > 120:
        learning_rate_init = 0.001
    return learning_rate_init
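
a quick sanity check of the schedule (Keras passes 0-based epoch indices, so the drops land at displayed epochs 82 and 122 in the training log below)


In [ ]:
for e in (0, 80, 81, 120, 121):
    print(e, scheduler(e))
# -> 0.1, 0.1, 0.01, 0.01, 0.001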

define the network


In [7]:
def VGG19():
    model = Sequential()

    # Block 1
    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block1_conv1', input_shape=x_train.shape[1:]))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block1_conv2'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool'))

    # Block 2
    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block2_conv1'))
    model.add(Activation('relu'))
    model.add(Conv2D(128, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block2_conv2'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool'))

    # Block 3
    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block3_conv1'))
    model.add(Activation('relu'))
    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block3_conv2'))
    model.add(Activation('relu'))
    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block3_conv3'))
    model.add(Activation('relu'))
    model.add(Conv2D(256, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block3_conv4'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool'))

    # Block 4
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block4_conv1'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block4_conv2'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block4_conv3'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block4_conv4'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool'))

    # Block 5
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block5_conv1'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block5_conv2'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block5_conv3'))
    model.add(Activation('relu'))
    model.add(Conv2D(512, (3, 3), padding='same', kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='block5_conv4'))
    model.add(Activation('relu'))

    # classifier head modified for CIFAR-10: block5_pool is dropped, and the
    # fc/prediction layers are renamed (fc_cifa10, predictions_cifa10) so they
    # train from scratch; fc2 keeps its original name and shape and therefore
    # picks up ImageNet weights when loading by name below
    model.add(Flatten(name='flatten'))
    model.add(Dense(4096, use_bias=True, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc_cifa10'))
    model.add(Activation('relu'))
    model.add(Dropout(dropout))
    model.add(Dense(4096, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='fc2'))  
    model.add(Activation('relu'))
    model.add(Dropout(dropout))      
    model.add(Dense(10, kernel_regularizer=keras.regularizers.l2(weight_decay), kernel_initializer=he_normal(), name='predictions_cifa10'))        
    model.add(Activation('softmax'))
    
    return model
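
note that VGG19() reads the global x_train for its input shape, so it can only be called after the data is loaded below; the flattened feature size can be checked with simple arithmetic


In [ ]:
# four 2x2 max-pools shrink 32x32 to 2x2; block5 keeps 512 channels and has
# no pool here, so Flatten sees 2 * 2 * 512 = 2048 features (matching the summary)
print(32 // 2 ** 4, 2 * 2 * 512)   # -> 2 2048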

load data and build the model


In [ ]:
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# color preprocessing
x_train, x_test = color_preprocessing(x_train, x_test)

# build network
WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels.h5'
filepath = get_file('vgg19_weights_tf_dim_ordering_tf_kernels.h5', WEIGHTS_PATH, cache_subdir='models')

model = VGG19()
print(model.summary())

# load pretrained ImageNet weights from VGG19, matching layers by name
model.load_weights(filepath, by_name=True)

# -------- optimizer setting -------- #
sgd = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
block1_conv1 (Conv2D)        (None, 32, 32, 64)        1792      
_________________________________________________________________
activation_1 (Activation)    (None, 32, 32, 64)        0         
_________________________________________________________________
block1_conv2 (Conv2D)        (None, 32, 32, 64)        36928     
_________________________________________________________________
activation_2 (Activation)    (None, 32, 32, 64)        0         
_________________________________________________________________
block1_pool (MaxPooling2D)   (None, 16, 16, 64)        0         
_________________________________________________________________
block2_conv1 (Conv2D)        (None, 16, 16, 128)       73856     
_________________________________________________________________
activation_3 (Activation)    (None, 16, 16, 128)       0         
_________________________________________________________________
block2_conv2 (Conv2D)        (None, 16, 16, 128)       147584    
_________________________________________________________________
activation_4 (Activation)    (None, 16, 16, 128)       0         
_________________________________________________________________
block2_pool (MaxPooling2D)   (None, 8, 8, 128)         0         
_________________________________________________________________
block3_conv1 (Conv2D)        (None, 8, 8, 256)         295168    
_________________________________________________________________
activation_5 (Activation)    (None, 8, 8, 256)         0         
_________________________________________________________________
block3_conv2 (Conv2D)        (None, 8, 8, 256)         590080    
_________________________________________________________________
activation_6 (Activation)    (None, 8, 8, 256)         0         
_________________________________________________________________
block3_conv3 (Conv2D)        (None, 8, 8, 256)         590080    
_________________________________________________________________
activation_7 (Activation)    (None, 8, 8, 256)         0         
_________________________________________________________________
block3_conv4 (Conv2D)        (None, 8, 8, 256)         590080    
_________________________________________________________________
activation_8 (Activation)    (None, 8, 8, 256)         0         
_________________________________________________________________
block3_pool (MaxPooling2D)   (None, 4, 4, 256)         0         
_________________________________________________________________
block4_conv1 (Conv2D)        (None, 4, 4, 512)         1180160   
_________________________________________________________________
activation_9 (Activation)    (None, 4, 4, 512)         0         
_________________________________________________________________
block4_conv2 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
activation_10 (Activation)   (None, 4, 4, 512)         0         
_________________________________________________________________
block4_conv3 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
activation_11 (Activation)   (None, 4, 4, 512)         0         
_________________________________________________________________
block4_conv4 (Conv2D)        (None, 4, 4, 512)         2359808   
_________________________________________________________________
activation_12 (Activation)   (None, 4, 4, 512)         0         
_________________________________________________________________
block4_pool (MaxPooling2D)   (None, 2, 2, 512)         0         
_________________________________________________________________
block5_conv1 (Conv2D)        (None, 2, 2, 512)         2359808   
_________________________________________________________________
activation_13 (Activation)   (None, 2, 2, 512)         0         
_________________________________________________________________
block5_conv2 (Conv2D)        (None, 2, 2, 512)         2359808   
_________________________________________________________________
activation_14 (Activation)   (None, 2, 2, 512)         0         
_________________________________________________________________
block5_conv3 (Conv2D)        (None, 2, 2, 512)         2359808   
_________________________________________________________________
activation_15 (Activation)   (None, 2, 2, 512)         0         
_________________________________________________________________
block5_conv4 (Conv2D)        (None, 2, 2, 512)         2359808   
_________________________________________________________________
activation_16 (Activation)   (None, 2, 2, 512)         0         
_________________________________________________________________
flatten (Flatten)            (None, 2048)              0         
_________________________________________________________________
fc_cifa10 (Dense)            (None, 4096)              8392704   
_________________________________________________________________
activation_17 (Activation)   (None, 4096)              0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 4096)              0         
_________________________________________________________________
fc2 (Dense)                  (None, 4096)              16781312  
_________________________________________________________________
activation_18 (Activation)   (None, 4096)              0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 4096)              0         
_________________________________________________________________
predictions_cifa10 (Dense)   (None, 10)                40970     
_________________________________________________________________
activation_19 (Activation)   (None, 10)                0         
=================================================================
Total params: 45,239,370
Trainable params: 45,239,370
Non-trainable params: 0
_________________________________________________________________
None
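
to see which layers actually matched the checkpoint when loading by name, the weight file can be inspected directly (a sketch, assuming h5py is available and the file carries Keras-style layer_names attributes)


In [ ]:
import h5py

with h5py.File(filepath, 'r') as f:
    names_in_file = [n.decode('utf8') if isinstance(n, bytes) else n
                     for n in f.attrs['layer_names']]
model_names = {layer.name for layer in model.layers}
# layers present in both the checkpoint and the model received pretrained weights
print(sorted(n for n in names_in_file if n in model_names))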

set up the TensorBoard and learning rate callbacks


In [ ]:
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr, tb_cb]
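
optionally, a ModelCheckpoint callback could be appended to keep the best validation weights ('vgg19_best.h5' is a hypothetical filename, not used elsewhere in this notebook)


In [ ]:
from keras.callbacks import ModelCheckpoint

ckpt = ModelCheckpoint('vgg19_best.h5', monitor='val_acc', save_best_only=True)
# cbks = [change_lr, tb_cb, ckpt]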

set up real-time data augmentation


In [9]:
print('Using real-time data augmentation.')
datagen = ImageDataGenerator(horizontal_flip=True,
                             width_shift_range=0.125,
                             height_shift_range=0.125,
                             fill_mode='constant', cval=0.)

# fit() is only needed for featurewise statistics (featurewise_center,
# zca_whitening, ...); with these transforms it computes nothing, but is harmless
datagen.fit(x_train)


Using real-time data augmentation.
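
a shift range of 0.125 corresponds to at most 4 pixels on a 32x32 image; drawing one batch shows the generator's output shapes (a sketch)


In [ ]:
batch_x, batch_y = next(datagen.flow(x_train, y_train, batch_size=9))
print(batch_x.shape, batch_y.shape)   # (9, 32, 32, 3) (9, 10)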

train


In [10]:
model.fit_generator(datagen.flow(x_train, y_train,batch_size=batch_size),
                    steps_per_epoch=iterations,
                    epochs=epochs,
                    callbacks=cbks,
                    validation_data=(x_test, y_test))
model.save('vgg19_retrain.h5')


Epoch 1/170
391/391 [==============================] - 48s - loss: 6.8878 - acc: 0.6139 - val_loss: 3.5075 - val_acc: 0.4492
Epoch 2/170
391/391 [==============================] - 45s - loss: 2.1108 - acc: 0.6682 - val_loss: 2.2759 - val_acc: 0.4886
Epoch 3/170
391/391 [==============================] - 45s - loss: 1.6630 - acc: 0.6808 - val_loss: 2.8778 - val_acc: 0.3666
Epoch 4/170
391/391 [==============================] - 45s - loss: 1.6069 - acc: 0.6889 - val_loss: 2.2547 - val_acc: 0.4986
Epoch 5/170
391/391 [==============================] - 45s - loss: 1.5893 - acc: 0.6978 - val_loss: 1.7269 - val_acc: 0.6550
Epoch 6/170
391/391 [==============================] - 45s - loss: 1.5958 - acc: 0.7011 - val_loss: 2.6924 - val_acc: 0.3892
Epoch 7/170
391/391 [==============================] - 45s - loss: 1.5822 - acc: 0.7062 - val_loss: 2.4918 - val_acc: 0.4452
Epoch 8/170
391/391 [==============================] - 45s - loss: 1.5929 - acc: 0.7060 - val_loss: 2.1708 - val_acc: 0.5210
Epoch 9/170
391/391 [==============================] - 45s - loss: 1.5911 - acc: 0.7083 - val_loss: 1.9936 - val_acc: 0.5769
Epoch 10/170
391/391 [==============================] - 45s - loss: 1.5936 - acc: 0.7140 - val_loss: 2.0213 - val_acc: 0.5507
Epoch 11/170
391/391 [==============================] - 45s - loss: 1.5887 - acc: 0.7147 - val_loss: 2.2894 - val_acc: 0.4830
Epoch 12/170
391/391 [==============================] - 45s - loss: 1.5989 - acc: 0.7148 - val_loss: 3.1557 - val_acc: 0.3229
Epoch 13/170
391/391 [==============================] - 45s - loss: 1.5874 - acc: 0.7194 - val_loss: 2.4012 - val_acc: 0.5076
Epoch 14/170
391/391 [==============================] - 45s - loss: 1.6052 - acc: 0.7156 - val_loss: 3.7248 - val_acc: 0.2863
Epoch 15/170
391/391 [==============================] - 45s - loss: 1.5982 - acc: 0.7158 - val_loss: 2.7553 - val_acc: 0.3899
Epoch 16/170
391/391 [==============================] - 45s - loss: 1.6067 - acc: 0.7190 - val_loss: 2.3964 - val_acc: 0.4573
Epoch 17/170
391/391 [==============================] - 45s - loss: 1.6110 - acc: 0.7228 - val_loss: 2.5093 - val_acc: 0.4269
Epoch 18/170
391/391 [==============================] - 45s - loss: 1.6119 - acc: 0.7208 - val_loss: 2.9080 - val_acc: 0.4681
Epoch 19/170
391/391 [==============================] - 45s - loss: 1.6164 - acc: 0.7209 - val_loss: 2.1174 - val_acc: 0.5694
Epoch 20/170
391/391 [==============================] - 44s - loss: 1.6058 - acc: 0.7242 - val_loss: 2.7051 - val_acc: 0.3764
Epoch 21/170
391/391 [==============================] - 44s - loss: 1.6146 - acc: 0.7236 - val_loss: 3.2420 - val_acc: 0.3711
Epoch 22/170
391/391 [==============================] - 45s - loss: 1.6265 - acc: 0.7227 - val_loss: 2.4697 - val_acc: 0.4456
Epoch 23/170
391/391 [==============================] - 45s - loss: 1.6153 - acc: 0.7249 - val_loss: 2.4554 - val_acc: 0.4455
Epoch 24/170
391/391 [==============================] - 44s - loss: 1.6359 - acc: 0.7216 - val_loss: 2.8173 - val_acc: 0.3943
Epoch 25/170
391/391 [==============================] - 44s - loss: 1.6264 - acc: 0.7268 - val_loss: 2.0038 - val_acc: 0.5731
Epoch 26/170
391/391 [==============================] - 45s - loss: 1.6331 - acc: 0.7253 - val_loss: 2.4279 - val_acc: 0.4687
Epoch 27/170
391/391 [==============================] - 45s - loss: 1.6361 - acc: 0.7252 - val_loss: 2.9963 - val_acc: 0.3576
Epoch 28/170
391/391 [==============================] - 45s - loss: 1.6291 - acc: 0.7271 - val_loss: 2.4251 - val_acc: 0.5070
Epoch 29/170
391/391 [==============================] - 44s - loss: 1.6352 - acc: 0.7280 - val_loss: 2.4878 - val_acc: 0.4652
Epoch 30/170
391/391 [==============================] - 44s - loss: 1.6241 - acc: 0.7294 - val_loss: 2.2547 - val_acc: 0.5298
Epoch 31/170
391/391 [==============================] - 44s - loss: 1.6290 - acc: 0.7270 - val_loss: 2.0977 - val_acc: 0.5632
Epoch 32/170
391/391 [==============================] - 45s - loss: 1.6315 - acc: 0.7270 - val_loss: 2.7400 - val_acc: 0.3834
Epoch 33/170
391/391 [==============================] - 44s - loss: 1.6283 - acc: 0.7262 - val_loss: 2.0775 - val_acc: 0.5628
Epoch 34/170
391/391 [==============================] - 44s - loss: 1.6155 - acc: 0.7297 - val_loss: 2.4855 - val_acc: 0.4750
Epoch 35/170
391/391 [==============================] - 45s - loss: 1.6210 - acc: 0.7299 - val_loss: 2.5215 - val_acc: 0.4396
Epoch 36/170
391/391 [==============================] - 45s - loss: 1.6286 - acc: 0.7279 - val_loss: 3.1068 - val_acc: 0.3895
Epoch 37/170
391/391 [==============================] - 45s - loss: 1.6404 - acc: 0.7284 - val_loss: 2.4683 - val_acc: 0.4433
Epoch 38/170
391/391 [==============================] - 45s - loss: 1.6233 - acc: 0.7314 - val_loss: 2.2438 - val_acc: 0.5579
Epoch 39/170
391/391 [==============================] - 45s - loss: 1.6292 - acc: 0.7286 - val_loss: 2.1861 - val_acc: 0.5306
Epoch 40/170
391/391 [==============================] - 45s - loss: 1.6136 - acc: 0.7328 - val_loss: 2.4955 - val_acc: 0.4440
Epoch 41/170
391/391 [==============================] - 45s - loss: 1.6273 - acc: 0.7292 - val_loss: 2.6906 - val_acc: 0.4575
Epoch 42/170
391/391 [==============================] - 45s - loss: 1.6268 - acc: 0.7293 - val_loss: 2.3609 - val_acc: 0.4941
Epoch 43/170
391/391 [==============================] - 45s - loss: 1.6183 - acc: 0.7318 - val_loss: 2.4815 - val_acc: 0.4693
Epoch 44/170
391/391 [==============================] - 45s - loss: 1.6233 - acc: 0.7293 - val_loss: 2.3069 - val_acc: 0.4750
Epoch 45/170
391/391 [==============================] - 45s - loss: 1.6239 - acc: 0.7300 - val_loss: 2.5219 - val_acc: 0.4626
Epoch 46/170
391/391 [==============================] - 45s - loss: 1.6120 - acc: 0.7337 - val_loss: 2.0765 - val_acc: 0.5481
Epoch 47/170
391/391 [==============================] - 44s - loss: 1.6070 - acc: 0.7325 - val_loss: 2.5972 - val_acc: 0.4426
Epoch 48/170
391/391 [==============================] - 45s - loss: 1.6140 - acc: 0.7302 - val_loss: 2.3313 - val_acc: 0.5036
Epoch 49/170
391/391 [==============================] - 45s - loss: 1.6109 - acc: 0.7314 - val_loss: 2.4352 - val_acc: 0.4775
Epoch 50/170
391/391 [==============================] - 45s - loss: 1.6054 - acc: 0.7344 - val_loss: 1.9621 - val_acc: 0.5994
Epoch 51/170
391/391 [==============================] - 44s - loss: 1.6300 - acc: 0.7307 - val_loss: 6.0263 - val_acc: 0.1273
Epoch 52/170
391/391 [==============================] - 45s - loss: 1.6103 - acc: 0.7311 - val_loss: 2.5825 - val_acc: 0.4361
Epoch 53/170
391/391 [==============================] - 44s - loss: 1.6193 - acc: 0.7336 - val_loss: 2.3532 - val_acc: 0.4965
Epoch 54/170
391/391 [==============================] - 45s - loss: 1.6151 - acc: 0.7342 - val_loss: 2.8765 - val_acc: 0.3680
Epoch 55/170
391/391 [==============================] - 45s - loss: 1.6219 - acc: 0.7302 - val_loss: 2.3603 - val_acc: 0.5124
Epoch 56/170
391/391 [==============================] - 45s - loss: 1.6107 - acc: 0.7304 - val_loss: 2.5536 - val_acc: 0.4310
Epoch 57/170
391/391 [==============================] - 45s - loss: 1.6155 - acc: 0.7341 - val_loss: 2.4213 - val_acc: 0.4873
Epoch 58/170
391/391 [==============================] - 45s - loss: 1.5933 - acc: 0.7342 - val_loss: 2.5767 - val_acc: 0.3996
Epoch 59/170
391/391 [==============================] - 45s - loss: 1.6072 - acc: 0.7293 - val_loss: 2.7264 - val_acc: 0.4714
Epoch 60/170
391/391 [==============================] - 44s - loss: 1.6019 - acc: 0.7347 - val_loss: 2.1024 - val_acc: 0.5680
Epoch 61/170
391/391 [==============================] - 44s - loss: 1.6020 - acc: 0.7320 - val_loss: 3.4962 - val_acc: 0.3448
Epoch 62/170
391/391 [==============================] - 44s - loss: 1.6017 - acc: 0.7343 - val_loss: 3.0863 - val_acc: 0.3589
Epoch 63/170
391/391 [==============================] - 45s - loss: 1.5970 - acc: 0.7303 - val_loss: 2.0296 - val_acc: 0.5620
Epoch 64/170
391/391 [==============================] - 45s - loss: 1.5918 - acc: 0.7337 - val_loss: 2.1089 - val_acc: 0.5558
Epoch 65/170
391/391 [==============================] - 45s - loss: 1.6085 - acc: 0.7278 - val_loss: 1.9820 - val_acc: 0.5989
Epoch 66/170
391/391 [==============================] - 45s - loss: 1.5918 - acc: 0.7330 - val_loss: 2.7357 - val_acc: 0.4175
Epoch 67/170
391/391 [==============================] - 45s - loss: 1.5946 - acc: 0.7336 - val_loss: 2.5758 - val_acc: 0.3925
Epoch 68/170
391/391 [==============================] - 45s - loss: 1.5931 - acc: 0.7326 - val_loss: 2.8285 - val_acc: 0.3382
Epoch 69/170
391/391 [==============================] - 45s - loss: 1.5978 - acc: 0.7331 - val_loss: 2.5085 - val_acc: 0.4478
Epoch 70/170
391/391 [==============================] - 45s - loss: 1.5879 - acc: 0.7353 - val_loss: 2.4009 - val_acc: 0.4973
Epoch 71/170
391/391 [==============================] - 45s - loss: 1.5942 - acc: 0.7368 - val_loss: 2.4004 - val_acc: 0.4152
Epoch 72/170
391/391 [==============================] - 45s - loss: 1.5944 - acc: 0.7313 - val_loss: 2.1285 - val_acc: 0.5658
Epoch 73/170
391/391 [==============================] - 44s - loss: 1.5846 - acc: 0.7349 - val_loss: 2.0390 - val_acc: 0.5717
Epoch 74/170
391/391 [==============================] - 44s - loss: 1.5990 - acc: 0.7326 - val_loss: 2.3853 - val_acc: 0.4744
Epoch 75/170
391/391 [==============================] - 44s - loss: 1.5958 - acc: 0.7358 - val_loss: 2.6013 - val_acc: 0.4367
Epoch 76/170
391/391 [==============================] - 44s - loss: 1.6033 - acc: 0.7332 - val_loss: 2.5766 - val_acc: 0.4145
Epoch 77/170
391/391 [==============================] - 44s - loss: 1.6014 - acc: 0.7324 - val_loss: 3.2699 - val_acc: 0.3168
Epoch 78/170
391/391 [==============================] - 44s - loss: 1.5880 - acc: 0.7332 - val_loss: 2.2472 - val_acc: 0.5362
Epoch 79/170
391/391 [==============================] - 44s - loss: 1.5973 - acc: 0.7338 - val_loss: 2.6982 - val_acc: 0.3678
Epoch 80/170
391/391 [==============================] - 45s - loss: 1.5955 - acc: 0.7320 - val_loss: 2.3848 - val_acc: 0.5045
Epoch 81/170
391/391 [==============================] - 45s - loss: 1.5764 - acc: 0.7337 - val_loss: 2.0337 - val_acc: 0.5618
Epoch 82/170
391/391 [==============================] - 45s - loss: 1.3197 - acc: 0.8007 - val_loss: 1.3066 - val_acc: 0.7887
Epoch 83/170
391/391 [==============================] - 45s - loss: 1.0416 - acc: 0.8472 - val_loss: 1.1027 - val_acc: 0.8112
Epoch 84/170
391/391 [==============================] - 45s - loss: 0.8996 - acc: 0.8631 - val_loss: 1.0225 - val_acc: 0.8130
Epoch 85/170
391/391 [==============================] - 45s - loss: 0.8106 - acc: 0.8702 - val_loss: 0.9317 - val_acc: 0.8239
Epoch 86/170
391/391 [==============================] - 45s - loss: 0.7498 - acc: 0.8741 - val_loss: 0.9299 - val_acc: 0.8169
Epoch 87/170
391/391 [==============================] - 45s - loss: 0.7157 - acc: 0.8774 - val_loss: 0.8181 - val_acc: 0.8343
Epoch 88/170
391/391 [==============================] - 45s - loss: 0.6914 - acc: 0.8801 - val_loss: 0.9621 - val_acc: 0.7938
Epoch 89/170
391/391 [==============================] - 45s - loss: 0.6827 - acc: 0.8805 - val_loss: 0.9323 - val_acc: 0.8049
Epoch 90/170
391/391 [==============================] - 45s - loss: 0.6788 - acc: 0.8824 - val_loss: 0.9320 - val_acc: 0.8006
Epoch 91/170
391/391 [==============================] - 45s - loss: 0.6720 - acc: 0.8844 - val_loss: 0.8394 - val_acc: 0.8367
Epoch 92/170
391/391 [==============================] - 45s - loss: 0.6748 - acc: 0.8859 - val_loss: 1.1221 - val_acc: 0.7608
Epoch 93/170
391/391 [==============================] - 45s - loss: 0.6697 - acc: 0.8870 - val_loss: 0.9130 - val_acc: 0.8160
Epoch 94/170
391/391 [==============================] - 45s - loss: 0.6677 - acc: 0.8917 - val_loss: 0.8731 - val_acc: 0.8317
Epoch 95/170
391/391 [==============================] - 45s - loss: 0.6697 - acc: 0.8935 - val_loss: 0.9115 - val_acc: 0.8248
Epoch 96/170
391/391 [==============================] - 45s - loss: 0.6743 - acc: 0.8936 - val_loss: 0.9579 - val_acc: 0.8131
Epoch 97/170
391/391 [==============================] - 45s - loss: 0.6739 - acc: 0.8966 - val_loss: 0.9413 - val_acc: 0.8205
Epoch 98/170
391/391 [==============================] - 45s - loss: 0.6797 - acc: 0.8953 - val_loss: 0.9161 - val_acc: 0.8226
Epoch 99/170
391/391 [==============================] - 45s - loss: 0.6807 - acc: 0.8975 - val_loss: 0.8980 - val_acc: 0.8344
Epoch 100/170
391/391 [==============================] - 45s - loss: 0.6808 - acc: 0.8979 - val_loss: 0.8584 - val_acc: 0.8465
Epoch 101/170
391/391 [==============================] - 45s - loss: 0.6795 - acc: 0.8994 - val_loss: 0.8751 - val_acc: 0.8359
Epoch 102/170
391/391 [==============================] - 45s - loss: 0.6771 - acc: 0.9008 - val_loss: 0.8976 - val_acc: 0.8302
Epoch 103/170
391/391 [==============================] - 45s - loss: 0.6727 - acc: 0.9042 - val_loss: 0.9247 - val_acc: 0.8257
Epoch 104/170
391/391 [==============================] - 45s - loss: 0.6802 - acc: 0.9022 - val_loss: 0.8823 - val_acc: 0.8394
Epoch 105/170
391/391 [==============================] - 45s - loss: 0.6807 - acc: 0.9036 - val_loss: 1.1629 - val_acc: 0.7608
Epoch 106/170
391/391 [==============================] - 46s - loss: 0.6805 - acc: 0.9058 - val_loss: 0.9777 - val_acc: 0.8223
Epoch 107/170
391/391 [==============================] - 46s - loss: 0.6783 - acc: 0.9081 - val_loss: 0.8959 - val_acc: 0.8405
Epoch 108/170
391/391 [==============================] - 46s - loss: 0.6792 - acc: 0.9064 - val_loss: 1.0403 - val_acc: 0.7902
Epoch 109/170
391/391 [==============================] - 46s - loss: 0.6861 - acc: 0.9061 - val_loss: 1.2975 - val_acc: 0.7517
Epoch 110/170
391/391 [==============================] - 46s - loss: 0.6854 - acc: 0.9071 - val_loss: 0.8764 - val_acc: 0.8512
Epoch 111/170
391/391 [==============================] - 46s - loss: 0.6892 - acc: 0.9067 - val_loss: 0.9311 - val_acc: 0.8287
Epoch 112/170
391/391 [==============================] - 46s - loss: 0.6824 - acc: 0.9104 - val_loss: 1.1703 - val_acc: 0.7850
Epoch 113/170
391/391 [==============================] - 46s - loss: 0.6943 - acc: 0.9091 - val_loss: 0.9857 - val_acc: 0.8165
Epoch 114/170
391/391 [==============================] - 46s - loss: 0.6943 - acc: 0.9074 - val_loss: 1.0189 - val_acc: 0.8120
Epoch 115/170
391/391 [==============================] - 46s - loss: 0.6908 - acc: 0.9085 - val_loss: 0.9910 - val_acc: 0.8250
Epoch 116/170
391/391 [==============================] - 46s - loss: 0.6954 - acc: 0.9096 - val_loss: 1.1163 - val_acc: 0.7998
Epoch 117/170
391/391 [==============================] - 46s - loss: 0.6955 - acc: 0.9103 - val_loss: 1.3160 - val_acc: 0.7321
Epoch 118/170
391/391 [==============================] - 46s - loss: 0.6826 - acc: 0.9137 - val_loss: 0.9815 - val_acc: 0.8312
Epoch 119/170
391/391 [==============================] - 46s - loss: 0.6953 - acc: 0.9113 - val_loss: 1.1597 - val_acc: 0.7902
Epoch 120/170
391/391 [==============================] - 46s - loss: 0.6941 - acc: 0.9125 - val_loss: 1.0074 - val_acc: 0.8210
Epoch 121/170
391/391 [==============================] - 46s - loss: 0.6919 - acc: 0.9138 - val_loss: 1.0106 - val_acc: 0.8271
Epoch 122/170
391/391 [==============================] - 46s - loss: 0.5955 - acc: 0.9437 - val_loss: 0.7098 - val_acc: 0.9073
Epoch 123/170
391/391 [==============================] - 46s - loss: 0.5343 - acc: 0.9617 - val_loss: 0.6960 - val_acc: 0.9124
Epoch 124/170
391/391 [==============================] - 46s - loss: 0.5044 - acc: 0.9682 - val_loss: 0.6885 - val_acc: 0.9139
Epoch 125/170
391/391 [==============================] - 47s - loss: 0.4797 - acc: 0.9739 - val_loss: 0.6774 - val_acc: 0.9169
Epoch 126/170
391/391 [==============================] - 46s - loss: 0.4663 - acc: 0.9757 - val_loss: 0.6719 - val_acc: 0.9164
Epoch 127/170
391/391 [==============================] - 46s - loss: 0.4471 - acc: 0.9785 - val_loss: 0.6743 - val_acc: 0.9170
Epoch 128/170
391/391 [==============================] - 46s - loss: 0.4344 - acc: 0.9799 - val_loss: 0.6679 - val_acc: 0.9159
Epoch 129/170
391/391 [==============================] - 46s - loss: 0.4204 - acc: 0.9825 - val_loss: 0.6741 - val_acc: 0.9156
Epoch 130/170
391/391 [==============================] - 46s - loss: 0.4094 - acc: 0.9837 - val_loss: 0.6752 - val_acc: 0.9131
Epoch 131/170
391/391 [==============================] - 46s - loss: 0.3990 - acc: 0.9834 - val_loss: 0.6565 - val_acc: 0.9187
Epoch 132/170
391/391 [==============================] - 46s - loss: 0.3864 - acc: 0.9863 - val_loss: 0.6580 - val_acc: 0.9155
Epoch 133/170
391/391 [==============================] - 46s - loss: 0.3786 - acc: 0.9860 - val_loss: 0.6555 - val_acc: 0.9149
Epoch 134/170
391/391 [==============================] - 46s - loss: 0.3665 - acc: 0.9875 - val_loss: 0.6570 - val_acc: 0.9181
Epoch 135/170
391/391 [==============================] - 46s - loss: 0.3591 - acc: 0.9882 - val_loss: 0.6614 - val_acc: 0.9162
Epoch 136/170
391/391 [==============================] - 46s - loss: 0.3521 - acc: 0.9884 - val_loss: 0.6504 - val_acc: 0.9131
Epoch 137/170
391/391 [==============================] - 46s - loss: 0.3455 - acc: 0.9886 - val_loss: 0.6467 - val_acc: 0.9160
Epoch 138/170
391/391 [==============================] - 46s - loss: 0.3391 - acc: 0.9883 - val_loss: 0.6419 - val_acc: 0.9129
Epoch 139/170
391/391 [==============================] - 46s - loss: 0.3317 - acc: 0.9890 - val_loss: 0.6316 - val_acc: 0.9160
Epoch 140/170
391/391 [==============================] - 46s - loss: 0.3207 - acc: 0.9908 - val_loss: 0.6438 - val_acc: 0.9157
Epoch 141/170
391/391 [==============================] - 46s - loss: 0.3163 - acc: 0.9901 - val_loss: 0.6209 - val_acc: 0.9165
Epoch 142/170
391/391 [==============================] - 46s - loss: 0.3082 - acc: 0.9906 - val_loss: 0.6508 - val_acc: 0.9102
Epoch 143/170
391/391 [==============================] - 46s - loss: 0.3038 - acc: 0.9906 - val_loss: 0.6329 - val_acc: 0.9139
Epoch 144/170
391/391 [==============================] - 46s - loss: 0.2965 - acc: 0.9914 - val_loss: 0.6455 - val_acc: 0.9114
Epoch 145/170
391/391 [==============================] - 46s - loss: 0.2947 - acc: 0.9903 - val_loss: 0.6247 - val_acc: 0.9141
Epoch 146/170
391/391 [==============================] - 46s - loss: 0.2874 - acc: 0.9911 - val_loss: 0.6295 - val_acc: 0.9130
Epoch 147/170
391/391 [==============================] - 46s - loss: 0.2845 - acc: 0.9905 - val_loss: 0.6222 - val_acc: 0.9148
Epoch 148/170
391/391 [==============================] - 46s - loss: 0.2780 - acc: 0.9907 - val_loss: 0.6465 - val_acc: 0.9083
Epoch 149/170
391/391 [==============================] - 46s - loss: 0.2767 - acc: 0.9902 - val_loss: 0.6324 - val_acc: 0.9086
Epoch 150/170
391/391 [==============================] - 46s - loss: 0.2705 - acc: 0.9909 - val_loss: 0.6128 - val_acc: 0.9123
Epoch 151/170
391/391 [==============================] - 46s - loss: 0.2638 - acc: 0.9914 - val_loss: 0.5972 - val_acc: 0.9145
Epoch 152/170
391/391 [==============================] - 46s - loss: 0.2594 - acc: 0.9920 - val_loss: 0.6372 - val_acc: 0.9057
Epoch 153/170
391/391 [==============================] - 46s - loss: 0.2578 - acc: 0.9914 - val_loss: 0.6167 - val_acc: 0.9078
Epoch 154/170
391/391 [==============================] - 46s - loss: 0.2545 - acc: 0.9909 - val_loss: 0.6002 - val_acc: 0.9109
Epoch 155/170
391/391 [==============================] - 46s - loss: 0.2486 - acc: 0.9914 - val_loss: 0.5844 - val_acc: 0.9128
Epoch 156/170
391/391 [==============================] - 46s - loss: 0.2452 - acc: 0.9919 - val_loss: 0.6113 - val_acc: 0.9065
Epoch 157/170
391/391 [==============================] - 46s - loss: 0.2412 - acc: 0.9916 - val_loss: 0.5785 - val_acc: 0.9151
Epoch 158/170
391/391 [==============================] - 46s - loss: 0.2371 - acc: 0.9922 - val_loss: 0.6072 - val_acc: 0.9115
Epoch 159/170
391/391 [==============================] - 46s - loss: 0.2401 - acc: 0.9900 - val_loss: 0.6420 - val_acc: 0.9013
Epoch 160/170
391/391 [==============================] - 46s - loss: 0.2368 - acc: 0.9902 - val_loss: 0.5932 - val_acc: 0.9095
Epoch 161/170
391/391 [==============================] - 46s - loss: 0.2341 - acc: 0.9900 - val_loss: 0.5711 - val_acc: 0.9102
Epoch 162/170
391/391 [==============================] - 46s - loss: 0.2278 - acc: 0.9908 - val_loss: 0.6049 - val_acc: 0.9042
Epoch 163/170
391/391 [==============================] - 46s - loss: 0.2269 - acc: 0.9907 - val_loss: 0.5716 - val_acc: 0.9111
Epoch 164/170
391/391 [==============================] - 46s - loss: 0.2238 - acc: 0.9911 - val_loss: 0.6103 - val_acc: 0.8998
Epoch 165/170
391/391 [==============================] - 46s - loss: 0.2233 - acc: 0.9898 - val_loss: 0.5775 - val_acc: 0.9070
Epoch 166/170
391/391 [==============================] - 46s - loss: 0.2215 - acc: 0.9903 - val_loss: 0.5826 - val_acc: 0.9059
Epoch 167/170
391/391 [==============================] - 46s - loss: 0.2164 - acc: 0.9907 - val_loss: 0.5751 - val_acc: 0.9098
Epoch 168/170
391/391 [==============================] - 46s - loss: 0.2127 - acc: 0.9913 - val_loss: 0.5643 - val_acc: 0.9147
Epoch 169/170
391/391 [==============================] - 46s - loss: 0.2126 - acc: 0.9903 - val_loss: 0.6312 - val_acc: 0.8973
Epoch 170/170
391/391 [==============================] - 46s - loss: 0.2121 - acc: 0.9902 - val_loss: 0.5702 - val_acc: 0.9060
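
with training done, the model can be evaluated on the preprocessed test set (a sketch)


In [ ]:
loss, acc = model.evaluate(x_test, y_test, batch_size=batch_size, verbose=0)
print('test loss: %.4f, test acc: %.4f' % (loss, acc))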

In [ ]: