train a ResNet on CIFAR-10

import packages


In [1]:
import os
import keras
import numpy as np
import tensorflow as tf
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv2D, Dense, Input, add, Activation, GlobalAveragePooling2D
from keras.initializers import he_normal
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.models import Model
from keras import optimizers
from keras import regularizers


Using TensorFlow backend.

select a specific GPU and cap its memory usage


In [2]:
os.environ["CUDA_VISIBLE_DEVICES"] = "1"   # expose only the second GPU to TensorFlow
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
# fraction of GPU memory the process may grab; 1.0 allows the whole card,
# so lower this value if you actually want to cap memory usage
config.gpu_options.per_process_gpu_memory_fraction = 1.0
set_session(tf.Session(config=config))
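
Note: tf.ConfigProto and set_session come from the TensorFlow 1.x API used by standalone Keras. As an aside (not part of the original run), a rough TensorFlow 2 / tf.keras equivalent caps usage by enabling per-GPU memory growth:

import tensorflow as tf
# TF2-only sketch: let TensorFlow allocate GPU memory on demand
for gpu in tf.config.experimental.list_physical_devices('GPU'):
    tf.config.experimental.set_memory_growth(gpu, True)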

initialize some parameters


In [3]:
num_classes        = 10
img_rows, img_cols = 32, 32
img_channels       = 3
stack_num          = 18            # residual blocks per stage; depth = 6 * 18 + 2 = 110 layers
batch_size         = 128
epochs             = 200
iterations         = 391           # ceil(50000 / 128) batches per epoch
weight_decay       = 0.0005
log_filepath       = r'./resnet50/'   # TensorBoard log directory

preprocess the images (per-channel standardization)


In [4]:
def color_preprocessing(x_train, x_test):
    # standardize each RGB channel to zero mean and unit variance
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    for ch in range(3):
        x_train[:, :, :, ch] = (x_train[:, :, :, ch] - np.mean(x_train[:, :, :, ch])) / np.std(x_train[:, :, :, ch])
        x_test[:, :, :, ch] = (x_test[:, :, :, ch] - np.mean(x_test[:, :, :, ch])) / np.std(x_test[:, :, :, ch])
    return x_train, x_test
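
As a quick sanity check (illustrative, not part of the original notebook), each channel should come out with mean near 0 and standard deviation near 1:

(x_check, _), (x_check_test, _) = cifar10.load_data()
x_check, x_check_test = color_preprocessing(x_check, x_check_test)
for ch in range(3):
    print(ch, np.mean(x_check[:, :, :, ch]), np.std(x_check[:, :, :, ch]))
# expect values near 0.0 and 1.0 for every channel

Note that the test set is standardized with its own statistics; many implementations reuse the training-set mean and std instead, but the loop above keeps the original behavior.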

set the learning-rate schedule


In [5]:
def scheduler(epoch):
    # piecewise-constant decay: 0.1 -> 0.02 -> 0.004 -> 0.0008
    if epoch <= 60:
        return 0.1
    if epoch <= 120:
        return 0.02
    if epoch <= 160:
        return 0.004
    return 0.0008
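
Keras calls the scheduler with a zero-based epoch index, so the 0.1 phase covers the first 61 epochs. A quick look at the breakpoints (illustrative):

for e in (0, 60, 61, 120, 121, 160, 161):
    print(e, scheduler(e))
# 0.1 through index 60, 0.02 through 120, 0.004 through 160, then 0.0008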

define the network


In [6]:
def residual_network(img_input, classes_num=10, stack_n=18):
    # pre-activation residual block: BN -> ReLU -> conv -> BN -> ReLU -> conv
    def residual_block(x, shape, increase_filter=False):
        output_filter_num = shape[1]   # shape = [in_filters, out_filters]
        first_stride = (2, 2) if increase_filter else (1, 1)

        pre_bn   = BatchNormalization()(x)
        pre_relu = Activation('relu')(pre_bn)

        conv_1 = Conv2D(output_filter_num,
                        kernel_size=(3, 3),
                        strides=first_stride,
                        padding='same',
                        kernel_initializer=he_normal(),
                        kernel_regularizer=regularizers.l2(weight_decay))(pre_relu)
        bn_1   = BatchNormalization()(conv_1)
        relu_1 = Activation('relu')(bn_1)
        conv_2 = Conv2D(output_filter_num,
                        kernel_size=(3, 3),
                        strides=(1, 1),
                        padding='same',
                        kernel_initializer=he_normal(),
                        kernel_regularizer=regularizers.l2(weight_decay))(relu_1)
        if increase_filter:
            # 1x1 strided projection so the shortcut matches the new shape
            projection = Conv2D(output_filter_num,
                                kernel_size=(1, 1),
                                strides=(2, 2),
                                padding='same',
                                kernel_initializer=he_normal(),
                                kernel_regularizer=regularizers.l2(weight_decay))(x)
            block = add([conv_2, projection])
        else:
            block = add([conv_2, x])
        return block

    # stem: 3x3 conv, 16 filters
    x = Conv2D(filters=16,
               kernel_size=(3, 3),
               strides=(1, 1),
               padding='same',
               kernel_initializer=he_normal(),
               kernel_regularizer=regularizers.l2(weight_decay))(img_input)

    # stage 1: stack_n blocks, 16 filters, 32x32 feature maps
    for _ in range(stack_n):
        x = residual_block(x, [16, 16])

    # stage 2: downsample to 16x16, 32 filters
    x = residual_block(x, [16, 32], increase_filter=True)
    for _ in range(1, stack_n):
        x = residual_block(x, [16, 32])

    # stage 3: downsample to 8x8, 64 filters
    x = residual_block(x, [32, 64], increase_filter=True)
    for _ in range(1, stack_n):
        x = residual_block(x, [32, 64])

    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = GlobalAveragePooling2D()(x)
    x = Dense(classes_num,
              activation='softmax',
              kernel_initializer=he_normal(),
              kernel_regularizer=regularizers.l2(weight_decay))(x)
    return x
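
With three stages of stack_n two-convolution blocks plus the stem convolution and the final dense layer, the depth is 6 * stack_n + 2, so stack_n = 18 builds ResNet-110 (despite the './resnet50/' log directory name). A quick check:

print(6 * stack_num + 2)   # 110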

load data and build the model


In [7]:
# load data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# color preprocessing
x_train, x_test = color_preprocessing(x_train, x_test)

# build network
img_input = Input(shape=(img_rows,img_cols,img_channels))
output = residual_network(img_input, num_classes, stack_num)
resnet = Model(img_input, output)
print(resnet.summary())
# set optimizer: SGD with Nesterov momentum; initial lr matches the scheduler's first phase
sgd = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
resnet.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])



set up the callbacks


In [8]:
# set callback
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr,tb_cb]
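
To also keep the best weights seen on the validation set, a ModelCheckpoint callback could be appended (an optional extra the original run does not use; the file name is hypothetical):

from keras.callbacks import ModelCheckpoint
ckpt = ModelCheckpoint('resnet_best.h5', monitor='val_acc', save_best_only=True)
cbks = [change_lr, tb_cb, ckpt]   # would replace the callback list above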

set up data augmentation


In [9]:
# set data augmentation
print('Using real-time data augmentation.')
datagen = ImageDataGenerator(horizontal_flip=True,
                             width_shift_range=0.125,
                             height_shift_range=0.125,
                             fill_mode='constant',
                             cval=0.)

datagen.fit(x_train)


Using real-time data augmentation.
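
Note that datagen.fit(x_train) is only required when the generator computes dataset-wide statistics (featurewise centering/normalization or ZCA whitening); with flips and shifts alone it is effectively a no-op, though harmless. To eyeball one augmented batch (illustrative):

x_batch, y_batch = next(datagen.flow(x_train, y_train, batch_size=4))
print(x_batch.shape)   # (4, 32, 32, 3)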

train


In [10]:
# start training
resnet.fit_generator(datagen.flow(x_train, y_train,batch_size=batch_size),
                    steps_per_epoch=iterations,
                    epochs=epochs,
                    callbacks=cbks,
                    validation_data=(x_test, y_test))
resnet.save('resnet.h5')


Epoch 1/170
391/391 [==============================] - 50s - loss: 6.8733 - acc: 0.6144 - val_loss: 3.3814 - val_acc: 0.4490
Epoch 2/170
391/391 [==============================] - 47s - loss: 2.1080 - acc: 0.6667 - val_loss: 2.8081 - val_acc: 0.4488
Epoch 3/170
391/391 [==============================] - 47s - loss: 1.6726 - acc: 0.6756 - val_loss: 2.2019 - val_acc: 0.4961
Epoch 4/170
391/391 [==============================] - 47s - loss: 1.6172 - acc: 0.6870 - val_loss: 2.0787 - val_acc: 0.5076
Epoch 5/170
391/391 [==============================] - 47s - loss: 1.6046 - acc: 0.6933 - val_loss: 2.5723 - val_acc: 0.4315
Epoch 6/170
391/391 [==============================] - 47s - loss: 1.5781 - acc: 0.7034 - val_loss: 3.1483 - val_acc: 0.3339
Epoch 7/170
391/391 [==============================] - 47s - loss: 1.5871 - acc: 0.7011 - val_loss: 2.5337 - val_acc: 0.4410
Epoch 8/170
391/391 [==============================] - 47s - loss: 1.5866 - acc: 0.7067 - val_loss: 2.3699 - val_acc: 0.4626
Epoch 9/170
391/391 [==============================] - 47s - loss: 1.5931 - acc: 0.7078 - val_loss: 2.7684 - val_acc: 0.4161
Epoch 10/170
391/391 [==============================] - 47s - loss: 1.5993 - acc: 0.7092 - val_loss: 4.0378 - val_acc: 0.2909
Epoch 11/170
391/391 [==============================] - 47s - loss: 1.6068 - acc: 0.7110 - val_loss: 2.5748 - val_acc: 0.3800
Epoch 12/170
391/391 [==============================] - 47s - loss: 1.6033 - acc: 0.7119 - val_loss: 2.8108 - val_acc: 0.3767
Epoch 13/170
391/391 [==============================] - 47s - loss: 1.6024 - acc: 0.7121 - val_loss: 2.9787 - val_acc: 0.4021
Epoch 14/170
391/391 [==============================] - 47s - loss: 1.6101 - acc: 0.7137 - val_loss: 2.3104 - val_acc: 0.4779
Epoch 15/170
391/391 [==============================] - 47s - loss: 1.6040 - acc: 0.7133 - val_loss: 2.5038 - val_acc: 0.4734
Epoch 16/170
391/391 [==============================] - 47s - loss: 1.6107 - acc: 0.7161 - val_loss: 2.3807 - val_acc: 0.4583
Epoch 17/170
391/391 [==============================] - 47s - loss: 1.6138 - acc: 0.7171 - val_loss: 3.8443 - val_acc: 0.2397
Epoch 18/170
391/391 [==============================] - 47s - loss: 1.6165 - acc: 0.7182 - val_loss: 2.3163 - val_acc: 0.4558
Epoch 19/170
391/391 [==============================] - 47s - loss: 1.6046 - acc: 0.7185 - val_loss: 2.2108 - val_acc: 0.4767
Epoch 20/170
391/391 [==============================] - 47s - loss: 1.6155 - acc: 0.7199 - val_loss: 3.1318 - val_acc: 0.3193
Epoch 21/170
391/391 [==============================] - 47s - loss: 1.6201 - acc: 0.7177 - val_loss: 3.4192 - val_acc: 0.3326
Epoch 22/170
391/391 [==============================] - 47s - loss: 1.6348 - acc: 0.7194 - val_loss: 2.2206 - val_acc: 0.5001
Epoch 23/170
391/391 [==============================] - 47s - loss: 1.6161 - acc: 0.7204 - val_loss: 4.0988 - val_acc: 0.3669
Epoch 24/170
391/391 [==============================] - 47s - loss: 1.6234 - acc: 0.7203 - val_loss: 2.0256 - val_acc: 0.5626
Epoch 25/170
391/391 [==============================] - 47s - loss: 1.6304 - acc: 0.7181 - val_loss: 2.1443 - val_acc: 0.5329
Epoch 26/170
391/391 [==============================] - 47s - loss: 1.6300 - acc: 0.7195 - val_loss: 2.3451 - val_acc: 0.5006
Epoch 27/170
391/391 [==============================] - 47s - loss: 1.6393 - acc: 0.7207 - val_loss: 2.5780 - val_acc: 0.4403
Epoch 28/170
391/391 [==============================] - 47s - loss: 1.6287 - acc: 0.7215 - val_loss: 2.5715 - val_acc: 0.4776
Epoch 29/170
391/391 [==============================] - 47s - loss: 1.6300 - acc: 0.7221 - val_loss: 2.8518 - val_acc: 0.4285
Epoch 30/170
391/391 [==============================] - 47s - loss: 1.6404 - acc: 0.7215 - val_loss: 2.4584 - val_acc: 0.4945
Epoch 31/170
391/391 [==============================] - 47s - loss: 1.6282 - acc: 0.7209 - val_loss: 2.5046 - val_acc: 0.5113
Epoch 32/170
391/391 [==============================] - 47s - loss: 1.6416 - acc: 0.7201 - val_loss: 2.3102 - val_acc: 0.4979
Epoch 33/170
391/391 [==============================] - 47s - loss: 1.6333 - acc: 0.7206 - val_loss: 2.5152 - val_acc: 0.4330
Epoch 34/170
391/391 [==============================] - 47s - loss: 1.6302 - acc: 0.7226 - val_loss: 2.5259 - val_acc: 0.4699
Epoch 35/170
391/391 [==============================] - 47s - loss: 1.6161 - acc: 0.7234 - val_loss: 1.9640 - val_acc: 0.5889
Epoch 36/170
391/391 [==============================] - 47s - loss: 1.6259 - acc: 0.7236 - val_loss: 2.3457 - val_acc: 0.5162
Epoch 37/170
391/391 [==============================] - 47s - loss: 1.6260 - acc: 0.7244 - val_loss: 2.0545 - val_acc: 0.5848
Epoch 38/170
391/391 [==============================] - 47s - loss: 1.6213 - acc: 0.7240 - val_loss: 3.0053 - val_acc: 0.4066
Epoch 39/170
391/391 [==============================] - 47s - loss: 1.6327 - acc: 0.7210 - val_loss: 2.5730 - val_acc: 0.4627
Epoch 40/170
391/391 [==============================] - 47s - loss: 1.6269 - acc: 0.7266 - val_loss: 2.7331 - val_acc: 0.4236
Epoch 41/170
391/391 [==============================] - 47s - loss: 1.6318 - acc: 0.7235 - val_loss: 3.1587 - val_acc: 0.3374
Epoch 42/170
391/391 [==============================] - 47s - loss: 1.6219 - acc: 0.7241 - val_loss: 1.9371 - val_acc: 0.6010
Epoch 43/170
391/391 [==============================] - 47s - loss: 1.6238 - acc: 0.7250 - val_loss: 1.8615 - val_acc: 0.6370
Epoch 44/170
391/391 [==============================] - 47s - loss: 1.6088 - acc: 0.7289 - val_loss: 2.1406 - val_acc: 0.5457
Epoch 45/170
391/391 [==============================] - 47s - loss: 1.6186 - acc: 0.7263 - val_loss: 2.4098 - val_acc: 0.4200
Epoch 46/170
391/391 [==============================] - 47s - loss: 1.6269 - acc: 0.7264 - val_loss: 2.5061 - val_acc: 0.4543
Epoch 47/170
391/391 [==============================] - 47s - loss: 1.6175 - acc: 0.7262 - val_loss: 3.1260 - val_acc: 0.3544
Epoch 48/170
391/391 [==============================] - 47s - loss: 1.6042 - acc: 0.7312 - val_loss: 2.2268 - val_acc: 0.5676
Epoch 49/170
391/391 [==============================] - 47s - loss: 1.6084 - acc: 0.7263 - val_loss: 1.9193 - val_acc: 0.6018
Epoch 50/170
391/391 [==============================] - 47s - loss: 1.6259 - acc: 0.7252 - val_loss: 2.2189 - val_acc: 0.5066
Epoch 51/170
391/391 [==============================] - 47s - loss: 1.6178 - acc: 0.7257 - val_loss: 2.2615 - val_acc: 0.5204
Epoch 52/170
391/391 [==============================] - 47s - loss: 1.6155 - acc: 0.7261 - val_loss: 3.3112 - val_acc: 0.3473
Epoch 53/170
391/391 [==============================] - 47s - loss: 1.6115 - acc: 0.7302 - val_loss: 2.7714 - val_acc: 0.3997
Epoch 54/170
391/391 [==============================] - 47s - loss: 1.6162 - acc: 0.7271 - val_loss: 2.9672 - val_acc: 0.3994
Epoch 55/170
391/391 [==============================] - 47s - loss: 1.6080 - acc: 0.7308 - val_loss: 2.4996 - val_acc: 0.4279
Epoch 56/170
391/391 [==============================] - 47s - loss: 1.6004 - acc: 0.7286 - val_loss: 2.2675 - val_acc: 0.5178
Epoch 57/170
391/391 [==============================] - 47s - loss: 1.6187 - acc: 0.7283 - val_loss: 2.6785 - val_acc: 0.4741
Epoch 58/170
391/391 [==============================] - 47s - loss: 1.6110 - acc: 0.7292 - val_loss: 3.1129 - val_acc: 0.4037
Epoch 59/170
391/391 [==============================] - 47s - loss: 1.6065 - acc: 0.7273 - val_loss: 2.3070 - val_acc: 0.5143
Epoch 60/170
391/391 [==============================] - 47s - loss: 1.6082 - acc: 0.7335 - val_loss: 1.9923 - val_acc: 0.6041
Epoch 61/170
391/391 [==============================] - 47s - loss: 1.6060 - acc: 0.7287 - val_loss: 3.2342 - val_acc: 0.3867
Epoch 62/170
391/391 [==============================] - 47s - loss: 1.5992 - acc: 0.7322 - val_loss: 2.4784 - val_acc: 0.4264
Epoch 63/170
391/391 [==============================] - 47s - loss: 1.6176 - acc: 0.7292 - val_loss: 3.5449 - val_acc: 0.3312
Epoch 64/170
391/391 [==============================] - 47s - loss: 1.6200 - acc: 0.7292 - val_loss: 3.0173 - val_acc: 0.3669
Epoch 65/170
391/391 [==============================] - 47s - loss: 1.6117 - acc: 0.7303 - val_loss: 2.3045 - val_acc: 0.5683
Epoch 66/170
391/391 [==============================] - 47s - loss: 1.6125 - acc: 0.7275 - val_loss: 3.0528 - val_acc: 0.4214
Epoch 67/170
391/391 [==============================] - 47s - loss: 1.6158 - acc: 0.7263 - val_loss: 2.7249 - val_acc: 0.4170
Epoch 68/170
391/391 [==============================] - 47s - loss: 1.6204 - acc: 0.7292 - val_loss: 2.2788 - val_acc: 0.5680
Epoch 69/170
391/391 [==============================] - 47s - loss: 1.6177 - acc: 0.7297 - val_loss: 2.2586 - val_acc: 0.5006
Epoch 70/170
391/391 [==============================] - 47s - loss: 1.6157 - acc: 0.7299 - val_loss: 2.0717 - val_acc: 0.5748
Epoch 71/170
391/391 [==============================] - 47s - loss: 1.6143 - acc: 0.7312 - val_loss: 2.5410 - val_acc: 0.4630
Epoch 72/170
391/391 [==============================] - 47s - loss: 1.6039 - acc: 0.7310 - val_loss: 1.9323 - val_acc: 0.6174
Epoch 73/170
391/391 [==============================] - 47s - loss: 1.6003 - acc: 0.7295 - val_loss: 2.2902 - val_acc: 0.5235
Epoch 74/170
391/391 [==============================] - 47s - loss: 1.6068 - acc: 0.7324 - val_loss: 2.5038 - val_acc: 0.5217
Epoch 75/170
391/391 [==============================] - 47s - loss: 1.6154 - acc: 0.7293 - val_loss: 2.0997 - val_acc: 0.5767
Epoch 76/170
391/391 [==============================] - 47s - loss: 1.6013 - acc: 0.7319 - val_loss: 2.3859 - val_acc: 0.5063
Epoch 77/170
391/391 [==============================] - 47s - loss: 1.5979 - acc: 0.7310 - val_loss: 2.1419 - val_acc: 0.5214
Epoch 78/170
391/391 [==============================] - 47s - loss: 1.6091 - acc: 0.7311 - val_loss: 2.4664 - val_acc: 0.4788
Epoch 79/170
391/391 [==============================] - 47s - loss: 1.6026 - acc: 0.7315 - val_loss: 5.7107 - val_acc: 0.2138
Epoch 80/170
391/391 [==============================] - 47s - loss: 1.6061 - acc: 0.7292 - val_loss: 2.2648 - val_acc: 0.5206
Epoch 81/170
391/391 [==============================] - 47s - loss: 1.6069 - acc: 0.7298 - val_loss: 2.9052 - val_acc: 0.4159
Epoch 82/170
391/391 [==============================] - 47s - loss: 1.3059 - acc: 0.8073 - val_loss: 1.2077 - val_acc: 0.8255
Epoch 83/170
391/391 [==============================] - 47s - loss: 1.0429 - acc: 0.8475 - val_loss: 1.0837 - val_acc: 0.8201
Epoch 84/170
391/391 [==============================] - 47s - loss: 0.9037 - acc: 0.8619 - val_loss: 0.9467 - val_acc: 0.8390
Epoch 85/170
391/391 [==============================] - 47s - loss: 0.8109 - acc: 0.8708 - val_loss: 0.8903 - val_acc: 0.8411
Epoch 86/170
391/391 [==============================] - 47s - loss: 0.7526 - acc: 0.8740 - val_loss: 0.8821 - val_acc: 0.8257
Epoch 87/170
391/391 [==============================] - 47s - loss: 0.7152 - acc: 0.8784 - val_loss: 0.9462 - val_acc: 0.7989
Epoch 88/170
391/391 [==============================] - 47s - loss: 0.6977 - acc: 0.8794 - val_loss: 0.8266 - val_acc: 0.8381
Epoch 89/170
391/391 [==============================] - 47s - loss: 0.6816 - acc: 0.8816 - val_loss: 0.9405 - val_acc: 0.7944
Epoch 90/170
391/391 [==============================] - 47s - loss: 0.6815 - acc: 0.8831 - val_loss: 0.9609 - val_acc: 0.7917
Epoch 91/170
391/391 [==============================] - 47s - loss: 0.6759 - acc: 0.8850 - val_loss: 0.9505 - val_acc: 0.8077
Epoch 92/170
391/391 [==============================] - 47s - loss: 0.6695 - acc: 0.8884 - val_loss: 0.9538 - val_acc: 0.8046
Epoch 93/170
391/391 [==============================] - 47s - loss: 0.6726 - acc: 0.8883 - val_loss: 0.9761 - val_acc: 0.7951
Epoch 94/170
391/391 [==============================] - 47s - loss: 0.6712 - acc: 0.8909 - val_loss: 1.1159 - val_acc: 0.7642
Epoch 95/170
391/391 [==============================] - 47s - loss: 0.6732 - acc: 0.8924 - val_loss: 0.9101 - val_acc: 0.8220
Epoch 96/170
391/391 [==============================] - 47s - loss: 0.6712 - acc: 0.8947 - val_loss: 0.8778 - val_acc: 0.8382
Epoch 97/170
391/391 [==============================] - 47s - loss: 0.6747 - acc: 0.8945 - val_loss: 1.1035 - val_acc: 0.7861
Epoch 98/170
391/391 [==============================] - 47s - loss: 0.6756 - acc: 0.8968 - val_loss: 1.1398 - val_acc: 0.7702
Epoch 99/170
391/391 [==============================] - 47s - loss: 0.6796 - acc: 0.8968 - val_loss: 0.9427 - val_acc: 0.8259
Epoch 100/170
391/391 [==============================] - 47s - loss: 0.6750 - acc: 0.9006 - val_loss: 0.9588 - val_acc: 0.8194
Epoch 101/170
391/391 [==============================] - 47s - loss: 0.6793 - acc: 0.8986 - val_loss: 0.8680 - val_acc: 0.8487
Epoch 102/170
391/391 [==============================] - 47s - loss: 0.6754 - acc: 0.9026 - val_loss: 0.8573 - val_acc: 0.8443
Epoch 103/170
391/391 [==============================] - 47s - loss: 0.6813 - acc: 0.8995 - val_loss: 0.9045 - val_acc: 0.8318
Epoch 104/170
391/391 [==============================] - 47s - loss: 0.6805 - acc: 0.9035 - val_loss: 0.9324 - val_acc: 0.8319
Epoch 105/170
391/391 [==============================] - 47s - loss: 0.6851 - acc: 0.9044 - val_loss: 1.0671 - val_acc: 0.8029
Epoch 106/170
391/391 [==============================] - 47s - loss: 0.6765 - acc: 0.9067 - val_loss: 1.0608 - val_acc: 0.7888
Epoch 107/170
391/391 [==============================] - 47s - loss: 0.6830 - acc: 0.9078 - val_loss: 1.0084 - val_acc: 0.8076
Epoch 108/170
391/391 [==============================] - 47s - loss: 0.6817 - acc: 0.9068 - val_loss: 0.9599 - val_acc: 0.8234
Epoch 109/170
391/391 [==============================] - 47s - loss: 0.6862 - acc: 0.9078 - val_loss: 0.9275 - val_acc: 0.8424
Epoch 110/170
391/391 [==============================] - 47s - loss: 0.6850 - acc: 0.9087 - val_loss: 0.8531 - val_acc: 0.8570
Epoch 111/170
391/391 [==============================] - 47s - loss: 0.6880 - acc: 0.9078 - val_loss: 0.9358 - val_acc: 0.8395
Epoch 112/170
391/391 [==============================] - 47s - loss: 0.6908 - acc: 0.9074 - val_loss: 0.9747 - val_acc: 0.8214
Epoch 113/170
391/391 [==============================] - 47s - loss: 0.6912 - acc: 0.9075 - val_loss: 1.1390 - val_acc: 0.7855
Epoch 114/170
391/391 [==============================] - 47s - loss: 0.6872 - acc: 0.9095 - val_loss: 1.1652 - val_acc: 0.7900
Epoch 115/170
391/391 [==============================] - 47s - loss: 0.6923 - acc: 0.9081 - val_loss: 1.0689 - val_acc: 0.7996
Epoch 116/170
391/391 [==============================] - 47s - loss: 0.6904 - acc: 0.9102 - val_loss: 0.9700 - val_acc: 0.8310
Epoch 117/170
391/391 [==============================] - 47s - loss: 0.6848 - acc: 0.9117 - val_loss: 1.0279 - val_acc: 0.8150
Epoch 118/170
391/391 [==============================] - 47s - loss: 0.6888 - acc: 0.9113 - val_loss: 1.0516 - val_acc: 0.8132
Epoch 119/170
391/391 [==============================] - 47s - loss: 0.6953 - acc: 0.9092 - val_loss: 0.9578 - val_acc: 0.8340
Epoch 120/170
391/391 [==============================] - 47s - loss: 0.6910 - acc: 0.9135 - val_loss: 1.0685 - val_acc: 0.8003
Epoch 121/170
391/391 [==============================] - 47s - loss: 0.6869 - acc: 0.9140 - val_loss: 0.9397 - val_acc: 0.8384
Epoch 122/170
391/391 [==============================] - 47s - loss: 0.5931 - acc: 0.9440 - val_loss: 0.7037 - val_acc: 0.9093
Epoch 123/170
391/391 [==============================] - 47s - loss: 0.5294 - acc: 0.9624 - val_loss: 0.6937 - val_acc: 0.9127
Epoch 124/170
391/391 [==============================] - 47s - loss: 0.5027 - acc: 0.9675 - val_loss: 0.6755 - val_acc: 0.9156
Epoch 125/170
391/391 [==============================] - 47s - loss: 0.4825 - acc: 0.9716 - val_loss: 0.6711 - val_acc: 0.9138
Epoch 126/170
391/391 [==============================] - 47s - loss: 0.4643 - acc: 0.9745 - val_loss: 0.6645 - val_acc: 0.9170
Epoch 127/170
391/391 [==============================] - 47s - loss: 0.4471 - acc: 0.9781 - val_loss: 0.6733 - val_acc: 0.9137
Epoch 128/170
391/391 [==============================] - 47s - loss: 0.4311 - acc: 0.9801 - val_loss: 0.6662 - val_acc: 0.9157
Epoch 129/170
391/391 [==============================] - 47s - loss: 0.4156 - acc: 0.9830 - val_loss: 0.6602 - val_acc: 0.9159
Epoch 130/170
391/391 [==============================] - 47s - loss: 0.4064 - acc: 0.9831 - val_loss: 0.6614 - val_acc: 0.9151
Epoch 131/170
391/391 [==============================] - 47s - loss: 0.3953 - acc: 0.9846 - val_loss: 0.6539 - val_acc: 0.9175
Epoch 132/170
391/391 [==============================] - 47s - loss: 0.3838 - acc: 0.9861 - val_loss: 0.6515 - val_acc: 0.9199
Epoch 133/170
391/391 [==============================] - 47s - loss: 0.3726 - acc: 0.9874 - val_loss: 0.6655 - val_acc: 0.9131
Epoch 134/170
391/391 [==============================] - 47s - loss: 0.3646 - acc: 0.9878 - val_loss: 0.6412 - val_acc: 0.9163
Epoch 135/170
391/391 [==============================] - 47s - loss: 0.3552 - acc: 0.9887 - val_loss: 0.6295 - val_acc: 0.9187
Epoch 136/170
391/391 [==============================] - 47s - loss: 0.3464 - acc: 0.9890 - val_loss: 0.6463 - val_acc: 0.9146
Epoch 137/170
391/391 [==============================] - 47s - loss: 0.3409 - acc: 0.9895 - val_loss: 0.6324 - val_acc: 0.9168
Epoch 138/170
391/391 [==============================] - 47s - loss: 0.3324 - acc: 0.9898 - val_loss: 0.6278 - val_acc: 0.9152
Epoch 139/170
391/391 [==============================] - 47s - loss: 0.3240 - acc: 0.9908 - val_loss: 0.6307 - val_acc: 0.9161
Epoch 140/170
391/391 [==============================] - 47s - loss: 0.3178 - acc: 0.9904 - val_loss: 0.6433 - val_acc: 0.9134
Epoch 141/170
391/391 [==============================] - 47s - loss: 0.3124 - acc: 0.9905 - val_loss: 0.6186 - val_acc: 0.9166
Epoch 142/170
391/391 [==============================] - 47s - loss: 0.3051 - acc: 0.9911 - val_loss: 0.6101 - val_acc: 0.9154
Epoch 143/170
391/391 [==============================] - 47s - loss: 0.2985 - acc: 0.9914 - val_loss: 0.6205 - val_acc: 0.9142
Epoch 144/170
391/391 [==============================] - 47s - loss: 0.2940 - acc: 0.9911 - val_loss: 0.6181 - val_acc: 0.9158
Epoch 145/170
391/391 [==============================] - 47s - loss: 0.2881 - acc: 0.9913 - val_loss: 0.6170 - val_acc: 0.9139
Epoch 146/170
391/391 [==============================] - 47s - loss: 0.2842 - acc: 0.9907 - val_loss: 0.6225 - val_acc: 0.9095
Epoch 147/170
391/391 [==============================] - 47s - loss: 0.2795 - acc: 0.9914 - val_loss: 0.6154 - val_acc: 0.9155
Epoch 148/170
391/391 [==============================] - 47s - loss: 0.2724 - acc: 0.9921 - val_loss: 0.6139 - val_acc: 0.9117
Epoch 149/170
391/391 [==============================] - 47s - loss: 0.2676 - acc: 0.9920 - val_loss: 0.6085 - val_acc: 0.9164
Epoch 150/170
391/391 [==============================] - 47s - loss: 0.2653 - acc: 0.9911 - val_loss: 0.6120 - val_acc: 0.9127
Epoch 151/170
391/391 [==============================] - 47s - loss: 0.2616 - acc: 0.9912 - val_loss: 0.6136 - val_acc: 0.9077
Epoch 152/170
391/391 [==============================] - 47s - loss: 0.2539 - acc: 0.9922 - val_loss: 0.6167 - val_acc: 0.9110
Epoch 153/170
391/391 [==============================] - 47s - loss: 0.2522 - acc: 0.9917 - val_loss: 0.6380 - val_acc: 0.9095
Epoch 154/170
391/391 [==============================] - 47s - loss: 0.2516 - acc: 0.9907 - val_loss: 0.6220 - val_acc: 0.9046
Epoch 155/170
391/391 [==============================] - 47s - loss: 0.2458 - acc: 0.9914 - val_loss: 0.5740 - val_acc: 0.9142
Epoch 156/170
391/391 [==============================] - 47s - loss: 0.2407 - acc: 0.9919 - val_loss: 0.5757 - val_acc: 0.9153
Epoch 157/170
391/391 [==============================] - 47s - loss: 0.2405 - acc: 0.9911 - val_loss: 0.5991 - val_acc: 0.9113
Epoch 158/170
391/391 [==============================] - 47s - loss: 0.2365 - acc: 0.9904 - val_loss: 0.6038 - val_acc: 0.9073
Epoch 159/170
391/391 [==============================] - 47s - loss: 0.2324 - acc: 0.9912 - val_loss: 0.5955 - val_acc: 0.9056
Epoch 160/170
391/391 [==============================] - 47s - loss: 0.2275 - acc: 0.9925 - val_loss: 0.5939 - val_acc: 0.9089
Epoch 161/170
391/391 [==============================] - 47s - loss: 0.2259 - acc: 0.9909 - val_loss: 0.5922 - val_acc: 0.9068
Epoch 162/170
391/391 [==============================] - 47s - loss: 0.2242 - acc: 0.9906 - val_loss: 0.5786 - val_acc: 0.9071
Epoch 163/170
391/391 [==============================] - 47s - loss: 0.2246 - acc: 0.9898 - val_loss: 0.5573 - val_acc: 0.9121
Epoch 164/170
391/391 [==============================] - 47s - loss: 0.2210 - acc: 0.9905 - val_loss: 0.5992 - val_acc: 0.9012
Epoch 165/170
391/391 [==============================] - 47s - loss: 0.2182 - acc: 0.9906 - val_loss: 0.5753 - val_acc: 0.9090
Epoch 166/170
391/391 [==============================] - 47s - loss: 0.2151 - acc: 0.9909 - val_loss: 0.5915 - val_acc: 0.9048
Epoch 167/170
391/391 [==============================] - 47s - loss: 0.2116 - acc: 0.9911 - val_loss: 0.5586 - val_acc: 0.9109
Epoch 168/170
391/391 [==============================] - 47s - loss: 0.2110 - acc: 0.9903 - val_loss: 0.5692 - val_acc: 0.9062
Epoch 169/170
391/391 [==============================] - 47s - loss: 0.2111 - acc: 0.9901 - val_loss: 0.5620 - val_acc: 0.9066
Epoch 170/170
391/391 [==============================] - 47s - loss: 0.2070 - acc: 0.9908 - val_loss: 0.5980 - val_acc: 0.9024
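
Once training finishes, the saved model can be restored and scored on the held-out test set (a minimal sketch using the file written above):

from keras.models import load_model
model = load_model('resnet.h5')
loss, acc = model.evaluate(x_test, y_test, verbose=0)
print(loss, acc)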
