Use a wide ResNet to train on CIFAR-10

Import packages


In [1]:
import os
import keras
import numpy as np
import tensorflow as tf
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv2D, Dense, Input, add, Activation, GlobalAveragePooling2D
from keras.initializers import he_normal
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.models import Model
from keras import optimizers
from keras import regularizers


Using TensorFlow backend.

Force the use of a specific GPU and limit GPU memory usage


In [2]:
# Make only GPU index 1 visible to TensorFlow.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
from keras.backend.tensorflow_backend import set_session
# TF1-style session configuration handed to the Keras backend.
config = tf.ConfigProto()
# Fraction of the visible GPU's memory this process may claim.
# NOTE(review): 1.0 allows the *entire* GPU memory, so this does not
# actually cap usage — lower the fraction (or use allow_growth) if a
# real limit is intended; confirm the original intent.
config.gpu_options.per_process_gpu_memory_fraction = 1.0
set_session(tf.Session(config=config))

Initialize some parameters


In [3]:
depth              = 16      # nominal depth (NOTE(review): not referenced in the visible code below — confirm)
wide               = 8       # widening factor (NOTE(review): not referenced in the visible code below — confirm)
num_classes        = 10      # CIFAR-10 has 10 classes
img_rows, img_cols = 32, 32  # CIFAR-10 image size
img_channels       = 3       # RGB
batch_size         = 128     # mini-batch size for training
epochs             = 200     # total training epochs
iterations         = 391     # steps per epoch: ceil(50000 / 128) = 391
weight_decay       = 0.0005  # L2 regularization strength applied to every Conv/Dense kernel
log_filepath       = r'./w_resnet/'  # TensorBoard log directory

Do some preprocessing on the images


In [4]:
def color_preprocessing(x_train,x_test):
    """Standardize each RGB channel to zero mean and unit variance.

    Both arrays are cast to float32 (new copies; the callers' originals are
    not modified) and every channel is normalized with that array's own
    per-channel mean and standard deviation.

    NOTE(review): the test set is normalized with its *own* statistics
    rather than the training-set statistics — confirm this is intentional.

    Returns:
        (x_train, x_test): the normalized float32 arrays.
    """
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    for images in (x_train, x_test):
        for channel in range(3):
            channel_mean = np.mean(images[:, :, :, channel])
            channel_std = np.std(images[:, :, :, channel])
            images[:, :, :, channel] = (images[:, :, :, channel] - channel_mean) / channel_std
    return x_train, x_test

Set the learning-rate schedule


In [5]:
def scheduler(epoch):
    """Step-decay learning-rate schedule for Keras' LearningRateScheduler.

    Returns 0.1 for epochs up to 60, 0.02 up to 120, 0.004 up to 160,
    and 0.0008 for anything later.
    """
    # (last epoch at this rate, learning rate), in increasing epoch order.
    steps = ((60, 0.1), (120, 0.02), (160, 0.004))
    for last_epoch, lr in steps:
        if epoch <= last_epoch:
            return lr
    return 0.0008

Define the network


In [6]:
def residual_network(img_input,classes_num=10,stack_n=18):
    """Build a pre-activation residual network on top of `img_input`.

    Each residual block applies BN -> ReLU -> 3x3 conv -> BN -> ReLU ->
    3x3 conv and adds the result to a shortcut; when the block downsamples,
    the shortcut is a strided 1x1 projection of the raw input. Three stages
    of `stack_n` blocks run at 16, 32 and 64 channels (the last two stages
    start with a stride-2 downsampling block), followed by BN/ReLU, global
    average pooling and a softmax Dense head.

    Uses the module-level `weight_decay` for L2 regularization on every
    kernel. Note: only shape[1] of each block's `shape` argument is used
    (the output channel count); shape[0] is ignored.

    Args:
        img_input: Keras input tensor of shape (rows, cols, channels).
        classes_num: number of output classes for the softmax head.
        stack_n: residual blocks per stage (18 -> 6*18+2 = 110 weighted layers).

    Returns:
        The softmax output tensor.
    """
    def conv3x3(filters, stride):
        # Regularized, He-initialized 3x3 convolution used throughout.
        return Conv2D(filters,
                      kernel_size=(3,3),
                      strides=stride,
                      padding='same',
                      kernel_initializer=he_normal(),
                      kernel_regularizer=regularizers.l2(weight_decay))

    def residual_block(x,shape,increase_filter=False):
        out_channels = shape[1]
        # Downsampling blocks stride the first conv (and the projection).
        first_stride = (2,2) if increase_filter else (1,1)

        pre_act = Activation('relu')(BatchNormalization()(x))
        y = conv3x3(out_channels, first_stride)(pre_act)
        y = Activation('relu')(BatchNormalization()(y))
        y = conv3x3(out_channels, (1,1))(y)

        if increase_filter:
            # Strided 1x1 projection so the shortcut matches shape.
            shortcut = Conv2D(out_channels,
                              kernel_size=(1,1),
                              strides=(2,2),
                              padding='same',
                              kernel_initializer=he_normal(),
                              kernel_regularizer=regularizers.l2(weight_decay))(x)
        else:
            shortcut = x
        return add([y, shortcut])

    # Stem: plain 3x3 conv to 16 channels, no activation (first block's
    # pre-activation BN/ReLU handles it).
    x = conv3x3(16, (1,1))(img_input)

    # Stage 1: stack_n blocks at 16 channels, 32x32 resolution.
    for _ in range(stack_n):
        x = residual_block(x,[16,16])

    # Stage 2: downsample to 16x16 and widen to 32 channels.
    x = residual_block(x,[16,32],increase_filter=True)
    for _ in range(stack_n - 1):
        x = residual_block(x,[16,32])

    # Stage 3: downsample to 8x8 and widen to 64 channels.
    x = residual_block(x,[32,64],increase_filter=True)
    for _ in range(stack_n - 1):
        x = residual_block(x,[32,64])

    # Final pre-activation, pooling and classifier head.
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = GlobalAveragePooling2D()(x)
    x = Dense(classes_num,
              activation='softmax',
              kernel_initializer=he_normal(),
              kernel_regularizer=regularizers.l2(weight_decay))(x)
    return x

Load the data and build the model


In [7]:
# load data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# One-hot encode the integer labels for categorical_crossentropy.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# color preprocessing
# Per-channel standardization (each set normalized with its own stats).
x_train, x_test = color_preprocessing(x_train, x_test)

# build network
img_input = Input(shape=(img_rows,img_cols,img_channels))
# stack_n=18 -> 6*18+2 = 110 weighted layers (ResNet-110-style graph).
output = residual_network(img_input,num_classes,18)
resnet = Model(img_input, output)
print(resnet.summary())
# set optimizer
# SGD with Nesterov momentum; initial lr 0.1 matches scheduler() for epoch <= 60.
sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
resnet.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])


____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
input_1 (InputLayer)             (None, 32, 32, 3)     0                                            
____________________________________________________________________________________________________
conv2d_1 (Conv2D)                (None, 32, 32, 16)    448         input_1[0][0]                    
____________________________________________________________________________________________________
batch_normalization_1 (BatchNorm (None, 32, 32, 16)    64          conv2d_1[0][0]                   
____________________________________________________________________________________________________
activation_1 (Activation)        (None, 32, 32, 16)    0           batch_normalization_1[0][0]      
____________________________________________________________________________________________________
conv2d_2 (Conv2D)                (None, 32, 32, 16)    2320        activation_1[0][0]               
____________________________________________________________________________________________________
batch_normalization_2 (BatchNorm (None, 32, 32, 16)    64          conv2d_2[0][0]                   
____________________________________________________________________________________________________
activation_2 (Activation)        (None, 32, 32, 16)    0           batch_normalization_2[0][0]      
____________________________________________________________________________________________________
conv2d_3 (Conv2D)                (None, 32, 32, 16)    2320        activation_2[0][0]               
____________________________________________________________________________________________________
add_1 (Add)                      (None, 32, 32, 16)    0           conv2d_3[0][0]                   
                                                                   conv2d_1[0][0]                   
____________________________________________________________________________________________________
batch_normalization_3 (BatchNorm (None, 32, 32, 16)    64          add_1[0][0]                      
____________________________________________________________________________________________________
activation_3 (Activation)        (None, 32, 32, 16)    0           batch_normalization_3[0][0]      
____________________________________________________________________________________________________
conv2d_4 (Conv2D)                (None, 32, 32, 16)    2320        activation_3[0][0]               
____________________________________________________________________________________________________
batch_normalization_4 (BatchNorm (None, 32, 32, 16)    64          conv2d_4[0][0]                   
____________________________________________________________________________________________________
activation_4 (Activation)        (None, 32, 32, 16)    0           batch_normalization_4[0][0]      
____________________________________________________________________________________________________
conv2d_5 (Conv2D)                (None, 32, 32, 16)    2320        activation_4[0][0]               
____________________________________________________________________________________________________
add_2 (Add)                      (None, 32, 32, 16)    0           conv2d_5[0][0]                   
                                                                   add_1[0][0]                      
____________________________________________________________________________________________________
batch_normalization_5 (BatchNorm (None, 32, 32, 16)    64          add_2[0][0]                      
____________________________________________________________________________________________________
activation_5 (Activation)        (None, 32, 32, 16)    0           batch_normalization_5[0][0]      
____________________________________________________________________________________________________
conv2d_6 (Conv2D)                (None, 32, 32, 16)    2320        activation_5[0][0]               
____________________________________________________________________________________________________
batch_normalization_6 (BatchNorm (None, 32, 32, 16)    64          conv2d_6[0][0]                   
____________________________________________________________________________________________________
activation_6 (Activation)        (None, 32, 32, 16)    0           batch_normalization_6[0][0]      
____________________________________________________________________________________________________
conv2d_7 (Conv2D)                (None, 32, 32, 16)    2320        activation_6[0][0]               
____________________________________________________________________________________________________
add_3 (Add)                      (None, 32, 32, 16)    0           conv2d_7[0][0]                   
                                                                   add_2[0][0]                      
____________________________________________________________________________________________________
batch_normalization_7 (BatchNorm (None, 32, 32, 16)    64          add_3[0][0]                      
____________________________________________________________________________________________________
activation_7 (Activation)        (None, 32, 32, 16)    0           batch_normalization_7[0][0]      
____________________________________________________________________________________________________
conv2d_8 (Conv2D)                (None, 32, 32, 16)    2320        activation_7[0][0]               
____________________________________________________________________________________________________
batch_normalization_8 (BatchNorm (None, 32, 32, 16)    64          conv2d_8[0][0]                   
____________________________________________________________________________________________________
activation_8 (Activation)        (None, 32, 32, 16)    0           batch_normalization_8[0][0]      
____________________________________________________________________________________________________
conv2d_9 (Conv2D)                (None, 32, 32, 16)    2320        activation_8[0][0]               
____________________________________________________________________________________________________
add_4 (Add)                      (None, 32, 32, 16)    0           conv2d_9[0][0]                   
                                                                   add_3[0][0]                      
____________________________________________________________________________________________________
batch_normalization_9 (BatchNorm (None, 32, 32, 16)    64          add_4[0][0]                      
____________________________________________________________________________________________________
activation_9 (Activation)        (None, 32, 32, 16)    0           batch_normalization_9[0][0]      
____________________________________________________________________________________________________
conv2d_10 (Conv2D)               (None, 32, 32, 16)    2320        activation_9[0][0]               
____________________________________________________________________________________________________
batch_normalization_10 (BatchNor (None, 32, 32, 16)    64          conv2d_10[0][0]                  
____________________________________________________________________________________________________
activation_10 (Activation)       (None, 32, 32, 16)    0           batch_normalization_10[0][0]     
____________________________________________________________________________________________________
conv2d_11 (Conv2D)               (None, 32, 32, 16)    2320        activation_10[0][0]              
____________________________________________________________________________________________________
add_5 (Add)                      (None, 32, 32, 16)    0           conv2d_11[0][0]                  
                                                                   add_4[0][0]                      
____________________________________________________________________________________________________
batch_normalization_11 (BatchNor (None, 32, 32, 16)    64          add_5[0][0]                      
____________________________________________________________________________________________________
activation_11 (Activation)       (None, 32, 32, 16)    0           batch_normalization_11[0][0]     
____________________________________________________________________________________________________
conv2d_12 (Conv2D)               (None, 32, 32, 16)    2320        activation_11[0][0]              
____________________________________________________________________________________________________
batch_normalization_12 (BatchNor (None, 32, 32, 16)    64          conv2d_12[0][0]                  
____________________________________________________________________________________________________
activation_12 (Activation)       (None, 32, 32, 16)    0           batch_normalization_12[0][0]     
____________________________________________________________________________________________________
conv2d_13 (Conv2D)               (None, 32, 32, 16)    2320        activation_12[0][0]              
____________________________________________________________________________________________________
add_6 (Add)                      (None, 32, 32, 16)    0           conv2d_13[0][0]                  
                                                                   add_5[0][0]                      
____________________________________________________________________________________________________
batch_normalization_13 (BatchNor (None, 32, 32, 16)    64          add_6[0][0]                      
____________________________________________________________________________________________________
activation_13 (Activation)       (None, 32, 32, 16)    0           batch_normalization_13[0][0]     
____________________________________________________________________________________________________
conv2d_14 (Conv2D)               (None, 32, 32, 16)    2320        activation_13[0][0]              
____________________________________________________________________________________________________
batch_normalization_14 (BatchNor (None, 32, 32, 16)    64          conv2d_14[0][0]                  
____________________________________________________________________________________________________
activation_14 (Activation)       (None, 32, 32, 16)    0           batch_normalization_14[0][0]     
____________________________________________________________________________________________________
conv2d_15 (Conv2D)               (None, 32, 32, 16)    2320        activation_14[0][0]              
____________________________________________________________________________________________________
add_7 (Add)                      (None, 32, 32, 16)    0           conv2d_15[0][0]                  
                                                                   add_6[0][0]                      
____________________________________________________________________________________________________
batch_normalization_15 (BatchNor (None, 32, 32, 16)    64          add_7[0][0]                      
____________________________________________________________________________________________________
activation_15 (Activation)       (None, 32, 32, 16)    0           batch_normalization_15[0][0]     
____________________________________________________________________________________________________
conv2d_16 (Conv2D)               (None, 32, 32, 16)    2320        activation_15[0][0]              
____________________________________________________________________________________________________
batch_normalization_16 (BatchNor (None, 32, 32, 16)    64          conv2d_16[0][0]                  
____________________________________________________________________________________________________
activation_16 (Activation)       (None, 32, 32, 16)    0           batch_normalization_16[0][0]     
____________________________________________________________________________________________________
conv2d_17 (Conv2D)               (None, 32, 32, 16)    2320        activation_16[0][0]              
____________________________________________________________________________________________________
add_8 (Add)                      (None, 32, 32, 16)    0           conv2d_17[0][0]                  
                                                                   add_7[0][0]                      
____________________________________________________________________________________________________
batch_normalization_17 (BatchNor (None, 32, 32, 16)    64          add_8[0][0]                      
____________________________________________________________________________________________________
activation_17 (Activation)       (None, 32, 32, 16)    0           batch_normalization_17[0][0]     
____________________________________________________________________________________________________
conv2d_18 (Conv2D)               (None, 32, 32, 16)    2320        activation_17[0][0]              
____________________________________________________________________________________________________
batch_normalization_18 (BatchNor (None, 32, 32, 16)    64          conv2d_18[0][0]                  
____________________________________________________________________________________________________
activation_18 (Activation)       (None, 32, 32, 16)    0           batch_normalization_18[0][0]     
____________________________________________________________________________________________________
conv2d_19 (Conv2D)               (None, 32, 32, 16)    2320        activation_18[0][0]              
____________________________________________________________________________________________________
add_9 (Add)                      (None, 32, 32, 16)    0           conv2d_19[0][0]                  
                                                                   add_8[0][0]                      
____________________________________________________________________________________________________
batch_normalization_19 (BatchNor (None, 32, 32, 16)    64          add_9[0][0]                      
____________________________________________________________________________________________________
activation_19 (Activation)       (None, 32, 32, 16)    0           batch_normalization_19[0][0]     
____________________________________________________________________________________________________
conv2d_20 (Conv2D)               (None, 32, 32, 16)    2320        activation_19[0][0]              
____________________________________________________________________________________________________
batch_normalization_20 (BatchNor (None, 32, 32, 16)    64          conv2d_20[0][0]                  
____________________________________________________________________________________________________
activation_20 (Activation)       (None, 32, 32, 16)    0           batch_normalization_20[0][0]     
____________________________________________________________________________________________________
conv2d_21 (Conv2D)               (None, 32, 32, 16)    2320        activation_20[0][0]              
____________________________________________________________________________________________________
add_10 (Add)                     (None, 32, 32, 16)    0           conv2d_21[0][0]                  
                                                                   add_9[0][0]                      
____________________________________________________________________________________________________
batch_normalization_21 (BatchNor (None, 32, 32, 16)    64          add_10[0][0]                     
____________________________________________________________________________________________________
activation_21 (Activation)       (None, 32, 32, 16)    0           batch_normalization_21[0][0]     
____________________________________________________________________________________________________
conv2d_22 (Conv2D)               (None, 32, 32, 16)    2320        activation_21[0][0]              
____________________________________________________________________________________________________
batch_normalization_22 (BatchNor (None, 32, 32, 16)    64          conv2d_22[0][0]                  
____________________________________________________________________________________________________
activation_22 (Activation)       (None, 32, 32, 16)    0           batch_normalization_22[0][0]     
____________________________________________________________________________________________________
conv2d_23 (Conv2D)               (None, 32, 32, 16)    2320        activation_22[0][0]              
____________________________________________________________________________________________________
add_11 (Add)                     (None, 32, 32, 16)    0           conv2d_23[0][0]                  
                                                                   add_10[0][0]                     
____________________________________________________________________________________________________
batch_normalization_23 (BatchNor (None, 32, 32, 16)    64          add_11[0][0]                     
____________________________________________________________________________________________________
activation_23 (Activation)       (None, 32, 32, 16)    0           batch_normalization_23[0][0]     
____________________________________________________________________________________________________
conv2d_24 (Conv2D)               (None, 32, 32, 16)    2320        activation_23[0][0]              
____________________________________________________________________________________________________
batch_normalization_24 (BatchNor (None, 32, 32, 16)    64          conv2d_24[0][0]                  
____________________________________________________________________________________________________
activation_24 (Activation)       (None, 32, 32, 16)    0           batch_normalization_24[0][0]     
____________________________________________________________________________________________________
conv2d_25 (Conv2D)               (None, 32, 32, 16)    2320        activation_24[0][0]              
____________________________________________________________________________________________________
add_12 (Add)                     (None, 32, 32, 16)    0           conv2d_25[0][0]                  
                                                                   add_11[0][0]                     
____________________________________________________________________________________________________
batch_normalization_25 (BatchNor (None, 32, 32, 16)    64          add_12[0][0]                     
____________________________________________________________________________________________________
activation_25 (Activation)       (None, 32, 32, 16)    0           batch_normalization_25[0][0]     
____________________________________________________________________________________________________
conv2d_26 (Conv2D)               (None, 32, 32, 16)    2320        activation_25[0][0]              
____________________________________________________________________________________________________
batch_normalization_26 (BatchNor (None, 32, 32, 16)    64          conv2d_26[0][0]                  
____________________________________________________________________________________________________
activation_26 (Activation)       (None, 32, 32, 16)    0           batch_normalization_26[0][0]     
____________________________________________________________________________________________________
conv2d_27 (Conv2D)               (None, 32, 32, 16)    2320        activation_26[0][0]              
____________________________________________________________________________________________________
add_13 (Add)                     (None, 32, 32, 16)    0           conv2d_27[0][0]                  
                                                                   add_12[0][0]                     
____________________________________________________________________________________________________
batch_normalization_27 (BatchNor (None, 32, 32, 16)    64          add_13[0][0]                     
____________________________________________________________________________________________________
activation_27 (Activation)       (None, 32, 32, 16)    0           batch_normalization_27[0][0]     
____________________________________________________________________________________________________
conv2d_28 (Conv2D)               (None, 32, 32, 16)    2320        activation_27[0][0]              
____________________________________________________________________________________________________
batch_normalization_28 (BatchNor (None, 32, 32, 16)    64          conv2d_28[0][0]                  
____________________________________________________________________________________________________
activation_28 (Activation)       (None, 32, 32, 16)    0           batch_normalization_28[0][0]     
____________________________________________________________________________________________________
conv2d_29 (Conv2D)               (None, 32, 32, 16)    2320        activation_28[0][0]              
____________________________________________________________________________________________________
add_14 (Add)                     (None, 32, 32, 16)    0           conv2d_29[0][0]                  
                                                                   add_13[0][0]                     
____________________________________________________________________________________________________
batch_normalization_29 (BatchNor (None, 32, 32, 16)    64          add_14[0][0]                     
____________________________________________________________________________________________________
activation_29 (Activation)       (None, 32, 32, 16)    0           batch_normalization_29[0][0]     
____________________________________________________________________________________________________
conv2d_30 (Conv2D)               (None, 32, 32, 16)    2320        activation_29[0][0]              
____________________________________________________________________________________________________
batch_normalization_30 (BatchNor (None, 32, 32, 16)    64          conv2d_30[0][0]                  
____________________________________________________________________________________________________
activation_30 (Activation)       (None, 32, 32, 16)    0           batch_normalization_30[0][0]     
____________________________________________________________________________________________________
conv2d_31 (Conv2D)               (None, 32, 32, 16)    2320        activation_30[0][0]              
____________________________________________________________________________________________________
add_15 (Add)                     (None, 32, 32, 16)    0           conv2d_31[0][0]                  
                                                                   add_14[0][0]                     
____________________________________________________________________________________________________
batch_normalization_31 (BatchNor (None, 32, 32, 16)    64          add_15[0][0]                     
____________________________________________________________________________________________________
activation_31 (Activation)       (None, 32, 32, 16)    0           batch_normalization_31[0][0]     
____________________________________________________________________________________________________
conv2d_32 (Conv2D)               (None, 32, 32, 16)    2320        activation_31[0][0]              
____________________________________________________________________________________________________
batch_normalization_32 (BatchNor (None, 32, 32, 16)    64          conv2d_32[0][0]                  
____________________________________________________________________________________________________
activation_32 (Activation)       (None, 32, 32, 16)    0           batch_normalization_32[0][0]     
____________________________________________________________________________________________________
conv2d_33 (Conv2D)               (None, 32, 32, 16)    2320        activation_32[0][0]              
____________________________________________________________________________________________________
add_16 (Add)                     (None, 32, 32, 16)    0           conv2d_33[0][0]                  
                                                                   add_15[0][0]                     
____________________________________________________________________________________________________
batch_normalization_33 (BatchNor (None, 32, 32, 16)    64          add_16[0][0]                     
____________________________________________________________________________________________________
activation_33 (Activation)       (None, 32, 32, 16)    0           batch_normalization_33[0][0]     
____________________________________________________________________________________________________
conv2d_34 (Conv2D)               (None, 32, 32, 16)    2320        activation_33[0][0]              
____________________________________________________________________________________________________
batch_normalization_34 (BatchNor (None, 32, 32, 16)    64          conv2d_34[0][0]                  
____________________________________________________________________________________________________
activation_34 (Activation)       (None, 32, 32, 16)    0           batch_normalization_34[0][0]     
____________________________________________________________________________________________________
conv2d_35 (Conv2D)               (None, 32, 32, 16)    2320        activation_34[0][0]              
____________________________________________________________________________________________________
add_17 (Add)                     (None, 32, 32, 16)    0           conv2d_35[0][0]                  
                                                                   add_16[0][0]                     
____________________________________________________________________________________________________
batch_normalization_35 (BatchNor (None, 32, 32, 16)    64          add_17[0][0]                     
____________________________________________________________________________________________________
activation_35 (Activation)       (None, 32, 32, 16)    0           batch_normalization_35[0][0]     
____________________________________________________________________________________________________
conv2d_36 (Conv2D)               (None, 32, 32, 16)    2320        activation_35[0][0]              
____________________________________________________________________________________________________
batch_normalization_36 (BatchNor (None, 32, 32, 16)    64          conv2d_36[0][0]                  
____________________________________________________________________________________________________
activation_36 (Activation)       (None, 32, 32, 16)    0           batch_normalization_36[0][0]     
____________________________________________________________________________________________________
conv2d_37 (Conv2D)               (None, 32, 32, 16)    2320        activation_36[0][0]              
____________________________________________________________________________________________________
add_18 (Add)                     (None, 32, 32, 16)    0           conv2d_37[0][0]                  
                                                                   add_17[0][0]                     
____________________________________________________________________________________________________
batch_normalization_37 (BatchNor (None, 32, 32, 16)    64          add_18[0][0]                     
____________________________________________________________________________________________________
activation_37 (Activation)       (None, 32, 32, 16)    0           batch_normalization_37[0][0]     
____________________________________________________________________________________________________
conv2d_38 (Conv2D)               (None, 16, 16, 32)    4640        activation_37[0][0]              
____________________________________________________________________________________________________
batch_normalization_38 (BatchNor (None, 16, 16, 32)    128         conv2d_38[0][0]                  
____________________________________________________________________________________________________
activation_38 (Activation)       (None, 16, 16, 32)    0           batch_normalization_38[0][0]     
____________________________________________________________________________________________________
conv2d_39 (Conv2D)               (None, 16, 16, 32)    9248        activation_38[0][0]              
____________________________________________________________________________________________________
conv2d_40 (Conv2D)               (None, 16, 16, 32)    544         add_18[0][0]                     
____________________________________________________________________________________________________
add_19 (Add)                     (None, 16, 16, 32)    0           conv2d_39[0][0]                  
                                                                   conv2d_40[0][0]                  
____________________________________________________________________________________________________
batch_normalization_39 (BatchNor (None, 16, 16, 32)    128         add_19[0][0]                     
____________________________________________________________________________________________________
activation_39 (Activation)       (None, 16, 16, 32)    0           batch_normalization_39[0][0]     
____________________________________________________________________________________________________
conv2d_41 (Conv2D)               (None, 16, 16, 32)    9248        activation_39[0][0]              
____________________________________________________________________________________________________
batch_normalization_40 (BatchNor (None, 16, 16, 32)    128         conv2d_41[0][0]                  
____________________________________________________________________________________________________
activation_40 (Activation)       (None, 16, 16, 32)    0           batch_normalization_40[0][0]     
____________________________________________________________________________________________________
conv2d_42 (Conv2D)               (None, 16, 16, 32)    9248        activation_40[0][0]              
____________________________________________________________________________________________________
add_20 (Add)                     (None, 16, 16, 32)    0           conv2d_42[0][0]                  
                                                                   add_19[0][0]                     
____________________________________________________________________________________________________
batch_normalization_41 (BatchNor (None, 16, 16, 32)    128         add_20[0][0]                     
____________________________________________________________________________________________________
activation_41 (Activation)       (None, 16, 16, 32)    0           batch_normalization_41[0][0]     
____________________________________________________________________________________________________
conv2d_43 (Conv2D)               (None, 16, 16, 32)    9248        activation_41[0][0]              
____________________________________________________________________________________________________
batch_normalization_42 (BatchNor (None, 16, 16, 32)    128         conv2d_43[0][0]                  
____________________________________________________________________________________________________
activation_42 (Activation)       (None, 16, 16, 32)    0           batch_normalization_42[0][0]     
____________________________________________________________________________________________________
conv2d_44 (Conv2D)               (None, 16, 16, 32)    9248        activation_42[0][0]              
____________________________________________________________________________________________________
add_21 (Add)                     (None, 16, 16, 32)    0           conv2d_44[0][0]                  
                                                                   add_20[0][0]                     
____________________________________________________________________________________________________
batch_normalization_43 (BatchNor (None, 16, 16, 32)    128         add_21[0][0]                     
____________________________________________________________________________________________________
activation_43 (Activation)       (None, 16, 16, 32)    0           batch_normalization_43[0][0]     
____________________________________________________________________________________________________
conv2d_45 (Conv2D)               (None, 16, 16, 32)    9248        activation_43[0][0]              
____________________________________________________________________________________________________
batch_normalization_44 (BatchNor (None, 16, 16, 32)    128         conv2d_45[0][0]                  
____________________________________________________________________________________________________
activation_44 (Activation)       (None, 16, 16, 32)    0           batch_normalization_44[0][0]     
____________________________________________________________________________________________________
conv2d_46 (Conv2D)               (None, 16, 16, 32)    9248        activation_44[0][0]              
____________________________________________________________________________________________________
add_22 (Add)                     (None, 16, 16, 32)    0           conv2d_46[0][0]                  
                                                                   add_21[0][0]                     
____________________________________________________________________________________________________
batch_normalization_45 (BatchNor (None, 16, 16, 32)    128         add_22[0][0]                     
____________________________________________________________________________________________________
activation_45 (Activation)       (None, 16, 16, 32)    0           batch_normalization_45[0][0]     
____________________________________________________________________________________________________
conv2d_47 (Conv2D)               (None, 16, 16, 32)    9248        activation_45[0][0]              
____________________________________________________________________________________________________
batch_normalization_46 (BatchNor (None, 16, 16, 32)    128         conv2d_47[0][0]                  
____________________________________________________________________________________________________
activation_46 (Activation)       (None, 16, 16, 32)    0           batch_normalization_46[0][0]     
____________________________________________________________________________________________________
conv2d_48 (Conv2D)               (None, 16, 16, 32)    9248        activation_46[0][0]              
____________________________________________________________________________________________________
add_23 (Add)                     (None, 16, 16, 32)    0           conv2d_48[0][0]                  
                                                                   add_22[0][0]                     
____________________________________________________________________________________________________
batch_normalization_47 (BatchNor (None, 16, 16, 32)    128         add_23[0][0]                     
____________________________________________________________________________________________________
activation_47 (Activation)       (None, 16, 16, 32)    0           batch_normalization_47[0][0]     
____________________________________________________________________________________________________
conv2d_49 (Conv2D)               (None, 16, 16, 32)    9248        activation_47[0][0]              
____________________________________________________________________________________________________
batch_normalization_48 (BatchNor (None, 16, 16, 32)    128         conv2d_49[0][0]                  
____________________________________________________________________________________________________
activation_48 (Activation)       (None, 16, 16, 32)    0           batch_normalization_48[0][0]     
____________________________________________________________________________________________________
conv2d_50 (Conv2D)               (None, 16, 16, 32)    9248        activation_48[0][0]              
____________________________________________________________________________________________________
add_24 (Add)                     (None, 16, 16, 32)    0           conv2d_50[0][0]                  
                                                                   add_23[0][0]                     
____________________________________________________________________________________________________
batch_normalization_49 (BatchNor (None, 16, 16, 32)    128         add_24[0][0]                     
____________________________________________________________________________________________________
activation_49 (Activation)       (None, 16, 16, 32)    0           batch_normalization_49[0][0]     
____________________________________________________________________________________________________
conv2d_51 (Conv2D)               (None, 16, 16, 32)    9248        activation_49[0][0]              
____________________________________________________________________________________________________
batch_normalization_50 (BatchNor (None, 16, 16, 32)    128         conv2d_51[0][0]                  
____________________________________________________________________________________________________
activation_50 (Activation)       (None, 16, 16, 32)    0           batch_normalization_50[0][0]     
____________________________________________________________________________________________________
conv2d_52 (Conv2D)               (None, 16, 16, 32)    9248        activation_50[0][0]              
____________________________________________________________________________________________________
add_25 (Add)                     (None, 16, 16, 32)    0           conv2d_52[0][0]                  
                                                                   add_24[0][0]                     
____________________________________________________________________________________________________
batch_normalization_51 (BatchNor (None, 16, 16, 32)    128         add_25[0][0]                     
____________________________________________________________________________________________________
activation_51 (Activation)       (None, 16, 16, 32)    0           batch_normalization_51[0][0]     
____________________________________________________________________________________________________
conv2d_53 (Conv2D)               (None, 16, 16, 32)    9248        activation_51[0][0]              
____________________________________________________________________________________________________
batch_normalization_52 (BatchNor (None, 16, 16, 32)    128         conv2d_53[0][0]                  
____________________________________________________________________________________________________
activation_52 (Activation)       (None, 16, 16, 32)    0           batch_normalization_52[0][0]     
____________________________________________________________________________________________________
conv2d_54 (Conv2D)               (None, 16, 16, 32)    9248        activation_52[0][0]              
____________________________________________________________________________________________________
add_26 (Add)                     (None, 16, 16, 32)    0           conv2d_54[0][0]                  
                                                                   add_25[0][0]                     
____________________________________________________________________________________________________
batch_normalization_53 (BatchNor (None, 16, 16, 32)    128         add_26[0][0]                     
____________________________________________________________________________________________________
activation_53 (Activation)       (None, 16, 16, 32)    0           batch_normalization_53[0][0]     
____________________________________________________________________________________________________
conv2d_55 (Conv2D)               (None, 16, 16, 32)    9248        activation_53[0][0]              
____________________________________________________________________________________________________
batch_normalization_54 (BatchNor (None, 16, 16, 32)    128         conv2d_55[0][0]                  
____________________________________________________________________________________________________
activation_54 (Activation)       (None, 16, 16, 32)    0           batch_normalization_54[0][0]     
____________________________________________________________________________________________________
conv2d_56 (Conv2D)               (None, 16, 16, 32)    9248        activation_54[0][0]              
____________________________________________________________________________________________________
add_27 (Add)                     (None, 16, 16, 32)    0           conv2d_56[0][0]                  
                                                                   add_26[0][0]                     
____________________________________________________________________________________________________
batch_normalization_55 (BatchNor (None, 16, 16, 32)    128         add_27[0][0]                     
____________________________________________________________________________________________________
activation_55 (Activation)       (None, 16, 16, 32)    0           batch_normalization_55[0][0]     
____________________________________________________________________________________________________
conv2d_57 (Conv2D)               (None, 16, 16, 32)    9248        activation_55[0][0]              
____________________________________________________________________________________________________
batch_normalization_56 (BatchNor (None, 16, 16, 32)    128         conv2d_57[0][0]                  
____________________________________________________________________________________________________
activation_56 (Activation)       (None, 16, 16, 32)    0           batch_normalization_56[0][0]     
____________________________________________________________________________________________________
conv2d_58 (Conv2D)               (None, 16, 16, 32)    9248        activation_56[0][0]              
____________________________________________________________________________________________________
add_28 (Add)                     (None, 16, 16, 32)    0           conv2d_58[0][0]                  
                                                                   add_27[0][0]                     
____________________________________________________________________________________________________
batch_normalization_57 (BatchNor (None, 16, 16, 32)    128         add_28[0][0]                     
____________________________________________________________________________________________________
activation_57 (Activation)       (None, 16, 16, 32)    0           batch_normalization_57[0][0]     
____________________________________________________________________________________________________
conv2d_59 (Conv2D)               (None, 16, 16, 32)    9248        activation_57[0][0]              
____________________________________________________________________________________________________
batch_normalization_58 (BatchNor (None, 16, 16, 32)    128         conv2d_59[0][0]                  
____________________________________________________________________________________________________
activation_58 (Activation)       (None, 16, 16, 32)    0           batch_normalization_58[0][0]     
____________________________________________________________________________________________________
conv2d_60 (Conv2D)               (None, 16, 16, 32)    9248        activation_58[0][0]              
____________________________________________________________________________________________________
add_29 (Add)                     (None, 16, 16, 32)    0           conv2d_60[0][0]                  
                                                                   add_28[0][0]                     
____________________________________________________________________________________________________
batch_normalization_59 (BatchNor (None, 16, 16, 32)    128         add_29[0][0]                     
____________________________________________________________________________________________________
activation_59 (Activation)       (None, 16, 16, 32)    0           batch_normalization_59[0][0]     
____________________________________________________________________________________________________
conv2d_61 (Conv2D)               (None, 16, 16, 32)    9248        activation_59[0][0]              
____________________________________________________________________________________________________
batch_normalization_60 (BatchNor (None, 16, 16, 32)    128         conv2d_61[0][0]                  
____________________________________________________________________________________________________
activation_60 (Activation)       (None, 16, 16, 32)    0           batch_normalization_60[0][0]     
____________________________________________________________________________________________________
conv2d_62 (Conv2D)               (None, 16, 16, 32)    9248        activation_60[0][0]              
____________________________________________________________________________________________________
add_30 (Add)                     (None, 16, 16, 32)    0           conv2d_62[0][0]                  
                                                                   add_29[0][0]                     
____________________________________________________________________________________________________
batch_normalization_61 (BatchNor (None, 16, 16, 32)    128         add_30[0][0]                     
____________________________________________________________________________________________________
activation_61 (Activation)       (None, 16, 16, 32)    0           batch_normalization_61[0][0]     
____________________________________________________________________________________________________
conv2d_63 (Conv2D)               (None, 16, 16, 32)    9248        activation_61[0][0]              
____________________________________________________________________________________________________
batch_normalization_62 (BatchNor (None, 16, 16, 32)    128         conv2d_63[0][0]                  
____________________________________________________________________________________________________
activation_62 (Activation)       (None, 16, 16, 32)    0           batch_normalization_62[0][0]     
____________________________________________________________________________________________________
conv2d_64 (Conv2D)               (None, 16, 16, 32)    9248        activation_62[0][0]              
____________________________________________________________________________________________________
add_31 (Add)                     (None, 16, 16, 32)    0           conv2d_64[0][0]                  
                                                                   add_30[0][0]                     
____________________________________________________________________________________________________
batch_normalization_63 (BatchNor (None, 16, 16, 32)    128         add_31[0][0]                     
____________________________________________________________________________________________________
activation_63 (Activation)       (None, 16, 16, 32)    0           batch_normalization_63[0][0]     
____________________________________________________________________________________________________
conv2d_65 (Conv2D)               (None, 16, 16, 32)    9248        activation_63[0][0]              
____________________________________________________________________________________________________
batch_normalization_64 (BatchNor (None, 16, 16, 32)    128         conv2d_65[0][0]                  
____________________________________________________________________________________________________
activation_64 (Activation)       (None, 16, 16, 32)    0           batch_normalization_64[0][0]     
____________________________________________________________________________________________________
conv2d_66 (Conv2D)               (None, 16, 16, 32)    9248        activation_64[0][0]              
____________________________________________________________________________________________________
add_32 (Add)                     (None, 16, 16, 32)    0           conv2d_66[0][0]                  
                                                                   add_31[0][0]                     
____________________________________________________________________________________________________
batch_normalization_65 (BatchNor (None, 16, 16, 32)    128         add_32[0][0]                     
____________________________________________________________________________________________________
activation_65 (Activation)       (None, 16, 16, 32)    0           batch_normalization_65[0][0]     
____________________________________________________________________________________________________
conv2d_67 (Conv2D)               (None, 16, 16, 32)    9248        activation_65[0][0]              
____________________________________________________________________________________________________
batch_normalization_66 (BatchNor (None, 16, 16, 32)    128         conv2d_67[0][0]                  
____________________________________________________________________________________________________
activation_66 (Activation)       (None, 16, 16, 32)    0           batch_normalization_66[0][0]     
____________________________________________________________________________________________________
conv2d_68 (Conv2D)               (None, 16, 16, 32)    9248        activation_66[0][0]              
____________________________________________________________________________________________________
add_33 (Add)                     (None, 16, 16, 32)    0           conv2d_68[0][0]                  
                                                                   add_32[0][0]                     
____________________________________________________________________________________________________
batch_normalization_67 (BatchNor (None, 16, 16, 32)    128         add_33[0][0]                     
____________________________________________________________________________________________________
activation_67 (Activation)       (None, 16, 16, 32)    0           batch_normalization_67[0][0]     
____________________________________________________________________________________________________
conv2d_69 (Conv2D)               (None, 16, 16, 32)    9248        activation_67[0][0]              
____________________________________________________________________________________________________
batch_normalization_68 (BatchNor (None, 16, 16, 32)    128         conv2d_69[0][0]                  
____________________________________________________________________________________________________
activation_68 (Activation)       (None, 16, 16, 32)    0           batch_normalization_68[0][0]     
____________________________________________________________________________________________________
conv2d_70 (Conv2D)               (None, 16, 16, 32)    9248        activation_68[0][0]              
____________________________________________________________________________________________________
add_34 (Add)                     (None, 16, 16, 32)    0           conv2d_70[0][0]                  
                                                                   add_33[0][0]                     
____________________________________________________________________________________________________
batch_normalization_69 (BatchNor (None, 16, 16, 32)    128         add_34[0][0]                     
____________________________________________________________________________________________________
activation_69 (Activation)       (None, 16, 16, 32)    0           batch_normalization_69[0][0]     
____________________________________________________________________________________________________
conv2d_71 (Conv2D)               (None, 16, 16, 32)    9248        activation_69[0][0]              
____________________________________________________________________________________________________
batch_normalization_70 (BatchNor (None, 16, 16, 32)    128         conv2d_71[0][0]                  
____________________________________________________________________________________________________
activation_70 (Activation)       (None, 16, 16, 32)    0           batch_normalization_70[0][0]     
____________________________________________________________________________________________________
conv2d_72 (Conv2D)               (None, 16, 16, 32)    9248        activation_70[0][0]              
____________________________________________________________________________________________________
add_35 (Add)                     (None, 16, 16, 32)    0           conv2d_72[0][0]                  
                                                                   add_34[0][0]                     
____________________________________________________________________________________________________
batch_normalization_71 (BatchNor (None, 16, 16, 32)    128         add_35[0][0]                     
____________________________________________________________________________________________________
activation_71 (Activation)       (None, 16, 16, 32)    0           batch_normalization_71[0][0]     
____________________________________________________________________________________________________
conv2d_73 (Conv2D)               (None, 16, 16, 32)    9248        activation_71[0][0]              
____________________________________________________________________________________________________
batch_normalization_72 (BatchNor (None, 16, 16, 32)    128         conv2d_73[0][0]                  
____________________________________________________________________________________________________
activation_72 (Activation)       (None, 16, 16, 32)    0           batch_normalization_72[0][0]     
____________________________________________________________________________________________________
conv2d_74 (Conv2D)               (None, 16, 16, 32)    9248        activation_72[0][0]              
____________________________________________________________________________________________________
add_36 (Add)                     (None, 16, 16, 32)    0           conv2d_74[0][0]                  
                                                                   add_35[0][0]                     
____________________________________________________________________________________________________
batch_normalization_73 (BatchNor (None, 16, 16, 32)    128         add_36[0][0]                     
____________________________________________________________________________________________________
activation_73 (Activation)       (None, 16, 16, 32)    0           batch_normalization_73[0][0]     
____________________________________________________________________________________________________
conv2d_75 (Conv2D)               (None, 8, 8, 64)      18496       activation_73[0][0]              
____________________________________________________________________________________________________
batch_normalization_74 (BatchNor (None, 8, 8, 64)      256         conv2d_75[0][0]                  
____________________________________________________________________________________________________
activation_74 (Activation)       (None, 8, 8, 64)      0           batch_normalization_74[0][0]     
____________________________________________________________________________________________________
conv2d_76 (Conv2D)               (None, 8, 8, 64)      36928       activation_74[0][0]              
____________________________________________________________________________________________________
conv2d_77 (Conv2D)               (None, 8, 8, 64)      2112        add_36[0][0]                     
____________________________________________________________________________________________________
add_37 (Add)                     (None, 8, 8, 64)      0           conv2d_76[0][0]                  
                                                                   conv2d_77[0][0]                  
____________________________________________________________________________________________________
batch_normalization_75 (BatchNor (None, 8, 8, 64)      256         add_37[0][0]                     
____________________________________________________________________________________________________
activation_75 (Activation)       (None, 8, 8, 64)      0           batch_normalization_75[0][0]     
____________________________________________________________________________________________________
conv2d_78 (Conv2D)               (None, 8, 8, 64)      36928       activation_75[0][0]              
____________________________________________________________________________________________________
batch_normalization_76 (BatchNor (None, 8, 8, 64)      256         conv2d_78[0][0]                  
____________________________________________________________________________________________________
activation_76 (Activation)       (None, 8, 8, 64)      0           batch_normalization_76[0][0]     
____________________________________________________________________________________________________
conv2d_79 (Conv2D)               (None, 8, 8, 64)      36928       activation_76[0][0]              
____________________________________________________________________________________________________
add_38 (Add)                     (None, 8, 8, 64)      0           conv2d_79[0][0]                  
                                                                   add_37[0][0]                     
____________________________________________________________________________________________________
batch_normalization_77 (BatchNor (None, 8, 8, 64)      256         add_38[0][0]                     
____________________________________________________________________________________________________
activation_77 (Activation)       (None, 8, 8, 64)      0           batch_normalization_77[0][0]     
____________________________________________________________________________________________________
conv2d_80 (Conv2D)               (None, 8, 8, 64)      36928       activation_77[0][0]              
____________________________________________________________________________________________________
batch_normalization_78 (BatchNor (None, 8, 8, 64)      256         conv2d_80[0][0]                  
____________________________________________________________________________________________________
activation_78 (Activation)       (None, 8, 8, 64)      0           batch_normalization_78[0][0]     
____________________________________________________________________________________________________
conv2d_81 (Conv2D)               (None, 8, 8, 64)      36928       activation_78[0][0]              
____________________________________________________________________________________________________
add_39 (Add)                     (None, 8, 8, 64)      0           conv2d_81[0][0]                  
                                                                   add_38[0][0]                     
____________________________________________________________________________________________________
batch_normalization_79 (BatchNor (None, 8, 8, 64)      256         add_39[0][0]                     
____________________________________________________________________________________________________
activation_79 (Activation)       (None, 8, 8, 64)      0           batch_normalization_79[0][0]     
____________________________________________________________________________________________________
conv2d_82 (Conv2D)               (None, 8, 8, 64)      36928       activation_79[0][0]              
____________________________________________________________________________________________________
batch_normalization_80 (BatchNor (None, 8, 8, 64)      256         conv2d_82[0][0]                  
____________________________________________________________________________________________________
activation_80 (Activation)       (None, 8, 8, 64)      0           batch_normalization_80[0][0]     
____________________________________________________________________________________________________
conv2d_83 (Conv2D)               (None, 8, 8, 64)      36928       activation_80[0][0]              
____________________________________________________________________________________________________
add_40 (Add)                     (None, 8, 8, 64)      0           conv2d_83[0][0]                  
                                                                   add_39[0][0]                     
____________________________________________________________________________________________________
batch_normalization_81 (BatchNor (None, 8, 8, 64)      256         add_40[0][0]                     
____________________________________________________________________________________________________
activation_81 (Activation)       (None, 8, 8, 64)      0           batch_normalization_81[0][0]     
____________________________________________________________________________________________________
conv2d_84 (Conv2D)               (None, 8, 8, 64)      36928       activation_81[0][0]              
____________________________________________________________________________________________________
batch_normalization_82 (BatchNor (None, 8, 8, 64)      256         conv2d_84[0][0]                  
____________________________________________________________________________________________________
activation_82 (Activation)       (None, 8, 8, 64)      0           batch_normalization_82[0][0]     
____________________________________________________________________________________________________
conv2d_85 (Conv2D)               (None, 8, 8, 64)      36928       activation_82[0][0]              
____________________________________________________________________________________________________
add_41 (Add)                     (None, 8, 8, 64)      0           conv2d_85[0][0]                  
                                                                   add_40[0][0]                     
____________________________________________________________________________________________________
batch_normalization_83 (BatchNor (None, 8, 8, 64)      256         add_41[0][0]                     
____________________________________________________________________________________________________
activation_83 (Activation)       (None, 8, 8, 64)      0           batch_normalization_83[0][0]     
____________________________________________________________________________________________________
conv2d_86 (Conv2D)               (None, 8, 8, 64)      36928       activation_83[0][0]              
____________________________________________________________________________________________________
batch_normalization_84 (BatchNor (None, 8, 8, 64)      256         conv2d_86[0][0]                  
____________________________________________________________________________________________________
activation_84 (Activation)       (None, 8, 8, 64)      0           batch_normalization_84[0][0]     
____________________________________________________________________________________________________
conv2d_87 (Conv2D)               (None, 8, 8, 64)      36928       activation_84[0][0]              
____________________________________________________________________________________________________
add_42 (Add)                     (None, 8, 8, 64)      0           conv2d_87[0][0]                  
                                                                   add_41[0][0]                     
____________________________________________________________________________________________________
batch_normalization_85 (BatchNor (None, 8, 8, 64)      256         add_42[0][0]                     
____________________________________________________________________________________________________
activation_85 (Activation)       (None, 8, 8, 64)      0           batch_normalization_85[0][0]     
____________________________________________________________________________________________________
conv2d_88 (Conv2D)               (None, 8, 8, 64)      36928       activation_85[0][0]              
____________________________________________________________________________________________________
batch_normalization_86 (BatchNor (None, 8, 8, 64)      256         conv2d_88[0][0]                  
____________________________________________________________________________________________________
activation_86 (Activation)       (None, 8, 8, 64)      0           batch_normalization_86[0][0]     
____________________________________________________________________________________________________
conv2d_89 (Conv2D)               (None, 8, 8, 64)      36928       activation_86[0][0]              
____________________________________________________________________________________________________
add_43 (Add)                     (None, 8, 8, 64)      0           conv2d_89[0][0]                  
                                                                   add_42[0][0]                     
____________________________________________________________________________________________________
batch_normalization_87 (BatchNor (None, 8, 8, 64)      256         add_43[0][0]                     
____________________________________________________________________________________________________
activation_87 (Activation)       (None, 8, 8, 64)      0           batch_normalization_87[0][0]     
____________________________________________________________________________________________________
conv2d_90 (Conv2D)               (None, 8, 8, 64)      36928       activation_87[0][0]              
____________________________________________________________________________________________________
batch_normalization_88 (BatchNor (None, 8, 8, 64)      256         conv2d_90[0][0]                  
____________________________________________________________________________________________________
activation_88 (Activation)       (None, 8, 8, 64)      0           batch_normalization_88[0][0]     
____________________________________________________________________________________________________
conv2d_91 (Conv2D)               (None, 8, 8, 64)      36928       activation_88[0][0]              
____________________________________________________________________________________________________
add_44 (Add)                     (None, 8, 8, 64)      0           conv2d_91[0][0]                  
                                                                   add_43[0][0]                     
____________________________________________________________________________________________________
batch_normalization_89 (BatchNor (None, 8, 8, 64)      256         add_44[0][0]                     
____________________________________________________________________________________________________
activation_89 (Activation)       (None, 8, 8, 64)      0           batch_normalization_89[0][0]     
____________________________________________________________________________________________________
conv2d_92 (Conv2D)               (None, 8, 8, 64)      36928       activation_89[0][0]              
____________________________________________________________________________________________________
batch_normalization_90 (BatchNor (None, 8, 8, 64)      256         conv2d_92[0][0]                  
____________________________________________________________________________________________________
activation_90 (Activation)       (None, 8, 8, 64)      0           batch_normalization_90[0][0]     
____________________________________________________________________________________________________
conv2d_93 (Conv2D)               (None, 8, 8, 64)      36928       activation_90[0][0]              
____________________________________________________________________________________________________
add_45 (Add)                     (None, 8, 8, 64)      0           conv2d_93[0][0]                  
                                                                   add_44[0][0]                     
____________________________________________________________________________________________________
batch_normalization_91 (BatchNor (None, 8, 8, 64)      256         add_45[0][0]                     
____________________________________________________________________________________________________
activation_91 (Activation)       (None, 8, 8, 64)      0           batch_normalization_91[0][0]     
____________________________________________________________________________________________________
conv2d_94 (Conv2D)               (None, 8, 8, 64)      36928       activation_91[0][0]              
____________________________________________________________________________________________________
batch_normalization_92 (BatchNor (None, 8, 8, 64)      256         conv2d_94[0][0]                  
____________________________________________________________________________________________________
activation_92 (Activation)       (None, 8, 8, 64)      0           batch_normalization_92[0][0]     
____________________________________________________________________________________________________
conv2d_95 (Conv2D)               (None, 8, 8, 64)      36928       activation_92[0][0]              
____________________________________________________________________________________________________
add_46 (Add)                     (None, 8, 8, 64)      0           conv2d_95[0][0]                  
                                                                   add_45[0][0]                     
____________________________________________________________________________________________________
batch_normalization_93 (BatchNor (None, 8, 8, 64)      256         add_46[0][0]                     
____________________________________________________________________________________________________
activation_93 (Activation)       (None, 8, 8, 64)      0           batch_normalization_93[0][0]     
____________________________________________________________________________________________________
conv2d_96 (Conv2D)               (None, 8, 8, 64)      36928       activation_93[0][0]              
____________________________________________________________________________________________________
batch_normalization_94 (BatchNor (None, 8, 8, 64)      256         conv2d_96[0][0]                  
____________________________________________________________________________________________________
activation_94 (Activation)       (None, 8, 8, 64)      0           batch_normalization_94[0][0]     
____________________________________________________________________________________________________
conv2d_97 (Conv2D)               (None, 8, 8, 64)      36928       activation_94[0][0]              
____________________________________________________________________________________________________
add_47 (Add)                     (None, 8, 8, 64)      0           conv2d_97[0][0]                  
                                                                   add_46[0][0]                     
____________________________________________________________________________________________________
batch_normalization_95 (BatchNor (None, 8, 8, 64)      256         add_47[0][0]                     
____________________________________________________________________________________________________
activation_95 (Activation)       (None, 8, 8, 64)      0           batch_normalization_95[0][0]     
____________________________________________________________________________________________________
conv2d_98 (Conv2D)               (None, 8, 8, 64)      36928       activation_95[0][0]              
____________________________________________________________________________________________________
batch_normalization_96 (BatchNor (None, 8, 8, 64)      256         conv2d_98[0][0]                  
____________________________________________________________________________________________________
activation_96 (Activation)       (None, 8, 8, 64)      0           batch_normalization_96[0][0]     
____________________________________________________________________________________________________
conv2d_99 (Conv2D)               (None, 8, 8, 64)      36928       activation_96[0][0]              
____________________________________________________________________________________________________
add_48 (Add)                     (None, 8, 8, 64)      0           conv2d_99[0][0]                  
                                                                   add_47[0][0]                     
____________________________________________________________________________________________________
batch_normalization_97 (BatchNor (None, 8, 8, 64)      256         add_48[0][0]                     
____________________________________________________________________________________________________
activation_97 (Activation)       (None, 8, 8, 64)      0           batch_normalization_97[0][0]     
____________________________________________________________________________________________________
conv2d_100 (Conv2D)              (None, 8, 8, 64)      36928       activation_97[0][0]              
____________________________________________________________________________________________________
batch_normalization_98 (BatchNor (None, 8, 8, 64)      256         conv2d_100[0][0]                 
____________________________________________________________________________________________________
activation_98 (Activation)       (None, 8, 8, 64)      0           batch_normalization_98[0][0]     
____________________________________________________________________________________________________
conv2d_101 (Conv2D)              (None, 8, 8, 64)      36928       activation_98[0][0]              
____________________________________________________________________________________________________
add_49 (Add)                     (None, 8, 8, 64)      0           conv2d_101[0][0]                 
                                                                   add_48[0][0]                     
____________________________________________________________________________________________________
batch_normalization_99 (BatchNor (None, 8, 8, 64)      256         add_49[0][0]                     
____________________________________________________________________________________________________
activation_99 (Activation)       (None, 8, 8, 64)      0           batch_normalization_99[0][0]     
____________________________________________________________________________________________________
conv2d_102 (Conv2D)              (None, 8, 8, 64)      36928       activation_99[0][0]              
____________________________________________________________________________________________________
batch_normalization_100 (BatchNo (None, 8, 8, 64)      256         conv2d_102[0][0]                 
____________________________________________________________________________________________________
activation_100 (Activation)      (None, 8, 8, 64)      0           batch_normalization_100[0][0]    
____________________________________________________________________________________________________
conv2d_103 (Conv2D)              (None, 8, 8, 64)      36928       activation_100[0][0]             
____________________________________________________________________________________________________
add_50 (Add)                     (None, 8, 8, 64)      0           conv2d_103[0][0]                 
                                                                   add_49[0][0]                     
____________________________________________________________________________________________________
batch_normalization_101 (BatchNo (None, 8, 8, 64)      256         add_50[0][0]                     
____________________________________________________________________________________________________
activation_101 (Activation)      (None, 8, 8, 64)      0           batch_normalization_101[0][0]    
____________________________________________________________________________________________________
conv2d_104 (Conv2D)              (None, 8, 8, 64)      36928       activation_101[0][0]             
____________________________________________________________________________________________________
batch_normalization_102 (BatchNo (None, 8, 8, 64)      256         conv2d_104[0][0]                 
____________________________________________________________________________________________________
activation_102 (Activation)      (None, 8, 8, 64)      0           batch_normalization_102[0][0]    
____________________________________________________________________________________________________
conv2d_105 (Conv2D)              (None, 8, 8, 64)      36928       activation_102[0][0]             
____________________________________________________________________________________________________
add_51 (Add)                     (None, 8, 8, 64)      0           conv2d_105[0][0]                 
                                                                   add_50[0][0]                     
____________________________________________________________________________________________________
batch_normalization_103 (BatchNo (None, 8, 8, 64)      256         add_51[0][0]                     
____________________________________________________________________________________________________
activation_103 (Activation)      (None, 8, 8, 64)      0           batch_normalization_103[0][0]    
____________________________________________________________________________________________________
conv2d_106 (Conv2D)              (None, 8, 8, 64)      36928       activation_103[0][0]             
____________________________________________________________________________________________________
batch_normalization_104 (BatchNo (None, 8, 8, 64)      256         conv2d_106[0][0]                 
____________________________________________________________________________________________________
activation_104 (Activation)      (None, 8, 8, 64)      0           batch_normalization_104[0][0]    
____________________________________________________________________________________________________
conv2d_107 (Conv2D)              (None, 8, 8, 64)      36928       activation_104[0][0]             
____________________________________________________________________________________________________
add_52 (Add)                     (None, 8, 8, 64)      0           conv2d_107[0][0]                 
                                                                   add_51[0][0]                     
____________________________________________________________________________________________________
batch_normalization_105 (BatchNo (None, 8, 8, 64)      256         add_52[0][0]                     
____________________________________________________________________________________________________
activation_105 (Activation)      (None, 8, 8, 64)      0           batch_normalization_105[0][0]    
____________________________________________________________________________________________________
conv2d_108 (Conv2D)              (None, 8, 8, 64)      36928       activation_105[0][0]             
____________________________________________________________________________________________________
batch_normalization_106 (BatchNo (None, 8, 8, 64)      256         conv2d_108[0][0]                 
____________________________________________________________________________________________________
activation_106 (Activation)      (None, 8, 8, 64)      0           batch_normalization_106[0][0]    
____________________________________________________________________________________________________
conv2d_109 (Conv2D)              (None, 8, 8, 64)      36928       activation_106[0][0]             
____________________________________________________________________________________________________
add_53 (Add)                     (None, 8, 8, 64)      0           conv2d_109[0][0]                 
                                                                   add_52[0][0]                     
____________________________________________________________________________________________________
batch_normalization_107 (BatchNo (None, 8, 8, 64)      256         add_53[0][0]                     
____________________________________________________________________________________________________
activation_107 (Activation)      (None, 8, 8, 64)      0           batch_normalization_107[0][0]    
____________________________________________________________________________________________________
conv2d_110 (Conv2D)              (None, 8, 8, 64)      36928       activation_107[0][0]             
____________________________________________________________________________________________________
batch_normalization_108 (BatchNo (None, 8, 8, 64)      256         conv2d_110[0][0]                 
____________________________________________________________________________________________________
activation_108 (Activation)      (None, 8, 8, 64)      0           batch_normalization_108[0][0]    
____________________________________________________________________________________________________
conv2d_111 (Conv2D)              (None, 8, 8, 64)      36928       activation_108[0][0]             
____________________________________________________________________________________________________
add_54 (Add)                     (None, 8, 8, 64)      0           conv2d_111[0][0]                 
                                                                   add_53[0][0]                     
____________________________________________________________________________________________________
batch_normalization_109 (BatchNo (None, 8, 8, 64)      256         add_54[0][0]                     
____________________________________________________________________________________________________
activation_109 (Activation)      (None, 8, 8, 64)      0           batch_normalization_109[0][0]    
____________________________________________________________________________________________________
global_average_pooling2d_1 (Glob (None, 64)            0           activation_109[0][0]             
____________________________________________________________________________________________________
dense_1 (Dense)                  (None, 10)            650         global_average_pooling2d_1[0][0] 
====================================================================================================
Total params: 1,742,762
Trainable params: 1,734,666
Non-trainable params: 8,096
____________________________________________________________________________________________________
None

Set up the TensorBoard logging and learning-rate scheduling callbacks


In [8]:
# Configure training callbacks: the learning-rate schedule defined
# earlier, plus TensorBoard logging (histograms disabled to keep
# training fast).
tensorboard_callback = TensorBoard(log_dir=log_filepath, histogram_freq=0)
lr_callback = LearningRateScheduler(scheduler)
cbks = [lr_callback, tensorboard_callback]

Set up real-time image augmentation for the training data


In [9]:
# Real-time data augmentation: random horizontal flips plus small
# width/height shifts (12.5% of the image size), padding shifted-in
# pixels with a constant value of 0.
print('Using real-time data augmentation.')
datagen = ImageDataGenerator(
    horizontal_flip=True,
    width_shift_range=0.125,
    height_shift_range=0.125,
    fill_mode='constant',
    cval=0.,
)

# Fit internal statistics (no-op for these transforms, but kept for
# parity with augmentations that require data-dependent state).
datagen.fit(x_train)


Using real-time data augmentation.

Train the model with the augmented data generator


In [10]:
# Train on augmented batches; iterations (391) is roughly one pass over
# the 50,000 training images at batch_size 128. Validation uses the
# un-augmented test set each epoch. The trained model is saved to disk
# when training finishes.
train_flow = datagen.flow(x_train, y_train, batch_size=batch_size)
resnet.fit_generator(train_flow,
                     steps_per_epoch=iterations,
                     epochs=epochs,
                     validation_data=(x_test, y_test),
                     callbacks=cbks)
resnet.save('resnet.h5')


Epoch 1/200
391/391 [==============================] - 114s - loss: 3.7221 - acc: 0.4765 - val_loss: 3.3557 - val_acc: 0.4166
Epoch 2/200
391/391 [==============================] - 112s - loss: 2.1532 - acc: 0.6466 - val_loss: 2.4888 - val_acc: 0.4732
Epoch 3/200
391/391 [==============================] - 113s - loss: 1.5021 - acc: 0.7092 - val_loss: 1.7706 - val_acc: 0.5795
Epoch 4/200
391/391 [==============================] - 112s - loss: 1.1981 - acc: 0.7442 - val_loss: 1.4300 - val_acc: 0.6442
Epoch 5/200
391/391 [==============================] - 106s - loss: 1.0503 - acc: 0.7649 - val_loss: 1.4330 - val_acc: 0.6529
Epoch 6/200
391/391 [==============================] - 110s - loss: 0.9828 - acc: 0.7770 - val_loss: 2.0536 - val_acc: 0.5101
Epoch 7/200
391/391 [==============================] - 110s - loss: 0.9430 - acc: 0.7855 - val_loss: 1.3311 - val_acc: 0.6671
Epoch 8/200
391/391 [==============================] - 110s - loss: 0.9228 - acc: 0.7932 - val_loss: 1.2216 - val_acc: 0.6986
Epoch 9/200
391/391 [==============================] - 110s - loss: 0.9051 - acc: 0.7993 - val_loss: 1.2340 - val_acc: 0.6820
Epoch 10/200
391/391 [==============================] - 109s - loss: 0.8898 - acc: 0.8031 - val_loss: 1.2620 - val_acc: 0.6759
Epoch 11/200
391/391 [==============================] - 110s - loss: 0.8797 - acc: 0.8096 - val_loss: 1.3042 - val_acc: 0.6901
Epoch 12/200
391/391 [==============================] - 109s - loss: 0.8740 - acc: 0.8117 - val_loss: 2.1322 - val_acc: 0.5162
Epoch 13/200
391/391 [==============================] - 109s - loss: 0.8633 - acc: 0.8169 - val_loss: 1.5838 - val_acc: 0.5999
Epoch 14/200
391/391 [==============================] - 110s - loss: 0.8614 - acc: 0.8190 - val_loss: 1.0743 - val_acc: 0.7485
Epoch 15/200
391/391 [==============================] - 110s - loss: 0.8575 - acc: 0.8217 - val_loss: 1.2212 - val_acc: 0.7108
Epoch 16/200
391/391 [==============================] - 109s - loss: 0.8555 - acc: 0.8251 - val_loss: 1.1657 - val_acc: 0.7264
Epoch 17/200
391/391 [==============================] - 110s - loss: 0.8441 - acc: 0.8289 - val_loss: 1.4427 - val_acc: 0.6646
Epoch 18/200
391/391 [==============================] - 110s - loss: 0.8441 - acc: 0.8304 - val_loss: 1.1430 - val_acc: 0.7402
Epoch 19/200
391/391 [==============================] - 109s - loss: 0.8405 - acc: 0.8322 - val_loss: 1.1276 - val_acc: 0.7641
Epoch 20/200
391/391 [==============================] - 109s - loss: 0.8374 - acc: 0.8345 - val_loss: 1.3026 - val_acc: 0.7005
Epoch 21/200
391/391 [==============================] - 110s - loss: 0.8348 - acc: 0.8365 - val_loss: 1.3896 - val_acc: 0.6671
Epoch 22/200
391/391 [==============================] - 110s - loss: 0.8350 - acc: 0.8374 - val_loss: 1.2375 - val_acc: 0.7274
Epoch 23/200
391/391 [==============================] - 109s - loss: 0.8308 - acc: 0.8390 - val_loss: 1.6758 - val_acc: 0.6486
Epoch 24/200
391/391 [==============================] - 110s - loss: 0.8324 - acc: 0.8388 - val_loss: 1.0699 - val_acc: 0.7666
Epoch 25/200
391/391 [==============================] - 107s - loss: 0.8306 - acc: 0.8401 - val_loss: 1.2792 - val_acc: 0.7001
Epoch 26/200
391/391 [==============================] - 103s - loss: 0.8295 - acc: 0.8414 - val_loss: 1.6877 - val_acc: 0.6765
Epoch 27/200
391/391 [==============================] - 109s - loss: 0.8303 - acc: 0.8417 - val_loss: 1.1691 - val_acc: 0.7500
Epoch 28/200
391/391 [==============================] - 109s - loss: 0.8287 - acc: 0.8430 - val_loss: 1.1080 - val_acc: 0.7626
Epoch 29/200
391/391 [==============================] - 110s - loss: 0.8226 - acc: 0.8448 - val_loss: 1.4507 - val_acc: 0.6917
Epoch 30/200
391/391 [==============================] - 109s - loss: 0.8198 - acc: 0.8464 - val_loss: 1.5472 - val_acc: 0.6748
Epoch 31/200
391/391 [==============================] - 109s - loss: 0.8191 - acc: 0.8457 - val_loss: 1.7173 - val_acc: 0.5721
Epoch 32/200
391/391 [==============================] - 110s - loss: 0.8167 - acc: 0.8473 - val_loss: 1.2567 - val_acc: 0.7235
Epoch 33/200
391/391 [==============================] - 109s - loss: 0.8153 - acc: 0.8487 - val_loss: 1.1637 - val_acc: 0.7503
Epoch 34/200
391/391 [==============================] - 110s - loss: 0.8161 - acc: 0.8479 - val_loss: 2.0922 - val_acc: 0.5621
Epoch 35/200
391/391 [==============================] - 109s - loss: 0.8226 - acc: 0.8456 - val_loss: 1.2515 - val_acc: 0.7059
Epoch 36/200
391/391 [==============================] - 108s - loss: 0.8222 - acc: 0.8469 - val_loss: 1.6522 - val_acc: 0.6499
Epoch 37/200
391/391 [==============================] - 107s - loss: 0.8147 - acc: 0.8487 - val_loss: 1.0204 - val_acc: 0.7847
Epoch 38/200
391/391 [==============================] - 109s - loss: 0.8155 - acc: 0.8500 - val_loss: 1.1426 - val_acc: 0.7671
Epoch 39/200
391/391 [==============================] - 106s - loss: 0.8099 - acc: 0.8499 - val_loss: 1.5346 - val_acc: 0.6906
Epoch 40/200
391/391 [==============================] - 110s - loss: 0.8063 - acc: 0.8531 - val_loss: 1.2345 - val_acc: 0.7192
Epoch 41/200
391/391 [==============================] - 110s - loss: 0.8122 - acc: 0.8506 - val_loss: 1.5357 - val_acc: 0.6835
Epoch 42/200
391/391 [==============================] - 109s - loss: 0.8128 - acc: 0.8522 - val_loss: 1.3661 - val_acc: 0.7056
Epoch 43/200
391/391 [==============================] - 110s - loss: 0.8097 - acc: 0.8530 - val_loss: 1.2586 - val_acc: 0.7020
Epoch 44/200
391/391 [==============================] - 108s - loss: 0.8099 - acc: 0.8548 - val_loss: 1.2076 - val_acc: 0.7342
Epoch 45/200
391/391 [==============================] - 110s - loss: 0.8180 - acc: 0.8506 - val_loss: 1.3901 - val_acc: 0.6738
Epoch 46/200
391/391 [==============================] - 109s - loss: 0.8133 - acc: 0.8519 - val_loss: 1.5853 - val_acc: 0.6572
Epoch 47/200
391/391 [==============================] - 109s - loss: 0.8109 - acc: 0.8530 - val_loss: 1.2498 - val_acc: 0.7123
Epoch 48/200
391/391 [==============================] - 109s - loss: 0.8051 - acc: 0.8550 - val_loss: 1.3644 - val_acc: 0.6878
Epoch 49/200
391/391 [==============================] - 109s - loss: 0.8014 - acc: 0.8561 - val_loss: 0.9559 - val_acc: 0.8129
Epoch 50/200
391/391 [==============================] - 109s - loss: 0.8093 - acc: 0.8546 - val_loss: 0.9756 - val_acc: 0.8035
Epoch 51/200
391/391 [==============================] - 109s - loss: 0.8060 - acc: 0.8542 - val_loss: 1.1276 - val_acc: 0.7581
Epoch 52/200
391/391 [==============================] - 108s - loss: 0.8049 - acc: 0.8560 - val_loss: 1.9803 - val_acc: 0.6145
Epoch 53/200
391/391 [==============================] - 109s - loss: 0.8030 - acc: 0.8560 - val_loss: 1.6679 - val_acc: 0.6435
Epoch 54/200
391/391 [==============================] - 110s - loss: 0.8020 - acc: 0.8561 - val_loss: 1.3478 - val_acc: 0.6883
Epoch 55/200
391/391 [==============================] - 109s - loss: 0.8049 - acc: 0.8563 - val_loss: 1.0042 - val_acc: 0.7938
Epoch 56/200
391/391 [==============================] - 109s - loss: 0.8030 - acc: 0.8570 - val_loss: 1.1175 - val_acc: 0.7638
Epoch 57/200
391/391 [==============================] - 109s - loss: 0.8000 - acc: 0.8568 - val_loss: 0.9276 - val_acc: 0.8137
Epoch 58/200
391/391 [==============================] - 109s - loss: 0.8029 - acc: 0.8568 - val_loss: 1.0181 - val_acc: 0.7878
Epoch 59/200
391/391 [==============================] - 109s - loss: 0.8038 - acc: 0.8563 - val_loss: 1.5373 - val_acc: 0.6748
Epoch 60/200
391/391 [==============================] - 109s - loss: 0.8015 - acc: 0.8574 - val_loss: 1.0405 - val_acc: 0.7815
Epoch 61/200
391/391 [==============================] - 110s - loss: 0.8035 - acc: 0.8564 - val_loss: 1.1440 - val_acc: 0.7371
Epoch 62/200
391/391 [==============================] - 108s - loss: 0.6342 - acc: 0.9088 - val_loss: 0.6465 - val_acc: 0.8993
Epoch 63/200
391/391 [==============================] - 109s - loss: 0.5371 - acc: 0.9285 - val_loss: 0.6638 - val_acc: 0.8862
Epoch 64/200
391/391 [==============================] - 109s - loss: 0.4920 - acc: 0.9326 - val_loss: 0.5824 - val_acc: 0.9011
Epoch 65/200
391/391 [==============================] - 110s - loss: 0.4599 - acc: 0.9339 - val_loss: 0.6756 - val_acc: 0.8668
Epoch 66/200
391/391 [==============================] - 110s - loss: 0.4375 - acc: 0.9363 - val_loss: 0.5897 - val_acc: 0.8920
Epoch 67/200
391/391 [==============================] - 109s - loss: 0.4275 - acc: 0.9354 - val_loss: 0.5947 - val_acc: 0.8877
Epoch 68/200
391/391 [==============================] - 110s - loss: 0.4188 - acc: 0.9351 - val_loss: 0.6070 - val_acc: 0.8827
Epoch 69/200
391/391 [==============================] - 110s - loss: 0.4145 - acc: 0.9357 - val_loss: 0.5661 - val_acc: 0.8893
Epoch 70/200
391/391 [==============================] - 109s - loss: 0.4124 - acc: 0.9352 - val_loss: 0.6256 - val_acc: 0.8730
Epoch 71/200
391/391 [==============================] - 110s - loss: 0.4098 - acc: 0.9349 - val_loss: 0.6319 - val_acc: 0.8667
Epoch 72/200
391/391 [==============================] - 110s - loss: 0.4072 - acc: 0.9340 - val_loss: 0.6598 - val_acc: 0.8631
Epoch 73/200
391/391 [==============================] - 109s - loss: 0.4058 - acc: 0.9351 - val_loss: 0.7759 - val_acc: 0.8352
Epoch 74/200
391/391 [==============================] - 109s - loss: 0.4035 - acc: 0.9379 - val_loss: 0.6437 - val_acc: 0.8715
Epoch 75/200
391/391 [==============================] - 108s - loss: 0.4081 - acc: 0.9350 - val_loss: 0.8033 - val_acc: 0.8245
Epoch 76/200
391/391 [==============================] - 109s - loss: 0.4073 - acc: 0.9357 - val_loss: 0.6071 - val_acc: 0.8820
Epoch 77/200
391/391 [==============================] - 109s - loss: 0.4040 - acc: 0.9382 - val_loss: 0.7804 - val_acc: 0.8375
Epoch 78/200
391/391 [==============================] - 109s - loss: 0.4022 - acc: 0.9385 - val_loss: 0.6067 - val_acc: 0.8813
Epoch 79/200
391/391 [==============================] - 108s - loss: 0.4057 - acc: 0.9371 - val_loss: 0.6015 - val_acc: 0.8755
Epoch 80/200
391/391 [==============================] - 108s - loss: 0.4041 - acc: 0.9396 - val_loss: 0.6003 - val_acc: 0.8835
Epoch 81/200
391/391 [==============================] - 109s - loss: 0.4020 - acc: 0.9396 - val_loss: 0.6055 - val_acc: 0.8815
Epoch 82/200
391/391 [==============================] - 109s - loss: 0.4017 - acc: 0.9399 - val_loss: 0.6359 - val_acc: 0.8729
Epoch 83/200
391/391 [==============================] - 109s - loss: 0.4058 - acc: 0.9391 - val_loss: 0.6605 - val_acc: 0.8722
Epoch 84/200
391/391 [==============================] - 108s - loss: 0.4057 - acc: 0.9389 - val_loss: 0.5437 - val_acc: 0.9010
Epoch 85/200
391/391 [==============================] - 108s - loss: 0.4031 - acc: 0.9413 - val_loss: 0.5967 - val_acc: 0.8873
Epoch 86/200
391/391 [==============================] - 108s - loss: 0.4035 - acc: 0.9419 - val_loss: 0.6589 - val_acc: 0.8719
Epoch 87/200
391/391 [==============================] - 108s - loss: 0.4061 - acc: 0.9411 - val_loss: 0.6505 - val_acc: 0.8775
Epoch 88/200
391/391 [==============================] - 109s - loss: 0.4032 - acc: 0.9431 - val_loss: 0.5868 - val_acc: 0.8931
Epoch 89/200
391/391 [==============================] - 109s - loss: 0.4042 - acc: 0.9418 - val_loss: 0.6613 - val_acc: 0.8728
Epoch 90/200
391/391 [==============================] - 108s - loss: 0.4078 - acc: 0.9423 - val_loss: 0.5856 - val_acc: 0.8887
Epoch 91/200
391/391 [==============================] - 109s - loss: 0.4042 - acc: 0.9427 - val_loss: 0.7632 - val_acc: 0.8519
Epoch 92/200
391/391 [==============================] - 109s - loss: 0.4074 - acc: 0.9430 - val_loss: 0.6274 - val_acc: 0.8787
Epoch 93/200
391/391 [==============================] - 109s - loss: 0.4020 - acc: 0.9454 - val_loss: 0.6875 - val_acc: 0.8708
Epoch 94/200
391/391 [==============================] - 108s - loss: 0.4083 - acc: 0.9427 - val_loss: 0.5926 - val_acc: 0.8960
Epoch 95/200
391/391 [==============================] - 109s - loss: 0.4009 - acc: 0.9451 - val_loss: 0.6024 - val_acc: 0.8889
Epoch 96/200
391/391 [==============================] - 107s - loss: 0.4033 - acc: 0.9439 - val_loss: 0.6697 - val_acc: 0.8711
Epoch 97/200
391/391 [==============================] - 108s - loss: 0.4084 - acc: 0.9434 - val_loss: 0.6286 - val_acc: 0.8831
Epoch 98/200
391/391 [==============================] - 109s - loss: 0.4100 - acc: 0.9435 - val_loss: 0.6963 - val_acc: 0.8636
Epoch 99/200
391/391 [==============================] - 107s - loss: 0.4035 - acc: 0.9453 - val_loss: 0.7445 - val_acc: 0.8523
Epoch 100/200
391/391 [==============================] - 109s - loss: 0.4059 - acc: 0.9446 - val_loss: 0.6245 - val_acc: 0.8838
Epoch 101/200
391/391 [==============================] - 109s - loss: 0.4044 - acc: 0.9454 - val_loss: 0.7119 - val_acc: 0.8669
Epoch 102/200
391/391 [==============================] - 109s - loss: 0.4080 - acc: 0.9444 - val_loss: 0.7061 - val_acc: 0.8647
Epoch 103/200
391/391 [==============================] - 109s - loss: 0.4029 - acc: 0.9458 - val_loss: 0.6828 - val_acc: 0.8745
Epoch 104/200
391/391 [==============================] - 108s - loss: 0.4058 - acc: 0.9453 - val_loss: 0.6866 - val_acc: 0.8709
Epoch 105/200
391/391 [==============================] - 108s - loss: 0.4052 - acc: 0.9454 - val_loss: 0.6014 - val_acc: 0.8930
Epoch 106/200
391/391 [==============================] - 104s - loss: 0.4031 - acc: 0.9462 - val_loss: 0.7301 - val_acc: 0.8646
Epoch 107/200
391/391 [==============================] - 108s - loss: 0.4076 - acc: 0.9460 - val_loss: 0.7135 - val_acc: 0.8665
Epoch 108/200
391/391 [==============================] - 108s - loss: 0.4017 - acc: 0.9479 - val_loss: 0.6825 - val_acc: 0.8721
Epoch 109/200
391/391 [==============================] - 109s - loss: 0.4033 - acc: 0.9471 - val_loss: 0.6054 - val_acc: 0.8906
Epoch 110/200
391/391 [==============================] - 108s - loss: 0.4047 - acc: 0.9465 - val_loss: 0.6367 - val_acc: 0.8835
Epoch 111/200
391/391 [==============================] - 108s - loss: 0.4044 - acc: 0.9475 - val_loss: 0.6727 - val_acc: 0.8785
Epoch 112/200
391/391 [==============================] - 108s - loss: 0.4067 - acc: 0.9461 - val_loss: 0.7126 - val_acc: 0.8693
Epoch 113/200
391/391 [==============================] - 107s - loss: 0.4071 - acc: 0.9468 - val_loss: 0.7594 - val_acc: 0.8527
Epoch 114/200
391/391 [==============================] - 108s - loss: 0.4043 - acc: 0.9474 - val_loss: 0.6592 - val_acc: 0.8739
Epoch 115/200
391/391 [==============================] - 109s - loss: 0.4072 - acc: 0.9469 - val_loss: 0.6470 - val_acc: 0.8832
Epoch 116/200
391/391 [==============================] - 109s - loss: 0.4066 - acc: 0.9463 - val_loss: 0.7073 - val_acc: 0.8594
Epoch 117/200
391/391 [==============================] - 108s - loss: 0.4103 - acc: 0.9461 - val_loss: 0.7326 - val_acc: 0.8578
Epoch 118/200
391/391 [==============================] - 109s - loss: 0.4034 - acc: 0.9477 - val_loss: 0.6988 - val_acc: 0.8711
Epoch 119/200
391/391 [==============================] - 109s - loss: 0.4031 - acc: 0.9474 - val_loss: 0.6277 - val_acc: 0.8872
Epoch 120/200
391/391 [==============================] - 108s - loss: 0.4010 - acc: 0.9481 - val_loss: 0.6015 - val_acc: 0.8940
Epoch 121/200
391/391 [==============================] - 109s - loss: 0.4057 - acc: 0.9473 - val_loss: 0.6519 - val_acc: 0.8799
Epoch 122/200
391/391 [==============================] - 109s - loss: 0.3338 - acc: 0.9736 - val_loss: 0.4715 - val_acc: 0.9342
Epoch 123/200
391/391 [==============================] - 109s - loss: 0.2936 - acc: 0.9843 - val_loss: 0.4849 - val_acc: 0.9355
Epoch 124/200
391/391 [==============================] - 107s - loss: 0.2768 - acc: 0.9883 - val_loss: 0.4799 - val_acc: 0.9350
Epoch 125/200
391/391 [==============================] - 109s - loss: 0.2677 - acc: 0.9897 - val_loss: 0.4672 - val_acc: 0.9398
Epoch 126/200
391/391 [==============================] - 109s - loss: 0.2560 - acc: 0.9910 - val_loss: 0.4834 - val_acc: 0.9337
Epoch 127/200
391/391 [==============================] - 109s - loss: 0.2468 - acc: 0.9924 - val_loss: 0.4756 - val_acc: 0.9375
Epoch 128/200
391/391 [==============================] - 109s - loss: 0.2385 - acc: 0.9926 - val_loss: 0.4839 - val_acc: 0.9373
Epoch 129/200
391/391 [==============================] - 109s - loss: 0.2326 - acc: 0.9929 - val_loss: 0.4915 - val_acc: 0.9325
Epoch 130/200
391/391 [==============================] - 108s - loss: 0.2260 - acc: 0.9933 - val_loss: 0.4778 - val_acc: 0.9356
Epoch 131/200
391/391 [==============================] - 109s - loss: 0.2202 - acc: 0.9938 - val_loss: 0.4770 - val_acc: 0.9338
Epoch 132/200
391/391 [==============================] - 110s - loss: 0.2146 - acc: 0.9940 - val_loss: 0.4810 - val_acc: 0.9337
Epoch 133/200
391/391 [==============================] - 109s - loss: 0.2088 - acc: 0.9942 - val_loss: 0.4614 - val_acc: 0.9359
Epoch 134/200
391/391 [==============================] - 109s - loss: 0.2064 - acc: 0.9935 - val_loss: 0.4928 - val_acc: 0.9276
Epoch 135/200
391/391 [==============================] - 109s - loss: 0.2018 - acc: 0.9933 - val_loss: 0.4874 - val_acc: 0.9287
Epoch 136/200
391/391 [==============================] - 110s - loss: 0.1974 - acc: 0.9936 - val_loss: 0.4627 - val_acc: 0.9328
Epoch 137/200
391/391 [==============================] - 110s - loss: 0.1927 - acc: 0.9937 - val_loss: 0.4803 - val_acc: 0.9307
Epoch 138/200
391/391 [==============================] - 109s - loss: 0.1889 - acc: 0.9934 - val_loss: 0.4905 - val_acc: 0.9269
Epoch 139/200
391/391 [==============================] - 108s - loss: 0.1855 - acc: 0.9934 - val_loss: 0.5106 - val_acc: 0.9240
Epoch 140/200
391/391 [==============================] - 109s - loss: 0.1870 - acc: 0.9917 - val_loss: 0.4740 - val_acc: 0.9282
Epoch 141/200
391/391 [==============================] - 109s - loss: 0.1801 - acc: 0.9932 - val_loss: 0.4846 - val_acc: 0.9259
Epoch 142/200
391/391 [==============================] - 109s - loss: 0.1779 - acc: 0.9926 - val_loss: 0.4950 - val_acc: 0.9241
Epoch 143/200
391/391 [==============================] - 107s - loss: 0.1759 - acc: 0.9927 - val_loss: 0.4684 - val_acc: 0.9286
Epoch 144/200
391/391 [==============================] - 109s - loss: 0.1749 - acc: 0.9917 - val_loss: 0.4671 - val_acc: 0.9261
Epoch 145/200
391/391 [==============================] - 109s - loss: 0.1734 - acc: 0.9917 - val_loss: 0.4823 - val_acc: 0.9248
Epoch 146/200
391/391 [==============================] - 109s - loss: 0.1683 - acc: 0.9925 - val_loss: 0.4565 - val_acc: 0.9302
Epoch 147/200
391/391 [==============================] - 109s - loss: 0.1688 - acc: 0.9913 - val_loss: 0.4480 - val_acc: 0.9254
Epoch 148/200
391/391 [==============================] - 109s - loss: 0.1679 - acc: 0.9907 - val_loss: 0.4583 - val_acc: 0.9256
Epoch 149/200
391/391 [==============================] - 109s - loss: 0.1627 - acc: 0.9923 - val_loss: 0.4732 - val_acc: 0.9231
Epoch 150/200
391/391 [==============================] - 109s - loss: 0.1648 - acc: 0.9901 - val_loss: 0.4676 - val_acc: 0.9239
Epoch 151/200
391/391 [==============================] - 109s - loss: 0.1670 - acc: 0.9893 - val_loss: 0.4622 - val_acc: 0.9192
Epoch 152/200
391/391 [==============================] - 109s - loss: 0.1665 - acc: 0.9896 - val_loss: 0.4553 - val_acc: 0.9232
Epoch 153/200
391/391 [==============================] - 110s - loss: 0.1645 - acc: 0.9892 - val_loss: 0.4660 - val_acc: 0.9225
Epoch 154/200
391/391 [==============================] - 108s - loss: 0.1623 - acc: 0.9899 - val_loss: 0.4655 - val_acc: 0.9210
Epoch 155/200
391/391 [==============================] - 109s - loss: 0.1636 - acc: 0.9891 - val_loss: 0.4544 - val_acc: 0.9233
Epoch 156/200
391/391 [==============================] - 109s - loss: 0.1607 - acc: 0.9894 - val_loss: 0.4424 - val_acc: 0.9286
Epoch 157/200
391/391 [==============================] - 108s - loss: 0.1595 - acc: 0.9896 - val_loss: 0.4691 - val_acc: 0.9169
Epoch 158/200
391/391 [==============================] - 109s - loss: 0.1631 - acc: 0.9881 - val_loss: 0.4111 - val_acc: 0.9279
Epoch 159/200
391/391 [==============================] - 109s - loss: 0.1595 - acc: 0.9888 - val_loss: 0.4532 - val_acc: 0.9218
Epoch 160/200
391/391 [==============================] - 109s - loss: 0.1587 - acc: 0.9889 - val_loss: 0.4247 - val_acc: 0.9263
Epoch 161/200
391/391 [==============================] - 105s - loss: 0.1579 - acc: 0.9888 - val_loss: 0.4569 - val_acc: 0.9176
Epoch 162/200
391/391 [==============================] - 109s - loss: 0.1444 - acc: 0.9938 - val_loss: 0.3759 - val_acc: 0.9388
Epoch 163/200
391/391 [==============================] - 109s - loss: 0.1338 - acc: 0.9971 - val_loss: 0.3810 - val_acc: 0.9418
Epoch 164/200
391/391 [==============================] - 109s - loss: 0.1316 - acc: 0.9976 - val_loss: 0.3801 - val_acc: 0.9414
Epoch 165/200
391/391 [==============================] - 109s - loss: 0.1294 - acc: 0.9980 - val_loss: 0.3848 - val_acc: 0.9423
Epoch 166/200
391/391 [==============================] - 105s - loss: 0.1285 - acc: 0.9979 - val_loss: 0.3828 - val_acc: 0.9433
Epoch 167/200
391/391 [==============================] - 109s - loss: 0.1259 - acc: 0.9987 - val_loss: 0.3828 - val_acc: 0.9417
Epoch 168/200
391/391 [==============================] - 109s - loss: 0.1242 - acc: 0.9989 - val_loss: 0.3864 - val_acc: 0.9426
Epoch 169/200
391/391 [==============================] - 103s - loss: 0.1236 - acc: 0.9988 - val_loss: 0.3897 - val_acc: 0.9428
Epoch 170/200
391/391 [==============================] - 108s - loss: 0.1223 - acc: 0.9990 - val_loss: 0.3843 - val_acc: 0.9446
Epoch 171/200
391/391 [==============================] - 109s - loss: 0.1217 - acc: 0.9989 - val_loss: 0.3895 - val_acc: 0.9432
Epoch 172/200
391/391 [==============================] - 110s - loss: 0.1212 - acc: 0.9990 - val_loss: 0.3954 - val_acc: 0.9426
Epoch 173/200
391/391 [==============================] - 109s - loss: 0.1202 - acc: 0.9990 - val_loss: 0.3991 - val_acc: 0.9431
Epoch 174/200
391/391 [==============================] - 109s - loss: 0.1196 - acc: 0.9990 - val_loss: 0.4007 - val_acc: 0.9437
Epoch 175/200
391/391 [==============================] - 107s - loss: 0.1184 - acc: 0.9991 - val_loss: 0.4015 - val_acc: 0.9426
Epoch 176/200
391/391 [==============================] - 109s - loss: 0.1176 - acc: 0.9990 - val_loss: 0.4009 - val_acc: 0.9436
Epoch 177/200
391/391 [==============================] - 105s - loss: 0.1163 - acc: 0.9994 - val_loss: 0.4033 - val_acc: 0.9441
Epoch 178/200
391/391 [==============================] - 107s - loss: 0.1161 - acc: 0.9991 - val_loss: 0.4052 - val_acc: 0.9433
Epoch 179/200
391/391 [==============================] - 110s - loss: 0.1152 - acc: 0.9993 - val_loss: 0.4039 - val_acc: 0.9436
Epoch 180/200
391/391 [==============================] - 108s - loss: 0.1142 - acc: 0.9994 - val_loss: 0.4048 - val_acc: 0.9436
Epoch 181/200
391/391 [==============================] - 109s - loss: 0.1133 - acc: 0.9995 - val_loss: 0.4045 - val_acc: 0.9433
Epoch 182/200
391/391 [==============================] - 110s - loss: 0.1130 - acc: 0.9992 - val_loss: 0.4020 - val_acc: 0.9442
Epoch 183/200
391/391 [==============================] - 110s - loss: 0.1118 - acc: 0.9996 - val_loss: 0.4041 - val_acc: 0.9449
Epoch 184/200
391/391 [==============================] - 110s - loss: 0.1120 - acc: 0.9991 - val_loss: 0.4055 - val_acc: 0.9439
Epoch 185/200
391/391 [==============================] - 109s - loss: 0.1113 - acc: 0.9992 - val_loss: 0.4088 - val_acc: 0.9429
Epoch 186/200
391/391 [==============================] - 109s - loss: 0.1102 - acc: 0.9995 - val_loss: 0.4047 - val_acc: 0.9441
Epoch 187/200
391/391 [==============================] - 109s - loss: 0.1092 - acc: 0.9995 - val_loss: 0.4056 - val_acc: 0.9435
Epoch 188/200
391/391 [==============================] - 109s - loss: 0.1092 - acc: 0.9994 - val_loss: 0.4052 - val_acc: 0.9435
Epoch 189/200
391/391 [==============================] - 110s - loss: 0.1081 - acc: 0.9994 - val_loss: 0.4134 - val_acc: 0.9430
Epoch 190/200
391/391 [==============================] - 109s - loss: 0.1072 - acc: 0.9996 - val_loss: 0.4108 - val_acc: 0.9434
Epoch 191/200
391/391 [==============================] - 109s - loss: 0.1066 - acc: 0.9995 - val_loss: 0.4072 - val_acc: 0.9436
Epoch 192/200
391/391 [==============================] - 109s - loss: 0.1060 - acc: 0.9996 - val_loss: 0.4045 - val_acc: 0.9444
Epoch 193/200
391/391 [==============================] - 107s - loss: 0.1056 - acc: 0.9994 - val_loss: 0.4099 - val_acc: 0.9424
Epoch 194/200
391/391 [==============================] - 109s - loss: 0.1047 - acc: 0.9996 - val_loss: 0.4118 - val_acc: 0.9433
Epoch 195/200
391/391 [==============================] - 109s - loss: 0.1043 - acc: 0.9995 - val_loss: 0.4065 - val_acc: 0.9440
Epoch 196/200
391/391 [==============================] - 110s - loss: 0.1043 - acc: 0.9992 - val_loss: 0.4044 - val_acc: 0.9438
Epoch 197/200
391/391 [==============================] - 109s - loss: 0.1036 - acc: 0.9994 - val_loss: 0.4090 - val_acc: 0.9438
Epoch 198/200
391/391 [==============================] - 109s - loss: 0.1027 - acc: 0.9995 - val_loss: 0.4108 - val_acc: 0.9422
Epoch 199/200
391/391 [==============================] - 110s - loss: 0.1020 - acc: 0.9995 - val_loss: 0.4129 - val_acc: 0.9423
Epoch 200/200
391/391 [==============================] - 109s - loss: 0.1014 - acc: 0.9994 - val_loss: 0.4056 - val_acc: 0.9452

In [ ]: