Use Network in Network (NiN) to train on CIFAR-10

Import packages


In [1]:
import keras
import os
import numpy as np
import tensorflow as tf
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D, GlobalAveragePooling2D, AveragePooling2D
from keras.initializers import RandomNormal  
from keras import optimizers
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.layers.normalization import BatchNormalization


Using TensorFlow backend.

Force the process onto a specific GPU and limit its GPU memory usage


In [2]:
# Pin training to a single GPU and cap its memory usage so this process can
# share the machine with other jobs.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"  # expose only GPU index 1 to TensorFlow
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()  # TF1-style session configuration
# Allow this process to allocate at most 30% of the visible GPU's memory.
config.gpu_options.per_process_gpu_memory_fraction = 0.3
set_session(tf.Session(config=config))  # make Keras use the configured session

Initialize some parameters


In [3]:
batch_size    = 128   # samples per gradient update
epochs        = 164   # total training epochs (matches the LR schedule below)
iterations    = 391   # steps per epoch: ceil(50000 train images / 128) = 391
num_classes   = 10    # CIFAR-10 has 10 classes
dropout       = 0.5   # dropout rate applied after each pooling stage
log_filepath = r'./nin'  # TensorBoard log directory

Do some preprocessing on the images


In [4]:
def color_preprocessing(x_train, x_test):
    """Standardize each color channel to zero mean and unit variance.

    Each channel's mean/std is computed over the whole array it belongs to,
    so train and test are standardized independently.
    NOTE(review): standard practice is to normalize the test set with the
    *training* statistics; computed per-set here to preserve the original
    behavior — confirm before changing.

    Args:
        x_train: images of shape (N, H, W, C), any numeric dtype.
        x_test: images of shape (M, H, W, C), any numeric dtype.

    Returns:
        Tuple (x_train, x_test) as new float32 arrays (astype copies, so the
        caller's inputs are not mutated).
    """
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # Loop over channels instead of repeating the formula once per channel;
    # works for any channel count, not just RGB.
    for images in (x_train, x_test):
        for ch in range(images.shape[-1]):
            channel = images[:, :, :, ch]
            images[:, :, :, ch] = (channel - np.mean(channel)) / np.std(channel)

    return x_train, x_test

Set the learning-rate schedule


In [5]:
def scheduler(epoch):
  """Step learning-rate schedule for the LearningRateScheduler callback.

  Returns 0.08 for epochs 0-80, 0.01 for epochs 81-121, and 0.001 from
  epoch 122 onward.
  """
  if epoch >= 122:
    return 0.001
  if epoch >= 81:
    return 0.01
  return 0.08

Define the network


In [6]:
def build_model(input_shape=(32, 32, 3), weight_decay=0.0001):
  """Build and compile the Network-in-Network model.

  Three "mlpconv" stages (one spatial conv followed by two 1x1 convs), the
  first two followed by 3x3/stride-2 max pooling and dropout; the last stage
  ends in a 10-channel 1x1 conv + global average pooling + softmax, one
  channel per class.

  Args:
    input_shape: shape of one input image. Defaults to CIFAR-10's 32x32 RGB
      (previously this read the global ``x_train`` — now an explicit,
      backward-compatible parameter).
    weight_decay: L2 penalty applied to every conv kernel (was hard-coded
      as 0.0001 in nine places).

  Returns:
    A compiled keras Sequential model.
  """
  def conv(filters, kernel, stddev, **kwargs):
    # Every conv shares 'same' padding, L2 regularization and Gaussian init;
    # only filters, kernel size and init stddev vary.
    return Conv2D(filters, kernel, padding='same',
                  kernel_regularizer=keras.regularizers.l2(weight_decay),
                  kernel_initializer=RandomNormal(stddev=stddev), **kwargs)

  model = Sequential()

  # Stage 1 — note the first layer uses a smaller init stddev (0.01) than
  # the rest (0.05), preserved from the original configuration.
  model.add(conv(192, (5, 5), 0.01, input_shape=input_shape))
  model.add(Activation('relu'))
  model.add(conv(160, (1, 1), 0.05))
  model.add(Activation('relu'))
  model.add(conv(96, (1, 1), 0.05))
  model.add(Activation('relu'))
  model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same'))
  model.add(Dropout(dropout))

  # Stage 2
  model.add(conv(192, (5, 5), 0.05))
  model.add(Activation('relu'))
  model.add(conv(192, (1, 1), 0.05))
  model.add(Activation('relu'))
  model.add(conv(192, (1, 1), 0.05))
  model.add(Activation('relu'))
  model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same'))
  model.add(Dropout(dropout))

  # Stage 3 — 10-channel 1x1 conv gives one feature map per class.
  model.add(conv(192, (3, 3), 0.05))
  model.add(Activation('relu'))
  model.add(conv(192, (1, 1), 0.05))
  model.add(Activation('relu'))
  model.add(conv(10, (1, 1), 0.05))
  model.add(Activation('relu'))

  # Global average pooling replaces fully-connected layers (the NiN idea);
  # softmax turns the 10 pooled activations into class probabilities.
  model.add(GlobalAveragePooling2D())
  model.add(Activation('softmax'))

  # NOTE(review): lr=0.1 here is overridden every epoch by the
  # LearningRateScheduler callback (base 0.08), so it never takes effect.
  sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)
  model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
  return model

Load data and build the model


In [7]:
# load data
# Download (if needed) and load CIFAR-10: 50,000 train / 10,000 test RGB images.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# One-hot encode the integer labels for categorical_crossentropy.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# color preprocessing: standardize each RGB channel to zero mean / unit variance.
x_train, x_test = color_preprocessing(x_train, x_test)

# build network and show the layer-by-layer summary for sanity checking.
model = build_model()
print(model.summary())


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_1 (Conv2D)            (None, 32, 32, 192)       14592     
_________________________________________________________________
activation_1 (Activation)    (None, 32, 32, 192)       0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 32, 32, 160)       30880     
_________________________________________________________________
activation_2 (Activation)    (None, 32, 32, 160)       0         
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 32, 32, 96)        15456     
_________________________________________________________________
activation_3 (Activation)    (None, 32, 32, 96)        0         
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 16, 16, 96)        0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 16, 16, 96)        0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 16, 16, 192)       460992    
_________________________________________________________________
activation_4 (Activation)    (None, 16, 16, 192)       0         
_________________________________________________________________
conv2d_5 (Conv2D)            (None, 16, 16, 192)       37056     
_________________________________________________________________
activation_5 (Activation)    (None, 16, 16, 192)       0         
_________________________________________________________________
conv2d_6 (Conv2D)            (None, 16, 16, 192)       37056     
_________________________________________________________________
activation_6 (Activation)    (None, 16, 16, 192)       0         
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 8, 8, 192)         0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 8, 8, 192)         0         
_________________________________________________________________
conv2d_7 (Conv2D)            (None, 8, 8, 192)         331968    
_________________________________________________________________
activation_7 (Activation)    (None, 8, 8, 192)         0         
_________________________________________________________________
conv2d_8 (Conv2D)            (None, 8, 8, 192)         37056     
_________________________________________________________________
activation_8 (Activation)    (None, 8, 8, 192)         0         
_________________________________________________________________
conv2d_9 (Conv2D)            (None, 8, 8, 10)          1930      
_________________________________________________________________
activation_9 (Activation)    (None, 8, 8, 10)          0         
_________________________________________________________________
global_average_pooling2d_1 ( (None, 10)                0         
_________________________________________________________________
activation_10 (Activation)   (None, 10)                0         
=================================================================
Total params: 966,986
Trainable params: 966,986
Non-trainable params: 0
_________________________________________________________________
None

Set up TensorBoard


In [8]:
# set callback
# set callbacks for training:
# TensorBoard scalar logging (histogram_freq=0 disables weight histograms).
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
# Step-wise learning-rate schedule defined by scheduler() above.
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr,tb_cb]

Set up image augmentation


In [9]:
# set data augmentation
# set data augmentation: random horizontal flips plus up to 12.5% (4 px on a
# 32x32 image) shifts in each direction; border pixels exposed by a shift are
# filled with the constant 0.
print('Using real-time data augmentation.')
datagen = ImageDataGenerator(horizontal_flip=True,
        width_shift_range=0.125,height_shift_range=0.125,fill_mode='constant',cval=0.)

# NOTE(review): fit() is only required when featurewise statistics are
# enabled, which they are not here — presumably kept for safety; harmless.
datagen.fit(x_train)


Using real-time data augmentation.

Train the model


In [10]:
# start training
# start training with on-the-fly augmentation; 391 steps x batch 128 is
# roughly one pass over the 50,000 training images per epoch. Validation
# runs on the (un-augmented) preprocessed test set each epoch.
model.fit_generator(datagen.flow(x_train, y_train,batch_size=batch_size),
                    steps_per_epoch=iterations,
                    epochs=epochs,
                    callbacks=cbks,
                    validation_data=(x_test, y_test))
model.save('nin.h5')  # persist architecture + weights for later reuse


Epoch 1/164
391/391 [==============================] - 18s - loss: 2.4706 - acc: 0.1570 - val_loss: 2.3311 - val_acc: 0.2230
Epoch 2/164
391/391 [==============================] - 16s - loss: 2.1973 - acc: 0.2541 - val_loss: 2.0379 - val_acc: 0.3219
Epoch 3/164
391/391 [==============================] - 15s - loss: 1.9590 - acc: 0.3634 - val_loss: 1.9126 - val_acc: 0.3924
Epoch 4/164
391/391 [==============================] - 15s - loss: 1.7976 - acc: 0.4334 - val_loss: 1.6545 - val_acc: 0.4869
Epoch 5/164
391/391 [==============================] - 15s - loss: 1.7011 - acc: 0.4685 - val_loss: 1.6043 - val_acc: 0.4982
Epoch 6/164
391/391 [==============================] - 15s - loss: 1.6109 - acc: 0.4995 - val_loss: 1.5741 - val_acc: 0.5296
Epoch 7/164
391/391 [==============================] - 16s - loss: 1.5364 - acc: 0.5262 - val_loss: 1.4117 - val_acc: 0.5674
Epoch 8/164
391/391 [==============================] - 15s - loss: 1.4598 - acc: 0.5524 - val_loss: 1.5260 - val_acc: 0.5416
Epoch 9/164
391/391 [==============================] - 16s - loss: 1.4097 - acc: 0.5717 - val_loss: 1.4039 - val_acc: 0.5785
Epoch 10/164
391/391 [==============================] - 17s - loss: 1.3531 - acc: 0.5923 - val_loss: 1.3021 - val_acc: 0.6156
Epoch 11/164
391/391 [==============================] - 17s - loss: 1.3094 - acc: 0.6081 - val_loss: 1.3042 - val_acc: 0.6202
Epoch 12/164
391/391 [==============================] - 17s - loss: 1.2635 - acc: 0.6270 - val_loss: 1.2306 - val_acc: 0.6440
Epoch 13/164
391/391 [==============================] - 16s - loss: 1.2307 - acc: 0.6389 - val_loss: 1.1663 - val_acc: 0.6627
Epoch 14/164
391/391 [==============================] - 16s - loss: 1.2021 - acc: 0.6514 - val_loss: 1.1693 - val_acc: 0.6707
Epoch 15/164
391/391 [==============================] - 18s - loss: 1.1762 - acc: 0.6597 - val_loss: 1.1659 - val_acc: 0.6673
Epoch 16/164
391/391 [==============================] - 17s - loss: 1.1516 - acc: 0.6715 - val_loss: 1.0951 - val_acc: 0.6896
Epoch 17/164
391/391 [==============================] - 16s - loss: 1.1325 - acc: 0.6785 - val_loss: 1.1507 - val_acc: 0.6690
Epoch 18/164
391/391 [==============================] - 17s - loss: 1.1189 - acc: 0.6825 - val_loss: 1.0498 - val_acc: 0.7047
Epoch 19/164
391/391 [==============================] - 16s - loss: 1.1008 - acc: 0.6899 - val_loss: 1.0679 - val_acc: 0.7020
Epoch 20/164
391/391 [==============================] - 17s - loss: 1.0894 - acc: 0.6965 - val_loss: 1.0752 - val_acc: 0.7089
Epoch 21/164
391/391 [==============================] - 16s - loss: 1.0867 - acc: 0.6990 - val_loss: 1.0631 - val_acc: 0.7098
Epoch 22/164
391/391 [==============================] - 18s - loss: 1.0724 - acc: 0.7046 - val_loss: 1.0903 - val_acc: 0.7071
Epoch 23/164
391/391 [==============================] - 17s - loss: 1.0657 - acc: 0.7081 - val_loss: 1.0158 - val_acc: 0.7247
Epoch 24/164
391/391 [==============================] - 17s - loss: 1.0520 - acc: 0.7127 - val_loss: 1.0238 - val_acc: 0.7263
Epoch 25/164
391/391 [==============================] - 17s - loss: 1.0462 - acc: 0.7164 - val_loss: 1.0152 - val_acc: 0.7385
Epoch 26/164
391/391 [==============================] - 16s - loss: 1.0384 - acc: 0.7220 - val_loss: 1.0789 - val_acc: 0.7258
Epoch 27/164
391/391 [==============================] - 16s - loss: 1.0342 - acc: 0.7216 - val_loss: 1.0347 - val_acc: 0.7225
Epoch 28/164
391/391 [==============================] - 16s - loss: 1.0351 - acc: 0.7219 - val_loss: 1.0087 - val_acc: 0.7357
Epoch 29/164
391/391 [==============================] - 16s - loss: 1.0268 - acc: 0.7268 - val_loss: 1.0468 - val_acc: 0.7275
Epoch 30/164
391/391 [==============================] - 15s - loss: 1.0193 - acc: 0.7295 - val_loss: 1.0077 - val_acc: 0.7444
Epoch 31/164
391/391 [==============================] - 16s - loss: 1.0167 - acc: 0.7304 - val_loss: 0.9717 - val_acc: 0.7489
Epoch 32/164
391/391 [==============================] - 16s - loss: 1.0149 - acc: 0.7340 - val_loss: 1.0015 - val_acc: 0.7417
Epoch 33/164
391/391 [==============================] - 15s - loss: 1.0121 - acc: 0.7358 - val_loss: 1.1658 - val_acc: 0.7053
Epoch 34/164
391/391 [==============================] - 15s - loss: 1.0084 - acc: 0.7358 - val_loss: 0.9708 - val_acc: 0.7459
Epoch 35/164
391/391 [==============================] - 17s - loss: 1.0054 - acc: 0.7387 - val_loss: 1.0553 - val_acc: 0.7289
Epoch 36/164
391/391 [==============================] - 16s - loss: 0.9976 - acc: 0.7403 - val_loss: 0.9886 - val_acc: 0.7503
Epoch 37/164
391/391 [==============================] - 16s - loss: 0.9994 - acc: 0.7408 - val_loss: 1.0614 - val_acc: 0.7368
Epoch 38/164
391/391 [==============================] - 16s - loss: 0.9998 - acc: 0.7399 - val_loss: 1.0231 - val_acc: 0.7398
Epoch 39/164
391/391 [==============================] - 16s - loss: 0.9955 - acc: 0.7434 - val_loss: 0.9842 - val_acc: 0.7471
Epoch 40/164
391/391 [==============================] - 16s - loss: 0.9960 - acc: 0.7449 - val_loss: 0.9741 - val_acc: 0.7561
Epoch 41/164
391/391 [==============================] - 17s - loss: 0.9899 - acc: 0.7467 - val_loss: 1.0098 - val_acc: 0.7503
Epoch 42/164
391/391 [==============================] - 17s - loss: 0.9914 - acc: 0.7480 - val_loss: 1.0243 - val_acc: 0.7435
Epoch 43/164
391/391 [==============================] - 17s - loss: 0.9904 - acc: 0.7476 - val_loss: 1.0228 - val_acc: 0.7415
Epoch 44/164
391/391 [==============================] - 18s - loss: 0.9946 - acc: 0.7482 - val_loss: 1.0385 - val_acc: 0.7374
Epoch 45/164
391/391 [==============================] - 16s - loss: 0.9854 - acc: 0.7514 - val_loss: 0.9722 - val_acc: 0.7604
Epoch 46/164
391/391 [==============================] - 17s - loss: 0.9864 - acc: 0.7523 - val_loss: 0.9537 - val_acc: 0.7641
Epoch 47/164
391/391 [==============================] - 17s - loss: 0.9853 - acc: 0.7513 - val_loss: 0.9782 - val_acc: 0.7539
Epoch 48/164
391/391 [==============================] - 17s - loss: 0.9810 - acc: 0.7534 - val_loss: 0.9754 - val_acc: 0.7619
Epoch 49/164
391/391 [==============================] - 16s - loss: 0.9808 - acc: 0.7519 - val_loss: 1.0016 - val_acc: 0.7541
Epoch 50/164
391/391 [==============================] - 18s - loss: 0.9779 - acc: 0.7546 - val_loss: 0.9663 - val_acc: 0.7592
Epoch 51/164
391/391 [==============================] - 16s - loss: 0.9818 - acc: 0.7540 - val_loss: 1.0010 - val_acc: 0.7486
Epoch 52/164
391/391 [==============================] - 17s - loss: 0.9802 - acc: 0.7555 - val_loss: 0.9966 - val_acc: 0.7506
Epoch 53/164
391/391 [==============================] - 16s - loss: 0.9782 - acc: 0.7540 - val_loss: 0.9613 - val_acc: 0.7648
Epoch 54/164
391/391 [==============================] - 16s - loss: 0.9817 - acc: 0.7547 - val_loss: 0.9490 - val_acc: 0.7700
Epoch 55/164
391/391 [==============================] - 17s - loss: 0.9723 - acc: 0.7573 - val_loss: 1.0252 - val_acc: 0.7509
Epoch 56/164
391/391 [==============================] - 17s - loss: 0.9724 - acc: 0.7586 - val_loss: 0.9379 - val_acc: 0.7727
Epoch 57/164
391/391 [==============================] - 17s - loss: 0.9731 - acc: 0.7580 - val_loss: 0.9720 - val_acc: 0.7658
Epoch 58/164
391/391 [==============================] - 16s - loss: 0.9693 - acc: 0.7590 - val_loss: 0.9756 - val_acc: 0.7633
Epoch 59/164
391/391 [==============================] - 15s - loss: 0.9719 - acc: 0.7606 - val_loss: 0.9578 - val_acc: 0.7654
Epoch 60/164
391/391 [==============================] - 15s - loss: 0.9689 - acc: 0.7611 - val_loss: 0.9959 - val_acc: 0.7547
Epoch 61/164
391/391 [==============================] - 16s - loss: 0.9707 - acc: 0.7602 - val_loss: 0.9438 - val_acc: 0.7706
Epoch 62/164
391/391 [==============================] - 16s - loss: 0.9657 - acc: 0.7625 - val_loss: 0.9808 - val_acc: 0.7601
Epoch 63/164
391/391 [==============================] - 16s - loss: 0.9667 - acc: 0.7614 - val_loss: 0.9918 - val_acc: 0.7634
Epoch 64/164
391/391 [==============================] - 16s - loss: 0.9610 - acc: 0.7631 - val_loss: 0.9950 - val_acc: 0.7535
Epoch 65/164
391/391 [==============================] - 16s - loss: 0.9658 - acc: 0.7625 - val_loss: 0.9730 - val_acc: 0.7628
Epoch 66/164
391/391 [==============================] - 16s - loss: 0.9654 - acc: 0.7613 - val_loss: 0.9680 - val_acc: 0.7617
Epoch 67/164
391/391 [==============================] - 16s - loss: 0.9599 - acc: 0.7669 - val_loss: 1.0055 - val_acc: 0.7549
Epoch 68/164
391/391 [==============================] - 16s - loss: 0.9673 - acc: 0.7622 - val_loss: 0.9860 - val_acc: 0.7630
Epoch 69/164
391/391 [==============================] - 16s - loss: 0.9642 - acc: 0.7645 - val_loss: 0.9952 - val_acc: 0.7621
Epoch 70/164
391/391 [==============================] - 17s - loss: 0.9625 - acc: 0.7658 - val_loss: 0.9463 - val_acc: 0.7721
Epoch 71/164
391/391 [==============================] - 18s - loss: 0.9587 - acc: 0.7676 - val_loss: 1.0362 - val_acc: 0.7500
Epoch 72/164
391/391 [==============================] - 16s - loss: 0.9656 - acc: 0.7654 - val_loss: 0.9634 - val_acc: 0.7684
Epoch 73/164
391/391 [==============================] - 19s - loss: 0.9598 - acc: 0.7660 - val_loss: 0.9736 - val_acc: 0.7729
Epoch 74/164
391/391 [==============================] - 17s - loss: 0.9619 - acc: 0.7658 - val_loss: 0.9697 - val_acc: 0.7678
Epoch 75/164
391/391 [==============================] - 18s - loss: 0.9596 - acc: 0.7694 - val_loss: 0.9748 - val_acc: 0.7703
Epoch 76/164
391/391 [==============================] - 18s - loss: 0.9580 - acc: 0.7684 - val_loss: 0.9742 - val_acc: 0.7661
Epoch 77/164
391/391 [==============================] - 17s - loss: 0.9607 - acc: 0.7676 - val_loss: 0.9611 - val_acc: 0.7725
Epoch 78/164
391/391 [==============================] - 17s - loss: 0.9611 - acc: 0.7670 - val_loss: 0.9808 - val_acc: 0.7638
Epoch 79/164
391/391 [==============================] - 17s - loss: 0.9564 - acc: 0.7695 - val_loss: 0.9600 - val_acc: 0.7732
Epoch 80/164
391/391 [==============================] - 17s - loss: 0.9628 - acc: 0.7678 - val_loss: 0.9527 - val_acc: 0.7768
Epoch 81/164
391/391 [==============================] - 16s - loss: 0.9613 - acc: 0.7665 - val_loss: 1.0034 - val_acc: 0.7610
Epoch 82/164
391/391 [==============================] - 16s - loss: 0.8639 - acc: 0.7995 - val_loss: 0.8695 - val_acc: 0.7982
Epoch 83/164
391/391 [==============================] - 16s - loss: 0.8168 - acc: 0.8121 - val_loss: 0.8639 - val_acc: 0.8036
Epoch 84/164
391/391 [==============================] - 16s - loss: 0.7994 - acc: 0.8157 - val_loss: 0.8612 - val_acc: 0.8028
Epoch 85/164
391/391 [==============================] - 16s - loss: 0.7876 - acc: 0.8193 - val_loss: 0.8585 - val_acc: 0.8039
Epoch 86/164
391/391 [==============================] - 16s - loss: 0.7827 - acc: 0.8205 - val_loss: 0.8389 - val_acc: 0.8060
Epoch 87/164
391/391 [==============================] - 16s - loss: 0.7738 - acc: 0.8221 - val_loss: 0.8554 - val_acc: 0.7990
Epoch 88/164
391/391 [==============================] - 16s - loss: 0.7650 - acc: 0.8238 - val_loss: 0.8518 - val_acc: 0.8070
Epoch 89/164
391/391 [==============================] - 16s - loss: 0.7607 - acc: 0.8234 - val_loss: 0.8386 - val_acc: 0.8047
Epoch 90/164
391/391 [==============================] - 16s - loss: 0.7506 - acc: 0.8265 - val_loss: 0.8292 - val_acc: 0.8088
Epoch 91/164
391/391 [==============================] - 16s - loss: 0.7501 - acc: 0.8261 - val_loss: 0.8131 - val_acc: 0.8099
Epoch 92/164
391/391 [==============================] - 16s - loss: 0.7440 - acc: 0.8267 - val_loss: 0.8241 - val_acc: 0.8079
Epoch 93/164
391/391 [==============================] - 16s - loss: 0.7367 - acc: 0.8276 - val_loss: 0.8312 - val_acc: 0.8088
Epoch 94/164
391/391 [==============================] - 16s - loss: 0.7324 - acc: 0.8288 - val_loss: 0.8255 - val_acc: 0.8058
Epoch 95/164
391/391 [==============================] - 16s - loss: 0.7271 - acc: 0.8295 - val_loss: 0.8176 - val_acc: 0.8103
Epoch 96/164
391/391 [==============================] - 17s - loss: 0.7264 - acc: 0.8289 - val_loss: 0.8163 - val_acc: 0.8074
Epoch 97/164
391/391 [==============================] - 16s - loss: 0.7214 - acc: 0.8302 - val_loss: 0.8207 - val_acc: 0.8067
Epoch 98/164
391/391 [==============================] - 17s - loss: 0.7124 - acc: 0.8323 - val_loss: 0.8298 - val_acc: 0.8063
Epoch 99/164
391/391 [==============================] - 16s - loss: 0.7116 - acc: 0.8312 - val_loss: 0.8121 - val_acc: 0.8091
Epoch 100/164
391/391 [==============================] - 16s - loss: 0.7129 - acc: 0.8295 - val_loss: 0.8153 - val_acc: 0.8068
Epoch 101/164
391/391 [==============================] - 16s - loss: 0.7079 - acc: 0.8309 - val_loss: 0.7985 - val_acc: 0.8118
Epoch 102/164
391/391 [==============================] - 17s - loss: 0.7006 - acc: 0.8328 - val_loss: 0.7839 - val_acc: 0.8148
Epoch 103/164
391/391 [==============================] - 17s - loss: 0.6975 - acc: 0.8331 - val_loss: 0.7851 - val_acc: 0.8125
Epoch 104/164
391/391 [==============================] - 16s - loss: 0.6961 - acc: 0.8327 - val_loss: 0.7960 - val_acc: 0.8114
Epoch 105/164
391/391 [==============================] - 17s - loss: 0.6909 - acc: 0.8346 - val_loss: 0.7835 - val_acc: 0.8094
Epoch 106/164
391/391 [==============================] - 17s - loss: 0.6888 - acc: 0.8352 - val_loss: 0.7933 - val_acc: 0.8121
Epoch 107/164
391/391 [==============================] - 17s - loss: 0.6907 - acc: 0.8340 - val_loss: 0.7836 - val_acc: 0.8123
Epoch 108/164
391/391 [==============================] - 16s - loss: 0.6835 - acc: 0.8349 - val_loss: 0.7845 - val_acc: 0.8110
Epoch 109/164
391/391 [==============================] - 17s - loss: 0.6829 - acc: 0.8338 - val_loss: 0.7951 - val_acc: 0.8098
Epoch 110/164
391/391 [==============================] - 16s - loss: 0.6775 - acc: 0.8373 - val_loss: 0.7913 - val_acc: 0.8075
Epoch 111/164
391/391 [==============================] - 17s - loss: 0.6773 - acc: 0.8353 - val_loss: 0.7825 - val_acc: 0.8113
Epoch 112/164
391/391 [==============================] - 17s - loss: 0.6735 - acc: 0.8369 - val_loss: 0.7889 - val_acc: 0.8091
Epoch 113/164
391/391 [==============================] - 17s - loss: 0.6690 - acc: 0.8378 - val_loss: 0.7667 - val_acc: 0.8114
Epoch 114/164
391/391 [==============================] - 17s - loss: 0.6715 - acc: 0.8364 - val_loss: 0.8001 - val_acc: 0.8081
Epoch 115/164
391/391 [==============================] - 17s - loss: 0.6663 - acc: 0.8367 - val_loss: 0.7951 - val_acc: 0.8081
Epoch 116/164
391/391 [==============================] - 18s - loss: 0.6668 - acc: 0.8371 - val_loss: 0.7887 - val_acc: 0.8105
Epoch 117/164
391/391 [==============================] - 17s - loss: 0.6600 - acc: 0.8382 - val_loss: 0.7740 - val_acc: 0.8120
Epoch 118/164
391/391 [==============================] - 17s - loss: 0.6594 - acc: 0.8378 - val_loss: 0.7679 - val_acc: 0.8133
Epoch 119/164
391/391 [==============================] - 17s - loss: 0.6564 - acc: 0.8380 - val_loss: 0.7733 - val_acc: 0.8102
Epoch 120/164
391/391 [==============================] - 16s - loss: 0.6514 - acc: 0.8397 - val_loss: 0.7658 - val_acc: 0.8075
Epoch 121/164
391/391 [==============================] - 17s - loss: 0.6552 - acc: 0.8387 - val_loss: 0.7685 - val_acc: 0.8119
Epoch 122/164
391/391 [==============================] - 16s - loss: 0.6524 - acc: 0.8393 - val_loss: 0.7699 - val_acc: 0.8078
Epoch 123/164
391/391 [==============================] - 16s - loss: 0.6341 - acc: 0.8444 - val_loss: 0.7569 - val_acc: 0.8116
Epoch 124/164
391/391 [==============================] - 16s - loss: 0.6263 - acc: 0.8488 - val_loss: 0.7544 - val_acc: 0.8165
Epoch 125/164
391/391 [==============================] - 16s - loss: 0.6261 - acc: 0.8468 - val_loss: 0.7626 - val_acc: 0.8152
Epoch 126/164
391/391 [==============================] - 16s - loss: 0.6238 - acc: 0.8486 - val_loss: 0.7589 - val_acc: 0.8147
Epoch 127/164
391/391 [==============================] - 17s - loss: 0.6206 - acc: 0.8489 - val_loss: 0.7513 - val_acc: 0.8152
Epoch 128/164
391/391 [==============================] - 16s - loss: 0.6198 - acc: 0.8492 - val_loss: 0.7585 - val_acc: 0.8135
Epoch 129/164
391/391 [==============================] - 16s - loss: 0.6196 - acc: 0.8485 - val_loss: 0.7572 - val_acc: 0.8160
Epoch 130/164
391/391 [==============================] - 16s - loss: 0.6181 - acc: 0.8499 - val_loss: 0.7559 - val_acc: 0.8166
Epoch 131/164
391/391 [==============================] - 16s - loss: 0.6182 - acc: 0.8496 - val_loss: 0.7572 - val_acc: 0.8148
Epoch 132/164
391/391 [==============================] - 16s - loss: 0.6159 - acc: 0.8507 - val_loss: 0.7571 - val_acc: 0.8145
Epoch 133/164
391/391 [==============================] - 16s - loss: 0.6185 - acc: 0.8498 - val_loss: 0.7552 - val_acc: 0.8175
Epoch 134/164
391/391 [==============================] - 16s - loss: 0.6172 - acc: 0.8502 - val_loss: 0.7566 - val_acc: 0.8153
Epoch 135/164
391/391 [==============================] - 16s - loss: 0.6155 - acc: 0.8511 - val_loss: 0.7532 - val_acc: 0.8169
Epoch 136/164
391/391 [==============================] - 17s - loss: 0.6127 - acc: 0.8519 - val_loss: 0.7519 - val_acc: 0.8155
Epoch 137/164
391/391 [==============================] - 16s - loss: 0.6149 - acc: 0.8506 - val_loss: 0.7556 - val_acc: 0.8163
Epoch 138/164
391/391 [==============================] - 16s - loss: 0.6145 - acc: 0.8510 - val_loss: 0.7509 - val_acc: 0.8167
Epoch 139/164
391/391 [==============================] - 16s - loss: 0.6139 - acc: 0.8516 - val_loss: 0.7572 - val_acc: 0.8160
Epoch 140/164
391/391 [==============================] - 16s - loss: 0.6148 - acc: 0.8511 - val_loss: 0.7509 - val_acc: 0.8173
Epoch 141/164
391/391 [==============================] - 16s - loss: 0.6132 - acc: 0.8512 - val_loss: 0.7625 - val_acc: 0.8142
Epoch 142/164
391/391 [==============================] - 16s - loss: 0.6139 - acc: 0.8510 - val_loss: 0.7609 - val_acc: 0.8160
Epoch 143/164
391/391 [==============================] - 17s - loss: 0.6114 - acc: 0.8520 - val_loss: 0.7555 - val_acc: 0.8157
Epoch 144/164
391/391 [==============================] - 16s - loss: 0.6112 - acc: 0.8511 - val_loss: 0.7517 - val_acc: 0.8167
Epoch 145/164
391/391 [==============================] - 17s - loss: 0.6130 - acc: 0.8507 - val_loss: 0.7613 - val_acc: 0.8146
Epoch 146/164
391/391 [==============================] - 16s - loss: 0.6111 - acc: 0.8506 - val_loss: 0.7523 - val_acc: 0.8174
Epoch 147/164
391/391 [==============================] - 16s - loss: 0.6140 - acc: 0.8503 - val_loss: 0.7549 - val_acc: 0.8158
Epoch 148/164
391/391 [==============================] - 17s - loss: 0.6082 - acc: 0.8519 - val_loss: 0.7483 - val_acc: 0.8180
Epoch 149/164
391/391 [==============================] - 16s - loss: 0.6109 - acc: 0.8503 - val_loss: 0.7569 - val_acc: 0.8155
Epoch 150/164
391/391 [==============================] - 17s - loss: 0.6089 - acc: 0.8511 - val_loss: 0.7467 - val_acc: 0.8187
Epoch 151/164
391/391 [==============================] - 16s - loss: 0.6093 - acc: 0.8519 - val_loss: 0.7545 - val_acc: 0.8167
Epoch 152/164
391/391 [==============================] - 17s - loss: 0.6066 - acc: 0.8521 - val_loss: 0.7567 - val_acc: 0.8159
Epoch 153/164
391/391 [==============================] - 17s - loss: 0.6064 - acc: 0.8536 - val_loss: 0.7496 - val_acc: 0.8177
Epoch 154/164
391/391 [==============================] - 17s - loss: 0.6059 - acc: 0.8530 - val_loss: 0.7524 - val_acc: 0.8176
Epoch 155/164
391/391 [==============================] - 16s - loss: 0.6089 - acc: 0.8519 - val_loss: 0.7471 - val_acc: 0.8192
Epoch 156/164
391/391 [==============================] - 17s - loss: 0.6049 - acc: 0.8531 - val_loss: 0.7633 - val_acc: 0.8135
Epoch 157/164
391/391 [==============================] - 16s - loss: 0.6040 - acc: 0.8534 - val_loss: 0.7569 - val_acc: 0.8171
Epoch 158/164
391/391 [==============================] - 16s - loss: 0.6070 - acc: 0.8520 - val_loss: 0.7551 - val_acc: 0.8164
Epoch 159/164
391/391 [==============================] - 16s - loss: 0.6062 - acc: 0.8514 - val_loss: 0.7595 - val_acc: 0.8166
Epoch 160/164
391/391 [==============================] - 16s - loss: 0.6054 - acc: 0.8527 - val_loss: 0.7561 - val_acc: 0.8161
Epoch 161/164
391/391 [==============================] - 16s - loss: 0.6035 - acc: 0.8535 - val_loss: 0.7487 - val_acc: 0.8178
Epoch 162/164
391/391 [==============================] - 17s - loss: 0.6076 - acc: 0.8508 - val_loss: 0.7540 - val_acc: 0.8153
Epoch 163/164
391/391 [==============================] - 16s - loss: 0.6059 - acc: 0.8528 - val_loss: 0.7475 - val_acc: 0.8181
Epoch 164/164
391/391 [==============================] - 17s - loss: 0.6041 - acc: 0.8524 - val_loss: 0.7523 - val_acc: 0.8156

In [ ]: