In [1]:
import os
import keras
import numpy as np
import tensorflow as tf
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv2D, Dense, Input, add, Activation, GlobalAveragePooling2D
from keras.initializers import he_normal
from keras.callbacks import LearningRateScheduler, TensorBoard
from keras.models import Model
from keras import optimizers
from keras import regularizers
Using TensorFlow backend.
In [2]:
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 1.0
set_session(tf.Session(config=config))
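If the GPU is shared with other processes, a gentler option under the same TF 1.x session API is to let TensorFlow grow its allocation on demand instead of reserving everything; a minimal sketch:
config = tf.ConfigProto()
config.gpu_options.allow_growth = True  # allocate GPU memory as needed rather than all at once
set_session(tf.Session(config=config))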
In [3]:
depth = 16
wide = 8
num_classes = 10
img_rows, img_cols = 32, 32
img_channels = 3
batch_size = 128
epochs = 200
iterations = 391
weight_decay = 0.0005
log_filepath = r'./w_resnet/'
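A note on iterations: 391 is ceil(50000 / 128), the number of batches needed to cover CIFAR-10's 50,000 training images once per epoch. A small sketch that derives it instead of hard-coding it:
import math
iterations = int(math.ceil(50000 / float(batch_size)))  # ceil(50000 / 128) = 391 batches per epoch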
In [4]:
def color_preprocessing(x_train, x_test):
    # standardize each RGB channel to zero mean and unit variance
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    for ch in range(3):
        x_train[:, :, :, ch] = (x_train[:, :, :, ch] - np.mean(x_train[:, :, :, ch])) / np.std(x_train[:, :, :, ch])
        x_test[:, :, :, ch] = (x_test[:, :, :, ch] - np.mean(x_test[:, :, :, ch])) / np.std(x_test[:, :, :, ch])
    return x_train, x_test
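Note that the test set above is standardized with its own statistics. The more common convention is to reuse the training-set mean and standard deviation for both splits, so test preprocessing matches what the model saw during training. A minimal sketch of that variant (the function name is ours, intended as a drop-in replacement):
def color_preprocessing_train_stats(x_train, x_test):
    # normalize both splits with statistics computed on the training set only
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    for ch in range(3):
        mean, std = np.mean(x_train[:, :, :, ch]), np.std(x_train[:, :, :, ch])
        x_train[:, :, :, ch] = (x_train[:, :, :, ch] - mean) / std
        x_test[:, :, :, ch] = (x_test[:, :, :, ch] - mean) / std
    return x_train, x_test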
In [5]:
def scheduler(epoch):
    # step-decay schedule: 0.1 -> 0.02 -> 0.004 -> 0.0008 (divide by 5 at each step)
    if epoch <= 60:
        return 0.1
    if epoch <= 120:
        return 0.02
    if epoch <= 160:
        return 0.004
    return 0.0008
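Keras calls the scheduler with a zero-based epoch index, so the first drop takes effect on the epoch printed as 62/200 (index 61), which matches the sharp loss drop visible in the training log below. A quick sanity check of the boundaries:
for e in (0, 60, 61, 120, 121, 160, 161):
    print('epoch index %d -> lr %g' % (e, scheduler(e)))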
In [6]:
def wide_residual_network(img_input, classes_num, depth, k):
    print('Wide-Resnet %dx%d' % (depth, k))
    n_filters = [16, 16 * k, 32 * k, 64 * k]
    n_stack = (depth - 4) // 6  # integer division: depth = 6 * n_stack + 4
    in_filters = 16

    def conv3x3(x, filters):
        return Conv2D(filters=filters, kernel_size=(3, 3), strides=(1, 1), padding='same',
                      kernel_initializer=he_normal(),
                      kernel_regularizer=regularizers.l2(weight_decay))(x)

    def residual_block(x, out_filters, increase_filter=False):
        if increase_filter:
            first_stride = (2, 2)  # downsample when the filter count increases
        else:
            first_stride = (1, 1)
        # pre-activation ordering (BN -> ReLU -> conv), as in Wide ResNet
        pre_bn = BatchNormalization()(x)
        pre_relu = Activation('relu')(pre_bn)
        conv_1 = Conv2D(out_filters, kernel_size=(3, 3), strides=first_stride, padding='same',
                        kernel_initializer=he_normal(),
                        kernel_regularizer=regularizers.l2(weight_decay))(pre_relu)
        bn_1 = BatchNormalization()(conv_1)
        relu1 = Activation('relu')(bn_1)
        conv_2 = Conv2D(out_filters, kernel_size=(3, 3), strides=(1, 1), padding='same',
                        kernel_initializer=he_normal(),
                        kernel_regularizer=regularizers.l2(weight_decay))(relu1)
        if increase_filter or in_filters != out_filters:
            # 1x1 projection shortcut to match the residual branch's shape
            projection = Conv2D(out_filters, kernel_size=(1, 1), strides=first_stride, padding='same',
                                kernel_initializer=he_normal(),
                                kernel_regularizer=regularizers.l2(weight_decay))(x)
            block = add([conv_2, projection])
        else:
            block = add([conv_2, x])
        return block

    def wide_residual_layer(x, out_filters, increase_filter=False):
        x = residual_block(x, out_filters, increase_filter)
        # NOTE: this assignment creates a variable local to wide_residual_layer and
        # never updates the enclosing in_filters, so residual_block always sees
        # in_filters == 16 and applies a 1x1 projection in every block (consistent
        # with the summary below). Add `nonlocal in_filters` here to get identity
        # shortcuts in the non-downsampling blocks instead.
        in_filters = out_filters
        for _ in range(1, n_stack):
            x = residual_block(x, out_filters)
        return x

    x = conv3x3(img_input, n_filters[0])
    x = wide_residual_layer(x, n_filters[1])
    x = wide_residual_layer(x, n_filters[2], increase_filter=True)
    x = wide_residual_layer(x, n_filters[3], increase_filter=True)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = GlobalAveragePooling2D()(x)
    x = Dense(classes_num, activation='softmax',
              kernel_initializer=he_normal(),
              kernel_regularizer=regularizers.l2(weight_decay))(x)
    return x
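Wide ResNets require depth = 6n + 4 for n residual blocks per stage; depth 16 gives n = 2, matching the two blocks per stage in the summary below. A small guard one could add before building (our addition, not in the original):
assert (depth - 4) % 6 == 0, 'Wide ResNet depth must be of the form 6n + 4'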
In [7]:
# load data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# color preprocessing
x_train, x_test = color_preprocessing(x_train, x_test)
# build network
img_input = Input(shape=(img_rows,img_cols,img_channels))
output = wide_residual_network(img_input,num_classes,depth,wide)
resnet = Model(img_input, output)
resnet.summary()  # summary() prints the table itself and returns None, so no print() wrapper is needed
# set optimizer
sgd = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
resnet.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
Wide-Resnet 16x8
____________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
====================================================================================================
input_1 (InputLayer) (None, 32, 32, 3) 0
____________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 32, 32, 16) 448 input_1[0][0]
____________________________________________________________________________________________________
batch_normalization_1 (BatchNorm (None, 32, 32, 16) 64 conv2d_1[0][0]
____________________________________________________________________________________________________
activation_1 (Activation) (None, 32, 32, 16) 0 batch_normalization_1[0][0]
____________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 32, 32, 128) 18560 activation_1[0][0]
____________________________________________________________________________________________________
batch_normalization_2 (BatchNorm (None, 32, 32, 128) 512 conv2d_2[0][0]
____________________________________________________________________________________________________
activation_2 (Activation) (None, 32, 32, 128) 0 batch_normalization_2[0][0]
____________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, 32, 32, 128) 147584 activation_2[0][0]
____________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 32, 32, 128) 2176 conv2d_1[0][0]
____________________________________________________________________________________________________
add_1 (Add) (None, 32, 32, 128) 0 conv2d_3[0][0]
conv2d_4[0][0]
____________________________________________________________________________________________________
batch_normalization_3 (BatchNorm (None, 32, 32, 128) 512 add_1[0][0]
____________________________________________________________________________________________________
activation_3 (Activation) (None, 32, 32, 128) 0 batch_normalization_3[0][0]
____________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, 32, 32, 128) 147584 activation_3[0][0]
____________________________________________________________________________________________________
batch_normalization_4 (BatchNorm (None, 32, 32, 128) 512 conv2d_5[0][0]
____________________________________________________________________________________________________
activation_4 (Activation) (None, 32, 32, 128) 0 batch_normalization_4[0][0]
____________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, 32, 32, 128) 147584 activation_4[0][0]
____________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, 32, 32, 128) 16512 add_1[0][0]
____________________________________________________________________________________________________
add_2 (Add) (None, 32, 32, 128) 0 conv2d_6[0][0]
conv2d_7[0][0]
____________________________________________________________________________________________________
batch_normalization_5 (BatchNorm (None, 32, 32, 128) 512 add_2[0][0]
____________________________________________________________________________________________________
activation_5 (Activation) (None, 32, 32, 128) 0 batch_normalization_5[0][0]
____________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, 16, 16, 256) 295168 activation_5[0][0]
____________________________________________________________________________________________________
batch_normalization_6 (BatchNorm (None, 16, 16, 256) 1024 conv2d_8[0][0]
____________________________________________________________________________________________________
activation_6 (Activation) (None, 16, 16, 256) 0 batch_normalization_6[0][0]
____________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, 16, 16, 256) 590080 activation_6[0][0]
____________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, 16, 16, 256) 33024 add_2[0][0]
____________________________________________________________________________________________________
add_3 (Add) (None, 16, 16, 256) 0 conv2d_9[0][0]
conv2d_10[0][0]
____________________________________________________________________________________________________
batch_normalization_7 (BatchNorm (None, 16, 16, 256) 1024 add_3[0][0]
____________________________________________________________________________________________________
activation_7 (Activation) (None, 16, 16, 256) 0 batch_normalization_7[0][0]
____________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, 16, 16, 256) 590080 activation_7[0][0]
____________________________________________________________________________________________________
batch_normalization_8 (BatchNorm (None, 16, 16, 256) 1024 conv2d_11[0][0]
____________________________________________________________________________________________________
activation_8 (Activation) (None, 16, 16, 256) 0 batch_normalization_8[0][0]
____________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, 16, 16, 256) 590080 activation_8[0][0]
____________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, 16, 16, 256) 65792 add_3[0][0]
____________________________________________________________________________________________________
add_4 (Add) (None, 16, 16, 256) 0 conv2d_12[0][0]
conv2d_13[0][0]
____________________________________________________________________________________________________
batch_normalization_9 (BatchNorm (None, 16, 16, 256) 1024 add_4[0][0]
____________________________________________________________________________________________________
activation_9 (Activation) (None, 16, 16, 256) 0 batch_normalization_9[0][0]
____________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, 8, 8, 512) 1180160 activation_9[0][0]
____________________________________________________________________________________________________
batch_normalization_10 (BatchNor (None, 8, 8, 512) 2048 conv2d_14[0][0]
____________________________________________________________________________________________________
activation_10 (Activation) (None, 8, 8, 512) 0 batch_normalization_10[0][0]
____________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, 8, 8, 512) 2359808 activation_10[0][0]
____________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, 8, 8, 512) 131584 add_4[0][0]
____________________________________________________________________________________________________
add_5 (Add) (None, 8, 8, 512) 0 conv2d_15[0][0]
conv2d_16[0][0]
____________________________________________________________________________________________________
batch_normalization_11 (BatchNor (None, 8, 8, 512) 2048 add_5[0][0]
____________________________________________________________________________________________________
activation_11 (Activation) (None, 8, 8, 512) 0 batch_normalization_11[0][0]
____________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, 8, 8, 512) 2359808 activation_11[0][0]
____________________________________________________________________________________________________
batch_normalization_12 (BatchNor (None, 8, 8, 512) 2048 conv2d_17[0][0]
____________________________________________________________________________________________________
activation_12 (Activation) (None, 8, 8, 512) 0 batch_normalization_12[0][0]
____________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, 8, 8, 512) 2359808 activation_12[0][0]
____________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, 8, 8, 512) 262656 add_5[0][0]
____________________________________________________________________________________________________
add_6 (Add) (None, 8, 8, 512) 0 conv2d_18[0][0]
conv2d_19[0][0]
____________________________________________________________________________________________________
batch_normalization_13 (BatchNor (None, 8, 8, 512) 2048 add_6[0][0]
____________________________________________________________________________________________________
activation_13 (Activation) (None, 8, 8, 512) 0 batch_normalization_13[0][0]
____________________________________________________________________________________________________
global_average_pooling2d_1 (Glob (None, 512) 0 activation_13[0][0]
____________________________________________________________________________________________________
dense_1 (Dense) (None, 10) 5130 global_average_pooling2d_1[0][0]
====================================================================================================
Total params: 11,318,026
Trainable params: 11,310,826
Non-trainable params: 7,200
____________________________________________________________________________________________________
In [8]:
# set callback
tb_cb = TensorBoard(log_dir=log_filepath, histogram_freq=0)
change_lr = LearningRateScheduler(scheduler)
cbks = [change_lr,tb_cb]
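With the TensorBoard callback logging to ./w_resnet/, the loss and accuracy curves can be inspected during or after training by running `tensorboard --logdir=./w_resnet/` in a shell (assuming TensorBoard is installed alongside TensorFlow).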
In [9]:
# set data augmentation
print('Using real-time data augmentation.')
datagen = ImageDataGenerator(horizontal_flip=True,
                             width_shift_range=0.125,   # 0.125 * 32 px = 4-pixel shifts
                             height_shift_range=0.125,
                             fill_mode='constant', cval=0.)
datagen.fit(x_train)  # fit() is only required for featurewise statistics (featurewise_center,
                      # featurewise_std_normalization, zca_whitening); harmless for these settings
Using real-time data augmentation.
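The 0.125 shift range on 32x32 images corresponds to the usual ±4-pixel translate-and-pad augmentation used with Wide ResNets on CIFAR. A quick way to eyeball one augmented batch before committing to a full run (batch size 4 chosen arbitrarily here):
x_batch, y_batch = next(datagen.flow(x_train, y_train, batch_size=4))
print(x_batch.shape, y_batch.shape)  # (4, 32, 32, 3) (4, 10)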
In [10]:
# start training
resnet.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                     steps_per_epoch=iterations,
                     epochs=epochs,
                     callbacks=cbks,
                     validation_data=(x_test, y_test))
resnet.save('w_resnet.h5')
Epoch 1/200
391/391 [==============================] - 96s - loss: 4.4075 - acc: 0.4804 - val_loss: 4.1354 - val_acc: 0.3378
Epoch 2/200
391/391 [==============================] - 95s - loss: 2.4165 - acc: 0.6655 - val_loss: 2.3527 - val_acc: 0.5880
Epoch 3/200
391/391 [==============================] - 95s - loss: 1.5872 - acc: 0.7328 - val_loss: 1.4609 - val_acc: 0.7111
Epoch 4/200
391/391 [==============================] - 95s - loss: 1.2037 - acc: 0.7688 - val_loss: 1.6196 - val_acc: 0.6410
Epoch 5/200
391/391 [==============================] - 95s - loss: 1.0243 - acc: 0.7902 - val_loss: 1.3698 - val_acc: 0.6869
Epoch 6/200
391/391 [==============================] - 95s - loss: 0.9346 - acc: 0.8034 - val_loss: 1.9835 - val_acc: 0.5403
Epoch 7/200
391/391 [==============================] - 95s - loss: 0.8879 - acc: 0.8147 - val_loss: 1.2901 - val_acc: 0.6886
Epoch 8/200
391/391 [==============================] - 95s - loss: 0.8605 - acc: 0.8256 - val_loss: 1.5942 - val_acc: 0.6561
Epoch 9/200
391/391 [==============================] - 95s - loss: 0.8515 - acc: 0.8263 - val_loss: 1.2741 - val_acc: 0.6961
Epoch 10/200
391/391 [==============================] - 95s - loss: 0.8435 - acc: 0.8340 - val_loss: 1.6098 - val_acc: 0.5896
Epoch 11/200
391/391 [==============================] - 95s - loss: 0.8348 - acc: 0.8369 - val_loss: 2.1589 - val_acc: 0.5885
Epoch 12/200
391/391 [==============================] - 95s - loss: 0.8270 - acc: 0.8424 - val_loss: 3.2221 - val_acc: 0.3909
Epoch 13/200
391/391 [==============================] - 95s - loss: 0.8221 - acc: 0.8443 - val_loss: 2.1609 - val_acc: 0.5754
Epoch 14/200
391/391 [==============================] - 95s - loss: 0.8218 - acc: 0.8472 - val_loss: 1.4106 - val_acc: 0.6993
Epoch 15/200
391/391 [==============================] - 95s - loss: 0.8165 - acc: 0.8496 - val_loss: 1.4305 - val_acc: 0.6983
Epoch 16/200
391/391 [==============================] - 95s - loss: 0.8171 - acc: 0.8517 - val_loss: 3.1949 - val_acc: 0.3627
Epoch 17/200
391/391 [==============================] - 95s - loss: 0.8202 - acc: 0.8532 - val_loss: 1.1631 - val_acc: 0.7497
Epoch 18/200
391/391 [==============================] - 95s - loss: 0.8153 - acc: 0.8569 - val_loss: 1.7845 - val_acc: 0.6336
Epoch 19/200
391/391 [==============================] - 95s - loss: 0.8204 - acc: 0.8555 - val_loss: 1.4997 - val_acc: 0.6715
Epoch 20/200
391/391 [==============================] - 95s - loss: 0.8137 - acc: 0.8601 - val_loss: 1.8548 - val_acc: 0.5485
Epoch 21/200
391/391 [==============================] - 95s - loss: 0.8156 - acc: 0.8620 - val_loss: 2.0106 - val_acc: 0.5358
Epoch 22/200
391/391 [==============================] - 95s - loss: 0.8124 - acc: 0.8625 - val_loss: 1.8329 - val_acc: 0.6135
Epoch 23/200
391/391 [==============================] - 94s - loss: 0.8213 - acc: 0.8617 - val_loss: 1.2566 - val_acc: 0.7443
Epoch 24/200
391/391 [==============================] - 95s - loss: 0.8198 - acc: 0.8633 - val_loss: 1.7346 - val_acc: 0.6260
Epoch 25/200
391/391 [==============================] - 95s - loss: 0.8164 - acc: 0.8642 - val_loss: 1.3256 - val_acc: 0.7103
Epoch 26/200
391/391 [==============================] - 95s - loss: 0.8265 - acc: 0.8645 - val_loss: 2.2422 - val_acc: 0.5790
Epoch 27/200
391/391 [==============================] - 95s - loss: 0.8200 - acc: 0.8672 - val_loss: 1.4835 - val_acc: 0.6842
Epoch 28/200
391/391 [==============================] - 95s - loss: 0.8212 - acc: 0.8684 - val_loss: 1.7027 - val_acc: 0.6395
Epoch 29/200
391/391 [==============================] - 95s - loss: 0.8285 - acc: 0.8661 - val_loss: 1.1054 - val_acc: 0.7828
Epoch 30/200
391/391 [==============================] - 94s - loss: 0.8265 - acc: 0.8675 - val_loss: 1.2764 - val_acc: 0.7313
Epoch 31/200
391/391 [==============================] - 94s - loss: 0.8293 - acc: 0.8688 - val_loss: 1.3347 - val_acc: 0.6922
Epoch 32/200
391/391 [==============================] - 94s - loss: 0.8318 - acc: 0.8692 - val_loss: 1.4631 - val_acc: 0.7093
Epoch 33/200
391/391 [==============================] - 95s - loss: 0.8345 - acc: 0.8671 - val_loss: 1.2052 - val_acc: 0.7393
Epoch 34/200
391/391 [==============================] - 95s - loss: 0.8337 - acc: 0.8702 - val_loss: 1.6182 - val_acc: 0.6680
Epoch 35/200
391/391 [==============================] - 94s - loss: 0.8394 - acc: 0.8683 - val_loss: 1.4481 - val_acc: 0.7020
Epoch 36/200
391/391 [==============================] - 95s - loss: 0.8380 - acc: 0.8705 - val_loss: 1.5244 - val_acc: 0.6976
Epoch 37/200
391/391 [==============================] - 94s - loss: 0.8395 - acc: 0.8728 - val_loss: 2.7175 - val_acc: 0.4919
Epoch 38/200
391/391 [==============================] - 94s - loss: 0.8378 - acc: 0.8710 - val_loss: 1.6640 - val_acc: 0.6604
Epoch 39/200
391/391 [==============================] - 94s - loss: 0.8401 - acc: 0.8728 - val_loss: 1.2292 - val_acc: 0.7504
Epoch 40/200
391/391 [==============================] - 94s - loss: 0.8405 - acc: 0.8723 - val_loss: 1.6107 - val_acc: 0.6700
Epoch 41/200
391/391 [==============================] - 94s - loss: 0.8450 - acc: 0.8727 - val_loss: 1.4512 - val_acc: 0.7010
Epoch 42/200
391/391 [==============================] - 94s - loss: 0.8550 - acc: 0.8703 - val_loss: 1.2574 - val_acc: 0.7497
Epoch 43/200
391/391 [==============================] - 94s - loss: 0.8515 - acc: 0.8721 - val_loss: 1.1825 - val_acc: 0.7654
Epoch 44/200
391/391 [==============================] - 95s - loss: 0.8458 - acc: 0.8741 - val_loss: 1.6954 - val_acc: 0.6594
Epoch 45/200
391/391 [==============================] - 94s - loss: 0.8454 - acc: 0.8734 - val_loss: 2.5856 - val_acc: 0.5722
Epoch 46/200
391/391 [==============================] - 94s - loss: 0.8505 - acc: 0.8746 - val_loss: 1.3945 - val_acc: 0.7124
Epoch 47/200
391/391 [==============================] - 94s - loss: 0.8510 - acc: 0.8742 - val_loss: 1.2465 - val_acc: 0.7371
Epoch 48/200
391/391 [==============================] - 94s - loss: 0.8502 - acc: 0.8741 - val_loss: 1.4684 - val_acc: 0.6858
Epoch 49/200
391/391 [==============================] - 94s - loss: 0.8617 - acc: 0.8741 - val_loss: 1.1473 - val_acc: 0.7787
Epoch 50/200
391/391 [==============================] - 95s - loss: 0.8599 - acc: 0.8745 - val_loss: 2.0643 - val_acc: 0.5653
Epoch 51/200
391/391 [==============================] - 94s - loss: 0.8568 - acc: 0.8743 - val_loss: 1.2619 - val_acc: 0.7586
Epoch 52/200
391/391 [==============================] - 94s - loss: 0.8586 - acc: 0.8761 - val_loss: 1.4897 - val_acc: 0.6829
Epoch 53/200
391/391 [==============================] - 94s - loss: 0.8581 - acc: 0.8760 - val_loss: 1.2824 - val_acc: 0.7504
Epoch 54/200
391/391 [==============================] - 94s - loss: 0.8632 - acc: 0.8754 - val_loss: 1.4287 - val_acc: 0.7031
Epoch 55/200
391/391 [==============================] - 94s - loss: 0.8680 - acc: 0.8744 - val_loss: 1.2541 - val_acc: 0.7642
Epoch 56/200
391/391 [==============================] - 94s - loss: 0.8628 - acc: 0.8770 - val_loss: 1.4939 - val_acc: 0.6948
Epoch 57/200
391/391 [==============================] - 94s - loss: 0.8719 - acc: 0.8741 - val_loss: 1.3511 - val_acc: 0.7267
Epoch 58/200
391/391 [==============================] - 94s - loss: 0.8619 - acc: 0.8786 - val_loss: 1.2001 - val_acc: 0.7709
Epoch 59/200
391/391 [==============================] - 94s - loss: 0.8768 - acc: 0.8747 - val_loss: 1.7186 - val_acc: 0.6702
Epoch 60/200
391/391 [==============================] - 94s - loss: 0.8656 - acc: 0.8767 - val_loss: 1.3003 - val_acc: 0.7301
Epoch 61/200
391/391 [==============================] - 94s - loss: 0.8661 - acc: 0.8765 - val_loss: 1.4943 - val_acc: 0.6775
Epoch 62/200
391/391 [==============================] - 94s - loss: 0.6834 - acc: 0.9308 - val_loss: 0.7376 - val_acc: 0.9073
Epoch 63/200
391/391 [==============================] - 94s - loss: 0.5591 - acc: 0.9541 - val_loss: 0.6621 - val_acc: 0.9144
Epoch 64/200
391/391 [==============================] - 94s - loss: 0.4951 - acc: 0.9603 - val_loss: 0.7272 - val_acc: 0.8849
Epoch 65/200
391/391 [==============================] - 94s - loss: 0.4509 - acc: 0.9628 - val_loss: 0.6671 - val_acc: 0.8963
Epoch 66/200
391/391 [==============================] - 94s - loss: 0.4236 - acc: 0.9625 - val_loss: 0.6915 - val_acc: 0.8876
Epoch 67/200
391/391 [==============================] - 94s - loss: 0.4090 - acc: 0.9621 - val_loss: 0.5924 - val_acc: 0.9062
Epoch 68/200
391/391 [==============================] - 94s - loss: 0.3995 - acc: 0.9608 - val_loss: 0.5817 - val_acc: 0.9007
Epoch 69/200
391/391 [==============================] - 94s - loss: 0.3881 - acc: 0.9612 - val_loss: 0.7017 - val_acc: 0.8796
Epoch 70/200
391/391 [==============================] - 94s - loss: 0.3839 - acc: 0.9607 - val_loss: 0.7358 - val_acc: 0.8665
Epoch 71/200
391/391 [==============================] - 94s - loss: 0.3803 - acc: 0.9619 - val_loss: 0.5933 - val_acc: 0.8988
Epoch 72/200
391/391 [==============================] - 94s - loss: 0.3761 - acc: 0.9616 - val_loss: 0.6418 - val_acc: 0.8868
Epoch 73/200
391/391 [==============================] - 95s - loss: 0.3794 - acc: 0.9607 - val_loss: 0.8185 - val_acc: 0.8437
Epoch 74/200
391/391 [==============================] - 94s - loss: 0.3761 - acc: 0.9634 - val_loss: 0.6979 - val_acc: 0.8714
Epoch 75/200
391/391 [==============================] - 94s - loss: 0.3796 - acc: 0.9616 - val_loss: 0.7003 - val_acc: 0.8763
Epoch 76/200
391/391 [==============================] - 94s - loss: 0.3823 - acc: 0.9611 - val_loss: 0.8101 - val_acc: 0.8551
Epoch 77/200
391/391 [==============================] - 94s - loss: 0.3812 - acc: 0.9631 - val_loss: 0.6284 - val_acc: 0.8915
Epoch 78/200
391/391 [==============================] - 94s - loss: 0.3816 - acc: 0.9617 - val_loss: 0.6552 - val_acc: 0.8907
Epoch 79/200
391/391 [==============================] - 94s - loss: 0.3780 - acc: 0.9635 - val_loss: 0.7185 - val_acc: 0.8781
Epoch 80/200
391/391 [==============================] - 94s - loss: 0.3814 - acc: 0.9639 - val_loss: 0.6373 - val_acc: 0.8972
Epoch 81/200
391/391 [==============================] - 94s - loss: 0.3761 - acc: 0.9659 - val_loss: 0.7535 - val_acc: 0.8594
Epoch 82/200
391/391 [==============================] - 94s - loss: 0.3818 - acc: 0.9643 - val_loss: 0.7362 - val_acc: 0.8769
Epoch 83/200
391/391 [==============================] - 94s - loss: 0.3839 - acc: 0.9634 - val_loss: 0.6737 - val_acc: 0.8920
Epoch 84/200
391/391 [==============================] - 94s - loss: 0.3805 - acc: 0.9649 - val_loss: 0.7497 - val_acc: 0.8683
Epoch 85/200
391/391 [==============================] - 94s - loss: 0.3875 - acc: 0.9630 - val_loss: 0.8570 - val_acc: 0.8458
Epoch 86/200
391/391 [==============================] - 94s - loss: 0.3818 - acc: 0.9663 - val_loss: 0.6945 - val_acc: 0.8779
Epoch 87/200
391/391 [==============================] - 94s - loss: 0.3810 - acc: 0.9665 - val_loss: 0.7333 - val_acc: 0.8796
Epoch 88/200
391/391 [==============================] - 94s - loss: 0.3843 - acc: 0.9655 - val_loss: 0.7125 - val_acc: 0.8765
Epoch 89/200
391/391 [==============================] - 94s - loss: 0.3760 - acc: 0.9688 - val_loss: 0.8033 - val_acc: 0.8529
Epoch 90/200
391/391 [==============================] - 94s - loss: 0.3847 - acc: 0.9656 - val_loss: 0.7719 - val_acc: 0.8697
Epoch 91/200
391/391 [==============================] - 94s - loss: 0.3874 - acc: 0.9651 - val_loss: 0.6600 - val_acc: 0.8892
Epoch 92/200
391/391 [==============================] - 94s - loss: 0.3898 - acc: 0.9664 - val_loss: 0.7785 - val_acc: 0.8618
Epoch 93/200
391/391 [==============================] - 94s - loss: 0.3858 - acc: 0.9677 - val_loss: 0.6877 - val_acc: 0.8861
Epoch 94/200
391/391 [==============================] - 94s - loss: 0.3876 - acc: 0.9665 - val_loss: 0.9447 - val_acc: 0.8476
Epoch 95/200
391/391 [==============================] - 94s - loss: 0.3855 - acc: 0.9684 - val_loss: 0.6851 - val_acc: 0.8886
Epoch 96/200
391/391 [==============================] - 94s - loss: 0.3883 - acc: 0.9667 - val_loss: 0.7444 - val_acc: 0.8771
Epoch 97/200
391/391 [==============================] - 94s - loss: 0.3901 - acc: 0.9662 - val_loss: 0.7235 - val_acc: 0.8815
Epoch 98/200
391/391 [==============================] - 94s - loss: 0.3918 - acc: 0.9673 - val_loss: 0.6950 - val_acc: 0.8865
Epoch 99/200
391/391 [==============================] - 94s - loss: 0.3864 - acc: 0.9693 - val_loss: 0.7870 - val_acc: 0.8629
Epoch 100/200
391/391 [==============================] - 94s - loss: 0.3958 - acc: 0.9657 - val_loss: 0.7592 - val_acc: 0.8712
Epoch 101/200
391/391 [==============================] - 94s - loss: 0.3897 - acc: 0.9680 - val_loss: 0.7300 - val_acc: 0.8808
Epoch 102/200
391/391 [==============================] - 94s - loss: 0.3930 - acc: 0.9672 - val_loss: 0.7118 - val_acc: 0.8893
Epoch 103/200
391/391 [==============================] - 94s - loss: 0.3878 - acc: 0.9691 - val_loss: 0.7249 - val_acc: 0.8765
Epoch 104/200
391/391 [==============================] - 94s - loss: 0.3940 - acc: 0.9669 - val_loss: 0.7189 - val_acc: 0.8835
Epoch 105/200
391/391 [==============================] - 93s - loss: 0.3960 - acc: 0.9671 - val_loss: 0.7256 - val_acc: 0.8838
Epoch 106/200
391/391 [==============================] - 94s - loss: 0.3868 - acc: 0.9697 - val_loss: 0.7194 - val_acc: 0.8784
Epoch 107/200
391/391 [==============================] - 94s - loss: 0.3909 - acc: 0.9683 - val_loss: 0.7126 - val_acc: 0.8813
Epoch 108/200
391/391 [==============================] - 94s - loss: 0.3959 - acc: 0.9662 - val_loss: 0.7264 - val_acc: 0.8823
Epoch 109/200
391/391 [==============================] - 94s - loss: 0.3897 - acc: 0.9691 - val_loss: 0.6959 - val_acc: 0.8926
Epoch 110/200
391/391 [==============================] - 94s - loss: 0.3915 - acc: 0.9692 - val_loss: 0.8168 - val_acc: 0.8678
Epoch 111/200
391/391 [==============================] - 94s - loss: 0.3928 - acc: 0.9691 - val_loss: 0.8750 - val_acc: 0.8606
Epoch 112/200
391/391 [==============================] - 94s - loss: 0.3943 - acc: 0.9677 - val_loss: 0.7021 - val_acc: 0.8935
Epoch 113/200
391/391 [==============================] - 94s - loss: 0.3970 - acc: 0.9675 - val_loss: 0.7059 - val_acc: 0.8868
Epoch 114/200
391/391 [==============================] - 94s - loss: 0.3967 - acc: 0.9686 - val_loss: 0.7462 - val_acc: 0.8690
Epoch 115/200
391/391 [==============================] - 94s - loss: 0.3981 - acc: 0.9688 - val_loss: 0.7003 - val_acc: 0.8953
Epoch 116/200
391/391 [==============================] - 94s - loss: 0.3955 - acc: 0.9688 - val_loss: 0.8367 - val_acc: 0.8561
Epoch 117/200
391/391 [==============================] - 94s - loss: 0.3991 - acc: 0.9681 - val_loss: 0.7848 - val_acc: 0.8571
Epoch 118/200
391/391 [==============================] - 93s - loss: 0.3979 - acc: 0.9684 - val_loss: 0.6623 - val_acc: 0.8995
Epoch 119/200
391/391 [==============================] - 94s - loss: 0.3983 - acc: 0.9688 - val_loss: 0.7475 - val_acc: 0.8794
Epoch 120/200
391/391 [==============================] - 94s - loss: 0.3934 - acc: 0.9695 - val_loss: 0.8279 - val_acc: 0.8584
Epoch 121/200
391/391 [==============================] - 94s - loss: 0.3967 - acc: 0.9689 - val_loss: 0.7545 - val_acc: 0.8731
Epoch 122/200
391/391 [==============================] - 94s - loss: 0.3506 - acc: 0.9859 - val_loss: 0.5095 - val_acc: 0.9371
Epoch 123/200
391/391 [==============================] - 94s - loss: 0.3122 - acc: 0.9960 - val_loss: 0.4876 - val_acc: 0.9427
Epoch 124/200
391/391 [==============================] - 94s - loss: 0.2986 - acc: 0.9975 - val_loss: 0.4810 - val_acc: 0.9433
Epoch 125/200
391/391 [==============================] - 93s - loss: 0.2873 - acc: 0.9986 - val_loss: 0.4679 - val_acc: 0.9470
Epoch 126/200
391/391 [==============================] - 94s - loss: 0.2772 - acc: 0.9989 - val_loss: 0.4582 - val_acc: 0.9465
Epoch 127/200
391/391 [==============================] - 94s - loss: 0.2686 - acc: 0.9990 - val_loss: 0.4531 - val_acc: 0.9472
Epoch 128/200
391/391 [==============================] - 94s - loss: 0.2601 - acc: 0.9992 - val_loss: 0.4462 - val_acc: 0.9459
Epoch 129/200
391/391 [==============================] - 94s - loss: 0.2520 - acc: 0.9993 - val_loss: 0.4366 - val_acc: 0.9474
Epoch 130/200
391/391 [==============================] - 94s - loss: 0.2440 - acc: 0.9995 - val_loss: 0.4289 - val_acc: 0.9485
Epoch 131/200
391/391 [==============================] - 94s - loss: 0.2367 - acc: 0.9995 - val_loss: 0.4242 - val_acc: 0.9483
Epoch 132/200
391/391 [==============================] - 94s - loss: 0.2291 - acc: 0.9997 - val_loss: 0.4183 - val_acc: 0.9496
Epoch 133/200
391/391 [==============================] - 93s - loss: 0.2225 - acc: 0.9996 - val_loss: 0.4141 - val_acc: 0.9501
Epoch 134/200
391/391 [==============================] - 94s - loss: 0.2159 - acc: 0.9997 - val_loss: 0.4108 - val_acc: 0.9481
Epoch 135/200
391/391 [==============================] - 94s - loss: 0.2093 - acc: 0.9996 - val_loss: 0.4030 - val_acc: 0.9501
Epoch 136/200
391/391 [==============================] - 94s - loss: 0.2030 - acc: 0.9997 - val_loss: 0.3963 - val_acc: 0.9491
Epoch 137/200
391/391 [==============================] - 94s - loss: 0.1968 - acc: 0.9997 - val_loss: 0.3868 - val_acc: 0.9503
Epoch 138/200
391/391 [==============================] - 94s - loss: 0.1908 - acc: 0.9999 - val_loss: 0.3853 - val_acc: 0.9495
Epoch 139/200
391/391 [==============================] - 94s - loss: 0.1848 - acc: 0.9999 - val_loss: 0.3767 - val_acc: 0.9504
Epoch 140/200
391/391 [==============================] - 94s - loss: 0.1796 - acc: 0.9999 - val_loss: 0.3715 - val_acc: 0.9510
Epoch 141/200
391/391 [==============================] - 92s - loss: 0.1741 - acc: 0.9999 - val_loss: 0.3650 - val_acc: 0.9509
Epoch 142/200
391/391 [==============================] - 92s - loss: 0.1692 - acc: 0.9998 - val_loss: 0.3562 - val_acc: 0.9509
Epoch 143/200
391/391 [==============================] - 92s - loss: 0.1642 - acc: 0.9998 - val_loss: 0.3538 - val_acc: 0.9519
Epoch 144/200
391/391 [==============================] - 92s - loss: 0.1590 - acc: 0.9999 - val_loss: 0.3533 - val_acc: 0.9513
Epoch 145/200
391/391 [==============================] - 92s - loss: 0.1547 - acc: 0.9998 - val_loss: 0.3514 - val_acc: 0.9494
Epoch 146/200
391/391 [==============================] - 92s - loss: 0.1500 - acc: 0.9998 - val_loss: 0.3469 - val_acc: 0.9499
Epoch 147/200
391/391 [==============================] - 92s - loss: 0.1456 - acc: 0.9999 - val_loss: 0.3418 - val_acc: 0.9499
Epoch 148/200
391/391 [==============================] - 92s - loss: 0.1415 - acc: 0.9999 - val_loss: 0.3381 - val_acc: 0.9507
Epoch 149/200
391/391 [==============================] - 92s - loss: 0.1372 - acc: 0.9999 - val_loss: 0.3321 - val_acc: 0.9510
Epoch 150/200
391/391 [==============================] - 92s - loss: 0.1334 - acc: 0.9998 - val_loss: 0.3395 - val_acc: 0.9481
Epoch 151/200
391/391 [==============================] - 92s - loss: 0.1294 - acc: 0.9999 - val_loss: 0.3301 - val_acc: 0.9485
Epoch 152/200
391/391 [==============================] - 92s - loss: 0.1255 - acc: 0.9999 - val_loss: 0.3254 - val_acc: 0.9491
Epoch 153/200
391/391 [==============================] - 92s - loss: 0.1219 - acc: 1.0000 - val_loss: 0.3192 - val_acc: 0.9491
Epoch 154/200
391/391 [==============================] - 92s - loss: 0.1182 - acc: 1.0000 - val_loss: 0.3128 - val_acc: 0.9496
Epoch 155/200
391/391 [==============================] - 92s - loss: 0.1149 - acc: 0.9999 - val_loss: 0.3156 - val_acc: 0.9495
Epoch 156/200
391/391 [==============================] - 92s - loss: 0.1117 - acc: 0.9999 - val_loss: 0.3153 - val_acc: 0.9480
Epoch 157/200
391/391 [==============================] - 92s - loss: 0.1087 - acc: 0.9998 - val_loss: 0.3139 - val_acc: 0.9467
Epoch 158/200
391/391 [==============================] - 92s - loss: 0.1054 - acc: 0.9999 - val_loss: 0.3114 - val_acc: 0.9464
Epoch 159/200
391/391 [==============================] - 92s - loss: 0.1027 - acc: 0.9998 - val_loss: 0.3120 - val_acc: 0.9474
Epoch 160/200
391/391 [==============================] - 92s - loss: 0.0995 - acc: 0.9999 - val_loss: 0.3019 - val_acc: 0.9472
Epoch 161/200
391/391 [==============================] - 92s - loss: 0.0968 - acc: 0.9999 - val_loss: 0.2975 - val_acc: 0.9487
Epoch 162/200
391/391 [==============================] - 92s - loss: 0.0950 - acc: 0.9999 - val_loss: 0.2918 - val_acc: 0.9499
Epoch 163/200
391/391 [==============================] - 92s - loss: 0.0942 - acc: 1.0000 - val_loss: 0.2905 - val_acc: 0.9493
Epoch 164/200
391/391 [==============================] - 92s - loss: 0.0934 - acc: 1.0000 - val_loss: 0.2883 - val_acc: 0.9495
Epoch 165/200
391/391 [==============================] - 92s - loss: 0.0929 - acc: 1.0000 - val_loss: 0.2876 - val_acc: 0.9498
Epoch 166/200
391/391 [==============================] - 92s - loss: 0.0923 - acc: 1.0000 - val_loss: 0.2859 - val_acc: 0.9493
Epoch 167/200
391/391 [==============================] - 92s - loss: 0.0918 - acc: 1.0000 - val_loss: 0.2831 - val_acc: 0.9500
Epoch 168/200
391/391 [==============================] - 92s - loss: 0.0912 - acc: 1.0000 - val_loss: 0.2838 - val_acc: 0.9502
Epoch 169/200
391/391 [==============================] - 92s - loss: 0.0906 - acc: 1.0000 - val_loss: 0.2834 - val_acc: 0.9508
Epoch 170/200
391/391 [==============================] - 92s - loss: 0.0901 - acc: 1.0000 - val_loss: 0.2819 - val_acc: 0.9505
Epoch 171/200
391/391 [==============================] - 92s - loss: 0.0895 - acc: 1.0000 - val_loss: 0.2814 - val_acc: 0.9507
Epoch 172/200
391/391 [==============================] - 92s - loss: 0.0890 - acc: 1.0000 - val_loss: 0.2808 - val_acc: 0.9507
Epoch 173/200
391/391 [==============================] - 92s - loss: 0.0884 - acc: 1.0000 - val_loss: 0.2795 - val_acc: 0.9509
Epoch 174/200
391/391 [==============================] - 92s - loss: 0.0879 - acc: 1.0000 - val_loss: 0.2797 - val_acc: 0.9503
Epoch 175/200
391/391 [==============================] - 92s - loss: 0.0874 - acc: 1.0000 - val_loss: 0.2787 - val_acc: 0.9510
Epoch 176/200
391/391 [==============================] - 92s - loss: 0.0868 - acc: 1.0000 - val_loss: 0.2780 - val_acc: 0.9511
Epoch 177/200
391/391 [==============================] - 92s - loss: 0.0863 - acc: 1.0000 - val_loss: 0.2775 - val_acc: 0.9508
Epoch 178/200
391/391 [==============================] - 92s - loss: 0.0858 - acc: 1.0000 - val_loss: 0.2769 - val_acc: 0.9513
Epoch 179/200
391/391 [==============================] - 92s - loss: 0.0854 - acc: 1.0000 - val_loss: 0.2761 - val_acc: 0.9510
Epoch 180/200
391/391 [==============================] - 92s - loss: 0.0848 - acc: 1.0000 - val_loss: 0.2745 - val_acc: 0.9508
Epoch 181/200
391/391 [==============================] - 92s - loss: 0.0843 - acc: 1.0000 - val_loss: 0.2731 - val_acc: 0.9507
Epoch 182/200
391/391 [==============================] - 92s - loss: 0.0838 - acc: 1.0000 - val_loss: 0.2719 - val_acc: 0.9513
Epoch 183/200
391/391 [==============================] - 92s - loss: 0.0833 - acc: 1.0000 - val_loss: 0.2717 - val_acc: 0.9506
Epoch 184/200
391/391 [==============================] - 92s - loss: 0.0828 - acc: 1.0000 - val_loss: 0.2728 - val_acc: 0.9509
Epoch 185/200
391/391 [==============================] - 92s - loss: 0.0823 - acc: 1.0000 - val_loss: 0.2721 - val_acc: 0.9502
Epoch 186/200
391/391 [==============================] - 92s - loss: 0.0818 - acc: 1.0000 - val_loss: 0.2714 - val_acc: 0.9505
Epoch 187/200
391/391 [==============================] - 92s - loss: 0.0813 - acc: 1.0000 - val_loss: 0.2723 - val_acc: 0.9508
Epoch 188/200
391/391 [==============================] - 93s - loss: 0.0809 - acc: 1.0000 - val_loss: 0.2693 - val_acc: 0.9508
Epoch 189/200
391/391 [==============================] - 94s - loss: 0.0803 - acc: 1.0000 - val_loss: 0.2682 - val_acc: 0.9510
Epoch 190/200
391/391 [==============================] - 94s - loss: 0.0799 - acc: 1.0000 - val_loss: 0.2687 - val_acc: 0.9510
Epoch 191/200
391/391 [==============================] - 94s - loss: 0.0795 - acc: 1.0000 - val_loss: 0.2680 - val_acc: 0.9516
Epoch 192/200
391/391 [==============================] - 94s - loss: 0.0789 - acc: 1.0000 - val_loss: 0.2675 - val_acc: 0.9509
Epoch 193/200
391/391 [==============================] - 94s - loss: 0.0784 - acc: 1.0000 - val_loss: 0.2670 - val_acc: 0.9515
Epoch 194/200
391/391 [==============================] - 94s - loss: 0.0780 - acc: 1.0000 - val_loss: 0.2676 - val_acc: 0.9510
Epoch 195/200
391/391 [==============================] - 94s - loss: 0.0775 - acc: 1.0000 - val_loss: 0.2669 - val_acc: 0.9511
Epoch 196/200
391/391 [==============================] - 94s - loss: 0.0770 - acc: 1.0000 - val_loss: 0.2658 - val_acc: 0.9512
Epoch 197/200
391/391 [==============================] - 94s - loss: 0.0766 - acc: 1.0000 - val_loss: 0.2647 - val_acc: 0.9514
Epoch 198/200
391/391 [==============================] - 94s - loss: 0.0762 - acc: 1.0000 - val_loss: 0.2641 - val_acc: 0.9519
Epoch 199/200
391/391 [==============================] - 94s - loss: 0.0756 - acc: 1.0000 - val_loss: 0.2640 - val_acc: 0.9520
Epoch 200/200
391/391 [==============================] - 94s - loss: 0.0752 - acc: 1.0000 - val_loss: 0.2639 - val_acc: 0.9521
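After the final epoch (95.21% test accuracy), the saved model can be reloaded and re-scored; a minimal sketch, assuming the preprocessed x_test and y_test are still in memory:
from keras.models import load_model

resnet = load_model('w_resnet.h5')
test_loss, test_acc = resnet.evaluate(x_test, y_test, verbose=0)
print('test accuracy: %.4f' % test_acc)  # ~0.9521, matching the last epoch above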