In [1]:
from __future__ import print_function
#Basic libraries
import numpy as np
import tensorflow as tf
import time
from os import listdir, walk
from os.path import isfile, join
import random
#Show images
import matplotlib.pyplot as plt
%matplotlib inline
plt.rcParams['figure.figsize'] = (15, 10) # size of images
plt.rcParams['image.interpolation'] = 'nearest' # show exact image
# Select GPU
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"
print('Tensorflow version: ', tf.__version__)
data_path = '/home/ubuntu/data/training/image/cats_dogs/sample'
Tensorflow version: 1.4.0
In [2]:
# Data features
categories=[]
labels={}
for dirname, dirnames, filenames in walk(join(data_path,'trn')):
# print path to all subdirectories first.
for i,subdirname in enumerate(dirnames):
categories += [subdirname]
labels[i] = subdirname
print('Categories: ', categories)
n_classes = len(categories)
print('Num classes: ', n_classes)
Categories: ['dog', 'cat']
Num classes: 2
In [3]:
# Examine the images
show_categories = categories
if n_classes>10:
show_categories = categories[:10]
fig = plt.figure()
n=0
for i, category in enumerate(show_categories):
filelist = [f for f in listdir(join(data_path, 'trn', category)) if isfile(join(data_path, 'trn', category, f))]
random.shuffle(filelist) # Randomize images
for f in filelist[:5]:
n += 1
a = fig.add_subplot(len(show_categories),5,n)
a.set_title(category)
img = plt.imread(join(data_path, 'trn', category, f))
plt.imshow(img)
In [ ]:
In [4]:
from tensorflow.contrib.keras import applications, preprocessing
#Load model
model = applications.inception_v3.InceptionV3(weights='imagenet')
In [5]:
# Lists of image files
my_filelist = [f for f in listdir(join(data_path, 'trn', categories[0])) if isfile(join(data_path, 'trn', categories[0], f))]
In [6]:
img_path = join(data_path, 'trn', categories[0], my_filelist[0])
# Load image and adapt to inception
def load_image(img_path, draw=True):
img = preprocessing.image.load_img(img_path, target_size=(299, 299))
x = preprocessing.image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = applications.inception_v3.preprocess_input(x)
if draw:
plt.imshow(img)
return x
x = load_image(img_path)
preds = model.predict(x)
# decode the results into a list of tuples (class, description, probability)
print('Predicted:', applications.inception_v3.decode_predictions(preds, top=3)[0])
Predicted: [('n02105056', 'groenendael', 0.78728551), ('n02104365', 'schipperke', 0.12660344), ('n02112018', 'Pomeranian', 0.0056617558)]
In [ ]:
In [7]:
from tensorflow.contrib.keras import applications, preprocessing, layers, optimizers, callbacks, models
#Load base model
base_model = applications.inception_v3.InceptionV3(input_shape=(299, 299, 3), weights='imagenet', include_top=False)
# Check the output of the last loaded layer
print('Last layer shape:', base_model.output)
Last layer shape: Tensor("mixed10_2/concat:0", shape=(?, 8, 8, 2048), dtype=float32)
In [8]:
# add a global spatial average pooling layer
x = layers.GlobalAveragePooling2D()(base_model.output)
print('New last layer shape:', x)
# Add a fully-connected layer
#x = layers.Dense(1024, activation='relu')(x)
# Add the prediction layer. 2 classes
predictions = layers.Dense(2, activation='softmax')(x)
New last layer shape: Tensor("global_average_pooling2d/Mean:0", shape=(?, 2048), dtype=float32)
In [9]:
# Model to train
model = models.Model(inputs=base_model.input, outputs=predictions)
# Train only the top layers (which were randomly initialized)
# Freeze all convolutional InceptionV3 layers
for layer in base_model.layers:
layer.trainable = False
# Print the summary of the model
model.summary()
____________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
====================================================================================================
input_2 (InputLayer) (None, 299, 299, 3) 0
____________________________________________________________________________________________________
conv2d_95 (Conv2D) (None, 149, 149, 32) 864 input_2[0][0]
____________________________________________________________________________________________________
batch_normalization_95 (BatchNor (None, 149, 149, 32) 96 conv2d_95[0][0]
____________________________________________________________________________________________________
activation_95 (Activation) (None, 149, 149, 32) 0 batch_normalization_95[0][0]
____________________________________________________________________________________________________
conv2d_96 (Conv2D) (None, 147, 147, 32) 9216 activation_95[0][0]
____________________________________________________________________________________________________
batch_normalization_96 (BatchNor (None, 147, 147, 32) 96 conv2d_96[0][0]
____________________________________________________________________________________________________
activation_96 (Activation) (None, 147, 147, 32) 0 batch_normalization_96[0][0]
____________________________________________________________________________________________________
conv2d_97 (Conv2D) (None, 147, 147, 64) 18432 activation_96[0][0]
____________________________________________________________________________________________________
batch_normalization_97 (BatchNor (None, 147, 147, 64) 192 conv2d_97[0][0]
____________________________________________________________________________________________________
activation_97 (Activation) (None, 147, 147, 64) 0 batch_normalization_97[0][0]
____________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D) (None, 73, 73, 64) 0 activation_97[0][0]
____________________________________________________________________________________________________
conv2d_98 (Conv2D) (None, 73, 73, 80) 5120 max_pooling2d_5[0][0]
____________________________________________________________________________________________________
batch_normalization_98 (BatchNor (None, 73, 73, 80) 240 conv2d_98[0][0]
____________________________________________________________________________________________________
activation_98 (Activation) (None, 73, 73, 80) 0 batch_normalization_98[0][0]
____________________________________________________________________________________________________
conv2d_99 (Conv2D) (None, 71, 71, 192) 138240 activation_98[0][0]
____________________________________________________________________________________________________
batch_normalization_99 (BatchNor (None, 71, 71, 192) 576 conv2d_99[0][0]
____________________________________________________________________________________________________
activation_99 (Activation) (None, 71, 71, 192) 0 batch_normalization_99[0][0]
____________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D) (None, 35, 35, 192) 0 activation_99[0][0]
____________________________________________________________________________________________________
conv2d_103 (Conv2D) (None, 35, 35, 64) 12288 max_pooling2d_6[0][0]
____________________________________________________________________________________________________
batch_normalization_103 (BatchNo (None, 35, 35, 64) 192 conv2d_103[0][0]
____________________________________________________________________________________________________
activation_103 (Activation) (None, 35, 35, 64) 0 batch_normalization_103[0][0]
____________________________________________________________________________________________________
conv2d_101 (Conv2D) (None, 35, 35, 48) 9216 max_pooling2d_6[0][0]
____________________________________________________________________________________________________
conv2d_104 (Conv2D) (None, 35, 35, 96) 55296 activation_103[0][0]
____________________________________________________________________________________________________
batch_normalization_101 (BatchNo (None, 35, 35, 48) 144 conv2d_101[0][0]
____________________________________________________________________________________________________
batch_normalization_104 (BatchNo (None, 35, 35, 96) 288 conv2d_104[0][0]
____________________________________________________________________________________________________
activation_101 (Activation) (None, 35, 35, 48) 0 batch_normalization_101[0][0]
____________________________________________________________________________________________________
activation_104 (Activation) (None, 35, 35, 96) 0 batch_normalization_104[0][0]
____________________________________________________________________________________________________
average_pooling2d_10 (AveragePoo (None, 35, 35, 192) 0 max_pooling2d_6[0][0]
____________________________________________________________________________________________________
conv2d_100 (Conv2D) (None, 35, 35, 64) 12288 max_pooling2d_6[0][0]
____________________________________________________________________________________________________
conv2d_102 (Conv2D) (None, 35, 35, 64) 76800 activation_101[0][0]
____________________________________________________________________________________________________
conv2d_105 (Conv2D) (None, 35, 35, 96) 82944 activation_104[0][0]
____________________________________________________________________________________________________
conv2d_106 (Conv2D) (None, 35, 35, 32) 6144 average_pooling2d_10[0][0]
____________________________________________________________________________________________________
batch_normalization_100 (BatchNo (None, 35, 35, 64) 192 conv2d_100[0][0]
____________________________________________________________________________________________________
batch_normalization_102 (BatchNo (None, 35, 35, 64) 192 conv2d_102[0][0]
____________________________________________________________________________________________________
batch_normalization_105 (BatchNo (None, 35, 35, 96) 288 conv2d_105[0][0]
____________________________________________________________________________________________________
batch_normalization_106 (BatchNo (None, 35, 35, 32) 96 conv2d_106[0][0]
____________________________________________________________________________________________________
activation_100 (Activation) (None, 35, 35, 64) 0 batch_normalization_100[0][0]
____________________________________________________________________________________________________
activation_102 (Activation) (None, 35, 35, 64) 0 batch_normalization_102[0][0]
____________________________________________________________________________________________________
activation_105 (Activation) (None, 35, 35, 96) 0 batch_normalization_105[0][0]
____________________________________________________________________________________________________
activation_106 (Activation) (None, 35, 35, 32) 0 batch_normalization_106[0][0]
____________________________________________________________________________________________________
mixed0 (Concatenate) (None, 35, 35, 256) 0 activation_100[0][0]
activation_102[0][0]
activation_105[0][0]
activation_106[0][0]
____________________________________________________________________________________________________
conv2d_110 (Conv2D) (None, 35, 35, 64) 16384 mixed0[0][0]
____________________________________________________________________________________________________
batch_normalization_110 (BatchNo (None, 35, 35, 64) 192 conv2d_110[0][0]
____________________________________________________________________________________________________
activation_110 (Activation) (None, 35, 35, 64) 0 batch_normalization_110[0][0]
____________________________________________________________________________________________________
conv2d_108 (Conv2D) (None, 35, 35, 48) 12288 mixed0[0][0]
____________________________________________________________________________________________________
conv2d_111 (Conv2D) (None, 35, 35, 96) 55296 activation_110[0][0]
____________________________________________________________________________________________________
batch_normalization_108 (BatchNo (None, 35, 35, 48) 144 conv2d_108[0][0]
____________________________________________________________________________________________________
batch_normalization_111 (BatchNo (None, 35, 35, 96) 288 conv2d_111[0][0]
____________________________________________________________________________________________________
activation_108 (Activation) (None, 35, 35, 48) 0 batch_normalization_108[0][0]
____________________________________________________________________________________________________
activation_111 (Activation) (None, 35, 35, 96) 0 batch_normalization_111[0][0]
____________________________________________________________________________________________________
average_pooling2d_11 (AveragePoo (None, 35, 35, 256) 0 mixed0[0][0]
____________________________________________________________________________________________________
conv2d_107 (Conv2D) (None, 35, 35, 64) 16384 mixed0[0][0]
____________________________________________________________________________________________________
conv2d_109 (Conv2D) (None, 35, 35, 64) 76800 activation_108[0][0]
____________________________________________________________________________________________________
conv2d_112 (Conv2D) (None, 35, 35, 96) 82944 activation_111[0][0]
____________________________________________________________________________________________________
conv2d_113 (Conv2D) (None, 35, 35, 64) 16384 average_pooling2d_11[0][0]
____________________________________________________________________________________________________
batch_normalization_107 (BatchNo (None, 35, 35, 64) 192 conv2d_107[0][0]
____________________________________________________________________________________________________
batch_normalization_109 (BatchNo (None, 35, 35, 64) 192 conv2d_109[0][0]
____________________________________________________________________________________________________
batch_normalization_112 (BatchNo (None, 35, 35, 96) 288 conv2d_112[0][0]
____________________________________________________________________________________________________
batch_normalization_113 (BatchNo (None, 35, 35, 64) 192 conv2d_113[0][0]
____________________________________________________________________________________________________
activation_107 (Activation) (None, 35, 35, 64) 0 batch_normalization_107[0][0]
____________________________________________________________________________________________________
activation_109 (Activation) (None, 35, 35, 64) 0 batch_normalization_109[0][0]
____________________________________________________________________________________________________
activation_112 (Activation) (None, 35, 35, 96) 0 batch_normalization_112[0][0]
____________________________________________________________________________________________________
activation_113 (Activation) (None, 35, 35, 64) 0 batch_normalization_113[0][0]
____________________________________________________________________________________________________
mixed1 (Concatenate) (None, 35, 35, 288) 0 activation_107[0][0]
activation_109[0][0]
activation_112[0][0]
activation_113[0][0]
____________________________________________________________________________________________________
conv2d_117 (Conv2D) (None, 35, 35, 64) 18432 mixed1[0][0]
____________________________________________________________________________________________________
batch_normalization_117 (BatchNo (None, 35, 35, 64) 192 conv2d_117[0][0]
____________________________________________________________________________________________________
activation_117 (Activation) (None, 35, 35, 64) 0 batch_normalization_117[0][0]
____________________________________________________________________________________________________
conv2d_115 (Conv2D) (None, 35, 35, 48) 13824 mixed1[0][0]
____________________________________________________________________________________________________
conv2d_118 (Conv2D) (None, 35, 35, 96) 55296 activation_117[0][0]
____________________________________________________________________________________________________
batch_normalization_115 (BatchNo (None, 35, 35, 48) 144 conv2d_115[0][0]
____________________________________________________________________________________________________
batch_normalization_118 (BatchNo (None, 35, 35, 96) 288 conv2d_118[0][0]
____________________________________________________________________________________________________
activation_115 (Activation) (None, 35, 35, 48) 0 batch_normalization_115[0][0]
____________________________________________________________________________________________________
activation_118 (Activation) (None, 35, 35, 96) 0 batch_normalization_118[0][0]
____________________________________________________________________________________________________
average_pooling2d_12 (AveragePoo (None, 35, 35, 288) 0 mixed1[0][0]
____________________________________________________________________________________________________
conv2d_114 (Conv2D) (None, 35, 35, 64) 18432 mixed1[0][0]
____________________________________________________________________________________________________
conv2d_116 (Conv2D) (None, 35, 35, 64) 76800 activation_115[0][0]
____________________________________________________________________________________________________
conv2d_119 (Conv2D) (None, 35, 35, 96) 82944 activation_118[0][0]
____________________________________________________________________________________________________
conv2d_120 (Conv2D) (None, 35, 35, 64) 18432 average_pooling2d_12[0][0]
____________________________________________________________________________________________________
batch_normalization_114 (BatchNo (None, 35, 35, 64) 192 conv2d_114[0][0]
____________________________________________________________________________________________________
batch_normalization_116 (BatchNo (None, 35, 35, 64) 192 conv2d_116[0][0]
____________________________________________________________________________________________________
batch_normalization_119 (BatchNo (None, 35, 35, 96) 288 conv2d_119[0][0]
____________________________________________________________________________________________________
batch_normalization_120 (BatchNo (None, 35, 35, 64) 192 conv2d_120[0][0]
____________________________________________________________________________________________________
activation_114 (Activation) (None, 35, 35, 64) 0 batch_normalization_114[0][0]
____________________________________________________________________________________________________
activation_116 (Activation) (None, 35, 35, 64) 0 batch_normalization_116[0][0]
____________________________________________________________________________________________________
activation_119 (Activation) (None, 35, 35, 96) 0 batch_normalization_119[0][0]
____________________________________________________________________________________________________
activation_120 (Activation) (None, 35, 35, 64) 0 batch_normalization_120[0][0]
____________________________________________________________________________________________________
mixed2 (Concatenate) (None, 35, 35, 288) 0 activation_114[0][0]
activation_116[0][0]
activation_119[0][0]
activation_120[0][0]
____________________________________________________________________________________________________
conv2d_122 (Conv2D) (None, 35, 35, 64) 18432 mixed2[0][0]
____________________________________________________________________________________________________
batch_normalization_122 (BatchNo (None, 35, 35, 64) 192 conv2d_122[0][0]
____________________________________________________________________________________________________
activation_122 (Activation) (None, 35, 35, 64) 0 batch_normalization_122[0][0]
____________________________________________________________________________________________________
conv2d_123 (Conv2D) (None, 35, 35, 96) 55296 activation_122[0][0]
____________________________________________________________________________________________________
batch_normalization_123 (BatchNo (None, 35, 35, 96) 288 conv2d_123[0][0]
____________________________________________________________________________________________________
activation_123 (Activation) (None, 35, 35, 96) 0 batch_normalization_123[0][0]
____________________________________________________________________________________________________
conv2d_121 (Conv2D) (None, 17, 17, 384) 995328 mixed2[0][0]
____________________________________________________________________________________________________
conv2d_124 (Conv2D) (None, 17, 17, 96) 82944 activation_123[0][0]
____________________________________________________________________________________________________
batch_normalization_121 (BatchNo (None, 17, 17, 384) 1152 conv2d_121[0][0]
____________________________________________________________________________________________________
batch_normalization_124 (BatchNo (None, 17, 17, 96) 288 conv2d_124[0][0]
____________________________________________________________________________________________________
activation_121 (Activation) (None, 17, 17, 384) 0 batch_normalization_121[0][0]
____________________________________________________________________________________________________
activation_124 (Activation) (None, 17, 17, 96) 0 batch_normalization_124[0][0]
____________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D) (None, 17, 17, 288) 0 mixed2[0][0]
____________________________________________________________________________________________________
mixed3 (Concatenate) (None, 17, 17, 768) 0 activation_121[0][0]
activation_124[0][0]
max_pooling2d_7[0][0]
____________________________________________________________________________________________________
conv2d_129 (Conv2D) (None, 17, 17, 128) 98304 mixed3[0][0]
____________________________________________________________________________________________________
batch_normalization_129 (BatchNo (None, 17, 17, 128) 384 conv2d_129[0][0]
____________________________________________________________________________________________________
activation_129 (Activation) (None, 17, 17, 128) 0 batch_normalization_129[0][0]
____________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 17, 17, 128) 114688 activation_129[0][0]
____________________________________________________________________________________________________
batch_normalization_130 (BatchNo (None, 17, 17, 128) 384 conv2d_130[0][0]
____________________________________________________________________________________________________
activation_130 (Activation) (None, 17, 17, 128) 0 batch_normalization_130[0][0]
____________________________________________________________________________________________________
conv2d_126 (Conv2D) (None, 17, 17, 128) 98304 mixed3[0][0]
____________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 17, 17, 128) 114688 activation_130[0][0]
____________________________________________________________________________________________________
batch_normalization_126 (BatchNo (None, 17, 17, 128) 384 conv2d_126[0][0]
____________________________________________________________________________________________________
batch_normalization_131 (BatchNo (None, 17, 17, 128) 384 conv2d_131[0][0]
____________________________________________________________________________________________________
activation_126 (Activation) (None, 17, 17, 128) 0 batch_normalization_126[0][0]
____________________________________________________________________________________________________
activation_131 (Activation) (None, 17, 17, 128) 0 batch_normalization_131[0][0]
____________________________________________________________________________________________________
conv2d_127 (Conv2D) (None, 17, 17, 128) 114688 activation_126[0][0]
____________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 17, 17, 128) 114688 activation_131[0][0]
____________________________________________________________________________________________________
batch_normalization_127 (BatchNo (None, 17, 17, 128) 384 conv2d_127[0][0]
____________________________________________________________________________________________________
batch_normalization_132 (BatchNo (None, 17, 17, 128) 384 conv2d_132[0][0]
____________________________________________________________________________________________________
activation_127 (Activation) (None, 17, 17, 128) 0 batch_normalization_127[0][0]
____________________________________________________________________________________________________
activation_132 (Activation) (None, 17, 17, 128) 0 batch_normalization_132[0][0]
____________________________________________________________________________________________________
average_pooling2d_13 (AveragePoo (None, 17, 17, 768) 0 mixed3[0][0]
____________________________________________________________________________________________________
conv2d_125 (Conv2D) (None, 17, 17, 192) 147456 mixed3[0][0]
____________________________________________________________________________________________________
conv2d_128 (Conv2D) (None, 17, 17, 192) 172032 activation_127[0][0]
____________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 17, 17, 192) 172032 activation_132[0][0]
____________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_13[0][0]
____________________________________________________________________________________________________
batch_normalization_125 (BatchNo (None, 17, 17, 192) 576 conv2d_125[0][0]
____________________________________________________________________________________________________
batch_normalization_128 (BatchNo (None, 17, 17, 192) 576 conv2d_128[0][0]
____________________________________________________________________________________________________
batch_normalization_133 (BatchNo (None, 17, 17, 192) 576 conv2d_133[0][0]
____________________________________________________________________________________________________
batch_normalization_134 (BatchNo (None, 17, 17, 192) 576 conv2d_134[0][0]
____________________________________________________________________________________________________
activation_125 (Activation) (None, 17, 17, 192) 0 batch_normalization_125[0][0]
____________________________________________________________________________________________________
activation_128 (Activation) (None, 17, 17, 192) 0 batch_normalization_128[0][0]
____________________________________________________________________________________________________
activation_133 (Activation) (None, 17, 17, 192) 0 batch_normalization_133[0][0]
____________________________________________________________________________________________________
activation_134 (Activation) (None, 17, 17, 192) 0 batch_normalization_134[0][0]
____________________________________________________________________________________________________
mixed4 (Concatenate) (None, 17, 17, 768) 0 activation_125[0][0]
activation_128[0][0]
activation_133[0][0]
activation_134[0][0]
____________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 17, 17, 160) 122880 mixed4[0][0]
____________________________________________________________________________________________________
batch_normalization_139 (BatchNo (None, 17, 17, 160) 480 conv2d_139[0][0]
____________________________________________________________________________________________________
activation_139 (Activation) (None, 17, 17, 160) 0 batch_normalization_139[0][0]
____________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 17, 17, 160) 179200 activation_139[0][0]
____________________________________________________________________________________________________
batch_normalization_140 (BatchNo (None, 17, 17, 160) 480 conv2d_140[0][0]
____________________________________________________________________________________________________
activation_140 (Activation) (None, 17, 17, 160) 0 batch_normalization_140[0][0]
____________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 17, 17, 160) 122880 mixed4[0][0]
____________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 17, 17, 160) 179200 activation_140[0][0]
____________________________________________________________________________________________________
batch_normalization_136 (BatchNo (None, 17, 17, 160) 480 conv2d_136[0][0]
____________________________________________________________________________________________________
batch_normalization_141 (BatchNo (None, 17, 17, 160) 480 conv2d_141[0][0]
____________________________________________________________________________________________________
activation_136 (Activation) (None, 17, 17, 160) 0 batch_normalization_136[0][0]
____________________________________________________________________________________________________
activation_141 (Activation) (None, 17, 17, 160) 0 batch_normalization_141[0][0]
____________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 17, 17, 160) 179200 activation_136[0][0]
____________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 17, 17, 160) 179200 activation_141[0][0]
____________________________________________________________________________________________________
batch_normalization_137 (BatchNo (None, 17, 17, 160) 480 conv2d_137[0][0]
____________________________________________________________________________________________________
batch_normalization_142 (BatchNo (None, 17, 17, 160) 480 conv2d_142[0][0]
____________________________________________________________________________________________________
activation_137 (Activation) (None, 17, 17, 160) 0 batch_normalization_137[0][0]
____________________________________________________________________________________________________
activation_142 (Activation) (None, 17, 17, 160) 0 batch_normalization_142[0][0]
____________________________________________________________________________________________________
average_pooling2d_14 (AveragePoo (None, 17, 17, 768) 0 mixed4[0][0]
____________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 17, 17, 192) 147456 mixed4[0][0]
____________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 17, 17, 192) 215040 activation_137[0][0]
____________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 17, 17, 192) 215040 activation_142[0][0]
____________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_14[0][0]
____________________________________________________________________________________________________
batch_normalization_135 (BatchNo (None, 17, 17, 192) 576 conv2d_135[0][0]
____________________________________________________________________________________________________
batch_normalization_138 (BatchNo (None, 17, 17, 192) 576 conv2d_138[0][0]
____________________________________________________________________________________________________
batch_normalization_143 (BatchNo (None, 17, 17, 192) 576 conv2d_143[0][0]
____________________________________________________________________________________________________
batch_normalization_144 (BatchNo (None, 17, 17, 192) 576 conv2d_144[0][0]
____________________________________________________________________________________________________
activation_135 (Activation) (None, 17, 17, 192) 0 batch_normalization_135[0][0]
____________________________________________________________________________________________________
activation_138 (Activation) (None, 17, 17, 192) 0 batch_normalization_138[0][0]
____________________________________________________________________________________________________
activation_143 (Activation) (None, 17, 17, 192) 0 batch_normalization_143[0][0]
____________________________________________________________________________________________________
activation_144 (Activation) (None, 17, 17, 192) 0 batch_normalization_144[0][0]
____________________________________________________________________________________________________
mixed5 (Concatenate) (None, 17, 17, 768) 0 activation_135[0][0]
activation_138[0][0]
activation_143[0][0]
activation_144[0][0]
____________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 17, 17, 160) 122880 mixed5[0][0]
____________________________________________________________________________________________________
batch_normalization_149 (BatchNo (None, 17, 17, 160) 480 conv2d_149[0][0]
____________________________________________________________________________________________________
activation_149 (Activation) (None, 17, 17, 160) 0 batch_normalization_149[0][0]
____________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 17, 17, 160) 179200 activation_149[0][0]
____________________________________________________________________________________________________
batch_normalization_150 (BatchNo (None, 17, 17, 160) 480 conv2d_150[0][0]
____________________________________________________________________________________________________
activation_150 (Activation) (None, 17, 17, 160) 0 batch_normalization_150[0][0]
____________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 17, 17, 160) 122880 mixed5[0][0]
____________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 17, 17, 160) 179200 activation_150[0][0]
____________________________________________________________________________________________________
batch_normalization_146 (BatchNo (None, 17, 17, 160) 480 conv2d_146[0][0]
____________________________________________________________________________________________________
batch_normalization_151 (BatchNo (None, 17, 17, 160) 480 conv2d_151[0][0]
____________________________________________________________________________________________________
activation_146 (Activation) (None, 17, 17, 160) 0 batch_normalization_146[0][0]
____________________________________________________________________________________________________
activation_151 (Activation) (None, 17, 17, 160) 0 batch_normalization_151[0][0]
____________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 17, 17, 160) 179200 activation_146[0][0]
____________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 17, 17, 160) 179200 activation_151[0][0]
____________________________________________________________________________________________________
batch_normalization_147 (BatchNo (None, 17, 17, 160) 480 conv2d_147[0][0]
____________________________________________________________________________________________________
batch_normalization_152 (BatchNo (None, 17, 17, 160) 480 conv2d_152[0][0]
____________________________________________________________________________________________________
activation_147 (Activation) (None, 17, 17, 160) 0 batch_normalization_147[0][0]
____________________________________________________________________________________________________
activation_152 (Activation) (None, 17, 17, 160) 0 batch_normalization_152[0][0]
____________________________________________________________________________________________________
average_pooling2d_15 (AveragePoo (None, 17, 17, 768) 0 mixed5[0][0]
____________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 17, 17, 192) 147456 mixed5[0][0]
____________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 17, 17, 192) 215040 activation_147[0][0]
____________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 17, 17, 192) 215040 activation_152[0][0]
____________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_15[0][0]
____________________________________________________________________________________________________
batch_normalization_145 (BatchNo (None, 17, 17, 192) 576 conv2d_145[0][0]
____________________________________________________________________________________________________
batch_normalization_148 (BatchNo (None, 17, 17, 192) 576 conv2d_148[0][0]
____________________________________________________________________________________________________
batch_normalization_153 (BatchNo (None, 17, 17, 192) 576 conv2d_153[0][0]
____________________________________________________________________________________________________
batch_normalization_154 (BatchNo (None, 17, 17, 192) 576 conv2d_154[0][0]
____________________________________________________________________________________________________
activation_145 (Activation) (None, 17, 17, 192) 0 batch_normalization_145[0][0]
____________________________________________________________________________________________________
activation_148 (Activation) (None, 17, 17, 192) 0 batch_normalization_148[0][0]
____________________________________________________________________________________________________
activation_153 (Activation) (None, 17, 17, 192) 0 batch_normalization_153[0][0]
____________________________________________________________________________________________________
activation_154 (Activation) (None, 17, 17, 192) 0 batch_normalization_154[0][0]
____________________________________________________________________________________________________
mixed6 (Concatenate) (None, 17, 17, 768) 0 activation_145[0][0]
activation_148[0][0]
activation_153[0][0]
activation_154[0][0]
____________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0]
____________________________________________________________________________________________________
batch_normalization_159 (BatchNo (None, 17, 17, 192) 576 conv2d_159[0][0]
____________________________________________________________________________________________________
activation_159 (Activation) (None, 17, 17, 192) 0 batch_normalization_159[0][0]
____________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 17, 17, 192) 258048 activation_159[0][0]
____________________________________________________________________________________________________
batch_normalization_160 (BatchNo (None, 17, 17, 192) 576 conv2d_160[0][0]
____________________________________________________________________________________________________
activation_160 (Activation) (None, 17, 17, 192) 0 batch_normalization_160[0][0]
____________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0]
____________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 17, 17, 192) 258048 activation_160[0][0]
____________________________________________________________________________________________________
batch_normalization_156 (BatchNo (None, 17, 17, 192) 576 conv2d_156[0][0]
____________________________________________________________________________________________________
batch_normalization_161 (BatchNo (None, 17, 17, 192) 576 conv2d_161[0][0]
____________________________________________________________________________________________________
activation_156 (Activation) (None, 17, 17, 192) 0 batch_normalization_156[0][0]
____________________________________________________________________________________________________
activation_161 (Activation) (None, 17, 17, 192) 0 batch_normalization_161[0][0]
____________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 17, 17, 192) 258048 activation_156[0][0]
____________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 17, 17, 192) 258048 activation_161[0][0]
____________________________________________________________________________________________________
batch_normalization_157 (BatchNo (None, 17, 17, 192) 576 conv2d_157[0][0]
____________________________________________________________________________________________________
batch_normalization_162 (BatchNo (None, 17, 17, 192) 576 conv2d_162[0][0]
____________________________________________________________________________________________________
activation_157 (Activation) (None, 17, 17, 192) 0 batch_normalization_157[0][0]
____________________________________________________________________________________________________
activation_162 (Activation) (None, 17, 17, 192) 0 batch_normalization_162[0][0]
____________________________________________________________________________________________________
average_pooling2d_16 (AveragePoo (None, 17, 17, 768) 0 mixed6[0][0]
____________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 17, 17, 192) 147456 mixed6[0][0]
____________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 17, 17, 192) 258048 activation_157[0][0]
____________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 17, 17, 192) 258048 activation_162[0][0]
____________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 17, 17, 192) 147456 average_pooling2d_16[0][0]
____________________________________________________________________________________________________
batch_normalization_155 (BatchNo (None, 17, 17, 192) 576 conv2d_155[0][0]
____________________________________________________________________________________________________
batch_normalization_158 (BatchNo (None, 17, 17, 192) 576 conv2d_158[0][0]
____________________________________________________________________________________________________
batch_normalization_163 (BatchNo (None, 17, 17, 192) 576 conv2d_163[0][0]
____________________________________________________________________________________________________
batch_normalization_164 (BatchNo (None, 17, 17, 192) 576 conv2d_164[0][0]
____________________________________________________________________________________________________
activation_155 (Activation) (None, 17, 17, 192) 0 batch_normalization_155[0][0]
____________________________________________________________________________________________________
activation_158 (Activation) (None, 17, 17, 192) 0 batch_normalization_158[0][0]
____________________________________________________________________________________________________
activation_163 (Activation) (None, 17, 17, 192) 0 batch_normalization_163[0][0]
____________________________________________________________________________________________________
activation_164 (Activation) (None, 17, 17, 192) 0 batch_normalization_164[0][0]
____________________________________________________________________________________________________
mixed7 (Concatenate) (None, 17, 17, 768) 0 activation_155[0][0]
activation_158[0][0]
activation_163[0][0]
activation_164[0][0]
____________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 17, 17, 192) 147456 mixed7[0][0]
____________________________________________________________________________________________________
batch_normalization_167 (BatchNo (None, 17, 17, 192) 576 conv2d_167[0][0]
____________________________________________________________________________________________________
activation_167 (Activation) (None, 17, 17, 192) 0 batch_normalization_167[0][0]
____________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 17, 17, 192) 258048 activation_167[0][0]
____________________________________________________________________________________________________
batch_normalization_168 (BatchNo (None, 17, 17, 192) 576 conv2d_168[0][0]
____________________________________________________________________________________________________
activation_168 (Activation) (None, 17, 17, 192) 0 batch_normalization_168[0][0]
____________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 17, 17, 192) 147456 mixed7[0][0]
____________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 17, 17, 192) 258048 activation_168[0][0]
____________________________________________________________________________________________________
batch_normalization_165 (BatchNo (None, 17, 17, 192) 576 conv2d_165[0][0]
____________________________________________________________________________________________________
batch_normalization_169 (BatchNo (None, 17, 17, 192) 576 conv2d_169[0][0]
____________________________________________________________________________________________________
activation_165 (Activation) (None, 17, 17, 192) 0 batch_normalization_165[0][0]
____________________________________________________________________________________________________
activation_169 (Activation) (None, 17, 17, 192) 0 batch_normalization_169[0][0]
____________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 8, 8, 320) 552960 activation_165[0][0]
____________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 8, 8, 192) 331776 activation_169[0][0]
____________________________________________________________________________________________________
batch_normalization_166 (BatchNo (None, 8, 8, 320) 960 conv2d_166[0][0]
____________________________________________________________________________________________________
batch_normalization_170 (BatchNo (None, 8, 8, 192) 576 conv2d_170[0][0]
____________________________________________________________________________________________________
activation_166 (Activation) (None, 8, 8, 320) 0 batch_normalization_166[0][0]
____________________________________________________________________________________________________
activation_170 (Activation) (None, 8, 8, 192) 0 batch_normalization_170[0][0]
____________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D) (None, 8, 8, 768) 0 mixed7[0][0]
____________________________________________________________________________________________________
mixed8 (Concatenate) (None, 8, 8, 1280) 0 activation_166[0][0]
activation_170[0][0]
max_pooling2d_8[0][0]
____________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 8, 8, 448) 573440 mixed8[0][0]
____________________________________________________________________________________________________
batch_normalization_175 (BatchNo (None, 8, 8, 448) 1344 conv2d_175[0][0]
____________________________________________________________________________________________________
activation_175 (Activation) (None, 8, 8, 448) 0 batch_normalization_175[0][0]
____________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 8, 8, 384) 491520 mixed8[0][0]
____________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 8, 8, 384) 1548288 activation_175[0][0]
____________________________________________________________________________________________________
batch_normalization_172 (BatchNo (None, 8, 8, 384) 1152 conv2d_172[0][0]
____________________________________________________________________________________________________
batch_normalization_176 (BatchNo (None, 8, 8, 384) 1152 conv2d_176[0][0]
____________________________________________________________________________________________________
activation_172 (Activation) (None, 8, 8, 384) 0 batch_normalization_172[0][0]
____________________________________________________________________________________________________
activation_176 (Activation) (None, 8, 8, 384) 0 batch_normalization_176[0][0]
____________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 8, 8, 384) 442368 activation_172[0][0]
____________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 8, 8, 384) 442368 activation_172[0][0]
____________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 8, 8, 384) 442368 activation_176[0][0]
____________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 8, 8, 384) 442368 activation_176[0][0]
____________________________________________________________________________________________________
average_pooling2d_17 (AveragePoo (None, 8, 8, 1280) 0 mixed8[0][0]
____________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 8, 8, 320) 409600 mixed8[0][0]
____________________________________________________________________________________________________
batch_normalization_173 (BatchNo (None, 8, 8, 384) 1152 conv2d_173[0][0]
____________________________________________________________________________________________________
batch_normalization_174 (BatchNo (None, 8, 8, 384) 1152 conv2d_174[0][0]
____________________________________________________________________________________________________
batch_normalization_177 (BatchNo (None, 8, 8, 384) 1152 conv2d_177[0][0]
____________________________________________________________________________________________________
batch_normalization_178 (BatchNo (None, 8, 8, 384) 1152 conv2d_178[0][0]
____________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 8, 8, 192) 245760 average_pooling2d_17[0][0]
____________________________________________________________________________________________________
batch_normalization_171 (BatchNo (None, 8, 8, 320) 960 conv2d_171[0][0]
____________________________________________________________________________________________________
activation_173 (Activation) (None, 8, 8, 384) 0 batch_normalization_173[0][0]
____________________________________________________________________________________________________
activation_174 (Activation) (None, 8, 8, 384) 0 batch_normalization_174[0][0]
____________________________________________________________________________________________________
activation_177 (Activation) (None, 8, 8, 384) 0 batch_normalization_177[0][0]
____________________________________________________________________________________________________
activation_178 (Activation) (None, 8, 8, 384) 0 batch_normalization_178[0][0]
____________________________________________________________________________________________________
batch_normalization_179 (BatchNo (None, 8, 8, 192) 576 conv2d_179[0][0]
____________________________________________________________________________________________________
activation_171 (Activation) (None, 8, 8, 320) 0 batch_normalization_171[0][0]
____________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 8, 8, 768) 0 activation_173[0][0]
activation_174[0][0]
____________________________________________________________________________________________________
concatenate_3 (Concatenate) (None, 8, 8, 768) 0 activation_177[0][0]
activation_178[0][0]
____________________________________________________________________________________________________
activation_179 (Activation) (None, 8, 8, 192) 0 batch_normalization_179[0][0]
____________________________________________________________________________________________________
mixed9 (Concatenate) (None, 8, 8, 2048) 0 activation_171[0][0]
mixed9_0[0][0]
concatenate_3[0][0]
activation_179[0][0]
____________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 8, 8, 448) 917504 mixed9[0][0]
____________________________________________________________________________________________________
batch_normalization_184 (BatchNo (None, 8, 8, 448) 1344 conv2d_184[0][0]
____________________________________________________________________________________________________
activation_184 (Activation) (None, 8, 8, 448) 0 batch_normalization_184[0][0]
____________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 8, 8, 384) 786432 mixed9[0][0]
____________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 8, 8, 384) 1548288 activation_184[0][0]
____________________________________________________________________________________________________
batch_normalization_181 (BatchNo (None, 8, 8, 384) 1152 conv2d_181[0][0]
____________________________________________________________________________________________________
batch_normalization_185 (BatchNo (None, 8, 8, 384) 1152 conv2d_185[0][0]
____________________________________________________________________________________________________
activation_181 (Activation) (None, 8, 8, 384) 0 batch_normalization_181[0][0]
____________________________________________________________________________________________________
activation_185 (Activation) (None, 8, 8, 384) 0 batch_normalization_185[0][0]
____________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 8, 8, 384) 442368 activation_181[0][0]
____________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 8, 8, 384) 442368 activation_181[0][0]
____________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 8, 8, 384) 442368 activation_185[0][0]
____________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 8, 8, 384) 442368 activation_185[0][0]
____________________________________________________________________________________________________
average_pooling2d_18 (AveragePoo (None, 8, 8, 2048) 0 mixed9[0][0]
____________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 8, 8, 320) 655360 mixed9[0][0]
____________________________________________________________________________________________________
batch_normalization_182 (BatchNo (None, 8, 8, 384) 1152 conv2d_182[0][0]
____________________________________________________________________________________________________
batch_normalization_183 (BatchNo (None, 8, 8, 384) 1152 conv2d_183[0][0]
____________________________________________________________________________________________________
batch_normalization_186 (BatchNo (None, 8, 8, 384) 1152 conv2d_186[0][0]
____________________________________________________________________________________________________
batch_normalization_187 (BatchNo (None, 8, 8, 384) 1152 conv2d_187[0][0]
____________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 8, 8, 192) 393216 average_pooling2d_18[0][0]
____________________________________________________________________________________________________
batch_normalization_180 (BatchNo (None, 8, 8, 320) 960 conv2d_180[0][0]
____________________________________________________________________________________________________
activation_182 (Activation) (None, 8, 8, 384) 0 batch_normalization_182[0][0]
____________________________________________________________________________________________________
activation_183 (Activation) (None, 8, 8, 384) 0 batch_normalization_183[0][0]
____________________________________________________________________________________________________
activation_186 (Activation) (None, 8, 8, 384) 0 batch_normalization_186[0][0]
____________________________________________________________________________________________________
activation_187 (Activation) (None, 8, 8, 384) 0 batch_normalization_187[0][0]
____________________________________________________________________________________________________
batch_normalization_188 (BatchNo (None, 8, 8, 192) 576 conv2d_188[0][0]
____________________________________________________________________________________________________
activation_180 (Activation) (None, 8, 8, 320) 0 batch_normalization_180[0][0]
____________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 8, 8, 768) 0 activation_182[0][0]
activation_183[0][0]
____________________________________________________________________________________________________
concatenate_4 (Concatenate) (None, 8, 8, 768) 0 activation_186[0][0]
activation_187[0][0]
____________________________________________________________________________________________________
activation_188 (Activation) (None, 8, 8, 192) 0 batch_normalization_188[0][0]
____________________________________________________________________________________________________
mixed10 (Concatenate) (None, 8, 8, 2048) 0 activation_180[0][0]
mixed9_1[0][0]
concatenate_4[0][0]
activation_188[0][0]
____________________________________________________________________________________________________
global_average_pooling2d_1 (Glob (None, 2048) 0 mixed10[0][0]
____________________________________________________________________________________________________
dense_1 (Dense) (None, 2) 4098 global_average_pooling2d_1[0][0]
====================================================================================================
Total params: 21,806,882
Trainable params: 4,098
Non-trainable params: 21,802,784
____________________________________________________________________________________________________
In [10]:
# We preload in memory the original images preprocessed.
datagen = preprocessing.image.ImageDataGenerator()
X_all = []
y_all = []
for i, c in enumerate(categories):
cat_dir = join(data_path, 'trn', c)
sample_size_category = len([name for name in os.listdir(cat_dir)])
print(cat_dir, sample_size_category)
generator = datagen.flow_from_directory(join(data_path, 'trn'), classes=[c],
target_size=(299, 299), batch_size=sample_size_category)
X, y = next(generator)
X = applications.inception_v3.preprocess_input(X)
X_all += list(X)
y_all += [i]*len(y)
X_all = np.array(X_all)
y_all = np.array(y_all)
print(X_all.shape, y_all.shape)
/home/ubuntu/data/training/image/cats_dogs/sample/trn/dog 981
Found 981 images belonging to 1 classes.
/home/ubuntu/data/training/image/cats_dogs/sample/trn/cat 981
Found 981 images belonging to 1 classes.
(1962, 299, 299, 3) (1962,)
In [11]:
# Separate train and valid datasets
from sklearn.model_selection import train_test_split
X_train, X_valid, y_train, y_valid = train_test_split(X_all, y_all, train_size=0.7)
print(X_train.shape, X_valid.shape)
print(y_train.shape, y_valid.shape)
/home/jorge/anaconda3/envs/tf14/lib/python3.5/site-packages/sklearn/model_selection/_split.py:2010: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.
FutureWarning)
(1373, 299, 299, 3) (589, 299, 299, 3)
(1373,) (589,)
In [12]:
datagen = preprocessing.image.ImageDataGenerator(
shear_range=0.2,
zoom_range=0.2,
width_shift_range=0.1, # randomly shift images horizontally (fraction of total width)
height_shift_range=0.1, # randomly shift images vertically (fraction of total height)
horizontal_flip=True)
In [13]:
batch_size = 32
train_size = X_train.shape[0]
# Select optimizer and compile model
opt = optimizers.RMSprop(lr=1E-3)
model.compile(loss='sparse_categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
# Train
tb_callback_ln = callbacks.TensorBoard(log_dir='/tmp/tensorboard/cats_dogs/inception')
history = model.fit_generator(datagen.flow(X_train, y_train),
steps_per_epoch = train_size // batch_size,
epochs = 10,
validation_data = (X_valid, y_valid),
callbacks=[tb_callback_ln])
Epoch 1/10
42/42 [==============================] - 22s - loss: 0.4503 - acc: 0.8147 - val_loss: 0.1748 - val_acc: 0.9660
Epoch 2/10
42/42 [==============================] - 19s - loss: 0.2020 - acc: 0.9298 - val_loss: 0.1003 - val_acc: 0.9728
Epoch 3/10
42/42 [==============================] - 20s - loss: 0.1675 - acc: 0.9394 - val_loss: 0.0898 - val_acc: 0.9711
Epoch 4/10
42/42 [==============================] - 20s - loss: 0.1384 - acc: 0.9462 - val_loss: 0.0776 - val_acc: 0.9779
Epoch 5/10
42/42 [==============================] - 20s - loss: 0.1340 - acc: 0.9597 - val_loss: 0.0770 - val_acc: 0.9813
Epoch 6/10
42/42 [==============================] - 20s - loss: 0.1189 - acc: 0.9516 - val_loss: 0.0680 - val_acc: 0.9813
Epoch 7/10
42/42 [==============================] - 20s - loss: 0.0894 - acc: 0.9687 - val_loss: 0.0726 - val_acc: 0.9813
Epoch 8/10
42/42 [==============================] - 20s - loss: 0.0775 - acc: 0.9769 - val_loss: 0.0656 - val_acc: 0.9830
Epoch 9/10
42/42 [==============================] - 19s - loss: 0.0989 - acc: 0.9650 - val_loss: 0.0626 - val_acc: 0.9830
Epoch 10/10
42/42 [==============================] - 20s - loss: 0.1075 - acc: 0.9658 - val_loss: 0.0684 - val_acc: 0.9779
In [ ]:
In [15]:
# Print the transformations of the first inception cell: mixed0
model2 = models.Model(inputs=base_model.input, outputs=base_model.get_layer('mixed0').output)
# Read image img
my_filelist = [f for f in listdir(join(data_path, 'trn', categories[0])) if isfile(join(data_path, 'trn', categories[0], f))]
img_path = join(data_path, 'trn', categories[0], my_filelist[0])
img = load_image(img_path)
x = applications.inception_v3.preprocess_input(img)
mixed0_features = model2.predict(x)
print('Fists inception cell output shape: ', mixed0_features.shape)
Fists inception cell output shape: (1, 35, 35, 256)
In [16]:
plt.rcParams['figure.figsize'] = (15, 15) # size of images
plt.rcParams['image.cmap'] = 'gray' # use grayscale
fig = plt.figure()
n=0
for i in range(100): #Print the first 100 outputs of the 256.
n += 1
a = fig.add_subplot(10,10,n)
plt.imshow(mixed0_features[0,:,:,i])
In [17]:
#Extract the first convolutional weights
conv2d_1_w = model2.get_layer(base_model.layers[1].name).get_weights()[0]
conv2d_1_w.shape
Out[17]:
(3, 3, 3, 32)
In [ ]:
In [18]:
# let's visualize layer names and layer indices to see how many layers
# we should freeze:
for i, layer in enumerate(base_model.layers):
print(i, layer.name)
0 input_2
1 conv2d_95
2 batch_normalization_95
3 activation_95
4 conv2d_96
5 batch_normalization_96
6 activation_96
7 conv2d_97
8 batch_normalization_97
9 activation_97
10 max_pooling2d_5
11 conv2d_98
12 batch_normalization_98
13 activation_98
14 conv2d_99
15 batch_normalization_99
16 activation_99
17 max_pooling2d_6
18 conv2d_103
19 batch_normalization_103
20 activation_103
21 conv2d_101
22 conv2d_104
23 batch_normalization_101
24 batch_normalization_104
25 activation_101
26 activation_104
27 average_pooling2d_10
28 conv2d_100
29 conv2d_102
30 conv2d_105
31 conv2d_106
32 batch_normalization_100
33 batch_normalization_102
34 batch_normalization_105
35 batch_normalization_106
36 activation_100
37 activation_102
38 activation_105
39 activation_106
40 mixed0
41 conv2d_110
42 batch_normalization_110
43 activation_110
44 conv2d_108
45 conv2d_111
46 batch_normalization_108
47 batch_normalization_111
48 activation_108
49 activation_111
50 average_pooling2d_11
51 conv2d_107
52 conv2d_109
53 conv2d_112
54 conv2d_113
55 batch_normalization_107
56 batch_normalization_109
57 batch_normalization_112
58 batch_normalization_113
59 activation_107
60 activation_109
61 activation_112
62 activation_113
63 mixed1
64 conv2d_117
65 batch_normalization_117
66 activation_117
67 conv2d_115
68 conv2d_118
69 batch_normalization_115
70 batch_normalization_118
71 activation_115
72 activation_118
73 average_pooling2d_12
74 conv2d_114
75 conv2d_116
76 conv2d_119
77 conv2d_120
78 batch_normalization_114
79 batch_normalization_116
80 batch_normalization_119
81 batch_normalization_120
82 activation_114
83 activation_116
84 activation_119
85 activation_120
86 mixed2
87 conv2d_122
88 batch_normalization_122
89 activation_122
90 conv2d_123
91 batch_normalization_123
92 activation_123
93 conv2d_121
94 conv2d_124
95 batch_normalization_121
96 batch_normalization_124
97 activation_121
98 activation_124
99 max_pooling2d_7
100 mixed3
101 conv2d_129
102 batch_normalization_129
103 activation_129
104 conv2d_130
105 batch_normalization_130
106 activation_130
107 conv2d_126
108 conv2d_131
109 batch_normalization_126
110 batch_normalization_131
111 activation_126
112 activation_131
113 conv2d_127
114 conv2d_132
115 batch_normalization_127
116 batch_normalization_132
117 activation_127
118 activation_132
119 average_pooling2d_13
120 conv2d_125
121 conv2d_128
122 conv2d_133
123 conv2d_134
124 batch_normalization_125
125 batch_normalization_128
126 batch_normalization_133
127 batch_normalization_134
128 activation_125
129 activation_128
130 activation_133
131 activation_134
132 mixed4
133 conv2d_139
134 batch_normalization_139
135 activation_139
136 conv2d_140
137 batch_normalization_140
138 activation_140
139 conv2d_136
140 conv2d_141
141 batch_normalization_136
142 batch_normalization_141
143 activation_136
144 activation_141
145 conv2d_137
146 conv2d_142
147 batch_normalization_137
148 batch_normalization_142
149 activation_137
150 activation_142
151 average_pooling2d_14
152 conv2d_135
153 conv2d_138
154 conv2d_143
155 conv2d_144
156 batch_normalization_135
157 batch_normalization_138
158 batch_normalization_143
159 batch_normalization_144
160 activation_135
161 activation_138
162 activation_143
163 activation_144
164 mixed5
165 conv2d_149
166 batch_normalization_149
167 activation_149
168 conv2d_150
169 batch_normalization_150
170 activation_150
171 conv2d_146
172 conv2d_151
173 batch_normalization_146
174 batch_normalization_151
175 activation_146
176 activation_151
177 conv2d_147
178 conv2d_152
179 batch_normalization_147
180 batch_normalization_152
181 activation_147
182 activation_152
183 average_pooling2d_15
184 conv2d_145
185 conv2d_148
186 conv2d_153
187 conv2d_154
188 batch_normalization_145
189 batch_normalization_148
190 batch_normalization_153
191 batch_normalization_154
192 activation_145
193 activation_148
194 activation_153
195 activation_154
196 mixed6
197 conv2d_159
198 batch_normalization_159
199 activation_159
200 conv2d_160
201 batch_normalization_160
202 activation_160
203 conv2d_156
204 conv2d_161
205 batch_normalization_156
206 batch_normalization_161
207 activation_156
208 activation_161
209 conv2d_157
210 conv2d_162
211 batch_normalization_157
212 batch_normalization_162
213 activation_157
214 activation_162
215 average_pooling2d_16
216 conv2d_155
217 conv2d_158
218 conv2d_163
219 conv2d_164
220 batch_normalization_155
221 batch_normalization_158
222 batch_normalization_163
223 batch_normalization_164
224 activation_155
225 activation_158
226 activation_163
227 activation_164
228 mixed7
229 conv2d_167
230 batch_normalization_167
231 activation_167
232 conv2d_168
233 batch_normalization_168
234 activation_168
235 conv2d_165
236 conv2d_169
237 batch_normalization_165
238 batch_normalization_169
239 activation_165
240 activation_169
241 conv2d_166
242 conv2d_170
243 batch_normalization_166
244 batch_normalization_170
245 activation_166
246 activation_170
247 max_pooling2d_8
248 mixed8
249 conv2d_175
250 batch_normalization_175
251 activation_175
252 conv2d_172
253 conv2d_176
254 batch_normalization_172
255 batch_normalization_176
256 activation_172
257 activation_176
258 conv2d_173
259 conv2d_174
260 conv2d_177
261 conv2d_178
262 average_pooling2d_17
263 conv2d_171
264 batch_normalization_173
265 batch_normalization_174
266 batch_normalization_177
267 batch_normalization_178
268 conv2d_179
269 batch_normalization_171
270 activation_173
271 activation_174
272 activation_177
273 activation_178
274 batch_normalization_179
275 activation_171
276 mixed9_0
277 concatenate_3
278 activation_179
279 mixed9
280 conv2d_184
281 batch_normalization_184
282 activation_184
283 conv2d_181
284 conv2d_185
285 batch_normalization_181
286 batch_normalization_185
287 activation_181
288 activation_185
289 conv2d_182
290 conv2d_183
291 conv2d_186
292 conv2d_187
293 average_pooling2d_18
294 conv2d_180
295 batch_normalization_182
296 batch_normalization_183
297 batch_normalization_186
298 batch_normalization_187
299 conv2d_188
300 batch_normalization_180
301 activation_182
302 activation_183
303 activation_186
304 activation_187
305 batch_normalization_188
306 activation_180
307 mixed9_1
308 concatenate_4
309 activation_188
310 mixed10
In [19]:
# we chose to train the top 2 inception blocks, i.e. we will freeze from the mixed8 layer
# Then, freeze the first 249 layers and unfreeze the rest:
for layer in model.layers[:249]:
layer.trainable = False
for layer in model.layers[249:]:
layer.trainable = True
In [20]:
# we need to recompile the model for these modifications to take effect
# we use SGD with a low learning rate
opt = optimizers.SGD(lr=0.0001, momentum=0.9)
model.compile(optimizer=opt, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# we train our model again (this time fine-tuning the top 2 inception blocks
# alongside the top Dense layers
history2 = model.fit_generator(datagen.flow(X_train, y_train),
steps_per_epoch = train_size // batch_size,
epochs = 10,
validation_data = (X_valid, y_valid),
callbacks=[tb_callback_ln])
Epoch 1/10
42/42 [==============================] - 22s - loss: 0.0817 - acc: 0.9680 - val_loss: 0.0632 - val_acc: 0.9779
Epoch 2/10
42/42 [==============================] - 19s - loss: 0.0603 - acc: 0.9784 - val_loss: 0.0606 - val_acc: 0.9779
Epoch 3/10
42/42 [==============================] - 20s - loss: 0.0625 - acc: 0.9769 - val_loss: 0.0581 - val_acc: 0.9796
Epoch 4/10
42/42 [==============================] - 20s - loss: 0.0771 - acc: 0.9709 - val_loss: 0.0576 - val_acc: 0.9813
Epoch 5/10
42/42 [==============================] - 20s - loss: 0.0608 - acc: 0.9754 - val_loss: 0.0569 - val_acc: 0.9813
Epoch 6/10
42/42 [==============================] - 20s - loss: 0.0699 - acc: 0.9731 - val_loss: 0.0551 - val_acc: 0.9813
Epoch 7/10
42/42 [==============================] - 20s - loss: 0.0651 - acc: 0.9784 - val_loss: 0.0558 - val_acc: 0.9830
Epoch 8/10
42/42 [==============================] - 20s - loss: 0.0661 - acc: 0.9777 - val_loss: 0.0543 - val_acc: 0.9813
Epoch 9/10
42/42 [==============================] - 20s - loss: 0.0523 - acc: 0.9791 - val_loss: 0.0532 - val_acc: 0.9813
Epoch 10/10
42/42 [==============================] - 20s - loss: 0.0650 - acc: 0.9783 - val_loss: 0.0530 - val_acc: 0.9813
In [ ]:
In [60]:
#Read test data into memory
X_tst_raw = []
X_tst = []
y_tst = []
for i, c in enumerate(categories):
cat_dir = join(data_path, 'tst', c)
for f in os.listdir(cat_dir):
img = preprocessing.image.load_img(join(cat_dir,f), target_size=(299, 299))
x = preprocessing.image.img_to_array(img)
x = np.expand_dims(x, axis=0)
X_tst += list(applications.inception_v3.preprocess_input(x))
X_tst_raw += [img]
y_tst += [i]
X_tst = np.array(X_tst)
y_tst = np.array(y_tst)
print(X_tst.shape, y_tst.shape)
(4022, 299, 299, 3) (4022,)
In [61]:
# Score
probability_test = model.predict(X_tst)
probability_test.shape
Out[61]:
(4022, 2)
In [65]:
# Predict
clasify_test = np.argmax(probability_test, axis=1)
prob_clasify = [probability_test[i,clasify_test[i]] for i in range(len(clasify_test))]
clasify_test.shape
Out[65]:
(4022,)
In [67]:
# Print errors
errors_index=[]
for i in range(len(y_tst)):
if y_tst[i] != clasify_test[i]:
errors_index += [i]
print('Pct errors test: ', len(errors_index)/len(y_tst))
fig = plt.figure()
n=0
for i in errors_index[:25]:
n += 1
a = fig.add_subplot(5, 5, n)
a.set_title('P:' + str(round(prob_clasify[i],3)))
plt.imshow(X_tst_raw[i])
Pct errors test: 0.015415216310293387
In [ ]:
In [ ]:
Content source: sueiras/training
Similar notebooks: