Transfer learning example

- Faces96 dataset, downloaded from http://cswww.essex.ac.uk/mv/allfaces/faces96.html
- Pretrained keras models

In [1]:
from __future__ import print_function

#Basic libraries
import numpy as np
import tensorflow as tf
import time
from os import listdir, walk
from os.path import isfile, join
import random

#Show images
import matplotlib.pyplot as plt
%matplotlib inline
plt.rcParams['figure.figsize'] = (15, 15)        # size of images
plt.rcParams['image.interpolation'] = 'nearest'  # show exact image


# Select GPU
# Order devices by PCI bus id so "0" below refers to a stable physical GPU.
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"


print('Tensorflow version: ', tf.__version__)

# Root of the dataset: expects a train/ subfolder with one directory per
# person (class), as scanned in the next cell.
# NOTE(review): absolute machine-specific path — consider making it configurable.
data_path = '/home/ubuntu/data/training/image/faces/'


Tensorflow version:  1.4.0

In [2]:
# Data features
# Class names are the immediate subdirectories of train/ (one per person).
categories = []
labels = {}
for dirname, dirnames, filenames in walk(join(data_path, 'train')):
    # Only the first walk() iteration lists the class folders.
    for i, subdirname in enumerate(dirnames):
        categories.append(subdirname)
        labels[i] = subdirname
    # Stop here: letting walk() descend would append nested directories as
    # bogus classes and, since enumerate restarts at 0 each iteration,
    # silently overwrite entries of `labels`.
    break
print('Categories: ', categories)

n_classes = len(categories)
print('Num classes: ', n_classes)


Categories:  ['carund', 'sdaly', 'agaffa', 'pmathi', 'rchadh', 'spyount', 'wylsow', 'filip', 'kdaska', 'amtalb', 'bcbesta', 'jwdpag', 'dmcdow', 'sherbe', 'dlwise', '9540733', '9540644', 'shamilc', 'nhahme', 'jepott', 'dhaydo', 'idsizm', 'memiah', '9540652', '9540563', '9540709', 'mpdavie', 'pvaris', 'cjhewi', 'gfilip', 'aolcer', 'kelbied', 'mroper', '9540792', 'gdsamp', 'pjburr', 'gmidgl', 'llevet', 'adpoun', 'mkotza', 'csjscu', 'amflem', 'djevan', 'lcelli', 'jphoor', 'jjkeho', 'wardcy', 'dfarre', 'pchyta', 'cowie', 'moshep', '9540725', 'pguan', 'gghazv', 'gmagul', 'swren', 'rposbo', 'mystery', '9540636', 'ireloz', '9540601', '9556273', 'nmakri', '9540814', '9540822', 'gcprie', 'akopci', 'hmgula', 'hwyman', 'icolli', 'pjmcar', 'dnoguy', 'pgray', 'smfarrf', 'jonesd', 'atfpou', 'rbrown', 'mamago', 'jbgood', 'pmbtin', 'pgfost', 'tjpret', '9540512', 'kphans', 'gjbods', 'darodr', 'tthurs', 'cjbowe', 'smredh', '9540768', '9540504', '9540784', 'smalga', 'gllong', 'fmquaz', 'mwilco', 'mclarkd', 'djotto', 'jlrums', '9540474', 'wjsaic', 'pmspry', 'rhosan', 'rshaw', 'nhotoo', 'mdcowl', 'rscoop', 'jjdacl', '9602283', 'jjohns', '9630643', 'mwillc', 'gpreec', 'rjwils', 'jedphi', '9540849', 'djpben', 'jcgrij', 'todris', 'ldebna', 'snhopp', 'ihopki', '9540717', '9540741', 'bmphil', 'mhoore', '9540687', 'cpatki', 'pgolds', 'elalio', 'alebes', 'oegill', 'cprice', 'rjobyr', 'djtye', 'arwebb', 'nandri', 'rwsick', 'tchand', 'azavro', '9540547', 'shpill', '9540695', 'ischou', 'reaton', 'acatsa', 'mkvwon', 'dkourkf', 'jgelln', 'pwest', '9540628']
Num classes:  151

In [3]:
# Preview up to 5 randomly chosen images for each of (at most) the first
# 10 categories, laid out as one row of subplots per category.
show_categories = categories[:10] if n_classes > 10 else categories

fig = plt.figure()
subplot_idx = 0
for category in show_categories:
    category_dir = join(data_path, 'train', category)
    image_files = [f for f in listdir(category_dir) if isfile(join(category_dir, f))]
    random.shuffle(image_files)  # sample images at random rather than first-5
    for image_file in image_files[:5]:
        subplot_idx += 1
        axis = fig.add_subplot(len(show_categories), 5, subplot_idx)
        axis.set_title(category)
        axis.imshow(plt.imread(join(category_dir, image_file)))


Example: evaluate one image using the ImageNet-trained InceptionV3 model.


In [6]:
#Load model
# Full InceptionV3 with the 1000-class ImageNet classification head
# (include_top defaults to True), used here only for a sanity-check prediction.
model = tf.keras.applications.inception_v3.InceptionV3(weights='imagenet')

# Select the path for one image of the selected category
# NOTE(review): listdir()[0] is not guaranteed to be an image file or a
# deterministic choice — order is filesystem-dependent.
path_category = join(data_path, 'train', categories[0])
img_path = join(path_category, os.listdir(path_category)[0])


# Load an image from disk and adapt it to the InceptionV3 input format.
def load_image(img_path, draw=True, transform=True):
    """Read one image as a 1-image batch ready for InceptionV3.

    Resizes to 299x299; if `transform`, applies the Inception-specific
    preprocessing to the pixel values; if `draw`, displays the raw
    (pre-preprocessing) image with matplotlib.
    """
    pil_img = tf.keras.preprocessing.image.load_img(img_path, target_size=(299, 299))
    batch = np.expand_dims(tf.keras.preprocessing.image.img_to_array(pil_img), axis=0)
    if transform:
        batch = tf.keras.applications.inception_v3.preprocess_input(batch)
    if draw:
        plt.imshow(pil_img)
    return batch

# Run the sample image through the ImageNet model (expect a poor face-related
# answer: ImageNet has no person-identity classes).
x = load_image(img_path)
preds = model.predict(x)

# decode the results into a list of tuples (class, description, probability)
print('Predicted:', tf.keras.applications.inception_v3.decode_predictions(preds, top=3)[0])


Predicted: [('n03595614', 'jersey', 0.59271342), ('n04404412', 'television', 0.040898114), ('n03642806', 'laptop', 0.014330408)]

Fine tuning of InceptionV3 for the faces problem


In [8]:
#Load base model
# include_top=False drops the ImageNet classifier, keeping only the
# convolutional feature extractor so a new head can be attached.
base_model = tf.keras.applications.inception_v3.InceptionV3(input_shape=(299, 299, 3), weights='imagenet', include_top=False)

# Check the output of the last loaded layer
print('Last layer shape:', base_model.output)


Last layer shape: Tensor("mixed10_2/concat:0", shape=(?, 8, 8, 2048), dtype=float32)

In [9]:
# add a global spatial average pooling layer
# Collapses the (8, 8, 2048) base output to one 2048-vector per image.
x = tf.keras.layers.GlobalAveragePooling2D()(base_model.output)
print('New last layer shape:', x)

# Add a fully-connected layer
x = tf.keras.layers.Dense(1024, activation='relu')(x)
# Dropout to regularize the new, randomly initialized head.
x = tf.keras.layers.Dropout(0.5)(x)
# Add the prediction layer. n_classes
# Softmax output: one unit per face identity.
predictions = tf.keras.layers.Dense(n_classes, activation='softmax')(x)


New last layer shape: Tensor("global_average_pooling2d/Mean:0", shape=(?, 2048), dtype=float32)

In [10]:
# Assemble the end-to-end model: InceptionV3 feature extractor plus the
# newly added classification head.
model = tf.keras.models.Model(inputs=base_model.input, outputs=predictions)

# Freeze every layer of the pretrained base so that only the new
# (randomly initialized) head is updated during training.
for frozen_layer in base_model.layers:
    frozen_layer.trainable = False

# Print the summary of the model
model.summary()


____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
input_2 (InputLayer)             (None, 299, 299, 3)   0                                            
____________________________________________________________________________________________________
conv2d_95 (Conv2D)               (None, 149, 149, 32)  864         input_2[0][0]                    
____________________________________________________________________________________________________
batch_normalization_95 (BatchNor (None, 149, 149, 32)  96          conv2d_95[0][0]                  
____________________________________________________________________________________________________
activation_95 (Activation)       (None, 149, 149, 32)  0           batch_normalization_95[0][0]     
____________________________________________________________________________________________________
conv2d_96 (Conv2D)               (None, 147, 147, 32)  9216        activation_95[0][0]              
____________________________________________________________________________________________________
batch_normalization_96 (BatchNor (None, 147, 147, 32)  96          conv2d_96[0][0]                  
____________________________________________________________________________________________________
activation_96 (Activation)       (None, 147, 147, 32)  0           batch_normalization_96[0][0]     
____________________________________________________________________________________________________
conv2d_97 (Conv2D)               (None, 147, 147, 64)  18432       activation_96[0][0]              
____________________________________________________________________________________________________
batch_normalization_97 (BatchNor (None, 147, 147, 64)  192         conv2d_97[0][0]                  
____________________________________________________________________________________________________
activation_97 (Activation)       (None, 147, 147, 64)  0           batch_normalization_97[0][0]     
____________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D)   (None, 73, 73, 64)    0           activation_97[0][0]              
____________________________________________________________________________________________________
conv2d_98 (Conv2D)               (None, 73, 73, 80)    5120        max_pooling2d_5[0][0]            
____________________________________________________________________________________________________
batch_normalization_98 (BatchNor (None, 73, 73, 80)    240         conv2d_98[0][0]                  
____________________________________________________________________________________________________
activation_98 (Activation)       (None, 73, 73, 80)    0           batch_normalization_98[0][0]     
____________________________________________________________________________________________________
conv2d_99 (Conv2D)               (None, 71, 71, 192)   138240      activation_98[0][0]              
____________________________________________________________________________________________________
batch_normalization_99 (BatchNor (None, 71, 71, 192)   576         conv2d_99[0][0]                  
____________________________________________________________________________________________________
activation_99 (Activation)       (None, 71, 71, 192)   0           batch_normalization_99[0][0]     
____________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D)   (None, 35, 35, 192)   0           activation_99[0][0]              
____________________________________________________________________________________________________
conv2d_103 (Conv2D)              (None, 35, 35, 64)    12288       max_pooling2d_6[0][0]            
____________________________________________________________________________________________________
batch_normalization_103 (BatchNo (None, 35, 35, 64)    192         conv2d_103[0][0]                 
____________________________________________________________________________________________________
activation_103 (Activation)      (None, 35, 35, 64)    0           batch_normalization_103[0][0]    
____________________________________________________________________________________________________
conv2d_101 (Conv2D)              (None, 35, 35, 48)    9216        max_pooling2d_6[0][0]            
____________________________________________________________________________________________________
conv2d_104 (Conv2D)              (None, 35, 35, 96)    55296       activation_103[0][0]             
____________________________________________________________________________________________________
batch_normalization_101 (BatchNo (None, 35, 35, 48)    144         conv2d_101[0][0]                 
____________________________________________________________________________________________________
batch_normalization_104 (BatchNo (None, 35, 35, 96)    288         conv2d_104[0][0]                 
____________________________________________________________________________________________________
activation_101 (Activation)      (None, 35, 35, 48)    0           batch_normalization_101[0][0]    
____________________________________________________________________________________________________
activation_104 (Activation)      (None, 35, 35, 96)    0           batch_normalization_104[0][0]    
____________________________________________________________________________________________________
average_pooling2d_10 (AveragePoo (None, 35, 35, 192)   0           max_pooling2d_6[0][0]            
____________________________________________________________________________________________________
conv2d_100 (Conv2D)              (None, 35, 35, 64)    12288       max_pooling2d_6[0][0]            
____________________________________________________________________________________________________
conv2d_102 (Conv2D)              (None, 35, 35, 64)    76800       activation_101[0][0]             
____________________________________________________________________________________________________
conv2d_105 (Conv2D)              (None, 35, 35, 96)    82944       activation_104[0][0]             
____________________________________________________________________________________________________
conv2d_106 (Conv2D)              (None, 35, 35, 32)    6144        average_pooling2d_10[0][0]       
____________________________________________________________________________________________________
batch_normalization_100 (BatchNo (None, 35, 35, 64)    192         conv2d_100[0][0]                 
____________________________________________________________________________________________________
batch_normalization_102 (BatchNo (None, 35, 35, 64)    192         conv2d_102[0][0]                 
____________________________________________________________________________________________________
batch_normalization_105 (BatchNo (None, 35, 35, 96)    288         conv2d_105[0][0]                 
____________________________________________________________________________________________________
batch_normalization_106 (BatchNo (None, 35, 35, 32)    96          conv2d_106[0][0]                 
____________________________________________________________________________________________________
activation_100 (Activation)      (None, 35, 35, 64)    0           batch_normalization_100[0][0]    
____________________________________________________________________________________________________
activation_102 (Activation)      (None, 35, 35, 64)    0           batch_normalization_102[0][0]    
____________________________________________________________________________________________________
activation_105 (Activation)      (None, 35, 35, 96)    0           batch_normalization_105[0][0]    
____________________________________________________________________________________________________
activation_106 (Activation)      (None, 35, 35, 32)    0           batch_normalization_106[0][0]    
____________________________________________________________________________________________________
mixed0 (Concatenate)             (None, 35, 35, 256)   0           activation_100[0][0]             
                                                                   activation_102[0][0]             
                                                                   activation_105[0][0]             
                                                                   activation_106[0][0]             
____________________________________________________________________________________________________
conv2d_110 (Conv2D)              (None, 35, 35, 64)    16384       mixed0[0][0]                     
____________________________________________________________________________________________________
batch_normalization_110 (BatchNo (None, 35, 35, 64)    192         conv2d_110[0][0]                 
____________________________________________________________________________________________________
activation_110 (Activation)      (None, 35, 35, 64)    0           batch_normalization_110[0][0]    
____________________________________________________________________________________________________
conv2d_108 (Conv2D)              (None, 35, 35, 48)    12288       mixed0[0][0]                     
____________________________________________________________________________________________________
conv2d_111 (Conv2D)              (None, 35, 35, 96)    55296       activation_110[0][0]             
____________________________________________________________________________________________________
batch_normalization_108 (BatchNo (None, 35, 35, 48)    144         conv2d_108[0][0]                 
____________________________________________________________________________________________________
batch_normalization_111 (BatchNo (None, 35, 35, 96)    288         conv2d_111[0][0]                 
____________________________________________________________________________________________________
activation_108 (Activation)      (None, 35, 35, 48)    0           batch_normalization_108[0][0]    
____________________________________________________________________________________________________
activation_111 (Activation)      (None, 35, 35, 96)    0           batch_normalization_111[0][0]    
____________________________________________________________________________________________________
average_pooling2d_11 (AveragePoo (None, 35, 35, 256)   0           mixed0[0][0]                     
____________________________________________________________________________________________________
conv2d_107 (Conv2D)              (None, 35, 35, 64)    16384       mixed0[0][0]                     
____________________________________________________________________________________________________
conv2d_109 (Conv2D)              (None, 35, 35, 64)    76800       activation_108[0][0]             
____________________________________________________________________________________________________
conv2d_112 (Conv2D)              (None, 35, 35, 96)    82944       activation_111[0][0]             
____________________________________________________________________________________________________
conv2d_113 (Conv2D)              (None, 35, 35, 64)    16384       average_pooling2d_11[0][0]       
____________________________________________________________________________________________________
batch_normalization_107 (BatchNo (None, 35, 35, 64)    192         conv2d_107[0][0]                 
____________________________________________________________________________________________________
batch_normalization_109 (BatchNo (None, 35, 35, 64)    192         conv2d_109[0][0]                 
____________________________________________________________________________________________________
batch_normalization_112 (BatchNo (None, 35, 35, 96)    288         conv2d_112[0][0]                 
____________________________________________________________________________________________________
batch_normalization_113 (BatchNo (None, 35, 35, 64)    192         conv2d_113[0][0]                 
____________________________________________________________________________________________________
activation_107 (Activation)      (None, 35, 35, 64)    0           batch_normalization_107[0][0]    
____________________________________________________________________________________________________
activation_109 (Activation)      (None, 35, 35, 64)    0           batch_normalization_109[0][0]    
____________________________________________________________________________________________________
activation_112 (Activation)      (None, 35, 35, 96)    0           batch_normalization_112[0][0]    
____________________________________________________________________________________________________
activation_113 (Activation)      (None, 35, 35, 64)    0           batch_normalization_113[0][0]    
____________________________________________________________________________________________________
mixed1 (Concatenate)             (None, 35, 35, 288)   0           activation_107[0][0]             
                                                                   activation_109[0][0]             
                                                                   activation_112[0][0]             
                                                                   activation_113[0][0]             
____________________________________________________________________________________________________
conv2d_117 (Conv2D)              (None, 35, 35, 64)    18432       mixed1[0][0]                     
____________________________________________________________________________________________________
batch_normalization_117 (BatchNo (None, 35, 35, 64)    192         conv2d_117[0][0]                 
____________________________________________________________________________________________________
activation_117 (Activation)      (None, 35, 35, 64)    0           batch_normalization_117[0][0]    
____________________________________________________________________________________________________
conv2d_115 (Conv2D)              (None, 35, 35, 48)    13824       mixed1[0][0]                     
____________________________________________________________________________________________________
conv2d_118 (Conv2D)              (None, 35, 35, 96)    55296       activation_117[0][0]             
____________________________________________________________________________________________________
batch_normalization_115 (BatchNo (None, 35, 35, 48)    144         conv2d_115[0][0]                 
____________________________________________________________________________________________________
batch_normalization_118 (BatchNo (None, 35, 35, 96)    288         conv2d_118[0][0]                 
____________________________________________________________________________________________________
activation_115 (Activation)      (None, 35, 35, 48)    0           batch_normalization_115[0][0]    
____________________________________________________________________________________________________
activation_118 (Activation)      (None, 35, 35, 96)    0           batch_normalization_118[0][0]    
____________________________________________________________________________________________________
average_pooling2d_12 (AveragePoo (None, 35, 35, 288)   0           mixed1[0][0]                     
____________________________________________________________________________________________________
conv2d_114 (Conv2D)              (None, 35, 35, 64)    18432       mixed1[0][0]                     
____________________________________________________________________________________________________
conv2d_116 (Conv2D)              (None, 35, 35, 64)    76800       activation_115[0][0]             
____________________________________________________________________________________________________
conv2d_119 (Conv2D)              (None, 35, 35, 96)    82944       activation_118[0][0]             
____________________________________________________________________________________________________
conv2d_120 (Conv2D)              (None, 35, 35, 64)    18432       average_pooling2d_12[0][0]       
____________________________________________________________________________________________________
batch_normalization_114 (BatchNo (None, 35, 35, 64)    192         conv2d_114[0][0]                 
____________________________________________________________________________________________________
batch_normalization_116 (BatchNo (None, 35, 35, 64)    192         conv2d_116[0][0]                 
____________________________________________________________________________________________________
batch_normalization_119 (BatchNo (None, 35, 35, 96)    288         conv2d_119[0][0]                 
____________________________________________________________________________________________________
batch_normalization_120 (BatchNo (None, 35, 35, 64)    192         conv2d_120[0][0]                 
____________________________________________________________________________________________________
activation_114 (Activation)      (None, 35, 35, 64)    0           batch_normalization_114[0][0]    
____________________________________________________________________________________________________
activation_116 (Activation)      (None, 35, 35, 64)    0           batch_normalization_116[0][0]    
____________________________________________________________________________________________________
activation_119 (Activation)      (None, 35, 35, 96)    0           batch_normalization_119[0][0]    
____________________________________________________________________________________________________
activation_120 (Activation)      (None, 35, 35, 64)    0           batch_normalization_120[0][0]    
____________________________________________________________________________________________________
mixed2 (Concatenate)             (None, 35, 35, 288)   0           activation_114[0][0]             
                                                                   activation_116[0][0]             
                                                                   activation_119[0][0]             
                                                                   activation_120[0][0]             
____________________________________________________________________________________________________
conv2d_122 (Conv2D)              (None, 35, 35, 64)    18432       mixed2[0][0]                     
____________________________________________________________________________________________________
batch_normalization_122 (BatchNo (None, 35, 35, 64)    192         conv2d_122[0][0]                 
____________________________________________________________________________________________________
activation_122 (Activation)      (None, 35, 35, 64)    0           batch_normalization_122[0][0]    
____________________________________________________________________________________________________
conv2d_123 (Conv2D)              (None, 35, 35, 96)    55296       activation_122[0][0]             
____________________________________________________________________________________________________
batch_normalization_123 (BatchNo (None, 35, 35, 96)    288         conv2d_123[0][0]                 
____________________________________________________________________________________________________
activation_123 (Activation)      (None, 35, 35, 96)    0           batch_normalization_123[0][0]    
____________________________________________________________________________________________________
conv2d_121 (Conv2D)              (None, 17, 17, 384)   995328      mixed2[0][0]                     
____________________________________________________________________________________________________
conv2d_124 (Conv2D)              (None, 17, 17, 96)    82944       activation_123[0][0]             
____________________________________________________________________________________________________
batch_normalization_121 (BatchNo (None, 17, 17, 384)   1152        conv2d_121[0][0]                 
____________________________________________________________________________________________________
batch_normalization_124 (BatchNo (None, 17, 17, 96)    288         conv2d_124[0][0]                 
____________________________________________________________________________________________________
activation_121 (Activation)      (None, 17, 17, 384)   0           batch_normalization_121[0][0]    
____________________________________________________________________________________________________
activation_124 (Activation)      (None, 17, 17, 96)    0           batch_normalization_124[0][0]    
____________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D)   (None, 17, 17, 288)   0           mixed2[0][0]                     
____________________________________________________________________________________________________
mixed3 (Concatenate)             (None, 17, 17, 768)   0           activation_121[0][0]             
                                                                   activation_124[0][0]             
                                                                   max_pooling2d_7[0][0]            
____________________________________________________________________________________________________
conv2d_129 (Conv2D)              (None, 17, 17, 128)   98304       mixed3[0][0]                     
____________________________________________________________________________________________________
batch_normalization_129 (BatchNo (None, 17, 17, 128)   384         conv2d_129[0][0]                 
____________________________________________________________________________________________________
activation_129 (Activation)      (None, 17, 17, 128)   0           batch_normalization_129[0][0]    
____________________________________________________________________________________________________
conv2d_130 (Conv2D)              (None, 17, 17, 128)   114688      activation_129[0][0]             
____________________________________________________________________________________________________
batch_normalization_130 (BatchNo (None, 17, 17, 128)   384         conv2d_130[0][0]                 
____________________________________________________________________________________________________
activation_130 (Activation)      (None, 17, 17, 128)   0           batch_normalization_130[0][0]    
____________________________________________________________________________________________________
conv2d_126 (Conv2D)              (None, 17, 17, 128)   98304       mixed3[0][0]                     
____________________________________________________________________________________________________
conv2d_131 (Conv2D)              (None, 17, 17, 128)   114688      activation_130[0][0]             
____________________________________________________________________________________________________
batch_normalization_126 (BatchNo (None, 17, 17, 128)   384         conv2d_126[0][0]                 
____________________________________________________________________________________________________
batch_normalization_131 (BatchNo (None, 17, 17, 128)   384         conv2d_131[0][0]                 
____________________________________________________________________________________________________
activation_126 (Activation)      (None, 17, 17, 128)   0           batch_normalization_126[0][0]    
____________________________________________________________________________________________________
activation_131 (Activation)      (None, 17, 17, 128)   0           batch_normalization_131[0][0]    
____________________________________________________________________________________________________
conv2d_127 (Conv2D)              (None, 17, 17, 128)   114688      activation_126[0][0]             
____________________________________________________________________________________________________
conv2d_132 (Conv2D)              (None, 17, 17, 128)   114688      activation_131[0][0]             
____________________________________________________________________________________________________
batch_normalization_127 (BatchNo (None, 17, 17, 128)   384         conv2d_127[0][0]                 
____________________________________________________________________________________________________
batch_normalization_132 (BatchNo (None, 17, 17, 128)   384         conv2d_132[0][0]                 
____________________________________________________________________________________________________
activation_127 (Activation)      (None, 17, 17, 128)   0           batch_normalization_127[0][0]    
____________________________________________________________________________________________________
activation_132 (Activation)      (None, 17, 17, 128)   0           batch_normalization_132[0][0]    
____________________________________________________________________________________________________
average_pooling2d_13 (AveragePoo (None, 17, 17, 768)   0           mixed3[0][0]                     
____________________________________________________________________________________________________
conv2d_125 (Conv2D)              (None, 17, 17, 192)   147456      mixed3[0][0]                     
____________________________________________________________________________________________________
conv2d_128 (Conv2D)              (None, 17, 17, 192)   172032      activation_127[0][0]             
____________________________________________________________________________________________________
conv2d_133 (Conv2D)              (None, 17, 17, 192)   172032      activation_132[0][0]             
____________________________________________________________________________________________________
conv2d_134 (Conv2D)              (None, 17, 17, 192)   147456      average_pooling2d_13[0][0]       
____________________________________________________________________________________________________
batch_normalization_125 (BatchNo (None, 17, 17, 192)   576         conv2d_125[0][0]                 
____________________________________________________________________________________________________
batch_normalization_128 (BatchNo (None, 17, 17, 192)   576         conv2d_128[0][0]                 
____________________________________________________________________________________________________
batch_normalization_133 (BatchNo (None, 17, 17, 192)   576         conv2d_133[0][0]                 
____________________________________________________________________________________________________
batch_normalization_134 (BatchNo (None, 17, 17, 192)   576         conv2d_134[0][0]                 
____________________________________________________________________________________________________
activation_125 (Activation)      (None, 17, 17, 192)   0           batch_normalization_125[0][0]    
____________________________________________________________________________________________________
activation_128 (Activation)      (None, 17, 17, 192)   0           batch_normalization_128[0][0]    
____________________________________________________________________________________________________
activation_133 (Activation)      (None, 17, 17, 192)   0           batch_normalization_133[0][0]    
____________________________________________________________________________________________________
activation_134 (Activation)      (None, 17, 17, 192)   0           batch_normalization_134[0][0]    
____________________________________________________________________________________________________
mixed4 (Concatenate)             (None, 17, 17, 768)   0           activation_125[0][0]             
                                                                   activation_128[0][0]             
                                                                   activation_133[0][0]             
                                                                   activation_134[0][0]             
____________________________________________________________________________________________________
conv2d_139 (Conv2D)              (None, 17, 17, 160)   122880      mixed4[0][0]                     
____________________________________________________________________________________________________
batch_normalization_139 (BatchNo (None, 17, 17, 160)   480         conv2d_139[0][0]                 
____________________________________________________________________________________________________
activation_139 (Activation)      (None, 17, 17, 160)   0           batch_normalization_139[0][0]    
____________________________________________________________________________________________________
conv2d_140 (Conv2D)              (None, 17, 17, 160)   179200      activation_139[0][0]             
____________________________________________________________________________________________________
batch_normalization_140 (BatchNo (None, 17, 17, 160)   480         conv2d_140[0][0]                 
____________________________________________________________________________________________________
activation_140 (Activation)      (None, 17, 17, 160)   0           batch_normalization_140[0][0]    
____________________________________________________________________________________________________
conv2d_136 (Conv2D)              (None, 17, 17, 160)   122880      mixed4[0][0]                     
____________________________________________________________________________________________________
conv2d_141 (Conv2D)              (None, 17, 17, 160)   179200      activation_140[0][0]             
____________________________________________________________________________________________________
batch_normalization_136 (BatchNo (None, 17, 17, 160)   480         conv2d_136[0][0]                 
____________________________________________________________________________________________________
batch_normalization_141 (BatchNo (None, 17, 17, 160)   480         conv2d_141[0][0]                 
____________________________________________________________________________________________________
activation_136 (Activation)      (None, 17, 17, 160)   0           batch_normalization_136[0][0]    
____________________________________________________________________________________________________
activation_141 (Activation)      (None, 17, 17, 160)   0           batch_normalization_141[0][0]    
____________________________________________________________________________________________________
conv2d_137 (Conv2D)              (None, 17, 17, 160)   179200      activation_136[0][0]             
____________________________________________________________________________________________________
conv2d_142 (Conv2D)              (None, 17, 17, 160)   179200      activation_141[0][0]             
____________________________________________________________________________________________________
batch_normalization_137 (BatchNo (None, 17, 17, 160)   480         conv2d_137[0][0]                 
____________________________________________________________________________________________________
batch_normalization_142 (BatchNo (None, 17, 17, 160)   480         conv2d_142[0][0]                 
____________________________________________________________________________________________________
activation_137 (Activation)      (None, 17, 17, 160)   0           batch_normalization_137[0][0]    
____________________________________________________________________________________________________
activation_142 (Activation)      (None, 17, 17, 160)   0           batch_normalization_142[0][0]    
____________________________________________________________________________________________________
average_pooling2d_14 (AveragePoo (None, 17, 17, 768)   0           mixed4[0][0]                     
____________________________________________________________________________________________________
conv2d_135 (Conv2D)              (None, 17, 17, 192)   147456      mixed4[0][0]                     
____________________________________________________________________________________________________
conv2d_138 (Conv2D)              (None, 17, 17, 192)   215040      activation_137[0][0]             
____________________________________________________________________________________________________
conv2d_143 (Conv2D)              (None, 17, 17, 192)   215040      activation_142[0][0]             
____________________________________________________________________________________________________
conv2d_144 (Conv2D)              (None, 17, 17, 192)   147456      average_pooling2d_14[0][0]       
____________________________________________________________________________________________________
batch_normalization_135 (BatchNo (None, 17, 17, 192)   576         conv2d_135[0][0]                 
____________________________________________________________________________________________________
batch_normalization_138 (BatchNo (None, 17, 17, 192)   576         conv2d_138[0][0]                 
____________________________________________________________________________________________________
batch_normalization_143 (BatchNo (None, 17, 17, 192)   576         conv2d_143[0][0]                 
____________________________________________________________________________________________________
batch_normalization_144 (BatchNo (None, 17, 17, 192)   576         conv2d_144[0][0]                 
____________________________________________________________________________________________________
activation_135 (Activation)      (None, 17, 17, 192)   0           batch_normalization_135[0][0]    
____________________________________________________________________________________________________
activation_138 (Activation)      (None, 17, 17, 192)   0           batch_normalization_138[0][0]    
____________________________________________________________________________________________________
activation_143 (Activation)      (None, 17, 17, 192)   0           batch_normalization_143[0][0]    
____________________________________________________________________________________________________
activation_144 (Activation)      (None, 17, 17, 192)   0           batch_normalization_144[0][0]    
____________________________________________________________________________________________________
mixed5 (Concatenate)             (None, 17, 17, 768)   0           activation_135[0][0]             
                                                                   activation_138[0][0]             
                                                                   activation_143[0][0]             
                                                                   activation_144[0][0]             
____________________________________________________________________________________________________
conv2d_149 (Conv2D)              (None, 17, 17, 160)   122880      mixed5[0][0]                     
____________________________________________________________________________________________________
batch_normalization_149 (BatchNo (None, 17, 17, 160)   480         conv2d_149[0][0]                 
____________________________________________________________________________________________________
activation_149 (Activation)      (None, 17, 17, 160)   0           batch_normalization_149[0][0]    
____________________________________________________________________________________________________
conv2d_150 (Conv2D)              (None, 17, 17, 160)   179200      activation_149[0][0]             
____________________________________________________________________________________________________
batch_normalization_150 (BatchNo (None, 17, 17, 160)   480         conv2d_150[0][0]                 
____________________________________________________________________________________________________
activation_150 (Activation)      (None, 17, 17, 160)   0           batch_normalization_150[0][0]    
____________________________________________________________________________________________________
conv2d_146 (Conv2D)              (None, 17, 17, 160)   122880      mixed5[0][0]                     
____________________________________________________________________________________________________
conv2d_151 (Conv2D)              (None, 17, 17, 160)   179200      activation_150[0][0]             
____________________________________________________________________________________________________
batch_normalization_146 (BatchNo (None, 17, 17, 160)   480         conv2d_146[0][0]                 
____________________________________________________________________________________________________
batch_normalization_151 (BatchNo (None, 17, 17, 160)   480         conv2d_151[0][0]                 
____________________________________________________________________________________________________
activation_146 (Activation)      (None, 17, 17, 160)   0           batch_normalization_146[0][0]    
____________________________________________________________________________________________________
activation_151 (Activation)      (None, 17, 17, 160)   0           batch_normalization_151[0][0]    
____________________________________________________________________________________________________
conv2d_147 (Conv2D)              (None, 17, 17, 160)   179200      activation_146[0][0]             
____________________________________________________________________________________________________
conv2d_152 (Conv2D)              (None, 17, 17, 160)   179200      activation_151[0][0]             
____________________________________________________________________________________________________
batch_normalization_147 (BatchNo (None, 17, 17, 160)   480         conv2d_147[0][0]                 
____________________________________________________________________________________________________
batch_normalization_152 (BatchNo (None, 17, 17, 160)   480         conv2d_152[0][0]                 
____________________________________________________________________________________________________
activation_147 (Activation)      (None, 17, 17, 160)   0           batch_normalization_147[0][0]    
____________________________________________________________________________________________________
activation_152 (Activation)      (None, 17, 17, 160)   0           batch_normalization_152[0][0]    
____________________________________________________________________________________________________
average_pooling2d_15 (AveragePoo (None, 17, 17, 768)   0           mixed5[0][0]                     
____________________________________________________________________________________________________
conv2d_145 (Conv2D)              (None, 17, 17, 192)   147456      mixed5[0][0]                     
____________________________________________________________________________________________________
conv2d_148 (Conv2D)              (None, 17, 17, 192)   215040      activation_147[0][0]             
____________________________________________________________________________________________________
conv2d_153 (Conv2D)              (None, 17, 17, 192)   215040      activation_152[0][0]             
____________________________________________________________________________________________________
conv2d_154 (Conv2D)              (None, 17, 17, 192)   147456      average_pooling2d_15[0][0]       
____________________________________________________________________________________________________
batch_normalization_145 (BatchNo (None, 17, 17, 192)   576         conv2d_145[0][0]                 
____________________________________________________________________________________________________
batch_normalization_148 (BatchNo (None, 17, 17, 192)   576         conv2d_148[0][0]                 
____________________________________________________________________________________________________
batch_normalization_153 (BatchNo (None, 17, 17, 192)   576         conv2d_153[0][0]                 
____________________________________________________________________________________________________
batch_normalization_154 (BatchNo (None, 17, 17, 192)   576         conv2d_154[0][0]                 
____________________________________________________________________________________________________
activation_145 (Activation)      (None, 17, 17, 192)   0           batch_normalization_145[0][0]    
____________________________________________________________________________________________________
activation_148 (Activation)      (None, 17, 17, 192)   0           batch_normalization_148[0][0]    
____________________________________________________________________________________________________
activation_153 (Activation)      (None, 17, 17, 192)   0           batch_normalization_153[0][0]    
____________________________________________________________________________________________________
activation_154 (Activation)      (None, 17, 17, 192)   0           batch_normalization_154[0][0]    
____________________________________________________________________________________________________
mixed6 (Concatenate)             (None, 17, 17, 768)   0           activation_145[0][0]             
                                                                   activation_148[0][0]             
                                                                   activation_153[0][0]             
                                                                   activation_154[0][0]             
____________________________________________________________________________________________________
conv2d_159 (Conv2D)              (None, 17, 17, 192)   147456      mixed6[0][0]                     
____________________________________________________________________________________________________
batch_normalization_159 (BatchNo (None, 17, 17, 192)   576         conv2d_159[0][0]                 
____________________________________________________________________________________________________
activation_159 (Activation)      (None, 17, 17, 192)   0           batch_normalization_159[0][0]    
____________________________________________________________________________________________________
conv2d_160 (Conv2D)              (None, 17, 17, 192)   258048      activation_159[0][0]             
____________________________________________________________________________________________________
batch_normalization_160 (BatchNo (None, 17, 17, 192)   576         conv2d_160[0][0]                 
____________________________________________________________________________________________________
activation_160 (Activation)      (None, 17, 17, 192)   0           batch_normalization_160[0][0]    
____________________________________________________________________________________________________
conv2d_156 (Conv2D)              (None, 17, 17, 192)   147456      mixed6[0][0]                     
____________________________________________________________________________________________________
conv2d_161 (Conv2D)              (None, 17, 17, 192)   258048      activation_160[0][0]             
____________________________________________________________________________________________________
batch_normalization_156 (BatchNo (None, 17, 17, 192)   576         conv2d_156[0][0]                 
____________________________________________________________________________________________________
batch_normalization_161 (BatchNo (None, 17, 17, 192)   576         conv2d_161[0][0]                 
____________________________________________________________________________________________________
activation_156 (Activation)      (None, 17, 17, 192)   0           batch_normalization_156[0][0]    
____________________________________________________________________________________________________
activation_161 (Activation)      (None, 17, 17, 192)   0           batch_normalization_161[0][0]    
____________________________________________________________________________________________________
conv2d_157 (Conv2D)              (None, 17, 17, 192)   258048      activation_156[0][0]             
____________________________________________________________________________________________________
conv2d_162 (Conv2D)              (None, 17, 17, 192)   258048      activation_161[0][0]             
____________________________________________________________________________________________________
batch_normalization_157 (BatchNo (None, 17, 17, 192)   576         conv2d_157[0][0]                 
____________________________________________________________________________________________________
batch_normalization_162 (BatchNo (None, 17, 17, 192)   576         conv2d_162[0][0]                 
____________________________________________________________________________________________________
activation_157 (Activation)      (None, 17, 17, 192)   0           batch_normalization_157[0][0]    
____________________________________________________________________________________________________
activation_162 (Activation)      (None, 17, 17, 192)   0           batch_normalization_162[0][0]    
____________________________________________________________________________________________________
average_pooling2d_16 (AveragePoo (None, 17, 17, 768)   0           mixed6[0][0]                     
____________________________________________________________________________________________________
conv2d_155 (Conv2D)              (None, 17, 17, 192)   147456      mixed6[0][0]                     
____________________________________________________________________________________________________
conv2d_158 (Conv2D)              (None, 17, 17, 192)   258048      activation_157[0][0]             
____________________________________________________________________________________________________
conv2d_163 (Conv2D)              (None, 17, 17, 192)   258048      activation_162[0][0]             
____________________________________________________________________________________________________
conv2d_164 (Conv2D)              (None, 17, 17, 192)   147456      average_pooling2d_16[0][0]       
____________________________________________________________________________________________________
batch_normalization_155 (BatchNo (None, 17, 17, 192)   576         conv2d_155[0][0]                 
____________________________________________________________________________________________________
batch_normalization_158 (BatchNo (None, 17, 17, 192)   576         conv2d_158[0][0]                 
____________________________________________________________________________________________________
batch_normalization_163 (BatchNo (None, 17, 17, 192)   576         conv2d_163[0][0]                 
____________________________________________________________________________________________________
batch_normalization_164 (BatchNo (None, 17, 17, 192)   576         conv2d_164[0][0]                 
____________________________________________________________________________________________________
activation_155 (Activation)      (None, 17, 17, 192)   0           batch_normalization_155[0][0]    
____________________________________________________________________________________________________
activation_158 (Activation)      (None, 17, 17, 192)   0           batch_normalization_158[0][0]    
____________________________________________________________________________________________________
activation_163 (Activation)      (None, 17, 17, 192)   0           batch_normalization_163[0][0]    
____________________________________________________________________________________________________
activation_164 (Activation)      (None, 17, 17, 192)   0           batch_normalization_164[0][0]    
____________________________________________________________________________________________________
mixed7 (Concatenate)             (None, 17, 17, 768)   0           activation_155[0][0]             
                                                                   activation_158[0][0]             
                                                                   activation_163[0][0]             
                                                                   activation_164[0][0]             
____________________________________________________________________________________________________
conv2d_167 (Conv2D)              (None, 17, 17, 192)   147456      mixed7[0][0]                     
____________________________________________________________________________________________________
batch_normalization_167 (BatchNo (None, 17, 17, 192)   576         conv2d_167[0][0]                 
____________________________________________________________________________________________________
activation_167 (Activation)      (None, 17, 17, 192)   0           batch_normalization_167[0][0]    
____________________________________________________________________________________________________
conv2d_168 (Conv2D)              (None, 17, 17, 192)   258048      activation_167[0][0]             
____________________________________________________________________________________________________
batch_normalization_168 (BatchNo (None, 17, 17, 192)   576         conv2d_168[0][0]                 
____________________________________________________________________________________________________
activation_168 (Activation)      (None, 17, 17, 192)   0           batch_normalization_168[0][0]    
____________________________________________________________________________________________________
conv2d_165 (Conv2D)              (None, 17, 17, 192)   147456      mixed7[0][0]                     
____________________________________________________________________________________________________
conv2d_169 (Conv2D)              (None, 17, 17, 192)   258048      activation_168[0][0]             
____________________________________________________________________________________________________
batch_normalization_165 (BatchNo (None, 17, 17, 192)   576         conv2d_165[0][0]                 
____________________________________________________________________________________________________
batch_normalization_169 (BatchNo (None, 17, 17, 192)   576         conv2d_169[0][0]                 
____________________________________________________________________________________________________
activation_165 (Activation)      (None, 17, 17, 192)   0           batch_normalization_165[0][0]    
____________________________________________________________________________________________________
activation_169 (Activation)      (None, 17, 17, 192)   0           batch_normalization_169[0][0]    
____________________________________________________________________________________________________
conv2d_166 (Conv2D)              (None, 8, 8, 320)     552960      activation_165[0][0]             
____________________________________________________________________________________________________
conv2d_170 (Conv2D)              (None, 8, 8, 192)     331776      activation_169[0][0]             
____________________________________________________________________________________________________
batch_normalization_166 (BatchNo (None, 8, 8, 320)     960         conv2d_166[0][0]                 
____________________________________________________________________________________________________
batch_normalization_170 (BatchNo (None, 8, 8, 192)     576         conv2d_170[0][0]                 
____________________________________________________________________________________________________
activation_166 (Activation)      (None, 8, 8, 320)     0           batch_normalization_166[0][0]    
____________________________________________________________________________________________________
activation_170 (Activation)      (None, 8, 8, 192)     0           batch_normalization_170[0][0]    
____________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D)   (None, 8, 8, 768)     0           mixed7[0][0]                     
____________________________________________________________________________________________________
mixed8 (Concatenate)             (None, 8, 8, 1280)    0           activation_166[0][0]             
                                                                   activation_170[0][0]             
                                                                   max_pooling2d_8[0][0]            
____________________________________________________________________________________________________
conv2d_175 (Conv2D)              (None, 8, 8, 448)     573440      mixed8[0][0]                     
____________________________________________________________________________________________________
batch_normalization_175 (BatchNo (None, 8, 8, 448)     1344        conv2d_175[0][0]                 
____________________________________________________________________________________________________
activation_175 (Activation)      (None, 8, 8, 448)     0           batch_normalization_175[0][0]    
____________________________________________________________________________________________________
conv2d_172 (Conv2D)              (None, 8, 8, 384)     491520      mixed8[0][0]                     
____________________________________________________________________________________________________
conv2d_176 (Conv2D)              (None, 8, 8, 384)     1548288     activation_175[0][0]             
____________________________________________________________________________________________________
batch_normalization_172 (BatchNo (None, 8, 8, 384)     1152        conv2d_172[0][0]                 
____________________________________________________________________________________________________
batch_normalization_176 (BatchNo (None, 8, 8, 384)     1152        conv2d_176[0][0]                 
____________________________________________________________________________________________________
activation_172 (Activation)      (None, 8, 8, 384)     0           batch_normalization_172[0][0]    
____________________________________________________________________________________________________
activation_176 (Activation)      (None, 8, 8, 384)     0           batch_normalization_176[0][0]    
____________________________________________________________________________________________________
conv2d_173 (Conv2D)              (None, 8, 8, 384)     442368      activation_172[0][0]             
____________________________________________________________________________________________________
conv2d_174 (Conv2D)              (None, 8, 8, 384)     442368      activation_172[0][0]             
____________________________________________________________________________________________________
conv2d_177 (Conv2D)              (None, 8, 8, 384)     442368      activation_176[0][0]             
____________________________________________________________________________________________________
conv2d_178 (Conv2D)              (None, 8, 8, 384)     442368      activation_176[0][0]             
____________________________________________________________________________________________________
average_pooling2d_17 (AveragePoo (None, 8, 8, 1280)    0           mixed8[0][0]                     
____________________________________________________________________________________________________
conv2d_171 (Conv2D)              (None, 8, 8, 320)     409600      mixed8[0][0]                     
____________________________________________________________________________________________________
batch_normalization_173 (BatchNo (None, 8, 8, 384)     1152        conv2d_173[0][0]                 
____________________________________________________________________________________________________
batch_normalization_174 (BatchNo (None, 8, 8, 384)     1152        conv2d_174[0][0]                 
____________________________________________________________________________________________________
batch_normalization_177 (BatchNo (None, 8, 8, 384)     1152        conv2d_177[0][0]                 
____________________________________________________________________________________________________
batch_normalization_178 (BatchNo (None, 8, 8, 384)     1152        conv2d_178[0][0]                 
____________________________________________________________________________________________________
conv2d_179 (Conv2D)              (None, 8, 8, 192)     245760      average_pooling2d_17[0][0]       
____________________________________________________________________________________________________
batch_normalization_171 (BatchNo (None, 8, 8, 320)     960         conv2d_171[0][0]                 
____________________________________________________________________________________________________
activation_173 (Activation)      (None, 8, 8, 384)     0           batch_normalization_173[0][0]    
____________________________________________________________________________________________________
activation_174 (Activation)      (None, 8, 8, 384)     0           batch_normalization_174[0][0]    
____________________________________________________________________________________________________
activation_177 (Activation)      (None, 8, 8, 384)     0           batch_normalization_177[0][0]    
____________________________________________________________________________________________________
activation_178 (Activation)      (None, 8, 8, 384)     0           batch_normalization_178[0][0]    
____________________________________________________________________________________________________
batch_normalization_179 (BatchNo (None, 8, 8, 192)     576         conv2d_179[0][0]                 
____________________________________________________________________________________________________
activation_171 (Activation)      (None, 8, 8, 320)     0           batch_normalization_171[0][0]    
____________________________________________________________________________________________________
mixed9_0 (Concatenate)           (None, 8, 8, 768)     0           activation_173[0][0]             
                                                                   activation_174[0][0]             
____________________________________________________________________________________________________
concatenate_3 (Concatenate)      (None, 8, 8, 768)     0           activation_177[0][0]             
                                                                   activation_178[0][0]             
____________________________________________________________________________________________________
activation_179 (Activation)      (None, 8, 8, 192)     0           batch_normalization_179[0][0]    
____________________________________________________________________________________________________
mixed9 (Concatenate)             (None, 8, 8, 2048)    0           activation_171[0][0]             
                                                                   mixed9_0[0][0]                   
                                                                   concatenate_3[0][0]              
                                                                   activation_179[0][0]             
____________________________________________________________________________________________________
conv2d_184 (Conv2D)              (None, 8, 8, 448)     917504      mixed9[0][0]                     
____________________________________________________________________________________________________
batch_normalization_184 (BatchNo (None, 8, 8, 448)     1344        conv2d_184[0][0]                 
____________________________________________________________________________________________________
activation_184 (Activation)      (None, 8, 8, 448)     0           batch_normalization_184[0][0]    
____________________________________________________________________________________________________
conv2d_181 (Conv2D)              (None, 8, 8, 384)     786432      mixed9[0][0]                     
____________________________________________________________________________________________________
conv2d_185 (Conv2D)              (None, 8, 8, 384)     1548288     activation_184[0][0]             
____________________________________________________________________________________________________
batch_normalization_181 (BatchNo (None, 8, 8, 384)     1152        conv2d_181[0][0]                 
____________________________________________________________________________________________________
batch_normalization_185 (BatchNo (None, 8, 8, 384)     1152        conv2d_185[0][0]                 
____________________________________________________________________________________________________
activation_181 (Activation)      (None, 8, 8, 384)     0           batch_normalization_181[0][0]    
____________________________________________________________________________________________________
activation_185 (Activation)      (None, 8, 8, 384)     0           batch_normalization_185[0][0]    
____________________________________________________________________________________________________
conv2d_182 (Conv2D)              (None, 8, 8, 384)     442368      activation_181[0][0]             
____________________________________________________________________________________________________
conv2d_183 (Conv2D)              (None, 8, 8, 384)     442368      activation_181[0][0]             
____________________________________________________________________________________________________
conv2d_186 (Conv2D)              (None, 8, 8, 384)     442368      activation_185[0][0]             
____________________________________________________________________________________________________
conv2d_187 (Conv2D)              (None, 8, 8, 384)     442368      activation_185[0][0]             
____________________________________________________________________________________________________
average_pooling2d_18 (AveragePoo (None, 8, 8, 2048)    0           mixed9[0][0]                     
____________________________________________________________________________________________________
conv2d_180 (Conv2D)              (None, 8, 8, 320)     655360      mixed9[0][0]                     
____________________________________________________________________________________________________
batch_normalization_182 (BatchNo (None, 8, 8, 384)     1152        conv2d_182[0][0]                 
____________________________________________________________________________________________________
batch_normalization_183 (BatchNo (None, 8, 8, 384)     1152        conv2d_183[0][0]                 
____________________________________________________________________________________________________
batch_normalization_186 (BatchNo (None, 8, 8, 384)     1152        conv2d_186[0][0]                 
____________________________________________________________________________________________________
batch_normalization_187 (BatchNo (None, 8, 8, 384)     1152        conv2d_187[0][0]                 
____________________________________________________________________________________________________
conv2d_188 (Conv2D)              (None, 8, 8, 192)     393216      average_pooling2d_18[0][0]       
____________________________________________________________________________________________________
batch_normalization_180 (BatchNo (None, 8, 8, 320)     960         conv2d_180[0][0]                 
____________________________________________________________________________________________________
activation_182 (Activation)      (None, 8, 8, 384)     0           batch_normalization_182[0][0]    
____________________________________________________________________________________________________
activation_183 (Activation)      (None, 8, 8, 384)     0           batch_normalization_183[0][0]    
____________________________________________________________________________________________________
activation_186 (Activation)      (None, 8, 8, 384)     0           batch_normalization_186[0][0]    
____________________________________________________________________________________________________
activation_187 (Activation)      (None, 8, 8, 384)     0           batch_normalization_187[0][0]    
____________________________________________________________________________________________________
batch_normalization_188 (BatchNo (None, 8, 8, 192)     576         conv2d_188[0][0]                 
____________________________________________________________________________________________________
activation_180 (Activation)      (None, 8, 8, 320)     0           batch_normalization_180[0][0]    
____________________________________________________________________________________________________
mixed9_1 (Concatenate)           (None, 8, 8, 768)     0           activation_182[0][0]             
                                                                   activation_183[0][0]             
____________________________________________________________________________________________________
concatenate_4 (Concatenate)      (None, 8, 8, 768)     0           activation_186[0][0]             
                                                                   activation_187[0][0]             
____________________________________________________________________________________________________
activation_188 (Activation)      (None, 8, 8, 192)     0           batch_normalization_188[0][0]    
____________________________________________________________________________________________________
mixed10 (Concatenate)            (None, 8, 8, 2048)    0           activation_180[0][0]             
                                                                   mixed9_1[0][0]                   
                                                                   concatenate_4[0][0]              
                                                                   activation_188[0][0]             
____________________________________________________________________________________________________
global_average_pooling2d_2 (Glob (None, 2048)          0           mixed10[0][0]                    
____________________________________________________________________________________________________
dense_1 (Dense)                  (None, 1024)          2098176     global_average_pooling2d_2[0][0] 
____________________________________________________________________________________________________
dropout_1 (Dropout)              (None, 1024)          0           dense_1[0][0]                    
____________________________________________________________________________________________________
dense_2 (Dense)                  (None, 151)           154775      dropout_1[0][0]                  
====================================================================================================
Total params: 24,055,735
Trainable params: 2,252,951
Non-trainable params: 21,802,784
____________________________________________________________________________________________________

In [11]:
# Load every training image and its integer class label.
# load_image(..., draw=False) presumably returns a (1, 299, 299, 3) batch — TODO confirm;
# extending with it adds one image per file.
X_all = []
y_all = []
for class_idx, class_name in enumerate(categories):
    class_dir = os.path.join(data_path, 'train', class_name)
    for fname in os.listdir(class_dir):
        X_all.extend(load_image(os.path.join(class_dir, fname), draw=False))
        y_all.append(class_idx)
X_all = np.array(X_all)
y_all = np.array(y_all)

print(X_all.shape, y_all.shape)


(2717, 299, 299, 3) (2717,)

In [12]:
# Separate train and valid datasets
from sklearn.model_selection import train_test_split

# Pass test_size explicitly: from sklearn 0.21 on, test_size only complements
# train_size when both are given (this silences the FutureWarning; the split
# sizes are unchanged: 1901 train / 816 valid).
# NOTE(review): with 151 classes, stratify=y_all may give a more balanced
# validation set — confirm before changing, as it alters the split.
X_train, X_valid, y_train, y_valid = train_test_split(
    X_all, y_all, train_size=0.7, test_size=0.3, random_state=42)

print(X_train.shape, X_valid.shape)
print(y_train.shape, y_valid.shape)


/home/jorge/anaconda3/envs/tf14/lib/python3.5/site-packages/sklearn/model_selection/_split.py:2026: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.
  FutureWarning)
(1901, 299, 299, 3) (816, 299, 299, 3)
(1901,) (816,)

In [13]:
# Data augmentation: random shear, zoom, small shifts and horizontal flips.
augmentation_params = dict(
    shear_range=0.2,
    zoom_range=0.2,
    width_shift_range=0.1,   # randomly shift images horizontally (fraction of total width)
    height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
    horizontal_flip=True,
)
datagen = tf.keras.preprocessing.image.ImageDataGenerator(**augmentation_params)

In [ ]:
batch_size = 32
train_size = X_train.shape[0]

# Select optimizer and compile model (integer labels -> sparse_categorical_crossentropy)
opt = tf.keras.optimizers.RMSprop(lr=1E-3)
model.compile(loss='sparse_categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

# Train with TensorBoard logging.
# Pass batch_size to datagen.flow explicitly: flow() has its own default of 32,
# and steps_per_epoch = train_size // batch_size must stay consistent with the
# actual batch size if batch_size is ever changed.
tb_callback_ln = tf.keras.callbacks.TensorBoard(log_dir='/tmp/tensorboard/faces/inception')
history1 = model.fit_generator(datagen.flow(X_train, y_train, batch_size=batch_size),
                steps_per_epoch = train_size // batch_size,
                epochs = 30,
                validation_data = (X_valid, y_valid),
                callbacks=[tb_callback_ln])

In [20]:
# Plot train vs validation accuracy for the first training phase.
fig, ax = plt.subplots()
ax.plot(history1.history['acc'], label='acc')
ax.plot(history1.history['val_acc'], label='val acc')
ax.legend(loc='lower right')
plt.show()



In [16]:
# Build a sub-model whose output is the first inception cell: mixed0
model2 = tf.keras.models.Model(inputs=base_model.input, outputs=base_model.get_layer('mixed0').output)

# Read one sample image from the first category
my_filelist = [f for f in listdir(join(data_path, 'train', categories[0])) if isfile(join(data_path, 'train', categories[0], f))]
img_path = join(data_path, 'train', categories[0], my_filelist[0])
img = load_image(img_path)
x = tf.keras.applications.inception_v3.preprocess_input(img)

mixed0_features = model2.predict(x)

# Typo fixed in the message: 'Fists' -> 'First'
print('First inception cell output shape: ', mixed0_features.shape)


First inception cell output shape:  (1, 35, 35, 256)

In [23]:
plt.rcParams['figure.figsize'] = (15, 15)        # size of images
plt.rcParams['image.cmap'] = 'gray'              # use grayscale
fig = plt.figure()
# Plot all 256 mixed0 feature maps on a 16x16 grid.
# (The original comment said "first 100" but the loop covers all 256;
# the manual counter n was redundant — subplot index is i + 1.)
for i in range(256):
    fig.add_subplot(16, 16, i + 1)
    plt.imshow(mixed0_features[0, :, :, i])



In [24]:
# Example: pull the kernel weights of the first convolutional layer
# (get_weights()[0] is the kernel tensor; [1] would be the bias if present).
first_conv_name = base_model.layers[1].name
conv2d_1_w = model2.get_layer(first_conv_name).get_weights()[0]
conv2d_1_w.shape


Out[24]:
(3, 3, 3, 32)

Fine tuning of the last layers

- We will freeze the bottom N layers and train the remaining top layers.

In [25]:
# List layer names with their indices to decide how many layers to freeze:
idx = 0
for layer in base_model.layers:
    print(idx, layer.name)
    idx += 1


0 input_2
1 conv2d_95
2 batch_normalization_95
3 activation_95
4 conv2d_96
5 batch_normalization_96
6 activation_96
7 conv2d_97
8 batch_normalization_97
9 activation_97
10 max_pooling2d_5
11 conv2d_98
12 batch_normalization_98
13 activation_98
14 conv2d_99
15 batch_normalization_99
16 activation_99
17 max_pooling2d_6
18 conv2d_103
19 batch_normalization_103
20 activation_103
21 conv2d_101
22 conv2d_104
23 batch_normalization_101
24 batch_normalization_104
25 activation_101
26 activation_104
27 average_pooling2d_10
28 conv2d_100
29 conv2d_102
30 conv2d_105
31 conv2d_106
32 batch_normalization_100
33 batch_normalization_102
34 batch_normalization_105
35 batch_normalization_106
36 activation_100
37 activation_102
38 activation_105
39 activation_106
40 mixed0
41 conv2d_110
42 batch_normalization_110
43 activation_110
44 conv2d_108
45 conv2d_111
46 batch_normalization_108
47 batch_normalization_111
48 activation_108
49 activation_111
50 average_pooling2d_11
51 conv2d_107
52 conv2d_109
53 conv2d_112
54 conv2d_113
55 batch_normalization_107
56 batch_normalization_109
57 batch_normalization_112
58 batch_normalization_113
59 activation_107
60 activation_109
61 activation_112
62 activation_113
63 mixed1
64 conv2d_117
65 batch_normalization_117
66 activation_117
67 conv2d_115
68 conv2d_118
69 batch_normalization_115
70 batch_normalization_118
71 activation_115
72 activation_118
73 average_pooling2d_12
74 conv2d_114
75 conv2d_116
76 conv2d_119
77 conv2d_120
78 batch_normalization_114
79 batch_normalization_116
80 batch_normalization_119
81 batch_normalization_120
82 activation_114
83 activation_116
84 activation_119
85 activation_120
86 mixed2
87 conv2d_122
88 batch_normalization_122
89 activation_122
90 conv2d_123
91 batch_normalization_123
92 activation_123
93 conv2d_121
94 conv2d_124
95 batch_normalization_121
96 batch_normalization_124
97 activation_121
98 activation_124
99 max_pooling2d_7
100 mixed3
101 conv2d_129
102 batch_normalization_129
103 activation_129
104 conv2d_130
105 batch_normalization_130
106 activation_130
107 conv2d_126
108 conv2d_131
109 batch_normalization_126
110 batch_normalization_131
111 activation_126
112 activation_131
113 conv2d_127
114 conv2d_132
115 batch_normalization_127
116 batch_normalization_132
117 activation_127
118 activation_132
119 average_pooling2d_13
120 conv2d_125
121 conv2d_128
122 conv2d_133
123 conv2d_134
124 batch_normalization_125
125 batch_normalization_128
126 batch_normalization_133
127 batch_normalization_134
128 activation_125
129 activation_128
130 activation_133
131 activation_134
132 mixed4
133 conv2d_139
134 batch_normalization_139
135 activation_139
136 conv2d_140
137 batch_normalization_140
138 activation_140
139 conv2d_136
140 conv2d_141
141 batch_normalization_136
142 batch_normalization_141
143 activation_136
144 activation_141
145 conv2d_137
146 conv2d_142
147 batch_normalization_137
148 batch_normalization_142
149 activation_137
150 activation_142
151 average_pooling2d_14
152 conv2d_135
153 conv2d_138
154 conv2d_143
155 conv2d_144
156 batch_normalization_135
157 batch_normalization_138
158 batch_normalization_143
159 batch_normalization_144
160 activation_135
161 activation_138
162 activation_143
163 activation_144
164 mixed5
165 conv2d_149
166 batch_normalization_149
167 activation_149
168 conv2d_150
169 batch_normalization_150
170 activation_150
171 conv2d_146
172 conv2d_151
173 batch_normalization_146
174 batch_normalization_151
175 activation_146
176 activation_151
177 conv2d_147
178 conv2d_152
179 batch_normalization_147
180 batch_normalization_152
181 activation_147
182 activation_152
183 average_pooling2d_15
184 conv2d_145
185 conv2d_148
186 conv2d_153
187 conv2d_154
188 batch_normalization_145
189 batch_normalization_148
190 batch_normalization_153
191 batch_normalization_154
192 activation_145
193 activation_148
194 activation_153
195 activation_154
196 mixed6
197 conv2d_159
198 batch_normalization_159
199 activation_159
200 conv2d_160
201 batch_normalization_160
202 activation_160
203 conv2d_156
204 conv2d_161
205 batch_normalization_156
206 batch_normalization_161
207 activation_156
208 activation_161
209 conv2d_157
210 conv2d_162
211 batch_normalization_157
212 batch_normalization_162
213 activation_157
214 activation_162
215 average_pooling2d_16
216 conv2d_155
217 conv2d_158
218 conv2d_163
219 conv2d_164
220 batch_normalization_155
221 batch_normalization_158
222 batch_normalization_163
223 batch_normalization_164
224 activation_155
225 activation_158
226 activation_163
227 activation_164
228 mixed7
229 conv2d_167
230 batch_normalization_167
231 activation_167
232 conv2d_168
233 batch_normalization_168
234 activation_168
235 conv2d_165
236 conv2d_169
237 batch_normalization_165
238 batch_normalization_169
239 activation_165
240 activation_169
241 conv2d_166
242 conv2d_170
243 batch_normalization_166
244 batch_normalization_170
245 activation_166
246 activation_170
247 max_pooling2d_8
248 mixed8
249 conv2d_175
250 batch_normalization_175
251 activation_175
252 conv2d_172
253 conv2d_176
254 batch_normalization_172
255 batch_normalization_176
256 activation_172
257 activation_176
258 conv2d_173
259 conv2d_174
260 conv2d_177
261 conv2d_178
262 average_pooling2d_17
263 conv2d_171
264 batch_normalization_173
265 batch_normalization_174
266 batch_normalization_177
267 batch_normalization_178
268 conv2d_179
269 batch_normalization_171
270 activation_173
271 activation_174
272 activation_177
273 activation_178
274 batch_normalization_179
275 activation_171
276 mixed9_0
277 concatenate_3
278 activation_179
279 mixed9
280 conv2d_184
281 batch_normalization_184
282 activation_184
283 conv2d_181
284 conv2d_185
285 batch_normalization_181
286 batch_normalization_185
287 activation_181
288 activation_185
289 conv2d_182
290 conv2d_183
291 conv2d_186
292 conv2d_187
293 average_pooling2d_18
294 conv2d_180
295 batch_normalization_182
296 batch_normalization_183
297 batch_normalization_186
298 batch_normalization_187
299 conv2d_188
300 batch_normalization_180
301 activation_182
302 activation_183
303 activation_186
304 activation_187
305 batch_normalization_188
306 activation_180
307 mixed9_1
308 concatenate_4
309 activation_188
310 mixed10

In [26]:
# We chose to fine-tune the top 2 inception blocks (from mixed8 onward).
# mixed8 sits at index 248 in the layer listing above, so everything below
# index 249 is frozen and the rest is made trainable.
FREEZE_UP_TO = 249  # named constant instead of a repeated magic number

for layer in model.layers[:FREEZE_UP_TO]:
    layer.trainable = False
for layer in model.layers[FREEZE_UP_TO:]:
    layer.trainable = True

In [19]:
# Recompile the model for the trainable-flag modifications to take effect.
# Use a low learning rate so fine-tuning does not wreck the pretrained weights.
opt = tf.keras.optimizers.SGD(lr=0.0001, momentum=0.9)
model.compile(optimizer=opt, loss='sparse_categorical_crossentropy', metrics=['accuracy'])


# Train the model again (fine-tunes the top 2 inception blocks and the dense layers).
# batch_size is passed to flow() explicitly so it stays in sync with
# steps_per_epoch = train_size // batch_size (flow() otherwise uses its own default).
history2 = model.fit_generator(datagen.flow(X_train, y_train, batch_size=batch_size),
            steps_per_epoch = train_size // batch_size,
            epochs = 30,
            validation_data = (X_valid, y_valid),
            callbacks=[tb_callback_ln])


Epoch 1/30
59/59 [==============================] - 33s - loss: 0.1691 - acc: 0.9560 - val_loss: 0.2274 - val_acc: 0.9363
Epoch 2/30
59/59 [==============================] - 28s - loss: 0.1480 - acc: 0.9659 - val_loss: 0.2080 - val_acc: 0.9400
Epoch 3/30
59/59 [==============================] - 28s - loss: 0.1352 - acc: 0.9634 - val_loss: 0.1915 - val_acc: 0.9449
Epoch 4/30
59/59 [==============================] - 29s - loss: 0.1191 - acc: 0.9709 - val_loss: 0.1766 - val_acc: 0.9473
Epoch 5/30
59/59 [==============================] - 29s - loss: 0.1212 - acc: 0.9680 - val_loss: 0.1679 - val_acc: 0.9498
Epoch 6/30
59/59 [==============================] - 28s - loss: 0.0999 - acc: 0.9738 - val_loss: 0.1626 - val_acc: 0.9522
Epoch 7/30
59/59 [==============================] - 29s - loss: 0.0973 - acc: 0.9772 - val_loss: 0.1567 - val_acc: 0.9547
Epoch 8/30
59/59 [==============================] - 28s - loss: 0.0947 - acc: 0.9780 - val_loss: 0.1536 - val_acc: 0.9547
Epoch 9/30
59/59 [==============================] - 29s - loss: 0.0749 - acc: 0.9841 - val_loss: 0.1485 - val_acc: 0.9534
Epoch 10/30
59/59 [==============================] - 28s - loss: 0.0856 - acc: 0.9762 - val_loss: 0.1477 - val_acc: 0.9571
Epoch 11/30
59/59 [==============================] - 28s - loss: 0.0714 - acc: 0.9820 - val_loss: 0.1440 - val_acc: 0.9571
Epoch 12/30
59/59 [==============================] - 28s - loss: 0.0850 - acc: 0.9820 - val_loss: 0.1399 - val_acc: 0.9583
Epoch 13/30
59/59 [==============================] - 28s - loss: 0.0794 - acc: 0.9799 - val_loss: 0.1362 - val_acc: 0.9608
Epoch 14/30
59/59 [==============================] - 29s - loss: 0.0606 - acc: 0.9849 - val_loss: 0.1352 - val_acc: 0.9596
Epoch 15/30
59/59 [==============================] - 29s - loss: 0.0542 - acc: 0.9910 - val_loss: 0.1326 - val_acc: 0.9608
Epoch 16/30
59/59 [==============================] - 28s - loss: 0.0667 - acc: 0.9873 - val_loss: 0.1309 - val_acc: 0.9608
Epoch 17/30
59/59 [==============================] - 28s - loss: 0.0763 - acc: 0.9765 - val_loss: 0.1286 - val_acc: 0.9620
Epoch 18/30
59/59 [==============================] - 29s - loss: 0.0699 - acc: 0.9812 - val_loss: 0.1281 - val_acc: 0.9632
Epoch 19/30
59/59 [==============================] - 28s - loss: 0.0592 - acc: 0.9860 - val_loss: 0.1259 - val_acc: 0.9620
Epoch 20/30
59/59 [==============================] - 28s - loss: 0.0653 - acc: 0.9844 - val_loss: 0.1220 - val_acc: 0.9645
Epoch 21/30
59/59 [==============================] - 28s - loss: 0.0641 - acc: 0.9855 - val_loss: 0.1189 - val_acc: 0.9657
Epoch 22/30
59/59 [==============================] - 29s - loss: 0.0606 - acc: 0.9846 - val_loss: 0.1171 - val_acc: 0.9657
Epoch 23/30
59/59 [==============================] - 29s - loss: 0.0627 - acc: 0.9883 - val_loss: 0.1163 - val_acc: 0.9645
Epoch 24/30
59/59 [==============================] - 28s - loss: 0.0546 - acc: 0.9902 - val_loss: 0.1134 - val_acc: 0.9669
Epoch 25/30
59/59 [==============================] - 28s - loss: 0.0578 - acc: 0.9868 - val_loss: 0.1132 - val_acc: 0.9681
Epoch 26/30
59/59 [==============================] - 29s - loss: 0.0568 - acc: 0.9894 - val_loss: 0.1139 - val_acc: 0.9657
Epoch 27/30
59/59 [==============================] - 29s - loss: 0.0552 - acc: 0.9889 - val_loss: 0.1116 - val_acc: 0.9645
Epoch 28/30
59/59 [==============================] - 29s - loss: 0.0530 - acc: 0.9894 - val_loss: 0.1121 - val_acc: 0.9645
Epoch 29/30
59/59 [==============================] - 29s - loss: 0.0551 - acc: 0.9894 - val_loss: 0.1095 - val_acc: 0.9669
Epoch 30/30
59/59 [==============================] - 29s - loss: 0.0567 - acc: 0.9868 - val_loss: 0.1056 - val_acc: 0.9694

In [ ]:

Evaluate the test data


In [20]:
# Collect the regular files in the test directory (order is listdir order,
# which downstream cells rely on — do not sort).
test_dir = join(data_path, 'test')
test_filelist = [fname for fname in listdir(test_dir) if isfile(join(test_dir, fname))]
len(test_filelist)


Out[20]:
299

In [21]:
# Load every test image and record its true label
# (the label is the file name up to the first dot, e.g. 'dmcdow.jpg' -> 'dmcdow').
image_array = []
real_test = []
for fname in test_filelist:
    real_test.append(fname.split('.')[0])
    img = load_image(join(data_path, 'test', fname), draw=False)
    image_array.extend(img)

probability_test = model.predict(np.array(image_array))
probability_test.shape


Out[21]:
(299, 151)

In [22]:
real_test


Out[22]:
['dmcdow',
 'sherbe',
 'amtalb',
 'reaton',
 'adpoun',
 'dlwise',
 'gmidgl',
 'rjobyr',
 'wardcy',
 'cowie',
 'djevan',
 'smfarrf',
 'jjohns',
 '9540687',
 'jonesd',
 'mdcowl',
 'gjbods',
 '9540792',
 'gllong',
 'gmidgl',
 'kdaska',
 'pwest',
 '9540725',
 '9540814',
 'wylsow',
 '9540822',
 '9540822',
 '9540709',
 '9556273',
 'ihopki',
 'shamilc',
 'azavro',
 'nhahme',
 'oegill',
 'dfarre',
 'pjburr',
 'jedphi',
 'dlwise',
 'filip',
 'swren',
 'atfpou',
 'jlrums',
 'nhotoo',
 '9540547',
 '9540768',
 'cprice',
 'gpreec',
 '9540644',
 '9540512',
 'bmphil',
 'lcelli',
 'nandri',
 'cjhewi',
 'jjkeho',
 'mpdavie',
 'djpben',
 '9540849',
 '9540784',
 'ireloz',
 'mkvwon',
 'nmakri',
 'gpreec',
 'jepott',
 '9540504',
 'ldebna',
 'darodr',
 'gfilip',
 '9630643',
 '9540741',
 'jjkeho',
 'mpdavie',
 'pmspry',
 'agaffa',
 'tchand',
 '9540644',
 'wardcy',
 'arwebb',
 'dnoguy',
 'rscoop',
 'tthurs',
 'jjohns',
 '9540601',
 'rchadh',
 'jcgrij',
 'rwsick',
 'moshep',
 '9540717',
 '9540636',
 'mystery',
 'bcbesta',
 'gcprie',
 'hwyman',
 'smredh',
 'ischou',
 'dfarre',
 '9540652',
 'jepott',
 'mhoore',
 'adpoun',
 'pmspry',
 '9540784',
 'cpatki',
 'gdsamp',
 'pjmcar',
 '9540504',
 '9540792',
 'carund',
 'gghazv',
 'pgray',
 'rhosan',
 'mkvwon',
 'cowie',
 'pguan',
 'jgelln',
 '9540717',
 '9540849',
 'memiah',
 'jwdpag',
 'mroper',
 'rposbo',
 'arwebb',
 'elalio',
 'acatsa',
 'cjbowe',
 'cjhewi',
 'tjpret',
 'djotto',
 'shpill',
 'memiah',
 'wjsaic',
 '9540636',
 'todris',
 '9602283',
 'dnoguy',
 'djevan',
 'filip',
 'azavro',
 'rshaw',
 'rwsick',
 'cprice',
 '9540687',
 'kelbied',
 'mclarkd',
 'kphans',
 'nandri',
 'nmakri',
 'mdcowl',
 'spyount',
 '9630643',
 'rjwils',
 '9540601',
 'shpill',
 '9540768',
 'djtye',
 'amflem',
 'jgelln',
 'pwest',
 'mkotza',
 'rhosan',
 'smalga',
 'csjscu',
 'wjsaic',
 'oegill',
 'acatsa',
 'kphans',
 'pgfost',
 'pvaris',
 'rposbo',
 'hmgula',
 '9540741',
 'kdaska',
 'rchadh',
 'akopci',
 'ischou',
 'akopci',
 'aolcer',
 '9540474',
 'icolli',
 'darodr',
 'rbrown',
 'gllong',
 'mwilco',
 'nhotoo',
 '9540709',
 'wylsow',
 '9540652',
 'sherbe',
 'jlrums',
 'fmquaz',
 'moshep',
 'jjdacl',
 'gjbods',
 '9602283',
 'jonesd',
 'atfpou',
 'mroper',
 'dmcdow',
 'hmgula',
 'rbrown',
 '9540733',
 'smredh',
 'pgolds',
 'nhahme',
 'elalio',
 'mhoore',
 '9540474',
 'amtalb',
 'spyount',
 'cpatki',
 'jphoor',
 'dhaydo',
 'gfilip',
 'gmagul',
 '9540512',
 'aolcer',
 'jbgood',
 'pmbtin',
 'mwillc',
 'mwilco',
 'mamago',
 'rscoop',
 'smalga',
 'jphoor',
 '9556273',
 'llevet',
 'sdaly',
 'swren',
 'pjmcar',
 'mclarkd',
 '9540814',
 'djotto',
 'gmagul',
 'sdaly',
 'bmphil',
 'tjpret',
 'ldebna',
 'pmathi',
 'alebes',
 'rjwils',
 'pvaris',
 'rjobyr',
 'mamago',
 'tthurs',
 'fmquaz',
 'icolli',
 'pjburr',
 '9540695',
 'snhopp',
 'pmbtin',
 'alebes',
 'hwyman',
 'jbgood',
 'dkourkf',
 'gcprie',
 'ireloz',
 'lcelli',
 'csjscu',
 'shamilc',
 'smfarrf',
 'idsizm',
 'djtye',
 'agaffa',
 'pgray',
 '9540547',
 '9540563',
 'reaton',
 'bcbesta',
 '9540695',
 'pmathi',
 'jjdacl',
 'idsizm',
 'ihopki',
 'mystery',
 'mwillc',
 'pgolds',
 '9540628',
 '9540725',
 'jwdpag',
 'pgfost',
 'pchyta',
 'dkourkf',
 'llevet',
 'todris',
 'amflem',
 'rshaw',
 'gghazv',
 'djpben',
 '9540733',
 'kelbied',
 'dhaydo',
 '9540628',
 'gdsamp',
 'pchyta',
 'mkotza',
 'jedphi',
 'pguan',
 'tchand',
 'jcgrij',
 'snhopp']

In [24]:
print(len(real_test))
# Map each probability vector to the label of its highest-scoring class
# (vectorized argmax over the class axis instead of a per-row argmax).
predict_test = [labels[idx] for idx in np.argmax(probability_test, axis=1)]
print(len(predict_test))


299
299

In [25]:
# Print the error rate and show the first few misclassified faces:
# left column is the real test image, right column a training example
# of the (wrongly) predicted person.
errors_index = [i for i in range(len(real_test)) if real_test[i] != predict_test[i]]

print('Pct error: ', len(errors_index)/len(predict_test))

fig = plt.figure()
for row, err_i in enumerate(errors_index[:5]):
    # real test image (subplot indices 1, 3, 5, ...)
    ax = fig.add_subplot(5, 2, 2 * row + 1)
    ax.set_title('R:' + real_test[err_i])
    plt.imshow(plt.imread(join(data_path, 'test', test_filelist[err_i])))

    # first training image of the predicted person (subplot indices 2, 4, 6, ...)
    ax = fig.add_subplot(5, 2, 2 * row + 2)
    ax.set_title('P:' + predict_test[err_i])
    plt.imshow(plt.imread(join(data_path, 'train', predict_test[err_i], predict_test[err_i] + '.1.jpg')))


Pct error:  0.013377926421404682

In [ ]: