In [1]:
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
from keras.preprocessing.image import ImageDataGenerator


Using TensorFlow backend.

In [2]:
def plots(ims, figsize=(12,6), rows=1, interp=False, titles=None):
    """Display a batch of images side by side on a single figure."""
    if type(ims[0]) is np.ndarray:
        ims = np.array(ims).astype(np.uint8)
        # If images are channels-first (N, C, H, W), move channels last for imshow.
        if (ims.shape[-1] != 3):
            ims = ims.transpose((0,2,3,1))
    f = plt.figure(figsize=figsize)
    for i in range(len(ims)):
        sp = f.add_subplot(rows, len(ims)//rows, i+1)
        sp.axis('Off')
        if titles is not None:
            sp.set_title(titles[i], fontsize=16)
        plt.imshow(ims[i], interpolation=None if interp else 'none')
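
To sanity-check the helper, a hypothetical cell like the following (random noise standing in for real images) exercises both the titles and the grid layout:

In [ ]:
# Hypothetical smoke test for plots(): two random uint8 "images".
demo = np.random.randint(0, 256, size=(2, 224, 224, 3))
plots(demo, rows=1, titles=['noise 1', 'noise 2'])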

In [3]:
from keras.preprocessing import image

BATCH_SIZE = 64
PATH = "data/"

def get_fit_sample():
    # Draw one fixed batch of 300 validation images; these provide the
    # dataset statistics that featurewise normalization is fitted on.
    gen = image.ImageDataGenerator()
    sample_batches = gen.flow_from_directory(PATH+'valid', target_size=(224,224),
            class_mode='categorical', shuffle=False, batch_size=300)
    imgs, labels = next(sample_batches)
    return imgs

# Note: featurewise_std_normalization is typically combined with
# featurewise_center=True; here only the divide-by-std step is applied,
# so inputs are scaled but not zero-centred.
gen = image.ImageDataGenerator(featurewise_std_normalization=True)
gen.fit(get_fit_sample())

# A single fitted generator serves both the validation and training flows.
val_batches = gen.flow_from_directory(PATH+'valid', target_size=(224,224),
            class_mode='categorical', shuffle=True, batch_size=BATCH_SIZE)
batches = gen.flow_from_directory(PATH+'train', target_size=(224,224),
            class_mode='categorical', shuffle=True, batch_size=BATCH_SIZE)

#imgs,labels = next(batches)
#plots(imgs[:2])


Found 2000 images belonging to 2 classes.
Found 2000 images belonging to 2 classes.
Found 16997 images belonging to 2 classes.
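
As a hypothetical sanity check (not part of the original run), the class-to-index mapping that `flow_from_directory` infers from the subdirectory names can be printed from either iterator:

In [ ]:
# The two mappings should agree between train and valid.
print(batches.class_indices)      # e.g. {'cats': 0, 'dogs': 1}
print(val_batches.class_indices)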

In [4]:
CLASSES = 2
INPUT_SHAPE = (224,224,3)
# VGG configuration E (VGG19): five conv blocks followed by three FC layers.
model = Sequential()

# Block 1
model.add(Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1', input_shape=INPUT_SHAPE))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool'))

# Block 2
model.add(Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1'))
model.add(Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool'))

# Block 3
model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1'))
model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2'))
model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3'))
model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv4'))
model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool'))

# Block 4
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv4'))
model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool'))

# Block 5
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3'))
model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv4'))
model.add(MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool'))

# Classification block
model.add(Flatten(name='flatten'))
model.add(Dense(4096, activation='relu', name='fc1'))
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu', name='fc2'))
model.add(Dropout(0.5))
model.add(Dense(CLASSES, activation='softmax', name='predictions'))

from keras.optimizers import SGD
# Note: Keras's `decay` argument is per-update learning-rate decay,
# not the L2 weight decay (5e-4) used in the VGG paper.
sgd = SGD(lr=0.01, decay=0.0005, momentum=0.9, nesterov=False)

# categorical_crossentropy is the usual loss for a softmax classifier;
# mean squared error also trains but converges more slowly, as the log
# below shows.
model.compile(optimizer=sgd, loss='mean_squared_error', metrics=['accuracy'])
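
The stack above is VGG configuration E (VGG19): sixteen 3x3 convolutional layers in five blocks, then two 4096-unit FC layers and a 2-way softmax. A quick check of the layer names and sizes:

In [ ]:
model.summary()  # five conv blocks + fc1/fc2/predictions, roughly 140M parameters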

In [5]:
%%time
# 100 steps x batch 64 = 6,400 images per epoch, a subset of the 16,997
# training images; validation_steps=10 covers 640 of the 2,000 validation images.
hist = model.fit_generator(batches, steps_per_epoch=100, epochs=20,
                           validation_data=val_batches, validation_steps=10)

model.save('ConvNet-E-vgg19.h5')

# http://qiita.com/TypeNULL/items/4e4d7de11ab4361d6085
loss = hist.history['loss']
val_loss = hist.history['val_loss']
nb_epoch = len(loss)
plt.plot(range(nb_epoch), loss, marker='.', label='loss')
plt.plot(range(nb_epoch), val_loss, marker='.', label='val_loss')
plt.legend(loc='best', fontsize=10)
plt.grid()
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()


Epoch 1/20
100/100 [==============================] - 140s - loss: 0.2500 - acc: 0.4967 - val_loss: 0.2499 - val_acc: 0.5000
Epoch 2/20
100/100 [==============================] - 136s - loss: 0.2497 - acc: 0.5142 - val_loss: 0.2517 - val_acc: 0.4656
Epoch 3/20
100/100 [==============================] - 138s - loss: 0.2500 - acc: 0.5028 - val_loss: 0.2492 - val_acc: 0.5281
Epoch 4/20
100/100 [==============================] - 137s - loss: 0.2495 - acc: 0.5255 - val_loss: 0.2487 - val_acc: 0.5608
Epoch 5/20
100/100 [==============================] - 137s - loss: 0.2490 - acc: 0.5275 - val_loss: 0.2472 - val_acc: 0.5891
Epoch 6/20
100/100 [==============================] - 136s - loss: 0.2479 - acc: 0.5331 - val_loss: 0.2404 - val_acc: 0.6469
Epoch 7/20
100/100 [==============================] - 137s - loss: 0.2419 - acc: 0.5698 - val_loss: 0.2445 - val_acc: 0.5524
Epoch 8/20
100/100 [==============================] - 138s - loss: 0.2383 - acc: 0.5805 - val_loss: 0.2297 - val_acc: 0.6234
Epoch 9/20
100/100 [==============================] - 137s - loss: 0.2292 - acc: 0.6008 - val_loss: 0.2277 - val_acc: 0.5938
Epoch 10/20
100/100 [==============================] - 137s - loss: 0.2217 - acc: 0.6170 - val_loss: 0.2129 - val_acc: 0.6859
Epoch 11/20
100/100 [==============================] - 136s - loss: 0.2172 - acc: 0.6407 - val_loss: 0.2164 - val_acc: 0.6406
Epoch 12/20
100/100 [==============================] - 137s - loss: 0.2138 - acc: 0.6405 - val_loss: 0.2135 - val_acc: 0.6436
Epoch 13/20
100/100 [==============================] - 139s - loss: 0.2116 - acc: 0.6556 - val_loss: 0.2033 - val_acc: 0.6656
Epoch 14/20
100/100 [==============================] - 137s - loss: 0.2055 - acc: 0.6620 - val_loss: 0.2106 - val_acc: 0.6641
Epoch 15/20
100/100 [==============================] - 138s - loss: 0.2035 - acc: 0.6750 - val_loss: 0.2056 - val_acc: 0.6841
Epoch 16/20
100/100 [==============================] - 138s - loss: 0.2019 - acc: 0.6799 - val_loss: 0.2084 - val_acc: 0.6750
Epoch 17/20
100/100 [==============================] - 139s - loss: 0.2020 - acc: 0.6767 - val_loss: 0.2013 - val_acc: 0.6672
Epoch 18/20
100/100 [==============================] - 138s - loss: 0.1944 - acc: 0.6984 - val_loss: 0.1880 - val_acc: 0.7047
Epoch 19/20
100/100 [==============================] - 138s - loss: 0.1918 - acc: 0.6924 - val_loss: 0.1999 - val_acc: 0.6984
Epoch 20/20
100/100 [==============================] - 137s - loss: 0.1918 - acc: 0.6981 - val_loss: 0.2094 - val_acc: 0.6774
CPU times: user 30min 19s, sys: 5min 21s, total: 35min 40s
Wall time: 46min 3s
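
The history object also records accuracy under the old-style keys 'acc' and 'val_acc' (visible in the log above); a companion plot, sketched as a hypothetical follow-up cell, mirrors the loss plot:

In [ ]:
# Plot training vs. validation accuracy per epoch from the same history.
acc = hist.history['acc']
val_acc = hist.history['val_acc']
plt.plot(range(len(acc)), acc, marker='.', label='acc')
plt.plot(range(len(acc)), val_acc, marker='.', label='val_acc')
plt.legend(loc='best', fontsize=10)
plt.grid()
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.show()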
