Important: This notebook will only work with fastai-0.7.x. Do not try to run any fastai-1.x code from this path in the repository, because it will load fastai-0.7.x.

Introduction to our first task: 'Dogs vs Cats'

This version of the lesson builds the classifier in Keras, fine-tuning an ImageNet-pretrained ResNet50 on the dogs-vs-cats images.


In [ ]:
%reload_ext autoreload
%autoreload 2
%matplotlib inline

In [ ]:
PATH = "data/dogscats/"
sz=224
batch_size=64
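
The generators below rely on Keras's flow_from_directory, which expects one subdirectory per class under each of the train and valid folders. As a quick sanity check (a sketch only, assuming the standard dogscats layout has already been downloaded to PATH), listing the directories should show 'cats' and 'dogs':

In [ ]:
import os
print(os.listdir(PATH))             # expect at least 'train' and 'valid'
print(os.listdir(f'{PATH}train'))   # one subdirectory per class, e.g. 'cats' and 'dogs'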

In [ ]:
import numpy as np
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing import image
from keras.layers import Dropout, Flatten, Dense, GlobalAveragePooling2D
from keras.applications import ResNet50
from keras.applications.resnet50 import preprocess_input
from keras.models import Model, Sequential
from keras import backend as K


Using TensorFlow backend.

In [ ]:
train_data_dir = f'{PATH}train'
validation_data_dir = f'{PATH}valid'

In [ ]:
# Augmentation (shear, zoom, horizontal flips) is applied to the training set only;
# both generators run images through the ResNet50 preprocessing function.
train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input,
    shear_range=0.2, zoom_range=0.2, horizontal_flip=True)

test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

train_generator = train_datagen.flow_from_directory(train_data_dir,
    target_size=(sz, sz),
    batch_size=batch_size, class_mode='binary')

# shuffle=False keeps validation predictions in directory order, aligned with filenames and labels
validation_generator = test_datagen.flow_from_directory(validation_data_dir,
    shuffle=False,
    target_size=(sz, sz),
    batch_size=batch_size, class_mode='binary')


Found 23000 images belonging to 2 classes.
Found 2000 images belonging to 2 classes.
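
As a sanity check, it can be useful to pull one batch from the training generator and confirm the tensor shapes and the class-to-index mapping derived from the directory names (a quick sketch; the exact mapping may differ):

In [ ]:
x_batch, y_batch = next(train_generator)
print(x_batch.shape, y_batch.shape)    # e.g. (64, 224, 224, 3) and (64,)
print(train_generator.class_indices)   # e.g. {'cats': 0, 'dogs': 1}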

In [ ]:
# ImageNet-pretrained ResNet50 without its final classification layers (include_top=False)
base_model = ResNet50(weights='imagenet', include_top=False)
# New head: global average pooling, a fully connected layer, and a single sigmoid output for cats vs dogs
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(1024, activation='relu')(x)
predictions = Dense(1, activation='sigmoid')(x)

In [ ]:
model = Model(inputs=base_model.input, outputs=predictions)
# Freeze the pretrained base so that only the new head is trained at first
for layer in base_model.layers: layer.trainable = False
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
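
Before training, it is worth confirming that only the newly added head is trainable. A minimal sketch: count the trainable layers (model.summary() would also show the trainable vs. non-trainable parameter totals):

In [ ]:
print(len(model.layers))                                # backbone layers plus the new head
print(sum(layer.trainable for layer in model.layers))   # should be just the few head layers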

In [ ]:
%%time
model.fit_generator(train_generator, train_generator.n // batch_size, epochs=3, workers=4,
        validation_data=validation_generator, validation_steps=validation_generator.n // batch_size)


Epoch 1/3
359/359 [==============================] - 81s 226ms/step - loss: 0.1780 - acc: 0.9486 - val_loss: 0.0805 - val_acc: 0.9743
Epoch 2/3
359/359 [==============================] - 77s 215ms/step - loss: 0.0928 - acc: 0.9697 - val_loss: 0.0592 - val_acc: 0.9814
Epoch 3/3
359/359 [==============================] - 74s 207ms/step - loss: 0.0746 - acc: 0.9730 - val_loss: 0.0617 - val_acc: 0.9814
CPU times: user 15min 53s, sys: 36.4 s, total: 16min 29s
Wall time: 3min 52s
Out[ ]:
<keras.callbacks.History at 0x7f2395643f98>
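
The History object returned by fit_generator records per-epoch metrics. If the call above is assigned to a variable (it is not in this notebook), the curves can be plotted; a sketch assuming a hypothetical history variable:

In [ ]:
import matplotlib.pyplot as plt

# Hypothetical: assumes the call above was captured as history = model.fit_generator(...)
plt.plot(history.history['acc'], label='train acc')
plt.plot(history.history['val_acc'], label='val acc')
plt.xlabel('epoch')
plt.legend()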

In [ ]:
# Fine-tune: keep the earlier layers frozen and unfreeze everything from layer 140 onwards
split_at = 140
for layer in model.layers[:split_at]: layer.trainable = False
for layer in model.layers[split_at:]: layer.trainable = True
# Recompile so the new trainable settings take effect
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
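
The split at layer 140 keeps the earlier ResNet blocks frozen and fine-tunes only the later ones. To see exactly where the cut falls, a small sketch like this prints the layer names around the split index:

In [ ]:
for i, layer in enumerate(model.layers):
    if split_at - 2 <= i <= split_at + 2:
        print(i, layer.name, layer.trainable)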

In [ ]:
%%time
model.fit_generator(train_generator, train_generator.n // batch_size, epochs=1, workers=3,
        validation_data=validation_generator, validation_steps=validation_generator.n // batch_size)


Epoch 1/1
359/359 [==============================] - 217s 603ms/step - loss: 0.0762 - acc: 0.9741 - val_loss: 7.9436 - val_acc: 0.5055
CPU times: user 4min 47s, sys: 14.1 s, total: 5min 1s
Wall time: 3min 38s
Out[ ]:
<keras.callbacks.History at 0x7f22d8428f28>
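
To use the fine-tuned model, predictions can be drawn from the unshuffled validation generator and thresholded at 0.5. The sketch below (using the Keras 2 predict_generator API) also recomputes validation accuracy against the labels stored on the generator:

In [ ]:
import math

validation_generator.reset()                                   # start again from the first image
steps = math.ceil(validation_generator.n / batch_size)
probs = model.predict_generator(validation_generator, steps)   # shape (n_images, 1) of sigmoid outputs
preds = (probs[:, 0] > 0.5).astype(int)                        # 0/1 predicted class indices

print(validation_generator.class_indices)                      # class name -> index mapping
print('accuracy:', (preds == validation_generator.classes).mean())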
