In [6]:
import os, shutil

original_dataset_dir = 'E:\\Datasets\\Kaggle\\dog_cat\\train'
base_dir = 'E:\\Datasets\\Kaggle\\dog_cat\\base'

os.mkdir(base_dir)

train_dir = os.path.join(base_dir, 'train')
os.mkdir(train_dir)

validation_dir = os.path.join(base_dir, 'validation')
os.mkdir(validation_dir)

test_dir = os.path.join(base_dir, 'test')
os.mkdir(test_dir)

train_cats_dir = os.path.join(train_dir, 'cats')
os.mkdir(train_cats_dir)

train_dogs_dir = os.path.join(train_dir, 'dogs')
os.mkdir(train_dogs_dir)

validation_cats_dir = os.path.join(validation_dir, 'cats')
os.mkdir(validation_cats_dir)

validation_dogs_dir = os.path.join(validation_dir, 'dogs')
os.mkdir(validation_dogs_dir)

test_cats_dir = os.path.join(test_dir, 'cats')
os.mkdir(test_cats_dir)

test_dogs_dir = os.path.join(test_dir, 'dogs')
os.mkdir(test_dogs_dir)

fnames = ['cat.{}.jpg'.format(i) for i in range(1000)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(train_cats_dir, fname)
    shutil.copyfile(src, dst)
    
fnames = ['cat.{}.jpg'.format(i) for i in range(1000, 1500)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(validation_cats_dir, fname)
    shutil.copyfile(src, dst)
    
fnames = ['cat.{}.jpg'.format(i) for i in range(1500, 2000)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(test_cats_dir, fname)
    shutil.copyfile(src, dst)
    


fnames = ['dog.{}.jpg'.format(i) for i in range(1000)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(train_dogs_dir, fname)
    shutil.copyfile(src, dst)
    
fnames = ['dog.{}.jpg'.format(i) for i in range(1000, 1500)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(validation_dogs_dir, fname)
    shutil.copyfile(src, dst)
    
fnames = ['dog.{}.jpg'.format(i) for i in range(1500, 2000)]
for fname in fnames:
    src = os.path.join(original_dataset_dir, fname)
    dst = os.path.join(test_dogs_dir, fname)
    shutil.copyfile(src, dst)

In [7]:
from keras import layers
from keras import models

model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(64, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(128, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(128, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Flatten()) 
model.add(layers.Dense(512, activation='relu')) 
model.add(layers.Dense(1, activation='sigmoid'))


Using TensorFlow backend.

In [8]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_1 (Conv2D)            (None, 148, 148, 32)      896       
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 74, 74, 32)        0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 72, 72, 64)        18496     
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 36, 36, 64)        0         
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 34, 34, 128)       73856     
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 17, 17, 128)       0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 15, 15, 128)       147584    
_________________________________________________________________
max_pooling2d_4 (MaxPooling2 (None, 7, 7, 128)         0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 6272)              0         
_________________________________________________________________
dense_1 (Dense)              (None, 512)               3211776   
_________________________________________________________________
dense_2 (Dense)              (None, 1)                 513       
=================================================================
Total params: 3,453,121
Trainable params: 3,453,121
Non-trainable params: 0
_________________________________________________________________

In [9]:
from keras import optimizers

model.compile(loss='binary_crossentropy', optimizer=optimizers.RMSprop(lr=1e-4),
             metrics=['acc'])

In [11]:
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale=1./255)
valid_datagen = ImageDataGenerator(rescale=1./255)

train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary'
)

validation_generator = valid_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary'
)


Found 2000 images belonging to 2 classes.
Found 1000 images belonging to 2 classes.

In [13]:
for data_batch, labels_batch in train_generator:
    print("data_batch shape", data_batch.shape)
    print("labels_batch shape", labels_batch.shape)
    break


data_batch shape (20, 150, 150, 3)
labels_batch shape (20,)

In [15]:
history = model.fit_generator(
    train_generator,
    steps_per_epoch=100,
    epochs=30,
    validation_data=validation_generator,
    validation_steps=50
)
model.save(os.path.join(base_dir, 'cats_dogs_small_1.h5'))


Epoch 1/30
100/100 [==============================] - 9s 85ms/step - loss: 0.6909 - acc: 0.5310 - val_loss: 0.6730 - val_acc: 0.6070
Epoch 2/30
100/100 [==============================] - 6s 58ms/step - loss: 0.6581 - acc: 0.6195 - val_loss: 0.6522 - val_acc: 0.6030
Epoch 3/30
100/100 [==============================] - 6s 58ms/step - loss: 0.6103 - acc: 0.6665 - val_loss: 0.6184 - val_acc: 0.6540
Epoch 4/30
100/100 [==============================] - 6s 61ms/step - loss: 0.5689 - acc: 0.7035 - val_loss: 0.5982 - val_acc: 0.6770
Epoch 5/30
100/100 [==============================] - 6s 59ms/step - loss: 0.5362 - acc: 0.7285 - val_loss: 0.6015 - val_acc: 0.6730
Epoch 6/30
100/100 [==============================] - 6s 57ms/step - loss: 0.5179 - acc: 0.7415 - val_loss: 0.5794 - val_acc: 0.6860
Epoch 7/30
100/100 [==============================] - 6s 60ms/step - loss: 0.4974 - acc: 0.7615 - val_loss: 0.6154 - val_acc: 0.6800
Epoch 8/30
100/100 [==============================] - 6s 59ms/step - loss: 0.4687 - acc: 0.7725 - val_loss: 0.5752 - val_acc: 0.6970
Epoch 9/30
100/100 [==============================] - 6s 58ms/step - loss: 0.4476 - acc: 0.7875 - val_loss: 0.6028 - val_acc: 0.7050
Epoch 10/30
100/100 [==============================] - 6s 60ms/step - loss: 0.4066 - acc: 0.8225 - val_loss: 0.5794 - val_acc: 0.7130
Epoch 11/30
100/100 [==============================] - 6s 60ms/step - loss: 0.3807 - acc: 0.8320 - val_loss: 0.5833 - val_acc: 0.7030
Epoch 12/30
100/100 [==============================] - 6s 58ms/step - loss: 0.3633 - acc: 0.8390 - val_loss: 0.5466 - val_acc: 0.7240
Epoch 13/30
100/100 [==============================] - 6s 59ms/step - loss: 0.3475 - acc: 0.8515 - val_loss: 0.5682 - val_acc: 0.7280
Epoch 14/30
100/100 [==============================] - 6s 58ms/step - loss: 0.3154 - acc: 0.8635 - val_loss: 0.7051 - val_acc: 0.6960
Epoch 15/30
100/100 [==============================] - 6s 59ms/step - loss: 0.3008 - acc: 0.8775 - val_loss: 0.5846 - val_acc: 0.7310
Epoch 16/30
100/100 [==============================] - 6s 57ms/step - loss: 0.2739 - acc: 0.8930 - val_loss: 0.6420 - val_acc: 0.7180
Epoch 17/30
100/100 [==============================] - 6s 59ms/step - loss: 0.2486 - acc: 0.8995 - val_loss: 0.6154 - val_acc: 0.7370
Epoch 18/30
100/100 [==============================] - 6s 58ms/step - loss: 0.2271 - acc: 0.9170 - val_loss: 0.6683 - val_acc: 0.7190
Epoch 19/30
100/100 [==============================] - 6s 58ms/step - loss: 0.2101 - acc: 0.9235 - val_loss: 0.6421 - val_acc: 0.7280
Epoch 20/30
100/100 [==============================] - 6s 58ms/step - loss: 0.1900 - acc: 0.9295 - val_loss: 0.6510 - val_acc: 0.7280
Epoch 21/30
100/100 [==============================] - 6s 57ms/step - loss: 0.1691 - acc: 0.9410 - val_loss: 0.6979 - val_acc: 0.7230
Epoch 22/30
100/100 [==============================] - 6s 58ms/step - loss: 0.1517 - acc: 0.9515 - val_loss: 0.7403 - val_acc: 0.7230
Epoch 23/30
100/100 [==============================] - 6s 58ms/step - loss: 0.1345 - acc: 0.9555 - val_loss: 0.7495 - val_acc: 0.7190
Epoch 24/30
100/100 [==============================] - 6s 58ms/step - loss: 0.1205 - acc: 0.9625 - val_loss: 0.7546 - val_acc: 0.7410
Epoch 25/30
100/100 [==============================] - 6s 58ms/step - loss: 0.0983 - acc: 0.9670 - val_loss: 0.8238 - val_acc: 0.7160
Epoch 26/30
100/100 [==============================] - 6s 58ms/step - loss: 0.0883 - acc: 0.9700 - val_loss: 0.9651 - val_acc: 0.7080
Epoch 27/30
100/100 [==============================] - 6s 58ms/step - loss: 0.0756 - acc: 0.9760 - val_loss: 0.9010 - val_acc: 0.7250
Epoch 28/30
100/100 [==============================] - 6s 58ms/step - loss: 0.0662 - acc: 0.9805 - val_loss: 0.9410 - val_acc: 0.7310
Epoch 29/30
100/100 [==============================] - 6s 58ms/step - loss: 0.0624 - acc: 0.9790 - val_loss: 1.0356 - val_acc: 0.7110
Epoch 30/30
100/100 [==============================] - 6s 57ms/step - loss: 0.0515 - acc: 0.9865 - val_loss: 0.9875 - val_acc: 0.7240

In [18]:
import matplotlib.pyplot as plt

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'bo', label='Train acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title("Training and validation accuracy")
plt.legend()

plt.figure()

plt.plot(epochs, loss, 'bo', label='Train loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title("Training and validation loss")
plt.legend()

plt.show()



In [19]:
# Data augmentation
datagen = ImageDataGenerator(
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest'
)

In [22]:
from keras.preprocessing import image

fnames = [os.path.join(train_cats_dir, fname) for fname in os.listdir(train_cats_dir)]

img_path = fnames[3]

img = image.load_img(img_path, target_size=(150, 150))

x = image.img_to_array(img)

x = x.reshape((1, ) + x.shape)

i = 0 
for batch in datagen.flow(x, batch_size=1):
    plt.figure(i)
    plt.imshow(image.array_to_img(batch[0]))
    i += 1
    if i % 4 == 0:
        break
plt.show()



In [27]:
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(64, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(128, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Conv2D(128, (3, 3), activation='relu')) 
model.add(layers.MaxPooling2D((2, 2))) 
model.add(layers.Flatten()) 
model.add(layers.Dropout(0.5))
model.add(layers.Dense(512, activation='relu')) 
model.add(layers.Dense(1, activation='sigmoid'))


model.compile(loss='binary_crossentropy', optimizer=optimizers.RMSprop(lr=1e-4),
             metrics=['acc'])

In [24]:
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale=1./255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest')
valid_datagen = ImageDataGenerator(rescale=1./255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest')

train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary'
)

validation_generator = valid_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary'
)


Found 2000 images belonging to 2 classes.
Found 1000 images belonging to 2 classes.

In [28]:
history = model.fit_generator(
    train_generator,
    steps_per_epoch=100,
    epochs=100,
    validation_data=validation_generator,
    validation_steps=50
)
model.save(os.path.join(base_dir, 'cats_dogs_small_2.h5'))


Epoch 1/100
100/100 [==============================] - 15s 151ms/step - loss: 0.6945 - acc: 0.5100 - val_loss: 0.6927 - val_acc: 0.5000
Epoch 2/100
100/100 [==============================] - 14s 137ms/step - loss: 0.6861 - acc: 0.5435 - val_loss: 0.6838 - val_acc: 0.5510
Epoch 3/100
100/100 [==============================] - 14s 137ms/step - loss: 0.6744 - acc: 0.5625 - val_loss: 0.6914 - val_acc: 0.5600
Epoch 4/100
100/100 [==============================] - 14s 139ms/step - loss: 0.6548 - acc: 0.6020 - val_loss: 0.6394 - val_acc: 0.6300
Epoch 5/100
100/100 [==============================] - 14s 137ms/step - loss: 0.6374 - acc: 0.6260 - val_loss: 0.6263 - val_acc: 0.6380
Epoch 6/100
100/100 [==============================] - 14s 137ms/step - loss: 0.6249 - acc: 0.6445 - val_loss: 0.6144 - val_acc: 0.6650
Epoch 7/100
100/100 [==============================] - 14s 136ms/step - loss: 0.6084 - acc: 0.6585 - val_loss: 0.6093 - val_acc: 0.6580
Epoch 8/100
100/100 [==============================] - 14s 138ms/step - loss: 0.6081 - acc: 0.6545 - val_loss: 0.6031 - val_acc: 0.6490
Epoch 9/100
100/100 [==============================] - 14s 136ms/step - loss: 0.6045 - acc: 0.6655 - val_loss: 0.6001 - val_acc: 0.6560
Epoch 10/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5936 - acc: 0.6720 - val_loss: 0.5845 - val_acc: 0.6770
Epoch 11/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5871 - acc: 0.6905 - val_loss: 0.5915 - val_acc: 0.6740
Epoch 12/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5791 - acc: 0.6865 - val_loss: 0.5785 - val_acc: 0.6860
Epoch 13/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5746 - acc: 0.7015 - val_loss: 0.6390 - val_acc: 0.6460
Epoch 14/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5826 - acc: 0.6760 - val_loss: 0.5704 - val_acc: 0.6870
Epoch 15/100
100/100 [==============================] - 14s 135ms/step - loss: 0.5694 - acc: 0.7045 - val_loss: 0.5638 - val_acc: 0.7010
Epoch 16/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5633 - acc: 0.6995 - val_loss: 0.5727 - val_acc: 0.6920
Epoch 17/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5627 - acc: 0.7020 - val_loss: 0.5927 - val_acc: 0.6780
Epoch 18/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5635 - acc: 0.7095 - val_loss: 0.5525 - val_acc: 0.7120
Epoch 19/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5420 - acc: 0.7205 - val_loss: 0.5557 - val_acc: 0.6840
Epoch 20/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5554 - acc: 0.7205 - val_loss: 0.5412 - val_acc: 0.7220
Epoch 21/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5500 - acc: 0.7200 - val_loss: 0.5772 - val_acc: 0.6950
Epoch 22/100
100/100 [==============================] - 14s 136ms/step - loss: 0.5384 - acc: 0.7185 - val_loss: 0.5597 - val_acc: 0.7150
Epoch 23/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5413 - acc: 0.7235 - val_loss: 0.5925 - val_acc: 0.6960
Epoch 24/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5263 - acc: 0.7320 - val_loss: 0.5651 - val_acc: 0.7030
Epoch 25/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5307 - acc: 0.7370 - val_loss: 0.5505 - val_acc: 0.7170
Epoch 26/100
100/100 [==============================] - 14s 141ms/step - loss: 0.5293 - acc: 0.7375 - val_loss: 0.5670 - val_acc: 0.7050
Epoch 27/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5243 - acc: 0.7295 - val_loss: 0.5418 - val_acc: 0.7220
Epoch 28/100
100/100 [==============================] - 14s 136ms/step - loss: 0.5320 - acc: 0.7205 - val_loss: 0.5486 - val_acc: 0.7260
Epoch 29/100
100/100 [==============================] - 14s 138ms/step - loss: 0.5337 - acc: 0.7330 - val_loss: 0.5402 - val_acc: 0.7180
Epoch 30/100
100/100 [==============================] - 14s 136ms/step - loss: 0.5260 - acc: 0.7420 - val_loss: 0.5894 - val_acc: 0.6750
Epoch 31/100
100/100 [==============================] - 14s 136ms/step - loss: 0.5112 - acc: 0.7385 - val_loss: 0.5189 - val_acc: 0.7330
Epoch 32/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5109 - acc: 0.7585 - val_loss: 0.5369 - val_acc: 0.7430
Epoch 33/100
100/100 [==============================] - 14s 136ms/step - loss: 0.5076 - acc: 0.7450 - val_loss: 0.5197 - val_acc: 0.7280
Epoch 34/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4954 - acc: 0.7555 - val_loss: 0.5251 - val_acc: 0.7350
Epoch 35/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4956 - acc: 0.7525 - val_loss: 0.5340 - val_acc: 0.7270
Epoch 36/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4920 - acc: 0.7605 - val_loss: 0.5156 - val_acc: 0.7350
Epoch 37/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4897 - acc: 0.7585 - val_loss: 0.5487 - val_acc: 0.7290
Epoch 38/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4996 - acc: 0.7520 - val_loss: 0.5329 - val_acc: 0.7240
Epoch 39/100
100/100 [==============================] - 14s 135ms/step - loss: 0.5052 - acc: 0.7465 - val_loss: 0.5519 - val_acc: 0.7220
Epoch 40/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4961 - acc: 0.7520 - val_loss: 0.5219 - val_acc: 0.7460
Epoch 41/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4914 - acc: 0.7570 - val_loss: 0.4978 - val_acc: 0.7630
Epoch 42/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4879 - acc: 0.7605 - val_loss: 0.5316 - val_acc: 0.7340
Epoch 43/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4761 - acc: 0.7750 - val_loss: 0.6817 - val_acc: 0.6820
Epoch 44/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4783 - acc: 0.7730 - val_loss: 0.5553 - val_acc: 0.7160
Epoch 45/100
100/100 [==============================] - 14s 137ms/step - loss: 0.5029 - acc: 0.7545 - val_loss: 0.5203 - val_acc: 0.7370
Epoch 46/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4888 - acc: 0.7665 - val_loss: 0.4990 - val_acc: 0.7450
Epoch 47/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4777 - acc: 0.7700 - val_loss: 0.4820 - val_acc: 0.7720
Epoch 48/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4810 - acc: 0.7660 - val_loss: 0.5285 - val_acc: 0.7350
Epoch 49/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4723 - acc: 0.7800 - val_loss: 0.5093 - val_acc: 0.7470
Epoch 50/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4676 - acc: 0.7725 - val_loss: 0.4863 - val_acc: 0.7510
Epoch 51/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4719 - acc: 0.7730 - val_loss: 0.5276 - val_acc: 0.7410
Epoch 52/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4696 - acc: 0.7695 - val_loss: 0.5314 - val_acc: 0.7420
Epoch 53/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4668 - acc: 0.7695 - val_loss: 0.5192 - val_acc: 0.7410
Epoch 54/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4593 - acc: 0.7820 - val_loss: 0.5270 - val_acc: 0.7350
Epoch 55/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4634 - acc: 0.7780 - val_loss: 0.5321 - val_acc: 0.7290
Epoch 56/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4555 - acc: 0.7870 - val_loss: 0.4986 - val_acc: 0.7510
Epoch 57/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4555 - acc: 0.7765 - val_loss: 0.5135 - val_acc: 0.7440
Epoch 58/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4530 - acc: 0.7885 - val_loss: 0.5744 - val_acc: 0.7130
Epoch 59/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4680 - acc: 0.7770 - val_loss: 0.5204 - val_acc: 0.7460
Epoch 60/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4368 - acc: 0.7970 - val_loss: 0.4965 - val_acc: 0.7570
Epoch 61/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4492 - acc: 0.7930 - val_loss: 0.5156 - val_acc: 0.7410
Epoch 62/100
100/100 [==============================] - 14s 140ms/step - loss: 0.4498 - acc: 0.7930 - val_loss: 0.4917 - val_acc: 0.7660
Epoch 63/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4536 - acc: 0.7880 - val_loss: 0.5111 - val_acc: 0.7350
Epoch 64/100
100/100 [==============================] - 14s 138ms/step - loss: 0.4447 - acc: 0.7860 - val_loss: 0.5026 - val_acc: 0.7470
Epoch 65/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4366 - acc: 0.7945 - val_loss: 0.4802 - val_acc: 0.7640
Epoch 66/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4362 - acc: 0.7920 - val_loss: 0.5070 - val_acc: 0.7420
Epoch 67/100
100/100 [==============================] - 14s 140ms/step - loss: 0.4461 - acc: 0.7930 - val_loss: 0.5612 - val_acc: 0.7270
Epoch 68/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4351 - acc: 0.7985 - val_loss: 0.5029 - val_acc: 0.7420
Epoch 69/100
100/100 [==============================] - 14s 140ms/step - loss: 0.4318 - acc: 0.8070 - val_loss: 0.4803 - val_acc: 0.7720
Epoch 70/100
100/100 [==============================] - 14s 140ms/step - loss: 0.4457 - acc: 0.8025 - val_loss: 0.4734 - val_acc: 0.7750
Epoch 71/100
100/100 [==============================] - 15s 145ms/step - loss: 0.4332 - acc: 0.7995 - val_loss: 0.4637 - val_acc: 0.7750
Epoch 72/100
100/100 [==============================] - 14s 140ms/step - loss: 0.4262 - acc: 0.8005 - val_loss: 0.4894 - val_acc: 0.7730
Epoch 73/100
100/100 [==============================] - 14s 142ms/step - loss: 0.4158 - acc: 0.8060 - val_loss: 0.4832 - val_acc: 0.7520
Epoch 74/100
100/100 [==============================] - 14s 141ms/step - loss: 0.4239 - acc: 0.8040 - val_loss: 0.4936 - val_acc: 0.7560
Epoch 75/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4319 - acc: 0.7945 - val_loss: 0.4867 - val_acc: 0.7580
Epoch 76/100
100/100 [==============================] - 14s 139ms/step - loss: 0.4346 - acc: 0.8020 - val_loss: 0.5174 - val_acc: 0.7450
Epoch 77/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4251 - acc: 0.8080 - val_loss: 0.4775 - val_acc: 0.7690
Epoch 78/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4143 - acc: 0.8050 - val_loss: 0.4612 - val_acc: 0.7680
Epoch 79/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4134 - acc: 0.8170 - val_loss: 0.5570 - val_acc: 0.7260
Epoch 80/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4252 - acc: 0.8090 - val_loss: 0.5228 - val_acc: 0.7580
Epoch 81/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4170 - acc: 0.8090 - val_loss: 0.5119 - val_acc: 0.7500
Epoch 82/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4018 - acc: 0.8150 - val_loss: 0.4920 - val_acc: 0.7580
Epoch 83/100
100/100 [==============================] - 14s 136ms/step - loss: 0.4050 - acc: 0.8020 - val_loss: 0.5131 - val_acc: 0.7490
Epoch 84/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4060 - acc: 0.8135 - val_loss: 0.4706 - val_acc: 0.7840
Epoch 85/100
100/100 [==============================] - 14s 137ms/step - loss: 0.4026 - acc: 0.8250 - val_loss: 0.4826 - val_acc: 0.7650
Epoch 86/100
100/100 [==============================] - 14s 137ms/step - loss: 0.3998 - acc: 0.8185 - val_loss: 0.5282 - val_acc: 0.7600
Epoch 87/100
 88/100 [=========================>....] - ETA: 1s - loss: 0.3904 - acc: 0.8324
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-28-5641380c72df> in <module>()
      4     epochs=100,
      5     validation_data=validation_generator,
----> 6     validation_steps=50
      7 )
      8 model.save(os.path.join(base_dir, 'cats_dogs_small_2.h5'))

E:\Miniconda3\envs\env3-gpu\lib\site-packages\keras\legacy\interfaces.py in wrapper(*args, **kwargs)
     85                 warnings.warn('Update your `' + object_name +
     86                               '` call to the Keras 2 API: ' + signature, stacklevel=2)
---> 87             return func(*args, **kwargs)
     88         wrapper._original_function = func
     89         return wrapper

E:\Miniconda3\envs\env3-gpu\lib\site-packages\keras\models.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
   1225                                         use_multiprocessing=use_multiprocessing,
   1226                                         shuffle=shuffle,
-> 1227                                         initial_epoch=initial_epoch)
   1228 
   1229     @interfaces.legacy_generator_methods_support

E:\Miniconda3\envs\env3-gpu\lib\site-packages\keras\legacy\interfaces.py in wrapper(*args, **kwargs)
     85                 warnings.warn('Update your `' + object_name +
     86                               '` call to the Keras 2 API: ' + signature, stacklevel=2)
---> 87             return func(*args, **kwargs)
     88         wrapper._original_function = func
     89         return wrapper

E:\Miniconda3\envs\env3-gpu\lib\site-packages\keras\engine\training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
   2113                 batch_index = 0
   2114                 while steps_done < steps_per_epoch:
-> 2115                     generator_output = next(output_generator)
   2116 
   2117                     if not hasattr(generator_output, '__len__'):

E:\Miniconda3\envs\env3-gpu\lib\site-packages\keras\utils\data_utils.py in get(self)
    549         try:
    550             while self.is_running():
--> 551                 inputs = self.queue.get(block=True).get()
    552                 self.queue.task_done()
    553                 if inputs is not None:

E:\Miniconda3\envs\env3-gpu\lib\multiprocessing\pool.py in get(self, timeout)
    636 
    637     def get(self, timeout=None):
--> 638         self.wait(timeout)
    639         if not self.ready():
    640             raise TimeoutError

E:\Miniconda3\envs\env3-gpu\lib\multiprocessing\pool.py in wait(self, timeout)
    633 
    634     def wait(self, timeout=None):
--> 635         self._event.wait(timeout)
    636 
    637     def get(self, timeout=None):

E:\Miniconda3\envs\env3-gpu\lib\threading.py in wait(self, timeout)
    547             signaled = self._flag
    548             if not signaled:
--> 549                 signaled = self._cond.wait(timeout)
    550             return signaled
    551 

E:\Miniconda3\envs\env3-gpu\lib\threading.py in wait(self, timeout)
    291         try:    # restore state no matter what (e.g., KeyboardInterrupt)
    292             if timeout is None:
--> 293                 waiter.acquire()
    294                 gotit = True
    295             else:

KeyboardInterrupt: 

In [26]:
import matplotlib.pyplot as plt

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'bo', label='Train acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title("Training and validation accuracy")
plt.legend()

plt.figure()

plt.plot(epochs, loss, 'bo', label='Train loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title("Training and validation loss")
plt.legend()

plt.show()



In [ ]: