In [1]:
# Part 1 - Building the CNN

from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense
from keras.models import load_model
from keras.callbacks import EarlyStopping
# Initializing the CNN
classifier = Sequential()

# Step 1 - Convolution
classifier.add(Conv2D(4, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))


# Step 2 - Max Pooling
classifier.add(MaxPooling2D(pool_size = (2,2)))

# Adding a second convolution layer
classifier.add(Conv2D(4, (3, 3), activation = 'relu'))
classifier.add(MaxPooling2D(pool_size = (2,2)))

# Adding a third convolution layer
classifier.add(Conv2D(4, (3, 3), activation = 'relu'))
classifier.add(MaxPooling2D(pool_size = (2,2)))

# Adding a fourth convolution layer
classifier.add(Conv2D(4, (3, 3), activation = 'relu'))
classifier.add(MaxPooling2D(pool_size = (2,2)))

# Optional deeper layers (commented out); note that with a 64x64 input the feature
# map is already 2x2 at this point, so enabling them would need a larger input size
# or 'same' padding.

# Adding a 5th convolution layer
#classifier.add(Conv2D(64, (3, 3), activation = 'relu'))
#classifier.add(MaxPooling2D(pool_size = (2,2)))

# Adding a 6th convolution layer
#classifier.add(Conv2D(256, (3, 3), activation = 'relu'))
#classifier.add(MaxPooling2D(pool_size = (2,2)))

# Adding a 7th convolution layer
#classifier.add(Conv2D(512, (3, 3), activation = 'relu'))
#classifier.add(MaxPooling2D(pool_size = (2,2)))

# Step 3 - Flattening
classifier.add(Flatten())
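# With the default 'valid' padding, the four conv/pool pairs above shrink the 64x64
# input to 62->31->29->14->12->6->4->2 pixels, so the flattened vector has 2*2*4 = 16 features.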


# Step 4 - Full Connection
classifier.add(Dense(units = 128, activation = 'relu'))
classifier.add(Dense(units = 4, activation = 'softmax'))


# Compiling the CNN
classifier.compile(optimizer = 'rmsprop', loss = 'sparse_categorical_crossentropy', metrics = ['accuracy'])
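# Uncomment to print the layer-by-layer output shapes and parameter counts of the
# network defined above:
#classifier.summary()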

# Part 2 - Fitting the CNN to the images
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(
        rescale=1./255,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True)

test_datagen = ImageDataGenerator(rescale=1./255)
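# Validation images only get the same 1./255 rescaling; no augmentation is applied.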

train_generator = train_datagen.flow_from_directory(
        'C:\\demo6\\TrainingData',
        target_size=(64, 64),
        batch_size=10,
        class_mode='sparse')

validation_generator = test_datagen.flow_from_directory(
        'C:\\demo6\\ValidationData',
        target_size=(64, 64),
        batch_size=2,
        class_mode='sparse')
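# class_mode='sparse' yields integer labels (matching the sparse_categorical_crossentropy
# loss above); uncomment to see which folder maps to which label index:
#print(train_generator.class_indices)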
early_stopping = EarlyStopping(monitor='acc', patience=15, verbose=1, mode='max')
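# Note: monitor='acc' watches training accuracy, and with patience=15 but only 3 epochs
# below, this callback will not actually trigger in this run.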

classifier.fit_generator(
        train_generator,
        steps_per_epoch=200,      # 2000 samples per epoch / batch size of 10
        epochs=3,
        validation_data=validation_generator,
        callbacks=[early_stopping],
        validation_steps=1000)
classifier.save('C:\\demo6\\clipRun.h5')


Using TensorFlow backend.
Found 400 images belonging to 4 classes.
Found 120 images belonging to 4 classes.
Epoch 1/3
200/200 [==============================] - 9s - loss: 1.2144 - acc: 0.4380 - val_loss: 1.1137 - val_acc: 0.5075
Epoch 2/3
200/200 [==============================] - 7s - loss: 0.7560 - acc: 0.6715 - val_loss: 1.0287 - val_acc: 0.5835
Epoch 3/3
200/200 [==============================] - 7s - loss: 0.5976 - acc: 0.7480 - val_loss: 0.8902 - val_acc: 0.6335
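Once saved, the model can be reloaded in a later session and applied to individual images. The snippet below is a minimal sketch of that step (the image path is hypothetical); the preprocessing has to mirror the generators above, i.e. a 64x64 resize and 1./255 rescaling, and the index returned by argmax refers to the ordering in train_generator.class_indices.

import numpy as np
from keras.models import load_model
from keras.preprocessing import image

model = load_model('C:\\demo6\\clipRun.h5')

# hypothetical test image; resize to the 64x64 input the network was trained on
img = image.load_img('C:\\demo6\\example.jpg', target_size=(64, 64))
x = image.img_to_array(img) / 255.0      # same rescaling as the generators
x = np.expand_dims(x, axis=0)            # shape (1, 64, 64, 3)

probs = model.predict(x)[0]              # softmax scores for the 4 classes
print(np.argmax(probs), probs)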