In [ ]:
!pip install keras cython h5py

In [ ]:
!pip install opencv-python

Capstone Project:

Flower Classification using Convolutional Neural Networks


In [2]:
import numpy as np
import pandas as pd
import keras


Using TensorFlow backend.
Couldn't import dot_parser, loading of dot files will not be possible.

In [3]:
from keras.layers import Input, Conv2D, MaxPool2D, Dropout, Activation, Dense, Flatten
from keras.models import Sequential
from keras.activations import relu, softmax
from keras.optimizers import Adam

In [4]:
from PIL import Image
import random
import pickle

In [177]:
import cv2
import os
# import urllib.request
# from urllib.request import Request, urlopen
# from urllib.error import URLError
# import socket  
# socket.setdefaulttimeout(1)

In [6]:
from keras.layers.normalization import BatchNormalization
from keras.preprocessing.image import ImageDataGenerator

In [7]:
from keras.models import model_from_json

In [264]:
import matplotlib.pyplot as plt
import pylab as pl

Data Collection

Scrape images from http://www.image-net.org


In [6]:
# category={'Sunflower':'http://www.image-net.org/api/text/imagenet.synset.geturls?wnid=n11978961',
#           'Peony':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11719286',
#           'Nigella':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11736851',
#           'Spathiphyllum':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11792341',
#           'Ragged_robin':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11811706',
#           'Soapwort':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11814584',
#           'Ice_plant':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11821184',
#           'Spring_beauty':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11859472',
#           'African_daisy':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11925303',
#           'Cornflower':'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n11947802'
#           }

In [7]:
# import cv2
# import os
# import urllib.request
# from urllib.request import Request, urlopen
# from urllib.error import URLError
# import socket  
# socket.setdefaulttimeout(1)



# def get_urls(urls_links):
#     url_list=urllib.request.urlopen(urls_links).read().decode().split('\r\n')
#     return url_list

In [8]:
# def download_images(urls_link,category_name):
#     if not os.path.exists(category_name):
#         os.makedirs(category_name)
#     count=1
#     url_list=get_urls(urls_link)
#     for url in url_list:
#         try:
#             path_name=str(category_name)+'/'+str(count)+'.'+str(category_name)+'.jpg'
#             urllib.request.urlretrieve(url,path_name)
             
#             img=cv2.imread(path_name)
#             resized_image=cv2.resize(img,(100,100))
#             cv2.imwrite(path_name,resized_image)
#         except Exception as e:
#             print(str(count)+str(e))
#         count+=1

In [9]:
# for cate in category:
#     download_images(category[cate], cate)

In [ ]:

Data Preprocessing


In [9]:
all_image_dir='all_flower_images'

In [10]:
def label_img(img):
    # Filenames look like '472.Sunflower.jpg'; the label is the second-to-last dot-separated field
    img_label = img.split('.')[-2]
    return img_label

In [11]:
def jpg_image_to_array(path):
    """
    Loads a JPEG image into a 3D NumPy array of shape
    (height, width, channels).
    """
    img = Image.open(path)
    img_arr = np.asarray(img).reshape((img.size[1], img.size[0], 3))
    return img_arr
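
Some files in the folder are unreadable or not 100×100 RGB, so the strict reshape above raises for them (see the skipped-file messages printed below). A more forgiving loader (a sketch, not the one used in this notebook) would normalize every image on the fly:

In [ ]:
# Sketch only: convert any mode (grayscale, CMYK, RGBA) to RGB and force 100x100
def jpg_image_to_array_safe(path):
    img = Image.open(path).convert('RGB').resize((100, 100))
    return np.asarray(img)   # always (100, 100, 3)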

In [ ]:


In [12]:
def create_image_list(image_dir):
    img_dataset=[]
    for img in os.listdir(image_dir):
        try: 
            img_path = str(image_dir) + '/' + str(img)
            image_array = jpg_image_to_array(img_path)
            img_dataset.append([image_array])
        except Exception as e:
            # Skip unreadable files (e.g. .DS_Store) and images that are not 100x100 RGB
            print(str(e))

    img_list_arr = np.array(img_dataset)
    len_img_list = img_list_arr.shape[0]
    img_list_arr_reshape = img_list_arr.reshape(len_img_list, 100, 100, 3)

    return img_list_arr_reshape

In [13]:
image_list_arr=create_image_list(all_image_dir)


cannot identify image file 'all_flower_images/.DS_Store'
cannot reshape array of size 16320 into shape (120,136,3)
cannot reshape array of size 12100 into shape (110,110,3)
cannot identify image file 'all_flower_images/370.Ragged_robin.jpg'
cannot reshape array of size 7000 into shape (70,100,3)
cannot reshape array of size 9216 into shape (96,96,3)
cannot identify image file 'all_flower_images/438.Spring_beauty.jpg'
cannot reshape array of size 90000 into shape (300,300,3)
cannot identify image file 'all_flower_images/472.Sunflower.jpg'
cannot reshape array of size 7000 into shape (70,100,3)
cannot identify image file 'all_flower_images/725.Spring_beauty.jpg'
cannot reshape array of size 2700 into shape (60,45,3)
cannot reshape array of size 36936 into shape (216,171,3)
cannot reshape array of size 13440 into shape (120,112,3)

In [14]:
image_list_arr.shape


Out[14]:
(11755, 100, 100, 3)

In [15]:
def create_label_list(image_dir):
    # Mirrors create_image_list: the image is loaded again only so that this pass
    # skips exactly the same files, keeping labels aligned with the image array
    label_dataset=[]
    for img in os.listdir(image_dir):
        try: 
            img_path = str(image_dir) + '/' + str(img)
            image_array = jpg_image_to_array(img_path)
            label = label_img(img)
            label_dataset.append([label])
        except Exception as e:
            print(str(e))

    labels_list_arr = np.array(label_dataset)

    return labels_list_arr
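
The two passes above must skip exactly the same files to keep images and labels aligned. A combined loader that returns both arrays from a single pass (a sketch, not used here) removes that coupling:

In [ ]:
# Sketch: one directory pass, so a skipped file drops its image and label together
def create_dataset(image_dir):
    images, labels = [], []
    for img in os.listdir(image_dir):
        try:
            images.append(jpg_image_to_array(image_dir + '/' + img))
            labels.append([label_img(img)])
        except Exception as e:
            print(str(e))
    return np.array(images).reshape(len(images), 100, 100, 3), np.array(labels)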

In [16]:
label_list_arr=create_label_list(all_image_dir)


cannot identify image file 'all_flower_images/.DS_Store'
cannot reshape array of size 16320 into shape (120,136,3)
cannot reshape array of size 12100 into shape (110,110,3)
cannot identify image file 'all_flower_images/370.Ragged_robin.jpg'
cannot reshape array of size 7000 into shape (70,100,3)
cannot reshape array of size 9216 into shape (96,96,3)
cannot identify image file 'all_flower_images/438.Spring_beauty.jpg'
cannot reshape array of size 90000 into shape (300,300,3)
cannot identify image file 'all_flower_images/472.Sunflower.jpg'
cannot reshape array of size 7000 into shape (70,100,3)
cannot identify image file 'all_flower_images/725.Spring_beauty.jpg'
cannot reshape array of size 2700 into shape (60,45,3)
cannot reshape array of size 36936 into shape (216,171,3)
cannot reshape array of size 13440 into shape (120,112,3)

In [17]:
label_list_arr.shape


Out[17]:
(11755, 1)

In [18]:
import random

In [19]:
def train_test_split(X, y, percentage_of_train):
    # Shuffle images and labels together, then split at the requested fraction
    c = list(zip(X, y))
    random.shuffle(c)
    a1, b1 = zip(*c)
    a2 = np.array(a1)
    b2 = np.array(b1)
    a2 = a2.reshape(X.shape[0], 100, 100, 3)

    cutoff = int(X.shape[0] * percentage_of_train)

    XX_train = a2[:cutoff]
    XX_test = a2[cutoff:]
    yy_train = b2[:cutoff]
    yy_test = b2[cutoff:]

    return XX_train, XX_test, yy_train, yy_test
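
For reference, scikit-learn ships an equivalent shuffle-and-split; a one-line alternative (a sketch, with `stratify` added to keep class proportions equal across the split):

In [ ]:
# Sketch: sklearn equivalent of the manual split above
from sklearn.model_selection import train_test_split as sk_split
X_tr, X_te, y_tr, y_te = sk_split(image_list_arr, label_list_arr,
                                  train_size=0.8, stratify=label_list_arr)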

In [20]:
X_train, X_test, y_train, y_test=train_test_split(image_list_arr,label_list_arr,0.8)

In [21]:
y_train.shape


Out[21]:
(9404, 1)

In [22]:
y_test.shape


Out[22]:
(2351, 1)

In [ ]:


In [249]:
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')

X_train_rescale=X_train/255
X_test_rescale=X_test/255

In [38]:
X_test.shape


Out[38]:
(2351, 100, 100, 3)

In [250]:
X_test_rescale


Out[250]:
array([[[[ 0.3019608 ,  0.28235295,  0.27058825],
         [ 0.29019609,  0.24705882,  0.22352941],
         [ 0.17254902,  0.13333334,  0.09803922],
         ..., 
         [ 0.4509804 ,  0.38039216,  0.37254903],
         [ 0.45490196,  0.40000001,  0.39607844],
         [ 0.41176471,  0.37254903,  0.36862746]],

        ..., 
        [[ 0.48235294,  0.45882353,  0.45882353],
         [ 0.58431375,  0.54901963,  0.56078434],
         ..., 
         [ 0.15294118,  0.14509805,  0.23921569]]],

       ..., 
       [[[ 0.06666667,  0.07058824,  0.04705882],
         ..., 
         [ 0.8392157 ,  0.627451  ,  0.72941178],
         [ 0.76078433,  0.56862748,  0.68627453]],

        ..., 
        [[ 0.16470589,  0.09411765,  0.08627451],
         ..., 
         [ 0.52549022,  0.5411765 ,  0.58431375]]]], dtype=float32)

In [26]:
from sklearn.preprocessing import LabelBinarizer
encoder = LabelBinarizer()
y_train_1hot = encoder.fit_transform(y_train)
y_test_1hot = encoder.transform(y_test)   # transform only, so the class order fitted on y_train is reused
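
LabelBinarizer orders its columns alphabetically by class name; encoder.classes_ shows which column corresponds to which flower:

In [ ]:
# Column order of the one-hot encoding (alphabetical class names)
print(encoder.classes_)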

In [494]:
y_train_1hot


Out[494]:
array([[0, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0],
       [1, 0, 0, ..., 0, 0, 0],
       ..., 
       [0, 0, 0, ..., 0, 0, 0],
       [1, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0]])

In [28]:
y_test_1hot


Out[28]:
array([[0, 0, 0, ..., 0, 1, 0],
       [1, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0],
       ..., 
       [0, 0, 0, ..., 0, 1, 0],
       [0, 0, 1, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0]])

In [ ]:
# y_train_test11 = keras.utils.to_categorical(y_train_test1, 10)
# y_test_test11 = keras.utils.to_categorical(y_test_test1, 10)

In [ ]:

Model Building


In [ ]:
from keras import optimizers
sgd = optimizers.SGD(lr=.2, momentum=0.9, nesterov=True)

In [ ]:

Data Augmentation


In [ ]:
'''
Hyperparameter log; columns appear to be:
conv1 conv2 drop1 drop2 drop3 dense lr epochs -> val_acc ('xx' = dropout omitted)

 50 100 0.15 0.25 0.25  32 lr=0.4 150 -> 0.70
 70 100 0.10 0.20 0.20  32 lr=0.4 150 -> 0.70
 70 100 0.10 0.20 0.20 128 lr=0.4 150 -> 0.71
 70 100  xx   xx   xx  128 lr=0.4 150 -> 0.71
 30  60  xx   xx   xx  128 lr=0.3 200 -> 0.74
'''

In [ ]:
#try4________________________________________________________

In [147]:
cnn18 = Sequential([
    Conv2D(30, kernel_size=(3, 3),
                 activation='relu',
                 input_shape=(100,100,3)),
#     Dropout(.1),
    MaxPool2D((2,2)),
    
    Conv2D(60, (3, 3), activation='relu'),
#     BatchNormalization(axis=-1),
#     Dropout(.2),
    MaxPool2D((2,2)),
    
#     Conv2D(60, (3, 3), activation='relu'),
#     BatchNormalization(axis=-1),
#     Dropout(.25),
#     MaxPool2D((2,2)),
    
#     Conv2D(80, (3, 3), activation='relu'),
#     BatchNormalization(axis=-1),
#     Dropout(.25),
#     MaxPool2D((2,2)),
    
#     Conv2D(100, (3, 3), activation='relu'),
#     Dropout(.5),
#     MaxPool2D((2,2)),
    
    Flatten(),
    Dense(128, activation='relu'),
#     BatchNormalization(axis=-1),
#     Dropout(.2),
    Dense(10, activation='softmax')
])

In [148]:
cnn18.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_54 (Conv2D)           (None, 98, 98, 30)        840       
_________________________________________________________________
max_pooling2d_54 (MaxPooling (None, 49, 49, 30)        0         
_________________________________________________________________
conv2d_55 (Conv2D)           (None, 47, 47, 60)        16260     
_________________________________________________________________
max_pooling2d_55 (MaxPooling (None, 23, 23, 60)        0         
_________________________________________________________________
flatten_22 (Flatten)         (None, 31740)             0         
_________________________________________________________________
dense_43 (Dense)             (None, 128)               4062848   
_________________________________________________________________
dense_44 (Dense)             (None, 10)                1290      
=================================================================
Total params: 4,081,238
Trainable params: 4,081,238
Non-trainable params: 0
_________________________________________________________________
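
Nearly all of the 4,081,238 parameters sit in the first Dense layer (31740 × 128 ≈ 4.06M). One common way to shrink such a model (a sketch only, not the model trained here) is to swap Flatten for global average pooling:

In [ ]:
# Sketch: GlobalAveragePooling2D averages each 23x23 feature map to a single value,
# so the classifier sees 60 inputs instead of 31740
from keras.layers import GlobalAveragePooling2D

cnn18_gap = Sequential([
    Conv2D(30, kernel_size=(3, 3), activation='relu', input_shape=(100, 100, 3)),
    MaxPool2D((2, 2)),
    Conv2D(60, (3, 3), activation='relu'),
    MaxPool2D((2, 2)),
    GlobalAveragePooling2D(),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax')
])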

In [149]:
cnn18.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(lr=0.3),
#               optimizer=Adam(lr=0.2),
#               optimizer=sgd,
              metrics=['accuracy'])

In [150]:
imgen_train2 = ImageDataGenerator(
    rescale=1./255,
    rotation_range=90,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.1,
    zoom_range=0.2,
    horizontal_flip=True,
    vertical_flip=True,
)

imgen_test2=ImageDataGenerator(rescale=1./255)

In [151]:
train_generator4 = imgen_train2.flow(X_train, y_train_1hot, batch_size=64)
test_generator4 = imgen_test2.flow(X_test, y_test_1hot, batch_size=64)
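
Before training, it helps to eyeball what the augmentation actually produces; pulling one batch from the generator and plotting a few images is a quick check (a sketch):

In [ ]:
# Sketch: preview one augmented batch; images come out already rescaled to [0, 1]
x_batch, y_batch = next(train_generator4)
fig, axes = plt.subplots(1, 4, figsize=(12, 3))
for ax, img in zip(axes, x_batch[:4]):
    ax.imshow(img)
    ax.axis('off')
plt.show()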

In [153]:
cnn18.fit_generator(train_generator4, steps_per_epoch=X_train.shape[0] // 64, epochs=200, 
                    validation_data=test_generator4, validation_steps=len(y_test_1hot)//64)


Epoch 1/200
146/146 [==============================] - 17s - loss: 1.8002 - acc: 0.3592 - val_loss: 1.6338 - val_acc: 0.4562
Epoch 2/200
146/146 [==============================] - 16s - loss: 1.5500 - acc: 0.4639 - val_loss: 1.4442 - val_acc: 0.5146
Epoch 3/200
146/146 [==============================] - 16s - loss: 1.4300 - acc: 0.5114 - val_loss: 1.4341 - val_acc: 0.5247
...
Epoch 198/200
146/146 [==============================] - 16s - loss: 0.6411 - acc: 0.7895 - val_loss: 0.8324 - val_acc: 0.7390
Epoch 199/200
146/146 [==============================] - 16s - loss: 0.6343 - acc: 0.7900 - val_loss: 0.7903 - val_acc: 0.7499
Epoch 200/200
146/146 [==============================] - 16s - loss: 0.6347 - acc: 0.7916 - val_loss: 0.8105 - val_acc: 0.7298
Out[153]:
<keras.callbacks.History at 0x7f13a3156650>

In [ ]:


In [158]:
# serialize model to JSON

cnn18_json = cnn18.to_json()

with open("cnn18.json", "w") as json_file:
    json_file.write(cnn18_json)
    
# serialize weights to HDF5
cnn18.save_weights("cnn18.h5")
print("Saved model to disk")


Saved model to disk
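
Keras can also store the architecture and weights together in a single HDF5 file, which avoids keeping the JSON and weights files in sync (a sketch of the alternative):

In [ ]:
# Sketch: single-file save/restore instead of the JSON + weights pair above
from keras.models import load_model

cnn18.save('cnn18_full.h5')
# restored = load_model('cnn18_full.h5')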

In [ ]:


In [8]:
# load json and create model
json_file = open('cnn18.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("cnn18.h5")
print("Loaded model from disk")


Loaded model from disk

In [ ]:


In [34]:
# evaluate loaded model on test data
loaded_model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(lr=0.3),
#               optimizer=Adam(lr=0.2),
#               optimizer=sgd,
              metrics=['accuracy'])

score2 = loaded_model.evaluate(X_test_rescale, y_test_1hot, verbose=0)
print("%s: %.2f%%" % (loaded_model.metrics_names[1], score2[1]*100))


acc: 78.52%
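
Overall accuracy hides which flowers get confused with which; a confusion matrix over the test set breaks this down per class (a sketch, using scikit-learn):

In [ ]:
# Sketch: rows are true classes, columns are predicted classes, in encoder.classes_ order
from sklearn.metrics import confusion_matrix

y_pred_probas = loaded_model.predict(X_test_rescale)
y_pred_labels = encoder.classes_[np.argmax(y_pred_probas, axis=1)]
print(confusion_matrix(y_test.ravel(), y_pred_labels, labels=encoder.classes_))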

In [ ]:

Prediction


In [198]:
def image_for_predict(im):
    # OpenCV loads images as BGR; training data was loaded as RGB via PIL, so convert first
    im_rgb = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
    resized_image = cv2.resize(im_rgb, (100, 100))
    img_arr = resized_image.reshape((1, 100, 100, 3))
    X_test = img_arr.astype('float32')
    X_test_rescale = X_test / 255
    return X_test_rescale

In [129]:
def predict_one_image(model, image):
    y_pred_probas1 = model.predict(image)
    y_pred = np.argmax(y_pred_probas1)

    # Build a one-hot row so the LabelBinarizer can map it back to a class name
    result_array = np.zeros(10, dtype=int)
    result_array[y_pred] = 1
    result_array_reshape = result_array.reshape(1, 10)

    result = encoder.inverse_transform(result_array_reshape)
    return result
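
Equivalently, because the binarizer's columns follow encoder.classes_, the class name can be read off directly without building a one-hot row (a sketch):

In [ ]:
# Sketch: argmax indexes straight into the fitted class order
def predict_one_image_simple(model, image):
    y_pred = np.argmax(model.predict(image))
    return encoder.classes_[y_pred]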

In [140]:
test_predict_image1=X_test_rescale[88].reshape(1,100,100,3)

In [141]:
predict_one_image(loaded_model,test_predict_image1)


Out[141]:
array(['Peony'], 
      dtype='<U13')

In [ ]:


In [162]:
# flower_dir={
#     0:'African_daisy',
#     1:'Cornflower',
#     2:'Ice_plant',
#     3:'Nigella',
#     4:'Peony',
#     5:'Ragged_robin',
#     6:'Soapwort',
#     7:'Spathiphyllum',
#     8:'Spring_beauty',
#     9:'Sunflower'
# }

In [ ]:
# save the model to disk
# pickle.dump(cnn18, open('cnn18.sav', 'wb'))


# load the model from disk
# loaded_model1 = pickle.load(open('cnn18.sav', 'rb'))

In [461]:
test_sunflower8 = cv2.imread('all_flower_images/1509.Sunflower.jpg')
test_peony8 = cv2.imread('all_flower_images/1502.Peony.jpg')

In [462]:
test_sunflower_4d=image_for_predict(test_sunflower8)
test_peony_4d=image_for_predict(test_peony8)

In [463]:
test_sunflower_for_predict=image_for_predict(test_sunflower8).reshape(100,100,3)
test_peony_for_predict=image_for_predict(test_peony8).reshape(100,100,3)

In [223]:
preview_sunflower = plt.imshow(test_sunflower_for_predict)
plt.show()



In [ ]:


In [224]:
preview_peony = plt.imshow(test_peony_for_predict)
plt.show()



In [470]:
test_sunflower_4d.shape


Out[470]:
(1, 100, 100, 3)

In [489]:
# Demo: an untrained 3-filter convolution, so its output can itself be shown as an RGB image
cnn18_display2 = Sequential()
cnn18_display2.add(Conv2D(3, kernel_size=(3, 3),
                          input_shape=(100,100,3)))

In [490]:
conv_sunflower=cnn18_display2.predict(test_sunflower_4d)

In [491]:
def visualize_flower(flower):
    flo=np.squeeze(flower,axis=0)
    print (flo.shape)
    plt.imshow(flo)
    plt.show()

In [492]:
visualize_flower(conv_sunflower)


(98, 98, 3)

In [ ]:


In [ ]:

Convolution visualizations

cnn18_display

(Demo only)


In [ ]:
# import theano   # unused: this notebook runs on the TensorFlow backend (see the import banner above)

In [266]:
import matplotlib.cm as cm

In [ ]:


In [245]:
cnn18_display = Sequential()

cnn18_display.add(Conv2D(30, kernel_size=(3, 3),
                 input_shape=(100,100,3))) 
convout1 = Activation('relu')
cnn18_display.add(convout1)
cnn18_display.add(MaxPool2D((2, 2)))

cnn18_display.add(Conv2D(60, (3, 3)))                  
convout2 = Activation('relu')
cnn18_display.add(convout2)
cnn18_display.add(MaxPool2D((2, 2)))
          


cnn18_display.add(Flatten())
cnn18_display.add(Dense(128, activation='relu'))

cnn18_display.add(Dense(10, activation='softmax'))


cnn18_display.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(lr=0.3),
#               optimizer=Adam(lr=0.2),
#               optimizer=sgd,
              metrics=['accuracy'])

In [246]:
cnn18_display.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_3 (Conv2D)            (None, 98, 98, 30)        840       
_________________________________________________________________
activation_3 (Activation)    (None, 98, 98, 30)        0         
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 49, 49, 30)        0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 47, 47, 60)        16260     
_________________________________________________________________
activation_4 (Activation)    (None, 47, 47, 60)        0         
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 23, 23, 60)        0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 31740)             0         
_________________________________________________________________
dense_1 (Dense)              (None, 128)               4062848   
_________________________________________________________________
dense_2 (Dense)              (None, 10)                1290      
=================================================================
Total params: 4,081,238
Trainable params: 4,081,238
Non-trainable params: 0
_________________________________________________________________

In [253]:
cnn18_display.fit(X_train_rescale, y_train_1hot, validation_split=.2, epochs=5)


Train on 7523 samples, validate on 1881 samples
Epoch 1/5
7523/7523 [==============================] - 103s - loss: 1.6454 - acc: 0.4327 - val_loss: 1.7561 - val_acc: 0.3966
Epoch 2/5
7523/7523 [==============================] - 104s - loss: 1.3549 - acc: 0.5470 - val_loss: 1.4370 - val_acc: 0.5045
Epoch 3/5
7523/7523 [==============================] - 104s - loss: 1.1915 - acc: 0.6103 - val_loss: 1.4805 - val_acc: 0.4976
Epoch 4/5
7523/7523 [==============================] - 105s - loss: 1.0805 - acc: 0.6556 - val_loss: 1.4141 - val_acc: 0.5167
Epoch 5/5
7523/7523 [==============================] - 104s - loss: 0.9832 - acc: 0.6870 - val_loss: 1.4987 - val_acc: 0.5024
Out[253]:
<keras.callbacks.History at 0x11d011400>

In [260]:
from keras import backend as K

In [261]:
# K.learning_phase() is a flag that indicates if the network is in training or
# predict phase. It allow layer (e.g. Dropout) to only be applied during training
inputs = [K.learning_phase()] + cnn18_display.inputs

_convout1_f = K.function(inputs, [convout1.output])
def convout1_f(X):
    # The [0] is to disable the training phase flag
    return _convout1_f([0] + [X])

In [311]:
inputs = [K.learning_phase()] + cnn18_display.inputs

_convout2_f = K.function(inputs, [convout2.output])
def convout2_f(X):
    # The [0] is to disable the training phase flag
    return _convout2_f([0] + [X])

In [ ]:


In [308]:
# utility functions
from mpl_toolkits.axes_grid1 import make_axes_locatable

def nice_imshow(ax, data, vmin=None, vmax=None, cmap=None):
    """Wrapper around pl.imshow"""
    if cmap is None:
        cmap = cm.jet
    if vmin is None:
        vmin = data.min()
    if vmax is None:
        vmax = data.max()
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    im = ax.imshow(data, vmin=vmin, vmax=vmax, interpolation='nearest', cmap=cmap)
    pl.colorbar(im, cax=cax)
    pl.show()

In [294]:
test_sunflower_for_display=image_for_predict(test_sunflower)
test_sunflower_for_display.shape


Out[294]:
(1, 100, 100, 3)

In [363]:
# Show the input image before visualizing its convolutions
X = test_peony2
pl.figure()
pl.title('input')
nice_imshow(pl.gca(), np.squeeze(X), vmin=0, vmax=1, cmap=cm.binary)



In [336]:
C1.shape[0:2]


Out[336]:
(98, 98)

In [341]:
# Scratch: edge length of a 6x6 mosaic of 98-px tiles with 1-px borders, and a masked-array demo
mosaic_edge = 6*98 + 5
ma.masked_all((3,3))


Out[341]:
masked_array(data =
 [[-- -- --]
 [-- -- --]
 [-- -- --]],
             mask =
 [[ True  True  True]
 [ True  True  True]
 [ True  True  True]],
       fill_value = 1e+20)

In [351]:
C1.reshape(30,98,98)[1]


Out[351]:
array([[ 26.17295074,  39.70990372,   0.        , ...,   9.70814419,
          0.        ,   1.00507796],
       [ 14.19150925,   0.        ,   2.03038526, ...,  13.614501  ,
         60.01481247,  10.80819607],
       [ 13.20535183,   0.        ,   0.        , ...,   0.        ,
         52.77131271,  56.3646698 ],
       ..., 
       [  0.        ,  11.9465847 ,  58.53059006, ...,  15.55350113,
         11.00073338,   0.        ],
       [  0.        ,  24.30467224,   0.        , ...,  29.03567123,
          0.        ,   0.        ],
       [  0.        ,  68.10700989,   0.        , ...,  16.19389153,
          0.        ,  90.66222382]], dtype=float32)

In [352]:
import numpy.ma as ma
def make_mosaic(imgs, nrows, ncols, border=1):
    """
    Given a set of images with all the same shape, makes a
    mosaic with nrows and ncols
    """
    nimgs = imgs.shape[0]
    imshape = imgs.shape[1:]
    
    mosaic = ma.masked_all((nrows * imshape[0] + (nrows - 1) * border,
                            ncols * imshape[1] + (ncols - 1) * border),
                            dtype=np.float32)
    
    paddedh = imshape[0] + border
    paddedw = imshape[1] + border
    for i in range(nimgs):
        row = int(np.floor(i / ncols))
        col = i % ncols
        
        mosaic[row * paddedh:row * paddedh + imshape[0],
               col * paddedw:col * paddedw + imshape[1]] = imgs[i]
    return mosaic

# pl.imshow(make_mosaic(np.random.random((9, 10, 10)), 3, 3, border=1))

In [ ]:

Visualize convolution result (after activation)


In [366]:
# Visualize convolution result (after activation)
C1 = convout1_f(test_peony2)
C1 = np.squeeze(C1)                 # (98, 98, 30)
CC1 = np.transpose(C1, (2, 0, 1))   # (30, 98, 98); a bare reshape would scramble pixels across filters
print("C1 shape : ", CC1.shape)

pl.figure(figsize=(15, 15))
pl.suptitle('convout1')
nice_imshow(pl.gca(), make_mosaic(CC1, 6, 6), cmap=cm.binary)


C1 shape :  (30, 98, 98)

In [368]:
CCC1=CC1.reshape(1,98,98,30)

In [371]:
# Visualize convolution result (after activation)
C2 = convout2_f(CCC1)
C2 = np.squeeze(C2)
CC2 = C2.reshape(30,98,98)
print("C2 shape : ", CC2.shape)

pl.figure(figsize=(15, 15))
pl.suptitle('convout2')
nice_imshow(pl.gca(), make_mosaic(CC2, 6, 6), cmap=cm.binary)


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-371-d8c820eb7a30> in <module>()
      1 # Visualize convolution result (after activation)
----> 2 C2 = convout2_f(CCC1)
      3 C2 = np.squeeze(C2)
      4 CC2 = C2.reshape(30,98,98)
      5 print("C2 shape : ", CC2.shape)

<ipython-input-311-59a4a5befb32> in convout2_f(X)
      4 def convout2_f(X):
      5     # The [0] is to disable the training phase flag
----> 6     return _convout2_f([0] + [X])

/Users/Dan/anaconda/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py in __call__(self, inputs)
   2268         updated = session.run(self.outputs + [self.updates_op],
   2269                               feed_dict=feed_dict,
-> 2270                               **self.session_kwargs)
   2271         return updated[:len(self.outputs)]
   2272 

/Users/Dan/anaconda/lib/python3.6/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
    787     try:
    788       result = self._run(None, fetches, feed_dict, options_ptr,
--> 789                          run_metadata_ptr)
    790       if run_metadata:
    791         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/Users/Dan/anaconda/lib/python3.6/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
    973                 'Cannot feed value of shape %r for Tensor %r, '
    974                 'which has shape %r'
--> 975                 % (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
    976           if not self.graph.is_feedable(subfeed_t):
    977             raise ValueError('Tensor %s may not be fed.' % subfeed_t)

ValueError: Cannot feed value of shape (1, 98, 98, 30) for Tensor 'conv2d_3_input:0', which has shape '(?, 100, 100, 3)'
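
The error is expected: _convout2_f was built on the model's input tensor, so it must be fed the raw 100×100×3 image, not the first layer's activations. Feeding the original image runs the graph through both blocks and yields the 47×47×60 second-layer activations directly (a sketch):

In [ ]:
# Sketch: feed the input image itself; K.function evaluates everything up to convout2.output
C2 = convout2_f(test_peony2)
C2 = np.squeeze(C2)                  # (47, 47, 60)
CC2 = np.transpose(C2, (2, 0, 1))    # one 47x47 map per filter
print("C2 shape : ", CC2.shape)

pl.figure(figsize=(15, 15))
pl.suptitle('convout2')
nice_imshow(pl.gca(), make_mosaic(CC2, 8, 8), cmap=cm.binary)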
