In [1]:
    
import numpy as np
import pandas as pd
import os
import pickle
from keras.models import Sequential
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import MaxPooling2D
from keras.layers.core import Flatten, Dense, Activation, Dropout
from keras.preprocessing import image
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import train_test_split
from matplotlib import pyplot as plt
    
    
In [2]:
    
# My LeNet architecture
model = Sequential()
# conv filters of 5x5 each
# Layer 1
model.add(Convolution2D(32, 5, 5, input_shape=(192, 192, 3)))
model.add(Activation("relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
# Layer 2
model.add(Convolution2D(64, 5, 5))
model.add(Activation("relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))    
model.add(Flatten())
# Layer 3
model.add(Dense(1024))
model.add(Activation("relu"))
model.add(Dropout(0.5))
# Layer 4
model.add(Dense(512))
model.add(Activation("relu"))
model.add(Dropout(0.5))
# Layer 5
model.add(Dense(2))
model.add(Activation("softmax"))
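
A quick sanity check on the stack above (not part of the original cell): with 'valid' convolutions, the spatial size goes 192 -> 188 -> 94 -> 90 -> 45, so Flatten feeds 45*45*64 = 129,600 units into the 1024-unit Dense layer, which is where most of the parameters live. model.summary() confirms this:

# Sanity check: print layer output shapes and parameter counts.
# Conv 5x5 ('valid'): 192->188, pool: ->94, conv: ->90, pool: ->45; Flatten = 45*45*64 = 129,600.
model.summary()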
    
In [3]:
    
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
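
With a 2-unit softmax and one-hot labels from to_categorical, categorical_crossentropy is the conventional pairing; binary_crossentropy still trains here, but it averages element-wise binary losses over the two outputs rather than using the standard multiclass objective. A hedged alternative compile, if the conventional loss is preferred:

# Alternative: the standard loss for a 2-class softmax with one-hot targets.
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])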
    
In [69]:
    
X = []
y = []
    
In [5]:
    
with open("X.pkl", 'rb') as picklefile: 
    X = pickle.load(picklefile)
    
In [6]:
    
with open("y.pkl", 'rb') as picklefile: 
    y = pickle.load(picklefile)
    
In [70]:
    
# set folder path
folderpath = 'Images/Train/Undistorted/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        X.append(np.asarray(img))
        y.append(0)
    else:
        print(filename, 'not a pic')
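
This load-a-folder loop recurs throughout the notebook; a small helper (load_image_folder is a hypothetical name, not in the original code) keeps it in one place and filters on file extension instead of only skipping .DS_Store:

# Hypothetical helper: load every image in a folder as a (192, 192, 3) array.
def load_image_folder(folderpath, label=None, target_size=(192, 192)):
    arrays, labels, names = [], [], []
    for filename in os.listdir(folderpath):
        if filename.lower().endswith(('.jpg', '.jpeg', '.png')):
            img = image.load_img(os.path.join(folderpath, filename), target_size=target_size)
            arrays.append(np.asarray(img))
            names.append(filename)
            if label is not None:
                labels.append(label)
        else:
            print(filename, 'not a pic')
    return arrays, labels, names

# e.g. undistorted training images, labelled 0:
# X0, y0, _ = load_image_folder('Images/Train/Undistorted/', label=0)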
    
In [4]:
    
import pickle
    
In [12]:
    
with open('undistorted_X.pkl', 'wb') as picklefile:
    pickle.dump(X, picklefile)
    
In [13]:
    
with open('undistorted_y.pkl', 'wb') as picklefile:
    pickle.dump(y, picklefile)
    
In [7]:
    
# set folder path
folderpath = 'Images/Train/DigitalBlur2/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        X.append(np.asarray(img))
        y.append(1)
    else:
        print(filename, 'not a pic')
    
In [9]:
    
len(y)
    
    Out[9]:
In [17]:
    
with open('X.pkl', 'wb') as picklefile:
    pickle.dump(X, picklefile)
    
In [18]:
    
with open('y.pkl', 'wb') as picklefile:
    pickle.dump(y, picklefile)
    
In [10]:
    
X_stacked = np.stack(X)
X_norm = X_stacked/255.
y_cat = to_categorical(y)
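
One caveat with X_stacked/255.: NumPy promotes the uint8 stack to float64, roughly 8x the memory of the raw images. Casting to float32 first (a sketch, not in the original cell) halves that and matches Keras's default float type:

# Sketch: normalise in float32 to keep the array's memory footprint down.
X_norm = np.stack(X).astype('float32') / 255.0
y_cat = to_categorical(y)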
    
In [11]:
    
X_train, X_test, y_train, y_test = train_test_split(X_norm, y_cat, train_size=2500, random_state=42)
    
In [12]:
    
# Data augmenter
dg = image.ImageDataGenerator(horizontal_flip=True, vertical_flip=True)
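
The generator only flips, so a quick visual check is enough to confirm it is wired up as intended; a small sketch (assuming X_train and y_train from the split above):

# Sketch: eyeball one augmented batch from the generator.
batch_X, batch_y = next(dg.flow(X_train, y_train, batch_size=9))
fig, axes = plt.subplots(3, 3, figsize=(6, 6))
for ax, pic in zip(axes.ravel(), batch_X):
    ax.imshow(pic)
    ax.axis('off')
plt.show()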
    
In [13]:
    
model.load_weights('lenet_weights.h5')
    
In [14]:
    
cb_es = EarlyStopping(monitor='val_acc', patience=2, verbose=1)
cb_mc = ModelCheckpoint('lenet_weights2.h5', monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=True)
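
Note that older Keras logs the accuracy metric as 'val_acc', while newer tensorflow.keras logs 'val_accuracy'; monitoring a missing key makes both callbacks warn and do nothing. A hedged equivalent for recent versions:

# Sketch for newer tensorflow.keras, where the logged key is 'val_accuracy'.
cb_es = EarlyStopping(monitor='val_accuracy', patience=2, verbose=1)
cb_mc = ModelCheckpoint('lenet_weights2.h5', monitor='val_accuracy', verbose=1,
                        save_best_only=True, save_weights_only=True)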
    
In [15]:
    
# Fit generator
# play with samples/epoch, nb_epoch, val_samples.
model.fit_generator(dg.flow(X_train, y_train), samples_per_epoch=3000, nb_epoch=30, validation_data=dg.flow(X_test, y_test), nb_val_samples=300, callbacks=[cb_es, cb_mc])
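
samples_per_epoch, nb_epoch and nb_val_samples are the Keras 1 argument names, and fit_generator itself was later folded into fit. A rough equivalent for tensorflow.keras 2.x, which counts in batches rather than samples (assuming batch size 32):

# Sketch of the same run with the Keras 2.x fit() API (steps are batch counts).
model.fit(dg.flow(X_train, y_train, batch_size=32),
          steps_per_epoch=3000 // 32,                      # ~3000 samples per epoch
          epochs=30,
          validation_data=dg.flow(X_test, y_test, batch_size=32),
          validation_steps=300 // 32,                      # ~300 validation samples
          callbacks=[cb_es, cb_mc])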
    
    
    
    
    Out[15]:
In [32]:
    
clean_pics = []
blurry_pics = []
    
In [6]:
    
backBlur_pics = []
    
In [33]:
    
# set folder path
folderpath = 'Images/clearSamples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        clean_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
    
In [34]:
    
# set folder path
folderpath = 'Images/natblurSamples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        blurry_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
    
In [7]:
    
# set folder path
folderpath = 'Images/backBlurSamples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        backBlur_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
    
In [82]:
    
len(backBlur_pics)
    
    Out[82]:
In [9]:
    
backBlur_pics_array = np.stack(backBlur_pics)/255.
    
In [36]:
    
len(blurry_pics)
    
    Out[36]:
In [37]:
    
clean_pics_array = np.stack(clean_pics)/255.
    
In [38]:
    
blurry_pics_array = np.stack(blurry_pics)/255.
    
In [42]:
    
blurry_pics_array.shape
    
    Out[42]:
In [44]:
    
model.predict_classes(clean_pics_array)
    
    
    Out[44]:
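
predict_classes and predict_proba are Sequential-only conveniences that were removed in recent tensorflow.keras versions; the portable equivalent is a plain predict plus an argmax over the class axis:

# Sketch: version-agnostic replacements for predict_proba / predict_classes.
probs = model.predict(clean_pics_array)    # class probabilities (what predict_proba returns)
classes = np.argmax(probs, axis=-1)        # hard labels (what predict_classes returns)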
In [91]:
    
model.predict_proba(clean_pics_array)
    
    
    Out[91]:
In [45]:
    
model.predict_classes(blurry_pics_array)
    
    
    Out[45]:
In [90]:
    
model.predict_proba(blurry_pics_array)
    
    
    Out[90]:
In [10]:
    
model.predict_proba(backBlur_pics_array)
    
    
    Out[10]:
In [89]:
    
plt.imshow(blurry_pics_array[0])
plt.show()
    
    
In [93]:
    
model.save('lenet_3rdPass.h5')
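
model.save stores the architecture, weights and optimizer state together, so the file can be reloaded later without rebuilding the Sequential stack:

# Reload the full model in a later session (no need to redefine the layers first).
from keras.models import load_model
model = load_model('lenet_3rdPass.h5')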
    
In [86]:
    
model.save_weights('test_weights.h5')
    
In [20]:
    
backBlur_pics = []
backBlur_filenames = []
# set folder path
folderpath = 'Images/backBlurAll_longIter/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        backBlur_filenames.append(filename)
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        backBlur_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
df_backBlur = pd.DataFrame(backBlur_filenames, columns=['filename'])
    
    
In [21]:
    
backBlur_pics_array = np.stack(backBlur_pics)/255.
    
In [22]:
    
df_backBlur['blur_class'] = model.predict_classes(backBlur_pics_array)
    
    
In [23]:
    
if not os.path.exists(folderpath+'blurry'):
    os.mkdir(folderpath+'blurry')
for index, row in df_backBlur.iterrows():
    if row['blur_class'] == 1:
        oldpath = folderpath + row['filename']
        newpath = folderpath + 'blurry/' + row['filename']
        os.rename(oldpath, newpath)
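
The same load / predict / move routine is repeated below for the natBlur, M3 and sample folders; a single hypothetical helper (sort_blurry, not in the original notebook) captures the whole pattern:

# Hypothetical helper: classify every image in a folder and move the predicted-blurry
# ones (class 1) into a 'blurry' subfolder, returning the filename/class table.
def sort_blurry(folderpath, model, target_size=(192, 192)):
    names, arrays = [], []
    for filename in os.listdir(folderpath):
        if filename.lower().endswith(('.jpg', '.jpeg', '.png')):
            img = image.load_img(os.path.join(folderpath, filename), target_size=target_size)
            arrays.append(np.asarray(img))
            names.append(filename)
    preds = model.predict_classes(np.stack(arrays) / 255.)
    blurry_dir = os.path.join(folderpath, 'blurry')
    if not os.path.exists(blurry_dir):
        os.mkdir(blurry_dir)
    for filename, pred in zip(names, preds):
        if pred == 1:
            os.rename(os.path.join(folderpath, filename), os.path.join(blurry_dir, filename))
    return pd.DataFrame({'filename': names, 'blur_class': preds})

# e.g. df_natBlur = sort_blurry('Images/natBlurAll_longIter/', model)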
    
In [16]:
    
natBlur_pics = []
natBlur_filenames = []
# set folder path
folderpath = 'Images/natBlurAll_longIter/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        natBlur_filenames.append(filename)
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        natBlur_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
df_natBlur = pd.DataFrame(natBlur_filenames, columns=['filename'])
    
    
In [17]:
    
natBlur_pics_array = np.stack(natBlur_pics)/255.
    
In [18]:
    
df_natBlur['blur_class'] = model.predict_classes(natBlur_pics_array)
    
    
In [19]:
    
if not os.path.exists(folderpath+'blurry'):
    os.mkdir(folderpath+'blurry')
for index, row in df_natBlur.iterrows():
    if row['blur_class'] == 1:
        oldpath = folderpath + row['filename']
        newpath = folderpath + 'blurry/' + row['filename']
        os.rename(oldpath, newpath)
    
In [24]:
    
m3Blur_pics = []
m3Blur_filenames = []
# set folder path
folderpath = 'Images/M3Samples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        m3Blur_filenames.append(filename)
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        m3Blur_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
df_m3Blur = pd.DataFrame(m3Blur_filenames, columns=['filename'])
    
In [25]:
    
m3Blur_pics_array = np.stack(m3Blur_pics)/255.
    
In [26]:
    
df_m3Blur['blur_class'] = model.predict_classes(m3Blur_pics_array)
    
    
In [27]:
    
if not os.path.exists(folderpath+'blurry'):
    os.mkdir(folderpath+'blurry')
for index, row in df_m3Blur.iterrows():
    if row['blur_class'] == 1:
        oldpath = folderpath + row['filename']
        newpath = folderpath + 'blurry/' + row['filename']
        os.rename(oldpath, newpath)
    
In [28]:
    
df_m3Blur
    
    Out[28]:
In [29]:
    
clearSample_pics = []
clearSample_filenames = []
# set folder path
folderpath = 'Images/clearSamples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        clearSample_filenames.append(filename)
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        clearSample_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
df_clearSample = pd.DataFrame(clearSample_filenames, columns=['filename'])
clearSample_pics_array = np.stack(clearSample_pics)/255.
df_clearSample['blur_class'] = model.predict_classes(clearSample_pics_array)
    
    
In [30]:
    
if not os.path.exists(folderpath+'blurry'):
    os.mkdir(folderpath+'blurry')
for index, row in df_clearSample.iterrows():
    if row['blur_class'] == 1:
        oldpath = folderpath + row['filename']
        newpath = folderpath + 'blurry/' + row['filename']
        os.rename(oldpath, newpath)
    
In [31]:
    
blurSample_pics = []
blurSample_filenames = []
# set folder path
folderpath = 'Images/natBlurSamples/'
# load image arrays
for filename in os.listdir(folderpath):
    if filename != '.DS_Store':
        blurSample_filenames.append(filename)
        imagepath = folderpath + filename
        img = image.load_img(imagepath, target_size=(192,192))
        blurSample_pics.append(np.asarray(img))
    else:
        print(filename, 'not a pic')
df_blurSample = pd.DataFrame(blurSample_filenames, columns=['filename'])
blurSample_pics_array = np.stack(blurSample_pics)/255.
df_blurSample['blur_class'] = model.predict_classes(blurSample_pics_array)
    
    
In [33]:
    
if not os.path.exists(folderpath+'blurry'):
    os.mkdir(folderpath+'blurry')
for index, row in df_blurSample.iterrows():
    if row['blur_class'] == 1:
        oldpath = folderpath + row['filename']
        newpath = folderpath + 'blurry/' + row['filename']
        os.rename(oldpath, newpath)
    
In [5]:
    
# NaturalBlurSet.xlsx is a spreadsheet, not an image, so load_img raises an error here
img = image.load_img('Images/NaturalBlurSet.xlsx', target_size=(192,192))
    
    
In [8]:
    
'.JPG'.lower()
    
    Out[8]:
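
The '.JPG'.lower() check above points at the underlying problem: the folder loops only special-case .DS_Store, so a non-image file such as NaturalBlurSet.xlsx reaches load_img and raises an error. A hedged extension filter avoids that:

# Sketch: accept only known image extensions, case-insensitively.
def is_image_file(filename, extensions=('.jpg', '.jpeg', '.png')):
    return os.path.splitext(filename)[1].lower() in extensions

# is_image_file('IMG_0001.JPG')        -> True
# is_image_file('NaturalBlurSet.xlsx') -> False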