Quiz 5 - CNN and Bag of Features for object recognition

Brian Acevedo

Eugenio Pacceli

Renato Oliveira

Bag of features with SVM for the CIFAR10 dataset


In [ ]:
import cv2
from sklearn.cluster import KMeans
from sklearn.svm import SVC
import numpy as np
from keras.datasets import cifar10
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score

from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.utils import np_utils


if __name__ == "__main__":
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
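    # x_train: (50000, 32, 32, 3) uint8 images; y_train: (50000, 1) integer labels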

    sift = cv2.xfeatures2d.SIFT_create()  # SIFT lives in opencv-contrib's xfeatures2d module in this OpenCV version
    descritores = []

    # Extract SIFT descriptors for every image in the training set
    for i in np.arange(x_train.shape[0]):
        _, imgDesc = sift.detectAndCompute(x_train[i], None)
        if imgDesc is not None:
            for j in np.arange(imgDesc.shape[0]):
                descritores.append(imgDesc[j])

    descritores = np.array(descritores)
    print("Computing centroids from the descriptors")

    # Apply KMeans to the descriptor list to generate 10 centroids, one per visual word
    kmeans = KMeans(n_clusters=10).fit(descritores)
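    # Note: 10 visual words is a very small vocabulary; classic bag-of-features
    # pipelines often use hundreds of clusters, at correspondingly higher K-means cost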

    treino_hist_X = []
    treino_hist_Y = []
    print("Computing histograms for the training images")

    # For each image, find the closest centroid for every descriptor;
    # this yields a vector of centroid indices for the image's descriptors.
    # Build a normalized histogram from that vector for each image,
    # and pair each histogram with the image's label.
    for i in np.arange(x_train.shape[0]):
        _, imgDesc = sift.detectAndCompute(x_train[i], None)
        if imgDesc is not None:
            kcent = kmeans.predict(imgDesc)  # nearest centroid for each descriptor
            hist, _ = np.histogram(kcent, bins=[0,1,2,3,4,5,6,7,8,9,10], density=True)
            treino_hist_X.append(hist)
            treino_hist_Y.append(y_train[i])


    treino_hist_X = np.array(treino_hist_X).reshape((-1,10))
    treino_hist_Y = np.array(treino_hist_Y).reshape((-1,))


    # Train an SVM classifier on the (histogram, label) pairs from the training set
    modSVM = SVC()
    modSVM.fit(treino_hist_X, treino_hist_Y)
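    # Note: SVC defaults to an RBF kernel; C and gamma are left at their defaults here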

    matriz_conf_teste = np.zeros((10,10))

    # Apply the SVM to the test set:
    # extract descriptors for each image, find the closest centroids,
    # build the per-image histogram, and predict the category with the SVM classifier.
    # Each prediction updates the confusion matrix:
    # rows correspond to true labels, columns to SVM predictions.
    preds=[]
    labels=[]
    for i in np.arange(x_test.shape[0]):
        _, imgDesc = sift.detectAndCompute(x_test[i], None)
        if imgDesc is not None:
            kcent = kmeans.predict(imgDesc)  # nearest centroid for each descriptor
            hist, _ = np.histogram(kcent, bins=[0,1,2,3,4,5,6,7,8,9,10], density=True)
            ret = modSVM.predict(hist.reshape((1, -1)))[0]
            preds.append(ret)
            labels.append(y_test[i, 0])
            real = y_test[i, 0]
            matriz_conf_teste[real, ret] += 1

    print("accuracy is {0}".format(accuracy_score(labels,preds)))
    plt.pcolor(matriz_conf_teste,cmap="jet")
    plt.colorbar()
    plt.title("Confusion Matrix - Test Set")
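
Since the rows of the confusion matrix hold the true labels and the columns hold the SVM predictions, per-class accuracy can be read off the diagonal. A minimal sketch, reusing matriz_conf_teste from the cell above:

In [ ]:
# Per-class accuracy: diagonal entry over the row sum (number of test samples per true class)
per_class = np.diag(matriz_conf_teste) / matriz_conf_teste.sum(axis=1)
for classe, acc in enumerate(per_class):
    print("class {0} accuracy: {1:.3f}".format(classe, acc))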

LeNet-5 for the CIFAR10 dataset


In [2]:
(x_train, y_train), (x_test, y_test) = cifar10.load_data()  # uses the CIFAR10 dataset shipped with Keras

nb_classes = 10
nb_epoch = 200
batch_size = 32

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255

in_shape=x_train.shape[1:]

y_train = np_utils.to_categorical(y_train, nb_classes)  # one-hot encode training labels (y_test keeps integer labels for evaluation)

model = Sequential()
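# LeNet-5-style stack: two convolution + max-pooling stages followed by fully connected layers with dropout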

model.add(Conv2D(12, (5, 5), activation='relu', input_shape=in_shape, kernel_initializer='he_normal'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(25, (5, 5), activation='relu', kernel_initializer='he_normal'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# Flatten the 3D output to a 1D tensor so the fully connected layers can accept it
model.add(Flatten())
model.add(Dense(180, activation='relu', kernel_initializer='he_normal'))
model.add(Dropout(0.5))
model.add(Dense(100, activation='relu', kernel_initializer='he_normal'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes, activation='softmax', kernel_initializer='he_normal'))  # last layer: one output per class

model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])

model.fit(x_train, y_train, batch_size=batch_size, epochs=nb_epoch, verbose=2)


Epoch 1/200
13s - loss: 1.8041 - acc: 0.3234
Epoch 2/200
12s - loss: 1.5206 - acc: 0.4456
Epoch 3/200
13s - loss: 1.4021 - acc: 0.4976
Epoch 4/200
12s - loss: 1.3266 - acc: 0.5282
Epoch 5/200
14s - loss: 1.2673 - acc: 0.5529
Epoch 6/200
14s - loss: 1.2163 - acc: 0.5737
Epoch 7/200
13s - loss: 1.1723 - acc: 0.5903
Epoch 8/200
13s - loss: 1.1340 - acc: 0.6059
Epoch 9/200
12s - loss: 1.1021 - acc: 0.6170
Epoch 10/200
13s - loss: 1.0729 - acc: 0.6266
Epoch 11/200
14s - loss: 1.0487 - acc: 0.6379
Epoch 12/200
13s - loss: 1.0255 - acc: 0.6457
Epoch 13/200
14s - loss: 1.0039 - acc: 0.6535
Epoch 14/200
14s - loss: 0.9881 - acc: 0.6610
Epoch 15/200
15s - loss: 0.9658 - acc: 0.6669
Epoch 16/200
12s - loss: 0.9457 - acc: 0.6742
Epoch 17/200
14s - loss: 0.9260 - acc: 0.6787
Epoch 18/200
13s - loss: 0.9130 - acc: 0.6823
Epoch 19/200
13s - loss: 0.8973 - acc: 0.6899
Epoch 20/200
13s - loss: 0.8837 - acc: 0.6960
Epoch 21/200
13s - loss: 0.8685 - acc: 0.7014
Epoch 22/200
12s - loss: 0.8631 - acc: 0.7022
Epoch 23/200
12s - loss: 0.8445 - acc: 0.7079
Epoch 24/200
11s - loss: 0.8338 - acc: 0.7122
Epoch 25/200
12s - loss: 0.8202 - acc: 0.7169
Epoch 26/200
12s - loss: 0.8117 - acc: 0.7197
Epoch 27/200
13s - loss: 0.8014 - acc: 0.7234
Epoch 28/200
12s - loss: 0.7924 - acc: 0.7261
Epoch 29/200
13s - loss: 0.7876 - acc: 0.7279
Epoch 30/200
12s - loss: 0.7736 - acc: 0.7305
Epoch 31/200
13s - loss: 0.7718 - acc: 0.7327
Epoch 32/200
12s - loss: 0.7522 - acc: 0.7403
Epoch 33/200
12s - loss: 0.7477 - acc: 0.7410
Epoch 34/200
12s - loss: 0.7364 - acc: 0.7441
Epoch 35/200
13s - loss: 0.7331 - acc: 0.7462
Epoch 36/200
13s - loss: 0.7261 - acc: 0.7475
Epoch 37/200
13s - loss: 0.7188 - acc: 0.7502
Epoch 38/200
13s - loss: 0.7125 - acc: 0.7529
Epoch 39/200
14s - loss: 0.7079 - acc: 0.7545
Epoch 40/200
12s - loss: 0.6959 - acc: 0.7567
Epoch 41/200
13s - loss: 0.6918 - acc: 0.7605
Epoch 42/200
14s - loss: 0.6880 - acc: 0.7596
Epoch 43/200
13s - loss: 0.6794 - acc: 0.7616
Epoch 44/200
12s - loss: 0.6730 - acc: 0.7648
Epoch 45/200
14s - loss: 0.6671 - acc: 0.7669
Epoch 46/200
13s - loss: 0.6681 - acc: 0.7661
Epoch 47/200
12s - loss: 0.6591 - acc: 0.7686
Epoch 48/200
12s - loss: 0.6524 - acc: 0.7717
Epoch 49/200
11s - loss: 0.6482 - acc: 0.7753
Epoch 50/200
10s - loss: 0.6406 - acc: 0.7754
Epoch 51/200
11s - loss: 0.6387 - acc: 0.7760
Epoch 52/200
11s - loss: 0.6326 - acc: 0.7759
Epoch 53/200
12s - loss: 0.6254 - acc: 0.7814
Epoch 54/200
12s - loss: 0.6260 - acc: 0.7826
Epoch 55/200
10s - loss: 0.6255 - acc: 0.7802
Epoch 56/200
10s - loss: 0.6203 - acc: 0.7809
Epoch 57/200
11s - loss: 0.6125 - acc: 0.7865
Epoch 58/200
12s - loss: 0.6019 - acc: 0.7887
Epoch 59/200
14s - loss: 0.5996 - acc: 0.7885
Epoch 60/200
14s - loss: 0.6060 - acc: 0.7855
Epoch 61/200
14s - loss: 0.5929 - acc: 0.7928
Epoch 62/200
13s - loss: 0.6022 - acc: 0.7870
Epoch 63/200
13s - loss: 0.5911 - acc: 0.7921
Epoch 64/200
11s - loss: 0.5970 - acc: 0.7880
Epoch 65/200
12s - loss: 0.5856 - acc: 0.7929
Epoch 66/200
13s - loss: 0.5807 - acc: 0.7946
Epoch 67/200
13s - loss: 0.5769 - acc: 0.7976
Epoch 68/200
13s - loss: 0.5773 - acc: 0.7947
Epoch 69/200
12s - loss: 0.5765 - acc: 0.7959
Epoch 70/200
13s - loss: 0.5621 - acc: 0.7989
Epoch 71/200
12s - loss: 0.5645 - acc: 0.8023
Epoch 72/200
12s - loss: 0.5608 - acc: 0.8032
Epoch 73/200
12s - loss: 0.5597 - acc: 0.8023
Epoch 74/200
12s - loss: 0.5542 - acc: 0.8036
Epoch 75/200
12s - loss: 0.5456 - acc: 0.8071
Epoch 76/200
12s - loss: 0.5481 - acc: 0.8056
Epoch 77/200
12s - loss: 0.5539 - acc: 0.8062
Epoch 78/200
12s - loss: 0.5483 - acc: 0.8067
Epoch 79/200
13s - loss: 0.5428 - acc: 0.8069
Epoch 80/200
12s - loss: 0.5390 - acc: 0.8069
Epoch 81/200
12s - loss: 0.5442 - acc: 0.8099
Epoch 82/200
12s - loss: 0.5390 - acc: 0.8102
Epoch 83/200
12s - loss: 0.5316 - acc: 0.8104
Epoch 84/200
13s - loss: 0.5350 - acc: 0.8100
Epoch 85/200
12s - loss: 0.5301 - acc: 0.8111
Epoch 86/200
11s - loss: 0.5294 - acc: 0.8127
Epoch 87/200
11s - loss: 0.5299 - acc: 0.8118
Epoch 88/200
11s - loss: 0.5249 - acc: 0.8153
Epoch 89/200
10s - loss: 0.5186 - acc: 0.8161
Epoch 90/200
10s - loss: 0.5133 - acc: 0.8201
Epoch 91/200
10s - loss: 0.5156 - acc: 0.8163
Epoch 92/200
10s - loss: 0.5179 - acc: 0.8157
Epoch 93/200
11s - loss: 0.5114 - acc: 0.8192
Epoch 94/200
10s - loss: 0.5092 - acc: 0.8180
Epoch 95/200
10s - loss: 0.5086 - acc: 0.8188
Epoch 96/200
11s - loss: 0.5041 - acc: 0.8206
Epoch 97/200
12s - loss: 0.5005 - acc: 0.8232
Epoch 98/200
12s - loss: 0.4990 - acc: 0.8217
Epoch 99/200
11s - loss: 0.5017 - acc: 0.8211
Epoch 100/200
11s - loss: 0.4921 - acc: 0.8234
Epoch 101/200
11s - loss: 0.4973 - acc: 0.8225
Epoch 102/200
11s - loss: 0.4939 - acc: 0.8240
Epoch 103/200
11s - loss: 0.4892 - acc: 0.8246
Epoch 104/200
12s - loss: 0.4939 - acc: 0.8249
Epoch 105/200
11s - loss: 0.4908 - acc: 0.8253
Epoch 106/200
11s - loss: 0.4947 - acc: 0.8230
Epoch 107/200
11s - loss: 0.4885 - acc: 0.8258
Epoch 108/200
12s - loss: 0.4887 - acc: 0.8257
Epoch 109/200
13s - loss: 0.4830 - acc: 0.8282
Epoch 110/200
12s - loss: 0.4834 - acc: 0.8295
Epoch 111/200
12s - loss: 0.4853 - acc: 0.8270
Epoch 112/200
12s - loss: 0.4821 - acc: 0.8298
Epoch 113/200
13s - loss: 0.4769 - acc: 0.8278
Epoch 114/200
12s - loss: 0.4760 - acc: 0.8298
Epoch 115/200
11s - loss: 0.4801 - acc: 0.8292
Epoch 116/200
10s - loss: 0.4743 - acc: 0.8322
Epoch 117/200
12s - loss: 0.4709 - acc: 0.8318
Epoch 118/200
12s - loss: 0.4695 - acc: 0.8326
Epoch 119/200
12s - loss: 0.4647 - acc: 0.8342
Epoch 120/200
12s - loss: 0.4652 - acc: 0.8340
Epoch 121/200
12s - loss: 0.4659 - acc: 0.8353
Epoch 122/200
14s - loss: 0.4552 - acc: 0.8384
Epoch 123/200
13s - loss: 0.4631 - acc: 0.8339
Epoch 124/200
12s - loss: 0.4611 - acc: 0.8355
Epoch 125/200
12s - loss: 0.4669 - acc: 0.8328
Epoch 126/200
12s - loss: 0.4683 - acc: 0.8328
Epoch 127/200
13s - loss: 0.4621 - acc: 0.8346
Epoch 128/200
12s - loss: 0.4610 - acc: 0.8365
Epoch 129/200
12s - loss: 0.4577 - acc: 0.8353
Epoch 130/200
13s - loss: 0.4631 - acc: 0.8334
Epoch 131/200
13s - loss: 0.4495 - acc: 0.8388
Epoch 132/200
13s - loss: 0.4593 - acc: 0.8351
Epoch 133/200
13s - loss: 0.4591 - acc: 0.8348
Epoch 134/200
12s - loss: 0.4496 - acc: 0.8376
Epoch 135/200
13s - loss: 0.4500 - acc: 0.8413
Epoch 136/200
13s - loss: 0.4507 - acc: 0.8392
Epoch 137/200
13s - loss: 0.4496 - acc: 0.8377
Epoch 138/200
13s - loss: 0.4482 - acc: 0.8410
Epoch 139/200
13s - loss: 0.4414 - acc: 0.8398
Epoch 140/200
13s - loss: 0.4490 - acc: 0.8409
Epoch 141/200
14s - loss: 0.4496 - acc: 0.8398
Epoch 142/200
13s - loss: 0.4458 - acc: 0.8408
Epoch 143/200
13s - loss: 0.4438 - acc: 0.8421
Epoch 144/200
12s - loss: 0.4435 - acc: 0.8416
Epoch 145/200
13s - loss: 0.4408 - acc: 0.8427
Epoch 146/200
13s - loss: 0.4380 - acc: 0.8444
Epoch 147/200
12s - loss: 0.4369 - acc: 0.8417
Epoch 148/200
12s - loss: 0.4380 - acc: 0.8415
Epoch 149/200
13s - loss: 0.4423 - acc: 0.8420
Epoch 150/200
12s - loss: 0.4387 - acc: 0.8430
Epoch 151/200
12s - loss: 0.4343 - acc: 0.8442
Epoch 152/200
13s - loss: 0.4310 - acc: 0.8465
Epoch 153/200
13s - loss: 0.4327 - acc: 0.8452
Epoch 154/200
12s - loss: 0.4313 - acc: 0.8463
Epoch 155/200
13s - loss: 0.4256 - acc: 0.8469
Epoch 156/200
11s - loss: 0.4375 - acc: 0.8443
Epoch 157/200
12s - loss: 0.4285 - acc: 0.8451
Epoch 158/200
13s - loss: 0.4274 - acc: 0.8460
Epoch 159/200
13s - loss: 0.4360 - acc: 0.8441
Epoch 160/200
12s - loss: 0.4337 - acc: 0.8444
Epoch 161/200
11s - loss: 0.4300 - acc: 0.8451
Epoch 162/200
13s - loss: 0.4281 - acc: 0.8466
Epoch 163/200
12s - loss: 0.4250 - acc: 0.8475
Epoch 164/200
14s - loss: 0.4212 - acc: 0.8477
Epoch 165/200
13s - loss: 0.4217 - acc: 0.8495
Epoch 166/200
13s - loss: 0.4242 - acc: 0.8483
Epoch 167/200
11s - loss: 0.4250 - acc: 0.8478
Epoch 168/200
13s - loss: 0.4284 - acc: 0.8484
Epoch 169/200
13s - loss: 0.4236 - acc: 0.8500
Epoch 170/200
14s - loss: 0.4202 - acc: 0.8498
Epoch 171/200
13s - loss: 0.4233 - acc: 0.8471
Epoch 172/200
13s - loss: 0.4209 - acc: 0.8471
Epoch 173/200
13s - loss: 0.4166 - acc: 0.8497
Epoch 174/200
13s - loss: 0.4197 - acc: 0.8507
Epoch 175/200
13s - loss: 0.4190 - acc: 0.8505
Epoch 176/200
12s - loss: 0.4170 - acc: 0.8498
Epoch 177/200
11s - loss: 0.4199 - acc: 0.8503
Epoch 178/200
13s - loss: 0.4148 - acc: 0.8515
Epoch 179/200
14s - loss: 0.4120 - acc: 0.8513
Epoch 180/200
14s - loss: 0.4111 - acc: 0.8524
Epoch 181/200
13s - loss: 0.4141 - acc: 0.8505
Epoch 182/200
14s - loss: 0.4154 - acc: 0.8524
Epoch 183/200
13s - loss: 0.4106 - acc: 0.8531
Epoch 184/200
14s - loss: 0.4092 - acc: 0.8543
Epoch 185/200
14s - loss: 0.4148 - acc: 0.8527
Epoch 186/200
12s - loss: 0.4123 - acc: 0.8524
Epoch 187/200
12s - loss: 0.4077 - acc: 0.8527
Epoch 188/200
13s - loss: 0.4006 - acc: 0.8566
Epoch 189/200
13s - loss: 0.4094 - acc: 0.8531
Epoch 190/200
13s - loss: 0.4075 - acc: 0.8547
Epoch 191/200
14s - loss: 0.4055 - acc: 0.8560
Epoch 192/200
14s - loss: 0.4017 - acc: 0.8566
Epoch 193/200
12s - loss: 0.4019 - acc: 0.8550
Epoch 194/200
13s - loss: 0.4071 - acc: 0.8544
Epoch 195/200
13s - loss: 0.4086 - acc: 0.8520
Epoch 196/200
13s - loss: 0.4025 - acc: 0.8550
Epoch 197/200
11s - loss: 0.4101 - acc: 0.8545
Epoch 198/200
10s - loss: 0.4083 - acc: 0.8551
Epoch 199/200
10s - loss: 0.4013 - acc: 0.8551
Epoch 200/200
10s - loss: 0.4047 - acc: 0.8562
Out[2]:
<keras.callbacks.History at 0x25d78ef6898>
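
model.fit returns a keras.callbacks.History object (the value echoed in Out[2]); its history attribute is a dict of per-epoch metrics. A minimal sketch for plotting the training curves, assuming the fit call above is instead captured as hist = model.fit(...):

In [ ]:
import matplotlib.pyplot as plt

# Training curves recorded by the History callback
# (hypothetical `hist`; the run above discarded fit's return value)
plt.plot(hist.history['loss'], label='training loss')
plt.plot(hist.history['acc'], label='training accuracy')
plt.xlabel('epoch')
plt.legend()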

In [59]:
%matplotlib inline
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score

resp = model.predict_classes(x_test)

respLab = y_test[:, 0]  # flatten the (N, 1) integer label array

mat_conf_teste = np.zeros((10, 10))

print(" - Accuracy on test dataset {0}".format(accuracy_score(respLab, resp)))

for i in np.arange(resp.shape[0]):
    mat_conf_teste[respLab[i], resp[i]] += 1

plt.pcolor(mat_conf_teste,cmap="jet")
plt.colorbar()
plt.title("Confusion matrix for the test set")


 9344/10000 [===========================>..] - ETA: 0s - Accuracy on test dataset 0.681
Out[59]:
<matplotlib.text.Text at 0x25eac791160>

In [71]:
resp = model.predict_classes(x_train)

respLab = np.argmax(y_train, axis=1)  # recover integer labels from the one-hot vectors

mat_conf_treino = np.zeros((10, 10))

print("Accuracy on training dataset {0}".format(accuracy_score(respLab, resp)))

for i in np.arange(resp.shape[0]):
    mat_conf_treino[respLab[i], resp[i]] += 1

plt.pcolor(mat_conf_treino,cmap="jet")
plt.colorbar()
plt.title("Confusion matrix for the training set")


49504/50000 [============================>.] - ETA: 0sAccuracy on training dataset 0.96358
Out[71]:
<matplotlib.text.Text at 0x25ead379c50>
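
The gap between training accuracy (0.96358) and test accuracy (0.681) points to substantial overfitting. A standard remedy is to hold out part of the training data and stop once validation loss stops improving; a minimal sketch with Keras's EarlyStopping callback (not part of the run above):

In [ ]:
from keras.callbacks import EarlyStopping

# Stop once validation loss has failed to improve for 10 consecutive epochs
stopper = EarlyStopping(monitor='val_loss', patience=10)
model.fit(x_train, y_train, batch_size=batch_size, epochs=nb_epoch,
          validation_split=0.1, callbacks=[stopper], verbose=2)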
