In [1]:
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD
import cv2
import numpy as np
import pandas as pd


Using TensorFlow backend.

In [2]:
''' basic package '''
import os
# Tell the system which GPU(s) should be visible. E.g. on a machine with 8 GPUs, the setting below exposes only the first one.
# Without this setting, TensorFlow grabs every GPU at run time by default.
# To see how many GPUs the machine has and their current status, run "nvidia-smi" in a terminal.
# If the machine has only one GPU available, this setting can be skipped.
os.environ["CUDA_VISIBLE_DEVICES"] = '0'

In [3]:
train_path = "/data/examples/may_the_4_be_with_u/where_am_i/train"
test_path = "/data/examples/may_the_4_be_with_u/where_am_i/testset"

In [4]:
img_cat = pd.read_csv("/data/examples/may_the_4_be_with_u/where_am_i/mid_term_mapping.txt", 
                      encoding="utf-8", header=None)
img_cat.columns = ["dirs","class"]
img_cat # class: 0-14, 15 categories in total


Out[4]:
dirs class
0 CALsuburb 9
1 PARoffice 7
2 bedroom 12
3 coast 10
4 forest 4
5 highway 14
6 industrial 2
7 insidecity 3
8 kitchen 0
9 livingroom 5
10 mountain 8
11 opencountry 6
12 store 11
13 street 1
14 tallbuilding 13
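
For decoding predicted class indices back to scene names later on, the same mapping can be kept as a plain dictionary; a small sketch (the variable name is just illustrative):

# Map class index -> scene/directory name, e.g. class_to_name[9] == 'CALsuburb'
class_to_name = dict(zip(img_cat["class"], img_cat["dirs"]))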

In [5]:
import os

train_img_files = pd.DataFrame(columns=["dirs","files","class"])

for index, row in img_cat.iterrows():
    #print(row['dirs'], row['class'])
    for _root, _dirs, _files in os.walk(train_path+"/"+row['dirs']):
        temp = pd.DataFrame({ 'dirs':row['dirs'], 'files': _files, 'class':row['class']})
        train_img_files = pd.concat([train_img_files, temp], axis=0)

train_img_files.head(3)


Out[5]:
class dirs files
0 9.0 CALsuburb image_0141.jpg
1 9.0 CALsuburb image_0117.jpg
2 9.0 CALsuburb image_0016.jpg

In [6]:
len(train_img_files)


Out[6]:
2985

In [7]:
train_img_files = train_img_files.reset_index(drop=True)

In [8]:
import os

test_img_files = []

for _root, _dirs, _files in os.walk(test_path):
    test_img_files = _files
    
test_img_files[0:3]


Out[8]:
['7dc74b51e229d841272c0795cffed857d0e6038a4be0c9fe51bce09b7bcb1cac.jpg',
 '1b1940d44fe4f5b76e89f876de3d2514a51b50057cae3e7ace84d3299aeaecbf.jpg',
 '7cb3479dac3e7dcc69241f4cd957380a48399b1aa0480db310f55dfc40a18960.jpg']

In [9]:
len(test_img_files)


Out[9]:
1500

In [10]:
from keras.preprocessing import image

IMG_WIDTH = 224
IMG_HEIGHT = 224
IMG_CHANNELS = 3
ttl_train_imgs = len(train_img_files)
ttl_test_imgs = len(test_img_files)

train_X = np.zeros((ttl_train_imgs, IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)
test_X = np.zeros((ttl_test_imgs, IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)

In [11]:
from keras.applications.vgg19 import preprocess_input

for index, row in train_img_files.iterrows():
    img = image.load_img(train_path+"/"+row['dirs']+"/"+row['files'], target_size=(IMG_WIDTH, IMG_HEIGHT)) # for vgg input size: 224 x 224
    temp = image.img_to_array(img)
    temp = np.expand_dims(temp, axis=0)
    train_X[index] = preprocess_input(temp)
    img.close()
    
len(train_X)


Out[11]:
2985

In [12]:
for index in range(len(test_img_files)):
    img = image.load_img(test_path+"/"+test_img_files[index], target_size=(IMG_WIDTH, IMG_HEIGHT)) # for vgg input size: 224 x 224
    temp = image.img_to_array(img)
    temp = np.expand_dims(temp, axis=0)
    test_X[index] = preprocess_input(temp)
    img.close()
    
len(test_X)


Out[12]:
1500

In [13]:
import gc
del temp
gc.collect()


Out[13]:
198

In [14]:
train_X = train_X.astype('float32') / 255.0
test_X = test_X.astype('float32') / 255.0

In [15]:
train_X[:,:,0] = (train_X[:,:,0]-train_X[:,:,0].mean())/train_X[:,:,0].std()
train_X[:,:,1] = (train_X[:,:,1]-train_X[:,:,1].mean())/train_X[:,:,1].std()
train_X[:,:,2] = (train_X[:,:,2]-train_X[:,:,2].mean())/train_X[:,:,2].std()

test_X[:,:,0] = (test_X[:,:,0]-test_X[:,:,0].mean())/test_X[:,:,0].std()
test_X[:,:,1] = (test_X[:,:,1]-test_X[:,:,1].mean())/test_X[:,:,1].std()
test_X[:,:,2] = (test_X[:,:,2]-test_X[:,:,2].mean())/test_X[:,:,2].std()
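
Note that these arrays are NHWC with shape (N, 224, 224, 3), so a slice like [:,:,0] addresses a spatial axis rather than a colour channel. If per-channel standardization is the intent, indexing the last axis makes that explicit; a sketch, not what was executed above:

# Standardize each colour channel (last axis) with its own mean and std.
for c in range(IMG_CHANNELS):
    train_X[..., c] = (train_X[..., c] - train_X[..., c].mean()) / train_X[..., c].std()
    test_X[..., c] = (test_X[..., c] - test_X[..., c].mean()) / test_X[..., c].std()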

In [16]:
X_img_train_normalize = train_X
X_img_test_normalize = test_X

In [17]:
from keras.utils import np_utils
y_label_train_OneHot = np_utils.to_categorical(train_img_files["class"].astype('int'))
y_label_train_OneHot.shape


Out[17]:
(2985, 15)

build model


In [19]:
from keras.applications.vgg19 import VGG19
from keras.preprocessing import image
from keras.applications.vgg19 import preprocess_input
from keras.layers import Input, Flatten, Dense, Dropout
from keras.models import Model
import numpy as np

In [20]:
model_vgg19_conv = VGG19(weights='imagenet', include_top=False, classes=15)
model_vgg19_conv.summary()


Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5
80142336/80134624 [==============================] - 22s 0us/step
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         (None, None, None, 3)     0         
_________________________________________________________________
block1_conv1 (Conv2D)        (None, None, None, 64)    1792      
_________________________________________________________________
block1_conv2 (Conv2D)        (None, None, None, 64)    36928     
_________________________________________________________________
block1_pool (MaxPooling2D)   (None, None, None, 64)    0         
_________________________________________________________________
block2_conv1 (Conv2D)        (None, None, None, 128)   73856     
_________________________________________________________________
block2_conv2 (Conv2D)        (None, None, None, 128)   147584    
_________________________________________________________________
block2_pool (MaxPooling2D)   (None, None, None, 128)   0         
_________________________________________________________________
block3_conv1 (Conv2D)        (None, None, None, 256)   295168    
_________________________________________________________________
block3_conv2 (Conv2D)        (None, None, None, 256)   590080    
_________________________________________________________________
block3_conv3 (Conv2D)        (None, None, None, 256)   590080    
_________________________________________________________________
block3_conv4 (Conv2D)        (None, None, None, 256)   590080    
_________________________________________________________________
block3_pool (MaxPooling2D)   (None, None, None, 256)   0         
_________________________________________________________________
block4_conv1 (Conv2D)        (None, None, None, 512)   1180160   
_________________________________________________________________
block4_conv2 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block4_conv3 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block4_conv4 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block4_pool (MaxPooling2D)   (None, None, None, 512)   0         
_________________________________________________________________
block5_conv1 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block5_conv2 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block5_conv3 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block5_conv4 (Conv2D)        (None, None, None, 512)   2359808   
_________________________________________________________________
block5_pool (MaxPooling2D)   (None, None, None, 512)   0         
=================================================================
Total params: 20,024,384
Trainable params: 20,024,384
Non-trainable params: 0
_________________________________________________________________
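
All 20,024,384 convolutional parameters are left trainable in this notebook. A common transfer-learning alternative is to freeze the pre-trained base and train only the new classifier head; a sketch, not applied here:

# Optional: freeze the pre-trained VGG19 convolutional base.
for layer in model_vgg19_conv.layers:
    layer.trainable = False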

In [21]:
#Create your own input format (here 224x224x3)
input = Input(shape=(IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS),name = 'image_input')

In [22]:
#Use the generated model 
output_vgg19_conv = model_vgg19_conv(input)

In [23]:
#Add the fully-connected layers 
x = Flatten(name='flatten')(output_vgg19_conv)
x = Dense(512, activation='relu', name='fc1')(x)
x = Dense(256, activation='relu', name='fc2')(x)
x = Dense(15, activation='softmax', name='predictions')(x)
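
Dropout is imported above but not used in this head. For reference, a regularized variant could look like the sketch below; it is not the configuration trained in the following cells:

# Hypothetical head with Dropout between the fully-connected layers.
x = Flatten(name='flatten')(output_vgg19_conv)
x = Dense(512, activation='relu', name='fc1')(x)
x = Dropout(0.5)(x)
x = Dense(256, activation='relu', name='fc2')(x)
x = Dropout(0.5)(x)
x = Dense(15, activation='softmax', name='predictions')(x)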

In [24]:
#Create your own model 
my_model = Model(input=input, output=x)


/opt/conda/lib/python3.6/site-packages/ipykernel_launcher.py:2: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor("im..., outputs=Tensor("pr...)`
  

In [25]:
my_model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
image_input (InputLayer)     (None, 224, 224, 3)       0         
_________________________________________________________________
vgg19 (Model)                multiple                  20024384  
_________________________________________________________________
flatten (Flatten)            (None, 25088)             0         
_________________________________________________________________
fc1 (Dense)                  (None, 512)               12845568  
_________________________________________________________________
fc2 (Dense)                  (None, 256)               131328    
_________________________________________________________________
predictions (Dense)          (None, 15)                3855      
=================================================================
Total params: 33,005,135
Trainable params: 33,005,135
Non-trainable params: 0
_________________________________________________________________

training model


In [26]:
from keras import optimizers

# Adam defaults for reference: lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0
opt_adam = optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.99999, decay=0.0001)

In [27]:
my_model.compile(loss='categorical_crossentropy', optimizer= opt_adam, metrics=['accuracy'])

In [28]:
from keras.preprocessing.image import ImageDataGenerator

print('Using real-time data augmentation.')
# This will do preprocessing and realtime data augmentation:
datagen = ImageDataGenerator(
    rotation_range=30,  # randomly rotate images by up to 30 degrees
    width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
    height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
    horizontal_flip=True,  # randomly flip images horizontally
    vertical_flip=True)  # randomly flip images vertically


Using real-time data augmentation.
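
To sanity-check what the generator produces, one augmented batch can be drawn and displayed; a minimal sketch (the images were standardized above, so values are clipped for display):

import matplotlib.pyplot as plt

# Draw a single augmented batch and show its first nine images.
batch_X, batch_y = next(datagen.flow(X_img_train_normalize, y_label_train_OneHot, batch_size=9))
fig, axes = plt.subplots(3, 3, figsize=(6, 6))
for img, ax in zip(batch_X, axes.ravel()):
    ax.imshow(np.clip(img, 0, 1))  # standardized floats, clipped into a displayable range
    ax.axis('off')
plt.show()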

In [29]:
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split( X_img_train_normalize, y_label_train_OneHot, 
                                                    test_size = 0.3, random_state=100)

In [30]:
from keras.callbacks import EarlyStopping

# Early stopping: stop training if val_acc does not improve for 50 consecutive epochs.
earlystop = EarlyStopping(monitor='val_acc', patience=50, verbose=0)

# Fit the model on the batches generated by datagen.flow().
train_history = my_model.fit_generator(
                    datagen.flow(X_train, y_train, batch_size=32),
                    epochs=200, workers=-1,
                    validation_data=(X_test, y_test),
                    callbacks=[earlystop]
                    )


Epoch 1/200
66/66 [==============================] - 55s 838ms/step - loss: 2.8174 - acc: 0.0987 - val_loss: 2.6728 - val_acc: 0.0926
Epoch 2/200
66/66 [==============================] - 43s 650ms/step - loss: 2.6172 - acc: 0.1153 - val_loss: 2.6808 - val_acc: 0.0893
Epoch 3/200
66/66 [==============================] - 42s 641ms/step - loss: 2.5492 - acc: 0.1435 - val_loss: 2.4234 - val_acc: 0.2009
Epoch 4/200
66/66 [==============================] - 43s 658ms/step - loss: 2.3214 - acc: 0.2347 - val_loss: 2.2646 - val_acc: 0.2723
Epoch 5/200
66/66 [==============================] - 43s 655ms/step - loss: 2.2758 - acc: 0.2498 - val_loss: 2.3045 - val_acc: 0.2478
Epoch 6/200
66/66 [==============================] - 43s 657ms/step - loss: 2.1150 - acc: 0.2903 - val_loss: 2.1402 - val_acc: 0.2935
Epoch 7/200
66/66 [==============================] - 42s 643ms/step - loss: 2.0598 - acc: 0.3024 - val_loss: 2.0574 - val_acc: 0.3382
Epoch 8/200
66/66 [==============================] - 43s 648ms/step - loss: 2.0065 - acc: 0.3282 - val_loss: 2.2700 - val_acc: 0.2533
Epoch 9/200
66/66 [==============================] - 43s 649ms/step - loss: 2.0331 - acc: 0.3242 - val_loss: 1.8881 - val_acc: 0.3493
Epoch 10/200
66/66 [==============================] - 42s 641ms/step - loss: 1.9502 - acc: 0.3272 - val_loss: 1.9816 - val_acc: 0.3382
Epoch 11/200
66/66 [==============================] - 43s 654ms/step - loss: 1.8998 - acc: 0.3486 - val_loss: 1.8024 - val_acc: 0.3951
Epoch 12/200
66/66 [==============================] - 42s 640ms/step - loss: 1.8853 - acc: 0.3549 - val_loss: 1.8101 - val_acc: 0.3761
Epoch 13/200
66/66 [==============================] - 43s 645ms/step - loss: 1.7899 - acc: 0.3725 - val_loss: 1.6788 - val_acc: 0.4118
Epoch 14/200
66/66 [==============================] - 43s 644ms/step - loss: 1.7588 - acc: 0.4139 - val_loss: 1.9704 - val_acc: 0.3482
Epoch 15/200
66/66 [==============================] - 45s 688ms/step - loss: 1.6888 - acc: 0.4125 - val_loss: 1.7658 - val_acc: 0.4107
Epoch 16/200
66/66 [==============================] - 43s 652ms/step - loss: 1.7065 - acc: 0.3995 - val_loss: 1.6829 - val_acc: 0.4353
Epoch 17/200
66/66 [==============================] - 43s 650ms/step - loss: 1.6647 - acc: 0.4130 - val_loss: 1.8379 - val_acc: 0.3839
Epoch 18/200
66/66 [==============================] - 46s 695ms/step - loss: 1.6542 - acc: 0.4213 - val_loss: 1.7409 - val_acc: 0.3839
Epoch 19/200
66/66 [==============================] - 42s 638ms/step - loss: 1.6206 - acc: 0.4355 - val_loss: 1.6217 - val_acc: 0.4408
Epoch 20/200
66/66 [==============================] - 43s 652ms/step - loss: 1.5923 - acc: 0.4423 - val_loss: 1.5137 - val_acc: 0.4688
Epoch 21/200
66/66 [==============================] - 47s 709ms/step - loss: 1.5816 - acc: 0.4362 - val_loss: 1.5884 - val_acc: 0.4587
Epoch 22/200
66/66 [==============================] - 43s 658ms/step - loss: 1.5695 - acc: 0.4549 - val_loss: 1.5573 - val_acc: 0.4453
Epoch 23/200
66/66 [==============================] - 43s 652ms/step - loss: 1.5119 - acc: 0.4577 - val_loss: 1.5148 - val_acc: 0.4688
Epoch 24/200
66/66 [==============================] - 43s 653ms/step - loss: 1.4725 - acc: 0.4746 - val_loss: 1.5659 - val_acc: 0.4431
Epoch 25/200
66/66 [==============================] - 42s 642ms/step - loss: 1.4552 - acc: 0.4705 - val_loss: 1.5587 - val_acc: 0.4542
Epoch 26/200
66/66 [==============================] - 43s 648ms/step - loss: 1.4392 - acc: 0.4817 - val_loss: 1.6740 - val_acc: 0.4040
Epoch 27/200
66/66 [==============================] - 43s 647ms/step - loss: 1.4351 - acc: 0.4966 - val_loss: 1.5192 - val_acc: 0.4364
Epoch 28/200
66/66 [==============================] - 42s 644ms/step - loss: 1.4675 - acc: 0.4760 - val_loss: 1.4428 - val_acc: 0.4821
Epoch 29/200
66/66 [==============================] - 43s 648ms/step - loss: 1.3996 - acc: 0.4902 - val_loss: 1.5300 - val_acc: 0.4609
Epoch 30/200
66/66 [==============================] - 43s 648ms/step - loss: 1.4551 - acc: 0.4821 - val_loss: 1.3712 - val_acc: 0.5033
Epoch 31/200
66/66 [==============================] - 42s 642ms/step - loss: 1.3916 - acc: 0.5193 - val_loss: 1.4572 - val_acc: 0.4621
Epoch 32/200
66/66 [==============================] - 43s 653ms/step - loss: 1.4498 - acc: 0.4686 - val_loss: 1.3428 - val_acc: 0.5145
Epoch 33/200
66/66 [==============================] - 43s 653ms/step - loss: 1.3755 - acc: 0.5004 - val_loss: 1.5365 - val_acc: 0.4743
Epoch 34/200
66/66 [==============================] - 43s 648ms/step - loss: 1.3829 - acc: 0.5068 - val_loss: 1.3267 - val_acc: 0.5324
Epoch 35/200
66/66 [==============================] - 43s 651ms/step - loss: 1.3103 - acc: 0.5264 - val_loss: 1.4344 - val_acc: 0.5112
Epoch 36/200
66/66 [==============================] - 42s 641ms/step - loss: 1.2963 - acc: 0.5307 - val_loss: 1.5461 - val_acc: 0.4833
Epoch 37/200
66/66 [==============================] - 43s 645ms/step - loss: 1.3352 - acc: 0.5174 - val_loss: 1.4012 - val_acc: 0.4967
Epoch 38/200
66/66 [==============================] - 43s 651ms/step - loss: 1.3049 - acc: 0.5458 - val_loss: 1.3473 - val_acc: 0.5290
Epoch 39/200
66/66 [==============================] - 42s 639ms/step - loss: 1.2531 - acc: 0.5470 - val_loss: 1.2917 - val_acc: 0.5290
Epoch 40/200
66/66 [==============================] - 43s 653ms/step - loss: 1.2380 - acc: 0.5494 - val_loss: 1.2976 - val_acc: 0.5324
Epoch 41/200
66/66 [==============================] - 43s 647ms/step - loss: 1.2330 - acc: 0.5517 - val_loss: 1.4116 - val_acc: 0.4978
Epoch 42/200
66/66 [==============================] - 43s 653ms/step - loss: 1.2481 - acc: 0.5382 - val_loss: 1.2508 - val_acc: 0.5357
Epoch 43/200
66/66 [==============================] - 42s 642ms/step - loss: 1.2389 - acc: 0.5647 - val_loss: 1.4801 - val_acc: 0.5089
Epoch 44/200
66/66 [==============================] - 42s 638ms/step - loss: 1.2580 - acc: 0.5468 - val_loss: 1.3400 - val_acc: 0.5033
Epoch 45/200
66/66 [==============================] - 42s 638ms/step - loss: 1.1896 - acc: 0.5626 - val_loss: 1.4202 - val_acc: 0.5212
Epoch 46/200
66/66 [==============================] - 43s 645ms/step - loss: 1.1940 - acc: 0.5704 - val_loss: 1.3535 - val_acc: 0.5033
Epoch 47/200
66/66 [==============================] - 43s 646ms/step - loss: 1.1536 - acc: 0.5794 - val_loss: 1.2105 - val_acc: 0.5603
Epoch 48/200
66/66 [==============================] - 43s 651ms/step - loss: 1.1714 - acc: 0.5719 - val_loss: 1.3524 - val_acc: 0.5279
Epoch 49/200
66/66 [==============================] - 43s 649ms/step - loss: 1.2431 - acc: 0.5524 - val_loss: 1.2409 - val_acc: 0.5737
Epoch 50/200
66/66 [==============================] - 47s 714ms/step - loss: 1.2017 - acc: 0.5636 - val_loss: 1.2507 - val_acc: 0.5647
Epoch 51/200
66/66 [==============================] - 43s 648ms/step - loss: 1.1679 - acc: 0.5738 - val_loss: 1.2635 - val_acc: 0.5580
Epoch 52/200
66/66 [==============================] - 43s 644ms/step - loss: 1.1987 - acc: 0.5832 - val_loss: 1.2716 - val_acc: 0.5513
Epoch 53/200
66/66 [==============================] - 43s 644ms/step - loss: 1.1769 - acc: 0.5678 - val_loss: 1.1997 - val_acc: 0.5580
Epoch 54/200
66/66 [==============================] - 42s 639ms/step - loss: 1.1749 - acc: 0.5794 - val_loss: 1.1614 - val_acc: 0.5949
Epoch 55/200
66/66 [==============================] - 43s 648ms/step - loss: 1.1140 - acc: 0.5963 - val_loss: 1.2528 - val_acc: 0.5725
Epoch 56/200
66/66 [==============================] - 43s 644ms/step - loss: 1.0973 - acc: 0.5981 - val_loss: 1.1937 - val_acc: 0.5804
Epoch 57/200
66/66 [==============================] - 43s 653ms/step - loss: 1.1294 - acc: 0.5908 - val_loss: 1.2054 - val_acc: 0.5770
Epoch 58/200
66/66 [==============================] - 43s 645ms/step - loss: 1.1258 - acc: 0.5972 - val_loss: 1.3353 - val_acc: 0.5558
Epoch 59/200
66/66 [==============================] - 42s 643ms/step - loss: 1.0851 - acc: 0.6093 - val_loss: 1.2216 - val_acc: 0.5558
Epoch 60/200
66/66 [==============================] - 42s 640ms/step - loss: 1.0837 - acc: 0.6083 - val_loss: 1.1644 - val_acc: 0.5904
Epoch 61/200
66/66 [==============================] - 45s 681ms/step - loss: 1.0992 - acc: 0.6000 - val_loss: 1.3661 - val_acc: 0.5424
Epoch 62/200
66/66 [==============================] - 44s 667ms/step - loss: 1.1348 - acc: 0.6031 - val_loss: 1.2940 - val_acc: 0.5413
Epoch 63/200
66/66 [==============================] - 43s 651ms/step - loss: 1.1093 - acc: 0.5958 - val_loss: 1.2018 - val_acc: 0.5558
Epoch 64/200
66/66 [==============================] - 42s 644ms/step - loss: 1.1085 - acc: 0.6048 - val_loss: 1.2982 - val_acc: 0.5502
Epoch 65/200
66/66 [==============================] - 43s 648ms/step - loss: 1.0859 - acc: 0.6171 - val_loss: 1.2783 - val_acc: 0.5335
Epoch 66/200
66/66 [==============================] - 43s 644ms/step - loss: 1.0508 - acc: 0.6114 - val_loss: 1.2262 - val_acc: 0.5636
Epoch 67/200
66/66 [==============================] - 42s 644ms/step - loss: 1.0918 - acc: 0.6133 - val_loss: 1.1792 - val_acc: 0.5770
Epoch 68/200
66/66 [==============================] - 44s 660ms/step - loss: 1.0335 - acc: 0.6346 - val_loss: 1.1325 - val_acc: 0.6083
Epoch 69/200
66/66 [==============================] - 44s 667ms/step - loss: 1.0531 - acc: 0.6169 - val_loss: 1.1484 - val_acc: 0.5815
Epoch 70/200
66/66 [==============================] - 44s 665ms/step - loss: 1.0348 - acc: 0.6308 - val_loss: 1.2533 - val_acc: 0.5525
Epoch 71/200
66/66 [==============================] - 42s 639ms/step - loss: 1.0823 - acc: 0.6024 - val_loss: 1.2264 - val_acc: 0.5703
Epoch 72/200
66/66 [==============================] - 43s 645ms/step - loss: 1.0412 - acc: 0.6263 - val_loss: 1.1275 - val_acc: 0.5882
Epoch 73/200
66/66 [==============================] - 43s 649ms/step - loss: 0.9950 - acc: 0.6512 - val_loss: 1.1297 - val_acc: 0.5926
Epoch 74/200
66/66 [==============================] - 42s 637ms/step - loss: 1.0013 - acc: 0.6408 - val_loss: 1.2012 - val_acc: 0.5826
Epoch 75/200
66/66 [==============================] - 43s 649ms/step - loss: 1.0058 - acc: 0.6386 - val_loss: 1.1634 - val_acc: 0.5938
Epoch 76/200
66/66 [==============================] - 43s 649ms/step - loss: 0.9776 - acc: 0.6484 - val_loss: 1.1726 - val_acc: 0.5748
Epoch 77/200
66/66 [==============================] - 44s 663ms/step - loss: 0.9694 - acc: 0.6538 - val_loss: 1.1412 - val_acc: 0.5960
Epoch 78/200
66/66 [==============================] - 42s 641ms/step - loss: 1.0055 - acc: 0.6391 - val_loss: 1.2064 - val_acc: 0.5569
Epoch 79/200
66/66 [==============================] - 42s 643ms/step - loss: 0.9766 - acc: 0.6346 - val_loss: 1.2556 - val_acc: 0.5625
Epoch 80/200
66/66 [==============================] - 43s 655ms/step - loss: 0.9389 - acc: 0.6521 - val_loss: 1.2257 - val_acc: 0.5737
Epoch 81/200
66/66 [==============================] - 42s 641ms/step - loss: 0.9310 - acc: 0.6635 - val_loss: 1.1838 - val_acc: 0.5592
Epoch 82/200
66/66 [==============================] - 42s 640ms/step - loss: 0.9408 - acc: 0.6533 - val_loss: 1.1171 - val_acc: 0.5971
Epoch 83/200
66/66 [==============================] - 43s 645ms/step - loss: 0.9672 - acc: 0.6434 - val_loss: 1.3080 - val_acc: 0.5357
Epoch 84/200
66/66 [==============================] - 42s 641ms/step - loss: 0.9220 - acc: 0.6661 - val_loss: 1.1217 - val_acc: 0.6038
Epoch 85/200
66/66 [==============================] - 43s 649ms/step - loss: 0.9281 - acc: 0.6635 - val_loss: 1.1131 - val_acc: 0.6049
Epoch 86/200
66/66 [==============================] - 43s 645ms/step - loss: 0.9447 - acc: 0.6708 - val_loss: 1.1532 - val_acc: 0.5960
Epoch 87/200
66/66 [==============================] - 43s 652ms/step - loss: 0.9073 - acc: 0.6614 - val_loss: 1.0905 - val_acc: 0.6038
Epoch 88/200
66/66 [==============================] - 43s 652ms/step - loss: 0.8914 - acc: 0.6768 - val_loss: 1.1375 - val_acc: 0.5938
Epoch 89/200
66/66 [==============================] - 47s 705ms/step - loss: 0.9473 - acc: 0.6635 - val_loss: 1.1025 - val_acc: 0.5826
Epoch 90/200
66/66 [==============================] - 43s 649ms/step - loss: 0.8794 - acc: 0.6794 - val_loss: 1.1401 - val_acc: 0.5960
Epoch 91/200
66/66 [==============================] - 43s 650ms/step - loss: 0.9176 - acc: 0.6659 - val_loss: 1.2425 - val_acc: 0.5792
Epoch 92/200
66/66 [==============================] - 42s 640ms/step - loss: 0.8908 - acc: 0.6661 - val_loss: 1.1263 - val_acc: 0.5960
Epoch 93/200
66/66 [==============================] - 42s 641ms/step - loss: 0.8493 - acc: 0.6960 - val_loss: 1.1763 - val_acc: 0.5826
Epoch 94/200
66/66 [==============================] - 43s 650ms/step - loss: 0.8934 - acc: 0.6718 - val_loss: 1.1875 - val_acc: 0.5971
Epoch 95/200
66/66 [==============================] - 42s 639ms/step - loss: 0.8562 - acc: 0.6834 - val_loss: 1.1148 - val_acc: 0.6150
Epoch 96/200
66/66 [==============================] - 42s 638ms/step - loss: 0.8769 - acc: 0.6808 - val_loss: 1.0965 - val_acc: 0.5993
Epoch 97/200
66/66 [==============================] - 42s 642ms/step - loss: 0.8295 - acc: 0.6919 - val_loss: 1.0669 - val_acc: 0.6105
Epoch 98/200
66/66 [==============================] - 47s 712ms/step - loss: 0.8582 - acc: 0.6914 - val_loss: 1.1127 - val_acc: 0.6261
Epoch 99/200
66/66 [==============================] - 46s 695ms/step - loss: 0.7988 - acc: 0.7232 - val_loss: 1.1074 - val_acc: 0.6272
Epoch 100/200
66/66 [==============================] - 43s 646ms/step - loss: 0.8554 - acc: 0.6936 - val_loss: 1.1287 - val_acc: 0.6138
Epoch 101/200
66/66 [==============================] - 43s 647ms/step - loss: 0.8219 - acc: 0.6997 - val_loss: 1.1828 - val_acc: 0.5859
Epoch 102/200
66/66 [==============================] - 43s 654ms/step - loss: 0.8419 - acc: 0.7057 - val_loss: 1.1263 - val_acc: 0.6116
Epoch 103/200
66/66 [==============================] - 47s 708ms/step - loss: 0.8173 - acc: 0.7118 - val_loss: 1.1756 - val_acc: 0.5882
Epoch 104/200
66/66 [==============================] - 44s 661ms/step - loss: 0.8431 - acc: 0.6905 - val_loss: 1.0569 - val_acc: 0.6295
Epoch 105/200
66/66 [==============================] - 43s 655ms/step - loss: 0.8238 - acc: 0.7094 - val_loss: 1.1453 - val_acc: 0.6116
Epoch 106/200
66/66 [==============================] - 42s 638ms/step - loss: 0.7775 - acc: 0.7267 - val_loss: 1.1693 - val_acc: 0.6150
Epoch 107/200
66/66 [==============================] - 43s 650ms/step - loss: 0.8675 - acc: 0.6858 - val_loss: 1.0846 - val_acc: 0.6205
Epoch 108/200
66/66 [==============================] - 43s 650ms/step - loss: 0.7684 - acc: 0.7187 - val_loss: 1.1285 - val_acc: 0.6194
Epoch 109/200
66/66 [==============================] - 46s 699ms/step - loss: 0.7668 - acc: 0.7173 - val_loss: 1.0835 - val_acc: 0.6350
Epoch 110/200
66/66 [==============================] - 46s 697ms/step - loss: 0.7768 - acc: 0.7177 - val_loss: 1.2088 - val_acc: 0.5949
Epoch 111/200
66/66 [==============================] - 43s 653ms/step - loss: 0.7513 - acc: 0.7227 - val_loss: 1.2099 - val_acc: 0.6172
Epoch 112/200
66/66 [==============================] - 43s 655ms/step - loss: 0.7886 - acc: 0.7009 - val_loss: 1.1512 - val_acc: 0.6138
Epoch 113/200
66/66 [==============================] - 42s 637ms/step - loss: 0.7734 - acc: 0.7139 - val_loss: 1.1262 - val_acc: 0.6027
Epoch 114/200
66/66 [==============================] - 43s 646ms/step - loss: 0.8220 - acc: 0.7038 - val_loss: 1.1672 - val_acc: 0.6071
Epoch 115/200
66/66 [==============================] - 43s 650ms/step - loss: 0.7745 - acc: 0.7161 - val_loss: 1.0869 - val_acc: 0.6205
Epoch 116/200
66/66 [==============================] - 42s 640ms/step - loss: 0.8086 - acc: 0.6931 - val_loss: 1.0461 - val_acc: 0.6440
Epoch 117/200
66/66 [==============================] - 42s 642ms/step - loss: 0.7562 - acc: 0.7208 - val_loss: 1.1299 - val_acc: 0.6362
Epoch 118/200
66/66 [==============================] - 43s 652ms/step - loss: 0.7383 - acc: 0.7393 - val_loss: 1.1266 - val_acc: 0.6295
Epoch 119/200
66/66 [==============================] - 42s 632ms/step - loss: 0.7614 - acc: 0.7232 - val_loss: 1.0732 - val_acc: 0.6328
Epoch 120/200
66/66 [==============================] - 42s 639ms/step - loss: 0.7486 - acc: 0.7353 - val_loss: 1.0815 - val_acc: 0.6496
Epoch 121/200
66/66 [==============================] - 42s 644ms/step - loss: 0.7302 - acc: 0.7360 - val_loss: 1.1352 - val_acc: 0.6283
Epoch 122/200
66/66 [==============================] - 43s 646ms/step - loss: 0.7160 - acc: 0.7312 - val_loss: 1.0731 - val_acc: 0.6306
Epoch 123/200
66/66 [==============================] - 42s 640ms/step - loss: 0.6965 - acc: 0.7369 - val_loss: 1.0523 - val_acc: 0.6540
Epoch 124/200
66/66 [==============================] - 42s 639ms/step - loss: 0.7293 - acc: 0.7331 - val_loss: 1.0664 - val_acc: 0.6362
Epoch 125/200
66/66 [==============================] - 43s 648ms/step - loss: 0.7310 - acc: 0.7407 - val_loss: 1.1090 - val_acc: 0.6384
Epoch 126/200
66/66 [==============================] - 42s 639ms/step - loss: 0.7504 - acc: 0.7305 - val_loss: 1.0608 - val_acc: 0.6462
Epoch 127/200
66/66 [==============================] - 44s 662ms/step - loss: 0.7021 - acc: 0.7362 - val_loss: 1.0914 - val_acc: 0.6496
Epoch 128/200
66/66 [==============================] - 42s 629ms/step - loss: 0.7287 - acc: 0.7367 - val_loss: 1.0295 - val_acc: 0.6440
Epoch 129/200
66/66 [==============================] - 42s 637ms/step - loss: 0.6756 - acc: 0.7483 - val_loss: 1.1223 - val_acc: 0.6384
Epoch 130/200
66/66 [==============================] - 42s 639ms/step - loss: 0.6850 - acc: 0.7480 - val_loss: 1.1149 - val_acc: 0.6496
Epoch 131/200
66/66 [==============================] - 42s 641ms/step - loss: 0.7242 - acc: 0.7395 - val_loss: 1.1623 - val_acc: 0.6317
Epoch 132/200
66/66 [==============================] - 46s 704ms/step - loss: 0.7261 - acc: 0.7206 - val_loss: 1.0766 - val_acc: 0.6462
Epoch 133/200
66/66 [==============================] - 42s 638ms/step - loss: 0.6604 - acc: 0.7582 - val_loss: 1.1287 - val_acc: 0.6362
Epoch 134/200
66/66 [==============================] - 42s 643ms/step - loss: 0.6865 - acc: 0.7483 - val_loss: 1.1868 - val_acc: 0.6116
Epoch 135/200
66/66 [==============================] - 43s 657ms/step - loss: 0.7141 - acc: 0.7523 - val_loss: 1.1201 - val_acc: 0.6496
Epoch 136/200
66/66 [==============================] - 42s 642ms/step - loss: 0.7117 - acc: 0.7379 - val_loss: 1.0956 - val_acc: 0.6373
Epoch 137/200
66/66 [==============================] - 42s 641ms/step - loss: 0.6603 - acc: 0.7547 - val_loss: 1.1326 - val_acc: 0.6138
Epoch 138/200
66/66 [==============================] - 42s 639ms/step - loss: 0.6544 - acc: 0.7639 - val_loss: 1.2570 - val_acc: 0.6049
Epoch 139/200
66/66 [==============================] - 42s 634ms/step - loss: 0.7098 - acc: 0.7343 - val_loss: 1.0818 - val_acc: 0.6395
Epoch 140/200
66/66 [==============================] - 42s 638ms/step - loss: 0.6853 - acc: 0.7582 - val_loss: 1.1144 - val_acc: 0.6373
Epoch 141/200
66/66 [==============================] - 42s 643ms/step - loss: 0.6852 - acc: 0.7485 - val_loss: 1.1440 - val_acc: 0.6417
Epoch 142/200
66/66 [==============================] - 42s 640ms/step - loss: 0.6582 - acc: 0.7601 - val_loss: 1.1992 - val_acc: 0.6317
Epoch 143/200
66/66 [==============================] - 43s 644ms/step - loss: 0.6670 - acc: 0.7613 - val_loss: 1.1853 - val_acc: 0.6161
Epoch 144/200
66/66 [==============================] - 42s 643ms/step - loss: 0.6288 - acc: 0.7667 - val_loss: 1.0716 - val_acc: 0.6574
Epoch 145/200
66/66 [==============================] - 42s 643ms/step - loss: 0.6201 - acc: 0.7767 - val_loss: 1.1520 - val_acc: 0.6440
Epoch 146/200
66/66 [==============================] - 42s 639ms/step - loss: 0.6585 - acc: 0.7570 - val_loss: 1.0884 - val_acc: 0.6328
Epoch 147/200
66/66 [==============================] - 43s 649ms/step - loss: 0.6557 - acc: 0.7608 - val_loss: 1.1284 - val_acc: 0.6596
Epoch 148/200
66/66 [==============================] - 42s 642ms/step - loss: 0.5831 - acc: 0.7983 - val_loss: 1.1396 - val_acc: 0.6540
Epoch 149/200
66/66 [==============================] - 43s 652ms/step - loss: 0.6375 - acc: 0.7634 - val_loss: 1.1478 - val_acc: 0.6484
Epoch 150/200
66/66 [==============================] - 43s 645ms/step - loss: 0.6065 - acc: 0.7862 - val_loss: 1.2841 - val_acc: 0.6183
Epoch 151/200
66/66 [==============================] - 42s 637ms/step - loss: 0.6593 - acc: 0.7672 - val_loss: 1.0719 - val_acc: 0.6741
Epoch 152/200
66/66 [==============================] - 42s 636ms/step - loss: 0.6381 - acc: 0.7698 - val_loss: 1.1288 - val_acc: 0.6529
Epoch 153/200
66/66 [==============================] - 42s 641ms/step - loss: 0.6392 - acc: 0.7703 - val_loss: 1.2306 - val_acc: 0.6116
Epoch 154/200
66/66 [==============================] - 42s 643ms/step - loss: 0.6129 - acc: 0.7845 - val_loss: 1.0753 - val_acc: 0.6730
Epoch 155/200
66/66 [==============================] - 42s 639ms/step - loss: 0.6130 - acc: 0.7788 - val_loss: 1.1409 - val_acc: 0.6540
Epoch 156/200
66/66 [==============================] - 42s 642ms/step - loss: 0.5948 - acc: 0.7798 - val_loss: 1.1311 - val_acc: 0.6473
Epoch 157/200
66/66 [==============================] - 42s 635ms/step - loss: 0.6003 - acc: 0.7722 - val_loss: 1.1662 - val_acc: 0.6473
Epoch 158/200
66/66 [==============================] - 42s 639ms/step - loss: 0.5889 - acc: 0.7814 - val_loss: 1.1179 - val_acc: 0.6618
Epoch 159/200
66/66 [==============================] - 43s 646ms/step - loss: 0.6122 - acc: 0.7800 - val_loss: 1.1723 - val_acc: 0.6395
Epoch 160/200
66/66 [==============================] - 42s 634ms/step - loss: 0.5510 - acc: 0.7952 - val_loss: 1.1653 - val_acc: 0.6462
Epoch 161/200
66/66 [==============================] - 42s 634ms/step - loss: 0.5598 - acc: 0.7975 - val_loss: 1.1559 - val_acc: 0.6629
Epoch 162/200
66/66 [==============================] - 42s 642ms/step - loss: 0.5818 - acc: 0.7788 - val_loss: 1.1983 - val_acc: 0.6462
Epoch 163/200
66/66 [==============================] - 43s 644ms/step - loss: 0.5732 - acc: 0.7921 - val_loss: 1.1307 - val_acc: 0.6652
Epoch 164/200
66/66 [==============================] - 42s 634ms/step - loss: 0.5607 - acc: 0.7945 - val_loss: 1.1818 - val_acc: 0.6306
Epoch 165/200
66/66 [==============================] - 43s 649ms/step - loss: 0.5687 - acc: 0.7975 - val_loss: 1.0756 - val_acc: 0.6685
Epoch 166/200
66/66 [==============================] - 42s 639ms/step - loss: 0.6007 - acc: 0.7793 - val_loss: 1.1665 - val_acc: 0.6384
Epoch 167/200
66/66 [==============================] - 42s 639ms/step - loss: 0.5365 - acc: 0.8084 - val_loss: 1.2053 - val_acc: 0.6295
Epoch 168/200
66/66 [==============================] - 42s 641ms/step - loss: 0.5362 - acc: 0.8011 - val_loss: 1.2299 - val_acc: 0.6607
Epoch 169/200
66/66 [==============================] - 42s 640ms/step - loss: 0.5735 - acc: 0.7978 - val_loss: 1.1015 - val_acc: 0.6641
Epoch 170/200
66/66 [==============================] - 42s 643ms/step - loss: 0.5592 - acc: 0.8084 - val_loss: 1.1391 - val_acc: 0.6585
Epoch 171/200
66/66 [==============================] - 41s 628ms/step - loss: 0.5101 - acc: 0.8186 - val_loss: 1.1265 - val_acc: 0.6663
Epoch 172/200
66/66 [==============================] - 43s 645ms/step - loss: 0.5554 - acc: 0.7959 - val_loss: 1.2105 - val_acc: 0.6373
Epoch 173/200
66/66 [==============================] - 42s 633ms/step - loss: 0.4842 - acc: 0.8250 - val_loss: 1.1564 - val_acc: 0.6629
Epoch 174/200
66/66 [==============================] - 42s 634ms/step - loss: 0.5055 - acc: 0.8141 - val_loss: 1.2250 - val_acc: 0.6417
Epoch 175/200
66/66 [==============================] - 43s 644ms/step - loss: 0.5928 - acc: 0.7838 - val_loss: 1.2865 - val_acc: 0.6116
Epoch 176/200
66/66 [==============================] - 42s 639ms/step - loss: 0.5566 - acc: 0.7938 - val_loss: 1.1708 - val_acc: 0.6473
Epoch 177/200
66/66 [==============================] - 42s 639ms/step - loss: 0.5243 - acc: 0.8129 - val_loss: 1.2517 - val_acc: 0.6295
Epoch 178/200
66/66 [==============================] - 43s 648ms/step - loss: 0.5587 - acc: 0.7978 - val_loss: 1.1865 - val_acc: 0.6429
Epoch 179/200
66/66 [==============================] - 43s 650ms/step - loss: 0.5209 - acc: 0.8120 - val_loss: 1.2077 - val_acc: 0.6641
Epoch 180/200
66/66 [==============================] - 42s 635ms/step - loss: 0.5249 - acc: 0.8013 - val_loss: 1.1984 - val_acc: 0.6518
Epoch 181/200
66/66 [==============================] - 43s 646ms/step - loss: 0.4881 - acc: 0.8151 - val_loss: 1.1340 - val_acc: 0.6618
Epoch 182/200
66/66 [==============================] - 42s 634ms/step - loss: 0.5012 - acc: 0.8198 - val_loss: 1.1831 - val_acc: 0.6652
Epoch 183/200
66/66 [==============================] - 42s 637ms/step - loss: 0.4886 - acc: 0.8231 - val_loss: 1.1720 - val_acc: 0.6641
Epoch 184/200
66/66 [==============================] - 41s 628ms/step - loss: 0.5008 - acc: 0.8155 - val_loss: 1.1901 - val_acc: 0.6350
Epoch 185/200
66/66 [==============================] - 42s 640ms/step - loss: 0.5148 - acc: 0.8143 - val_loss: 1.1401 - val_acc: 0.6708
Epoch 186/200
66/66 [==============================] - 42s 636ms/step - loss: 0.4843 - acc: 0.8148 - val_loss: 1.2817 - val_acc: 0.6484
Epoch 187/200
66/66 [==============================] - 42s 634ms/step - loss: 0.4904 - acc: 0.8297 - val_loss: 1.2409 - val_acc: 0.6473
Epoch 188/200
66/66 [==============================] - 42s 630ms/step - loss: 0.4852 - acc: 0.8136 - val_loss: 1.3485 - val_acc: 0.6272
Epoch 189/200
66/66 [==============================] - 42s 644ms/step - loss: 0.5028 - acc: 0.8080 - val_loss: 1.1985 - val_acc: 0.6440
Epoch 190/200
66/66 [==============================] - 43s 645ms/step - loss: 0.4997 - acc: 0.8113 - val_loss: 1.2105 - val_acc: 0.6484
Epoch 191/200
66/66 [==============================] - 42s 636ms/step - loss: 0.4681 - acc: 0.8338 - val_loss: 1.1980 - val_acc: 0.6484
Epoch 192/200
66/66 [==============================] - 43s 647ms/step - loss: 0.4840 - acc: 0.8266 - val_loss: 1.2251 - val_acc: 0.6618
Epoch 193/200
66/66 [==============================] - 42s 635ms/step - loss: 0.4895 - acc: 0.8231 - val_loss: 1.1926 - val_acc: 0.6540
Epoch 194/200
66/66 [==============================] - 43s 648ms/step - loss: 0.4938 - acc: 0.8281 - val_loss: 1.3246 - val_acc: 0.6540
Epoch 195/200
66/66 [==============================] - 42s 633ms/step - loss: 0.4812 - acc: 0.8271 - val_loss: 1.2495 - val_acc: 0.6529
Epoch 196/200
66/66 [==============================] - 42s 630ms/step - loss: 0.4767 - acc: 0.8319 - val_loss: 1.2825 - val_acc: 0.6440
Epoch 197/200
66/66 [==============================] - 42s 640ms/step - loss: 0.4942 - acc: 0.8122 - val_loss: 1.2033 - val_acc: 0.6373
Epoch 198/200
66/66 [==============================] - 42s 643ms/step - loss: 0.4788 - acc: 0.8269 - val_loss: 1.2045 - val_acc: 0.6563
Epoch 199/200
66/66 [==============================] - 42s 638ms/step - loss: 0.4532 - acc: 0.8203 - val_loss: 1.2004 - val_acc: 0.6574
Epoch 200/200
66/66 [==============================] - 42s 636ms/step - loss: 0.4484 - acc: 0.8428 - val_loss: 1.1930 - val_acc: 0.6551
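
EarlyStopping never fired in this run, so the final weights are simply those of epoch 200 rather than of the epoch with the best validation accuracy. A ModelCheckpoint callback could keep the best-validation weights instead; a sketch (the file name is illustrative):

from keras.callbacks import ModelCheckpoint

# Keep only the weights from the epoch with the highest val_acc.
checkpoint = ModelCheckpoint("SaveModel/VGG_best_weights.h5", monitor='val_acc',
                             save_best_only=True, save_weights_only=True)
# ...and pass callbacks=[earlystop, checkpoint] to fit_generator() above.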

In [31]:
import matplotlib.pyplot as plt

def show_train_history(train_token, test_token):
    """Plot a metric from the training history against its validation counterpart."""
    plt.plot(train_history.history[train_token])
    plt.plot(train_history.history[test_token])
    plt.title('Train History')
    plt.ylabel(train_token)
    plt.xlabel('Epoch')
    plt.legend([train_token, test_token], loc='upper left')
    plt.show()

In [32]:
show_train_history('loss','val_loss')



In [33]:
show_train_history('acc','val_acc')



In [34]:
scores = my_model.evaluate(X_img_train_normalize, y_label_train_OneHot, verbose=0)
scores


Out[34]:
[0.55067741571109696, 0.82345058628462464]

In [35]:
pred_train=my_model.predict(X_img_train_normalize)

In [36]:
prediction_train = np.empty(len(pred_train))
for i in range(pred_train.shape[0]):
    prediction_train[i] = np.argmax(pred_train[i])
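
The same result can be obtained without the explicit loop by taking the argmax along the class axis:

# Equivalent vectorized form of the loop above.
prediction_train = pred_train.argmax(axis=1)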

In [37]:
train_img_files["class"].astype('int').reshape(-1)


/opt/conda/lib/python3.6/site-packages/ipykernel_launcher.py:1: FutureWarning: reshape is deprecated and will raise in a subsequent release. Please use .values.reshape(...) instead
  """Entry point for launching an IPython kernel.
Out[37]:
array([ 9,  9,  9, ..., 13, 13, 13])

In [38]:
import pandas as pd

pd.crosstab(train_img_files["class"].astype('int').reshape(-1),prediction_train,
            rownames=['class'],colnames=['predict'])


/opt/conda/lib/python3.6/site-packages/ipykernel_launcher.py:3: FutureWarning: reshape is deprecated and will raise in a subsequent release. Please use .values.reshape(...) instead
  This is separate from the ipykernel package so we can avoid doing imports until
Out[38]:
predict 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0 12.0 13.0 14.0
class
0 85 0 0 6 0 12 0 2 0 0 0 3 2 0 0
1 0 180 3 4 0 1 0 0 0 1 0 1 0 2 0
2 0 2 149 12 0 10 0 1 3 1 0 20 0 12 1
3 3 4 0 174 0 5 0 1 0 3 1 16 0 1 0
4 0 0 0 0 218 0 1 0 6 0 0 0 0 3 0
5 10 1 4 10 0 130 0 2 0 1 0 22 8 1 0
6 0 1 1 2 4 0 238 0 21 1 37 0 0 0 5
7 29 0 0 1 0 14 0 60 0 0 0 2 8 1 0
8 0 0 6 0 8 0 23 0 227 1 3 1 0 4 1
9 0 0 1 0 0 0 0 0 0 140 0 0 0 0 0
10 0 0 1 0 1 0 15 0 8 0 233 0 0 0 2
11 4 2 4 8 0 3 0 1 0 0 0 192 0 1 0
12 6 0 7 4 0 34 0 1 0 1 1 5 56 1 0
13 0 0 8 1 0 3 0 0 2 0 0 3 0 239 0
14 0 1 2 1 0 1 5 0 5 2 5 1 0 0 137
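
For a quicker read of the table above, per-class training accuracy can be taken from the diagonal of the same crosstab; a sketch (cm is assigned here, it was only displayed above):

cm = pd.crosstab(train_img_files["class"].astype('int').values.reshape(-1), prediction_train,
                 rownames=['class'], colnames=['predict'])
per_class_acc = np.diag(cm.values) / cm.values.sum(axis=1)
print(per_class_acc.round(2))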

In [39]:
pred=my_model.predict(X_img_test_normalize)

In [40]:
pred[0]


Out[40]:
array([  5.63765760e-04,   7.36021949e-03,   4.05607857e-02,
         1.14499859e-03,   4.21267828e-07,   5.74365318e-01,
         3.56340638e-06,   1.05790030e-04,   1.41352662e-04,
         2.26507359e-03,   5.33056550e-08,   1.15378257e-02,
         2.00325623e-01,   1.61350712e-01,   2.74529069e-04], dtype=float32)

In [41]:
prediction = np.empty(len(pred))
for i in range(pred.shape[0]):
    prediction[i] = np.argmax(pred[i])

In [42]:
prediction[0]


Out[42]:
5.0

In [43]:
#test_img_files

In [44]:
img_sub = pd.read_csv("/data/examples/may_the_4_be_with_u/where_am_i/img-submission.csv", encoding="utf-8")
img_sub.head(2)


Out[44]:
id class
0 c117693e1cf24a5232090d1548cb11d4e5ea0df65680c4... 1
1 96baacc2e97886a998807ce197574821a6dc83c227c746... 1

In [45]:
Submission = pd.DataFrame({ 'id': test_img_files, 'class': prediction }, columns=["id","class"])
Submission["class"] = Submission["class"].astype('int')
Submission.head(2)


Out[45]:
id class
0 7dc74b51e229d841272c0795cffed857d0e6038a4be0c9... 5
1 1b1940d44fe4f5b76e89f876de3d2514a51b50057cae3e... 3

In [46]:
img_sub[img_sub["id"] == "3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f907891cf76e0fea790"]


Out[46]:
id class
1410 3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f... 1

In [47]:
Submission[Submission["id"] == "3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f907891cf76e0fea790.jpg"]


Out[47]:
id class
154 3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f... 0

In [48]:
Submission["id"] = Submission["id"].apply(lambda x: x.replace(".jpg", ""))

In [49]:
Submission[Submission["id"] == "3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f907891cf76e0fea790"]


Out[49]:
id class
154 3d766cf9e984b45cd7b6fa0550aaf2c33e1cc881c5036f... 0

In [50]:
_test = img_sub.merge(Submission, left_on='id', right_on='id', how='outer')

In [51]:
_test.head(3)


Out[51]:
id class_x class_y
0 c117693e1cf24a5232090d1548cb11d4e5ea0df65680c4... 1 12
1 96baacc2e97886a998807ce197574821a6dc83c227c746... 1 14
2 f9bd26db23eb9b544ca78be79b11b4d1259e802885861d... 1 5
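
Before writing the submission file, a quick sanity check can confirm that the outer merge left no unmatched ids on either side; a sketch:

# Every test id should appear in both frames, so neither class column should contain NaN.
assert _test["class_x"].notnull().all() and _test["class_y"].notnull().all()
print(len(_test))  # expected: 1500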

In [52]:
# Generate Submission File:
Submission[["id","class"]] = _test[["id","class_y"]]
Submission.to_csv("submission_vgg_e200_std.csv", index=False)

Save the model architecture and weights


In [54]:
my_model.save_weights("SaveModel/VGG_Model.h5")
print("Saved model to disk")


Saved model to disk

In [55]:
model_yaml = my_model.to_yaml()
with open("SaveModel/VGG_Model.yaml", "w") as yaml_file:
    yaml_file.write(model_yaml)

In [56]:
model_json = my_model.to_json()
with open("SaveModel/VGG_Model.json", "w") as json_file:
    json_file.write(model_json)
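
To use the saved files later, the architecture can be rebuilt from the JSON and the trained weights reloaded; a minimal sketch:

from keras.models import model_from_json

# Rebuild the architecture from JSON, then load the trained weights.
with open("SaveModel/VGG_Model.json") as json_file:
    reloaded_model = model_from_json(json_file.read())
reloaded_model.load_weights("SaveModel/VGG_Model.h5")
reloaded_model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])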

In [ ]: