Example 9-2: How to use a pretrained model
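
This example reuses a network that has already been trained on a large image dataset (transfer learning): the pretrained convolutional base acts as a fixed feature extractor, and only a small classifier stacked on top of it is trained on the new data. The run below works on a small image subset of 800 training and 200 test samples of shape 32x32x3, spread over 10 classes. The following is a minimal sketch of the general Keras pattern, assuming a VGG16 backbone with ImageNet weights; the backbone and layer sizes actually used by ex9_2_pretarined_method.py may differ.

# A minimal transfer-learning sketch (assumption: VGG16 backbone with
# ImageNet weights; the bundled example script may use another model).
from keras.applications.vgg16 import VGG16
from keras.layers import Dense, Flatten
from keras.models import Model

def build_transfer_model(input_shape=(32, 32, 3), nb_classes=10):
    # Load the pretrained convolutional base without its dense top layers.
    base = VGG16(weights='imagenet', include_top=False, input_shape=input_shape)
    # Freeze the pretrained filters so only the new head is trained.
    for layer in base.layers:
        layer.trainable = False
    # Attach a small classifier head for the 10 target classes.
    x = Flatten()(base.output)
    x = Dense(256, activation='relu')(x)
    y = Dense(nb_classes, activation='softmax')(x)
    model = Model(inputs=base.input, outputs=y)
    model.compile(optimizer='adam', loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model

Because the base is frozen, only the weights of the new Dense head are updated during training, which is what makes it feasible to fit the model on just 800 images.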


In [1]:
# %load /home/sjkim/.jupyter/head.py
%matplotlib inline
%load_ext autoreload 
%autoreload 2
from importlib import reload

import matplotlib.pyplot as plt
import numpy as np

import pandas as pd
import os
#os.environ["CUDA_VISIBLE_DEVICES"]="0"

# seaborn
#import seaborn as sns
#sns.set( style = 'white', font_scale = 1.7)
#sns.set_style('ticks')
#plt.rcParams['savefig.dpi'] = 200

# font for matplotlib
#import matplotlib
#import matplotlib.font_manager as fm
#fm.get_fontconfig_fonts()
#font_location = '/usr/share/fonts/truetype/nanum/NanumGothicBold.ttf'
#font_name = fm.FontProperties(fname=font_location).get_name()
#matplotlib.rc('font', family=font_name)


In [2]:
import ex9_2_pretarined_method as example


Using TensorFlow backend.
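
The example code is packaged as a module whose main() function runs the entire pipeline: load the data subset, build and train the classifier on top of the pretrained base, evaluate it on the held-out test set, and save the results into a uniquely named output directory. The outline below is a hypothetical reconstruction of such a driver; the CIFAR-10 data source and the build_transfer_model() helper from the sketch above are assumptions, not the actual contents of ex9_2_pretarined_method.py.

# Hypothetical outline of the driver; data source and helper are assumptions.
import os
import uuid
from keras.datasets import cifar10
from keras.utils import np_utils

def main(nb_classes=10, epochs=100):
    # Keep a small subset (800 train / 200 test), matching the shapes
    # printed in the run below.
    (X_train, y_train), (X_test, y_test) = cifar10.load_data()
    X_train, y_train = X_train[:800].astype('float32') / 255, y_train[:800]
    X_test, y_test = X_test[:200].astype('float32') / 255, y_test[:200]
    Y_train = np_utils.to_categorical(y_train, nb_classes)
    Y_test = np_utils.to_categorical(y_test, nb_classes)

    # Classifier on a frozen pretrained base (see build_transfer_model above).
    model = build_transfer_model(X_train.shape[1:], nb_classes)
    model.fit(X_train, Y_train, epochs=epochs,
              validation_data=(X_test, Y_test))

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

    # Save the trained model in a uniquely named folder, e.g. output_<uuid>.
    out_dir = 'output_' + str(uuid.uuid4())
    os.makedirs(out_dir)
    model.save(os.path.join(out_dir, 'model.h5'))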

In [3]:
example.main()


(800, 32, 32, 3) (800, 1)
X_train shape: (800, 32, 32, 3)
800 train samples
200 test samples
(800, 32, 32, 3)
Epoch 1/100
10/10 [==============================] - 1s - loss: 13.1209 - acc: 0.0889 - val_loss: 11.5896 - val_acc: 0.0950
Epoch 2/100
10/10 [==============================] - 0s - loss: 12.1266 - acc: 0.1315 - val_loss: 10.3677 - val_acc: 0.1600
Epoch 3/100
10/10 [==============================] - 0s - loss: 11.4561 - acc: 0.1547 - val_loss: 9.8880 - val_acc: 0.1950
Epoch 4/100
10/10 [==============================] - 0s - loss: 10.7811 - acc: 0.1833 - val_loss: 8.8622 - val_acc: 0.2050
Epoch 5/100
10/10 [==============================] - 0s - loss: 10.1624 - acc: 0.2161 - val_loss: 8.1952 - val_acc: 0.2550
Epoch 6/100
10/10 [==============================] - 0s - loss: 9.9038 - acc: 0.2226 - val_loss: 7.6528 - val_acc: 0.2950
Epoch 7/100
10/10 [==============================] - 0s - loss: 8.7929 - acc: 0.2752 - val_loss: 7.2329 - val_acc: 0.3300
Epoch 8/100
10/10 [==============================] - 0s - loss: 8.4259 - acc: 0.3001 - val_loss: 6.7457 - val_acc: 0.3450
Epoch 9/100
10/10 [==============================] - 0s - loss: 7.9649 - acc: 0.3224 - val_loss: 6.2460 - val_acc: 0.3800
Epoch 10/100
10/10 [==============================] - 0s - loss: 7.7023 - acc: 0.3302 - val_loss: 6.0524 - val_acc: 0.4250
Epoch 11/100
10/10 [==============================] - 0s - loss: 6.9466 - acc: 0.3666 - val_loss: 5.8366 - val_acc: 0.4250
Epoch 12/100
10/10 [==============================] - 0s - loss: 7.1256 - acc: 0.3331 - val_loss: 5.7109 - val_acc: 0.4250
Epoch 13/100
10/10 [==============================] - 0s - loss: 6.1787 - acc: 0.4057 - val_loss: 5.6455 - val_acc: 0.4300
Epoch 14/100
10/10 [==============================] - 0s - loss: 6.1382 - acc: 0.4386 - val_loss: 5.3549 - val_acc: 0.4350
Epoch 15/100
10/10 [==============================] - 0s - loss: 5.9543 - acc: 0.4277 - val_loss: 5.2512 - val_acc: 0.4350
Epoch 16/100
10/10 [==============================] - 0s - loss: 5.2100 - acc: 0.4673 - val_loss: 5.0852 - val_acc: 0.4550
Epoch 17/100
10/10 [==============================] - 0s - loss: 5.3330 - acc: 0.4443 - val_loss: 5.0204 - val_acc: 0.4550
Epoch 18/100
10/10 [==============================] - 0s - loss: 4.9509 - acc: 0.4713 - val_loss: 4.9465 - val_acc: 0.4450
Epoch 19/100
10/10 [==============================] - 0s - loss: 4.7398 - acc: 0.4987 - val_loss: 4.8894 - val_acc: 0.4400
Epoch 20/100
10/10 [==============================] - 0s - loss: 4.6859 - acc: 0.5102 - val_loss: 4.8214 - val_acc: 0.4450
Epoch 21/100
10/10 [==============================] - 0s - loss: 4.5677 - acc: 0.5058 - val_loss: 4.8035 - val_acc: 0.4500
Epoch 22/100
10/10 [==============================] - 0s - loss: 3.9868 - acc: 0.5372 - val_loss: 4.7284 - val_acc: 0.4500
Epoch 23/100
10/10 [==============================] - 0s - loss: 4.0218 - acc: 0.5456 - val_loss: 4.7045 - val_acc: 0.4500
Epoch 24/100
10/10 [==============================] - 0s - loss: 3.7893 - acc: 0.5376 - val_loss: 4.6348 - val_acc: 0.4500
Epoch 25/100
10/10 [==============================] - 0s - loss: 3.8261 - acc: 0.5455 - val_loss: 4.6014 - val_acc: 0.4600
Epoch 26/100
10/10 [==============================] - 0s - loss: 3.4687 - acc: 0.5814 - val_loss: 4.5546 - val_acc: 0.4600
Epoch 27/100
10/10 [==============================] - 0s - loss: 3.2467 - acc: 0.5751 - val_loss: 4.4888 - val_acc: 0.4550
Epoch 28/100
10/10 [==============================] - 0s - loss: 3.2480 - acc: 0.5888 - val_loss: 4.4371 - val_acc: 0.4650
Epoch 29/100
10/10 [==============================] - 0s - loss: 3.1168 - acc: 0.5902 - val_loss: 4.3219 - val_acc: 0.4600
Epoch 30/100
10/10 [==============================] - 0s - loss: 2.8626 - acc: 0.6082 - val_loss: 4.3235 - val_acc: 0.4600
Epoch 31/100
10/10 [==============================] - 0s - loss: 2.8111 - acc: 0.5983 - val_loss: 4.2617 - val_acc: 0.4550
Epoch 32/100
10/10 [==============================] - 0s - loss: 2.8982 - acc: 0.6158 - val_loss: 4.1624 - val_acc: 0.4600
Epoch 33/100
10/10 [==============================] - 0s - loss: 2.8361 - acc: 0.6131 - val_loss: 4.1252 - val_acc: 0.4600
Epoch 34/100
10/10 [==============================] - 0s - loss: 2.3051 - acc: 0.6454 - val_loss: 4.0928 - val_acc: 0.4650
Epoch 35/100
10/10 [==============================] - 0s - loss: 2.4088 - acc: 0.6314 - val_loss: 4.0297 - val_acc: 0.4700
Epoch 36/100
10/10 [==============================] - 0s - loss: 2.5244 - acc: 0.6169 - val_loss: 3.9978 - val_acc: 0.4700
Epoch 37/100
10/10 [==============================] - 0s - loss: 2.3503 - acc: 0.6473 - val_loss: 3.9818 - val_acc: 0.4750
Epoch 38/100
10/10 [==============================] - 0s - loss: 2.2483 - acc: 0.6417 - val_loss: 3.9343 - val_acc: 0.4700
Epoch 39/100
10/10 [==============================] - 0s - loss: 2.2594 - acc: 0.6376 - val_loss: 3.8927 - val_acc: 0.4650
Epoch 40/100
10/10 [==============================] - 0s - loss: 2.3094 - acc: 0.6528 - val_loss: 3.8362 - val_acc: 0.4750
Epoch 41/100
10/10 [==============================] - 0s - loss: 1.9502 - acc: 0.6733 - val_loss: 3.7935 - val_acc: 0.4700
Epoch 42/100
10/10 [==============================] - 0s - loss: 1.9654 - acc: 0.6822 - val_loss: 3.7663 - val_acc: 0.4700
Epoch 43/100
10/10 [==============================] - 0s - loss: 2.0017 - acc: 0.6674 - val_loss: 3.7325 - val_acc: 0.4800
Epoch 44/100
10/10 [==============================] - 0s - loss: 1.7686 - acc: 0.7092 - val_loss: 3.7122 - val_acc: 0.4700
Epoch 45/100
10/10 [==============================] - 0s - loss: 1.7232 - acc: 0.7098 - val_loss: 3.6783 - val_acc: 0.4800
Epoch 46/100
10/10 [==============================] - 0s - loss: 1.5886 - acc: 0.6985 - val_loss: 3.6796 - val_acc: 0.4800
Epoch 47/100
10/10 [==============================] - 0s - loss: 1.6716 - acc: 0.7098 - val_loss: 3.6340 - val_acc: 0.4800
Epoch 48/100
10/10 [==============================] - 0s - loss: 1.5614 - acc: 0.7088 - val_loss: 3.6302 - val_acc: 0.4800
Epoch 49/100
10/10 [==============================] - 0s - loss: 1.7158 - acc: 0.6898 - val_loss: 3.5794 - val_acc: 0.4850
Epoch 50/100
10/10 [==============================] - 0s - loss: 1.5347 - acc: 0.7132 - val_loss: 3.5673 - val_acc: 0.4850
Epoch 51/100
10/10 [==============================] - 0s - loss: 1.3397 - acc: 0.7388 - val_loss: 3.5157 - val_acc: 0.4900
Epoch 52/100
10/10 [==============================] - 0s - loss: 1.2939 - acc: 0.7304 - val_loss: 3.4827 - val_acc: 0.4900
Epoch 53/100
10/10 [==============================] - 0s - loss: 1.4042 - acc: 0.7340 - val_loss: 3.4525 - val_acc: 0.4900
Epoch 54/100
10/10 [==============================] - 0s - loss: 1.1983 - acc: 0.7632 - val_loss: 3.4584 - val_acc: 0.4850
Epoch 55/100
10/10 [==============================] - 0s - loss: 1.2286 - acc: 0.7408 - val_loss: 3.4501 - val_acc: 0.4800
Epoch 56/100
10/10 [==============================] - 0s - loss: 1.0283 - acc: 0.7773 - val_loss: 3.4218 - val_acc: 0.4850
Epoch 57/100
10/10 [==============================] - 0s - loss: 1.1750 - acc: 0.7641 - val_loss: 3.4164 - val_acc: 0.4700
Epoch 58/100
10/10 [==============================] - 0s - loss: 1.1204 - acc: 0.7668 - val_loss: 3.4018 - val_acc: 0.4700
Epoch 59/100
10/10 [==============================] - 0s - loss: 0.9608 - acc: 0.7659 - val_loss: 3.3846 - val_acc: 0.5000
Epoch 60/100
10/10 [==============================] - 0s - loss: 1.0326 - acc: 0.7790 - val_loss: 3.3539 - val_acc: 0.4900
Epoch 61/100
10/10 [==============================] - 0s - loss: 1.0400 - acc: 0.7800 - val_loss: 3.3332 - val_acc: 0.4800
Epoch 62/100
10/10 [==============================] - 0s - loss: 0.8976 - acc: 0.7918 - val_loss: 3.3442 - val_acc: 0.4750
Epoch 63/100
10/10 [==============================] - 0s - loss: 0.8903 - acc: 0.7896 - val_loss: 3.3334 - val_acc: 0.4800
Epoch 64/100
10/10 [==============================] - 0s - loss: 0.9482 - acc: 0.7894 - val_loss: 3.3023 - val_acc: 0.4900
Epoch 65/100
10/10 [==============================] - 0s - loss: 0.6705 - acc: 0.8165 - val_loss: 3.2951 - val_acc: 0.4850
Epoch 66/100
10/10 [==============================] - 0s - loss: 0.8168 - acc: 0.8128 - val_loss: 3.2965 - val_acc: 0.4850
Epoch 67/100
10/10 [==============================] - 0s - loss: 0.8467 - acc: 0.8042 - val_loss: 3.2567 - val_acc: 0.4950
Epoch 68/100
10/10 [==============================] - 0s - loss: 0.6818 - acc: 0.8282 - val_loss: 3.2670 - val_acc: 0.4750
Epoch 69/100
10/10 [==============================] - 0s - loss: 0.9091 - acc: 0.7879 - val_loss: 3.2648 - val_acc: 0.4900
Epoch 70/100
10/10 [==============================] - 0s - loss: 0.6497 - acc: 0.8398 - val_loss: 3.2419 - val_acc: 0.4850
Epoch 71/100
10/10 [==============================] - 0s - loss: 0.7165 - acc: 0.8195 - val_loss: 3.2230 - val_acc: 0.4850
Epoch 72/100
10/10 [==============================] - 0s - loss: 0.6480 - acc: 0.8349 - val_loss: 3.2151 - val_acc: 0.4800
Epoch 73/100
10/10 [==============================] - 0s - loss: 0.5979 - acc: 0.8323 - val_loss: 3.2164 - val_acc: 0.4900
Epoch 74/100
10/10 [==============================] - 0s - loss: 0.5971 - acc: 0.8433 - val_loss: 3.2153 - val_acc: 0.4600
Epoch 75/100
10/10 [==============================] - 0s - loss: 0.5538 - acc: 0.8418 - val_loss: 3.2177 - val_acc: 0.4850
Epoch 76/100
10/10 [==============================] - 0s - loss: 0.5999 - acc: 0.8358 - val_loss: 3.2198 - val_acc: 0.4700
Epoch 77/100
10/10 [==============================] - 0s - loss: 0.5605 - acc: 0.8422 - val_loss: 3.2480 - val_acc: 0.4900
Epoch 78/100
10/10 [==============================] - 0s - loss: 0.5145 - acc: 0.8638 - val_loss: 3.2627 - val_acc: 0.4900
Epoch 79/100
10/10 [==============================] - 0s - loss: 0.5333 - acc: 0.8456 - val_loss: 3.2647 - val_acc: 0.4850
Epoch 80/100
10/10 [==============================] - 0s - loss: 0.4909 - acc: 0.8567 - val_loss: 3.2619 - val_acc: 0.4950
Epoch 81/100
10/10 [==============================] - 0s - loss: 0.4917 - acc: 0.8663 - val_loss: 3.2388 - val_acc: 0.5000
Epoch 82/100
10/10 [==============================] - 0s - loss: 0.5272 - acc: 0.8392 - val_loss: 3.2498 - val_acc: 0.5000
Epoch 83/100
10/10 [==============================] - 0s - loss: 0.4136 - acc: 0.8706 - val_loss: 3.2486 - val_acc: 0.4900
Epoch 84/100
10/10 [==============================] - 0s - loss: 0.5809 - acc: 0.8302 - val_loss: 3.2475 - val_acc: 0.4850
Epoch 85/100
10/10 [==============================] - 0s - loss: 0.4741 - acc: 0.8759 - val_loss: 3.2433 - val_acc: 0.5000
Epoch 86/100
10/10 [==============================] - 0s - loss: 0.4661 - acc: 0.8761 - val_loss: 3.2433 - val_acc: 0.5050
Epoch 87/100
10/10 [==============================] - 0s - loss: 0.3839 - acc: 0.8885 - val_loss: 3.2371 - val_acc: 0.5050
Epoch 88/100
10/10 [==============================] - 0s - loss: 0.4196 - acc: 0.8652 - val_loss: 3.2329 - val_acc: 0.5000
Epoch 89/100
10/10 [==============================] - 0s - loss: 0.4234 - acc: 0.8716 - val_loss: 3.2356 - val_acc: 0.4900
Epoch 90/100
10/10 [==============================] - 0s - loss: 0.4075 - acc: 0.8863 - val_loss: 3.2170 - val_acc: 0.4950
Epoch 91/100
10/10 [==============================] - 0s - loss: 0.3557 - acc: 0.8874 - val_loss: 3.2388 - val_acc: 0.5000
Epoch 92/100
10/10 [==============================] - 0s - loss: 0.3918 - acc: 0.8858 - val_loss: 3.2193 - val_acc: 0.5100
Epoch 93/100
10/10 [==============================] - 0s - loss: 0.2882 - acc: 0.9147 - val_loss: 3.2299 - val_acc: 0.5000
Epoch 94/100
10/10 [==============================] - 0s - loss: 0.4053 - acc: 0.8866 - val_loss: 3.2068 - val_acc: 0.5050
Epoch 95/100
10/10 [==============================] - 0s - loss: 0.3589 - acc: 0.8976 - val_loss: 3.1966 - val_acc: 0.5050
Epoch 96/100
10/10 [==============================] - 0s - loss: 0.3244 - acc: 0.8904 - val_loss: 3.2181 - val_acc: 0.5050
Epoch 97/100
10/10 [==============================] - 0s - loss: 0.3607 - acc: 0.8902 - val_loss: 3.2260 - val_acc: 0.5000
Epoch 98/100
10/10 [==============================] - 0s - loss: 0.3242 - acc: 0.9097 - val_loss: 3.2146 - val_acc: 0.5100
Epoch 99/100
10/10 [==============================] - 0s - loss: 0.3457 - acc: 0.9094 - val_loss: 3.2360 - val_acc: 0.4950
Epoch 100/100
10/10 [==============================] - 0s - loss: 0.4023 - acc: 0.8930 - val_loss: 3.2632 - val_acc: 0.5050
Confusion matrix
[[16  0  0  2  0  0  0  2  3  0]
 [ 0 11  1  1  0  0  0  0  0  5]
 [ 2  1  6  1  4  1  2  1  0  0]
 [ 1  0  0  4  0  4  4  1  0  1]
 [ 2  0  2  0  7  3  1  4  1  1]
 [ 0  1  2  5  2  8  1  1  0  1]
 [ 1  0  3  1  2  0 17  1  0  0]
 [ 2  2  1  2  2  1  0  6  0  2]
 [ 0  2  0  3  2  0  0  0 13  0]
 [ 1  4  0  1  0  1  0  0  1 13]]
Test score: 3.26324853897
Test accuracy: 0.505
Output results are saved in output_2ef16725-3167-4d61-bd9d-8aa79b3e8c72
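
With only 800 training images, the frozen pretrained features bring the classifier to roughly 50% test accuracy over ten classes, well above the 10% chance level, although the small training set clearly overfits: training accuracy climbs to about 0.89 while the validation loss plateaus around 3.2. In the scikit-learn convention a confusion matrix has true classes as rows and predicted classes as columns; a matrix and accuracy like those above can be recomputed from the trained model as sketched below (model, X_test, and y_test are assumed to be the objects produced by the run).

# Recomputing the confusion matrix and test accuracy from predictions.
# `model`, `X_test`, `y_test` are assumed to come from the run above.
import numpy as np
from sklearn.metrics import accuracy_score, confusion_matrix

y_prob = model.predict(X_test)           # (200, 10) class probabilities
y_pred = np.argmax(y_prob, axis=1)       # predicted class index per sample
y_true = np.ravel(y_test)                # integer labels, shape (200,)

print('Confusion matrix')
print(confusion_matrix(y_true, y_pred))  # rows: true class, columns: predicted
print('Test accuracy:', accuracy_score(y_true, y_pred))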
