Example 9-1: Increasing the Amount of Image Data (Data Augmentation)


In [2]:
# %load /home/sjkim/.jupyter/head.py
%matplotlib inline
%load_ext autoreload 
%autoreload 2
from importlib import reload

import matplotlib.pyplot as plt
import numpy as np

import pandas as pd
import os
#os.environ["CUDA_VISIBLE_DEVICES"]="0"

# seaborn
#import seaborn as sns
#sns.set( style = 'white', font_scale = 1.7)
#sns.set_style('ticks')
#plt.rcParams['savefig.dpi'] = 200

# font for matplotlib
#import matplotlib
#import matplotlib.font_manager as fm
#fm.get_fontconfig_fonts()
#font_location = '/usr/share/fonts/truetype/nanum/NanumGothicBold.ttf'
#font_name = fm.FontProperties(fname=font_location).get_name()
#matplotlib.rc('font', family=font_name)


The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload

In [2]:
import ex9_1_applications_agumentation as example


Using TensorFlow backend.

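The module ex9_1_applications_agumentation ships with the book's example code and is not listed in this notebook. As a rough guide to what main() does, the sketch below follows the same pattern suggested by the output that follows: a small CIFAR-10 subset of 800 training and 200 test images of shape (32, 32, 3), augmented with Keras' ImageDataGenerator and fed to a small CNN for 100 epochs of 10 steps each. The layer sizes, augmentation parameters, and optimizer here are illustrative assumptions, not the module's actual settings.

# Minimal sketch (assumptions noted above), not the book's actual module.
import numpy as np
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical

# Keep only 1,000 images (800 train / 200 test) to mimic a data-scarce setting.
(x, y), _ = cifar10.load_data()
x = x.astype('float32') / 255.0
x_train, y_train = x[:800], to_categorical(y[:800], 10)
x_test, y_test = x[800:1000], to_categorical(y[800:1000], 10)

# Random rotations, shifts, and flips generate new variants of each image.
datagen = ImageDataGenerator(rotation_range=10,
                             width_shift_range=0.1,
                             height_shift_range=0.1,
                             horizontal_flip=True)

model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),
    Dense(10, activation='softmax'),
])
model.compile(optimizer='adadelta', loss='categorical_crossentropy',
              metrics=['accuracy'])

# Train on augmented batches; 10 steps of 80 images per epoch matches the log below.
model.fit_generator(datagen.flow(x_train, y_train, batch_size=80),
                    steps_per_epoch=10, epochs=100,
                    validation_data=(x_test, y_test))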
In [3]:
example.main()


(800, 32, 32, 3) (800, 1)
X_train shape: (800, 32, 32, 3)
800 train samples
200 test samples
data.input_shape (32, 32, 3)
(800, 32, 32, 3)
Epoch 1/100
10/10 [==============================] - 2s - loss: 2.2830 - acc: 0.1350 - val_loss: 2.2549 - val_acc: 0.1150
Epoch 2/100
10/10 [==============================] - 0s - loss: 2.2529 - acc: 0.1421 - val_loss: 2.1973 - val_acc: 0.2200
Epoch 3/100
10/10 [==============================] - 0s - loss: 2.2079 - acc: 0.2034 - val_loss: 2.1641 - val_acc: 0.2000
Epoch 4/100
10/10 [==============================] - 0s - loss: 2.1363 - acc: 0.2244 - val_loss: 2.0768 - val_acc: 0.2600
Epoch 5/100
10/10 [==============================] - 0s - loss: 2.0652 - acc: 0.2481 - val_loss: 2.0373 - val_acc: 0.3150
Epoch 6/100
10/10 [==============================] - 0s - loss: 2.0235 - acc: 0.2917 - val_loss: 1.9584 - val_acc: 0.2950
Epoch 7/100
10/10 [==============================] - 0s - loss: 1.9935 - acc: 0.2608 - val_loss: 2.2242 - val_acc: 0.2000
Epoch 8/100
10/10 [==============================] - 0s - loss: 1.9463 - acc: 0.3143 - val_loss: 1.9550 - val_acc: 0.3150
Epoch 9/100
10/10 [==============================] - 0s - loss: 1.9040 - acc: 0.3129 - val_loss: 1.9361 - val_acc: 0.2950
Epoch 10/100
10/10 [==============================] - 0s - loss: 1.7629 - acc: 0.3817 - val_loss: 1.8505 - val_acc: 0.3600
Epoch 11/100
10/10 [==============================] - 0s - loss: 1.7995 - acc: 0.3668 - val_loss: 1.8306 - val_acc: 0.3550
Epoch 12/100
10/10 [==============================] - 0s - loss: 1.8312 - acc: 0.3667 - val_loss: 1.8521 - val_acc: 0.3650
Epoch 13/100
10/10 [==============================] - 0s - loss: 1.6824 - acc: 0.4182 - val_loss: 1.7835 - val_acc: 0.4150
Epoch 14/100
10/10 [==============================] - 0s - loss: 1.6749 - acc: 0.4269 - val_loss: 1.8076 - val_acc: 0.3800
Epoch 15/100
10/10 [==============================] - 0s - loss: 1.5412 - acc: 0.4790 - val_loss: 1.9165 - val_acc: 0.3150
Epoch 16/100
10/10 [==============================] - 0s - loss: 1.4986 - acc: 0.4741 - val_loss: 2.0921 - val_acc: 0.2750
Epoch 17/100
10/10 [==============================] - 0s - loss: 1.5754 - acc: 0.4750 - val_loss: 1.7618 - val_acc: 0.3800
Epoch 18/100
10/10 [==============================] - 0s - loss: 1.3775 - acc: 0.5380 - val_loss: 1.8593 - val_acc: 0.3850
Epoch 19/100
10/10 [==============================] - 0s - loss: 1.4424 - acc: 0.4867 - val_loss: 1.8172 - val_acc: 0.3900
Epoch 20/100
10/10 [==============================] - 0s - loss: 1.2821 - acc: 0.5716 - val_loss: 1.8098 - val_acc: 0.3550
Epoch 21/100
10/10 [==============================] - 0s - loss: 1.2953 - acc: 0.5744 - val_loss: 1.8673 - val_acc: 0.3600
Epoch 22/100
10/10 [==============================] - 0s - loss: 1.2846 - acc: 0.5762 - val_loss: 1.7395 - val_acc: 0.4000
Epoch 23/100
10/10 [==============================] - 0s - loss: 1.1899 - acc: 0.5953 - val_loss: 1.7552 - val_acc: 0.3750
Epoch 24/100
10/10 [==============================] - 0s - loss: 1.1967 - acc: 0.6004 - val_loss: 1.7696 - val_acc: 0.3950
Epoch 25/100
10/10 [==============================] - 0s - loss: 1.0650 - acc: 0.6405 - val_loss: 1.7183 - val_acc: 0.4550
Epoch 26/100
10/10 [==============================] - 0s - loss: 1.2030 - acc: 0.5827 - val_loss: 1.7491 - val_acc: 0.3850
Epoch 27/100
10/10 [==============================] - 0s - loss: 0.9985 - acc: 0.6600 - val_loss: 1.7695 - val_acc: 0.4300
Epoch 28/100
10/10 [==============================] - 0s - loss: 1.0335 - acc: 0.6533 - val_loss: 1.9064 - val_acc: 0.3150
Epoch 29/100
10/10 [==============================] - 0s - loss: 0.9492 - acc: 0.6551 - val_loss: 1.7885 - val_acc: 0.4150
Epoch 30/100
10/10 [==============================] - 0s - loss: 0.9998 - acc: 0.6496 - val_loss: 1.8088 - val_acc: 0.3900
Epoch 31/100
10/10 [==============================] - 0s - loss: 0.8554 - acc: 0.7172 - val_loss: 1.8497 - val_acc: 0.3800
Epoch 32/100
10/10 [==============================] - 0s - loss: 0.8279 - acc: 0.7234 - val_loss: 1.8303 - val_acc: 0.3650
Epoch 33/100
10/10 [==============================] - 0s - loss: 0.7778 - acc: 0.7437 - val_loss: 1.8187 - val_acc: 0.3950
Epoch 34/100
10/10 [==============================] - 0s - loss: 0.7735 - acc: 0.7337 - val_loss: 1.8299 - val_acc: 0.4200
Epoch 35/100
10/10 [==============================] - 0s - loss: 0.7918 - acc: 0.7368 - val_loss: 1.8473 - val_acc: 0.4100
Epoch 36/100
10/10 [==============================] - 0s - loss: 0.7299 - acc: 0.7342 - val_loss: 1.8116 - val_acc: 0.4000
Epoch 37/100
10/10 [==============================] - 0s - loss: 0.6551 - acc: 0.7853 - val_loss: 1.9142 - val_acc: 0.4000
Epoch 38/100
10/10 [==============================] - 0s - loss: 0.6704 - acc: 0.7890 - val_loss: 1.8827 - val_acc: 0.4000
Epoch 39/100
10/10 [==============================] - 0s - loss: 0.6102 - acc: 0.7968 - val_loss: 1.9297 - val_acc: 0.4050
Epoch 40/100
10/10 [==============================] - 0s - loss: 0.6219 - acc: 0.8032 - val_loss: 1.9055 - val_acc: 0.4050
Epoch 41/100
10/10 [==============================] - 0s - loss: 0.5339 - acc: 0.8336 - val_loss: 1.9282 - val_acc: 0.4000
Epoch 42/100
10/10 [==============================] - 0s - loss: 0.6102 - acc: 0.7857 - val_loss: 2.0008 - val_acc: 0.3950
Epoch 43/100
10/10 [==============================] - 0s - loss: 0.5535 - acc: 0.8188 - val_loss: 1.8737 - val_acc: 0.3750
Epoch 44/100
10/10 [==============================] - 0s - loss: 0.5400 - acc: 0.8383 - val_loss: 1.9275 - val_acc: 0.3900
Epoch 45/100
10/10 [==============================] - 0s - loss: 0.4970 - acc: 0.8546 - val_loss: 1.9633 - val_acc: 0.3900
Epoch 46/100
10/10 [==============================] - 0s - loss: 0.4653 - acc: 0.8416 - val_loss: 1.9860 - val_acc: 0.4000
Epoch 47/100
10/10 [==============================] - 0s - loss: 0.4957 - acc: 0.8448 - val_loss: 1.9586 - val_acc: 0.4150
Epoch 48/100
10/10 [==============================] - 0s - loss: 0.4152 - acc: 0.8713 - val_loss: 1.9868 - val_acc: 0.3800
Epoch 49/100
10/10 [==============================] - 0s - loss: 0.4139 - acc: 0.8660 - val_loss: 2.1303 - val_acc: 0.3800
Epoch 50/100
10/10 [==============================] - 0s - loss: 0.4070 - acc: 0.8730 - val_loss: 1.9920 - val_acc: 0.3950
Epoch 51/100
10/10 [==============================] - 0s - loss: 0.3935 - acc: 0.8776 - val_loss: 1.9491 - val_acc: 0.3850
Epoch 52/100
10/10 [==============================] - 0s - loss: 0.3708 - acc: 0.8935 - val_loss: 2.0625 - val_acc: 0.3850
Epoch 53/100
10/10 [==============================] - 0s - loss: 0.3874 - acc: 0.8815 - val_loss: 2.0695 - val_acc: 0.4000
Epoch 54/100
10/10 [==============================] - 0s - loss: 0.3602 - acc: 0.8981 - val_loss: 2.0742 - val_acc: 0.4000
Epoch 55/100
10/10 [==============================] - 0s - loss: 0.3599 - acc: 0.8897 - val_loss: 2.0677 - val_acc: 0.4000
Epoch 56/100
10/10 [==============================] - 0s - loss: 0.3377 - acc: 0.9012 - val_loss: 2.2486 - val_acc: 0.3900
Epoch 57/100
10/10 [==============================] - 0s - loss: 0.3060 - acc: 0.9074 - val_loss: 2.1315 - val_acc: 0.4000
Epoch 58/100
10/10 [==============================] - 0s - loss: 0.3421 - acc: 0.9000 - val_loss: 2.0544 - val_acc: 0.4200
Epoch 59/100
10/10 [==============================] - 0s - loss: 0.2822 - acc: 0.9222 - val_loss: 2.1310 - val_acc: 0.4000
Epoch 60/100
10/10 [==============================] - 0s - loss: 0.2677 - acc: 0.9276 - val_loss: 2.1145 - val_acc: 0.4050
Epoch 61/100
10/10 [==============================] - 0s - loss: 0.2822 - acc: 0.9253 - val_loss: 2.2248 - val_acc: 0.4050
Epoch 62/100
10/10 [==============================] - 0s - loss: 0.2629 - acc: 0.9225 - val_loss: 2.2128 - val_acc: 0.3950
Epoch 63/100
10/10 [==============================] - 0s - loss: 0.2517 - acc: 0.9324 - val_loss: 2.2132 - val_acc: 0.4050
Epoch 64/100
10/10 [==============================] - 0s - loss: 0.2601 - acc: 0.9349 - val_loss: 2.2305 - val_acc: 0.3850
Epoch 65/100
10/10 [==============================] - 0s - loss: 0.2333 - acc: 0.9399 - val_loss: 2.2382 - val_acc: 0.3750
Epoch 66/100
10/10 [==============================] - 0s - loss: 0.2266 - acc: 0.9393 - val_loss: 2.2904 - val_acc: 0.3950
Epoch 67/100
10/10 [==============================] - 0s - loss: 0.2345 - acc: 0.9299 - val_loss: 2.2886 - val_acc: 0.4000
Epoch 68/100
10/10 [==============================] - 0s - loss: 0.2126 - acc: 0.9391 - val_loss: 2.2925 - val_acc: 0.4100
Epoch 69/100
10/10 [==============================] - 0s - loss: 0.2192 - acc: 0.9322 - val_loss: 2.3837 - val_acc: 0.3700
Epoch 70/100
10/10 [==============================] - 0s - loss: 0.2039 - acc: 0.9489 - val_loss: 2.3866 - val_acc: 0.3800
Epoch 71/100
10/10 [==============================] - 0s - loss: 0.2179 - acc: 0.9310 - val_loss: 2.3099 - val_acc: 0.3950
Epoch 72/100
10/10 [==============================] - 0s - loss: 0.2195 - acc: 0.9338 - val_loss: 2.3587 - val_acc: 0.3650
Epoch 73/100
10/10 [==============================] - 0s - loss: 0.1977 - acc: 0.9510 - val_loss: 2.4040 - val_acc: 0.4000
Epoch 74/100
10/10 [==============================] - 0s - loss: 0.1923 - acc: 0.9436 - val_loss: 2.3637 - val_acc: 0.3900
Epoch 75/100
10/10 [==============================] - 0s - loss: 0.1989 - acc: 0.9383 - val_loss: 2.3917 - val_acc: 0.3850
Epoch 76/100
10/10 [==============================] - 0s - loss: 0.1783 - acc: 0.9517 - val_loss: 2.4202 - val_acc: 0.4000
Epoch 77/100
10/10 [==============================] - 0s - loss: 0.1829 - acc: 0.9473 - val_loss: 2.6715 - val_acc: 0.3950
Epoch 78/100
10/10 [==============================] - 0s - loss: 0.1928 - acc: 0.9380 - val_loss: 2.3455 - val_acc: 0.3850
Epoch 79/100
10/10 [==============================] - 0s - loss: 0.1552 - acc: 0.9587 - val_loss: 2.3645 - val_acc: 0.4200
Epoch 80/100
10/10 [==============================] - 0s - loss: 0.1322 - acc: 0.9685 - val_loss: 2.4730 - val_acc: 0.4050
Epoch 81/100
10/10 [==============================] - 0s - loss: 0.1531 - acc: 0.9585 - val_loss: 2.4950 - val_acc: 0.4050
Epoch 82/100
10/10 [==============================] - 0s - loss: 0.1884 - acc: 0.9378 - val_loss: 2.4360 - val_acc: 0.4000
Epoch 83/100
10/10 [==============================] - 0s - loss: 0.1517 - acc: 0.9593 - val_loss: 2.4540 - val_acc: 0.3750
Epoch 84/100
10/10 [==============================] - 0s - loss: 0.1198 - acc: 0.9709 - val_loss: 2.5794 - val_acc: 0.3750
Epoch 85/100
10/10 [==============================] - 0s - loss: 0.1370 - acc: 0.9669 - val_loss: 2.4486 - val_acc: 0.3900
Epoch 86/100
10/10 [==============================] - 0s - loss: 0.1375 - acc: 0.9606 - val_loss: 2.5145 - val_acc: 0.3750
Epoch 87/100
10/10 [==============================] - 0s - loss: 0.1322 - acc: 0.9709 - val_loss: 2.5161 - val_acc: 0.3800
Epoch 88/100
10/10 [==============================] - 0s - loss: 0.1474 - acc: 0.9622 - val_loss: 2.4697 - val_acc: 0.4150
Epoch 89/100
10/10 [==============================] - 0s - loss: 0.1265 - acc: 0.9648 - val_loss: 2.5103 - val_acc: 0.3900
Epoch 90/100
10/10 [==============================] - 0s - loss: 0.1150 - acc: 0.9748 - val_loss: 2.5871 - val_acc: 0.3900
Epoch 91/100
10/10 [==============================] - 0s - loss: 0.1528 - acc: 0.9566 - val_loss: 2.5095 - val_acc: 0.4000
Epoch 92/100
10/10 [==============================] - 0s - loss: 0.1305 - acc: 0.9662 - val_loss: 2.5378 - val_acc: 0.3850
Epoch 93/100
10/10 [==============================] - 0s - loss: 0.1195 - acc: 0.9676 - val_loss: 2.5647 - val_acc: 0.3650
Epoch 94/100
10/10 [==============================] - 0s - loss: 0.1206 - acc: 0.9727 - val_loss: 2.5445 - val_acc: 0.4000
Epoch 95/100
10/10 [==============================] - 0s - loss: 0.0966 - acc: 0.9795 - val_loss: 2.6122 - val_acc: 0.4200
Epoch 96/100
10/10 [==============================] - 0s - loss: 0.1290 - acc: 0.9608 - val_loss: 2.5815 - val_acc: 0.3800
Epoch 97/100
10/10 [==============================] - 0s - loss: 0.1080 - acc: 0.9658 - val_loss: 2.5738 - val_acc: 0.3950
Epoch 98/100
10/10 [==============================] - 0s - loss: 0.1044 - acc: 0.9695 - val_loss: 2.5774 - val_acc: 0.3850
Epoch 99/100
10/10 [==============================] - 0s - loss: 0.1283 - acc: 0.9609 - val_loss: 2.5939 - val_acc: 0.3950
Epoch 100/100
10/10 [==============================] - 0s - loss: 0.1180 - acc: 0.9621 - val_loss: 2.5520 - val_acc: 0.3850
Confusion matrix
[[10  1  1  1  2  1  0  0  5  1]
 [ 2 15  0  0  1  0  1  0  0  3]
 [ 2  0  4  3  4  3  2  1  0  1]
 [ 1  3  2  5  3  4  0  0  1  2]
 [ 4  0  4  0  3  2  2  2  0  1]
 [ 1  2  1  4  3 11  0  1  0  0]
 [ 0  1  1  2  3  2  7  1  1  0]
 [ 0  2  0  2  1  1  0  9  0  0]
 [ 3  3  1  0  1  2  0  0  3  2]
 [ 1  5  1  3  0  1  1  1  3 10]]
Test score: 2.55200974464
Test accuracy: 0.385
Output results are saved in output_29d5472f-6b90-47de-9385-a1f56577559f
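By the final epochs the training accuracy is near 96% while the validation accuracy stays around 38-40%, so even with augmentation the 800-image subset is heavily overfit, which is expected with this little data. The snippet below recomputes the per-class recall and the overall test accuracy directly from the confusion matrix printed above (assuming rows are true labels and columns are predicted labels); the overall value reproduces the reported 0.385.

# Per-class recall and overall accuracy from the printed confusion matrix.
import numpy as np

cm = np.array([
    [10, 1, 1, 1, 2, 1, 0, 0, 5, 1],
    [ 2,15, 0, 0, 1, 0, 1, 0, 0, 3],
    [ 2, 0, 4, 3, 4, 3, 2, 1, 0, 1],
    [ 1, 3, 2, 5, 3, 4, 0, 0, 1, 2],
    [ 4, 0, 4, 0, 3, 2, 2, 2, 0, 1],
    [ 1, 2, 1, 4, 3,11, 0, 1, 0, 0],
    [ 0, 1, 1, 2, 3, 2, 7, 1, 1, 0],
    [ 0, 2, 0, 2, 1, 1, 0, 9, 0, 0],
    [ 3, 3, 1, 0, 1, 2, 0, 0, 3, 2],
    [ 1, 5, 1, 3, 0, 1, 1, 1, 3,10]])

recall = cm.diagonal() / cm.sum(axis=1)      # recall per true class
print(np.round(recall, 2))
print(cm.diagonal().sum() / cm.sum())        # overall accuracy: 77/200 = 0.385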

In [ ]: