In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        
        self.add(layers.Dense(Nh_l[0], activation='relu', 
                 input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dropout(0.01))
        
        self.add(layers.Dense(Nh_l[1], activation='relu', 
                 name='Hidden-2'))       
        self.add(layers.Dropout(0.01))
        
        self.add(layers.Dense(Nout, activation='softmax'))

        self.compile(loss='categorical_crossentropy', 
                         optimizer='adam', 
                         metrics=['accuracy'])
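
# A minimal usage sketch of the DNN class above (an illustration only, assuming
# CIFAR-10 inputs flattened to 32 * 32 * 3 = 3072 features and 10 classes);
# summary() just prints the layer structure, nothing is trained here.
def show_dnn_structure():
    model = DNN(Nin=32 * 32 * 3, Nh_l=[100, 50], Nout=10)
    model.summary()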

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H, C = X_train.shape
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    X_train = X_train / 255.0
    X_test = X_test / 255.0
    
    return (X_train, Y_train), (X_test, Y_test)
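
# A quick shape sanity check for Data_func (a sketch, assuming the standard
# CIFAR-10 split of 50,000 training and 10,000 test images of 32x32x3 pixels):
def check_data_shapes():
    (X_train, Y_train), (X_test, Y_test) = Data_func()
    assert X_train.shape == (50000, 32 * 32 * 3)  # flattened to 3072 features
    assert Y_train.shape == (50000, 10)           # one-hot labels, 10 classes
    assert X_test.shape == (10000, 32 * 32 * 3)
    assert Y_test.shape == (10000, 10)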


# Training performance analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt


# Classification DNN training and testing ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
    
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()
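
# Note: main() is defined but not called in this cell; the next cell (In [2])
# runs the same steps inline with Nh_l = [50, 50] and epochs=100.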


Using TensorFlow backend.

In [2]:
Nin = 32 * 32 * 3  # 3072 for flattened CIFAR-10 (unused below; the model takes X_train.shape[1])
Nh_l = [50, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 5s - loss: 2.0069 - acc: 0.2572 - val_loss: 1.8648 - val_acc: 0.3290
Epoch 2/100
40000/40000 [==============================] - 3s - loss: 1.8403 - acc: 0.3310 - val_loss: 1.8155 - val_acc: 0.3460
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.7981 - acc: 0.3485 - val_loss: 1.7769 - val_acc: 0.3624
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.7782 - acc: 0.3571 - val_loss: 1.7821 - val_acc: 0.3560
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.7577 - acc: 0.3680 - val_loss: 1.7746 - val_acc: 0.3566
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.7425 - acc: 0.3725 - val_loss: 1.7547 - val_acc: 0.3689
Epoch 7/100
40000/40000 [==============================] - 3s - loss: 1.7208 - acc: 0.3827 - val_loss: 1.7270 - val_acc: 0.3815
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.7176 - acc: 0.3835 - val_loss: 1.7143 - val_acc: 0.3859
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.7085 - acc: 0.3874 - val_loss: 1.7446 - val_acc: 0.3763
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.7052 - acc: 0.3882 - val_loss: 1.7035 - val_acc: 0.3918
Epoch 11/100
40000/40000 [==============================] - 3s - loss: 1.6974 - acc: 0.3897 - val_loss: 1.7069 - val_acc: 0.3908
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.6851 - acc: 0.3963 - val_loss: 1.7193 - val_acc: 0.3871
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.6815 - acc: 0.3957 - val_loss: 1.7302 - val_acc: 0.3741
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.6750 - acc: 0.4007 - val_loss: 1.7199 - val_acc: 0.3823
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.6704 - acc: 0.4002 - val_loss: 1.6860 - val_acc: 0.3931
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.6607 - acc: 0.4056 - val_loss: 1.7301 - val_acc: 0.3734
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.6608 - acc: 0.4047 - val_loss: 1.6939 - val_acc: 0.3917
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.6633 - acc: 0.4045 - val_loss: 1.7281 - val_acc: 0.3674
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.6535 - acc: 0.4075 - val_loss: 1.6725 - val_acc: 0.4002
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.6496 - acc: 0.4090 - val_loss: 1.7053 - val_acc: 0.3869
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.6466 - acc: 0.4093 - val_loss: 1.6659 - val_acc: 0.4041
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.6474 - acc: 0.4097 - val_loss: 1.6670 - val_acc: 0.3992
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.6416 - acc: 0.4139 - val_loss: 1.6569 - val_acc: 0.4023
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.6359 - acc: 0.4148 - val_loss: 1.6655 - val_acc: 0.4041
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.6321 - acc: 0.4145 - val_loss: 1.6754 - val_acc: 0.3969
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.6306 - acc: 0.4164 - val_loss: 1.6582 - val_acc: 0.4009
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.6326 - acc: 0.4146 - val_loss: 1.6846 - val_acc: 0.3972
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.6297 - acc: 0.4145 - val_loss: 1.6794 - val_acc: 0.4000
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.6280 - acc: 0.4140 - val_loss: 1.6639 - val_acc: 0.3994
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.6274 - acc: 0.4154 - val_loss: 1.6585 - val_acc: 0.3983
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.6223 - acc: 0.4173 - val_loss: 1.6935 - val_acc: 0.3818
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.6212 - acc: 0.4168 - val_loss: 1.6659 - val_acc: 0.4002
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.6193 - acc: 0.4183 - val_loss: 1.6775 - val_acc: 0.3949
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.6215 - acc: 0.4163 - val_loss: 1.6541 - val_acc: 0.4022
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.6174 - acc: 0.4184 - val_loss: 1.6646 - val_acc: 0.3954
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.6180 - acc: 0.4183 - val_loss: 1.6518 - val_acc: 0.4071
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.6113 - acc: 0.4226 - val_loss: 1.6717 - val_acc: 0.3947
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.6129 - acc: 0.4205 - val_loss: 1.6532 - val_acc: 0.4053
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.6071 - acc: 0.4226 - val_loss: 1.6525 - val_acc: 0.4056
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.6095 - acc: 0.4209 - val_loss: 1.6602 - val_acc: 0.4045
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.6055 - acc: 0.4223 - val_loss: 1.6726 - val_acc: 0.3957
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.6085 - acc: 0.4225 - val_loss: 1.6597 - val_acc: 0.3981
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.6041 - acc: 0.4233 - val_loss: 1.6551 - val_acc: 0.3957
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.6086 - acc: 0.4208 - val_loss: 1.6573 - val_acc: 0.3983
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.6072 - acc: 0.4221 - val_loss: 1.6794 - val_acc: 0.3923
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.6079 - acc: 0.4230 - val_loss: 1.6924 - val_acc: 0.3915
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.6003 - acc: 0.4245 - val_loss: 1.6514 - val_acc: 0.4020
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.5971 - acc: 0.4251 - val_loss: 1.6472 - val_acc: 0.4022
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.6052 - acc: 0.4228 - val_loss: 1.6606 - val_acc: 0.4014
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.6038 - acc: 0.4242 - val_loss: 1.6542 - val_acc: 0.4063
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.6004 - acc: 0.4259 - val_loss: 1.6529 - val_acc: 0.4043
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.5957 - acc: 0.4254 - val_loss: 1.6561 - val_acc: 0.4060
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.5969 - acc: 0.4244 - val_loss: 1.6617 - val_acc: 0.4014
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.5978 - acc: 0.4247 - val_loss: 1.6536 - val_acc: 0.4075
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.5972 - acc: 0.4257 - val_loss: 1.6487 - val_acc: 0.4094
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.5975 - acc: 0.4247 - val_loss: 1.6540 - val_acc: 0.4065
Epoch 57/100
40000/40000 [==============================] - 4s - loss: 1.5910 - acc: 0.4270 - val_loss: 1.6835 - val_acc: 0.3930
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.5886 - acc: 0.4280 - val_loss: 1.6428 - val_acc: 0.4099
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.5922 - acc: 0.4283 - val_loss: 1.6477 - val_acc: 0.4036
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.5943 - acc: 0.4261 - val_loss: 1.6440 - val_acc: 0.4072
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.5893 - acc: 0.4294 - val_loss: 1.6447 - val_acc: 0.4116
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.5851 - acc: 0.4292 - val_loss: 1.6820 - val_acc: 0.3943
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.5848 - acc: 0.4303 - val_loss: 1.7240 - val_acc: 0.3857
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.5882 - acc: 0.4286 - val_loss: 1.6495 - val_acc: 0.4045
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.5869 - acc: 0.4299 - val_loss: 1.6716 - val_acc: 0.3960
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.5884 - acc: 0.4301 - val_loss: 1.6565 - val_acc: 0.4040
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.5832 - acc: 0.4302 - val_loss: 1.6538 - val_acc: 0.4017
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.5859 - acc: 0.4305 - val_loss: 1.6486 - val_acc: 0.4100
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.5811 - acc: 0.4320 - val_loss: 1.6649 - val_acc: 0.4020
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.5825 - acc: 0.4303 - val_loss: 1.6404 - val_acc: 0.4103
Epoch 71/100
40000/40000 [==============================] - 4s - loss: 1.5840 - acc: 0.4299 - val_loss: 1.6702 - val_acc: 0.4003
Epoch 72/100
40000/40000 [==============================] - 4s - loss: 1.5784 - acc: 0.4309 - val_loss: 1.6550 - val_acc: 0.4033
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.5761 - acc: 0.4337 - val_loss: 1.6531 - val_acc: 0.4032
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.5781 - acc: 0.4343 - val_loss: 1.6488 - val_acc: 0.4080
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.5769 - acc: 0.4326 - val_loss: 1.6466 - val_acc: 0.4097
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.5819 - acc: 0.4302 - val_loss: 1.6442 - val_acc: 0.4115
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.5759 - acc: 0.4323 - val_loss: 1.6598 - val_acc: 0.4040
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.5771 - acc: 0.4353 - val_loss: 1.6412 - val_acc: 0.4130
Epoch 79/100
40000/40000 [==============================] - 4s - loss: 1.5839 - acc: 0.4283 - val_loss: 1.6461 - val_acc: 0.4073
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.5774 - acc: 0.4324 - val_loss: 1.6557 - val_acc: 0.4060
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.5833 - acc: 0.4309 - val_loss: 1.6526 - val_acc: 0.4030
Epoch 82/100
40000/40000 [==============================] - 4s - loss: 1.5766 - acc: 0.4325 - val_loss: 1.6673 - val_acc: 0.4001
Epoch 83/100
40000/40000 [==============================] - 4s - loss: 1.5758 - acc: 0.4345 - val_loss: 1.6496 - val_acc: 0.4089
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.5754 - acc: 0.4351 - val_loss: 1.6568 - val_acc: 0.4061
Epoch 85/100
40000/40000 [==============================] - 5s - loss: 1.5723 - acc: 0.4330 - val_loss: 1.6480 - val_acc: 0.4103
Epoch 86/100
40000/40000 [==============================] - 5s - loss: 1.5753 - acc: 0.4337 - val_loss: 1.6523 - val_acc: 0.4071
Epoch 87/100
40000/40000 [==============================] - 6s - loss: 1.5753 - acc: 0.4334 - val_loss: 1.6619 - val_acc: 0.4028
Epoch 88/100
40000/40000 [==============================] - 5s - loss: 1.5821 - acc: 0.4315 - val_loss: 1.6772 - val_acc: 0.3906
Epoch 89/100
40000/40000 [==============================] - 5s - loss: 1.5793 - acc: 0.4316 - val_loss: 1.7047 - val_acc: 0.3832
Epoch 90/100
40000/40000 [==============================] - 5s - loss: 1.5708 - acc: 0.4359 - val_loss: 1.6661 - val_acc: 0.3959
Epoch 91/100
40000/40000 [==============================] - 5s - loss: 1.5686 - acc: 0.4344 - val_loss: 1.6554 - val_acc: 0.4074
Epoch 92/100
40000/40000 [==============================] - 5s - loss: 1.5768 - acc: 0.4309 - val_loss: 1.6559 - val_acc: 0.4058
Epoch 93/100
40000/40000 [==============================] - 5s - loss: 1.5747 - acc: 0.4328 - val_loss: 1.6560 - val_acc: 0.3998
Epoch 94/100
40000/40000 [==============================] - 5s - loss: 1.5713 - acc: 0.4346 - val_loss: 1.6494 - val_acc: 0.4070
Epoch 95/100
40000/40000 [==============================] - 5s - loss: 1.5755 - acc: 0.4306 - val_loss: 1.6619 - val_acc: 0.4045
Epoch 96/100
40000/40000 [==============================] - 5s - loss: 1.5663 - acc: 0.4353 - val_loss: 1.6536 - val_acc: 0.4025
Epoch 97/100
40000/40000 [==============================] - 6s - loss: 1.5648 - acc: 0.4383 - val_loss: 1.6735 - val_acc: 0.4003
Epoch 98/100
40000/40000 [==============================] - 5s - loss: 1.5680 - acc: 0.4371 - val_loss: 1.6527 - val_acc: 0.4037
Epoch 99/100
40000/40000 [==============================] - 5s - loss: 1.5649 - acc: 0.4344 - val_loss: 1.6721 - val_acc: 0.3971
Epoch 100/100
40000/40000 [==============================] - 5s - loss: 1.5668 - acc: 0.4358 - val_loss: 1.6726 - val_acc: 0.3970
Test Loss and Accuracy -> [1.653979502916336, 0.41009998738765718]

In [ ]: