In [30]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    """Fully connected classifier: two ReLU hidden layers + softmax output.

    Args:
        Nin: flattened input dimension (number of features per sample).
        Nh_l: sequence with the two hidden-layer widths, e.g. [100, 50].
        Nout: number of output classes.
    """

    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()

        # Build the three layers up front; the first one declares the input shape.
        hidden_1 = layers.Dense(Nh_l[0], activation='relu',
                                input_shape=(Nin,), name='Hidden-1')
        # Dropout (e.g. layers.Dropout(0.25)) could follow each hidden layer
        # if regularization turns out to be needed.
        hidden_2 = layers.Dense(Nh_l[1], activation='relu', name='Hidden-2')
        output = layers.Dense(Nout, activation='softmax')

        for layer in (hidden_1, hidden_2, output):
            self.add(layer)

        # Categorical cross-entropy pairs with one-hot labels and the softmax head.
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # mnist
from keras.utils import np_utils  # to_categorical


def Data_func():
    """Load CIFAR-10, one-hot encode the labels, flatten and scale the images.

    Returns:
        ((X_train, Y_train), (X_test, Y_test)) where X arrays are float in
        [0, 1] with shape (n_samples, W*H*C) and Y arrays are one-hot rows.
    """
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    # Integer class ids -> one-hot vectors for categorical cross-entropy.
    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    # Flatten each W x H x C image into a single feature vector per sample,
    # then rescale the 0..255 pixel values into [0, 1].
    _, W, H, C = X_train.shape
    n_features = W * H * C
    X_train = X_train.reshape(-1, n_features) / 255.0
    X_test = X_test.reshape(-1, n_features) / 255.0

    return (X_train, Y_train), (X_test, Y_test)


# Training-performance analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt


# Classification DNN training and testing ####################
def main():
    """Train the classification DNN on flattened CIFAR-10 and report test metrics."""
    Nh_l = [100, 50]          # hidden-layer widths
    number_of_class = 10      # CIFAR-10 has 10 categories
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    # BUG FIX: use the one-hot labels Y_train / Y_test unpacked above; the
    # lowercase y_train / y_test names used previously are undefined in this
    # scope and raised a NameError.
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100,
                        validation_split=0.2)

    performace_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performace_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()

In [31]:
# Train and evaluate the classification DNN on CIFAR-10.
Nh_l = [100, 50]          # hidden-layer widths
number_of_class = 10      # CIFAR-10 has 10 categories
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
# BUG FIX: Nin was hard-coded to 784 (MNIST's flattened size) and then never
# used; CIFAR-10 flattens to 32*32*3 = 3072. Derive it from the data instead.
Nin = X_train.shape[1]
model = DNN(Nin, Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performace_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performace_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 3s - loss: 1.9003 - acc: 0.3168 - val_loss: 1.7845 - val_acc: 0.3660
Epoch 2/100
40000/40000 [==============================] - 3s - loss: 1.7306 - acc: 0.3829 - val_loss: 1.7149 - val_acc: 0.3903
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.6635 - acc: 0.4077 - val_loss: 1.6714 - val_acc: 0.4079
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.6153 - acc: 0.4222 - val_loss: 1.6346 - val_acc: 0.4226
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.5839 - acc: 0.4374 - val_loss: 1.6970 - val_acc: 0.3932
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.5572 - acc: 0.4454 - val_loss: 1.6148 - val_acc: 0.4215
Epoch 7/100
40000/40000 [==============================] - 3s - loss: 1.5405 - acc: 0.4510 - val_loss: 1.5825 - val_acc: 0.4455
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.5145 - acc: 0.4626 - val_loss: 1.5583 - val_acc: 0.4493
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.4908 - acc: 0.4717 - val_loss: 1.5387 - val_acc: 0.4570
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.4783 - acc: 0.4750 - val_loss: 1.5855 - val_acc: 0.4456
Epoch 11/100
40000/40000 [==============================] - 3s - loss: 1.4619 - acc: 0.4796 - val_loss: 1.5385 - val_acc: 0.4600
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.4452 - acc: 0.4861 - val_loss: 1.5250 - val_acc: 0.4628
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.4344 - acc: 0.4907 - val_loss: 1.5311 - val_acc: 0.4602
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.4210 - acc: 0.4950 - val_loss: 1.5498 - val_acc: 0.4550
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.4121 - acc: 0.4962 - val_loss: 1.5426 - val_acc: 0.4570
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.4020 - acc: 0.4979 - val_loss: 1.4918 - val_acc: 0.4765
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.3958 - acc: 0.5028 - val_loss: 1.5257 - val_acc: 0.4689
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.3901 - acc: 0.5041 - val_loss: 1.4971 - val_acc: 0.4732
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.3803 - acc: 0.5057 - val_loss: 1.5044 - val_acc: 0.4701
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.3696 - acc: 0.5109 - val_loss: 1.5130 - val_acc: 0.4706
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.3698 - acc: 0.5116 - val_loss: 1.5359 - val_acc: 0.4625
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.3545 - acc: 0.5148 - val_loss: 1.5142 - val_acc: 0.4705
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.3517 - acc: 0.5184 - val_loss: 1.4748 - val_acc: 0.4810
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.3466 - acc: 0.5191 - val_loss: 1.4731 - val_acc: 0.4874
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.3358 - acc: 0.5227 - val_loss: 1.5037 - val_acc: 0.4747
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.3316 - acc: 0.5239 - val_loss: 1.4703 - val_acc: 0.4851
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.3221 - acc: 0.5260 - val_loss: 1.4848 - val_acc: 0.4827
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.3262 - acc: 0.5258 - val_loss: 1.4728 - val_acc: 0.4841
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.3189 - acc: 0.5269 - val_loss: 1.4962 - val_acc: 0.4805
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.3145 - acc: 0.5318 - val_loss: 1.4802 - val_acc: 0.4852
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.3049 - acc: 0.5330 - val_loss: 1.5118 - val_acc: 0.4838
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.3041 - acc: 0.5334 - val_loss: 1.4751 - val_acc: 0.4873
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.2944 - acc: 0.5383 - val_loss: 1.4884 - val_acc: 0.4832
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.2928 - acc: 0.5400 - val_loss: 1.4854 - val_acc: 0.4859
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.2868 - acc: 0.5415 - val_loss: 1.4700 - val_acc: 0.4916
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.2857 - acc: 0.5407 - val_loss: 1.4851 - val_acc: 0.4863
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.2844 - acc: 0.5402 - val_loss: 1.4941 - val_acc: 0.4834
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.2762 - acc: 0.5454 - val_loss: 1.4948 - val_acc: 0.4749
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.2776 - acc: 0.5443 - val_loss: 1.4934 - val_acc: 0.4835
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.2672 - acc: 0.5463 - val_loss: 1.4720 - val_acc: 0.4930
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.2718 - acc: 0.5468 - val_loss: 1.4752 - val_acc: 0.4885
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.2600 - acc: 0.5461 - val_loss: 1.4768 - val_acc: 0.4943
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.2682 - acc: 0.5483 - val_loss: 1.5543 - val_acc: 0.4770
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.2591 - acc: 0.5499 - val_loss: 1.4789 - val_acc: 0.4900
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.2484 - acc: 0.5539 - val_loss: 1.4882 - val_acc: 0.4904
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.2484 - acc: 0.5553 - val_loss: 1.4967 - val_acc: 0.4801
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.2422 - acc: 0.5549 - val_loss: 1.5068 - val_acc: 0.4779
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.2472 - acc: 0.5563 - val_loss: 1.4971 - val_acc: 0.4845
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.2397 - acc: 0.5574 - val_loss: 1.4992 - val_acc: 0.4838
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.2387 - acc: 0.5589 - val_loss: 1.5388 - val_acc: 0.4632
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.2349 - acc: 0.5598 - val_loss: 1.5572 - val_acc: 0.4727
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.2315 - acc: 0.5582 - val_loss: 1.4839 - val_acc: 0.4887
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.2266 - acc: 0.5619 - val_loss: 1.5237 - val_acc: 0.4701
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.2260 - acc: 0.5612 - val_loss: 1.4950 - val_acc: 0.4885
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.2255 - acc: 0.5614 - val_loss: 1.4961 - val_acc: 0.4893
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.2265 - acc: 0.5613 - val_loss: 1.4929 - val_acc: 0.4928
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.2239 - acc: 0.5595 - val_loss: 1.4850 - val_acc: 0.4907
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.2108 - acc: 0.5690 - val_loss: 1.4987 - val_acc: 0.4790
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.2186 - acc: 0.5648 - val_loss: 1.5107 - val_acc: 0.4865
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.2103 - acc: 0.5652 - val_loss: 1.5055 - val_acc: 0.4881
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.2013 - acc: 0.5681 - val_loss: 1.4903 - val_acc: 0.4881
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.2037 - acc: 0.5704 - val_loss: 1.5291 - val_acc: 0.4828
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.2030 - acc: 0.5708 - val_loss: 1.4897 - val_acc: 0.4870
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.1905 - acc: 0.5771 - val_loss: 1.5063 - val_acc: 0.4883
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.2019 - acc: 0.5715 - val_loss: 1.6012 - val_acc: 0.4646
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.1927 - acc: 0.5717 - val_loss: 1.4866 - val_acc: 0.4911
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.1987 - acc: 0.5716 - val_loss: 1.4925 - val_acc: 0.4878
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.1888 - acc: 0.5758 - val_loss: 1.5122 - val_acc: 0.4863
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.1883 - acc: 0.5763 - val_loss: 1.4771 - val_acc: 0.4968
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.1843 - acc: 0.5749 - val_loss: 1.5211 - val_acc: 0.4849
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.1844 - acc: 0.5729 - val_loss: 1.5011 - val_acc: 0.4875
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.1796 - acc: 0.5779 - val_loss: 1.4917 - val_acc: 0.4956
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.1718 - acc: 0.5811 - val_loss: 1.5276 - val_acc: 0.4791
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.1777 - acc: 0.5798 - val_loss: 1.4998 - val_acc: 0.4879
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.1785 - acc: 0.5767 - val_loss: 1.4985 - val_acc: 0.4908
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.1697 - acc: 0.5818 - val_loss: 1.5191 - val_acc: 0.4804
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.1721 - acc: 0.5794 - val_loss: 1.5157 - val_acc: 0.4856
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.1647 - acc: 0.5846 - val_loss: 1.5256 - val_acc: 0.4845
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.1667 - acc: 0.5818 - val_loss: 1.5163 - val_acc: 0.4887
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.1647 - acc: 0.5813 - val_loss: 1.5708 - val_acc: 0.4650
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.1633 - acc: 0.5824 - val_loss: 1.5206 - val_acc: 0.4834
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.1632 - acc: 0.5845 - val_loss: 1.5401 - val_acc: 0.4864
Epoch 83/100
40000/40000 [==============================] - 3s - loss: 1.1522 - acc: 0.5868 - val_loss: 1.5317 - val_acc: 0.4834
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.1545 - acc: 0.5860 - val_loss: 1.5229 - val_acc: 0.4907
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.1545 - acc: 0.5858 - val_loss: 1.5292 - val_acc: 0.4895
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.1539 - acc: 0.5867 - val_loss: 1.5341 - val_acc: 0.4807
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.1415 - acc: 0.5897 - val_loss: 1.5629 - val_acc: 0.4823
Epoch 88/100
40000/40000 [==============================] - 3s - loss: 1.1531 - acc: 0.5884 - val_loss: 1.5508 - val_acc: 0.4837
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.1457 - acc: 0.5886 - val_loss: 1.5305 - val_acc: 0.4819
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.1408 - acc: 0.5903 - val_loss: 1.5693 - val_acc: 0.4768
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.1368 - acc: 0.5928 - val_loss: 1.5604 - val_acc: 0.4853
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.1381 - acc: 0.5917 - val_loss: 1.5569 - val_acc: 0.4823
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.1413 - acc: 0.5939 - val_loss: 1.5262 - val_acc: 0.4845
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.1386 - acc: 0.5914 - val_loss: 1.5329 - val_acc: 0.4864
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.1342 - acc: 0.5942 - val_loss: 1.5587 - val_acc: 0.4812
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.1337 - acc: 0.5942 - val_loss: 1.5076 - val_acc: 0.4844
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.1291 - acc: 0.5953 - val_loss: 1.5247 - val_acc: 0.4829
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.1250 - acc: 0.5982 - val_loss: 1.5411 - val_acc: 0.4886
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.1259 - acc: 0.5957 - val_loss: 1.5369 - val_acc: 0.4854
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.1239 - acc: 0.5968 - val_loss: 1.5792 - val_acc: 0.4801
 9200/10000 [==========================>...] - ETA: 0sTest Loss and Accuracy -> [1.5587175858020783, 0.4866999840736389]

In [ ]: