In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        
        self.add(layers.Dense(Nh_l[0], activation='relu', 
                 input_shape=(Nin,), name='Hidden-1'))
        # self.add(layers.Dropout(0.25))
        
        self.add(layers.Dense(Nh_l[1], activation='relu', 
                 name='Hidden-2'))       
        # self.add(layers.Dropout(0.5))
        
        self.add(layers.Dense(Nout, activation='softmax'))
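        # the softmax output pairs with the categorical_crossentropy loss below,
        # which expects one-hot label vectors (see to_categorical in Data_func)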

        self.compile(loss='categorical_crossentropy', 
                         optimizer='adam', 
                         metrics=['accuracy'])

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H, C = X_train.shape
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    X_train = X_train / 255.0
    X_test = X_test / 255.0
    
    return (X_train, Y_train), (X_test, Y_test)


# Training analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt


# Train and test the classification DNN ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
    
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()


Using TensorFlow backend.
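
Data_func flattens each 32 x 32 x 3 CIFAR-10 image into a 3,072-dimensional vector and scales the pixel values to [0, 1], so the first Dense layer sees input_shape=(3072,). A quick shape check (a minimal sketch, assuming the CIFAR-10 download succeeds):

(X_train, Y_train), (X_test, Y_test) = Data_func()
print(X_train.shape, Y_train.shape)  # (50000, 3072) (50000, 10)
print(X_test.shape, Y_test.shape)    # (10000, 3072) (10000, 10)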

In [2]:
Nin = 3072  # 32 * 32 * 3, a flattened CIFAR-10 image
Nh_l = [100, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 7s - loss: 1.9257 - acc: 0.3055 - val_loss: 1.8080 - val_acc: 0.3532
Epoch 2/100
40000/40000 [==============================] - 5s - loss: 1.7438 - acc: 0.3796 - val_loss: 1.7046 - val_acc: 0.3937
Epoch 3/100
40000/40000 [==============================] - 5s - loss: 1.6728 - acc: 0.4053 - val_loss: 1.6790 - val_acc: 0.4041
Epoch 4/100
40000/40000 [==============================] - 5s - loss: 1.6218 - acc: 0.4243 - val_loss: 1.6455 - val_acc: 0.4152
Epoch 5/100
40000/40000 [==============================] - 5s - loss: 1.5910 - acc: 0.4317 - val_loss: 1.6671 - val_acc: 0.4154
Epoch 6/100
40000/40000 [==============================] - 5s - loss: 1.5478 - acc: 0.4483 - val_loss: 1.5907 - val_acc: 0.4394
Epoch 7/100
40000/40000 [==============================] - 5s - loss: 1.5321 - acc: 0.4520 - val_loss: 1.5700 - val_acc: 0.4433
Epoch 8/100
40000/40000 [==============================] - 5s - loss: 1.5112 - acc: 0.4604 - val_loss: 1.5561 - val_acc: 0.4469
Epoch 9/100
40000/40000 [==============================] - 5s - loss: 1.4858 - acc: 0.4709 - val_loss: 1.5337 - val_acc: 0.4561
Epoch 10/100
40000/40000 [==============================] - 5s - loss: 1.4738 - acc: 0.4736 - val_loss: 1.5237 - val_acc: 0.4596
Epoch 11/100
40000/40000 [==============================] - 5s - loss: 1.4614 - acc: 0.4811 - val_loss: 1.6034 - val_acc: 0.4366
Epoch 12/100
40000/40000 [==============================] - 6s - loss: 1.4529 - acc: 0.4840 - val_loss: 1.5186 - val_acc: 0.4656
Epoch 13/100
40000/40000 [==============================] - 5s - loss: 1.4358 - acc: 0.4896 - val_loss: 1.5016 - val_acc: 0.4710
Epoch 14/100
40000/40000 [==============================] - 5s - loss: 1.4242 - acc: 0.4938 - val_loss: 1.5240 - val_acc: 0.4626
Epoch 15/100
40000/40000 [==============================] - 5s - loss: 1.4175 - acc: 0.4940 - val_loss: 1.5242 - val_acc: 0.4635
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.4097 - acc: 0.4973 - val_loss: 1.5317 - val_acc: 0.4596
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.3979 - acc: 0.5012 - val_loss: 1.5345 - val_acc: 0.4548
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.3980 - acc: 0.5007 - val_loss: 1.5455 - val_acc: 0.4500
Epoch 19/100
40000/40000 [==============================] - 4s - loss: 1.3825 - acc: 0.5071 - val_loss: 1.5110 - val_acc: 0.4697
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.3795 - acc: 0.5054 - val_loss: 1.4799 - val_acc: 0.4773
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.3766 - acc: 0.5071 - val_loss: 1.4823 - val_acc: 0.4767
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.3635 - acc: 0.5136 - val_loss: 1.5175 - val_acc: 0.4622
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.3608 - acc: 0.5132 - val_loss: 1.4843 - val_acc: 0.4784
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.3539 - acc: 0.5182 - val_loss: 1.4893 - val_acc: 0.4800
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.3484 - acc: 0.5188 - val_loss: 1.5207 - val_acc: 0.4685
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.3500 - acc: 0.5157 - val_loss: 1.4825 - val_acc: 0.4767
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.3375 - acc: 0.5231 - val_loss: 1.4959 - val_acc: 0.4747
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.3331 - acc: 0.5240 - val_loss: 1.5184 - val_acc: 0.4634
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.3304 - acc: 0.5261 - val_loss: 1.4827 - val_acc: 0.4796
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.3207 - acc: 0.5285 - val_loss: 1.5079 - val_acc: 0.4795
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.3171 - acc: 0.5272 - val_loss: 1.4746 - val_acc: 0.4830
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.3154 - acc: 0.5301 - val_loss: 1.4974 - val_acc: 0.4762
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.3080 - acc: 0.5340 - val_loss: 1.4794 - val_acc: 0.4868
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.3031 - acc: 0.5326 - val_loss: 1.5035 - val_acc: 0.4749
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.3001 - acc: 0.5356 - val_loss: 1.4830 - val_acc: 0.4794
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.2914 - acc: 0.5391 - val_loss: 1.5037 - val_acc: 0.4735
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.2933 - acc: 0.5377 - val_loss: 1.5027 - val_acc: 0.4775
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.2869 - acc: 0.5402 - val_loss: 1.5188 - val_acc: 0.4711
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.2844 - acc: 0.5415 - val_loss: 1.4995 - val_acc: 0.4825
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.2892 - acc: 0.5408 - val_loss: 1.4945 - val_acc: 0.4819
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.2794 - acc: 0.5434 - val_loss: 1.4844 - val_acc: 0.4854
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.2698 - acc: 0.5470 - val_loss: 1.5040 - val_acc: 0.4769
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.2707 - acc: 0.5463 - val_loss: 1.4876 - val_acc: 0.4778
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.2661 - acc: 0.5487 - val_loss: 1.4763 - val_acc: 0.4891
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.2670 - acc: 0.5482 - val_loss: 1.5011 - val_acc: 0.4842
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.2594 - acc: 0.5534 - val_loss: 1.5104 - val_acc: 0.4702
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.2559 - acc: 0.5525 - val_loss: 1.4855 - val_acc: 0.4857
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.2532 - acc: 0.5521 - val_loss: 1.5157 - val_acc: 0.4792
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.2528 - acc: 0.5543 - val_loss: 1.5168 - val_acc: 0.4788
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.2479 - acc: 0.5536 - val_loss: 1.5297 - val_acc: 0.4754
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.2442 - acc: 0.5574 - val_loss: 1.4859 - val_acc: 0.4853
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.2412 - acc: 0.5586 - val_loss: 1.4863 - val_acc: 0.4903
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.2377 - acc: 0.5578 - val_loss: 1.5100 - val_acc: 0.4802
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.2339 - acc: 0.5604 - val_loss: 1.5297 - val_acc: 0.4755
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.2339 - acc: 0.5584 - val_loss: 1.5209 - val_acc: 0.4706
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.2288 - acc: 0.5631 - val_loss: 1.5031 - val_acc: 0.4815
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.2253 - acc: 0.5627 - val_loss: 1.5069 - val_acc: 0.4841
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.2248 - acc: 0.5632 - val_loss: 1.5712 - val_acc: 0.4626
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.2276 - acc: 0.5617 - val_loss: 1.5258 - val_acc: 0.4744
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.2282 - acc: 0.5610 - val_loss: 1.4863 - val_acc: 0.4868
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.2159 - acc: 0.5654 - val_loss: 1.4984 - val_acc: 0.4832
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.2148 - acc: 0.5665 - val_loss: 1.5109 - val_acc: 0.4816
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.2118 - acc: 0.5675 - val_loss: 1.4864 - val_acc: 0.4873
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.2134 - acc: 0.5686 - val_loss: 1.5624 - val_acc: 0.4723
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.2115 - acc: 0.5680 - val_loss: 1.5311 - val_acc: 0.4731
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.2062 - acc: 0.5697 - val_loss: 1.5291 - val_acc: 0.4786
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.2050 - acc: 0.5697 - val_loss: 1.4995 - val_acc: 0.4898
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.2006 - acc: 0.5735 - val_loss: 1.5047 - val_acc: 0.4846
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.2007 - acc: 0.5711 - val_loss: 1.5094 - val_acc: 0.4871
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.2040 - acc: 0.5700 - val_loss: 1.5606 - val_acc: 0.4707
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.1924 - acc: 0.5746 - val_loss: 1.5028 - val_acc: 0.4871
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.1949 - acc: 0.5725 - val_loss: 1.5130 - val_acc: 0.4853
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.1942 - acc: 0.5727 - val_loss: 1.4985 - val_acc: 0.4900
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.1882 - acc: 0.5778 - val_loss: 1.5276 - val_acc: 0.4809
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.1855 - acc: 0.5768 - val_loss: 1.5042 - val_acc: 0.4879
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.1906 - acc: 0.5748 - val_loss: 1.5707 - val_acc: 0.4748
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.1830 - acc: 0.5767 - val_loss: 1.5294 - val_acc: 0.4806
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.1778 - acc: 0.5782 - val_loss: 1.5355 - val_acc: 0.4790
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.1830 - acc: 0.5768 - val_loss: 1.5257 - val_acc: 0.4824
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.1750 - acc: 0.5815 - val_loss: 1.5521 - val_acc: 0.4801
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.1731 - acc: 0.5818 - val_loss: 1.5246 - val_acc: 0.4799
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.1768 - acc: 0.5817 - val_loss: 1.5446 - val_acc: 0.4843
Epoch 83/100
40000/40000 [==============================] - 3s - loss: 1.1721 - acc: 0.5812 - val_loss: 1.5306 - val_acc: 0.4802
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.1725 - acc: 0.5810 - val_loss: 1.5519 - val_acc: 0.4746
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.1697 - acc: 0.5812 - val_loss: 1.5322 - val_acc: 0.4852
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.1726 - acc: 0.5809 - val_loss: 1.5533 - val_acc: 0.4769
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.1574 - acc: 0.5861 - val_loss: 1.5454 - val_acc: 0.4794
Epoch 88/100
40000/40000 [==============================] - 3s - loss: 1.1648 - acc: 0.5818 - val_loss: 1.5430 - val_acc: 0.4806
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.1603 - acc: 0.5869 - val_loss: 1.5516 - val_acc: 0.4772
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.1607 - acc: 0.5866 - val_loss: 1.5461 - val_acc: 0.4852
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.1623 - acc: 0.5862 - val_loss: 1.5290 - val_acc: 0.4893
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.1604 - acc: 0.5843 - val_loss: 1.5531 - val_acc: 0.4748
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.1558 - acc: 0.5875 - val_loss: 1.5282 - val_acc: 0.4840
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.1532 - acc: 0.5859 - val_loss: 1.5955 - val_acc: 0.4726
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.1565 - acc: 0.5874 - val_loss: 1.5395 - val_acc: 0.4789
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.1515 - acc: 0.5862 - val_loss: 1.5634 - val_acc: 0.4795
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.1516 - acc: 0.5866 - val_loss: 1.5521 - val_acc: 0.4825
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.1532 - acc: 0.5871 - val_loss: 1.5490 - val_acc: 0.4795
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.1545 - acc: 0.5872 - val_loss: 1.5731 - val_acc: 0.4776
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.1410 - acc: 0.5932 - val_loss: 1.5446 - val_acc: 0.4808
10000/10000 [==============================] - 0s     
Test Loss and Accuracy -> [1.5246370542049408, 0.48859998196363447]
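
The final training accuracy (about 0.59) is well above the validation and test accuracy (about 0.48), so the fully connected network is overfitting the flattened CIFAR-10 pixels. The Dropout lines commented out in the DNN class are one way to narrow this gap; the sketch below simply enables them, reusing the rates 0.25 and 0.5 from those commented-out lines (not tuned here).

from keras import layers, models

class DNN_dropout(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        self.add(layers.Dense(Nh_l[0], activation='relu',
                 input_shape=(Nin,), name='Hidden-1'))
        # randomly zero 25% of the first hidden layer's activations during training
        self.add(layers.Dropout(0.25))
        self.add(layers.Dense(Nh_l[1], activation='relu', name='Hidden-2'))
        # stronger dropout before the output layer
        self.add(layers.Dropout(0.5))
        self.add(layers.Dense(Nout, activation='softmax'))
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])

model = DNN_dropout(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)
print(model.evaluate(X_test, Y_test, batch_size=100))

Whether this actually closes the gap has to be checked on a real run; the drop rates above are just the ones left in the commented-out lines.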

In [ ]: