In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()

        # First hidden layer: Nh_l[0] ReLU units on the flattened Nin-dimensional input
        self.add(layers.Dense(Nh_l[0], activation='relu',
                 input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dropout(0.05))

        # Second hidden layer: Nh_l[1] ReLU units with stronger dropout
        self.add(layers.Dense(Nh_l[1], activation='relu',
                 name='Hidden-2'))
        self.add(layers.Dropout(0.2))

        # Softmax output layer for Nout classes
        self.add(layers.Dense(Nout, activation='softmax'))

        self.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    # CIFAR-10: 50,000 training and 10,000 test colour images of shape (32, 32, 3)
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    # One-hot encode the integer class labels (10 classes)
    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    # Flatten each image into a 32 * 32 * 3 = 3072-dimensional vector
    L, W, H, C = X_train.shape
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    # Scale pixel values from [0, 255] to [0, 1]
    X_train = X_train / 255.0
    X_test = X_test / 255.0

    return (X_train, Y_train), (X_test, Y_test)


# Training performance analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc  # plotting helpers from the earlier ANN/MNIST example
import matplotlib.pyplot as plt


# Classification DNN training and testing ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
    
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()


Using TensorFlow backend.
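
plot_loss and plot_acc are imported from ann_mnist_cl, the helper module written for the earlier ANN/MNIST example, which is not reproduced in this notebook. For reference, a minimal sketch of helpers with the same call signature might look like the following; this is an assumption based only on how they are used here, namely that they take the Keras History object returned by fit() and read the 'loss'/'val_loss' and 'acc'/'val_acc' series that this Keras version records.

import matplotlib.pyplot as plt

def plot_loss(history, title=None):
    # Training vs. validation loss per epoch, read from History.history
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    if title is not None:
        plt.title(title)
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend(['Training', 'Validation'], loc=0)

def plot_acc(history, title=None):
    # Training vs. validation accuracy per epoch
    plt.plot(history.history['acc'])
    plt.plot(history.history['val_acc'])
    if title is not None:
        plt.title(title)
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend(['Training', 'Validation'], loc=0)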

In [2]:
Nin = 32 * 32 * 3  # 3072: length of a flattened CIFAR-10 image (the model below uses X_train.shape[1] directly)
Nh_l = [100, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 5s - loss: 2.0152 - acc: 0.2598 - val_loss: 1.8963 - val_acc: 0.3170
Epoch 2/100
40000/40000 [==============================] - 3s - loss: 1.8638 - acc: 0.3271 - val_loss: 1.7833 - val_acc: 0.3634
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.7959 - acc: 0.3534 - val_loss: 1.7472 - val_acc: 0.3782
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.7590 - acc: 0.3667 - val_loss: 1.6917 - val_acc: 0.3932
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.7310 - acc: 0.3763 - val_loss: 1.7360 - val_acc: 0.3830
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.7010 - acc: 0.3871 - val_loss: 1.6557 - val_acc: 0.4092
Epoch 7/100
40000/40000 [==============================] - 3s - loss: 1.6886 - acc: 0.3938 - val_loss: 1.6319 - val_acc: 0.4151
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.6739 - acc: 0.3986 - val_loss: 1.6282 - val_acc: 0.4152
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.6527 - acc: 0.4060 - val_loss: 1.6467 - val_acc: 0.4082
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.6501 - acc: 0.4052 - val_loss: 1.6065 - val_acc: 0.4320
Epoch 11/100
40000/40000 [==============================] - 3s - loss: 1.6335 - acc: 0.4112 - val_loss: 1.5978 - val_acc: 0.4311
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.6193 - acc: 0.4163 - val_loss: 1.5961 - val_acc: 0.4266
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.6185 - acc: 0.4187 - val_loss: 1.5892 - val_acc: 0.4356
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.6110 - acc: 0.4220 - val_loss: 1.6099 - val_acc: 0.4190
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.6094 - acc: 0.4214 - val_loss: 1.5732 - val_acc: 0.4414
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.6050 - acc: 0.4206 - val_loss: 1.5937 - val_acc: 0.4307
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.5908 - acc: 0.4246 - val_loss: 1.5657 - val_acc: 0.4438
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.5938 - acc: 0.4253 - val_loss: 1.5678 - val_acc: 0.4493
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.5862 - acc: 0.4275 - val_loss: 1.5588 - val_acc: 0.4436
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.5750 - acc: 0.4338 - val_loss: 1.5797 - val_acc: 0.4395
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.5794 - acc: 0.4310 - val_loss: 1.5789 - val_acc: 0.4363
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.5660 - acc: 0.4348 - val_loss: 1.5761 - val_acc: 0.4379
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.5640 - acc: 0.4362 - val_loss: 1.5925 - val_acc: 0.4256
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.5573 - acc: 0.4400 - val_loss: 1.5564 - val_acc: 0.4469
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.5590 - acc: 0.4385 - val_loss: 1.5486 - val_acc: 0.4527
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.5511 - acc: 0.4397 - val_loss: 1.5487 - val_acc: 0.4478
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.5448 - acc: 0.4438 - val_loss: 1.5532 - val_acc: 0.4435
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.5570 - acc: 0.4404 - val_loss: 1.5728 - val_acc: 0.4408
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.5414 - acc: 0.4449 - val_loss: 1.5396 - val_acc: 0.4555
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.5385 - acc: 0.4457 - val_loss: 1.5345 - val_acc: 0.4507
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.5417 - acc: 0.4464 - val_loss: 1.5440 - val_acc: 0.4510
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.5344 - acc: 0.4480 - val_loss: 1.5382 - val_acc: 0.4517
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.5276 - acc: 0.4517 - val_loss: 1.5328 - val_acc: 0.4589
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.5325 - acc: 0.4476 - val_loss: 1.5257 - val_acc: 0.4580
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.5287 - acc: 0.4512 - val_loss: 1.5599 - val_acc: 0.4479
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.5256 - acc: 0.4540 - val_loss: 1.5240 - val_acc: 0.4569
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.5276 - acc: 0.4518 - val_loss: 1.5308 - val_acc: 0.4602
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.5248 - acc: 0.4525 - val_loss: 1.5623 - val_acc: 0.4476
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.5256 - acc: 0.4536 - val_loss: 1.5270 - val_acc: 0.4548
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.5114 - acc: 0.4575 - val_loss: 1.5286 - val_acc: 0.4592
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.5234 - acc: 0.4546 - val_loss: 1.5449 - val_acc: 0.4536
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.5116 - acc: 0.4566 - val_loss: 1.5709 - val_acc: 0.4420
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.5155 - acc: 0.4573 - val_loss: 1.5434 - val_acc: 0.4479
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.5094 - acc: 0.4550 - val_loss: 1.5144 - val_acc: 0.4641
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.5047 - acc: 0.4594 - val_loss: 1.5521 - val_acc: 0.4504
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.5032 - acc: 0.4609 - val_loss: 1.5585 - val_acc: 0.4482
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.5019 - acc: 0.4601 - val_loss: 1.5410 - val_acc: 0.4547
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.4977 - acc: 0.4607 - val_loss: 1.5143 - val_acc: 0.4624
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.5065 - acc: 0.4600 - val_loss: 1.5138 - val_acc: 0.4653
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.4972 - acc: 0.4655 - val_loss: 1.5220 - val_acc: 0.4606
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.4977 - acc: 0.4632 - val_loss: 1.5338 - val_acc: 0.4580
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.4982 - acc: 0.4654 - val_loss: 1.5339 - val_acc: 0.4543
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.4933 - acc: 0.4643 - val_loss: 1.5278 - val_acc: 0.4549
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.4994 - acc: 0.4635 - val_loss: 1.5317 - val_acc: 0.4536
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.4923 - acc: 0.4624 - val_loss: 1.5380 - val_acc: 0.4577
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.4946 - acc: 0.4652 - val_loss: 1.5372 - val_acc: 0.4549
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.4840 - acc: 0.4668 - val_loss: 1.5226 - val_acc: 0.4658
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.4857 - acc: 0.4668 - val_loss: 1.5312 - val_acc: 0.4591
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.4865 - acc: 0.4654 - val_loss: 1.5212 - val_acc: 0.4614
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.4877 - acc: 0.4636 - val_loss: 1.5330 - val_acc: 0.4573
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.4866 - acc: 0.4645 - val_loss: 1.5162 - val_acc: 0.4656
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.4792 - acc: 0.4688 - val_loss: 1.5270 - val_acc: 0.4554
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.4824 - acc: 0.4651 - val_loss: 1.5260 - val_acc: 0.4623
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.4767 - acc: 0.4713 - val_loss: 1.5137 - val_acc: 0.4652
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.4755 - acc: 0.4696 - val_loss: 1.5223 - val_acc: 0.4614
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.4725 - acc: 0.4693 - val_loss: 1.5247 - val_acc: 0.4561
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.4792 - acc: 0.4684 - val_loss: 1.5237 - val_acc: 0.4630
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.4747 - acc: 0.4679 - val_loss: 1.5209 - val_acc: 0.4610
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.4736 - acc: 0.4700 - val_loss: 1.5208 - val_acc: 0.4566
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.4735 - acc: 0.4692 - val_loss: 1.5277 - val_acc: 0.4597
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.4750 - acc: 0.4698 - val_loss: 1.5080 - val_acc: 0.4701
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.4731 - acc: 0.4716 - val_loss: 1.5128 - val_acc: 0.4568
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.4678 - acc: 0.4692 - val_loss: 1.5250 - val_acc: 0.4608
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.4695 - acc: 0.4738 - val_loss: 1.5267 - val_acc: 0.4599
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.4730 - acc: 0.4721 - val_loss: 1.5226 - val_acc: 0.4553
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.4681 - acc: 0.4703 - val_loss: 1.5339 - val_acc: 0.4581
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.4643 - acc: 0.4750 - val_loss: 1.5122 - val_acc: 0.4685
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.4623 - acc: 0.4759 - val_loss: 1.5301 - val_acc: 0.4612
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.4646 - acc: 0.4733 - val_loss: 1.5213 - val_acc: 0.4618
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.4672 - acc: 0.4735 - val_loss: 1.5098 - val_acc: 0.4710
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.4589 - acc: 0.4765 - val_loss: 1.5180 - val_acc: 0.4624
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.4567 - acc: 0.4760 - val_loss: 1.5236 - val_acc: 0.4563
Epoch 83/100
40000/40000 [==============================] - 4s - loss: 1.4595 - acc: 0.4737 - val_loss: 1.5102 - val_acc: 0.4657
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.4599 - acc: 0.4732 - val_loss: 1.5313 - val_acc: 0.4655
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.4559 - acc: 0.4784 - val_loss: 1.5386 - val_acc: 0.4545
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.4601 - acc: 0.4748 - val_loss: 1.5154 - val_acc: 0.4675
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.4515 - acc: 0.4788 - val_loss: 1.5193 - val_acc: 0.4643
Epoch 88/100
40000/40000 [==============================] - 3s - loss: 1.4556 - acc: 0.4812 - val_loss: 1.5078 - val_acc: 0.4651
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.4526 - acc: 0.4790 - val_loss: 1.5194 - val_acc: 0.4653
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.4510 - acc: 0.4808 - val_loss: 1.5426 - val_acc: 0.4528
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.4463 - acc: 0.4785 - val_loss: 1.5198 - val_acc: 0.4622
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.4562 - acc: 0.4761 - val_loss: 1.5352 - val_acc: 0.4589
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.4507 - acc: 0.4773 - val_loss: 1.5104 - val_acc: 0.4668
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.4509 - acc: 0.4781 - val_loss: 1.5285 - val_acc: 0.4569
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.4466 - acc: 0.4774 - val_loss: 1.5322 - val_acc: 0.4636
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.4461 - acc: 0.4779 - val_loss: 1.5089 - val_acc: 0.4658
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.4414 - acc: 0.4797 - val_loss: 1.5048 - val_acc: 0.4670
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.4467 - acc: 0.4774 - val_loss: 1.5414 - val_acc: 0.4550
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.4440 - acc: 0.4800 - val_loss: 1.5072 - val_acc: 0.4687
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.4479 - acc: 0.4752 - val_loss: 1.5241 - val_acc: 0.4621
 9700/10000 [============================>.] - ETA: 0s
Test Loss and Accuracy -> [1.5061633121967315, 0.46069998413324353]
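
The fully connected DNN levels off at roughly 46% test accuracy on CIFAR-10, close to its validation accuracy; flattening the images into 3072-dimensional vectors discards their spatial structure, which limits what this architecture can learn. As a quick sanity check after training, individual predictions can be compared against the one-hot labels; the snippet below is a minimal sketch reusing the model, X_test, and Y_test variables from the cell above.

import numpy as np

# Compare predicted vs. true classes for the first few test images
pred = model.predict(X_test[:5])
print('Predicted:', np.argmax(pred, axis=1))
print('True:     ', np.argmax(Y_test[:5], axis=1))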

In [ ]: