In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        
        self.add(layers.Dense(Nh_l[0], activation='relu', 
                 input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dropout(0.01))  # very light dropout after the first hidden layer
        
        self.add(layers.Dense(Nh_l[1], activation='relu', 
                 name='Hidden-2'))       
        self.add(layers.Dropout(0))  # rate 0: dropout effectively disabled for this layer
        
        self.add(layers.Dense(Nout, activation='softmax'))

        self.compile(loss='categorical_crossentropy', 
                         optimizer='adam', 
                         metrics=['accuracy'])
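        # Rough size check (a sketch, not part of the original code): each Dense
        # layer holds (inputs + 1) * units parameters, so with Nin = 3072
        # (flattened CIFAR-10), Nh_l = [100, 50] and Nout = 10, model.summary()
        # should report about 3073*100 + 101*50 + 51*10 = 312,860 parameters.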

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H, C = X_train.shape
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    X_train = X_train / 255.0
    X_test = X_test / 255.0
    
    return (X_train, Y_train), (X_test, Y_test)
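
# Shape check (a sketch; uncomment to verify): CIFAR-10 provides 50,000 training
# and 10,000 test images of 32x32x3, so the flattened features have
# 32*32*3 = 3072 columns and the one-hot labels have 10 columns.
# (X_chk, Y_chk), _ = Data_func()
# assert X_chk.shape == (50000, 3072) and Y_chk.shape == (50000, 10)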


# Training result analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt
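
# plot_loss() and plot_acc() come from the separate ann_mnist_cl module (not
# listed here). A minimal sketch of equivalent helpers, assuming they plot the
# curves stored in history.history (keys 'loss', 'val_loss', 'acc', 'val_acc'):
#
# def plot_loss(history):
#     plt.plot(history.history['loss'])
#     plt.plot(history.history['val_loss'])
#     plt.xlabel('Epoch'); plt.ylabel('Loss')
#     plt.legend(['Train', 'Validation'], loc=0)
#
# def plot_acc(history):
#     plt.plot(history.history['acc'])
#     plt.plot(history.history['val_acc'])
#     plt.xlabel('Epoch'); plt.ylabel('Accuracy')
#     plt.legend(['Train', 'Validation'], loc=0)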


# Classification DNN training and testing ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)

    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()
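
# When this code is saved as a script rather than run cell by cell, the usual
# entry point would be (a sketch, not in the original notebook):
#
# if __name__ == '__main__':
#     main()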


Using TensorFlow backend.

In [2]:
Nin = 32 * 32 * 3  # flattened CIFAR-10 input size (3072); the model below reads it from X_train.shape[1]
Nh_l = [50, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 5s - loss: 1.9441 - acc: 0.2953 - val_loss: 1.8389 - val_acc: 0.3467
Epoch 2/100
40000/40000 [==============================] - 4s - loss: 1.7803 - acc: 0.3616 - val_loss: 1.7667 - val_acc: 0.3670
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.7222 - acc: 0.3857 - val_loss: 1.7250 - val_acc: 0.3883
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.6956 - acc: 0.3921 - val_loss: 1.7028 - val_acc: 0.3930
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.6684 - acc: 0.4056 - val_loss: 1.6666 - val_acc: 0.4057
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.6350 - acc: 0.4166 - val_loss: 1.6522 - val_acc: 0.4128
Epoch 7/100
40000/40000 [==============================] - 3s - loss: 1.6271 - acc: 0.4210 - val_loss: 1.6465 - val_acc: 0.4134
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.6110 - acc: 0.4257 - val_loss: 1.6368 - val_acc: 0.4199
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.5972 - acc: 0.4322 - val_loss: 1.6560 - val_acc: 0.4048
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.5876 - acc: 0.4338 - val_loss: 1.6081 - val_acc: 0.4287
Epoch 11/100
40000/40000 [==============================] - 4s - loss: 1.5694 - acc: 0.4392 - val_loss: 1.6108 - val_acc: 0.4286
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.5605 - acc: 0.4433 - val_loss: 1.6065 - val_acc: 0.4301
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.5522 - acc: 0.4458 - val_loss: 1.5984 - val_acc: 0.4366
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.5440 - acc: 0.4492 - val_loss: 1.6313 - val_acc: 0.4229
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.5316 - acc: 0.4535 - val_loss: 1.5728 - val_acc: 0.4407
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.5275 - acc: 0.4533 - val_loss: 1.5846 - val_acc: 0.4358
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.5239 - acc: 0.4544 - val_loss: 1.5775 - val_acc: 0.4434
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.5134 - acc: 0.4589 - val_loss: 1.6022 - val_acc: 0.4308
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.5109 - acc: 0.4608 - val_loss: 1.6093 - val_acc: 0.4314
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.5076 - acc: 0.4616 - val_loss: 1.6459 - val_acc: 0.4281
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.5034 - acc: 0.4629 - val_loss: 1.5716 - val_acc: 0.4397
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.4993 - acc: 0.4654 - val_loss: 1.5685 - val_acc: 0.4427
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.4933 - acc: 0.4679 - val_loss: 1.5827 - val_acc: 0.4420
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.4865 - acc: 0.4713 - val_loss: 1.5790 - val_acc: 0.4387
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.4842 - acc: 0.4709 - val_loss: 1.5738 - val_acc: 0.4419
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.4831 - acc: 0.4707 - val_loss: 1.5400 - val_acc: 0.4520
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.4853 - acc: 0.4692 - val_loss: 1.5669 - val_acc: 0.4446
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.4698 - acc: 0.4737 - val_loss: 1.5440 - val_acc: 0.4514
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.4678 - acc: 0.4763 - val_loss: 1.5584 - val_acc: 0.4485
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.4681 - acc: 0.4763 - val_loss: 1.5502 - val_acc: 0.4473
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.4668 - acc: 0.4739 - val_loss: 1.5676 - val_acc: 0.4465
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.4625 - acc: 0.4758 - val_loss: 1.5402 - val_acc: 0.4528
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.4573 - acc: 0.4802 - val_loss: 1.5472 - val_acc: 0.4524
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.4585 - acc: 0.4763 - val_loss: 1.5411 - val_acc: 0.4564
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.4542 - acc: 0.4791 - val_loss: 1.5368 - val_acc: 0.4556
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.4478 - acc: 0.4820 - val_loss: 1.5512 - val_acc: 0.4527
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.4527 - acc: 0.4805 - val_loss: 1.5520 - val_acc: 0.4517
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.4465 - acc: 0.4804 - val_loss: 1.5592 - val_acc: 0.4472
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.4349 - acc: 0.4862 - val_loss: 1.5348 - val_acc: 0.4629
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.4475 - acc: 0.4826 - val_loss: 1.5284 - val_acc: 0.4606
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.4394 - acc: 0.4856 - val_loss: 1.5221 - val_acc: 0.4590
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.4405 - acc: 0.4855 - val_loss: 1.5409 - val_acc: 0.4545
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.4369 - acc: 0.4891 - val_loss: 1.5426 - val_acc: 0.4585
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.4383 - acc: 0.4847 - val_loss: 1.5461 - val_acc: 0.4531
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.4334 - acc: 0.4882 - val_loss: 1.5284 - val_acc: 0.4564
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.4320 - acc: 0.4882 - val_loss: 1.5431 - val_acc: 0.4570
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.4289 - acc: 0.4893 - val_loss: 1.5596 - val_acc: 0.4491
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.4239 - acc: 0.4912 - val_loss: 1.5510 - val_acc: 0.4455
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.4281 - acc: 0.4903 - val_loss: 1.5695 - val_acc: 0.4469
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.4284 - acc: 0.4893 - val_loss: 1.5517 - val_acc: 0.4508
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.4185 - acc: 0.4944 - val_loss: 1.5403 - val_acc: 0.4557
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.4206 - acc: 0.4943 - val_loss: 1.5560 - val_acc: 0.4546
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.4150 - acc: 0.4928 - val_loss: 1.5281 - val_acc: 0.4589
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.4181 - acc: 0.4927 - val_loss: 1.5329 - val_acc: 0.4601
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.4161 - acc: 0.4927 - val_loss: 1.5327 - val_acc: 0.4578
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.4076 - acc: 0.4965 - val_loss: 1.5522 - val_acc: 0.4551
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.4148 - acc: 0.4949 - val_loss: 1.5575 - val_acc: 0.4534
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.4130 - acc: 0.4940 - val_loss: 1.5293 - val_acc: 0.4561
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.4077 - acc: 0.4959 - val_loss: 1.5345 - val_acc: 0.4593
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.4076 - acc: 0.4965 - val_loss: 1.5489 - val_acc: 0.4569
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.4039 - acc: 0.5002 - val_loss: 1.5278 - val_acc: 0.4597
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.4074 - acc: 0.4969 - val_loss: 1.5356 - val_acc: 0.4565
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.3960 - acc: 0.4993 - val_loss: 1.5290 - val_acc: 0.4588
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.4000 - acc: 0.4984 - val_loss: 1.5269 - val_acc: 0.4642
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.3963 - acc: 0.5011 - val_loss: 1.5566 - val_acc: 0.4582
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.3998 - acc: 0.4988 - val_loss: 1.5147 - val_acc: 0.4719
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.3959 - acc: 0.5033 - val_loss: 1.5285 - val_acc: 0.4582
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.3945 - acc: 0.5002 - val_loss: 1.5345 - val_acc: 0.4625
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.3946 - acc: 0.5013 - val_loss: 1.5631 - val_acc: 0.4535
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.4008 - acc: 0.4992 - val_loss: 1.5417 - val_acc: 0.4605
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.3914 - acc: 0.5028 - val_loss: 1.5358 - val_acc: 0.4617
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.3917 - acc: 0.5035 - val_loss: 1.5335 - val_acc: 0.4609
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.3899 - acc: 0.5044 - val_loss: 1.5075 - val_acc: 0.4689
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.3916 - acc: 0.5021 - val_loss: 1.5593 - val_acc: 0.4505
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.3824 - acc: 0.5082 - val_loss: 1.5475 - val_acc: 0.4551
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.3886 - acc: 0.5023 - val_loss: 1.5532 - val_acc: 0.4608
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.3932 - acc: 0.5036 - val_loss: 1.5210 - val_acc: 0.4608
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.3870 - acc: 0.5025 - val_loss: 1.5541 - val_acc: 0.4537
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.3820 - acc: 0.5076 - val_loss: 1.5422 - val_acc: 0.4571
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.3785 - acc: 0.5077 - val_loss: 1.5467 - val_acc: 0.4581
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.3779 - acc: 0.5082 - val_loss: 1.5165 - val_acc: 0.4663
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.3769 - acc: 0.5095 - val_loss: 1.5238 - val_acc: 0.4591
Epoch 83/100
40000/40000 [==============================] - 3s - loss: 1.3772 - acc: 0.5052 - val_loss: 1.5357 - val_acc: 0.4609
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.3788 - acc: 0.5083 - val_loss: 1.5339 - val_acc: 0.4608
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.3756 - acc: 0.5091 - val_loss: 1.5432 - val_acc: 0.4572
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.3723 - acc: 0.5096 - val_loss: 1.5401 - val_acc: 0.4623
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.3743 - acc: 0.5101 - val_loss: 1.5525 - val_acc: 0.4580
Epoch 88/100
40000/40000 [==============================] - 3s - loss: 1.3751 - acc: 0.5089 - val_loss: 1.5297 - val_acc: 0.4613
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.3718 - acc: 0.5109 - val_loss: 1.5291 - val_acc: 0.4603
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.3722 - acc: 0.5106 - val_loss: 1.5361 - val_acc: 0.4656
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.3698 - acc: 0.5112 - val_loss: 1.5292 - val_acc: 0.4636
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.3666 - acc: 0.5112 - val_loss: 1.5618 - val_acc: 0.4562
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.3685 - acc: 0.5102 - val_loss: 1.5252 - val_acc: 0.4660
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.3751 - acc: 0.5093 - val_loss: 1.5245 - val_acc: 0.4611
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.3641 - acc: 0.5116 - val_loss: 1.5523 - val_acc: 0.4536
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.3639 - acc: 0.5125 - val_loss: 1.5166 - val_acc: 0.4722
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.3639 - acc: 0.5121 - val_loss: 1.5368 - val_acc: 0.4594
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.3638 - acc: 0.5121 - val_loss: 1.5291 - val_acc: 0.4611
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.3603 - acc: 0.5138 - val_loss: 1.5245 - val_acc: 0.4617
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.3672 - acc: 0.5109 - val_loss: 1.5286 - val_acc: 0.4600
 9600/10000 [===========================>..] - ETA: 0s
Test Loss and Accuracy -> [1.5139019942283631, 0.46919998466968538]
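
The fully connected DNN levels off at roughly 46-47% test accuracy on CIFAR-10. Individual predictions can be inspected directly from the trained model; a minimal sketch, assuming model, X_test and Y_test from the cell above are still in memory:

import numpy as np

# class probabilities for the first five test images, shape (5, 10)
proba = model.predict(X_test[:5])
print('Predicted classes:', np.argmax(proba, axis=1))
print('True classes:     ', np.argmax(Y_test[:5], axis=1))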

In [ ]: