In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
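        # Two ReLU hidden layers, each followed by light dropout, then a softmax
        # output layer; compile() is called in the constructor so the instance
        # is ready for fit()/evaluate() right after construction.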
        
        self.add(layers.Dense(Nh_l[0], activation='relu', 
                 input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dropout(0.05))
        
        self.add(layers.Dense(Nh_l[1], activation='relu', 
                 name='Hidden-2'))       
        self.add(layers.Dropout(0.1))
        
        self.add(layers.Dense(Nout, activation='softmax'))

        self.compile(loss='categorical_crossentropy', 
                         optimizer='adam', 
                         metrics=['accuracy'])

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H, C = X_train.shape
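    # CIFAR-10 images are 32x32 RGB, so W * H * C = 32 * 32 * 3 = 3072 features;
    # flattening gives X_train a shape of (50000, 3072) for the dense layers.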
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    X_train = X_train / 255.0
    X_test = X_test / 255.0
    
    return (X_train, Y_train), (X_test, Y_test)


# Analysis of training results ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt


# Classification DNN training and testing ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)

    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()


Using TensorFlow backend.
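
plot_loss and plot_acc come from ann_mnist_cl, the helper module written in the earlier ANN chapter. If that module is not on the path, minimal stand-ins along the following lines will do (a sketch assuming the old-style Keras history keys 'acc'/'val_acc', which match the training log below):

import matplotlib.pyplot as plt

def plot_loss(history, title=None):
    # Training vs. validation loss per epoch.
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    if title is not None:
        plt.title(title)
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend(['Train', 'Validation'], loc=0)

def plot_acc(history, title=None):
    # Training vs. validation accuracy per epoch.
    plt.plot(history.history['acc'])
    plt.plot(history.history['val_acc'])
    if title is not None:
        plt.title(title)
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend(['Train', 'Validation'], loc=0)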

In [2]:
Nin = 32 * 32 * 3  # flattened CIFAR-10 input size (3072), not MNIST's 784
Nh_l = [50, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 5s - loss: 1.9953 - acc: 0.2689 - val_loss: 1.8309 - val_acc: 0.3426
Epoch 2/100
40000/40000 [==============================] - 3s - loss: 1.8394 - acc: 0.3330 - val_loss: 1.7579 - val_acc: 0.3739
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.7818 - acc: 0.3605 - val_loss: 1.7438 - val_acc: 0.3828
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.7448 - acc: 0.3756 - val_loss: 1.7211 - val_acc: 0.3824
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.7135 - acc: 0.3850 - val_loss: 1.6867 - val_acc: 0.4001
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.6931 - acc: 0.3958 - val_loss: 1.6698 - val_acc: 0.4076
Epoch 7/100
40000/40000 [==============================] - 3s - loss: 1.6818 - acc: 0.3961 - val_loss: 1.6346 - val_acc: 0.4142
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.6634 - acc: 0.4035 - val_loss: 1.6416 - val_acc: 0.4157
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.6442 - acc: 0.4089 - val_loss: 1.6159 - val_acc: 0.4261
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.6398 - acc: 0.4122 - val_loss: 1.6439 - val_acc: 0.4117
Epoch 11/100
40000/40000 [==============================] - 3s - loss: 1.6253 - acc: 0.4182 - val_loss: 1.6086 - val_acc: 0.4284
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.6128 - acc: 0.4228 - val_loss: 1.6114 - val_acc: 0.4289
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.6022 - acc: 0.4267 - val_loss: 1.5891 - val_acc: 0.4368
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.6009 - acc: 0.4256 - val_loss: 1.5957 - val_acc: 0.4309
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.5915 - acc: 0.4285 - val_loss: 1.5865 - val_acc: 0.4402
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.5923 - acc: 0.4283 - val_loss: 1.5913 - val_acc: 0.4408
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.5889 - acc: 0.4262 - val_loss: 1.5948 - val_acc: 0.4386
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.5804 - acc: 0.4329 - val_loss: 1.5705 - val_acc: 0.4386
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.5739 - acc: 0.4370 - val_loss: 1.5916 - val_acc: 0.4340
Epoch 20/100
40000/40000 [==============================] - 3s - loss: 1.5750 - acc: 0.4371 - val_loss: 1.5834 - val_acc: 0.4376
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.5644 - acc: 0.4374 - val_loss: 1.6091 - val_acc: 0.4328
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.5625 - acc: 0.4411 - val_loss: 1.5614 - val_acc: 0.4494
Epoch 23/100
40000/40000 [==============================] - 4s - loss: 1.5583 - acc: 0.4410 - val_loss: 1.5763 - val_acc: 0.4393
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.5553 - acc: 0.4422 - val_loss: 1.5807 - val_acc: 0.4408
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.5502 - acc: 0.4470 - val_loss: 1.5697 - val_acc: 0.4444
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.5504 - acc: 0.4461 - val_loss: 1.5540 - val_acc: 0.4541
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.5474 - acc: 0.4451 - val_loss: 1.5640 - val_acc: 0.4472
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.5416 - acc: 0.4497 - val_loss: 1.5563 - val_acc: 0.4513
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.5450 - acc: 0.4472 - val_loss: 1.5541 - val_acc: 0.4498
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.5411 - acc: 0.4438 - val_loss: 1.5457 - val_acc: 0.4498
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.5334 - acc: 0.4510 - val_loss: 1.5540 - val_acc: 0.4535
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.5322 - acc: 0.4514 - val_loss: 1.5641 - val_acc: 0.4487
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.5268 - acc: 0.4518 - val_loss: 1.5528 - val_acc: 0.4487
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.5275 - acc: 0.4510 - val_loss: 1.5499 - val_acc: 0.4455
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.5367 - acc: 0.4500 - val_loss: 1.5612 - val_acc: 0.4492
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.5323 - acc: 0.4507 - val_loss: 1.5714 - val_acc: 0.4322
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.5199 - acc: 0.4531 - val_loss: 1.5571 - val_acc: 0.4510
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.5148 - acc: 0.4548 - val_loss: 1.5416 - val_acc: 0.4533
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.5163 - acc: 0.4546 - val_loss: 1.5374 - val_acc: 0.4565
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.5218 - acc: 0.4538 - val_loss: 1.5460 - val_acc: 0.4507
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.5178 - acc: 0.4553 - val_loss: 1.5605 - val_acc: 0.4504
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.5103 - acc: 0.4583 - val_loss: 1.5424 - val_acc: 0.4580
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.5050 - acc: 0.4609 - val_loss: 1.5621 - val_acc: 0.4528
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.5024 - acc: 0.4570 - val_loss: 1.5499 - val_acc: 0.4528
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.5071 - acc: 0.4587 - val_loss: 1.5494 - val_acc: 0.4510
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.5015 - acc: 0.4617 - val_loss: 1.5408 - val_acc: 0.4509
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.5070 - acc: 0.4582 - val_loss: 1.5548 - val_acc: 0.4499
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.5044 - acc: 0.4612 - val_loss: 1.5372 - val_acc: 0.4563
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.4981 - acc: 0.4627 - val_loss: 1.5372 - val_acc: 0.4522
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.4961 - acc: 0.4637 - val_loss: 1.5698 - val_acc: 0.4405
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.4970 - acc: 0.4606 - val_loss: 1.5605 - val_acc: 0.4470
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.4969 - acc: 0.4627 - val_loss: 1.5500 - val_acc: 0.4526
Epoch 53/100
40000/40000 [==============================] - 3s - loss: 1.4919 - acc: 0.4623 - val_loss: 1.5418 - val_acc: 0.4487
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.4903 - acc: 0.4663 - val_loss: 1.5386 - val_acc: 0.4579
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.4925 - acc: 0.4639 - val_loss: 1.5341 - val_acc: 0.4569
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.4927 - acc: 0.4665 - val_loss: 1.5481 - val_acc: 0.4500
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.4872 - acc: 0.4646 - val_loss: 1.5364 - val_acc: 0.4564
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.4850 - acc: 0.4672 - val_loss: 1.5554 - val_acc: 0.4496
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.4821 - acc: 0.4701 - val_loss: 1.5304 - val_acc: 0.4580
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.4836 - acc: 0.4680 - val_loss: 1.5438 - val_acc: 0.4486
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.4839 - acc: 0.4675 - val_loss: 1.5254 - val_acc: 0.4590
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.4797 - acc: 0.4672 - val_loss: 1.5588 - val_acc: 0.4535
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.4853 - acc: 0.4682 - val_loss: 1.5523 - val_acc: 0.4494
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.4779 - acc: 0.4704 - val_loss: 1.5488 - val_acc: 0.4528
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.4723 - acc: 0.4725 - val_loss: 1.5482 - val_acc: 0.4544
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.4740 - acc: 0.4709 - val_loss: 1.5548 - val_acc: 0.4509
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.4773 - acc: 0.4707 - val_loss: 1.5431 - val_acc: 0.4523
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.4660 - acc: 0.4761 - val_loss: 1.5538 - val_acc: 0.4431
Epoch 69/100
40000/40000 [==============================] - 4s - loss: 1.4750 - acc: 0.4701 - val_loss: 1.5272 - val_acc: 0.4621
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.4736 - acc: 0.4703 - val_loss: 1.5370 - val_acc: 0.4587
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.4729 - acc: 0.4734 - val_loss: 1.5400 - val_acc: 0.4528
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.4724 - acc: 0.4696 - val_loss: 1.5363 - val_acc: 0.4503
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.4625 - acc: 0.4766 - val_loss: 1.5494 - val_acc: 0.4485
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.4682 - acc: 0.4727 - val_loss: 1.5488 - val_acc: 0.4493
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.4699 - acc: 0.4725 - val_loss: 1.5526 - val_acc: 0.4500
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.4664 - acc: 0.4726 - val_loss: 1.5350 - val_acc: 0.4543
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.4615 - acc: 0.4742 - val_loss: 1.5493 - val_acc: 0.4565
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.4655 - acc: 0.4746 - val_loss: 1.5697 - val_acc: 0.4437
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.4629 - acc: 0.4727 - val_loss: 1.5460 - val_acc: 0.4525
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.4605 - acc: 0.4745 - val_loss: 1.5610 - val_acc: 0.4476
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.4615 - acc: 0.4749 - val_loss: 1.5529 - val_acc: 0.4554
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.4591 - acc: 0.4757 - val_loss: 1.5527 - val_acc: 0.4514
Epoch 83/100
40000/40000 [==============================] - 3s - loss: 1.4584 - acc: 0.4769 - val_loss: 1.5388 - val_acc: 0.4556
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.4561 - acc: 0.4755 - val_loss: 1.5645 - val_acc: 0.4458
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.4581 - acc: 0.4790 - val_loss: 1.5465 - val_acc: 0.4516
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.4503 - acc: 0.4804 - val_loss: 1.5439 - val_acc: 0.4527
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.4513 - acc: 0.4806 - val_loss: 1.5223 - val_acc: 0.4627
Epoch 88/100
40000/40000 [==============================] - 4s - loss: 1.4480 - acc: 0.4791 - val_loss: 1.5433 - val_acc: 0.4562
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.4528 - acc: 0.4781 - val_loss: 1.5593 - val_acc: 0.4512
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.4576 - acc: 0.4779 - val_loss: 1.5364 - val_acc: 0.4529
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.4481 - acc: 0.4795 - val_loss: 1.5505 - val_acc: 0.4581
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.4445 - acc: 0.4841 - val_loss: 1.5329 - val_acc: 0.4569
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.4471 - acc: 0.4791 - val_loss: 1.5287 - val_acc: 0.4619
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.4483 - acc: 0.4802 - val_loss: 1.5391 - val_acc: 0.4561
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.4462 - acc: 0.4820 - val_loss: 1.5482 - val_acc: 0.4457
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.4512 - acc: 0.4794 - val_loss: 1.5537 - val_acc: 0.4461
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.4456 - acc: 0.4814 - val_loss: 1.5322 - val_acc: 0.4587
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.4447 - acc: 0.4835 - val_loss: 1.5496 - val_acc: 0.4484
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.4413 - acc: 0.4829 - val_loss: 1.5455 - val_acc: 0.4543
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.4411 - acc: 0.4838 - val_loss: 1.5557 - val_acc: 0.4483
 9800/10000 [============================>.] - ETA: 0s
Test Loss and Accuracy -> [1.526335608959198, 0.45069998711347581]
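
As a quick sanity check on the roughly 45 % test accuracy, the trained model can be applied to a handful of test images with model.predict. A sketch follows; the class-name list is the standard CIFAR-10 label order, hard-coded here for readability rather than loaded from Keras:

import numpy as np

# Standard CIFAR-10 class order (indices 0..9).
labels = ['airplane', 'automobile', 'bird', 'cat', 'deer',
          'dog', 'frog', 'horse', 'ship', 'truck']

probs = model.predict(X_test[:5])      # softmax outputs, shape (5, 10)
pred = np.argmax(probs, axis=1)        # predicted class index per image
true = np.argmax(Y_test[:5], axis=1)   # recover indices from the one-hot targets
for p, t in zip(pred, true):
    print('predicted:', labels[p], '/ actual:', labels[t])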
