In [1]:
# Classification DNN model implementation ########################
from keras import layers, models
 
class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        
        self.add(layers.Dense(Nh_l[0], activation='relu', 
                 input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dropout(0.05))
        
        self.add(layers.Dense(Nh_l[1], activation='relu', 
                 name='Hidden-2'))       
        self.add(layers.Dropout(0.05))
        
        self.add(layers.Dense(Nout, activation='softmax'))

        self.compile(loss='categorical_crossentropy', 
                         optimizer='adam', 
                         metrics=['accuracy'])
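
        # The softmax output paired with categorical_crossentropy expects one-hot
        # label vectors, which is why Data_func() below converts the integer class
        # labels with to_categorical().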

        
# Data preparation ##############################
import numpy as np
from keras import datasets  # cifar10
from keras.utils import np_utils  # to_categorical


def Data_func():
    (X_train, y_train), (X_test, y_test) = datasets.cifar10.load_data()

    Y_train = np_utils.to_categorical(y_train)
    Y_test = np_utils.to_categorical(y_test)

    L, W, H, C = X_train.shape
    X_train = X_train.reshape(-1, W * H * C)
    X_test = X_test.reshape(-1, W * H * C)

    X_train = X_train / 255.0
    X_test = X_test / 255.0
    
    return (X_train, Y_train), (X_test, Y_test)
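
# For CIFAR-10 the arrays returned above have shapes
# X_train: (50000, 3072), Y_train: (50000, 10), X_test: (10000, 3072), Y_test: (10000, 10)
# (50,000 training and 10,000 test 32x32 RGB images, flattened to 3,072 features each).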


# Training result analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt
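
# plot_loss and plot_acc come from the earlier MNIST example module (ann_mnist_cl.py),
# which is not shown here. A minimal sketch of their assumed behavior, kept commented
# out so the cell still relies on the imported originals (hypothetical reimplementation):
#
# def plot_loss(history):
#     plt.plot(history.history['loss'])
#     plt.plot(history.history['val_loss'])
#     plt.title('Model Loss')
#     plt.ylabel('Loss')
#     plt.xlabel('Epoch')
#     plt.legend(['Train', 'Validation'], loc=0)
#
# def plot_acc(history):
#     plt.plot(history.history['acc'])
#     plt.plot(history.history['val_acc'])
#     plt.title('Model Accuracy')
#     plt.ylabel('Accuracy')
#     plt.xlabel('Epoch')
#     plt.legend(['Train', 'Validation'], loc=0)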


# Classification DNN training and testing ####################
def main():
    Nh_l = [100, 50]
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(X_train.shape[1], Nh_l, Nout)
    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
    
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()
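
# main() is only defined in this cell; the next cell runs the same steps
# directly with Nh_l = [50, 50] and epochs=100.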


Using TensorFlow backend.

In [2]:
Nin = 32 * 32 * 3  # 3072 features per flattened CIFAR-10 image (the model below uses X_train.shape[1])
Nh_l = [50, 50]
number_of_class = 10
Nout = number_of_class

(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(X_train.shape[1], Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=100, batch_size=100, validation_split=0.2)

performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)

plot_acc(history)
plt.show()
plot_loss(history)
plt.show()


Train on 40000 samples, validate on 10000 samples
Epoch 1/100
40000/40000 [==============================] - 5s - loss: 2.0157 - acc: 0.2567 - val_loss: 1.9055 - val_acc: 0.2951
Epoch 2/100
40000/40000 [==============================] - 3s - loss: 1.8444 - acc: 0.3306 - val_loss: 1.7971 - val_acc: 0.3543
Epoch 3/100
40000/40000 [==============================] - 3s - loss: 1.8072 - acc: 0.3456 - val_loss: 1.7475 - val_acc: 0.3748
Epoch 4/100
40000/40000 [==============================] - 3s - loss: 1.7629 - acc: 0.3647 - val_loss: 1.7492 - val_acc: 0.3732
Epoch 5/100
40000/40000 [==============================] - 3s - loss: 1.7361 - acc: 0.3733 - val_loss: 1.7019 - val_acc: 0.3947
Epoch 6/100
40000/40000 [==============================] - 3s - loss: 1.7156 - acc: 0.3792 - val_loss: 1.6657 - val_acc: 0.4063
Epoch 7/100
40000/40000 [==============================] - 4s - loss: 1.7004 - acc: 0.3866 - val_loss: 1.6666 - val_acc: 0.4088
Epoch 8/100
40000/40000 [==============================] - 3s - loss: 1.6825 - acc: 0.3942 - val_loss: 1.6490 - val_acc: 0.4148
Epoch 9/100
40000/40000 [==============================] - 3s - loss: 1.6663 - acc: 0.3981 - val_loss: 1.6484 - val_acc: 0.4095
Epoch 10/100
40000/40000 [==============================] - 3s - loss: 1.6628 - acc: 0.3999 - val_loss: 1.6319 - val_acc: 0.4206
Epoch 11/100
40000/40000 [==============================] - 4s - loss: 1.6466 - acc: 0.4078 - val_loss: 1.6408 - val_acc: 0.4170
Epoch 12/100
40000/40000 [==============================] - 3s - loss: 1.6402 - acc: 0.4107 - val_loss: 1.6446 - val_acc: 0.4135
Epoch 13/100
40000/40000 [==============================] - 3s - loss: 1.6320 - acc: 0.4111 - val_loss: 1.6252 - val_acc: 0.4187
Epoch 14/100
40000/40000 [==============================] - 3s - loss: 1.6281 - acc: 0.4135 - val_loss: 1.6352 - val_acc: 0.4195
Epoch 15/100
40000/40000 [==============================] - 3s - loss: 1.6197 - acc: 0.4159 - val_loss: 1.6305 - val_acc: 0.4165
Epoch 16/100
40000/40000 [==============================] - 3s - loss: 1.6139 - acc: 0.4187 - val_loss: 1.6244 - val_acc: 0.4245
Epoch 17/100
40000/40000 [==============================] - 3s - loss: 1.6141 - acc: 0.4212 - val_loss: 1.5988 - val_acc: 0.4326
Epoch 18/100
40000/40000 [==============================] - 3s - loss: 1.6077 - acc: 0.4190 - val_loss: 1.6136 - val_acc: 0.4231
Epoch 19/100
40000/40000 [==============================] - 3s - loss: 1.6001 - acc: 0.4240 - val_loss: 1.5982 - val_acc: 0.4294
Epoch 20/100
40000/40000 [==============================] - 4s - loss: 1.5968 - acc: 0.4260 - val_loss: 1.6108 - val_acc: 0.4206
Epoch 21/100
40000/40000 [==============================] - 3s - loss: 1.5943 - acc: 0.4264 - val_loss: 1.6094 - val_acc: 0.4193
Epoch 22/100
40000/40000 [==============================] - 3s - loss: 1.5908 - acc: 0.4267 - val_loss: 1.6039 - val_acc: 0.4243
Epoch 23/100
40000/40000 [==============================] - 3s - loss: 1.5857 - acc: 0.4270 - val_loss: 1.6017 - val_acc: 0.4291
Epoch 24/100
40000/40000 [==============================] - 3s - loss: 1.5853 - acc: 0.4317 - val_loss: 1.6062 - val_acc: 0.4259
Epoch 25/100
40000/40000 [==============================] - 3s - loss: 1.5802 - acc: 0.4302 - val_loss: 1.6026 - val_acc: 0.4225
Epoch 26/100
40000/40000 [==============================] - 3s - loss: 1.5790 - acc: 0.4298 - val_loss: 1.5926 - val_acc: 0.4281
Epoch 27/100
40000/40000 [==============================] - 3s - loss: 1.5748 - acc: 0.4350 - val_loss: 1.5794 - val_acc: 0.4364
Epoch 28/100
40000/40000 [==============================] - 3s - loss: 1.5817 - acc: 0.4327 - val_loss: 1.5680 - val_acc: 0.4384
Epoch 29/100
40000/40000 [==============================] - 3s - loss: 1.5676 - acc: 0.4374 - val_loss: 1.5958 - val_acc: 0.4255
Epoch 30/100
40000/40000 [==============================] - 3s - loss: 1.5719 - acc: 0.4358 - val_loss: 1.5799 - val_acc: 0.4325
Epoch 31/100
40000/40000 [==============================] - 3s - loss: 1.5649 - acc: 0.4355 - val_loss: 1.6138 - val_acc: 0.4148
Epoch 32/100
40000/40000 [==============================] - 3s - loss: 1.5566 - acc: 0.4378 - val_loss: 1.5812 - val_acc: 0.4340
Epoch 33/100
40000/40000 [==============================] - 3s - loss: 1.5568 - acc: 0.4389 - val_loss: 1.5877 - val_acc: 0.4350
Epoch 34/100
40000/40000 [==============================] - 3s - loss: 1.5612 - acc: 0.4379 - val_loss: 1.5756 - val_acc: 0.4340
Epoch 35/100
40000/40000 [==============================] - 3s - loss: 1.5567 - acc: 0.4404 - val_loss: 1.5696 - val_acc: 0.4409
Epoch 36/100
40000/40000 [==============================] - 3s - loss: 1.5492 - acc: 0.4438 - val_loss: 1.5834 - val_acc: 0.4346
Epoch 37/100
40000/40000 [==============================] - 3s - loss: 1.5529 - acc: 0.4404 - val_loss: 1.5746 - val_acc: 0.4388
Epoch 38/100
40000/40000 [==============================] - 3s - loss: 1.5568 - acc: 0.4397 - val_loss: 1.5752 - val_acc: 0.4397
Epoch 39/100
40000/40000 [==============================] - 3s - loss: 1.5486 - acc: 0.4422 - val_loss: 1.5629 - val_acc: 0.4439
Epoch 40/100
40000/40000 [==============================] - 3s - loss: 1.5472 - acc: 0.4444 - val_loss: 1.5671 - val_acc: 0.4406
Epoch 41/100
40000/40000 [==============================] - 3s - loss: 1.5353 - acc: 0.4450 - val_loss: 1.5766 - val_acc: 0.4365
Epoch 42/100
40000/40000 [==============================] - 3s - loss: 1.5413 - acc: 0.4451 - val_loss: 1.5673 - val_acc: 0.4399
Epoch 43/100
40000/40000 [==============================] - 3s - loss: 1.5463 - acc: 0.4415 - val_loss: 1.5633 - val_acc: 0.4452
Epoch 44/100
40000/40000 [==============================] - 3s - loss: 1.5376 - acc: 0.4482 - val_loss: 1.5612 - val_acc: 0.4459
Epoch 45/100
40000/40000 [==============================] - 3s - loss: 1.5431 - acc: 0.4449 - val_loss: 1.5704 - val_acc: 0.4442
Epoch 46/100
40000/40000 [==============================] - 3s - loss: 1.5364 - acc: 0.4454 - val_loss: 1.5739 - val_acc: 0.4371
Epoch 47/100
40000/40000 [==============================] - 3s - loss: 1.5366 - acc: 0.4484 - val_loss: 1.5691 - val_acc: 0.4416
Epoch 48/100
40000/40000 [==============================] - 3s - loss: 1.5324 - acc: 0.4507 - val_loss: 1.5554 - val_acc: 0.4414
Epoch 49/100
40000/40000 [==============================] - 3s - loss: 1.5341 - acc: 0.4487 - val_loss: 1.5696 - val_acc: 0.4434
Epoch 50/100
40000/40000 [==============================] - 3s - loss: 1.5319 - acc: 0.4501 - val_loss: 1.5748 - val_acc: 0.4406
Epoch 51/100
40000/40000 [==============================] - 3s - loss: 1.5302 - acc: 0.4482 - val_loss: 1.5678 - val_acc: 0.4445
Epoch 52/100
40000/40000 [==============================] - 3s - loss: 1.5263 - acc: 0.4526 - val_loss: 1.5426 - val_acc: 0.4537
Epoch 53/100
40000/40000 [==============================] - 4s - loss: 1.5225 - acc: 0.4493 - val_loss: 1.5572 - val_acc: 0.4451
Epoch 54/100
40000/40000 [==============================] - 3s - loss: 1.5222 - acc: 0.4542 - val_loss: 1.5564 - val_acc: 0.4412
Epoch 55/100
40000/40000 [==============================] - 3s - loss: 1.5275 - acc: 0.4495 - val_loss: 1.5684 - val_acc: 0.4421
Epoch 56/100
40000/40000 [==============================] - 3s - loss: 1.5217 - acc: 0.4535 - val_loss: 1.5479 - val_acc: 0.4506
Epoch 57/100
40000/40000 [==============================] - 3s - loss: 1.5218 - acc: 0.4533 - val_loss: 1.5518 - val_acc: 0.4506
Epoch 58/100
40000/40000 [==============================] - 3s - loss: 1.5217 - acc: 0.4561 - val_loss: 1.5587 - val_acc: 0.4442
Epoch 59/100
40000/40000 [==============================] - 3s - loss: 1.5158 - acc: 0.4522 - val_loss: 1.5623 - val_acc: 0.4412
Epoch 60/100
40000/40000 [==============================] - 3s - loss: 1.5151 - acc: 0.4532 - val_loss: 1.5581 - val_acc: 0.4475
Epoch 61/100
40000/40000 [==============================] - 3s - loss: 1.5173 - acc: 0.4560 - val_loss: 1.5461 - val_acc: 0.4493
Epoch 62/100
40000/40000 [==============================] - 3s - loss: 1.5095 - acc: 0.4572 - val_loss: 1.5490 - val_acc: 0.4482
Epoch 63/100
40000/40000 [==============================] - 3s - loss: 1.5107 - acc: 0.4569 - val_loss: 1.5462 - val_acc: 0.4510
Epoch 64/100
40000/40000 [==============================] - 3s - loss: 1.5098 - acc: 0.4588 - val_loss: 1.5631 - val_acc: 0.4438
Epoch 65/100
40000/40000 [==============================] - 3s - loss: 1.5120 - acc: 0.4595 - val_loss: 1.5428 - val_acc: 0.4528
Epoch 66/100
40000/40000 [==============================] - 3s - loss: 1.5066 - acc: 0.4585 - val_loss: 1.5735 - val_acc: 0.4341
Epoch 67/100
40000/40000 [==============================] - 3s - loss: 1.5088 - acc: 0.4550 - val_loss: 1.5511 - val_acc: 0.4466
Epoch 68/100
40000/40000 [==============================] - 3s - loss: 1.5180 - acc: 0.4534 - val_loss: 1.5471 - val_acc: 0.4489
Epoch 69/100
40000/40000 [==============================] - 3s - loss: 1.5126 - acc: 0.4551 - val_loss: 1.5488 - val_acc: 0.4510
Epoch 70/100
40000/40000 [==============================] - 3s - loss: 1.5020 - acc: 0.4603 - val_loss: 1.5470 - val_acc: 0.4483
Epoch 71/100
40000/40000 [==============================] - 3s - loss: 1.5018 - acc: 0.4571 - val_loss: 1.5596 - val_acc: 0.4401
Epoch 72/100
40000/40000 [==============================] - 3s - loss: 1.5071 - acc: 0.4560 - val_loss: 1.5578 - val_acc: 0.4456
Epoch 73/100
40000/40000 [==============================] - 3s - loss: 1.5037 - acc: 0.4581 - val_loss: 1.5709 - val_acc: 0.4400
Epoch 74/100
40000/40000 [==============================] - 3s - loss: 1.5014 - acc: 0.4599 - val_loss: 1.5560 - val_acc: 0.4447
Epoch 75/100
40000/40000 [==============================] - 3s - loss: 1.5026 - acc: 0.4604 - val_loss: 1.5414 - val_acc: 0.4494
Epoch 76/100
40000/40000 [==============================] - 3s - loss: 1.5052 - acc: 0.4593 - val_loss: 1.5674 - val_acc: 0.4429
Epoch 77/100
40000/40000 [==============================] - 3s - loss: 1.4984 - acc: 0.4609 - val_loss: 1.5548 - val_acc: 0.4453
Epoch 78/100
40000/40000 [==============================] - 3s - loss: 1.4982 - acc: 0.4619 - val_loss: 1.5486 - val_acc: 0.4515
Epoch 79/100
40000/40000 [==============================] - 3s - loss: 1.4978 - acc: 0.4587 - val_loss: 1.5488 - val_acc: 0.4514
Epoch 80/100
40000/40000 [==============================] - 3s - loss: 1.4993 - acc: 0.4620 - val_loss: 1.5612 - val_acc: 0.4480
Epoch 81/100
40000/40000 [==============================] - 3s - loss: 1.5008 - acc: 0.4611 - val_loss: 1.5573 - val_acc: 0.4454
Epoch 82/100
40000/40000 [==============================] - 3s - loss: 1.4982 - acc: 0.4612 - val_loss: 1.5651 - val_acc: 0.4412
Epoch 83/100
40000/40000 [==============================] - 3s - loss: 1.4960 - acc: 0.4637 - val_loss: 1.5645 - val_acc: 0.4477
Epoch 84/100
40000/40000 [==============================] - 3s - loss: 1.4911 - acc: 0.4642 - val_loss: 1.5507 - val_acc: 0.4510
Epoch 85/100
40000/40000 [==============================] - 3s - loss: 1.4958 - acc: 0.4613 - val_loss: 1.5455 - val_acc: 0.4477
Epoch 86/100
40000/40000 [==============================] - 3s - loss: 1.4949 - acc: 0.4656 - val_loss: 1.5400 - val_acc: 0.4499
Epoch 87/100
40000/40000 [==============================] - 3s - loss: 1.4959 - acc: 0.4611 - val_loss: 1.5511 - val_acc: 0.4460
Epoch 88/100
40000/40000 [==============================] - 3s - loss: 1.4957 - acc: 0.4618 - val_loss: 1.5635 - val_acc: 0.4490
Epoch 89/100
40000/40000 [==============================] - 3s - loss: 1.4934 - acc: 0.4621 - val_loss: 1.5390 - val_acc: 0.4559
Epoch 90/100
40000/40000 [==============================] - 3s - loss: 1.4901 - acc: 0.4639 - val_loss: 1.5601 - val_acc: 0.4461
Epoch 91/100
40000/40000 [==============================] - 3s - loss: 1.4909 - acc: 0.4630 - val_loss: 1.5555 - val_acc: 0.4446
Epoch 92/100
40000/40000 [==============================] - 3s - loss: 1.4881 - acc: 0.4641 - val_loss: 1.5385 - val_acc: 0.4524
Epoch 93/100
40000/40000 [==============================] - 3s - loss: 1.4873 - acc: 0.4665 - val_loss: 1.5684 - val_acc: 0.4400
Epoch 94/100
40000/40000 [==============================] - 3s - loss: 1.4907 - acc: 0.4649 - val_loss: 1.5666 - val_acc: 0.4410
Epoch 95/100
40000/40000 [==============================] - 3s - loss: 1.4918 - acc: 0.4630 - val_loss: 1.5552 - val_acc: 0.4467
Epoch 96/100
40000/40000 [==============================] - 3s - loss: 1.4867 - acc: 0.4664 - val_loss: 1.5469 - val_acc: 0.4540
Epoch 97/100
40000/40000 [==============================] - 3s - loss: 1.4862 - acc: 0.4663 - val_loss: 1.5671 - val_acc: 0.4434
Epoch 98/100
40000/40000 [==============================] - 3s - loss: 1.4865 - acc: 0.4651 - val_loss: 1.5490 - val_acc: 0.4505
Epoch 99/100
40000/40000 [==============================] - 3s - loss: 1.4827 - acc: 0.4662 - val_loss: 1.5409 - val_acc: 0.4550
Epoch 100/100
40000/40000 [==============================] - 3s - loss: 1.4865 - acc: 0.4667 - val_loss: 1.5582 - val_acc: 0.4468
 9700/10000 [============================>.] - ETA: 0s
Test Loss and Accuracy -> [1.549043711423874, 0.44729998648166658]
