In [1]:
# Classification DNN model implementation ########################
from keras import layers, models


class DNN(models.Sequential):
    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        # Two ReLU hidden layers followed by a softmax output for multi-class classification
        self.add(layers.Dense(Nh_l[0], activation='relu',
                              input_shape=(Nin,), name='Hidden-1'))
        self.add(layers.Dense(Nh_l[1], activation='relu',
                              name='Hidden-2'))
        self.add(layers.Dense(Nout, activation='softmax'))
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])


# Data preparation ##############################
from ann_mnist_cl import Data_func

# Training result analysis ##############################
from ann_mnist_cl import plot_loss, plot_acc
import matplotlib.pyplot as plt


# Classification DNN training and testing ####################
def main():
    Nin = 784                 # 28 x 28 MNIST images flattened to 784 input features
    Nh_l = [100, 50]          # sizes of the two hidden layers
    number_of_class = 10
    Nout = number_of_class

    (X_train, Y_train), (X_test, Y_test) = Data_func()
    model = DNN(Nin, Nh_l, Nout)

    history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
    performance_test = model.evaluate(X_test, Y_test, batch_size=100)
    print('Test Loss and Accuracy ->', performance_test)

    plot_acc(history)
    plt.show()
    plot_loss(history)
    plt.show()
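The helper module ann_mnist_cl is imported above but not listed in this notebook. Below is a minimal sketch of what its three helpers might look like, assuming the usual MNIST preprocessing (flatten to 784 features, scale to [0, 1], one-hot encode the labels) and simple Matplotlib plots of the Keras History object; the actual ann_mnist_cl file may differ in details such as the history key names.

# Hypothetical sketch of the ann_mnist_cl helpers (assumed, not the original file)
import numpy as np
from keras import datasets, utils
import matplotlib.pyplot as plt


def Data_func():
    # Load MNIST, flatten 28x28 images to 784-dim vectors, scale to [0, 1],
    # and one-hot encode the labels for categorical_crossentropy
    (x_train, y_train), (x_test, y_test) = datasets.mnist.load_data()
    Y_train = utils.to_categorical(y_train)
    Y_test = utils.to_categorical(y_test)
    X_train = x_train.reshape(-1, 784).astype('float32') / 255.0
    X_test = x_test.reshape(-1, 784).astype('float32') / 255.0
    return (X_train, Y_train), (X_test, Y_test)


def plot_loss(history, title=None):
    # Plot training vs. validation loss from the Keras History object
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    if title is not None:
        plt.title(title)
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    plt.legend(['Training', 'Validation'], loc=0)


def plot_acc(history, title=None):
    # Plot training vs. validation accuracy
    # (older Keras versions store these under 'acc' / 'val_acc' instead)
    plt.plot(history.history['accuracy'])
    plt.plot(history.history['val_accuracy'])
    if title is not None:
        plt.title(title)
    plt.ylabel('Accuracy')
    plt.xlabel('Epoch')
    plt.legend(['Training', 'Validation'], loc=0)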
In [3]:
Nin = 784
Nh_l = [100, 50]
number_of_class = 10
Nout = number_of_class
(X_train, Y_train), (X_test, Y_test) = Data_func()
model = DNN(Nin, Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=10, batch_size=100, validation_split=0.2)
performance_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performance_test)
plot_acc(history)
plt.show()
plot_loss(history)
plt.show()
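As a quick sanity check after evaluation (not part of the original listing), the softmax outputs can be converted to class labels with argmax and compared against the one-hot test labels. This assumes the X_test and Y_test arrays produced by Data_func above.

# Usage sketch: inspect predictions for a few test samples
import numpy as np

probabilities = model.predict(X_test[:5])        # softmax scores, shape (5, 10)
predicted_classes = np.argmax(probabilities, axis=1)
true_classes = np.argmax(Y_test[:5], axis=1)     # undo the one-hot encoding
print('Predicted:', predicted_classes)
print('True:     ', true_classes)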