Example 6-1: Autoencoder (AE) using fully connected layers


In [1]:
# %load /home/sjkim/.jupyter/head.py
%matplotlib inline
%load_ext autoreload
%autoreload 2

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import os
# os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # uncomment to pin a specific GPU

In [2]:
import ex6_1_ae_fc_mnist_mc as example


Using TensorFlow backend.
(60000, 784)
(10000, 784)
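
Importing the example module loads MNIST and prints the flattened data shapes: 60,000 training images and 10,000 test images, each reshaped from 28x28 pixels to a 784-dimensional vector. Below is a minimal sketch of how this kind of data preparation typically looks in Keras; the actual contents of ex6_1_ae_fc_mnist_mc may differ.

from keras.datasets import mnist

# Load MNIST digits; labels are not needed for an autoencoder.
(x_train, _), (x_test, _) = mnist.load_data()

# Flatten 28x28 images to 784-dim vectors and scale pixel values to [0, 1].
x_train = x_train.reshape(-1, 784).astype('float32') / 255.0
x_test = x_test.reshape(-1, 784).astype('float32') / 255.0

print(x_train.shape)  # (60000, 784)
print(x_test.shape)   # (10000, 784)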

In [3]:
example.main()


Train on 60000 samples, validate on 10000 samples
Epoch 1/10
60000/60000 [==============================] - 2s - loss: 0.3648 - acc: 0.7527 - val_loss: 0.2718 - val_acc: 0.7934
Epoch 2/10
60000/60000 [==============================] - 1s - loss: 0.2641 - acc: 0.7968 - val_loss: 0.2530 - val_acc: 0.7968
Epoch 3/10
60000/60000 [==============================] - 1s - loss: 0.2431 - acc: 0.7969 - val_loss: 0.2308 - val_acc: 0.7970
Epoch 4/10
60000/60000 [==============================] - 1s - loss: 0.2223 - acc: 0.7976 - val_loss: 0.2117 - val_acc: 0.7978
Epoch 5/10
60000/60000 [==============================] - 1s - loss: 0.2058 - acc: 0.7989 - val_loss: 0.1978 - val_acc: 0.7992
Epoch 6/10
60000/60000 [==============================] - 1s - loss: 0.1939 - acc: 0.8001 - val_loss: 0.1878 - val_acc: 0.8008
Epoch 7/10
60000/60000 [==============================] - 1s - loss: 0.1850 - acc: 0.8013 - val_loss: 0.1798 - val_acc: 0.8016
Epoch 8/10
60000/60000 [==============================] - 1s - loss: 0.1778 - acc: 0.8025 - val_loss: 0.1735 - val_acc: 0.8030
Epoch 9/10
60000/60000 [==============================] - 1s - loss: 0.1718 - acc: 0.8035 - val_loss: 0.1678 - val_acc: 0.8033
Epoch 10/10
60000/60000 [==============================] - 1s - loss: 0.1667 - acc: 0.8043 - val_loss: 0.1631 - val_acc: 0.8040
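
example.main() builds the fully connected autoencoder, trains it for 10 epochs, and validates on the test set; the binary cross-entropy loss falls from about 0.36 to 0.17 while the accuracy metric rises slightly. The sketch below shows one way such a model can be written, reusing x_train and x_test from the loading sketch above. The latent dimension, optimizer, and batch size are assumptions for illustration, not necessarily the values used inside the book's module.

from keras.layers import Input, Dense
from keras.models import Model

def build_ae(x_nodes=784, z_dim=36):
    # Encoder: compress the 784-dim input to a small latent code (z_dim is assumed).
    x = Input(shape=(x_nodes,))
    z = Dense(z_dim, activation='relu')(x)
    # Decoder: reconstruct the 784-dim input from the latent code.
    y = Dense(x_nodes, activation='sigmoid')(z)
    return Model(x, y)

ae = build_ae()
ae.compile(optimizer='adadelta', loss='binary_crossentropy', metrics=['accuracy'])

# Train the AE to reproduce its own input: x_train is both input and target.
history = ae.fit(x_train, x_train,
                 epochs=10, batch_size=256, shuffle=True,
                 validation_data=(x_test, x_test))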
