In [2]:
# %load /home/sjkim/.jupyter/head.py
# Cell contents were inserted by the %load magic above from a personal
# notebook-header template (head.py): inline plotting, live module reload,
# and the usual numeric/plotting imports.
%matplotlib inline
%load_ext autoreload 
%autoreload 2
# %autoreload 2: re-import all modules before each cell executes, so edits
# to ex2_2_ann_rg.py (used below) are picked up without a kernel restart.
from importlib import reload

import matplotlib.pyplot as plt
import numpy as np

import pandas as pd
import os
# Toggle: uncomment to pin TensorFlow/Keras to GPU 0 only.
# Must run before the backend is imported to take effect.
#os.environ["CUDA_VISIBLE_DEVICES"]="0"

# seaborn
# Optional seaborn styling — disabled in this run.
# NOTE(review): dead commented-out config below; consider deleting or moving
# to head.py — finished notebooks shouldn't carry commented-out blocks.
#import seaborn as sns
#sns.set( style = 'white', font_scale = 1.7)
#sns.set_style('ticks')
#plt.rcParams['savefig.dpi'] = 200

# font for matplotlib
# Optional Korean (NanumGothic) font setup for matplotlib labels — disabled.
#import matplotlib
#import matplotlib.font_manager as fm
#fm.get_fontconfig_fonts()
#font_location = '/usr/share/fonts/truetype/nanum/NanumGothicBold.ttf'
#font_name = fm.FontProperties(fname=font_location).get_name()
#matplotlib.rc('font', family=font_name)


The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload

In [19]:
# Import the local example module and force a reload so that any on-disk
# edits to ex2_2_ann_rg.py are picked up immediately. reload() is the last
# expression, so the cell displays the module repr (see Out[19]).
# NOTE(review): execution count jumps from In[2] to In[19] — cells were run
# out of order / repeatedly; verify the notebook survives Restart & Run All.
import ex2_2_ann_rg
reload(ex2_2_ann_rg)


Out[19]:
<module 'ex2_2_ann_rg' from '/home/sjkim/Dropbox/Aspuru-Guzik/python_lab/py3/keraspp/ex2_2_ann_rg.py'>

In [20]:
# Run the example's entry point. Per the training log below it fits a model
# (Keras-style output: 323 train / 81 validation samples, 100 epochs) and
# prints the final test loss (~30.14 on 102 test samples in this run).
# NOTE(review): no random seed is set in this notebook — presumably results
# vary between runs unless main() seeds internally; TODO confirm.
ex2_2_ann_rg.main()


Train on 323 samples, validate on 81 samples
Epoch 1/100
0s - loss: 549.2300 - val_loss: 544.1247
Epoch 2/100
0s - loss: 390.5700 - val_loss: 103.3929
Epoch 3/100
0s - loss: 98.1901 - val_loss: 85.2140
Epoch 4/100
0s - loss: 77.4438 - val_loss: 81.0683
Epoch 5/100
0s - loss: 66.6654 - val_loss: 61.9228
Epoch 6/100
0s - loss: 61.4270 - val_loss: 65.4058
Epoch 7/100
0s - loss: 71.9419 - val_loss: 58.9549
Epoch 8/100
0s - loss: 51.9858 - val_loss: 93.1046
Epoch 9/100
0s - loss: 62.5920 - val_loss: 62.6678
Epoch 10/100
0s - loss: 47.1879 - val_loss: 53.1253
Epoch 11/100
0s - loss: 42.8130 - val_loss: 49.0748
Epoch 12/100
0s - loss: 40.8272 - val_loss: 41.4224
Epoch 13/100
0s - loss: 38.3668 - val_loss: 54.4867
Epoch 14/100
0s - loss: 42.8160 - val_loss: 41.1169
Epoch 15/100
0s - loss: 40.6437 - val_loss: 44.5767
Epoch 16/100
0s - loss: 34.1632 - val_loss: 44.1284
Epoch 17/100
0s - loss: 32.6142 - val_loss: 35.6701
Epoch 18/100
0s - loss: 31.8371 - val_loss: 41.4578
Epoch 19/100
0s - loss: 31.4799 - val_loss: 33.7412
Epoch 20/100
0s - loss: 31.6695 - val_loss: 31.0509
Epoch 21/100
0s - loss: 30.1381 - val_loss: 29.6005
Epoch 22/100
0s - loss: 29.6422 - val_loss: 29.8011
Epoch 23/100
0s - loss: 27.3097 - val_loss: 32.7661
Epoch 24/100
0s - loss: 28.1749 - val_loss: 31.3858
Epoch 25/100
0s - loss: 29.1247 - val_loss: 37.3145
Epoch 26/100
0s - loss: 47.5037 - val_loss: 61.4627
Epoch 27/100
0s - loss: 43.6486 - val_loss: 35.6210
Epoch 28/100
0s - loss: 33.7183 - val_loss: 31.1215
Epoch 29/100
0s - loss: 32.7949 - val_loss: 91.2134
Epoch 30/100
0s - loss: 84.2713 - val_loss: 25.8957
Epoch 31/100
0s - loss: 25.5216 - val_loss: 36.2650
Epoch 32/100
0s - loss: 28.5257 - val_loss: 40.8993
Epoch 33/100
0s - loss: 72.1279 - val_loss: 43.6365
Epoch 34/100
0s - loss: 41.9866 - val_loss: 27.7356
Epoch 35/100
0s - loss: 26.8115 - val_loss: 33.6829
Epoch 36/100
0s - loss: 27.2886 - val_loss: 49.3238
Epoch 37/100
0s - loss: 63.5212 - val_loss: 25.4363
Epoch 38/100
0s - loss: 24.6916 - val_loss: 25.2577
Epoch 39/100
0s - loss: 25.0479 - val_loss: 24.3853
Epoch 40/100
0s - loss: 26.0277 - val_loss: 24.0724
Epoch 41/100
0s - loss: 24.5822 - val_loss: 26.5253
Epoch 42/100
0s - loss: 27.0171 - val_loss: 25.3956
Epoch 43/100
0s - loss: 24.5532 - val_loss: 26.1544
Epoch 44/100
0s - loss: 24.5788 - val_loss: 24.1822
Epoch 45/100
0s - loss: 27.3841 - val_loss: 36.1446
Epoch 46/100
0s - loss: 32.5138 - val_loss: 25.3374
Epoch 47/100
0s - loss: 25.1175 - val_loss: 49.5074
Epoch 48/100
0s - loss: 39.5790 - val_loss: 40.8372
Epoch 49/100
0s - loss: 27.3132 - val_loss: 25.7616
Epoch 50/100
0s - loss: 24.0742 - val_loss: 33.7171
Epoch 51/100
0s - loss: 39.2199 - val_loss: 23.2041
Epoch 52/100
0s - loss: 23.2444 - val_loss: 24.4851
Epoch 53/100
0s - loss: 27.8802 - val_loss: 46.7543
Epoch 54/100
0s - loss: 29.9209 - val_loss: 46.1203
Epoch 55/100
0s - loss: 37.3527 - val_loss: 77.2600
Epoch 56/100
0s - loss: 43.2760 - val_loss: 47.8863
Epoch 57/100
0s - loss: 33.8143 - val_loss: 24.2472
Epoch 58/100
0s - loss: 23.8081 - val_loss: 22.6879
Epoch 59/100
0s - loss: 23.1451 - val_loss: 27.6580
Epoch 60/100
0s - loss: 24.2453 - val_loss: 40.6752
Epoch 61/100
0s - loss: 28.9283 - val_loss: 25.5648
Epoch 62/100
0s - loss: 23.6439 - val_loss: 23.7677
Epoch 63/100
0s - loss: 30.2104 - val_loss: 22.3435
Epoch 64/100
0s - loss: 23.2911 - val_loss: 27.7983
Epoch 65/100
0s - loss: 34.9106 - val_loss: 34.5690
Epoch 66/100
0s - loss: 36.8702 - val_loss: 23.0353
Epoch 67/100
0s - loss: 27.9201 - val_loss: 24.5019
Epoch 68/100
0s - loss: 25.6354 - val_loss: 22.2898
Epoch 69/100
0s - loss: 24.0518 - val_loss: 22.5758
Epoch 70/100
0s - loss: 22.6869 - val_loss: 22.1801
Epoch 71/100
0s - loss: 24.7605 - val_loss: 21.6672
Epoch 72/100
0s - loss: 25.1309 - val_loss: 23.6435
Epoch 73/100
0s - loss: 22.6495 - val_loss: 24.3107
Epoch 74/100
0s - loss: 25.7740 - val_loss: 38.4270
Epoch 75/100
0s - loss: 31.0539 - val_loss: 25.8740
Epoch 76/100
0s - loss: 23.7850 - val_loss: 24.1559
Epoch 77/100
0s - loss: 29.6644 - val_loss: 24.6882
Epoch 78/100
0s - loss: 24.3368 - val_loss: 21.3049
Epoch 79/100
0s - loss: 22.8251 - val_loss: 21.4472
Epoch 80/100
0s - loss: 23.4458 - val_loss: 24.6260
Epoch 81/100
0s - loss: 29.0920 - val_loss: 34.8721
Epoch 82/100
0s - loss: 37.4682 - val_loss: 22.1323
Epoch 83/100
0s - loss: 22.7357 - val_loss: 21.7718
Epoch 84/100
0s - loss: 22.8009 - val_loss: 24.7953
Epoch 85/100
0s - loss: 26.4292 - val_loss: 21.6159
Epoch 86/100
0s - loss: 26.7697 - val_loss: 29.5336
Epoch 87/100
0s - loss: 34.8803 - val_loss: 38.1017
Epoch 88/100
0s - loss: 31.0725 - val_loss: 21.2199
Epoch 89/100
0s - loss: 22.3300 - val_loss: 22.7985
Epoch 90/100
0s - loss: 22.4448 - val_loss: 21.4731
Epoch 91/100
0s - loss: 24.7583 - val_loss: 25.1043
Epoch 92/100
0s - loss: 23.1121 - val_loss: 21.2430
Epoch 93/100
0s - loss: 22.2409 - val_loss: 24.0846
Epoch 94/100
0s - loss: 22.1329 - val_loss: 26.7777
Epoch 95/100
0s - loss: 21.9800 - val_loss: 20.7085
Epoch 96/100
0s - loss: 22.2884 - val_loss: 20.5597
Epoch 97/100
0s - loss: 23.3004 - val_loss: 22.8056
Epoch 98/100
0s - loss: 22.8027 - val_loss: 23.2885
Epoch 99/100
0s - loss: 22.1984 - val_loss: 26.6096
Epoch 100/100
0s - loss: 26.8275 - val_loss: 31.8822
100/102 [============================>.] - ETA: 0s
Test Loss -> 30.14

In [ ]: