In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param


Out[2]:
{'forget_bias': 1.0,
 'learning_rate': 0.1,
 'length_of_sequences': 50,
 'num_of_hidden_nodes': 2,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 200,
 'train_data_path': '../train_data/normal.npy'}
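
Before using the configuration, it can help to confirm that every hyperparameter the later cells rely on is actually present. This is only an illustrative sanity check; the required-key list below is inferred from the dictionary above, not taken from the original training script.

In [ ]:
# Hypothetical sanity check: confirm all expected hyperparameters were loaded.
required_keys = {
    "forget_bias", "learning_rate", "length_of_sequences",
    "num_of_hidden_nodes", "num_of_input_nodes", "num_of_output_nodes",
    "num_of_prediction_epochs", "num_of_training_epochs",
    "optimizer", "seed", "size_of_mini_batch", "train_data_path",
}
missing = required_keys - set(param)
assert not missing, "missing hyperparameters: %s" % sorted(missing)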

In [3]:
train = np.load(param["train_data_path"])
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])
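
Each row is a consecutive-sample pair (x_t, x_{t+1}) of what appears to be a sine wave whose period equals length_of_sequences (50 samples); for instance, sin(2π·1/50) ≈ 0.125333 matches the second value of the first row. The generator script is not shown here, so the following reconstruction is an assumption that can be verified against the loaded array.

In [ ]:
# Hedged check: rebuild the assumed generator (a sine with a 50-step period,
# arranged as (x_t, x_{t+1}) pairs) and compare it to the loaded training data.
steps = np.arange(len(train) + 1)
wave = np.sin(2.0 * np.pi * steps / param["length_of_sequences"])
pairs = np.stack([wave[:-1], wave[1:]], axis=1)
np.allclose(pairs, train)  # True if the assumption about the generator holds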

In [4]:
initial = np.load("initial.npy")
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01])
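
initial holds the 50 seed values fed to the network before free-running prediction starts. Judging by the values, it is simply the first input column of the training data, i.e. one full period of the sine; the quick comparison below checks that assumption.

In [ ]:
# Hedged check: the seed sequence appears to equal the first 50 training inputs.
np.allclose(initial, train[:len(initial), 0])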

In [5]:
output = np.load("output.npy")
output


Out[5]:
array([ 0.01311301,  0.13354006,  0.24452324,  0.34930223,  0.45246619,
        0.55727184,  0.66386026,  0.76783603,  0.86038578,  0.93154269,
        0.97527504,  0.9918974 ,  0.98608649,  0.96348995,  0.92867464,
        0.88461602,  0.83291483,  0.77415717,  0.7082116 ,  0.63443953,
        0.55186272,  0.45934409,  0.35585111,  0.24085523,  0.11487728,
       -0.01994736, -0.15959351, -0.29853466, -0.43085867, -0.55159652,
       -0.65752041, -0.74712729, -0.82014614, -0.87702906, -0.91862017,
       -0.94595569, -0.96010995, -0.9620524 , -0.95252335, -0.93193537,
       -0.90030307, -0.85719675, -0.80173373, -0.73264486, -0.64850599,
       -0.54828018, -0.43230787, -0.30358937, -0.16845392, -0.03514604,
        0.08976635,  0.20437618,  0.31121379,  0.41476923,  0.51898217,
        0.62530464,  0.73108029,  0.8288964 ,  0.90865403,  0.96243602,
        0.98833936,  0.98988253,  0.97268844,  0.94182438,  0.90082508,
        0.85173845,  0.79547292,  0.73212498,  0.66121674,  0.58187264,
        0.49298859,  0.39345902,  0.28252417,  0.16026904,  0.02820768,
       -0.11029578, -0.25014931, -0.38542172, -0.51068318, -0.6220569 ,
       -0.71747571, -0.7962988 , -0.85877067, -0.90562367, -0.93784469,
       -0.95651072, -0.96264356, -0.95707643, -0.9403463 , -0.91261446,
       -0.8736099 , -0.82260698, -0.75845653, -0.67974657, -0.5852167 ,
       -0.47458458, -0.34978139, -0.21599619, -0.08113085,  0.04720839])
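
output contains the 100 values produced by feeding the network's own predictions back in (num_of_prediction_epochs = 100). If the underlying signal really is the 50-sample sine assumed above, the prediction error can be summarized as an RMSE against the ideal continuation. This is a rough illustrative check, not part of the original notebook.

In [ ]:
# Hedged check: RMSE of the free-running prediction against the ideal
# continuation of the assumed sine wave.
t = np.arange(len(initial), len(initial) + len(output))
ideal = np.sin(2.0 * np.pi * t / param["length_of_sequences"])
rmse = np.sqrt(np.mean((output - ideal) ** 2))
rmse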

In [6]:
losses = np.load("losses.npy")
losses


Out[6]:
array([[  1.00000000e+01,   5.13497651e-01],
       [  2.00000000e+01,   4.71058875e-01],
       [  3.00000000e+01,   3.43772322e-01],
       [  4.00000000e+01,   2.35591590e-01],
       [  5.00000000e+01,   1.62747189e-01],
       [  6.00000000e+01,   1.04189508e-01],
       [  7.00000000e+01,   8.41336846e-02],
       [  8.00000000e+01,   7.13223368e-02],
       [  9.00000000e+01,   6.05947003e-02],
       [  1.00000000e+02,   6.07711561e-02],
       [  1.10000000e+02,   5.09449244e-02],
       [  1.20000000e+02,   4.76497449e-02],
       [  1.30000000e+02,   4.23866361e-02],
       [  1.40000000e+02,   3.79525200e-02],
       [  1.50000000e+02,   3.52244414e-02],
       [  1.60000000e+02,   3.52246128e-02],
       [  1.70000000e+02,   3.16811837e-02],
       [  1.80000000e+02,   2.95859519e-02],
       [  1.90000000e+02,   2.78269816e-02],
       [  2.00000000e+02,   2.51248833e-02],
       [  2.10000000e+02,   2.12513786e-02],
       [  2.20000000e+02,   2.32319403e-02],
       [  2.30000000e+02,   1.99624486e-02],
       [  2.40000000e+02,   1.81624424e-02],
       [  2.50000000e+02,   1.68206878e-02],
       [  2.60000000e+02,   1.60265844e-02],
       [  2.70000000e+02,   1.39298104e-02],
       [  2.80000000e+02,   1.21076619e-02],
       [  2.90000000e+02,   1.08381649e-02],
       [  3.00000000e+02,   9.85505804e-03],
       [  3.10000000e+02,   7.96282198e-03],
       [  3.20000000e+02,   7.29049789e-03],
       [  3.30000000e+02,   6.29866263e-03],
       [  3.40000000e+02,   5.54646738e-03],
       [  3.50000000e+02,   3.99183528e-03],
       [  3.60000000e+02,   4.15997254e-03],
       [  3.70000000e+02,   4.11529653e-03],
       [  3.80000000e+02,   3.75507958e-03],
       [  3.90000000e+02,   3.16604250e-03],
       [  4.00000000e+02,   3.40545946e-03],
       [  4.10000000e+02,   3.13815055e-03],
       [  4.20000000e+02,   3.09214671e-03],
       [  4.30000000e+02,   2.85808626e-03],
       [  4.40000000e+02,   2.64981855e-03],
       [  4.50000000e+02,   3.05691781e-03],
       [  4.60000000e+02,   2.83524278e-03],
       [  4.70000000e+02,   2.70717265e-03],
       [  4.80000000e+02,   2.59353733e-03],
       [  4.90000000e+02,   2.91666854e-03],
       [  5.00000000e+02,   2.64900038e-03],
       [  5.10000000e+02,   2.29497836e-03],
       [  5.20000000e+02,   2.50129937e-03],
       [  5.30000000e+02,   2.64269742e-03],
       [  5.40000000e+02,   2.31181085e-03],
       [  5.50000000e+02,   2.38579372e-03],
       [  5.60000000e+02,   2.12836824e-03],
       [  5.70000000e+02,   2.17153737e-03],
       [  5.80000000e+02,   2.15383340e-03],
       [  5.90000000e+02,   2.27963366e-03],
       [  6.00000000e+02,   2.14716047e-03],
       [  6.10000000e+02,   2.08737608e-03],
       [  6.20000000e+02,   2.13648169e-03],
       [  6.30000000e+02,   2.13815179e-03],
       [  6.40000000e+02,   2.05223053e-03],
       [  6.50000000e+02,   2.01106723e-03],
       [  6.60000000e+02,   1.91995234e-03],
       [  6.70000000e+02,   1.76160154e-03],
       [  6.80000000e+02,   1.85484381e-03],
       [  6.90000000e+02,   1.97255495e-03],
       [  7.00000000e+02,   1.70651078e-03],
       [  7.10000000e+02,   1.88461086e-03],
       [  7.20000000e+02,   1.70335767e-03],
       [  7.30000000e+02,   1.80025818e-03],
       [  7.40000000e+02,   1.68715953e-03],
       [  7.50000000e+02,   1.77402410e-03],
       [  7.60000000e+02,   1.70327176e-03],
       [  7.70000000e+02,   1.41898636e-03],
       [  7.80000000e+02,   1.58348947e-03],
       [  7.90000000e+02,   1.49518612e-03],
       [  8.00000000e+02,   1.61369739e-03],
       [  8.10000000e+02,   1.56381121e-03],
       [  8.20000000e+02,   1.47295301e-03],
       [  8.30000000e+02,   1.40523014e-03],
       [  8.40000000e+02,   1.51853869e-03],
       [  8.50000000e+02,   1.41545769e-03],
       [  8.60000000e+02,   1.40633876e-03],
       [  8.70000000e+02,   1.40721502e-03],
       [  8.80000000e+02,   1.44402531e-03],
       [  8.90000000e+02,   1.35280937e-03],
       [  9.00000000e+02,   1.30475138e-03],
       [  9.10000000e+02,   1.18060503e-03],
       [  9.20000000e+02,   1.21864316e-03],
       [  9.30000000e+02,   1.16025214e-03],
       [  9.40000000e+02,   1.11927034e-03],
       [  9.50000000e+02,   1.15122611e-03],
       [  9.60000000e+02,   1.16532680e-03],
       [  9.70000000e+02,   1.10038300e-03],
       [  9.80000000e+02,   1.11531548e-03],
       [  9.90000000e+02,   1.06473512e-03],
       [  1.00000000e+03,   9.09202558e-04],
       [  1.01000000e+03,   9.82704572e-04],
       [  1.02000000e+03,   8.44699505e-04],
       [  1.03000000e+03,   1.06002251e-03],
       [  1.04000000e+03,   8.57956009e-04],
       [  1.05000000e+03,   9.75846197e-04],
       [  1.06000000e+03,   7.20903976e-04],
       [  1.07000000e+03,   7.87532656e-04],
       [  1.08000000e+03,   7.56485038e-04],
       [  1.09000000e+03,   8.10675905e-04],
       [  1.10000000e+03,   7.82994204e-04],
       [  1.11000000e+03,   7.42710545e-04],
       [  1.12000000e+03,   7.58478651e-04],
       [  1.13000000e+03,   6.89476670e-04],
       [  1.14000000e+03,   7.75623543e-04],
       [  1.15000000e+03,   8.26633302e-04],
       [  1.16000000e+03,   7.54329725e-04],
       [  1.17000000e+03,   6.65734347e-04],
       [  1.18000000e+03,   6.63045328e-04],
       [  1.19000000e+03,   5.99871622e-04],
       [  1.20000000e+03,   6.29754737e-04],
       [  1.21000000e+03,   6.21635932e-04],
       [  1.22000000e+03,   6.09093928e-04],
       [  1.23000000e+03,   6.96797506e-04],
       [  1.24000000e+03,   6.34086144e-04],
       [  1.25000000e+03,   5.88851282e-04],
       [  1.26000000e+03,   5.61622961e-04],
       [  1.27000000e+03,   5.66277420e-04],
       [  1.28000000e+03,   5.84167545e-04],
       [  1.29000000e+03,   5.67686686e-04],
       [  1.30000000e+03,   4.93959873e-04],
       [  1.31000000e+03,   5.15135878e-04],
       [  1.32000000e+03,   5.48032112e-04],
       [  1.33000000e+03,   5.50157274e-04],
       [  1.34000000e+03,   4.93907020e-04],
       [  1.35000000e+03,   5.06434822e-04],
       [  1.36000000e+03,   4.97319154e-04],
       [  1.37000000e+03,   5.32267790e-04],
       [  1.38000000e+03,   4.90465784e-04],
       [  1.39000000e+03,   4.95864253e-04],
       [  1.40000000e+03,   4.51744156e-04],
       [  1.41000000e+03,   3.92830931e-04],
       [  1.42000000e+03,   4.15726972e-04],
       [  1.43000000e+03,   4.15864051e-04],
       [  1.44000000e+03,   4.12001042e-04],
       [  1.45000000e+03,   4.28072293e-04],
       [  1.46000000e+03,   4.31612571e-04],
       [  1.47000000e+03,   4.16504656e-04],
       [  1.48000000e+03,   4.23760532e-04],
       [  1.49000000e+03,   3.68263485e-04],
       [  1.50000000e+03,   3.97093594e-04],
       [  1.51000000e+03,   3.88660294e-04],
       [  1.52000000e+03,   3.71333765e-04],
       [  1.53000000e+03,   4.28159459e-04],
       [  1.54000000e+03,   3.64999840e-04],
       [  1.55000000e+03,   3.54822056e-04],
       [  1.56000000e+03,   3.82053840e-04],
       [  1.57000000e+03,   3.53046868e-04],
       [  1.58000000e+03,   3.46817600e-04],
       [  1.59000000e+03,   3.41853272e-04],
       [  1.60000000e+03,   2.92314449e-04],
       [  1.61000000e+03,   3.53940530e-04],
       [  1.62000000e+03,   3.55084398e-04],
       [  1.63000000e+03,   3.28820839e-04],
       [  1.64000000e+03,   3.24780645e-04],
       [  1.65000000e+03,   2.97065097e-04],
       [  1.66000000e+03,   3.46610206e-04],
       [  1.67000000e+03,   3.08101909e-04],
       [  1.68000000e+03,   3.24001623e-04],
       [  1.69000000e+03,   3.48582224e-04],
       [  1.70000000e+03,   3.35805118e-04],
       [  1.71000000e+03,   3.15137731e-04],
       [  1.72000000e+03,   3.22643289e-04],
       [  1.73000000e+03,   3.46151821e-04],
       [  1.74000000e+03,   2.88158306e-04],
       [  1.75000000e+03,   3.39687249e-04],
       [  1.76000000e+03,   2.94681406e-04],
       [  1.77000000e+03,   3.23265238e-04],
       [  1.78000000e+03,   3.02624132e-04],
       [  1.79000000e+03,   3.10788688e-04],
       [  1.80000000e+03,   2.77069630e-04],
       [  1.81000000e+03,   2.93924619e-04],
       [  1.82000000e+03,   2.98938365e-04],
       [  1.83000000e+03,   2.82126595e-04],
       [  1.84000000e+03,   2.89427087e-04],
       [  1.85000000e+03,   3.01605993e-04],
       [  1.86000000e+03,   2.60362867e-04],
       [  1.87000000e+03,   2.92298326e-04],
       [  1.88000000e+03,   3.03198060e-04],
       [  1.89000000e+03,   2.90502037e-04],
       [  1.90000000e+03,   2.65587005e-04],
       [  1.91000000e+03,   2.92605313e-04],
       [  1.92000000e+03,   2.72187113e-04],
       [  1.93000000e+03,   2.64297007e-04],
       [  1.94000000e+03,   2.82288995e-04],
       [  1.95000000e+03,   2.58072774e-04],
       [  1.96000000e+03,   2.83025118e-04],
       [  1.97000000e+03,   2.69983837e-04],
       [  1.98000000e+03,   2.57016305e-04],
       [  1.99000000e+03,   2.58721790e-04],
       [  2.00000000e+03,   2.58973945e-04]])
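
Each row of losses is (training epoch, loss value), logged every 10 epochs up to num_of_training_epochs = 2000. A short summary makes the end-of-training behaviour easier to read than the raw array; this cell is added here only for illustration.

In [ ]:
# Summarize the loss log: first, final, and minimum loss, and where the minimum occurred.
best = losses[np.argmin(losses[:, 1])]
print("first logged loss: %.2e" % losses[0, 1])
print("final logged loss: %.2e" % losses[-1, 1])
print("minimum loss %.2e at epoch %d" % (best[1], int(best[0])))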

In [7]:
# Ground-truth signal covering both the seed window and the prediction window.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
# Seed sequence fed to the network, then its free-running prediction,
# indexed so the prediction starts right where the seed ends.
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])  # dashed black line = prediction


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10dc08940>
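
Note that pd.concat above stacks the three frames row-wise (the default axis=0); because each frame contributes its own column and carries its own index, the series still line up correctly when plotted against the index. An arguably more explicit variant aligns the frames column-wise instead, shown here only as an alternative.

In [ ]:
# Alternative: align the three series as columns of a single frame before plotting.
merged_wide = pd.concat([train_df, initial_df, output_df], axis=1)
merged_wide.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])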

In [8]:
# Training loss (logged every 10 epochs) on a logarithmic scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x10dbdc7b8>
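
Because the logged loss is somewhat noisy from checkpoint to checkpoint, a rolling mean can make the downward trend clearer. This is an optional extra, not part of the original training script.

In [ ]:
# Optional: overlay a rolling mean to smooth checkpoint-to-checkpoint noise.
losses_df["loss_smoothed"] = losses_df["loss"].rolling(window=10, min_periods=1).mean()
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")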
