In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)  # safe_load: modern PyYAML requires an explicit Loader for plain yaml.load
param


Out[2]:
{'forget_bias': 0.25,
 'learning_rate': 0.1,
 'length_of_sequences': 50,
 'num_of_hidden_nodes': 2,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 100,
 'train_data_path': '../train_data/normal.npy'}

In [3]:
train = np.load(param["train_data_path"])
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])
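
Each row pairs a sample with its successor on a sine wave of period length_of_sequences = 50 (the first column is sin(2πk/50), the second sin(2π(k+1)/50)). A minimal sketch of how such a file could be produced, as an assumption about the generator rather than the author's actual script (steps is a hypothetical series length):

steps = 200  # hypothetical; the true length of normal.npy is not shown here
t = np.arange(steps + 1)
wave = np.sin(2.0 * np.pi * t / param["length_of_sequences"])
pairs = np.stack([wave[:-1], wave[1:]], axis=1)  # columns: x_t and x_(t+1)
# np.save(param["train_data_path"], pairs)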

In [4]:
initial = np.load("initial.npy")
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01])
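
These 50 values are exactly one period of the same sine wave: the window used to seed the prediction. A quick sanity check, assuming train holds consecutive samples of that wave as its first column suggests, should return True:

np.allclose(initial, train[:len(initial), 0])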

In [5]:
output = np.load("output.npy")
output


Out[5]:
array([ 0.01428467,  0.1694836 ,  0.34420919,  0.53124654,  0.70942104,
        0.85001886,  0.93688023,  0.9759289 ,  0.98244596,  0.96899819,
        0.94356585,  0.91098893,  0.87422454,  0.83510983,  0.79480034,
        0.75402451,  0.71323466,  0.67269951,  0.63256192,  0.59287524,
        0.55362636,  0.51474988,  0.47613645,  0.43763581,  0.3990576 ,
        0.3601687 ,  0.32068911,  0.28028649,  0.23857057,  0.19508937,
        0.14932959,  0.10072708,  0.04869321, -0.00733167, -0.0677864 ,
       -0.13284847, -0.20224753, -0.27506354, -0.34958073, -0.42330757,
       -0.49325356, -0.5564512 , -0.61055481, -0.6542697 , -0.68744743,
       -0.71087635, -0.7259171 , -0.73415101, -0.73712337, -0.7362026 ,
       -0.73252356, -0.72698724, -0.72028589, -0.71293759, -0.70532167,
       -0.69771028, -0.69029391, -0.68320155, -0.67651618, -0.67028749,
       -0.66454089, -0.65928364, -0.65451038, -0.65020728, -0.64635348,
       -0.6429249 , -0.63989437, -0.6372335 , -0.63491356, -0.63290572,
       -0.63118184, -0.62971473, -0.62847793, -0.62744713, -0.62659895,
       -0.62591183, -0.62536561, -0.62494195, -0.62462366, -0.62439549,
       -0.62424338, -0.62415481, -0.62411857, -0.62412488, -0.62416518,
       -0.6242317 , -0.624318  , -0.62441885, -0.62452924, -0.62464523,
       -0.62476337, -0.62488163, -0.62499714, -0.62510872, -0.62521517,
       -0.62531519, -0.62540877, -0.6254952 , -0.62557411, -0.62564576])
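
The 100 predictions track the wave for roughly half a period and then flatten out near -0.62. Assuming the training series simply continues the same sine (consistent with the generator sketch above), the error over the predicted span can be summarized as:

truth = train[len(initial):len(initial) + len(output), 0]  # the 100 true samples after the seed window
rmse = np.sqrt(np.mean((output - truth) ** 2))
rmse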

In [6]:
losses = np.load("losses.npy")
losses


Out[6]:
array([[  1.00000000e+01,   5.20177066e-01],
       [  2.00000000e+01,   4.96789336e-01],
       [  3.00000000e+01,   5.09636819e-01],
       [  4.00000000e+01,   5.04956186e-01],
       [  5.00000000e+01,   3.75555962e-01],
       [  6.00000000e+01,   2.91257203e-01],
       [  7.00000000e+01,   2.13277772e-01],
       [  8.00000000e+01,   1.17498398e-01],
       [  9.00000000e+01,   9.31859761e-02],
       [  1.00000000e+02,   7.08935857e-02],
       [  1.10000000e+02,   5.98928742e-02],
       [  1.20000000e+02,   4.47566994e-02],
       [  1.30000000e+02,   4.23476994e-02],
       [  1.40000000e+02,   4.06428352e-02],
       [  1.50000000e+02,   3.76752391e-02],
       [  1.60000000e+02,   3.00771110e-02],
       [  1.70000000e+02,   2.94041205e-02],
       [  1.80000000e+02,   2.66896002e-02],
       [  1.90000000e+02,   2.51652580e-02],
       [  2.00000000e+02,   2.67209820e-02],
       [  2.10000000e+02,   1.86185986e-02],
       [  2.20000000e+02,   2.18948480e-02],
       [  2.30000000e+02,   1.71200167e-02],
       [  2.40000000e+02,   2.03012750e-02],
       [  2.50000000e+02,   1.98183041e-02],
       [  2.60000000e+02,   1.47978831e-02],
       [  2.70000000e+02,   1.62749123e-02],
       [  2.80000000e+02,   1.37668634e-02],
       [  2.90000000e+02,   1.45754768e-02],
       [  3.00000000e+02,   1.29136201e-02],
       [  3.10000000e+02,   1.23119950e-02],
       [  3.20000000e+02,   1.27905915e-02],
       [  3.30000000e+02,   1.14185950e-02],
       [  3.40000000e+02,   1.04435543e-02],
       [  3.50000000e+02,   1.06118079e-02],
       [  3.60000000e+02,   1.15625644e-02],
       [  3.70000000e+02,   9.84590035e-03],
       [  3.80000000e+02,   9.61645320e-03],
       [  3.90000000e+02,   8.44064448e-03],
       [  4.00000000e+02,   8.98854248e-03],
       [  4.10000000e+02,   8.40069447e-03],
       [  4.20000000e+02,   6.86833635e-03],
       [  4.30000000e+02,   7.60791404e-03],
       [  4.40000000e+02,   8.41444265e-03],
       [  4.50000000e+02,   9.15427320e-03],
       [  4.60000000e+02,   8.15568026e-03],
       [  4.70000000e+02,   6.56267861e-03],
       [  4.80000000e+02,   7.13789929e-03],
       [  4.90000000e+02,   6.45832531e-03],
       [  5.00000000e+02,   7.08355289e-03],
       [  5.10000000e+02,   5.40106883e-03],
       [  5.20000000e+02,   6.82299072e-03],
       [  5.30000000e+02,   5.56963915e-03],
       [  5.40000000e+02,   5.79385832e-03],
       [  5.50000000e+02,   5.35018183e-03],
       [  5.60000000e+02,   5.07822074e-03],
       [  5.70000000e+02,   5.71333896e-03],
       [  5.80000000e+02,   5.04834298e-03],
       [  5.90000000e+02,   5.01439814e-03],
       [  6.00000000e+02,   4.24442114e-03],
       [  6.10000000e+02,   4.58677253e-03],
       [  6.20000000e+02,   4.27876227e-03],
       [  6.30000000e+02,   4.78460733e-03],
       [  6.40000000e+02,   4.17559454e-03],
       [  6.50000000e+02,   3.99725931e-03],
       [  6.60000000e+02,   3.28042475e-03],
       [  6.70000000e+02,   3.86618148e-03],
       [  6.80000000e+02,   3.69342696e-03],
       [  6.90000000e+02,   3.46668786e-03],
       [  7.00000000e+02,   3.35664931e-03],
       [  7.10000000e+02,   3.37300776e-03],
       [  7.20000000e+02,   3.68425250e-03],
       [  7.30000000e+02,   3.81434849e-03],
       [  7.40000000e+02,   2.79725343e-03],
       [  7.50000000e+02,   2.89307069e-03],
       [  7.60000000e+02,   2.67495215e-03],
       [  7.70000000e+02,   2.72118091e-03],
       [  7.80000000e+02,   2.57379538e-03],
       [  7.90000000e+02,   2.82959663e-03],
       [  8.00000000e+02,   2.97050108e-03],
       [  8.10000000e+02,   2.62424862e-03],
       [  8.20000000e+02,   2.61141895e-03],
       [  8.30000000e+02,   1.83743250e-03],
       [  8.40000000e+02,   2.56642769e-03],
       [  8.50000000e+02,   2.20427313e-03],
       [  8.60000000e+02,   2.34885397e-03],
       [  8.70000000e+02,   2.04864074e-03],
       [  8.80000000e+02,   1.98110333e-03],
       [  8.90000000e+02,   2.21980852e-03],
       [  9.00000000e+02,   1.81687286e-03],
       [  9.10000000e+02,   1.79255439e-03],
       [  9.20000000e+02,   1.88425032e-03],
       [  9.30000000e+02,   1.66707626e-03],
       [  9.40000000e+02,   1.94672495e-03],
       [  9.50000000e+02,   1.89828989e-03],
       [  9.60000000e+02,   1.48579408e-03],
       [  9.70000000e+02,   1.58331718e-03],
       [  9.80000000e+02,   1.94911461e-03],
       [  9.90000000e+02,   1.65202236e-03],
       [  1.00000000e+03,   1.50695082e-03],
       [  1.01000000e+03,   1.32092892e-03],
       [  1.02000000e+03,   1.48320047e-03],
       [  1.03000000e+03,   1.44161610e-03],
       [  1.04000000e+03,   1.47948531e-03],
       [  1.05000000e+03,   1.28523167e-03],
       [  1.06000000e+03,   1.16550864e-03],
       [  1.07000000e+03,   1.47426222e-03],
       [  1.08000000e+03,   1.25087018e-03],
       [  1.09000000e+03,   1.29041355e-03],
       [  1.10000000e+03,   1.18785549e-03],
       [  1.11000000e+03,   1.19287195e-03],
       [  1.12000000e+03,   1.08833576e-03],
       [  1.13000000e+03,   1.16305007e-03],
       [  1.14000000e+03,   1.24502112e-03],
       [  1.15000000e+03,   1.18794665e-03],
       [  1.16000000e+03,   9.60274134e-04],
       [  1.17000000e+03,   1.20816904e-03],
       [  1.18000000e+03,   9.77596617e-04],
       [  1.19000000e+03,   1.04122504e-03],
       [  1.20000000e+03,   1.06289284e-03],
       [  1.21000000e+03,   1.08428497e-03],
       [  1.22000000e+03,   1.03075232e-03],
       [  1.23000000e+03,   1.08961167e-03],
       [  1.24000000e+03,   9.99404932e-04],
       [  1.25000000e+03,   8.35685874e-04],
       [  1.26000000e+03,   1.00163068e-03],
       [  1.27000000e+03,   1.12376316e-03],
       [  1.28000000e+03,   1.07847457e-03],
       [  1.29000000e+03,   8.95125268e-04],
       [  1.30000000e+03,   9.78021300e-04],
       [  1.31000000e+03,   9.41455131e-04],
       [  1.32000000e+03,   1.02020858e-03],
       [  1.33000000e+03,   7.21598277e-04],
       [  1.34000000e+03,   9.16369841e-04],
       [  1.35000000e+03,   7.70207494e-04],
       [  1.36000000e+03,   7.74931861e-04],
       [  1.37000000e+03,   7.93478335e-04],
       [  1.38000000e+03,   7.06401479e-04],
       [  1.39000000e+03,   7.67454505e-04],
       [  1.40000000e+03,   8.88676208e-04],
       [  1.41000000e+03,   7.86519202e-04],
       [  1.42000000e+03,   7.82851886e-04],
       [  1.43000000e+03,   8.59733322e-04],
       [  1.44000000e+03,   7.18395866e-04],
       [  1.45000000e+03,   8.12255603e-04],
       [  1.46000000e+03,   7.77118257e-04],
       [  1.47000000e+03,   6.92225934e-04],
       [  1.48000000e+03,   7.55496090e-04],
       [  1.49000000e+03,   6.37657067e-04],
       [  1.50000000e+03,   7.95624685e-04],
       [  1.51000000e+03,   6.47933397e-04],
       [  1.52000000e+03,   6.52562769e-04],
       [  1.53000000e+03,   5.98601997e-04],
       [  1.54000000e+03,   5.51883131e-04],
       [  1.55000000e+03,   7.30836997e-04],
       [  1.56000000e+03,   6.65045984e-04],
       [  1.57000000e+03,   7.46738864e-04],
       [  1.58000000e+03,   7.51806947e-04],
       [  1.59000000e+03,   6.96881732e-04],
       [  1.60000000e+03,   6.61992119e-04],
       [  1.61000000e+03,   6.40720944e-04],
       [  1.62000000e+03,   6.31302304e-04],
       [  1.63000000e+03,   6.23274420e-04],
       [  1.64000000e+03,   5.28764969e-04],
       [  1.65000000e+03,   5.71916869e-04],
       [  1.66000000e+03,   4.97392146e-04],
       [  1.67000000e+03,   6.28163165e-04],
       [  1.68000000e+03,   6.21474581e-04],
       [  1.69000000e+03,   6.08276925e-04],
       [  1.70000000e+03,   6.10762392e-04],
       [  1.71000000e+03,   6.22082967e-04],
       [  1.72000000e+03,   5.08880068e-04],
       [  1.73000000e+03,   5.41933172e-04],
       [  1.74000000e+03,   5.39991306e-04],
       [  1.75000000e+03,   6.12684293e-04],
       [  1.76000000e+03,   5.67709794e-04],
       [  1.77000000e+03,   5.95866935e-04],
       [  1.78000000e+03,   5.41274552e-04],
       [  1.79000000e+03,   5.24575065e-04],
       [  1.80000000e+03,   5.74264443e-04],
       [  1.81000000e+03,   5.11011167e-04],
       [  1.82000000e+03,   4.68319107e-04],
       [  1.83000000e+03,   4.70327068e-04],
       [  1.84000000e+03,   5.55294217e-04],
       [  1.85000000e+03,   5.33863436e-04],
       [  1.86000000e+03,   5.63739159e-04],
       [  1.87000000e+03,   4.71770385e-04],
       [  1.88000000e+03,   5.74479403e-04],
       [  1.89000000e+03,   5.90657059e-04],
       [  1.90000000e+03,   5.29854617e-04],
       [  1.91000000e+03,   4.17057337e-04],
       [  1.92000000e+03,   5.53696940e-04],
       [  1.93000000e+03,   5.37808402e-04],
       [  1.94000000e+03,   5.71955112e-04],
       [  1.95000000e+03,   4.58898459e-04],
       [  1.96000000e+03,   5.45363990e-04],
       [  1.97000000e+03,   4.80870367e-04],
       [  1.98000000e+03,   6.05550304e-04],
       [  1.99000000e+03,   4.71796462e-04],
       [  2.00000000e+03,   4.29955282e-04]])
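
The loss is logged every 10 epochs up to num_of_training_epochs = 2000 and drops by roughly three orders of magnitude. The raw values are noisy, so a rolling mean (a convenience added here, not part of the original run) makes the trend easier to read:

loss_series = pd.Series(losses[:, 1], index=losses[:, 0].astype(int))
loss_series.rolling(window=10).mean().tail()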

In [7]:
# ground-truth series over the plotted span: 50 seed points + 100 predicted points
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
# seed sequence, plotted at indices 0-49
initial_df = pd.DataFrame(initial, columns=["initial"])
# predictions, re-indexed to start where the seed sequence ends (indices 50-149)
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
# stack the three frames; each column keeps its own index range, NaN elsewhere
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x1083cf940>
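
Because the three frames are stacked row-wise, each column holds values only over its own index range and the plot relies on matplotlib skipping the NaN gaps. Aligning them column-wise produces the same picture from a tidier 150-row frame (an equivalent alternative, not the original code):

merged = pd.concat([train_df, initial_df, output_df], axis=1)
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])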

In [8]:
# training loss at each 10-epoch checkpoint, on a log scale to show the late-stage decay
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x1083a36d8>
