In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
# Load the hyperparameters used for training; safe_load avoids the
# missing-Loader warning/error raised by bare yaml.load in newer PyYAML.
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param


Out[2]:
{'forget_bias': 1.0,
 'learning_rate': 0.1,
 'length_of_sequences': 50,
 'num_of_hidden_nodes': 2,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 50,
 'train_data_path': '../train_data/normal.npy'}

In [3]:
# Training data: rows of (current value, next value) pairs of the sine wave.
train = np.load(param["train_data_path"])
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])
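
The rows are (current value, next value) pairs of a noise-free sine sampled 50 times per cycle. A minimal sketch of how a file in this layout could be generated, for illustration only (the original generator script is not shown here, and the number of cycles below is hypothetical):

steps_per_cycle = 50
number_of_cycles = 100                            # hypothetical; the real file may differ
t = np.arange(steps_per_cycle * number_of_cycles + 1)
wave = np.sin(2.0 * np.pi * t / steps_per_cycle)
pairs = np.stack([wave[:-1], wave[1:]], axis=1)   # columns: x_t, x_{t+1}
# np.save("../train_data/normal.npy", pairs)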

In [4]:
# Seed sequence used to start the prediction (one 50-step window of the wave).
initial = np.load("initial.npy")
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01])
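
The seed sequence looks like one full 50-step sine period, i.e. the first length_of_sequences values of the training series. A quick sanity check under that assumption:

steps_per_cycle = param["length_of_sequences"]   # 50
expected = np.sin(2.0 * np.pi * np.arange(steps_per_cycle) / steps_per_cycle)
print(np.allclose(initial, expected))            # True if the assumption holds
print(np.allclose(initial, train[:steps_per_cycle, 0]))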

In [5]:
# Model output: 100 predicted steps generated from the seed sequence.
output = np.load("output.npy")
output


Out[5]:
array([ 0.00605029,  0.1436711 ,  0.28119054,  0.41295895,  0.53488636,
        0.64437664,  0.73944843,  0.81808639,  0.87855673,  0.92034185,
        0.94462228,  0.95378113,  0.95047808,  0.93699789,  0.9150269 ,
        0.88566625,  0.84951532,  0.80674911,  0.75717348,  0.70025468,
        0.63513434,  0.56063968,  0.47531199,  0.3774915 ,  0.26553637,
        0.13831532, -0.00381312, -0.1573976 , -0.31418517, -0.46120694,
       -0.58469665, -0.67603981, -0.73445821, -0.76468468, -0.77302766,
       -0.7648741 , -0.74392951, -0.71234703, -0.6710813 , -0.62020802,
       -0.55916643, -0.48697165, -0.40246674, -0.30470136, -0.19349307,
       -0.0701133 ,  0.06219277,  0.19835132,  0.33254027,  0.45967573,
        0.57621598,  0.67979968,  0.76838458,  0.8399471 ,  0.89310539,
        0.92796803,  0.94623172,  0.95044065,  0.94312584,  0.92633128,
        0.90150094,  0.86952829,  0.83083916,  0.78546   ,  0.73306108,
        0.67298043,  0.60423446,  0.52553409,  0.43533027,  0.33194381,
        0.21387765,  0.08048415, -0.06679717, -0.22267023, -0.37672976,
       -0.51503086, -0.62536073, -0.70239377, -0.74810684, -0.76828504,
       -0.76900721, -0.75495589, -0.72913325, -0.69313455, -0.64750099,
       -0.59200454, -0.52587283, -0.44801334, -0.35731843, -0.25312886,
       -0.13587379, -0.0077326 ,  0.12709972,  0.2630097 ,  0.39435956,
        0.51677513,  0.62737918,  0.72405541,  0.80474603,  0.86761582])
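
The 100 predicted values continue the wave beyond the seed sequence. A rough accuracy check against the ideal continuation of the sine (assuming the true signal simply repeats the 50-step cycle):

steps_per_cycle = 50
t = np.arange(len(initial), len(initial) + len(output))
ideal = np.sin(2.0 * np.pi * t / steps_per_cycle)
rmse = np.sqrt(np.mean((output - ideal) ** 2))
print(rmse)   # magnitude depends on how well the model was trained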

In [6]:
# Loss history: (epoch, loss) pairs recorded every 10 training epochs.
losses = np.load("losses.npy")
losses


Out[6]:
array([[  1.00000000e+01,   4.48800892e-01],
       [  2.00000000e+01,   4.07370418e-01],
       [  3.00000000e+01,   2.70156711e-01],
       [  4.00000000e+01,   1.40986606e-01],
       [  5.00000000e+01,   1.08255446e-01],
       [  6.00000000e+01,   9.62115452e-02],
       [  7.00000000e+01,   7.83093721e-02],
       [  8.00000000e+01,   6.07048273e-02],
       [  9.00000000e+01,   5.97998351e-02],
       [  1.00000000e+02,   4.00288962e-02],
       [  1.10000000e+02,   4.52583991e-02],
       [  1.20000000e+02,   3.55131663e-02],
       [  1.30000000e+02,   3.25631015e-02],
       [  1.40000000e+02,   3.06645799e-02],
       [  1.50000000e+02,   2.35128030e-02],
       [  1.60000000e+02,   2.51336619e-02],
       [  1.70000000e+02,   1.98220480e-02],
       [  1.80000000e+02,   1.73724350e-02],
       [  1.90000000e+02,   1.48066906e-02],
       [  2.00000000e+02,   1.32668493e-02],
       [  2.10000000e+02,   1.13069713e-02],
       [  2.20000000e+02,   1.25198746e-02],
       [  2.30000000e+02,   8.09940975e-03],
       [  2.40000000e+02,   8.37696809e-03],
       [  2.50000000e+02,   8.21263902e-03],
       [  2.60000000e+02,   5.93196228e-03],
       [  2.70000000e+02,   5.97765436e-03],
       [  2.80000000e+02,   6.13768212e-03],
       [  2.90000000e+02,   4.62493813e-03],
       [  3.00000000e+02,   5.29376604e-03],
       [  3.10000000e+02,   3.82103981e-03],
       [  3.20000000e+02,   3.91275855e-03],
       [  3.30000000e+02,   5.65662747e-03],
       [  3.40000000e+02,   4.68799192e-03],
       [  3.50000000e+02,   3.13547999e-03],
       [  3.60000000e+02,   4.76001250e-03],
       [  3.70000000e+02,   4.60602390e-03],
       [  3.80000000e+02,   4.82889218e-03],
       [  3.90000000e+02,   3.24663986e-03],
       [  4.00000000e+02,   3.48727684e-03],
       [  4.10000000e+02,   3.79849365e-03],
       [  4.20000000e+02,   3.73729994e-03],
       [  4.30000000e+02,   3.28301825e-03],
       [  4.40000000e+02,   3.83302383e-03],
       [  4.50000000e+02,   3.45071754e-03],
       [  4.60000000e+02,   2.83001782e-03],
       [  4.70000000e+02,   2.53342744e-03],
       [  4.80000000e+02,   3.61555791e-03],
       [  4.90000000e+02,   3.04861460e-03],
       [  5.00000000e+02,   3.66976880e-03],
       [  5.10000000e+02,   2.95369863e-03],
       [  5.20000000e+02,   3.87373543e-03],
       [  5.30000000e+02,   2.33452464e-03],
       [  5.40000000e+02,   3.52992443e-03],
       [  5.50000000e+02,   1.91582146e-03],
       [  5.60000000e+02,   3.75476130e-03],
       [  5.70000000e+02,   3.22455191e-03],
       [  5.80000000e+02,   2.90925056e-03],
       [  5.90000000e+02,   3.03783081e-03],
       [  6.00000000e+02,   3.25110951e-03],
       [  6.10000000e+02,   2.34844862e-03],
       [  6.20000000e+02,   2.81866710e-03],
       [  6.30000000e+02,   2.19568354e-03],
       [  6.40000000e+02,   2.71349540e-03],
       [  6.50000000e+02,   2.81917630e-03],
       [  6.60000000e+02,   2.73557147e-03],
       [  6.70000000e+02,   2.29914626e-03],
       [  6.80000000e+02,   2.79168435e-03],
       [  6.90000000e+02,   2.42808554e-03],
       [  7.00000000e+02,   1.73963618e-03],
       [  7.10000000e+02,   2.37798924e-03],
       [  7.20000000e+02,   2.37585604e-03],
       [  7.30000000e+02,   2.47920537e-03],
       [  7.40000000e+02,   2.35551619e-03],
       [  7.50000000e+02,   1.82922091e-03],
       [  7.60000000e+02,   2.97129876e-03],
       [  7.70000000e+02,   2.08460935e-03],
       [  7.80000000e+02,   2.44995812e-03],
       [  7.90000000e+02,   2.45113228e-03],
       [  8.00000000e+02,   2.22971430e-03],
       [  8.10000000e+02,   2.35671434e-03],
       [  8.20000000e+02,   2.92396406e-03],
       [  8.30000000e+02,   2.27630418e-03],
       [  8.40000000e+02,   1.89890829e-03],
       [  8.50000000e+02,   2.35408917e-03],
       [  8.60000000e+02,   2.32181675e-03],
       [  8.70000000e+02,   2.49334355e-03],
       [  8.80000000e+02,   2.55632424e-03],
       [  8.90000000e+02,   2.47281836e-03],
       [  9.00000000e+02,   2.32568197e-03],
       [  9.10000000e+02,   2.11712252e-03],
       [  9.20000000e+02,   1.62985909e-03],
       [  9.30000000e+02,   2.08126637e-03],
       [  9.40000000e+02,   2.26301420e-03],
       [  9.50000000e+02,   1.85970764e-03],
       [  9.60000000e+02,   2.32217391e-03],
       [  9.70000000e+02,   1.64019060e-03],
       [  9.80000000e+02,   2.02045729e-03],
       [  9.90000000e+02,   2.10024999e-03],
       [  1.00000000e+03,   1.65149057e-03],
       [  1.01000000e+03,   1.91835407e-03],
       [  1.02000000e+03,   1.62408198e-03],
       [  1.03000000e+03,   1.51674706e-03],
       [  1.04000000e+03,   1.79473171e-03],
       [  1.05000000e+03,   1.49998779e-03],
       [  1.06000000e+03,   1.65460748e-03],
       [  1.07000000e+03,   2.03388790e-03],
       [  1.08000000e+03,   1.93317619e-03],
       [  1.09000000e+03,   1.90710102e-03],
       [  1.10000000e+03,   2.19604676e-03],
       [  1.11000000e+03,   1.50470575e-03],
       [  1.12000000e+03,   1.95620069e-03],
       [  1.13000000e+03,   1.86586753e-03],
       [  1.14000000e+03,   1.35002483e-03],
       [  1.15000000e+03,   2.37491005e-03],
       [  1.16000000e+03,   1.65425008e-03],
       [  1.17000000e+03,   1.85328012e-03],
       [  1.18000000e+03,   1.94051175e-03],
       [  1.19000000e+03,   1.38949859e-03],
       [  1.20000000e+03,   1.41539495e-03],
       [  1.21000000e+03,   1.45017623e-03],
       [  1.22000000e+03,   1.49397587e-03],
       [  1.23000000e+03,   1.94837095e-03],
       [  1.24000000e+03,   1.45024946e-03],
       [  1.25000000e+03,   1.77673053e-03],
       [  1.26000000e+03,   1.58400787e-03],
       [  1.27000000e+03,   1.47867377e-03],
       [  1.28000000e+03,   1.80332037e-03],
       [  1.29000000e+03,   1.38736365e-03],
       [  1.30000000e+03,   1.22042489e-03],
       [  1.31000000e+03,   1.28827943e-03],
       [  1.32000000e+03,   1.31311174e-03],
       [  1.33000000e+03,   1.26408192e-03],
       [  1.34000000e+03,   1.21319108e-03],
       [  1.35000000e+03,   1.52061880e-03],
       [  1.36000000e+03,   1.18865701e-03],
       [  1.37000000e+03,   1.38971244e-03],
       [  1.38000000e+03,   1.33357779e-03],
       [  1.39000000e+03,   1.29566342e-03],
       [  1.40000000e+03,   1.62974955e-03],
       [  1.41000000e+03,   1.22151722e-03],
       [  1.42000000e+03,   1.22268195e-03],
       [  1.43000000e+03,   1.17790559e-03],
       [  1.44000000e+03,   1.24149595e-03],
       [  1.45000000e+03,   1.44640659e-03],
       [  1.46000000e+03,   1.02989399e-03],
       [  1.47000000e+03,   1.11122790e-03],
       [  1.48000000e+03,   1.23555411e-03],
       [  1.49000000e+03,   1.15039677e-03],
       [  1.50000000e+03,   1.23518868e-03],
       [  1.51000000e+03,   1.23302802e-03],
       [  1.52000000e+03,   1.18085451e-03],
       [  1.53000000e+03,   1.47570984e-03],
       [  1.54000000e+03,   1.17384631e-03],
       [  1.55000000e+03,   9.56637203e-04],
       [  1.56000000e+03,   1.18612649e-03],
       [  1.57000000e+03,   1.24448701e-03],
       [  1.58000000e+03,   1.10593776e-03],
       [  1.59000000e+03,   9.95729235e-04],
       [  1.60000000e+03,   9.26991925e-04],
       [  1.61000000e+03,   1.38370320e-03],
       [  1.62000000e+03,   1.25816732e-03],
       [  1.63000000e+03,   1.02042872e-03],
       [  1.64000000e+03,   1.48986350e-03],
       [  1.65000000e+03,   8.83683388e-04],
       [  1.66000000e+03,   1.18476793e-03],
       [  1.67000000e+03,   9.61380953e-04],
       [  1.68000000e+03,   8.78513965e-04],
       [  1.69000000e+03,   9.38194746e-04],
       [  1.70000000e+03,   1.34150521e-03],
       [  1.71000000e+03,   8.10550759e-04],
       [  1.72000000e+03,   8.92015931e-04],
       [  1.73000000e+03,   1.16464566e-03],
       [  1.74000000e+03,   1.04963453e-03],
       [  1.75000000e+03,   1.08016958e-03],
       [  1.76000000e+03,   1.04651263e-03],
       [  1.77000000e+03,   1.02604879e-03],
       [  1.78000000e+03,   1.29214209e-03],
       [  1.79000000e+03,   8.28185061e-04],
       [  1.80000000e+03,   1.04267569e-03],
       [  1.81000000e+03,   6.78852724e-04],
       [  1.82000000e+03,   9.04220506e-04],
       [  1.83000000e+03,   9.64932318e-04],
       [  1.84000000e+03,   1.08271965e-03],
       [  1.85000000e+03,   9.79476026e-04],
       [  1.86000000e+03,   1.05643214e-03],
       [  1.87000000e+03,   1.19211408e-03],
       [  1.88000000e+03,   1.05968537e-03],
       [  1.89000000e+03,   1.04642462e-03],
       [  1.90000000e+03,   1.12252939e-03],
       [  1.91000000e+03,   8.39525252e-04],
       [  1.92000000e+03,   6.82258105e-04],
       [  1.93000000e+03,   1.07447652e-03],
       [  1.94000000e+03,   8.96543497e-04],
       [  1.95000000e+03,   9.66358872e-04],
       [  1.96000000e+03,   7.78164132e-04],
       [  1.97000000e+03,   9.23915417e-04],
       [  1.98000000e+03,   1.03913981e-03],
       [  1.99000000e+03,   6.95308961e-04],
       [  2.00000000e+03,   8.06271215e-04]])
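
The two columns are the epoch number (recorded every 10 epochs) and the training loss at that point. A quick summary of how far the loss dropped over the run:

first_epoch, first_loss = losses[0]
last_epoch, last_loss = losses[-1]
print("loss at epoch %d: %f" % (first_epoch, first_loss))
print("loss at epoch %d: %f" % (last_epoch, last_loss))
print("reduction factor: %.0fx" % (first_loss / last_loss))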

In [7]:
# Align the three series on a shared time index: the training series over the
# whole range, the seed ("initial") over the first 50 steps, and the model
# output over the following 100 steps (drawn as a black dashed line).
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
merged = pd.concat([train_df, initial_df, output_df], axis=1)
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x11083e940>

In [8]:
# Training loss over epochs on a log scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x110813780>
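
The recorded loss is fairly noisy from point to point; a rolling mean over 10 recorded points (100 epochs) makes the trend easier to read. A sketch, not part of the original notebook:

ax = losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch", y="loss")
smoothed = losses_df.set_index("epoch")["loss"].rolling(window=10, center=True).mean()
smoothed.plot(ax=ax, style="k--")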
