In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
# Load hyperparameters from the YAML config file.
# yaml.load() without an explicit Loader is unsafe (it can construct
# arbitrary Python objects from tagged YAML) and the Loader argument is
# mandatory in PyYAML >= 6.0, so the original call fails on modern PyYAML.
# safe_load() parses plain data types only, which is all this config needs.
# Passing the file object directly also avoids the redundant .read().
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param


Out[2]:
{'forget_bias': 1.0,
 'learning_rate': 0.1,
 'length_of_sequences': 60,
 'num_of_hidden_nodes': 2,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 100,
 'train_data_path': '../train_data/normal.npy'}

In [3]:
# Training data: per Out[3], an (N, 2) array of consecutive sample pairs
# (column 0 is x(t), column 1 is x(t+1)) from the path given in the config.
train_path = param["train_data_path"]
train = np.load(train_path)
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])

In [4]:
# Seed window used to prime the model before prediction — per Out[4],
# a 1-D array of 60 samples (matches length_of_sequences in the config).
initial_path = "initial.npy"
initial = np.load(initial_path)
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01,   6.43249060e-16,
         1.25333234e-01,   2.48689887e-01,   3.68124553e-01,
         4.81753674e-01,   5.87785252e-01,   6.84547106e-01,
         7.70513243e-01,   8.44327926e-01,   9.04827052e-01])

In [5]:
# Model predictions — per Out[5], a 1-D array of 100 values (matches
# num_of_prediction_epochs in the config).
output_path = "output.npy"
output = np.load(output_path)
output


Out[5]:
array([ 0.97054219,  0.99542677,  0.99664259,  0.97980571,  0.94903433,
        0.90724421,  0.85622942,  0.79683995,  0.72917295,  0.65274036,
        0.56663024,  0.46971533,  0.360993  ,  0.24014762,  0.1083529 ,
       -0.0309262 , -0.17179006, -0.3071579 , -0.43118346, -0.5411582 ,
       -0.63743806, -0.72183192, -0.79586411, -0.85972786, -0.9121083 ,
       -0.95087218, -0.97426164, -0.98175704, -0.974038  , -0.95231724,
       -0.91766751, -0.87065327, -0.8112452 , -0.73892903, -0.65298265,
       -0.55295956, -0.43940848, -0.3146871 , -0.18339175, -0.05171674,
        0.07443893,  0.19164547,  0.29994646,  0.40220353,  0.50240505,
        0.60358119,  0.70556152,  0.80333221,  0.88798857,  0.95103145,
        0.98856533,  1.00176418,  0.99455345,  0.97127593,  0.93548083,
        0.88959348,  0.83499062,  0.7721864 ,  0.70101511,  0.6207912 ,
        0.53047967,  0.42894408,  0.31536409,  0.18989888,  0.05454114,
       -0.08623961, -0.22583987, -0.35735172, -0.47602576, -0.58048177,
       -0.67187834, -0.75209439, -0.82221299, -0.88180852, -0.92912066,
       -0.96201837, -0.97917163, -0.98058474, -0.96723819, -0.94035602,
       -0.90083849, -0.84901559, -0.78464675, -0.707093  , -0.61566848,
       -0.51021785, -0.39190477, -0.26395342, -0.13173604, -0.00160569,
        0.12127978,  0.23482224,  0.34031877,  0.44130209,  0.54164779,
        0.64340353,  0.74461508,  0.8384285 ,  0.91547823,  0.96872914])

In [6]:
# Training loss history — per Out[6], an (N, 2) array of
# (epoch, loss) rows sampled every 10 epochs.
losses_path = "losses.npy"
losses = np.load(losses_path)
losses


Out[6]:
array([[  1.00000000e+01,   5.06403089e-01],
       [  2.00000000e+01,   4.65100288e-01],
       [  3.00000000e+01,   4.53504324e-01],
       [  4.00000000e+01,   4.30691004e-01],
       [  5.00000000e+01,   4.28720653e-01],
       [  6.00000000e+01,   2.52964824e-01],
       [  7.00000000e+01,   1.28045768e-01],
       [  8.00000000e+01,   9.34872925e-02],
       [  9.00000000e+01,   6.41007647e-02],
       [  1.00000000e+02,   5.14015183e-02],
       [  1.10000000e+02,   4.26473096e-02],
       [  1.20000000e+02,   3.38089503e-02],
       [  1.30000000e+02,   2.28780098e-02],
       [  1.40000000e+02,   1.59108117e-02],
       [  1.50000000e+02,   1.17567368e-02],
       [  1.60000000e+02,   7.27781793e-03],
       [  1.70000000e+02,   5.79333678e-03],
       [  1.80000000e+02,   4.57283249e-03],
       [  1.90000000e+02,   4.75384993e-03],
       [  2.00000000e+02,   3.71356611e-03],
       [  2.10000000e+02,   3.90847493e-03],
       [  2.20000000e+02,   2.94273742e-03],
       [  2.30000000e+02,   3.27187241e-03],
       [  2.40000000e+02,   3.23887938e-03],
       [  2.50000000e+02,   2.81045493e-03],
       [  2.60000000e+02,   2.46776524e-03],
       [  2.70000000e+02,   2.72319233e-03],
       [  2.80000000e+02,   2.48362566e-03],
       [  2.90000000e+02,   2.16595759e-03],
       [  3.00000000e+02,   2.25789077e-03],
       [  3.10000000e+02,   2.25824770e-03],
       [  3.20000000e+02,   2.17676419e-03],
       [  3.30000000e+02,   1.99272204e-03],
       [  3.40000000e+02,   1.86030532e-03],
       [  3.50000000e+02,   1.56256172e-03],
       [  3.60000000e+02,   1.55303883e-03],
       [  3.70000000e+02,   1.69490988e-03],
       [  3.80000000e+02,   1.58757810e-03],
       [  3.90000000e+02,   1.67618145e-03],
       [  4.00000000e+02,   1.47449540e-03],
       [  4.10000000e+02,   1.27437117e-03],
       [  4.20000000e+02,   1.17170962e-03],
       [  4.30000000e+02,   1.23217446e-03],
       [  4.40000000e+02,   1.24156335e-03],
       [  4.50000000e+02,   1.32552418e-03],
       [  4.60000000e+02,   1.12539809e-03],
       [  4.70000000e+02,   1.28757383e-03],
       [  4.80000000e+02,   1.25204073e-03],
       [  4.90000000e+02,   1.16147427e-03],
       [  5.00000000e+02,   9.60597245e-04],
       [  5.10000000e+02,   7.29211257e-04],
       [  5.20000000e+02,   1.06216129e-03],
       [  5.30000000e+02,   9.32767813e-04],
       [  5.40000000e+02,   9.26595356e-04],
       [  5.50000000e+02,   9.56396339e-04],
       [  5.60000000e+02,   7.88826321e-04],
       [  5.70000000e+02,   1.00343931e-03],
       [  5.80000000e+02,   1.00611302e-03],
       [  5.90000000e+02,   9.82254161e-04],
       [  6.00000000e+02,   6.86758547e-04],
       [  6.10000000e+02,   7.12899957e-04],
       [  6.20000000e+02,   7.27548904e-04],
       [  6.30000000e+02,   7.64576369e-04],
       [  6.40000000e+02,   7.58014037e-04],
       [  6.50000000e+02,   7.47552665e-04],
       [  6.60000000e+02,   8.14763305e-04],
       [  6.70000000e+02,   7.39449926e-04],
       [  6.80000000e+02,   8.02603085e-04],
       [  6.90000000e+02,   7.19150470e-04],
       [  7.00000000e+02,   7.02608202e-04],
       [  7.10000000e+02,   6.08150032e-04],
       [  7.20000000e+02,   7.03473983e-04],
       [  7.30000000e+02,   7.08433217e-04],
       [  7.40000000e+02,   7.24861457e-04],
       [  7.50000000e+02,   5.37842978e-04],
       [  7.60000000e+02,   6.41418563e-04],
       [  7.70000000e+02,   5.75507409e-04],
       [  7.80000000e+02,   6.03403838e-04],
       [  7.90000000e+02,   6.54002884e-04],
       [  8.00000000e+02,   5.54654165e-04],
       [  8.10000000e+02,   4.99101589e-04],
       [  8.20000000e+02,   6.56683056e-04],
       [  8.30000000e+02,   5.56540850e-04],
       [  8.40000000e+02,   6.25622924e-04],
       [  8.50000000e+02,   4.45001177e-04],
       [  8.60000000e+02,   5.40971174e-04],
       [  8.70000000e+02,   5.67872601e-04],
       [  8.80000000e+02,   4.75532404e-04],
       [  8.90000000e+02,   4.92830703e-04],
       [  9.00000000e+02,   5.90068870e-04],
       [  9.10000000e+02,   5.11613616e-04],
       [  9.20000000e+02,   4.64697456e-04],
       [  9.30000000e+02,   4.30622429e-04],
       [  9.40000000e+02,   5.59505192e-04],
       [  9.50000000e+02,   5.31189842e-04],
       [  9.60000000e+02,   5.05464501e-04],
       [  9.70000000e+02,   3.56924895e-04],
       [  9.80000000e+02,   4.99868009e-04],
       [  9.90000000e+02,   3.87771142e-04],
       [  1.00000000e+03,   4.41522920e-04],
       [  1.01000000e+03,   4.31795575e-04],
       [  1.02000000e+03,   5.34437015e-04],
       [  1.03000000e+03,   4.28901461e-04],
       [  1.04000000e+03,   3.87969078e-04],
       [  1.05000000e+03,   3.35424847e-04],
       [  1.06000000e+03,   4.73805732e-04],
       [  1.07000000e+03,   4.10100678e-04],
       [  1.08000000e+03,   3.57456418e-04],
       [  1.09000000e+03,   3.90132511e-04],
       [  1.10000000e+03,   4.39921481e-04],
       [  1.11000000e+03,   3.61111946e-04],
       [  1.12000000e+03,   3.77521355e-04],
       [  1.13000000e+03,   4.22277371e-04],
       [  1.14000000e+03,   4.74101602e-04],
       [  1.15000000e+03,   3.80392885e-04],
       [  1.16000000e+03,   3.88564164e-04],
       [  1.17000000e+03,   3.65295477e-04],
       [  1.18000000e+03,   3.40796338e-04],
       [  1.19000000e+03,   3.56410659e-04],
       [  1.20000000e+03,   4.02213162e-04],
       [  1.21000000e+03,   3.43823718e-04],
       [  1.22000000e+03,   3.85664112e-04],
       [  1.23000000e+03,   3.88184446e-04],
       [  1.24000000e+03,   4.04180726e-04],
       [  1.25000000e+03,   3.86747182e-04],
       [  1.26000000e+03,   3.41747858e-04],
       [  1.27000000e+03,   3.48885718e-04],
       [  1.28000000e+03,   3.13018973e-04],
       [  1.29000000e+03,   3.07582028e-04],
       [  1.30000000e+03,   3.73672869e-04],
       [  1.31000000e+03,   3.87388834e-04],
       [  1.32000000e+03,   4.19116666e-04],
       [  1.33000000e+03,   3.27760878e-04],
       [  1.34000000e+03,   3.08957970e-04],
       [  1.35000000e+03,   3.30988230e-04],
       [  1.36000000e+03,   3.46238434e-04],
       [  1.37000000e+03,   3.44896660e-04],
       [  1.38000000e+03,   3.70746973e-04],
       [  1.39000000e+03,   3.50284274e-04],
       [  1.40000000e+03,   3.14876030e-04],
       [  1.41000000e+03,   3.73970484e-04],
       [  1.42000000e+03,   3.45496956e-04],
       [  1.43000000e+03,   3.40876257e-04],
       [  1.44000000e+03,   3.22644337e-04],
       [  1.45000000e+03,   3.35026300e-04],
       [  1.46000000e+03,   2.88841547e-04],
       [  1.47000000e+03,   3.12957622e-04],
       [  1.48000000e+03,   3.08143673e-04],
       [  1.49000000e+03,   2.95434991e-04],
       [  1.50000000e+03,   2.98283383e-04],
       [  1.51000000e+03,   3.24022199e-04],
       [  1.52000000e+03,   2.56367523e-04],
       [  1.53000000e+03,   2.77088810e-04],
       [  1.54000000e+03,   3.02627857e-04],
       [  1.55000000e+03,   2.72747478e-04],
       [  1.56000000e+03,   3.06661386e-04],
       [  1.57000000e+03,   2.64829258e-04],
       [  1.58000000e+03,   2.76857172e-04],
       [  1.59000000e+03,   2.81696732e-04],
       [  1.60000000e+03,   3.00184474e-04],
       [  1.61000000e+03,   2.55607971e-04],
       [  1.62000000e+03,   2.76382896e-04],
       [  1.63000000e+03,   2.35493426e-04],
       [  1.64000000e+03,   2.48022145e-04],
       [  1.65000000e+03,   3.01513704e-04],
       [  1.66000000e+03,   2.66833609e-04],
       [  1.67000000e+03,   2.71970814e-04],
       [  1.68000000e+03,   2.68793898e-04],
       [  1.69000000e+03,   2.41466987e-04],
       [  1.70000000e+03,   2.77868385e-04],
       [  1.71000000e+03,   2.94675818e-04],
       [  1.72000000e+03,   2.45844160e-04],
       [  1.73000000e+03,   2.50226178e-04],
       [  1.74000000e+03,   2.61309848e-04],
       [  1.75000000e+03,   2.67283089e-04],
       [  1.76000000e+03,   2.87918781e-04],
       [  1.77000000e+03,   2.88327225e-04],
       [  1.78000000e+03,   2.44195980e-04],
       [  1.79000000e+03,   2.75243481e-04],
       [  1.80000000e+03,   2.20340080e-04],
       [  1.81000000e+03,   2.81056040e-04],
       [  1.82000000e+03,   2.27521130e-04],
       [  1.83000000e+03,   2.58146698e-04],
       [  1.84000000e+03,   2.18406873e-04],
       [  1.85000000e+03,   2.33469735e-04],
       [  1.86000000e+03,   2.25574753e-04],
       [  1.87000000e+03,   2.32736464e-04],
       [  1.88000000e+03,   2.42906332e-04],
       [  1.89000000e+03,   2.63917493e-04],
       [  1.90000000e+03,   2.74311577e-04],
       [  1.91000000e+03,   2.39641056e-04],
       [  1.92000000e+03,   1.75235909e-04],
       [  1.93000000e+03,   3.02739878e-04],
       [  1.94000000e+03,   2.05147531e-04],
       [  1.95000000e+03,   2.08231912e-04],
       [  1.96000000e+03,   2.31014754e-04],
       [  1.97000000e+03,   2.28317673e-04],
       [  1.98000000e+03,   2.48514290e-04],
       [  1.99000000e+03,   2.36065680e-04],
       [  2.00000000e+03,   2.02242474e-04]])

In [7]:
# Overlay the ground-truth series, the priming window, and the model's
# prediction on one figure. The three frames carry different index ranges
# (truth/priming start at 0; predictions start where priming ends), so the
# plot's x-axis lines them up in time; the dashed black line is the prediction.
n_total = len(initial) + len(output)
train_df = pd.DataFrame(train[:n_total, 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
pred_index = range(len(initial), n_total)
output_df = pd.DataFrame(output, columns=["output"], index=pred_index)
combined = pd.concat([train_df, initial_df, output_df])
combined.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x112980940>

In [8]:
# Training loss versus epoch on a logarithmic y-axis, to make the
# late-stage (small-loss) improvements visible.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(x="epoch", logy=True, grid=True, figsize=(15, 5))


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x1129525f8>

In [ ]: