In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
# Load hyper-parameters for the RNN experiment from param.yaml.
# yaml.load() without an explicit Loader is unsafe (a crafted file can
# execute arbitrary Python) and raises TypeError on PyYAML >= 6.0.
# safe_load restricts parsing to plain YAML tags and accepts the open
# stream directly, so the .read() is unnecessary.
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param


Out[2]:
{'forget_bias': 1.0,
 'learning_rate': 0.1,
 'length_of_sequences': 50,
 'num_of_hidden_nodes': 4,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 100,
 'train_data_path': '../train_data/normal.npy'}

In [3]:
# Training data from the path configured in param.yaml.
# Judging by Out[3], rows are (input, target) pairs of a sine wave, where
# each row's target equals the next row's input — TODO confirm against the
# script that wrote normal.npy.
train = np.load(param["train_data_path"])
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])

In [4]:
# Seed sequence (one full sine period, 50 samples per Out[4]) — presumably
# the input fed to the trained model before free-running prediction; verify
# against the script that produced initial.npy.
initial = np.load("initial.npy")
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01])

In [5]:
# Model predictions (num_of_prediction_epochs = 100 values per Out[5]) —
# presumably the free-running continuation of `initial`; verify against the
# script that produced output.npy.
output = np.load("output.npy")
output


Out[5]:
array([ 0.01300612,  0.14269289,  0.26389512,  0.37760606,  0.48387495,
        0.58184886,  0.67041779,  0.75024784,  0.82346511,  0.89024734,
        0.9461937 ,  0.98375189,  0.99708796,  0.98564827,  0.95306802,
        0.90397108,  0.84202039,  0.76954663,  0.68783748,  0.59751284,
        0.49883077,  0.39193788,  0.27712443,  0.15513429,  0.02750632,
       -0.10319978, -0.23350781, -0.3595621 , -0.47790885, -0.58610457,
       -0.6828301 , -0.76750791, -0.83972347, -0.89882112, -0.94390404,
       -0.97415447, -0.98915982, -0.98898387, -0.97400367, -0.94469202,
       -0.90149498, -0.84484041, -0.77521652, -0.69321048, -0.59943056,
       -0.49441075, -0.37882268, -0.25418237, -0.12363175,  0.00814852,
        0.13636568,  0.25796035,  0.37210748,  0.47883782,  0.57736886,
        0.6665405 ,  0.74687517,  0.82047546,  0.88770187,  0.94438791,
        0.98303926,  0.99763048,  0.98735249,  0.95571828,  0.90736568,
        0.84602177,  0.77408195,  0.69287813,  0.60304952,  0.50485206,
        0.39841565,  0.28401664,  0.16239333,  0.03507507, -0.09541272,
       -0.22565687, -0.35185981, -0.47057483, -0.57930684, -0.67666531,
       -0.76202875, -0.83497894, -0.89487743, -0.94083214, -0.97200429,
       -0.98794317, -0.98867869, -0.97456777, -0.94607675, -0.90365124,
       -0.84771967, -0.77876961, -0.69739103, -0.60420144, -0.49974364,
       -0.38467273, -0.26044047, -0.13009486,  0.00174007,  0.13022906])

In [6]:
# Training-loss log: per Out[6], each row is (epoch, loss), sampled every
# 10 epochs up to num_of_training_epochs = 2000.
losses = np.load("losses.npy")
losses


Out[6]:
array([[  1.00000000e+01,   5.21178484e-01],
       [  2.00000000e+01,   4.75007892e-01],
       [  3.00000000e+01,   3.98932010e-01],
       [  4.00000000e+01,   2.22336680e-01],
       [  5.00000000e+01,   7.35786483e-02],
       [  6.00000000e+01,   2.55758259e-02],
       [  7.00000000e+01,   9.14295018e-03],
       [  8.00000000e+01,   5.43729728e-03],
       [  9.00000000e+01,   3.29838810e-03],
       [  1.00000000e+02,   4.13168827e-03],
       [  1.10000000e+02,   3.82932182e-03],
       [  1.20000000e+02,   2.85577145e-03],
       [  1.30000000e+02,   3.04991123e-03],
       [  1.40000000e+02,   2.32677022e-03],
       [  1.50000000e+02,   1.98322325e-03],
       [  1.60000000e+02,   1.79492251e-03],
       [  1.70000000e+02,   1.74251711e-03],
       [  1.80000000e+02,   1.63180474e-03],
       [  1.90000000e+02,   1.45728095e-03],
       [  2.00000000e+02,   1.35194277e-03],
       [  2.10000000e+02,   1.26131997e-03],
       [  2.20000000e+02,   1.07836840e-03],
       [  2.30000000e+02,   1.05796137e-03],
       [  2.40000000e+02,   1.26661954e-03],
       [  2.50000000e+02,   1.13173237e-03],
       [  2.60000000e+02,   9.50232730e-04],
       [  2.70000000e+02,   9.41024278e-04],
       [  2.80000000e+02,   9.98848933e-04],
       [  2.90000000e+02,   8.94334749e-04],
       [  3.00000000e+02,   1.00235734e-03],
       [  3.10000000e+02,   9.00762621e-04],
       [  3.20000000e+02,   8.89859570e-04],
       [  3.30000000e+02,   8.92452605e-04],
       [  3.40000000e+02,   8.64734116e-04],
       [  3.50000000e+02,   9.39875317e-04],
       [  3.60000000e+02,   7.34102039e-04],
       [  3.70000000e+02,   7.66599609e-04],
       [  3.80000000e+02,   7.70658022e-04],
       [  3.90000000e+02,   7.36138318e-04],
       [  4.00000000e+02,   6.59249723e-04],
       [  4.10000000e+02,   7.43905432e-04],
       [  4.20000000e+02,   6.92824076e-04],
       [  4.30000000e+02,   6.97754615e-04],
       [  4.40000000e+02,   5.67031559e-04],
       [  4.50000000e+02,   6.25470479e-04],
       [  4.60000000e+02,   6.98966091e-04],
       [  4.70000000e+02,   5.81756816e-04],
       [  4.80000000e+02,   5.75503800e-04],
       [  4.90000000e+02,   5.76578605e-04],
       [  5.00000000e+02,   5.58569154e-04],
       [  5.10000000e+02,   6.11357216e-04],
       [  5.20000000e+02,   5.87052200e-04],
       [  5.30000000e+02,   7.05686223e-04],
       [  5.40000000e+02,   4.98343259e-04],
       [  5.50000000e+02,   6.23781467e-04],
       [  5.60000000e+02,   6.01176871e-04],
       [  5.70000000e+02,   5.83032612e-04],
       [  5.80000000e+02,   4.56418406e-04],
       [  5.90000000e+02,   4.56010108e-04],
       [  6.00000000e+02,   5.72916178e-04],
       [  6.10000000e+02,   5.82930050e-04],
       [  6.20000000e+02,   4.79573559e-04],
       [  6.30000000e+02,   4.72368760e-04],
       [  6.40000000e+02,   4.88367747e-04],
       [  6.50000000e+02,   4.77097143e-04],
       [  6.60000000e+02,   4.92955500e-04],
       [  6.70000000e+02,   4.32889385e-04],
       [  6.80000000e+02,   5.05397387e-04],
       [  6.90000000e+02,   4.79699374e-04],
       [  7.00000000e+02,   4.44976467e-04],
       [  7.10000000e+02,   4.49094834e-04],
       [  7.20000000e+02,   3.94276489e-04],
       [  7.30000000e+02,   4.33465553e-04],
       [  7.40000000e+02,   4.72839019e-04],
       [  7.50000000e+02,   3.55802855e-04],
       [  7.60000000e+02,   3.71601403e-04],
       [  7.70000000e+02,   3.68775945e-04],
       [  7.80000000e+02,   4.01629222e-04],
       [  7.90000000e+02,   3.92810878e-04],
       [  8.00000000e+02,   3.71220871e-04],
       [  8.10000000e+02,   3.33477830e-04],
       [  8.20000000e+02,   3.78963246e-04],
       [  8.30000000e+02,   3.66944732e-04],
       [  8.40000000e+02,   3.64511681e-04],
       [  8.50000000e+02,   3.22837033e-04],
       [  8.60000000e+02,   3.22846317e-04],
       [  8.70000000e+02,   3.26669367e-04],
       [  8.80000000e+02,   2.98270024e-04],
       [  8.90000000e+02,   2.81335902e-04],
       [  9.00000000e+02,   3.05217109e-04],
       [  9.10000000e+02,   3.16710328e-04],
       [  9.20000000e+02,   2.59509456e-04],
       [  9.30000000e+02,   3.01027583e-04],
       [  9.40000000e+02,   2.76188977e-04],
       [  9.50000000e+02,   3.04881745e-04],
       [  9.60000000e+02,   2.96746672e-04],
       [  9.70000000e+02,   3.12470715e-04],
       [  9.80000000e+02,   3.13777564e-04],
       [  9.90000000e+02,   3.01588821e-04],
       [  1.00000000e+03,   2.99870735e-04],
       [  1.01000000e+03,   2.94293364e-04],
       [  1.02000000e+03,   2.27787386e-04],
       [  1.03000000e+03,   2.91040662e-04],
       [  1.04000000e+03,   2.60494155e-04],
       [  1.05000000e+03,   2.49118282e-04],
       [  1.06000000e+03,   2.50990590e-04],
       [  1.07000000e+03,   2.48408760e-04],
       [  1.08000000e+03,   2.72440113e-04],
       [  1.09000000e+03,   2.45650270e-04],
       [  1.10000000e+03,   2.13227715e-04],
       [  1.11000000e+03,   2.55250401e-04],
       [  1.12000000e+03,   2.57362844e-04],
       [  1.13000000e+03,   2.36815438e-04],
       [  1.14000000e+03,   2.03450472e-04],
       [  1.15000000e+03,   2.41840738e-04],
       [  1.16000000e+03,   2.07096949e-04],
       [  1.17000000e+03,   1.62793192e-04],
       [  1.18000000e+03,   2.17324909e-04],
       [  1.19000000e+03,   1.86958467e-04],
       [  1.20000000e+03,   2.20756498e-04],
       [  1.21000000e+03,   2.00985931e-04],
       [  1.22000000e+03,   1.82007178e-04],
       [  1.23000000e+03,   2.24956893e-04],
       [  1.24000000e+03,   1.78102200e-04],
       [  1.25000000e+03,   2.10040365e-04],
       [  1.26000000e+03,   1.57751536e-04],
       [  1.27000000e+03,   1.79280192e-04],
       [  1.28000000e+03,   1.73886860e-04],
       [  1.29000000e+03,   1.40078118e-04],
       [  1.30000000e+03,   1.82124655e-04],
       [  1.31000000e+03,   1.81237439e-04],
       [  1.32000000e+03,   1.55974863e-04],
       [  1.33000000e+03,   1.97605375e-04],
       [  1.34000000e+03,   1.61644406e-04],
       [  1.35000000e+03,   1.54506488e-04],
       [  1.36000000e+03,   1.65280231e-04],
       [  1.37000000e+03,   1.67739636e-04],
       [  1.38000000e+03,   1.65263933e-04],
       [  1.39000000e+03,   1.52696695e-04],
       [  1.40000000e+03,   1.77238180e-04],
       [  1.41000000e+03,   1.20942816e-04],
       [  1.42000000e+03,   1.54588182e-04],
       [  1.43000000e+03,   1.55652480e-04],
       [  1.44000000e+03,   1.55368267e-04],
       [  1.45000000e+03,   1.49227693e-04],
       [  1.46000000e+03,   1.54048888e-04],
       [  1.47000000e+03,   1.41902186e-04],
       [  1.48000000e+03,   1.54228750e-04],
       [  1.49000000e+03,   1.38037518e-04],
       [  1.50000000e+03,   1.41547687e-04],
       [  1.51000000e+03,   1.53015630e-04],
       [  1.52000000e+03,   1.16370182e-04],
       [  1.53000000e+03,   1.29287626e-04],
       [  1.54000000e+03,   1.20198391e-04],
       [  1.55000000e+03,   1.21532452e-04],
       [  1.56000000e+03,   1.13088849e-04],
       [  1.57000000e+03,   1.25112871e-04],
       [  1.58000000e+03,   1.28669810e-04],
       [  1.59000000e+03,   1.34395377e-04],
       [  1.60000000e+03,   9.68147797e-05],
       [  1.61000000e+03,   1.11691559e-04],
       [  1.62000000e+03,   1.37827534e-04],
       [  1.63000000e+03,   9.69201137e-05],
       [  1.64000000e+03,   1.10213979e-04],
       [  1.65000000e+03,   1.37693190e-04],
       [  1.66000000e+03,   1.39818949e-04],
       [  1.67000000e+03,   9.15682904e-05],
       [  1.68000000e+03,   8.86160051e-05],
       [  1.69000000e+03,   1.03046907e-04],
       [  1.70000000e+03,   1.19423174e-04],
       [  1.71000000e+03,   9.35245989e-05],
       [  1.72000000e+03,   1.00304162e-04],
       [  1.73000000e+03,   2.12155021e-04],
       [  1.74000000e+03,   9.96402741e-05],
       [  1.75000000e+03,   1.09124296e-04],
       [  1.76000000e+03,   8.63652240e-05],
       [  1.77000000e+03,   8.32184014e-05],
       [  1.78000000e+03,   9.59588506e-05],
       [  1.79000000e+03,   9.59727258e-05],
       [  1.80000000e+03,   1.10213266e-04],
       [  1.81000000e+03,   9.14018528e-05],
       [  1.82000000e+03,   8.82496679e-05],
       [  1.83000000e+03,   9.17659854e-05],
       [  1.84000000e+03,   7.30974352e-05],
       [  1.85000000e+03,   1.06375293e-04],
       [  1.86000000e+03,   9.89717155e-05],
       [  1.87000000e+03,   7.51242478e-05],
       [  1.88000000e+03,   8.52404555e-05],
       [  1.89000000e+03,   8.63234527e-05],
       [  1.90000000e+03,   7.76623347e-05],
       [  1.91000000e+03,   7.47267404e-05],
       [  1.92000000e+03,   1.04812512e-04],
       [  1.93000000e+03,   8.30928329e-05],
       [  1.94000000e+03,   1.11087225e-04],
       [  1.95000000e+03,   1.01333280e-04],
       [  1.96000000e+03,   7.37966620e-05],
       [  1.97000000e+03,   6.89997614e-05],
       [  1.98000000e+03,   9.19437589e-05],
       [  1.99000000e+03,   6.95571216e-05],
       [  2.00000000e+03,   6.19084894e-05]])

In [7]:
# Overlay the ground-truth wave, the seed sequence, and the model's
# predictions on one shared time axis.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
# Predictions start where the seed sequence ends, so shift their index.
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
# axis=1 aligns the three series column-wise on the shared time index.
# The previous default (axis=0) stacked them row-wise, yielding a 300-row
# frame with duplicate index labels — it happened to plot the same picture,
# but any further indexing of `merged` would misbehave.
merged = pd.concat([train_df, initial_df, output_df], axis=1)
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x109ea3898>

In [8]:
# Training loss per logged epoch, on a log scale so the late, small
# improvements stay visible.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
ax = losses_df.plot(x="epoch", logy=True, grid=True, figsize=(15, 5))
ax


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x109e76940>

In [ ]: