In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline

In [2]:
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)  # safe_load: yaml.load without an explicit Loader is deprecated in PyYAML >= 5.1
param


Out[2]:
{'forget_bias': 1.0,
 'learning_rate': 0.1,
 'length_of_sequences': 50,
 'num_of_hidden_nodes': 2,
 'num_of_input_nodes': 1,
 'num_of_output_nodes': 1,
 'num_of_prediction_epochs': 100,
 'num_of_training_epochs': 2000,
 'optimizer': 'GradientDescentOptimizer',
 'seed': 0,
 'size_of_mini_batch': 100,
 'train_data_path': '../train_data/normal.npy'}

In [3]:
train = np.load(param["train_data_path"])
train


Out[3]:
array([[  0.00000000e+00,   1.25333234e-01],
       [  1.25333234e-01,   2.48689887e-01],
       [  2.48689887e-01,   3.68124553e-01],
       ..., 
       [ -3.68124553e-01,  -2.48689887e-01],
       [ -2.48689887e-01,  -1.25333234e-01],
       [ -1.25333234e-01,   3.92877345e-15]])
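
The pairs above look like one period of a sine wave sampled at length_of_sequences
points, stored as (current value, next value). A minimal sketch to check that
reading, assuming exactly that construction (the tolerance absorbs tiny
floating-point differences):

In [ ]:
# Sketch (assumption): train is one sine period of length_of_sequences samples,
# stored as (x_t, x_{t+1}) pairs.
steps = param["length_of_sequences"]                      # 50
xs = np.sin(2 * np.pi * np.arange(steps + 1) / steps)
pairs = np.stack([xs[:-1], xs[1:]], axis=1)               # shape (50, 2)
print(pairs.shape, np.allclose(pairs, train, atol=1e-9))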

In [4]:
initial = np.load("initial.npy")
initial


Out[4]:
array([  0.00000000e+00,   1.25333234e-01,   2.48689887e-01,
         3.68124553e-01,   4.81753674e-01,   5.87785252e-01,
         6.84547106e-01,   7.70513243e-01,   8.44327926e-01,
         9.04827052e-01,   9.51056516e-01,   9.82287251e-01,
         9.98026728e-01,   9.98026728e-01,   9.82287251e-01,
         9.51056516e-01,   9.04827052e-01,   8.44327926e-01,
         7.70513243e-01,   6.84547106e-01,   5.87785252e-01,
         4.81753674e-01,   3.68124553e-01,   2.48689887e-01,
         1.25333234e-01,  -3.21624530e-16,  -1.25333234e-01,
        -2.48689887e-01,  -3.68124553e-01,  -4.81753674e-01,
        -5.87785252e-01,  -6.84547106e-01,  -7.70513243e-01,
        -8.44327926e-01,  -9.04827052e-01,  -9.51056516e-01,
        -9.82287251e-01,  -9.98026728e-01,  -9.98026728e-01,
        -9.82287251e-01,  -9.51056516e-01,  -9.04827052e-01,
        -8.44327926e-01,  -7.70513243e-01,  -6.84547106e-01,
        -5.87785252e-01,  -4.81753674e-01,  -3.68124553e-01,
        -2.48689887e-01,  -1.25333234e-01])
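
initial holds the 50-sample seed sequence fed to the model before it starts
predicting on its own. It appears to be the same sine period as the input column
of train; a quick sanity check under that assumption:

In [ ]:
# Sanity check (assumption): the seed sequence is the input column of the
# training pairs, i.e. x_0 .. x_49 of the same sine period.
print(initial.shape, train.shape)
print(np.allclose(initial, train[:, 0]))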

In [5]:
output = np.load("output.npy")
output


Out[5]:
array([ 0.01133673,  0.15492176,  0.30271977,  0.44977117,  0.58921814,
        0.71324146,  0.81498379,  0.89058036,  0.93998843,  0.96619099,
        0.97360379,  0.96670812,  0.94931561,  0.92434651,  0.89387292,
        0.85925227,  0.82126731,  0.78023773,  0.7360968 ,  0.68843406,
        0.63650692,  0.57922614,  0.5151242 ,  0.44232476,  0.35855478,
        0.26129156,  0.14820226,  0.01810195, -0.12748618, -0.28270563,
       -0.43724248, -0.57912713, -0.69913578, -0.79326099, -0.86196303,
       -0.90803504, -0.9348954 , -0.94572079, -0.94314921, -0.92925185,
       -0.90560192, -0.87336379, -0.83337641, -0.78622371, -0.73229378,
       -0.67182904, -0.60497272, -0.53181446, -0.45243973, -0.36698425,
       -0.27569714, -0.17901251, -0.07762931,  0.02740432,  0.13461025,
        0.24202015,  0.34716648,  0.44717056,  0.53897142,  0.61969727,
        0.68710285,  0.73992068,  0.7779848 ,  0.80209059,  0.81367284,
        0.81444532,  0.80610102,  0.7901147 ,  0.76764125,  0.73947489,
        0.70604163,  0.66740257,  0.6232512 ,  0.57290059,  0.51525784,
        0.44879788,  0.3715657 ,  0.28127837,  0.17565687,  0.05318983,
       -0.08552061, -0.23610872, -0.3896966 , -0.53467548, -0.66076076,
       -0.76227063, -0.83834219, -0.8910951 , -0.92375076, -0.93952793,
       -0.94119412, -0.93096584, -0.91054904, -0.88122153, -0.84391582,
       -0.79929215, -0.74779934, -0.68972731, -0.62525392, -0.55449033])
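
output contains the 100 values the trained network generated by feeding its own
predictions back in. Assuming the ideal continuation is simply the same sine wave
extended past the seed sequence, a rough error measure looks like this:

In [ ]:
# Rough accuracy check (assumption: the ground truth keeps following the
# same sine wave beyond the 50-sample seed).
steps = param["length_of_sequences"]
truth = np.sin(2 * np.pi * (len(initial) + np.arange(len(output))) / steps)
rmse = np.sqrt(np.mean((output - truth) ** 2))
print(rmse)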

In [6]:
losses = np.load("losses.npy")
losses


Out[6]:
array([[  1.00000000e+01,   5.20243287e-01],
       [  2.00000000e+01,   4.92595434e-01],
       [  3.00000000e+01,   5.01385510e-01],
       [  4.00000000e+01,   4.91353065e-01],
       [  5.00000000e+01,   3.59666169e-01],
       [  6.00000000e+01,   2.44753614e-01],
       [  7.00000000e+01,   1.19064234e-01],
       [  8.00000000e+01,   5.42962924e-02],
       [  9.00000000e+01,   4.87622656e-02],
       [  1.00000000e+02,   4.00352776e-02],
       [  1.10000000e+02,   3.68244089e-02],
       [  1.20000000e+02,   2.94172876e-02],
       [  1.30000000e+02,   2.73088589e-02],
       [  1.40000000e+02,   2.90743727e-02],
       [  1.50000000e+02,   2.83052269e-02],
       [  1.60000000e+02,   2.25810166e-02],
       [  1.70000000e+02,   2.19949819e-02],
       [  1.80000000e+02,   2.04222221e-02],
       [  1.90000000e+02,   2.01858040e-02],
       [  2.00000000e+02,   2.14530285e-02],
       [  2.10000000e+02,   1.73267536e-02],
       [  2.20000000e+02,   1.83755904e-02],
       [  2.30000000e+02,   1.42588047e-02],
       [  2.40000000e+02,   1.74222644e-02],
       [  2.50000000e+02,   1.74356494e-02],
       [  2.60000000e+02,   1.44396471e-02],
       [  2.70000000e+02,   1.47989579e-02],
       [  2.80000000e+02,   1.39479963e-02],
       [  2.90000000e+02,   1.21392598e-02],
       [  3.00000000e+02,   1.18540954e-02],
       [  3.10000000e+02,   1.16247963e-02],
       [  3.20000000e+02,   1.19099328e-02],
       [  3.30000000e+02,   1.01121906e-02],
       [  3.40000000e+02,   8.98480415e-03],
       [  3.50000000e+02,   9.68577527e-03],
       [  3.60000000e+02,   1.02559021e-02],
       [  3.70000000e+02,   8.60874821e-03],
       [  3.80000000e+02,   8.66494700e-03],
       [  3.90000000e+02,   8.37636366e-03],
       [  4.00000000e+02,   8.46992806e-03],
       [  4.10000000e+02,   7.97344558e-03],
       [  4.20000000e+02,   6.70812000e-03],
       [  4.30000000e+02,   7.18934648e-03],
       [  4.40000000e+02,   7.37704569e-03],
       [  4.50000000e+02,   8.22227262e-03],
       [  4.60000000e+02,   7.40485964e-03],
       [  4.70000000e+02,   6.93778135e-03],
       [  4.80000000e+02,   6.22616010e-03],
       [  4.90000000e+02,   6.15961570e-03],
       [  5.00000000e+02,   6.02331618e-03],
       [  5.10000000e+02,   5.54964179e-03],
       [  5.20000000e+02,   5.63544035e-03],
       [  5.30000000e+02,   4.91877971e-03],
       [  5.40000000e+02,   5.64366346e-03],
       [  5.50000000e+02,   4.83991206e-03],
       [  5.60000000e+02,   4.38862899e-03],
       [  5.70000000e+02,   4.65195766e-03],
       [  5.80000000e+02,   4.11392702e-03],
       [  5.90000000e+02,   4.01122635e-03],
       [  6.00000000e+02,   3.31371208e-03],
       [  6.10000000e+02,   3.61082563e-03],
       [  6.20000000e+02,   3.28705134e-03],
       [  6.30000000e+02,   3.47970007e-03],
       [  6.40000000e+02,   3.13083362e-03],
       [  6.50000000e+02,   3.13100684e-03],
       [  6.60000000e+02,   2.38889689e-03],
       [  6.70000000e+02,   2.98917526e-03],
       [  6.80000000e+02,   2.60403869e-03],
       [  6.90000000e+02,   2.70902715e-03],
       [  7.00000000e+02,   2.28757504e-03],
       [  7.10000000e+02,   2.25634803e-03],
       [  7.20000000e+02,   2.25065299e-03],
       [  7.30000000e+02,   2.11460376e-03],
       [  7.40000000e+02,   1.59069884e-03],
       [  7.50000000e+02,   1.63200335e-03],
       [  7.60000000e+02,   1.61291356e-03],
       [  7.70000000e+02,   1.50447991e-03],
       [  7.80000000e+02,   1.34348404e-03],
       [  7.90000000e+02,   1.36733591e-03],
       [  8.00000000e+02,   1.37010694e-03],
       [  8.10000000e+02,   1.22922892e-03],
       [  8.20000000e+02,   1.17933424e-03],
       [  8.30000000e+02,   9.60834092e-04],
       [  8.40000000e+02,   9.82707134e-04],
       [  8.50000000e+02,   8.96560552e-04],
       [  8.60000000e+02,   9.42526443e-04],
       [  8.70000000e+02,   7.84035423e-04],
       [  8.80000000e+02,   7.71636085e-04],
       [  8.90000000e+02,   7.97518471e-04],
       [  9.00000000e+02,   6.79667050e-04],
       [  9.10000000e+02,   6.36116834e-04],
       [  9.20000000e+02,   7.62892189e-04],
       [  9.30000000e+02,   5.74593199e-04],
       [  9.40000000e+02,   6.53824536e-04],
       [  9.50000000e+02,   6.03449880e-04],
       [  9.60000000e+02,   5.51773992e-04],
       [  9.70000000e+02,   4.80962626e-04],
       [  9.80000000e+02,   5.47808653e-04],
       [  9.90000000e+02,   5.13695704e-04],
       [  1.00000000e+03,   4.71097039e-04],
       [  1.01000000e+03,   3.81501304e-04],
       [  1.02000000e+03,   4.16569266e-04],
       [  1.03000000e+03,   4.05381987e-04],
       [  1.04000000e+03,   4.35689639e-04],
       [  1.05000000e+03,   4.13052243e-04],
       [  1.06000000e+03,   3.44905042e-04],
       [  1.07000000e+03,   4.46472841e-04],
       [  1.08000000e+03,   3.51521070e-04],
       [  1.09000000e+03,   3.93866911e-04],
       [  1.10000000e+03,   3.22893437e-04],
       [  1.11000000e+03,   3.54902731e-04],
       [  1.12000000e+03,   3.55403201e-04],
       [  1.13000000e+03,   3.47026507e-04],
       [  1.14000000e+03,   3.67815112e-04],
       [  1.15000000e+03,   3.39893362e-04],
       [  1.16000000e+03,   3.30579322e-04],
       [  1.17000000e+03,   3.87271954e-04],
       [  1.18000000e+03,   2.52889848e-04],
       [  1.19000000e+03,   2.95847130e-04],
       [  1.20000000e+03,   3.13612341e-04],
       [  1.21000000e+03,   2.73308950e-04],
       [  1.22000000e+03,   3.44546512e-04],
       [  1.23000000e+03,   3.56843200e-04],
       [  1.24000000e+03,   3.67936154e-04],
       [  1.25000000e+03,   3.56175093e-04],
       [  1.26000000e+03,   3.39981547e-04],
       [  1.27000000e+03,   3.40948842e-04],
       [  1.28000000e+03,   2.92601733e-04],
       [  1.29000000e+03,   3.26126523e-04],
       [  1.30000000e+03,   3.35231511e-04],
       [  1.31000000e+03,   2.94974714e-04],
       [  1.32000000e+03,   3.41645093e-04],
       [  1.33000000e+03,   2.22753661e-04],
       [  1.34000000e+03,   3.09064519e-04],
       [  1.35000000e+03,   2.30097095e-04],
       [  1.36000000e+03,   2.65095849e-04],
       [  1.37000000e+03,   2.57833599e-04],
       [  1.38000000e+03,   2.95214879e-04],
       [  1.39000000e+03,   2.85403134e-04],
       [  1.40000000e+03,   2.65569688e-04],
       [  1.41000000e+03,   2.64884147e-04],
       [  1.42000000e+03,   2.77443789e-04],
       [  1.43000000e+03,   2.89349933e-04],
       [  1.44000000e+03,   3.01830121e-04],
       [  1.45000000e+03,   2.98708241e-04],
       [  1.46000000e+03,   2.64214497e-04],
       [  1.47000000e+03,   2.71171913e-04],
       [  1.48000000e+03,   2.66151910e-04],
       [  1.49000000e+03,   2.90051859e-04],
       [  1.50000000e+03,   2.80119915e-04],
       [  1.51000000e+03,   2.69466807e-04],
       [  1.52000000e+03,   2.62129674e-04],
       [  1.53000000e+03,   2.44191993e-04],
       [  1.54000000e+03,   2.25353797e-04],
       [  1.55000000e+03,   2.71113822e-04],
       [  1.56000000e+03,   2.90811964e-04],
       [  1.57000000e+03,   2.80629407e-04],
       [  1.58000000e+03,   2.75318482e-04],
       [  1.59000000e+03,   2.79436703e-04],
       [  1.60000000e+03,   2.92914221e-04],
       [  1.61000000e+03,   2.83219939e-04],
       [  1.62000000e+03,   2.47961550e-04],
       [  1.63000000e+03,   2.66158720e-04],
       [  1.64000000e+03,   1.95204804e-04],
       [  1.65000000e+03,   2.38259934e-04],
       [  1.66000000e+03,   2.12545914e-04],
       [  1.67000000e+03,   2.69888318e-04],
       [  1.68000000e+03,   2.67365773e-04],
       [  1.69000000e+03,   2.42784241e-04],
       [  1.70000000e+03,   2.56205589e-04],
       [  1.71000000e+03,   2.37503089e-04],
       [  1.72000000e+03,   2.54610321e-04],
       [  1.73000000e+03,   2.29985730e-04],
       [  1.74000000e+03,   2.15898530e-04],
       [  1.75000000e+03,   2.32763661e-04],
       [  1.76000000e+03,   2.52211990e-04],
       [  1.77000000e+03,   2.67368072e-04],
       [  1.78000000e+03,   2.16428016e-04],
       [  1.79000000e+03,   2.12942599e-04],
       [  1.80000000e+03,   2.80447653e-04],
       [  1.81000000e+03,   2.27673649e-04],
       [  1.82000000e+03,   2.36064487e-04],
       [  1.83000000e+03,   2.16482455e-04],
       [  1.84000000e+03,   2.58535787e-04],
       [  1.85000000e+03,   2.38137320e-04],
       [  1.86000000e+03,   2.59868626e-04],
       [  1.87000000e+03,   2.31765691e-04],
       [  1.88000000e+03,   2.74534803e-04],
       [  1.89000000e+03,   2.41528905e-04],
       [  1.90000000e+03,   2.56438274e-04],
       [  1.91000000e+03,   2.14143307e-04],
       [  1.92000000e+03,   2.53135251e-04],
       [  1.93000000e+03,   2.30945254e-04],
       [  1.94000000e+03,   2.59688095e-04],
       [  1.95000000e+03,   1.89327387e-04],
       [  1.96000000e+03,   2.59210530e-04],
       [  1.97000000e+03,   2.27051845e-04],
       [  1.98000000e+03,   2.52788363e-04],
       [  1.99000000e+03,   2.33982078e-04],
       [  2.00000000e+03,   2.02843963e-04]])
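
The two columns are the training epoch (logged every 10 epochs, up to
num_of_training_epochs) and the loss at that point; it drops from about 0.52 to
about 2e-4, roughly three orders of magnitude. A small sketch summarising that:

In [ ]:
# The columns appear to be (epoch, loss), logged every 10 epochs.
epochs, loss = losses[:, 0], losses[:, 1]
print(len(losses), epochs[0], epochs[-1])     # 200 snapshots, epoch 10 to 2000
print(loss[0], loss[-1], loss[0] / loss[-1])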

In [7]:
# Training signal (first column of the training pairs), the seed sequence, and
# the model's predictions on a shared integer index: the predictions start
# where the seed sequence ends.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"],
                         index=range(len(initial), len(initial) + len(output)))
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])


Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f6cd898>
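
The dashed black curve is the model's own prediction picking up where the seed
sequence ends. One optional touch-up (a sketch using the same pandas/matplotlib
calls) is to mark that boundary explicitly:

In [ ]:
# Redraw the same plot and mark where the seed ends and the free-running
# prediction begins.
ax = merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
ax.axvline(x=len(initial), color="gray", linestyle=":")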

In [8]:
# Loss curve over training epochs, on a log scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")


Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f69f3c8>
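
The curve flattens out and gets noisy after roughly epoch 1000. If that tail is
distracting, a short rolling mean over the logged points smooths it; a sketch:

In [ ]:
# Smooth the loss with a rolling mean over 10 logged points (100 epochs).
smoothed = losses_df.set_index("epoch")["loss"].rolling(10, min_periods=1).mean()
smoothed.plot(figsize=(15, 5), grid=True, logy=True)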

In [ ]: