In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
# Load the experiment hyperparameters from param.yaml.
# NOTE: the original used yaml.load(file.read()) with no Loader — that is
# unsafe on untrusted input (can construct arbitrary Python objects) and is
# an error in PyYAML >= 6, where a Loader argument is required.
# yaml.safe_load restricts parsing to plain YAML tags and accepts the open
# file object directly, so the intermediate .read() is unnecessary.
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.1,
'length_of_sequences': 70,
'num_of_hidden_nodes': 2,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 2000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
In [3]:
# Training data referenced by the config; Out[3] shows an (N, 2) float array
# of [x_t, x_{t+1}] pairs from a sine wave — presumably produced by the
# repo's data generator (TODO confirm against ../train_data/).
train = np.load(param["train_data_path"])
train  # display a preview for sanity-checking
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
In [4]:
# Seed sequence fed to the trained RNN before prediction starts; Out[4]
# shows a 1-D array of 70 sine samples (matches param["length_of_sequences"]).
initial = np.load("initial.npy")
initial  # display for inspection
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01, 6.43249060e-16,
1.25333234e-01, 2.48689887e-01, 3.68124553e-01,
4.81753674e-01, 5.87785252e-01, 6.84547106e-01,
7.70513243e-01, 8.44327926e-01, 9.04827052e-01,
9.51056516e-01, 9.82287251e-01, 9.98026728e-01,
9.98026728e-01, 9.82287251e-01, 9.51056516e-01,
9.04827052e-01, 8.44327926e-01, 7.70513243e-01,
6.84547106e-01])
In [5]:
# Model predictions continuing the seed sequence; Out[5] shows a 1-D array
# of 100 values (matches param["num_of_prediction_epochs"]).
output = np.load("output.npy")
output  # display for inspection
Out[5]:
array([ 0.62315255, 0.54665452, 0.45530486, 0.35139382, 0.23411691,
0.10390853, -0.03704954, -0.18447071, -0.33199799, -0.47185934,
-0.59637409, -0.69993216, -0.78025371, -0.83813661, -0.87618113,
-0.89748937, -0.90486014, -0.90046138, -0.88577843, -0.86167377,
-0.82847089, -0.7860294 , -0.73380947, -0.67093927, -0.59631169,
-0.50875121, -0.40730312, -0.29168874, -0.16291696, -0.02390003,
0.12026483, 0.26274908, 0.39627424, 0.51472646, 0.61429727,
0.69369882, 0.75362498, 0.7959376 , 0.82294619, 0.83692265,
0.83984011, 0.83326429, 0.8183316 , 0.79576826, 0.76591957,
0.72877949, 0.68401432, 0.63098687, 0.56879461, 0.49634039,
0.41247401, 0.31623939, 0.20725003, 0.08616637, -0.04483444,
-0.18176198, -0.31886524, -0.44922858, -0.56603056, -0.66413277,
-0.74112803, -0.79724371, -0.83439213, -0.85509562, -0.86174446,
-0.85624093, -0.83989686, -0.81345129, -0.77712923, -0.73070878,
-0.6735937 , -0.60490686, -0.52363855, -0.42889339, -0.32027954,
-0.19844738, -0.0656803 , 0.07372547, 0.21365884, 0.34716868,
0.46792889, 0.57150036, 0.65582365, 0.72092319, 0.76820105,
0.79971325, 0.81763446, 0.82393724, 0.82023782, 0.80774677,
0.78726935, 0.75922829, 0.7236889 , 0.68038338, 0.62873554,
0.56789714, 0.4968195 , 0.41438788, 0.3196609 , 0.21223342])
In [6]:
# Training-loss history; Out[6] shows an (N, 2) array of [epoch, loss] rows
# recorded every 10 epochs up to param["num_of_training_epochs"] (2000).
losses = np.load("losses.npy")
losses  # display for inspection
Out[6]:
array([[ 1.00000000e+01, 4.79618609e-01],
[ 2.00000000e+01, 5.40107667e-01],
[ 3.00000000e+01, 5.15341938e-01],
[ 4.00000000e+01, 4.95510876e-01],
[ 5.00000000e+01, 4.94567722e-01],
[ 6.00000000e+01, 4.78342146e-01],
[ 7.00000000e+01, 3.75705034e-01],
[ 8.00000000e+01, 3.29462916e-01],
[ 9.00000000e+01, 2.24809900e-01],
[ 1.00000000e+02, 1.65608898e-01],
[ 1.10000000e+02, 1.24914169e-01],
[ 1.20000000e+02, 9.90404114e-02],
[ 1.30000000e+02, 8.22229981e-02],
[ 1.40000000e+02, 6.86122105e-02],
[ 1.50000000e+02, 5.31651005e-02],
[ 1.60000000e+02, 4.65173200e-02],
[ 1.70000000e+02, 3.75925787e-02],
[ 1.80000000e+02, 3.09311897e-02],
[ 1.90000000e+02, 3.17575373e-02],
[ 2.00000000e+02, 2.18343455e-02],
[ 2.10000000e+02, 2.06140764e-02],
[ 2.20000000e+02, 1.67572852e-02],
[ 2.30000000e+02, 1.36275645e-02],
[ 2.40000000e+02, 9.44721233e-03],
[ 2.50000000e+02, 8.43747705e-03],
[ 2.60000000e+02, 7.99871143e-03],
[ 2.70000000e+02, 5.43500297e-03],
[ 2.80000000e+02, 5.75964432e-03],
[ 2.90000000e+02, 4.18716064e-03],
[ 3.00000000e+02, 3.06826760e-03],
[ 3.10000000e+02, 3.11847567e-03],
[ 3.20000000e+02, 2.69980310e-03],
[ 3.30000000e+02, 3.20884516e-03],
[ 3.40000000e+02, 2.74455780e-03],
[ 3.50000000e+02, 2.47292593e-03],
[ 3.60000000e+02, 3.13712680e-03],
[ 3.70000000e+02, 2.77093076e-03],
[ 3.80000000e+02, 2.90882168e-03],
[ 3.90000000e+02, 2.75964569e-03],
[ 4.00000000e+02, 2.61654425e-03],
[ 4.10000000e+02, 2.36298260e-03],
[ 4.20000000e+02, 1.89715694e-03],
[ 4.30000000e+02, 2.72598863e-03],
[ 4.40000000e+02, 2.35387962e-03],
[ 4.50000000e+02, 2.18647951e-03],
[ 4.60000000e+02, 2.00867606e-03],
[ 4.70000000e+02, 2.31416081e-03],
[ 4.80000000e+02, 1.71974243e-03],
[ 4.90000000e+02, 2.09600385e-03],
[ 5.00000000e+02, 1.85976585e-03],
[ 5.10000000e+02, 2.05569528e-03],
[ 5.20000000e+02, 1.65785069e-03],
[ 5.30000000e+02, 2.02819193e-03],
[ 5.40000000e+02, 2.02111038e-03],
[ 5.50000000e+02, 1.81618938e-03],
[ 5.60000000e+02, 1.89268915e-03],
[ 5.70000000e+02, 2.16356432e-03],
[ 5.80000000e+02, 2.11596349e-03],
[ 5.90000000e+02, 1.80142466e-03],
[ 6.00000000e+02, 1.64836238e-03],
[ 6.10000000e+02, 1.77818921e-03],
[ 6.20000000e+02, 1.67977111e-03],
[ 6.30000000e+02, 1.66624179e-03],
[ 6.40000000e+02, 1.67892966e-03],
[ 6.50000000e+02, 1.82152027e-03],
[ 6.60000000e+02, 1.76722324e-03],
[ 6.70000000e+02, 1.72811956e-03],
[ 6.80000000e+02, 1.48373167e-03],
[ 6.90000000e+02, 1.69229286e-03],
[ 7.00000000e+02, 1.31136982e-03],
[ 7.10000000e+02, 1.53996621e-03],
[ 7.20000000e+02, 1.25463249e-03],
[ 7.30000000e+02, 1.59791415e-03],
[ 7.40000000e+02, 1.43514259e-03],
[ 7.50000000e+02, 1.39277999e-03],
[ 7.60000000e+02, 1.53124274e-03],
[ 7.70000000e+02, 1.35149702e-03],
[ 7.80000000e+02, 1.53238745e-03],
[ 7.90000000e+02, 1.40684459e-03],
[ 8.00000000e+02, 1.27870543e-03],
[ 8.10000000e+02, 1.22603297e-03],
[ 8.20000000e+02, 1.14512246e-03],
[ 8.30000000e+02, 1.38315081e-03],
[ 8.40000000e+02, 1.39701576e-03],
[ 8.50000000e+02, 1.33627886e-03],
[ 8.60000000e+02, 1.26489555e-03],
[ 8.70000000e+02, 1.13896106e-03],
[ 8.80000000e+02, 1.26927032e-03],
[ 8.90000000e+02, 1.36843114e-03],
[ 9.00000000e+02, 1.31426542e-03],
[ 9.10000000e+02, 1.23848603e-03],
[ 9.20000000e+02, 1.33252994e-03],
[ 9.30000000e+02, 1.22137694e-03],
[ 9.40000000e+02, 1.20844983e-03],
[ 9.50000000e+02, 1.25226867e-03],
[ 9.60000000e+02, 1.09944749e-03],
[ 9.70000000e+02, 1.21350121e-03],
[ 9.80000000e+02, 1.27549528e-03],
[ 9.90000000e+02, 1.20582082e-03],
[ 1.00000000e+03, 1.19363505e-03],
[ 1.01000000e+03, 1.17702549e-03],
[ 1.02000000e+03, 1.15889893e-03],
[ 1.03000000e+03, 1.13512506e-03],
[ 1.04000000e+03, 1.10094366e-03],
[ 1.05000000e+03, 1.09145883e-03],
[ 1.06000000e+03, 1.17636693e-03],
[ 1.07000000e+03, 1.07661111e-03],
[ 1.08000000e+03, 1.07177789e-03],
[ 1.09000000e+03, 1.15256640e-03],
[ 1.10000000e+03, 1.06110633e-03],
[ 1.11000000e+03, 1.05894019e-03],
[ 1.12000000e+03, 1.05167320e-03],
[ 1.13000000e+03, 1.07405696e-03],
[ 1.14000000e+03, 9.14291944e-04],
[ 1.15000000e+03, 9.55321477e-04],
[ 1.16000000e+03, 9.07967624e-04],
[ 1.17000000e+03, 9.84424958e-04],
[ 1.18000000e+03, 9.43598163e-04],
[ 1.19000000e+03, 1.01463776e-03],
[ 1.20000000e+03, 1.00621104e-03],
[ 1.21000000e+03, 9.81391873e-04],
[ 1.22000000e+03, 7.85237527e-04],
[ 1.23000000e+03, 1.03406829e-03],
[ 1.24000000e+03, 8.76088568e-04],
[ 1.25000000e+03, 1.06072787e-03],
[ 1.26000000e+03, 9.42769344e-04],
[ 1.27000000e+03, 8.39457964e-04],
[ 1.28000000e+03, 9.49165202e-04],
[ 1.29000000e+03, 8.40033987e-04],
[ 1.30000000e+03, 9.60244215e-04],
[ 1.31000000e+03, 7.37405440e-04],
[ 1.32000000e+03, 8.39766290e-04],
[ 1.33000000e+03, 8.13350256e-04],
[ 1.34000000e+03, 8.13845836e-04],
[ 1.35000000e+03, 8.06368422e-04],
[ 1.36000000e+03, 8.66328191e-04],
[ 1.37000000e+03, 7.94349180e-04],
[ 1.38000000e+03, 8.64449015e-04],
[ 1.39000000e+03, 8.33909493e-04],
[ 1.40000000e+03, 8.25901341e-04],
[ 1.41000000e+03, 8.59284715e-04],
[ 1.42000000e+03, 8.80539999e-04],
[ 1.43000000e+03, 6.52629649e-04],
[ 1.44000000e+03, 6.98186632e-04],
[ 1.45000000e+03, 8.46027280e-04],
[ 1.46000000e+03, 7.13806367e-04],
[ 1.47000000e+03, 7.84764416e-04],
[ 1.48000000e+03, 6.75049436e-04],
[ 1.49000000e+03, 8.29611847e-04],
[ 1.50000000e+03, 7.66571844e-04],
[ 1.51000000e+03, 7.86703837e-04],
[ 1.52000000e+03, 7.58278824e-04],
[ 1.53000000e+03, 7.49155879e-04],
[ 1.54000000e+03, 7.77323963e-04],
[ 1.55000000e+03, 6.74729177e-04],
[ 1.56000000e+03, 8.11565726e-04],
[ 1.57000000e+03, 7.40313611e-04],
[ 1.58000000e+03, 7.40257325e-04],
[ 1.59000000e+03, 6.88830740e-04],
[ 1.60000000e+03, 6.25791261e-04],
[ 1.61000000e+03, 6.56648888e-04],
[ 1.62000000e+03, 7.14924186e-04],
[ 1.63000000e+03, 7.83146184e-04],
[ 1.64000000e+03, 6.69811678e-04],
[ 1.65000000e+03, 6.50023343e-04],
[ 1.66000000e+03, 6.81165606e-04],
[ 1.67000000e+03, 6.20306411e-04],
[ 1.68000000e+03, 6.85493869e-04],
[ 1.69000000e+03, 7.69070699e-04],
[ 1.70000000e+03, 6.27239817e-04],
[ 1.71000000e+03, 6.44056650e-04],
[ 1.72000000e+03, 6.39265752e-04],
[ 1.73000000e+03, 7.88696285e-04],
[ 1.74000000e+03, 6.12740812e-04],
[ 1.75000000e+03, 6.23762200e-04],
[ 1.76000000e+03, 6.33345917e-04],
[ 1.77000000e+03, 6.08034898e-04],
[ 1.78000000e+03, 6.65509258e-04],
[ 1.79000000e+03, 5.66256291e-04],
[ 1.80000000e+03, 7.15761795e-04],
[ 1.81000000e+03, 6.15485595e-04],
[ 1.82000000e+03, 6.83100079e-04],
[ 1.83000000e+03, 6.79043238e-04],
[ 1.84000000e+03, 6.31776580e-04],
[ 1.85000000e+03, 5.98713290e-04],
[ 1.86000000e+03, 6.44068699e-04],
[ 1.87000000e+03, 6.19710481e-04],
[ 1.88000000e+03, 6.02419255e-04],
[ 1.89000000e+03, 5.36119041e-04],
[ 1.90000000e+03, 6.15597295e-04],
[ 1.91000000e+03, 6.27469446e-04],
[ 1.92000000e+03, 6.23598346e-04],
[ 1.93000000e+03, 6.30677911e-04],
[ 1.94000000e+03, 6.19192841e-04],
[ 1.95000000e+03, 6.06620044e-04],
[ 1.96000000e+03, 5.41625312e-04],
[ 1.97000000e+03, 6.27327943e-04],
[ 1.98000000e+03, 5.65266469e-04],
[ 1.99000000e+03, 6.04666828e-04],
[ 2.00000000e+03, 5.44412411e-04]])
In [7]:
# Plot ground truth, the seed sequence, and the model's predictions on one
# shared x-axis (time step). The seed occupies steps [0, len(initial)) and
# the predictions continue at [len(initial), len(initial) + len(output)).
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
# axis=1 aligns the three series column-wise on the shared index. The
# original row-wise concat (default axis=0) stacked the frames, producing
# duplicated index values (0..169, 0..69, 70..169); the resulting plot was
# the same only because each column's values happened to sit on contiguous
# index runs. Column-wise concat gives a well-formed frame directly.
merged = pd.concat([train_df, initial_df, output_df], axis=1)
# Trailing ';' suppresses the Axes repr in the cell output.
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"]);
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f349898>
In [8]:
# Visualize training progress: loss per recorded epoch, log-scaled y-axis
# so the early rapid decrease and the late plateau are both readable.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(x="epoch", logy=True, grid=True, figsize=(15, 5))
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f31b828>
In [ ]:
Content source: the nayutaya/tensorflow-rnn-sin repository (GitHub).
Similar notebooks: none listed.