In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
with open("param.yaml", "r") as file:
param = yaml.load(file.read())
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.1,
'length_of_sequences': 50,
'num_of_hidden_nodes': 2,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 2000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
In [3]:
train = np.load(param["train_data_path"])
train
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
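
The pairs above look like a sine wave sampled at 50 points per cycle, stored as (current value, next value) rows. A minimal sketch of how such a training file could be generated, assuming that sampling rate and an arbitrary number of cycles:

import numpy as np

# Assumed layout: one sine cycle spans 50 samples, each row holds (x_t, x_{t+1}).
steps_per_cycle = 50
number_of_cycles = 100  # arbitrary, for illustration only

t = np.arange(steps_per_cycle * number_of_cycles + 1)
wave = np.sin(2.0 * np.pi * t / steps_per_cycle)

# Pair every sample with its successor -> shape (N, 2), matching `train` above.
pairs = np.stack([wave[:-1], wave[1:]], axis=1)
# np.save("../train_data/normal.npy", pairs)
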
In [4]:
initial = np.load("initial.npy")
initial
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01])
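
`initial` is one full 50-step period of the same sine wave and serves as the seed for the prediction phase. A rough, framework-agnostic sketch of the closed-loop (autoregressive) procedure that would turn this seed into the 100-step `output` below; `predict_next` is a hypothetical stand-in for the trained RNN:

def predict_next(window):
    """Hypothetical: maps a length-50 window to the model's next predicted value."""
    raise NotImplementedError

window = list(initial)                                   # 50-step seed sequence
predictions = []
for _ in range(param["num_of_prediction_epochs"]):       # 100 prediction steps
    next_value = predict_next(window[-param["length_of_sequences"]:])
    predictions.append(next_value)
    window.append(next_value)                            # feed the prediction back in
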
In [5]:
output = np.load("output.npy")
output
Out[5]:
array([ 0.02220552, 0.17695722, 0.33647949, 0.49163169, 0.63202316,
0.74922162, 0.83912903, 0.90206307, 0.94112211, 0.96034342,
0.96352643, 0.95375103, 0.9333064 , 0.90378541, 0.86621767,
0.82118589, 0.76891816, 0.70935625, 0.64220965, 0.56700873,
0.48317075, 0.39009929, 0.28734344, 0.17484498, 0.05328465,
-0.07550181, -0.20816538, -0.33978349, -0.46436173, -0.5759452 ,
-0.66995436, -0.74408585, -0.79836076, -0.83447415, -0.85492909,
-0.86234403, -0.8590551 , -0.8469581 , -0.82748622, -0.80164975,
-0.77008945, -0.73312664, -0.69080073, -0.64289522, -0.58895433,
-0.52829409, -0.46002239, -0.3830792 , -0.29633063, -0.19875565,
-0.08977892, 0.03021525, 0.15923822, 0.29315606, 0.42569607,
0.54940462, 0.65741789, 0.74516785, 0.81107044, 0.85602802,
0.88235128, 0.8927713 , 0.88982815, 0.87560409, 0.85166794,
0.81911254, 0.77862537, 0.73055565, 0.67497426, 0.61172825,
0.54049462, 0.46084744, 0.37235501, 0.27472743, 0.1680339 ,
0.05299815, -0.06865866, -0.19393086, -0.31848121, -0.43703657,
-0.54426974, -0.63589543, -0.70948195, -0.76461357, -0.80245006,
-0.82502568, -0.8346259 , -0.83338428, -0.82308871, -0.80512464,
-0.78048694, -0.74981689, -0.71344137, -0.67140561, -0.62349612,
-0.56925654, -0.50800192, -0.43884283, -0.36073628, -0.2725957 ])
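
Since the seed covers steps 0-49, these 100 predictions should follow sin(2πt/50) for t = 50..149. A quick drift check against that ideal continuation, assuming the same period and phase and reusing the arrays loaded above:

import numpy as np

steps_per_cycle = 50  # assumed, consistent with `initial` above
t = np.arange(len(initial), len(initial) + len(output))
true_continuation = np.sin(2.0 * np.pi * t / steps_per_cycle)

rmse = np.sqrt(np.mean((output - true_continuation) ** 2))
print("RMSE against the ideal sine continuation: {:.4f}".format(rmse))
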
In [6]:
losses = np.load("losses.npy")
losses
Out[6]:
array([[ 1.00000000e+01, 5.23743391e-01],
[ 2.00000000e+01, 4.94891286e-01],
[ 3.00000000e+01, 4.98710543e-01],
[ 4.00000000e+01, 4.67874080e-01],
[ 5.00000000e+01, 3.10484111e-01],
[ 6.00000000e+01, 2.22195715e-01],
[ 7.00000000e+01, 1.63801759e-01],
[ 8.00000000e+01, 8.90101269e-02],
[ 9.00000000e+01, 6.87234998e-02],
[ 1.00000000e+02, 5.47289141e-02],
[ 1.10000000e+02, 4.63143401e-02],
[ 1.20000000e+02, 3.39613333e-02],
[ 1.30000000e+02, 3.41295712e-02],
[ 1.40000000e+02, 3.25229391e-02],
[ 1.50000000e+02, 2.56832838e-02],
[ 1.60000000e+02, 2.23396495e-02],
[ 1.70000000e+02, 2.26417109e-02],
[ 1.80000000e+02, 2.08777469e-02],
[ 1.90000000e+02, 1.72339063e-02],
[ 2.00000000e+02, 1.99982319e-02],
[ 2.10000000e+02, 1.35001875e-02],
[ 2.20000000e+02, 1.45024657e-02],
[ 2.30000000e+02, 1.19754495e-02],
[ 2.40000000e+02, 1.30401673e-02],
[ 2.50000000e+02, 1.17093120e-02],
[ 2.60000000e+02, 1.08915996e-02],
[ 2.70000000e+02, 1.05502419e-02],
[ 2.80000000e+02, 8.83982796e-03],
[ 2.90000000e+02, 7.25682266e-03],
[ 3.00000000e+02, 7.82503281e-03],
[ 3.10000000e+02, 7.67409196e-03],
[ 3.20000000e+02, 7.34860636e-03],
[ 3.30000000e+02, 6.07976364e-03],
[ 3.40000000e+02, 5.74384211e-03],
[ 3.50000000e+02, 4.58402419e-03],
[ 3.60000000e+02, 6.28242828e-03],
[ 3.70000000e+02, 5.28894737e-03],
[ 3.80000000e+02, 4.98718210e-03],
[ 3.90000000e+02, 5.19764191e-03],
[ 4.00000000e+02, 4.77626221e-03],
[ 4.10000000e+02, 4.62218421e-03],
[ 4.20000000e+02, 3.79945850e-03],
[ 4.30000000e+02, 3.91404144e-03],
[ 4.40000000e+02, 3.40476073e-03],
[ 4.50000000e+02, 3.79007705e-03],
[ 4.60000000e+02, 3.24301491e-03],
[ 4.70000000e+02, 2.85747857e-03],
[ 4.80000000e+02, 2.98091536e-03],
[ 4.90000000e+02, 3.06239421e-03],
[ 5.00000000e+02, 2.92160432e-03],
[ 5.10000000e+02, 2.39913561e-03],
[ 5.20000000e+02, 2.93029239e-03],
[ 5.30000000e+02, 2.29255785e-03],
[ 5.40000000e+02, 2.45815283e-03],
[ 5.50000000e+02, 2.79739033e-03],
[ 5.60000000e+02, 2.72411737e-03],
[ 5.70000000e+02, 2.07725167e-03],
[ 5.80000000e+02, 2.32753926e-03],
[ 5.90000000e+02, 1.93580415e-03],
[ 6.00000000e+02, 2.19451729e-03],
[ 6.10000000e+02, 1.85182493e-03],
[ 6.20000000e+02, 2.11565173e-03],
[ 6.30000000e+02, 1.60761003e-03],
[ 6.40000000e+02, 1.91048207e-03],
[ 6.50000000e+02, 1.70967472e-03],
[ 6.60000000e+02, 1.75186340e-03],
[ 6.70000000e+02, 1.70146569e-03],
[ 6.80000000e+02, 1.73606281e-03],
[ 6.90000000e+02, 1.49939326e-03],
[ 7.00000000e+02, 1.67312380e-03],
[ 7.10000000e+02, 1.61177921e-03],
[ 7.20000000e+02, 1.63622957e-03],
[ 7.30000000e+02, 1.53446663e-03],
[ 7.40000000e+02, 1.34711864e-03],
[ 7.50000000e+02, 1.37598393e-03],
[ 7.60000000e+02, 1.42106565e-03],
[ 7.70000000e+02, 1.10041664e-03],
[ 7.80000000e+02, 1.10590877e-03],
[ 7.90000000e+02, 1.03693840e-03],
[ 8.00000000e+02, 1.10693031e-03],
[ 8.10000000e+02, 1.12760079e-03],
[ 8.20000000e+02, 1.36499945e-03],
[ 8.30000000e+02, 1.13421946e-03],
[ 8.40000000e+02, 9.82957194e-04],
[ 8.50000000e+02, 1.27879053e-03],
[ 8.60000000e+02, 1.18486723e-03],
[ 8.70000000e+02, 9.78919910e-04],
[ 8.80000000e+02, 9.16758727e-04],
[ 8.90000000e+02, 1.05102966e-03],
[ 9.00000000e+02, 8.41029745e-04],
[ 9.10000000e+02, 1.02954498e-03],
[ 9.20000000e+02, 1.09348923e-03],
[ 9.30000000e+02, 9.21596715e-04],
[ 9.40000000e+02, 1.01260410e-03],
[ 9.50000000e+02, 1.15491415e-03],
[ 9.60000000e+02, 9.41028120e-04],
[ 9.70000000e+02, 8.48680735e-04],
[ 9.80000000e+02, 1.09033380e-03],
[ 9.90000000e+02, 9.63136088e-04],
[ 1.00000000e+03, 8.69731361e-04],
[ 1.01000000e+03, 8.62976594e-04],
[ 1.02000000e+03, 8.62953486e-04],
[ 1.03000000e+03, 9.10163217e-04],
[ 1.04000000e+03, 7.92748062e-04],
[ 1.05000000e+03, 6.91282446e-04],
[ 1.06000000e+03, 8.19663343e-04],
[ 1.07000000e+03, 1.01209793e-03],
[ 1.08000000e+03, 8.39184504e-04],
[ 1.09000000e+03, 8.10647151e-04],
[ 1.10000000e+03, 8.24360875e-04],
[ 1.11000000e+03, 7.71496969e-04],
[ 1.12000000e+03, 7.18168856e-04],
[ 1.13000000e+03, 7.48390798e-04],
[ 1.14000000e+03, 8.13153223e-04],
[ 1.15000000e+03, 7.95782486e-04],
[ 1.16000000e+03, 7.08316104e-04],
[ 1.17000000e+03, 8.58344138e-04],
[ 1.18000000e+03, 7.61858828e-04],
[ 1.19000000e+03, 6.87628693e-04],
[ 1.20000000e+03, 6.93029491e-04],
[ 1.21000000e+03, 7.87120138e-04],
[ 1.22000000e+03, 7.17477349e-04],
[ 1.23000000e+03, 7.33316760e-04],
[ 1.24000000e+03, 6.98375399e-04],
[ 1.25000000e+03, 5.90858806e-04],
[ 1.26000000e+03, 7.15189206e-04],
[ 1.27000000e+03, 6.95743365e-04],
[ 1.28000000e+03, 7.39817973e-04],
[ 1.29000000e+03, 6.64661813e-04],
[ 1.30000000e+03, 6.96594710e-04],
[ 1.31000000e+03, 6.92198402e-04],
[ 1.32000000e+03, 7.59316084e-04],
[ 1.33000000e+03, 5.94548357e-04],
[ 1.34000000e+03, 6.82506361e-04],
[ 1.35000000e+03, 6.26937719e-04],
[ 1.36000000e+03, 6.00635307e-04],
[ 1.37000000e+03, 6.60683902e-04],
[ 1.38000000e+03, 5.90138137e-04],
[ 1.39000000e+03, 5.26610762e-04],
[ 1.40000000e+03, 6.47702080e-04],
[ 1.41000000e+03, 6.22315507e-04],
[ 1.42000000e+03, 5.84963011e-04],
[ 1.43000000e+03, 6.45396998e-04],
[ 1.44000000e+03, 5.38576860e-04],
[ 1.45000000e+03, 6.73991453e-04],
[ 1.46000000e+03, 6.46879547e-04],
[ 1.47000000e+03, 5.67078358e-04],
[ 1.48000000e+03, 6.49933645e-04],
[ 1.49000000e+03, 4.96776833e-04],
[ 1.50000000e+03, 6.83067890e-04],
[ 1.51000000e+03, 5.70574310e-04],
[ 1.52000000e+03, 5.60387212e-04],
[ 1.53000000e+03, 4.90164966e-04],
[ 1.54000000e+03, 4.75333189e-04],
[ 1.55000000e+03, 5.71977522e-04],
[ 1.56000000e+03, 5.73533820e-04],
[ 1.57000000e+03, 5.91787684e-04],
[ 1.58000000e+03, 6.05609035e-04],
[ 1.59000000e+03, 5.46982803e-04],
[ 1.60000000e+03, 5.45559626e-04],
[ 1.61000000e+03, 5.21923008e-04],
[ 1.62000000e+03, 5.57434163e-04],
[ 1.63000000e+03, 5.20558795e-04],
[ 1.64000000e+03, 4.59000323e-04],
[ 1.65000000e+03, 5.12848434e-04],
[ 1.66000000e+03, 4.48702805e-04],
[ 1.67000000e+03, 5.20781381e-04],
[ 1.68000000e+03, 4.89920028e-04],
[ 1.69000000e+03, 4.96701919e-04],
[ 1.70000000e+03, 5.07282617e-04],
[ 1.71000000e+03, 5.13428880e-04],
[ 1.72000000e+03, 4.79963957e-04],
[ 1.73000000e+03, 4.09842498e-04],
[ 1.74000000e+03, 4.50230989e-04],
[ 1.75000000e+03, 5.12941042e-04],
[ 1.76000000e+03, 5.06314274e-04],
[ 1.77000000e+03, 5.02404931e-04],
[ 1.78000000e+03, 4.51179076e-04],
[ 1.79000000e+03, 4.84848017e-04],
[ 1.80000000e+03, 5.06206881e-04],
[ 1.81000000e+03, 4.48027393e-04],
[ 1.82000000e+03, 3.95686482e-04],
[ 1.83000000e+03, 4.23130026e-04],
[ 1.84000000e+03, 4.85251279e-04],
[ 1.85000000e+03, 4.70509898e-04],
[ 1.86000000e+03, 4.70560743e-04],
[ 1.87000000e+03, 4.03481972e-04],
[ 1.88000000e+03, 5.13000181e-04],
[ 1.89000000e+03, 4.63959062e-04],
[ 1.90000000e+03, 4.72538173e-04],
[ 1.91000000e+03, 3.77635966e-04],
[ 1.92000000e+03, 4.81549097e-04],
[ 1.93000000e+03, 4.46246297e-04],
[ 1.94000000e+03, 4.90528997e-04],
[ 1.95000000e+03, 4.03635611e-04],
[ 1.96000000e+03, 4.58088354e-04],
[ 1.97000000e+03, 4.28521482e-04],
[ 1.98000000e+03, 5.18360233e-04],
[ 1.99000000e+03, 4.14475566e-04],
[ 2.00000000e+03, 3.56514269e-04]])
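
`losses` records the training loss every 10 epochs (column 0: epoch, column 1: loss), up to the 2000 epochs configured in `param`. A small summary of how far it drops, computed from the array above:

epochs = losses[:, 0].astype(int)
loss_values = losses[:, 1]
print("loss at epoch {}: {:.4f}".format(epochs[0], loss_values[0]))
print("loss at epoch {}: {:.6f}".format(epochs[-1], loss_values[-1]))
print("reduction factor: {:.0f}x".format(loss_values[0] / loss_values[-1]))
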
In [7]:
# Ground-truth sine: the first len(initial) + len(output) = 150 samples, input column only.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
# Seed sequence fed to the model (steps 0-49).
initial_df = pd.DataFrame(initial, columns=["initial"])
# Predictions, indexed to start right after the seed (steps 50-149).
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
# Concatenate the three series; each keeps its own index and column.
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x1119d5898>
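
The row-wise concat above works because the three frames have distinct column names, so the NaN-filled rows are simply skipped when plotting. An equivalent, arguably clearer variant would align the frames column-wise on the shared integer index:

merged = pd.concat([train_df, initial_df, output_df], axis=1)
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
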
In [8]:
# Training loss recorded every 10 epochs, plotted against the epoch on a log scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x1119a7860>
In [ ]:
Content source: nayutaya/tensorflow-rnn-sin