In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.1,
'length_of_sequences': 50,
'num_of_hidden_nodes': 1,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 2000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
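The notebook only loads these hyperparameters; the model itself is built in the repository's training script. As a rough, hypothetical sketch (not code from the repo) of how such a parameter set is typically wired up with the TensorFlow 1.x API, using the forget_bias, num_of_hidden_nodes, learning_rate, and optimizer entries above:

import tensorflow as tf  # TensorFlow 1.x-style API assumed

# Illustrative only: map the loaded hyperparameters onto an LSTM cell and an optimizer.
cell = tf.nn.rnn_cell.BasicLSTMCell(
    param["num_of_hidden_nodes"], forget_bias=param["forget_bias"])
optimizer = getattr(tf.train, param["optimizer"])(
    learning_rate=param["learning_rate"])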
In [3]:
train = np.load(param["train_data_path"])
train
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
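The training data is a two-column array of (current value, next value) pairs taken from a sine wave sampled 50 times per period, matching length_of_sequences above. A minimal sketch of how data shaped like normal.npy could be generated (the repository's actual generator script may differ):

num_steps = 10000  # illustrative length, not taken from the repo
t = np.arange(num_steps + 1)
wave = np.sin(2.0 * np.pi * t / 50.0)            # sine wave, 50 samples per period
pairs = np.stack([wave[:-1], wave[1:]], axis=1)  # shape (num_steps, 2): (x_t, x_{t+1})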
In [4]:
initial = np.load("initial.npy")
initial
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01])
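initial is the 50-step seed sequence fed to the trained network before prediction starts; it is exactly one period of the same sine wave. A quick consistency check, assuming the 2*pi/50 sampling used for the training data:

np.allclose(initial, np.sin(2.0 * np.pi * np.arange(50) / 50.0))  # expected: True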
In [5]:
output = np.load("output.npy")
output
Out[5]:
array([-0.08699846, -0.08564895, -0.08592695, -0.08634382, -0.08679795,
-0.08728403, -0.08780378, -0.0883593 , -0.0889532 , -0.08958817,
-0.09026688, -0.09099257, -0.09176815, -0.09259725, -0.09348339,
-0.09443063, -0.09544307, -0.09652513, -0.09768158, -0.09891766,
-0.10023862, -0.10165018, -0.10315853, -0.10477018, -0.10649228,
-0.10833204, -0.11029744, -0.11239684, -0.1146391 , -0.11703384,
-0.11959106, -0.12232155, -0.12523651, -0.12834799, -0.13166851,
-0.13521147, -0.13899118, -0.14302218, -0.14732033, -0.15190184,
-0.1567837 , -0.16198361, -0.16752017, -0.17341238, -0.17967987,
-0.18634254, -0.19342071, -0.20093477, -0.20890528, -0.21735206,
-0.22629449, -0.23575097, -0.24573812, -0.256271 , -0.26736164,
-0.27901921, -0.29124856, -0.30405 , -0.31741825, -0.33134142,
-0.3458004 , -0.36076787, -0.37620783, -0.39207509, -0.40831459,
-0.42486191, -0.44164345, -0.45857722, -0.4755742 , -0.49253994,
-0.5093767 , -0.52598572, -0.54227 , -0.55813694, -0.57350063,
-0.58828437, -0.60242242, -0.61586124, -0.62856042, -0.64049268,
-0.65164351, -0.66201049, -0.67160195, -0.68043566, -0.688537 ,
-0.69593763, -0.70267385, -0.708785 , -0.71431243, -0.71929824,
-0.72378427, -0.72781163, -0.73141998, -0.73464704, -0.73752838,
-0.74009734, -0.74238485, -0.74441934, -0.74622697, -0.74783158])
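output holds the 100 predicted steps (num_of_prediction_epochs above), presumably produced by feeding each prediction back in as the next input. To quantify how far the prediction drifts from the true signal, one could compare it with the sine continuation of the seed; a minimal sketch, assuming the target keeps following the same 50-sample period:

truth = np.sin(2.0 * np.pi * np.arange(50, 150) / 50.0)  # true continuation, steps 50..149
mse = np.mean((output - truth) ** 2)                     # mean squared prediction error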
In [6]:
losses = np.load("losses.npy")
losses
Out[6]:
array([[ 1.00000000e+01, 5.24970829e-01],
[ 2.00000000e+01, 5.02165854e-01],
[ 3.00000000e+01, 5.21976292e-01],
[ 4.00000000e+01, 5.39231241e-01],
[ 5.00000000e+01, 4.54001993e-01],
[ 6.00000000e+01, 4.74606544e-01],
[ 7.00000000e+01, 5.14866590e-01],
[ 8.00000000e+01, 4.63114232e-01],
[ 9.00000000e+01, 4.46319312e-01],
[ 1.00000000e+02, 5.51044106e-01],
[ 1.10000000e+02, 5.19893050e-01],
[ 1.20000000e+02, 5.52232981e-01],
[ 1.30000000e+02, 5.63150644e-01],
[ 1.40000000e+02, 5.12652874e-01],
[ 1.50000000e+02, 4.43862528e-01],
[ 1.60000000e+02, 4.54757333e-01],
[ 1.70000000e+02, 4.07229066e-01],
[ 1.80000000e+02, 2.94437885e-01],
[ 1.90000000e+02, 1.74438298e-01],
[ 2.00000000e+02, 1.17227241e-01],
[ 2.10000000e+02, 9.28259119e-02],
[ 2.20000000e+02, 8.57229009e-02],
[ 2.30000000e+02, 6.66624829e-02],
[ 2.40000000e+02, 6.39367551e-02],
[ 2.50000000e+02, 5.91579638e-02],
[ 2.60000000e+02, 4.92778495e-02],
[ 2.70000000e+02, 5.05197495e-02],
[ 2.80000000e+02, 4.17561233e-02],
[ 2.90000000e+02, 3.70528586e-02],
[ 3.00000000e+02, 3.59562337e-02],
[ 3.10000000e+02, 3.15468274e-02],
[ 3.20000000e+02, 3.34122069e-02],
[ 3.30000000e+02, 2.83443686e-02],
[ 3.40000000e+02, 2.60360483e-02],
[ 3.50000000e+02, 2.84902528e-02],
[ 3.60000000e+02, 2.85529494e-02],
[ 3.70000000e+02, 2.33652610e-02],
[ 3.80000000e+02, 2.33212523e-02],
[ 3.90000000e+02, 2.07243487e-02],
[ 4.00000000e+02, 2.18675397e-02],
[ 4.10000000e+02, 2.17489339e-02],
[ 4.20000000e+02, 1.77957509e-02],
[ 4.30000000e+02, 1.96765903e-02],
[ 4.40000000e+02, 2.10537259e-02],
[ 4.50000000e+02, 2.25521345e-02],
[ 4.60000000e+02, 2.11623479e-02],
[ 4.70000000e+02, 1.88118778e-02],
[ 4.80000000e+02, 1.79973412e-02],
[ 4.90000000e+02, 1.72890909e-02],
[ 5.00000000e+02, 1.77539997e-02],
[ 5.10000000e+02, 1.64153595e-02],
[ 5.20000000e+02, 1.70677919e-02],
[ 5.30000000e+02, 1.59268826e-02],
[ 5.40000000e+02, 1.71308592e-02],
[ 5.50000000e+02, 1.46934977e-02],
[ 5.60000000e+02, 1.35738086e-02],
[ 5.70000000e+02, 1.59684848e-02],
[ 5.80000000e+02, 1.37811629e-02],
[ 5.90000000e+02, 1.39221642e-02],
[ 6.00000000e+02, 1.33097507e-02],
[ 6.10000000e+02, 1.35420905e-02],
[ 6.20000000e+02, 1.25878491e-02],
[ 6.30000000e+02, 1.44119551e-02],
[ 6.40000000e+02, 1.31819174e-02],
[ 6.50000000e+02, 1.35006523e-02],
[ 6.60000000e+02, 1.12969885e-02],
[ 6.70000000e+02, 1.33185461e-02],
[ 6.80000000e+02, 1.28194448e-02],
[ 6.90000000e+02, 1.24936672e-02],
[ 7.00000000e+02, 1.12930182e-02],
[ 7.10000000e+02, 1.07863583e-02],
[ 7.20000000e+02, 1.20906942e-02],
[ 7.30000000e+02, 1.29156755e-02],
[ 7.40000000e+02, 1.05140284e-02],
[ 7.50000000e+02, 1.13994172e-02],
[ 7.60000000e+02, 1.00526242e-02],
[ 7.70000000e+02, 1.07305478e-02],
[ 7.80000000e+02, 1.00358725e-02],
[ 7.90000000e+02, 1.20549370e-02],
[ 8.00000000e+02, 1.20779313e-02],
[ 8.10000000e+02, 1.12246787e-02],
[ 8.20000000e+02, 1.01809883e-02],
[ 8.30000000e+02, 8.49824212e-03],
[ 8.40000000e+02, 1.11759268e-02],
[ 8.50000000e+02, 9.27789137e-03],
[ 8.60000000e+02, 1.04088141e-02],
[ 8.70000000e+02, 9.83861461e-03],
[ 8.80000000e+02, 9.28709377e-03],
[ 8.90000000e+02, 1.02626048e-02],
[ 9.00000000e+02, 8.72140378e-03],
[ 9.10000000e+02, 8.94727930e-03],
[ 9.20000000e+02, 9.43389814e-03],
[ 9.30000000e+02, 1.00873765e-02],
[ 9.40000000e+02, 8.85191001e-03],
[ 9.50000000e+02, 8.38382449e-03],
[ 9.60000000e+02, 8.18282925e-03],
[ 9.70000000e+02, 9.31667164e-03],
[ 9.80000000e+02, 9.57918167e-03],
[ 9.90000000e+02, 8.18235893e-03],
[ 1.00000000e+03, 9.12148505e-03],
[ 1.01000000e+03, 7.68371066e-03],
[ 1.02000000e+03, 7.98046310e-03],
[ 1.03000000e+03, 7.17705023e-03],
[ 1.04000000e+03, 9.03674867e-03],
[ 1.05000000e+03, 9.42576490e-03],
[ 1.06000000e+03, 7.10476143e-03],
[ 1.07000000e+03, 8.26850906e-03],
[ 1.08000000e+03, 8.38900916e-03],
[ 1.09000000e+03, 8.07069242e-03],
[ 1.10000000e+03, 7.83216488e-03],
[ 1.11000000e+03, 8.82564113e-03],
[ 1.12000000e+03, 6.98517682e-03],
[ 1.13000000e+03, 7.92842824e-03],
[ 1.14000000e+03, 7.50246877e-03],
[ 1.15000000e+03, 7.10098725e-03],
[ 1.16000000e+03, 6.98447600e-03],
[ 1.17000000e+03, 7.86724500e-03],
[ 1.18000000e+03, 7.85678998e-03],
[ 1.19000000e+03, 7.57053122e-03],
[ 1.20000000e+03, 7.13722548e-03],
[ 1.21000000e+03, 6.64981408e-03],
[ 1.22000000e+03, 6.94719516e-03],
[ 1.23000000e+03, 6.89419266e-03],
[ 1.24000000e+03, 6.90645119e-03],
[ 1.25000000e+03, 7.21116411e-03],
[ 1.26000000e+03, 6.01117266e-03],
[ 1.27000000e+03, 7.26602506e-03],
[ 1.28000000e+03, 5.97099401e-03],
[ 1.29000000e+03, 6.89710630e-03],
[ 1.30000000e+03, 6.78111985e-03],
[ 1.31000000e+03, 6.99955598e-03],
[ 1.32000000e+03, 6.23831525e-03],
[ 1.33000000e+03, 6.14073174e-03],
[ 1.34000000e+03, 7.05319410e-03],
[ 1.35000000e+03, 6.66815741e-03],
[ 1.36000000e+03, 5.84342470e-03],
[ 1.37000000e+03, 5.81708178e-03],
[ 1.38000000e+03, 6.29056944e-03],
[ 1.39000000e+03, 6.51696790e-03],
[ 1.40000000e+03, 6.52175536e-03],
[ 1.41000000e+03, 7.33786216e-03],
[ 1.42000000e+03, 6.24056300e-03],
[ 1.43000000e+03, 5.86394500e-03],
[ 1.44000000e+03, 6.72449730e-03],
[ 1.45000000e+03, 6.59021363e-03],
[ 1.46000000e+03, 6.42565126e-03],
[ 1.47000000e+03, 6.11220906e-03],
[ 1.48000000e+03, 5.49821090e-03],
[ 1.49000000e+03, 6.15825364e-03],
[ 1.50000000e+03, 5.30183455e-03],
[ 1.51000000e+03, 5.44701889e-03],
[ 1.52000000e+03, 5.96840261e-03],
[ 1.53000000e+03, 5.84102143e-03],
[ 1.54000000e+03, 6.02063257e-03],
[ 1.55000000e+03, 5.79390302e-03],
[ 1.56000000e+03, 5.74832549e-03],
[ 1.57000000e+03, 5.62326051e-03],
[ 1.58000000e+03, 5.37041528e-03],
[ 1.59000000e+03, 5.27119264e-03],
[ 1.60000000e+03, 6.31239079e-03],
[ 1.61000000e+03, 6.53806468e-03],
[ 1.62000000e+03, 5.17426711e-03],
[ 1.63000000e+03, 5.30378940e-03],
[ 1.64000000e+03, 5.91227412e-03],
[ 1.65000000e+03, 5.02133137e-03],
[ 1.66000000e+03, 4.64896904e-03],
[ 1.67000000e+03, 5.38665522e-03],
[ 1.68000000e+03, 5.34902327e-03],
[ 1.69000000e+03, 5.29049756e-03],
[ 1.70000000e+03, 5.55971125e-03],
[ 1.71000000e+03, 4.92698513e-03],
[ 1.72000000e+03, 5.30393375e-03],
[ 1.73000000e+03, 5.52117778e-03],
[ 1.74000000e+03, 4.67033545e-03],
[ 1.75000000e+03, 5.63142588e-03],
[ 1.76000000e+03, 5.04138134e-03],
[ 1.77000000e+03, 5.51196700e-03],
[ 1.78000000e+03, 5.41979773e-03],
[ 1.79000000e+03, 5.46867587e-03],
[ 1.80000000e+03, 4.73293802e-03],
[ 1.81000000e+03, 5.09474752e-03],
[ 1.82000000e+03, 4.67019482e-03],
[ 1.83000000e+03, 4.77354927e-03],
[ 1.84000000e+03, 4.83772717e-03],
[ 1.85000000e+03, 5.21602295e-03],
[ 1.86000000e+03, 4.19009151e-03],
[ 1.87000000e+03, 4.83265892e-03],
[ 1.88000000e+03, 4.83431527e-03],
[ 1.89000000e+03, 5.14559634e-03],
[ 1.90000000e+03, 5.44076553e-03],
[ 1.91000000e+03, 4.79972782e-03],
[ 1.92000000e+03, 4.48630610e-03],
[ 1.93000000e+03, 5.33251604e-03],
[ 1.94000000e+03, 4.69741784e-03],
[ 1.95000000e+03, 4.89854254e-03],
[ 1.96000000e+03, 5.02321124e-03],
[ 1.97000000e+03, 4.91055287e-03],
[ 1.98000000e+03, 4.45442786e-03],
[ 1.99000000e+03, 4.23920760e-03],
[ 2.00000000e+03, 4.46196645e-03]])
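losses records the training loss every 10 epochs as (epoch, loss) rows, from epoch 10 up to num_of_training_epochs = 2000. A hypothetical logging loop that would produce this layout (train_one_epoch is a placeholder, not a function from the repo):

log = []
for epoch in range(1, param["num_of_training_epochs"] + 1):
    loss_value = train_one_epoch()   # placeholder for the actual training step
    if epoch % 10 == 0:
        log.append([epoch, loss_value])
np.save("losses.npy", np.array(log))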
In [7]:
# True signal over the full window: 50 seed steps plus 100 predicted steps.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
# Seed occupies steps 0-49; the prediction is indexed to steps 50-149 so it lines up after the seed.
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])  # dashed black line = prediction
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x106d09940>
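The plot overlays the true signal, the seed, and the dashed prediction on a single 0-149 step axis. Because the three frames already carry the right indices, concatenating them column-wise should give the same picture with a tidier DataFrame; an equivalent variant:

merged = pd.concat([train_df, initial_df, output_df], axis=1)  # align on the shared 0..149 index
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])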
In [8]:
# Training loss (logged every 10 epochs) against epoch, on a log scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x106cda6d8>
In [ ]:
Content source: nayutaya/tensorflow-rnn-sin