In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.02,
'length_of_sequences': 50,
'num_of_hidden_nodes': 2,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 2000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
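These parameters describe the training run that produced the artifacts loaded below: 50-step sequences, 2 hidden nodes (the forget_bias entry suggests an LSTM cell), gradient descent at learning rate 0.02, 2000 training epochs and 100 prediction steps. As a minimal sketch only (an assumption, not necessarily how the repository's training script does it), the optimizer string could be resolved by name against the TensorFlow 1.x API:

import tensorflow as tf  # TensorFlow 1.x API assumed for this sketch

optimizer_cls = getattr(tf.train, param["optimizer"])  # tf.train.GradientDescentOptimizer
optimizer = optimizer_cls(param["learning_rate"])       # step size 0.02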
In [3]:
train = np.load(param["train_data_path"])
train
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
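The training data is a sine wave sampled at 50 points per period, stored as (x_t, x_{t+1}) pairs; the printed values match sin(2πk/50). A minimal sketch of how such a file could be generated (an assumption; the actual generator script is not shown in this notebook):

steps_per_cycle = 50                               # matches length_of_sequences
t = np.arange(steps_per_cycle * 2 + 1)             # two periods, purely illustrative
wave = np.sin(2.0 * np.pi * t / steps_per_cycle)
pairs = np.stack([wave[:-1], wave[1:]], axis=1)    # rows of (current, next) samples
pairs[:3]  # ~ [[0.0, 0.12533], [0.12533, 0.24869], [0.24869, 0.36812]]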
In [4]:
initial = np.load("initial.npy")
initial
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01])
In [5]:
output = np.load("output.npy")
output
Out[5]:
array([-0.09543324, -0.06062651, -0.03316562, -0.01280821, 0.00210615,
0.01298707, 0.02095232, 0.02684927, 0.03130194, 0.03476161,
0.03755079, 0.03989857, 0.04196724, 0.04387192, 0.04569472,
0.04749483, 0.04931542, 0.05118895, 0.05314033, 0.05518964,
0.05735358, 0.0596468 , 0.06208278, 0.06467434, 0.06743403,
0.07037447, 0.07350861, 0.07684982, 0.08041193, 0.08420961,
0.0882581 , 0.09257342, 0.09717241, 0.10207273, 0.10729285,
0.11285202, 0.11877032, 0.12506846, 0.13176787, 0.13889049,
0.14645866, 0.1544949 , 0.16302176, 0.17206141, 0.18163542,
0.19176415, 0.20246631, 0.21375853, 0.22565451, 0.23816422,
0.25129312, 0.26504138, 0.27940261, 0.29436323, 0.30990121,
0.32598549, 0.34257519, 0.35961902, 0.37705529, 0.39481199,
0.41280761, 0.43095213, 0.44914871, 0.46729577, 0.48528934,
0.50302589, 0.52040505, 0.53733224, 0.55372131, 0.56949651,
0.58459395, 0.59896296, 0.61256611, 0.62537903, 0.63738996,
0.64859867, 0.65901494, 0.66865742, 0.67755157, 0.68572861,
0.69322371, 0.70007521, 0.70632255, 0.71200639, 0.71716714,
0.72184432, 0.72607636, 0.72990006, 0.73334992, 0.73645908,
0.73925811, 0.74177569, 0.74403799, 0.74606949, 0.74789238,
0.74952716, 0.75099254, 0.75230527, 0.75348091, 0.75453323])
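initial appears to be one full 50-sample period of the same sine wave, used to seed the trained network, and output is its 100-step continuation. A quick consistency check against param.yaml (a sketch, assuming the arrays come from the configured run):

assert len(initial) == param["length_of_sequences"]       # 50 seed samples
assert len(output) == param["num_of_prediction_epochs"]   # 100 predicted samples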
In [6]:
losses = np.load("losses.npy")
losses
Out[6]:
array([[ 1.00000000e+01, 5.17859817e-01],
[ 2.00000000e+01, 4.98998046e-01],
[ 3.00000000e+01, 5.17539799e-01],
[ 4.00000000e+01, 5.33684254e-01],
[ 5.00000000e+01, 4.50120538e-01],
[ 6.00000000e+01, 4.66546029e-01],
[ 7.00000000e+01, 5.05139887e-01],
[ 8.00000000e+01, 4.55322713e-01],
[ 9.00000000e+01, 4.42264915e-01],
[ 1.00000000e+02, 5.33956170e-01],
[ 1.10000000e+02, 5.00837803e-01],
[ 1.20000000e+02, 5.28975248e-01],
[ 1.30000000e+02, 5.35401702e-01],
[ 1.40000000e+02, 4.87327665e-01],
[ 1.50000000e+02, 4.23016965e-01],
[ 1.60000000e+02, 4.57310975e-01],
[ 1.70000000e+02, 4.68445003e-01],
[ 1.80000000e+02, 4.57484215e-01],
[ 1.90000000e+02, 4.08328593e-01],
[ 2.00000000e+02, 3.75749350e-01],
[ 2.10000000e+02, 3.98944020e-01],
[ 2.20000000e+02, 3.37599069e-01],
[ 2.30000000e+02, 3.64709496e-01],
[ 2.40000000e+02, 2.71187246e-01],
[ 2.50000000e+02, 2.51345515e-01],
[ 2.60000000e+02, 2.64558494e-01],
[ 2.70000000e+02, 2.21746430e-01],
[ 2.80000000e+02, 1.93782657e-01],
[ 2.90000000e+02, 1.87349617e-01],
[ 3.00000000e+02, 1.76416963e-01],
[ 3.10000000e+02, 1.41304001e-01],
[ 3.20000000e+02, 1.30974799e-01],
[ 3.30000000e+02, 1.18791178e-01],
[ 3.40000000e+02, 1.12017252e-01],
[ 3.50000000e+02, 1.02567978e-01],
[ 3.60000000e+02, 8.54820982e-02],
[ 3.70000000e+02, 8.04091617e-02],
[ 3.80000000e+02, 7.72179514e-02],
[ 3.90000000e+02, 6.63317889e-02],
[ 4.00000000e+02, 6.81277737e-02],
[ 4.10000000e+02, 7.29839429e-02],
[ 4.20000000e+02, 5.78701012e-02],
[ 4.30000000e+02, 6.36226907e-02],
[ 4.40000000e+02, 6.09040111e-02],
[ 4.50000000e+02, 6.05218075e-02],
[ 4.60000000e+02, 5.73993102e-02],
[ 4.70000000e+02, 5.35745509e-02],
[ 4.80000000e+02, 5.15523143e-02],
[ 4.90000000e+02, 4.79386561e-02],
[ 5.00000000e+02, 5.05429320e-02],
[ 5.10000000e+02, 4.91594411e-02],
[ 5.20000000e+02, 4.91954088e-02],
[ 5.30000000e+02, 4.47381511e-02],
[ 5.40000000e+02, 5.06808050e-02],
[ 5.50000000e+02, 4.23950665e-02],
[ 5.60000000e+02, 3.71910036e-02],
[ 5.70000000e+02, 4.46714051e-02],
[ 5.80000000e+02, 4.08655629e-02],
[ 5.90000000e+02, 4.09289896e-02],
[ 6.00000000e+02, 3.61069068e-02],
[ 6.10000000e+02, 3.81100364e-02],
[ 6.20000000e+02, 3.78715843e-02],
[ 6.30000000e+02, 3.78756300e-02],
[ 6.40000000e+02, 3.59824747e-02],
[ 6.50000000e+02, 3.92697901e-02],
[ 6.60000000e+02, 3.34591568e-02],
[ 6.70000000e+02, 3.74063626e-02],
[ 6.80000000e+02, 3.41914445e-02],
[ 6.90000000e+02, 3.70244682e-02],
[ 7.00000000e+02, 3.09089608e-02],
[ 7.10000000e+02, 2.93999966e-02],
[ 7.20000000e+02, 3.22674774e-02],
[ 7.30000000e+02, 3.28461677e-02],
[ 7.40000000e+02, 2.80348882e-02],
[ 7.50000000e+02, 3.10689788e-02],
[ 7.60000000e+02, 2.76543181e-02],
[ 7.70000000e+02, 2.90908907e-02],
[ 7.80000000e+02, 2.68693343e-02],
[ 7.90000000e+02, 3.13737765e-02],
[ 8.00000000e+02, 3.01977824e-02],
[ 8.10000000e+02, 3.01491395e-02],
[ 8.20000000e+02, 2.72558220e-02],
[ 8.30000000e+02, 2.35948507e-02],
[ 8.40000000e+02, 2.95558739e-02],
[ 8.50000000e+02, 2.38943808e-02],
[ 8.60000000e+02, 2.70705745e-02],
[ 8.70000000e+02, 2.59567164e-02],
[ 8.80000000e+02, 2.44831219e-02],
[ 8.90000000e+02, 2.65099239e-02],
[ 9.00000000e+02, 2.34903544e-02],
[ 9.10000000e+02, 2.39725038e-02],
[ 9.20000000e+02, 2.44069807e-02],
[ 9.30000000e+02, 2.67997161e-02],
[ 9.40000000e+02, 2.28764098e-02],
[ 9.50000000e+02, 2.11875495e-02],
[ 9.60000000e+02, 2.08958201e-02],
[ 9.70000000e+02, 2.31778137e-02],
[ 9.80000000e+02, 2.31511835e-02],
[ 9.90000000e+02, 2.01585628e-02],
[ 1.00000000e+03, 2.30377223e-02],
[ 1.01000000e+03, 2.03283932e-02],
[ 1.02000000e+03, 1.97151657e-02],
[ 1.03000000e+03, 1.86410733e-02],
[ 1.04000000e+03, 2.27146503e-02],
[ 1.05000000e+03, 2.38351244e-02],
[ 1.06000000e+03, 1.81305818e-02],
[ 1.07000000e+03, 1.96415894e-02],
[ 1.08000000e+03, 2.07977481e-02],
[ 1.09000000e+03, 1.99295487e-02],
[ 1.10000000e+03, 1.84272658e-02],
[ 1.11000000e+03, 2.12104600e-02],
[ 1.12000000e+03, 1.74766257e-02],
[ 1.13000000e+03, 1.93148535e-02],
[ 1.14000000e+03, 1.70696788e-02],
[ 1.15000000e+03, 1.77788325e-02],
[ 1.16000000e+03, 1.70950405e-02],
[ 1.17000000e+03, 1.87355001e-02],
[ 1.18000000e+03, 1.90025978e-02],
[ 1.19000000e+03, 1.79176908e-02],
[ 1.20000000e+03, 1.68702286e-02],
[ 1.21000000e+03, 1.63305420e-02],
[ 1.22000000e+03, 1.66777819e-02],
[ 1.23000000e+03, 1.66886039e-02],
[ 1.24000000e+03, 1.66538712e-02],
[ 1.25000000e+03, 1.67828854e-02],
[ 1.26000000e+03, 1.49708334e-02],
[ 1.27000000e+03, 1.66316107e-02],
[ 1.28000000e+03, 1.41832912e-02],
[ 1.29000000e+03, 1.61960535e-02],
[ 1.30000000e+03, 1.54079655e-02],
[ 1.31000000e+03, 1.59229729e-02],
[ 1.32000000e+03, 1.43228360e-02],
[ 1.33000000e+03, 1.43423248e-02],
[ 1.34000000e+03, 1.58582684e-02],
[ 1.35000000e+03, 1.53038381e-02],
[ 1.36000000e+03, 1.30197909e-02],
[ 1.37000000e+03, 1.28818275e-02],
[ 1.38000000e+03, 1.44328317e-02],
[ 1.39000000e+03, 1.42328823e-02],
[ 1.40000000e+03, 1.46151092e-02],
[ 1.41000000e+03, 1.58220306e-02],
[ 1.42000000e+03, 1.42635154e-02],
[ 1.43000000e+03, 1.30550573e-02],
[ 1.44000000e+03, 1.47180595e-02],
[ 1.45000000e+03, 1.41027831e-02],
[ 1.46000000e+03, 1.39672421e-02],
[ 1.47000000e+03, 1.32001350e-02],
[ 1.48000000e+03, 1.21397497e-02],
[ 1.49000000e+03, 1.31679084e-02],
[ 1.50000000e+03, 1.14337495e-02],
[ 1.51000000e+03, 1.18541168e-02],
[ 1.52000000e+03, 1.28194140e-02],
[ 1.53000000e+03, 1.28950002e-02],
[ 1.54000000e+03, 1.25990808e-02],
[ 1.55000000e+03, 1.23996986e-02],
[ 1.56000000e+03, 1.20278625e-02],
[ 1.57000000e+03, 1.17429821e-02],
[ 1.58000000e+03, 1.13942139e-02],
[ 1.59000000e+03, 1.08894408e-02],
[ 1.60000000e+03, 1.32966591e-02],
[ 1.61000000e+03, 1.36806490e-02],
[ 1.62000000e+03, 1.07701560e-02],
[ 1.63000000e+03, 1.09960940e-02],
[ 1.64000000e+03, 1.21318670e-02],
[ 1.65000000e+03, 1.02160964e-02],
[ 1.66000000e+03, 9.66790225e-03],
[ 1.67000000e+03, 1.11636901e-02],
[ 1.68000000e+03, 1.07182637e-02],
[ 1.69000000e+03, 1.03527093e-02],
[ 1.70000000e+03, 1.08238189e-02],
[ 1.71000000e+03, 9.85411927e-03],
[ 1.72000000e+03, 1.07999323e-02],
[ 1.73000000e+03, 1.08791664e-02],
[ 1.74000000e+03, 9.16867703e-03],
[ 1.75000000e+03, 1.05869761e-02],
[ 1.76000000e+03, 9.78048705e-03],
[ 1.77000000e+03, 1.04347467e-02],
[ 1.78000000e+03, 1.05488654e-02],
[ 1.79000000e+03, 1.02532366e-02],
[ 1.80000000e+03, 9.21108946e-03],
[ 1.81000000e+03, 9.53976624e-03],
[ 1.82000000e+03, 8.72293580e-03],
[ 1.83000000e+03, 8.77364539e-03],
[ 1.84000000e+03, 8.85140803e-03],
[ 1.85000000e+03, 9.62420274e-03],
[ 1.86000000e+03, 7.60515500e-03],
[ 1.87000000e+03, 8.91204365e-03],
[ 1.88000000e+03, 8.45636427e-03],
[ 1.89000000e+03, 9.03903600e-03],
[ 1.90000000e+03, 9.32669640e-03],
[ 1.91000000e+03, 8.61874130e-03],
[ 1.92000000e+03, 7.98130129e-03],
[ 1.93000000e+03, 9.29717906e-03],
[ 1.94000000e+03, 8.17752443e-03],
[ 1.95000000e+03, 8.42652470e-03],
[ 1.96000000e+03, 8.42485763e-03],
[ 1.97000000e+03, 7.93940481e-03],
[ 1.98000000e+03, 7.88308959e-03],
[ 1.99000000e+03, 7.14018242e-03],
[ 2.00000000e+03, 7.14133866e-03]])
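The loss log holds one (epoch, loss) row every 10 training epochs, covering the full 2000-epoch run; the loss drops from about 0.518 at epoch 10 to about 0.0071 at epoch 2000. A sketch of the corresponding shape check:

assert losses.shape == (param["num_of_training_epochs"] // 10, 2)               # 200 rows
assert losses[0, 0] == 10 and losses[-1, 0] == param["num_of_training_epochs"]  # epochs 10..2000
losses[0, 1] / losses[-1, 1]  # ~72x reduction over the logged run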
In [7]:
# Overlay the ground-truth wave, the 50-step seed sequence, and the predicted
# continuation; output_df is indexed so it starts where the seed ends.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
initial_df = pd.DataFrame(initial, columns=["initial"])
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x111b51940>
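The figure itself is not embedded in this text export: it overlays the ground-truth wave and the seed sequence with the prediction as a dashed black line, which rises from about -0.10 at step 50 to about 0.75 by step 149. If axis labels are wanted, an optional addition (sketch, reusing the merged frame above):

ax = merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
ax.set_xlabel("step")
ax.set_ylabel("value")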
In [8]:
# Training loss per logged epoch, plotted on a log scale.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x111b24780>
Content source: nayutaya/tensorflow-rnn-sin