In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
with open("param.yaml", "r") as file:
param = yaml.load(file.read())
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.5,
'length_of_sequences': 50,
'num_of_hidden_nodes': 2,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 2000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
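These parameters describe a small next-step predictor: one input node, two hidden nodes, one output node, sequences of 50 samples, and plain gradient descent with a learning rate of 0.5. The `optimizer` entry is a string, so the training script presumably resolves it to the matching TensorFlow class by name. A minimal sketch of that lookup, assuming TensorFlow 1.x (the variable names below are illustrative, not taken from the repository):

import tensorflow as tf

# Resolve "GradientDescentOptimizer" to tf.train.GradientDescentOptimizer and instantiate it.
optimizer_class = getattr(tf.train, param["optimizer"])
optimizer = optimizer_class(learning_rate=param["learning_rate"])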
In [3]:
train = np.load(param["train_data_path"])
train
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
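Each row is an (input, target) pair of consecutive samples from a sine wave; the first non-zero value, 0.125333..., equals sin(2π/50), which matches the 50-sample sequence length configured above. A sketch that reproduces pairs of this form, assuming that period (the true length of normal.npy is not shown here, so it is left as a placeholder):

period = 50                                      # inferred from the values above: sin(2*pi/50) = 0.125333...
steps = np.arange(200)                           # placeholder length; normal.npy's actual length is not shown
wave = np.sin(2.0 * np.pi * steps / period)
pairs = np.stack([wave[:-1], wave[1:]], axis=1)  # rows are [x(t), x(t+1)], same layout as `train`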
In [4]:
initial = np.load("initial.npy")
initial
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01])
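initial holds exactly 50 samples, one full period of the same sine wave and equal in length to length_of_sequences; it is the window used to prime the network before prediction starts. A quick consistency check against the printed values:

# Should evaluate to True given the values above (differences only at floating-point noise level).
np.allclose(initial, np.sin(2.0 * np.pi * np.arange(50) / 50.0))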
In [5]:
output = np.load("output.npy")
output
Out[5]:
array([ -2.89351046e-02, 4.70549762e-02, 8.91841352e-02,
9.56257880e-02, 6.58415854e-02, -9.80496407e-06,
-1.00909412e-01, -2.33765915e-01, -3.91262800e-01,
-5.60204148e-01, -7.23248959e-01, -8.65128160e-01,
-9.78696465e-01, -1.06517673e+00, -1.12993598e+00,
-1.17871284e+00, -1.21608496e+00, -1.24531591e+00,
-1.26863873e+00, -1.28756928e+00, -1.30314612e+00,
-1.31609678e+00, -1.32694495e+00, -1.33607936e+00,
-1.34379745e+00, -1.35033393e+00, -1.35587716e+00,
-1.36058152e+00, -1.36457598e+00, -1.36796820e+00,
-1.37084889e+00, -1.37329483e+00, -1.37537158e+00,
-1.37713468e+00, -1.37863076e+00, -1.37990046e+00,
-1.38097787e+00, -1.38189173e+00, -1.38266718e+00,
-1.38332486e+00, -1.38388264e+00, -1.38435566e+00,
-1.38475668e+00, -1.38509679e+00, -1.38538527e+00,
-1.38562977e+00, -1.38583720e+00, -1.38601303e+00,
-1.38616204e+00, -1.38628852e+00, -1.38639545e+00,
-1.38648617e+00, -1.38656342e+00, -1.38662851e+00,
-1.38668382e+00, -1.38673079e+00, -1.38677061e+00,
-1.38680422e+00, -1.38683283e+00, -1.38685703e+00,
-1.38687766e+00, -1.38689494e+00, -1.38690972e+00,
-1.38692224e+00, -1.38693273e+00, -1.38694191e+00,
-1.38694942e+00, -1.38695586e+00, -1.38696134e+00,
-1.38696587e+00, -1.38696969e+00, -1.38697302e+00,
-1.38697577e+00, -1.38697827e+00, -1.38698018e+00,
-1.38698196e+00, -1.38698339e+00, -1.38698471e+00,
-1.38698566e+00, -1.38698661e+00, -1.38698733e+00,
-1.38698792e+00, -1.38698852e+00, -1.38698924e+00,
-1.38698947e+00, -1.38698995e+00, -1.38699007e+00,
-1.38699031e+00, -1.38699043e+00, -1.38699067e+00,
-1.38699090e+00, -1.38699102e+00, -1.38699126e+00,
-1.38699138e+00, -1.38699138e+00, -1.38699150e+00,
-1.38699162e+00, -1.38699162e+00, -1.38699150e+00,
-1.38699162e+00])
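output contains 100 values, matching num_of_prediction_epochs. The prediction is presumably closed-loop: each predicted value is appended to the input window and fed back in, so early errors compound and the sequence drifts away from the sine wave, saturating near -1.387. A sketch of that feedback pattern, where predict_next is a hypothetical stand-in for one forward pass of the trained model (not a function from this repository):

window = list(initial)                                        # seed with the 50 known samples
predictions = []
for _ in range(param["num_of_prediction_epochs"]):
    y = predict_next(window[-param["length_of_sequences"]:])  # hypothetical model call
    predictions.append(y)
    window.append(y)                                          # feed the prediction back as the next input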
In [6]:
losses = np.load("losses.npy")
losses
Out[6]:
array([[ 1.00000000e+01, 3.29082876e-01],
[ 2.00000000e+01, 8.61236155e-02],
[ 3.00000000e+01, 4.93995696e-02],
[ 4.00000000e+01, 5.12662940e-02],
[ 5.00000000e+01, 4.51714173e-02],
[ 6.00000000e+01, 2.12486554e-02],
[ 7.00000000e+01, 4.31775376e-02],
[ 8.00000000e+01, 5.81898540e-02],
[ 9.00000000e+01, 2.14595608e-02],
[ 1.00000000e+02, 1.15892859e-02],
[ 1.10000000e+02, 9.14667547e-03],
[ 1.20000000e+02, 7.12994561e-02],
[ 1.30000000e+02, 9.64986905e-03],
[ 1.40000000e+02, 6.87214686e-03],
[ 1.50000000e+02, 6.12145383e-03],
[ 1.60000000e+02, 6.25809794e-03],
[ 1.70000000e+02, 6.78430796e-02],
[ 1.80000000e+02, 2.18180101e-02],
[ 1.90000000e+02, 5.35034481e-03],
[ 2.00000000e+02, 5.17852791e-03],
[ 2.10000000e+02, 4.03791061e-03],
[ 2.20000000e+02, 3.38686979e-03],
[ 2.30000000e+02, 2.87548210e-02],
[ 2.40000000e+02, 2.48847418e-02],
[ 2.50000000e+02, 4.13393183e-03],
[ 2.60000000e+02, 2.90879677e-03],
[ 2.70000000e+02, 2.63714185e-03],
[ 2.80000000e+02, 2.62779230e-03],
[ 2.90000000e+02, 2.83988216e-03],
[ 3.00000000e+02, 3.37786670e-03],
[ 3.10000000e+02, 3.47137894e-03],
[ 3.20000000e+02, 4.37675277e-03],
[ 3.30000000e+02, 1.05746724e-02],
[ 3.40000000e+02, 4.16547805e-02],
[ 3.50000000e+02, 3.62072373e-03],
[ 3.60000000e+02, 3.88795254e-03],
[ 3.70000000e+02, 2.38357717e-03],
[ 3.80000000e+02, 3.73674370e-03],
[ 3.90000000e+02, 4.93265549e-03],
[ 4.00000000e+02, 6.38453010e-03],
[ 4.10000000e+02, 1.72910723e-03],
[ 4.20000000e+02, 1.84115139e-03],
[ 4.30000000e+02, 2.08979007e-03],
[ 4.40000000e+02, 1.32999604e-03],
[ 4.50000000e+02, 1.61635608e-03],
[ 4.60000000e+02, 1.09612208e-03],
[ 4.70000000e+02, 1.38378679e-03],
[ 4.80000000e+02, 1.83142349e-03],
[ 4.90000000e+02, 2.25510169e-03],
[ 5.00000000e+02, 1.95748135e-02],
[ 5.10000000e+02, 1.39374016e-02],
[ 5.20000000e+02, 5.30189276e-03],
[ 5.30000000e+02, 4.90416866e-03],
[ 5.40000000e+02, 4.96186502e-03],
[ 5.50000000e+02, 2.17752461e-03],
[ 5.60000000e+02, 1.18150190e-03],
[ 5.70000000e+02, 9.32271476e-04],
[ 5.80000000e+02, 9.45433392e-04],
[ 5.90000000e+02, 8.78541614e-04],
[ 6.00000000e+02, 8.57671665e-04],
[ 6.10000000e+02, 2.78549921e-03],
[ 6.20000000e+02, 6.73760055e-03],
[ 6.30000000e+02, 1.00567648e-02],
[ 6.40000000e+02, 3.94475507e-03],
[ 6.50000000e+02, 1.50588749e-03],
[ 6.60000000e+02, 6.69506658e-03],
[ 6.70000000e+02, 5.26860543e-03],
[ 6.80000000e+02, 3.93547816e-03],
[ 6.90000000e+02, 2.15574214e-03],
[ 7.00000000e+02, 1.74959470e-03],
[ 7.10000000e+02, 7.99782923e-04],
[ 7.20000000e+02, 8.05387623e-04],
[ 7.30000000e+02, 8.04517127e-04],
[ 7.40000000e+02, 1.15450041e-03],
[ 7.50000000e+02, 2.69657699e-03],
[ 7.60000000e+02, 5.23904897e-03],
[ 7.70000000e+02, 4.05439781e-03],
[ 7.80000000e+02, 1.28805160e-03],
[ 7.90000000e+02, 6.51382608e-04],
[ 8.00000000e+02, 1.78131205e-03],
[ 8.10000000e+02, 2.73370440e-03],
[ 8.20000000e+02, 3.87497549e-03],
[ 8.30000000e+02, 2.48506735e-03],
[ 8.40000000e+02, 1.19281409e-03],
[ 8.50000000e+02, 8.84005742e-04],
[ 8.60000000e+02, 8.09002086e-04],
[ 8.70000000e+02, 5.18891611e-04],
[ 8.80000000e+02, 4.85806755e-04],
[ 8.90000000e+02, 4.22566605e-04],
[ 9.00000000e+02, 4.99748276e-04],
[ 9.10000000e+02, 6.73349190e-04],
[ 9.20000000e+02, 1.36621529e-03],
[ 9.30000000e+02, 2.34906259e-03],
[ 9.40000000e+02, 1.24414521e-03],
[ 9.50000000e+02, 1.06996275e-03],
[ 9.60000000e+02, 1.35857519e-03],
[ 9.70000000e+02, 1.77227485e-03],
[ 9.80000000e+02, 2.41312268e-03],
[ 9.90000000e+02, 1.39880809e-03],
[ 1.00000000e+03, 5.02648298e-04],
[ 1.01000000e+03, 7.64668162e-04],
[ 1.02000000e+03, 5.95820195e-04],
[ 1.03000000e+03, 7.06674007e-04],
[ 1.04000000e+03, 7.95079221e-04],
[ 1.05000000e+03, 9.49726906e-04],
[ 1.06000000e+03, 3.97022907e-03],
[ 1.07000000e+03, 6.18674094e-03],
[ 1.08000000e+03, 6.04170840e-03],
[ 1.09000000e+03, 3.10690980e-03],
[ 1.10000000e+03, 5.32673346e-03],
[ 1.11000000e+03, 2.84368475e-03],
[ 1.12000000e+03, 9.32524679e-04],
[ 1.13000000e+03, 8.73287034e-04],
[ 1.14000000e+03, 6.60641876e-04],
[ 1.15000000e+03, 5.34390216e-04],
[ 1.16000000e+03, 4.43657191e-04],
[ 1.17000000e+03, 3.87384964e-04],
[ 1.18000000e+03, 2.25938018e-03],
[ 1.19000000e+03, 1.27379061e-03],
[ 1.20000000e+03, 2.47914763e-03],
[ 1.21000000e+03, 1.28297147e-03],
[ 1.22000000e+03, 2.05471879e-03],
[ 1.23000000e+03, 7.87216588e-04],
[ 1.24000000e+03, 5.13212522e-04],
[ 1.25000000e+03, 4.85832541e-04],
[ 1.26000000e+03, 3.22945038e-04],
[ 1.27000000e+03, 3.17915779e-04],
[ 1.28000000e+03, 3.07918817e-04],
[ 1.29000000e+03, 6.63624727e-04],
[ 1.30000000e+03, 4.35431517e-04],
[ 1.31000000e+03, 9.13535361e-04],
[ 1.32000000e+03, 1.24266755e-03],
[ 1.33000000e+03, 3.25635914e-03],
[ 1.34000000e+03, 2.28239223e-03],
[ 1.35000000e+03, 2.69141677e-03],
[ 1.36000000e+03, 3.67952767e-03],
[ 1.37000000e+03, 6.70912862e-03],
[ 1.38000000e+03, 1.84378889e-03],
[ 1.39000000e+03, 1.00554409e-03],
[ 1.40000000e+03, 1.36158557e-03],
[ 1.41000000e+03, 1.47539051e-03],
[ 1.42000000e+03, 2.25596465e-04],
[ 1.43000000e+03, 2.67873751e-04],
[ 1.44000000e+03, 6.82484359e-04],
[ 1.45000000e+03, 1.77705975e-03],
[ 1.46000000e+03, 1.33512553e-03],
[ 1.47000000e+03, 6.82436395e-04],
[ 1.48000000e+03, 3.11202952e-04],
[ 1.49000000e+03, 3.72188981e-04],
[ 1.50000000e+03, 3.27757938e-04],
[ 1.51000000e+03, 2.86106253e-04],
[ 1.52000000e+03, 2.39151268e-04],
[ 1.53000000e+03, 3.62463150e-04],
[ 1.54000000e+03, 8.88126495e-04],
[ 1.55000000e+03, 1.00624014e-03],
[ 1.56000000e+03, 9.95489885e-04],
[ 1.57000000e+03, 1.19497138e-03],
[ 1.58000000e+03, 5.23035240e-04],
[ 1.59000000e+03, 2.90645577e-04],
[ 1.60000000e+03, 7.08664651e-04],
[ 1.61000000e+03, 1.00172963e-03],
[ 1.62000000e+03, 1.47847156e-03],
[ 1.63000000e+03, 1.65125134e-03],
[ 1.64000000e+03, 2.42329366e-03],
[ 1.65000000e+03, 5.48125617e-03],
[ 1.66000000e+03, 2.81687174e-03],
[ 1.67000000e+03, 1.20629359e-03],
[ 1.68000000e+03, 6.10824616e-04],
[ 1.69000000e+03, 4.52381530e-04],
[ 1.70000000e+03, 2.96841870e-04],
[ 1.71000000e+03, 4.06084408e-04],
[ 1.72000000e+03, 2.04487529e-04],
[ 1.73000000e+03, 1.91571671e-04],
[ 1.74000000e+03, 2.23557799e-04],
[ 1.75000000e+03, 1.87395679e-04],
[ 1.76000000e+03, 4.89564147e-04],
[ 1.77000000e+03, 6.82698737e-04],
[ 1.78000000e+03, 3.05149268e-04],
[ 1.79000000e+03, 6.92200672e-04],
[ 1.80000000e+03, 2.63523863e-04],
[ 1.81000000e+03, 3.46253597e-04],
[ 1.82000000e+03, 4.29725536e-04],
[ 1.83000000e+03, 1.92100517e-04],
[ 1.84000000e+03, 3.23592045e-04],
[ 1.85000000e+03, 3.50364629e-04],
[ 1.86000000e+03, 5.40185894e-04],
[ 1.87000000e+03, 4.39441268e-04],
[ 1.88000000e+03, 9.04918707e-04],
[ 1.89000000e+03, 7.30265398e-04],
[ 1.90000000e+03, 5.43148722e-04],
[ 1.91000000e+03, 5.62248752e-04],
[ 1.92000000e+03, 5.28857112e-04],
[ 1.93000000e+03, 3.82640428e-04],
[ 1.94000000e+03, 1.18241040e-03],
[ 1.95000000e+03, 1.06256397e-03],
[ 1.96000000e+03, 1.60182838e-03],
[ 1.97000000e+03, 5.51682059e-03],
[ 1.98000000e+03, 7.66568445e-03],
[ 1.99000000e+03, 2.95509677e-03],
[ 2.00000000e+03, 1.39434461e-03]])
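The loss was logged every 10 epochs, giving 200 rows over the 2000 training epochs configured above; column 0 is the epoch and column 1 the loss. A quick summary in the same session:

# First, last, and best recorded loss values.
losses[0, 1], losses[-1, 1], losses[:, 1].min()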
In [7]:
# Ground truth: the first column of the training data, long enough to cover the priming window plus the predictions.
train_df = pd.DataFrame(train[:len(initial) + len(output), 0], columns=["train"])
# Priming window: the 50 samples fed to the network before closed-loop prediction starts.
initial_df = pd.DataFrame(initial, columns=["initial"])
# Predictions: indexed so they start right where the priming window ends.
output_df = pd.DataFrame(output, columns=["output"], index=range(len(initial), len(initial) + len(output)))
# Stacking keeps each series on its own index range, so the plot overlays all three against the time step.
merged = pd.concat([train_df, initial_df, output_df])
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10fe909b0>
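Stacking the frames with the default axis=0 works here because each column only has data on its own index range; an equivalent and arguably clearer merge aligns the three series column-wise on their shared time-step index:

merged = pd.concat([train_df, initial_df, output_df], axis=1)  # outer-join on the index
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])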
In [8]:
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x10fe01780>
In [ ]:
Content source: nayutaya/tensorflow-rnn-sin