In [1]:
import pandas as pd
import numpy as np
import yaml
%matplotlib inline
In [2]:
# Load hyperparameters from the YAML config written by the training script.
# yaml.safe_load replaces yaml.load(...) with no Loader argument: the bare
# form can construct arbitrary Python objects from tagged YAML and is
# deprecated since PyYAML 5.1. This config is plain scalars/mappings
# (see Out[2]), so the parsed dict is identical.
with open("param.yaml", "r") as file:
    param = yaml.safe_load(file)
param
Out[2]:
{'forget_bias': 1.0,
'learning_rate': 0.1,
'length_of_sequences': 50,
'num_of_hidden_nodes': 2,
'num_of_input_nodes': 1,
'num_of_output_nodes': 1,
'num_of_prediction_epochs': 100,
'num_of_training_epochs': 1000,
'optimizer': 'GradientDescentOptimizer',
'seed': 0,
'size_of_mini_batch': 100,
'train_data_path': '../train_data/normal.npy'}
In [3]:
# Training data produced by the training run; location comes from the config.
# Rows are (input, target) pairs of consecutive sine samples (see Out[3]).
train_data_path = param["train_data_path"]
train = np.load(train_data_path)
train
Out[3]:
array([[ 0.00000000e+00, 1.25333234e-01],
[ 1.25333234e-01, 2.48689887e-01],
[ 2.48689887e-01, 3.68124553e-01],
...,
[ -3.68124553e-01, -2.48689887e-01],
[ -2.48689887e-01, -1.25333234e-01],
[ -1.25333234e-01, 3.92877345e-15]])
In [4]:
# Seed sequence used to prime the RNN before prediction — one full sine
# period of 50 samples (see Out[4]).
initial_path = "initial.npy"
initial = np.load(initial_path)
initial
Out[4]:
array([ 0.00000000e+00, 1.25333234e-01, 2.48689887e-01,
3.68124553e-01, 4.81753674e-01, 5.87785252e-01,
6.84547106e-01, 7.70513243e-01, 8.44327926e-01,
9.04827052e-01, 9.51056516e-01, 9.82287251e-01,
9.98026728e-01, 9.98026728e-01, 9.82287251e-01,
9.51056516e-01, 9.04827052e-01, 8.44327926e-01,
7.70513243e-01, 6.84547106e-01, 5.87785252e-01,
4.81753674e-01, 3.68124553e-01, 2.48689887e-01,
1.25333234e-01, -3.21624530e-16, -1.25333234e-01,
-2.48689887e-01, -3.68124553e-01, -4.81753674e-01,
-5.87785252e-01, -6.84547106e-01, -7.70513243e-01,
-8.44327926e-01, -9.04827052e-01, -9.51056516e-01,
-9.82287251e-01, -9.98026728e-01, -9.98026728e-01,
-9.82287251e-01, -9.51056516e-01, -9.04827052e-01,
-8.44327926e-01, -7.70513243e-01, -6.84547106e-01,
-5.87785252e-01, -4.81753674e-01, -3.68124553e-01,
-2.48689887e-01, -1.25333234e-01])
In [5]:
# Model predictions saved after training — num_of_prediction_epochs (100)
# values continuing the seed sequence (see Out[5]).
output_path = "output.npy"
output = np.load(output_path)
output
Out[5]:
array([ 0.0127728 , 0.1310403 , 0.23837069, 0.33634672, 0.42851773,
0.52022696, 0.61741412, 0.72337723, 0.83214319, 0.92505288,
0.98404634, 1.00737941, 1.00453079, 0.98509812, 0.95508492,
0.91750491, 0.87355244, 0.82336402, 0.7664088 , 0.70167136,
0.62774467, 0.5429157 , 0.44533435, 0.33337703, 0.20633975,
0.06550732, -0.08470863, -0.23647925, -0.38021424, -0.50787091,
-0.61556482, -0.70355403, -0.77437353, -0.83100379, -0.87587857,
-0.91055095, -0.93567252, -0.95109713, -0.95604801, -0.9493407 ,
-0.92964518, -0.89574182, -0.84671843, -0.78208768, -0.70186687,
-0.6066975 , -0.49805892, -0.37853873, -0.25198358, -0.12325796,
0.00256959, 0.12145317, 0.23149195, 0.33348361, 0.43079326,
0.52863443, 0.63227844, 0.74259043, 0.84922445, 0.93206203,
0.97830808, 0.99146831, 0.98182786, 0.95775878, 0.92407131,
0.88301539, 0.83531916, 0.78080106, 0.71867323, 0.64769053,
0.5662514 , 0.47254452, 0.36485115, 0.24213722, 0.10502604,
-0.04302344, -0.19507505, -0.34176564, -0.4743064 , -0.58755028,
-0.68074125, -0.75597692, -0.81623483, -0.86414504, -0.90150976,
-0.92922103, -0.9473393 , -0.95524442, -0.95184314, -0.9358232 ,
-0.90591633, -0.86111474, -0.80080783, -0.72486591, -0.63374114,
-0.52865475, -0.41186175, -0.28685945, -0.15828006, -0.03123623])
In [6]:
# Training-loss history recorded every 10 epochs; each row is
# (epoch, loss) — see Out[6].
losses_path = "losses.npy"
losses = np.load(losses_path)
losses
Out[6]:
array([[ 1.00000000e+01, 5.18846750e-01],
[ 2.00000000e+01, 4.83384222e-01],
[ 3.00000000e+01, 4.51294541e-01],
[ 4.00000000e+01, 3.50392312e-01],
[ 5.00000000e+01, 1.80954114e-01],
[ 6.00000000e+01, 1.19002916e-01],
[ 7.00000000e+01, 9.98966619e-02],
[ 8.00000000e+01, 6.89805001e-02],
[ 9.00000000e+01, 5.80052175e-02],
[ 1.00000000e+02, 3.88081968e-02],
[ 1.10000000e+02, 2.72250064e-02],
[ 1.20000000e+02, 1.86243150e-02],
[ 1.30000000e+02, 1.25858299e-02],
[ 1.40000000e+02, 8.33798945e-03],
[ 1.50000000e+02, 6.97205588e-03],
[ 1.60000000e+02, 5.56392595e-03],
[ 1.70000000e+02, 4.86959331e-03],
[ 1.80000000e+02, 4.32354677e-03],
[ 1.90000000e+02, 3.76959541e-03],
[ 2.00000000e+02, 2.74768611e-03],
[ 2.10000000e+02, 3.24924407e-03],
[ 2.20000000e+02, 2.73457076e-03],
[ 2.30000000e+02, 2.68376223e-03],
[ 2.40000000e+02, 2.75764009e-03],
[ 2.50000000e+02, 2.35474505e-03],
[ 2.60000000e+02, 2.21807370e-03],
[ 2.70000000e+02, 2.35458370e-03],
[ 2.80000000e+02, 2.51799799e-03],
[ 2.90000000e+02, 1.53199048e-03],
[ 3.00000000e+02, 1.79666583e-03],
[ 3.10000000e+02, 1.60613435e-03],
[ 3.20000000e+02, 1.54300476e-03],
[ 3.30000000e+02, 2.00040429e-03],
[ 3.40000000e+02, 1.93893281e-03],
[ 3.50000000e+02, 1.60032231e-03],
[ 3.60000000e+02, 1.58054230e-03],
[ 3.70000000e+02, 2.47045141e-03],
[ 3.80000000e+02, 1.95466005e-03],
[ 3.90000000e+02, 1.63034687e-03],
[ 4.00000000e+02, 1.50665920e-03],
[ 4.10000000e+02, 1.08108553e-03],
[ 4.20000000e+02, 1.19128800e-03],
[ 4.30000000e+02, 1.38448516e-03],
[ 4.40000000e+02, 1.05307845e-03],
[ 4.50000000e+02, 1.05050509e-03],
[ 4.60000000e+02, 1.53654767e-03],
[ 4.70000000e+02, 1.59023178e-03],
[ 4.80000000e+02, 1.96693325e-03],
[ 4.90000000e+02, 1.26423652e-03],
[ 5.00000000e+02, 1.09738379e-03],
[ 5.10000000e+02, 1.24295522e-03],
[ 5.20000000e+02, 1.38760242e-03],
[ 5.30000000e+02, 1.15816167e-03],
[ 5.40000000e+02, 1.12949731e-03],
[ 5.50000000e+02, 8.40070541e-04],
[ 5.60000000e+02, 1.23773236e-03],
[ 5.70000000e+02, 1.33238570e-03],
[ 5.80000000e+02, 9.42691287e-04],
[ 5.90000000e+02, 9.50353278e-04],
[ 6.00000000e+02, 1.05704169e-03],
[ 6.10000000e+02, 1.55080925e-03],
[ 6.20000000e+02, 1.68209488e-03],
[ 6.30000000e+02, 9.48672008e-04],
[ 6.40000000e+02, 1.65397371e-03],
[ 6.50000000e+02, 1.46342244e-03],
[ 6.60000000e+02, 1.08585181e-03],
[ 6.70000000e+02, 9.40324040e-04],
[ 6.80000000e+02, 9.29499744e-04],
[ 6.90000000e+02, 7.03773927e-04],
[ 7.00000000e+02, 1.24141551e-03],
[ 7.10000000e+02, 1.02793204e-03],
[ 7.20000000e+02, 1.82087615e-03],
[ 7.30000000e+02, 1.16088649e-03],
[ 7.40000000e+02, 1.07180816e-03],
[ 7.50000000e+02, 9.24449123e-04],
[ 7.60000000e+02, 9.49640176e-04],
[ 7.70000000e+02, 1.04694511e-03],
[ 7.80000000e+02, 8.09595338e-04],
[ 7.90000000e+02, 1.14679988e-03],
[ 8.00000000e+02, 8.92893295e-04],
[ 8.10000000e+02, 8.30984442e-04],
[ 8.20000000e+02, 8.64787085e-04],
[ 8.30000000e+02, 9.83023667e-04],
[ 8.40000000e+02, 8.92442593e-04],
[ 8.50000000e+02, 1.04978634e-03],
[ 8.60000000e+02, 8.84101784e-04],
[ 8.70000000e+02, 1.33903953e-03],
[ 8.80000000e+02, 9.89491469e-04],
[ 8.90000000e+02, 9.22970241e-04],
[ 9.00000000e+02, 1.30868342e-03],
[ 9.10000000e+02, 8.38359352e-04],
[ 9.20000000e+02, 7.91967090e-04],
[ 9.30000000e+02, 1.53447513e-03],
[ 9.40000000e+02, 7.60031922e-04],
[ 9.50000000e+02, 1.49538333e-03],
[ 9.60000000e+02, 7.79114314e-04],
[ 9.70000000e+02, 6.95165247e-04],
[ 9.80000000e+02, 6.81479520e-04],
[ 9.90000000e+02, 7.19470147e-04],
[ 1.00000000e+03, 7.36025395e-04]])
In [7]:
# Plot the ground-truth wave against the seed segment and the model's
# predicted continuation. The prediction frame is re-indexed to begin where
# the seed ends, so all three curves share one time axis; row-wise concat
# leaves NaN outside each series' own span, which pandas simply skips when
# plotting. Prediction is drawn as a black dashed line ("k--").
n_seed = len(initial)
n_pred = len(output)
frames = [
    pd.DataFrame(train[: n_seed + n_pred, 0], columns=["train"]),
    pd.DataFrame(initial, columns=["initial"]),
    pd.DataFrame(output, columns=["output"], index=range(n_seed, n_seed + n_pred)),
]
merged = pd.concat(frames)
merged.plot(figsize=(15, 5), grid=True, style=["-", "-", "k--"])
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10ec25898>
In [8]:
# Training-loss curve on a log scale. The losses array has columns
# (epoch, loss); plotting with x="epoch" puts the loss on the y-axis
# against the recorded epoch numbers.
losses_df = pd.DataFrame(losses, columns=["epoch", "loss"])
losses_df.plot(figsize=(15, 5), grid=True, logy=True, x="epoch")
Out[8]:
<matplotlib.axes._subplots.AxesSubplot at 0x10ebf8940>
In [ ]:
Content source: nayutaya/tensorflow-rnn-sin
Similar notebooks: