In [3]:
from keras import layers
from keras.models import Model
import keras
import numpy as np

# Same seeded initializer for both GRUs, so their weights start out identical.
init = keras.initializers.RandomNormal(seed=1)
inp = layers.Input((5, 1))
# return_sequences=True emits the GRU output at every timestep; False emits only the last one.
out1 = layers.GRU(3, return_sequences=True, kernel_initializer=init, recurrent_initializer=init)(inp)
out2 = layers.GRU(3, return_sequences=False, kernel_initializer=init, recurrent_initializer=init)(inp)
m1 = Model(inp, [out1, out2])
Using TensorFlow backend.
In [4]:
in_data = np.array([0.1,0.2,0.3,0.4,0.5]).reshape((1,5,1))
m1.predict(in_data)
Out[4]:
[array([[[ 0.00148446, -0.00529137, -0.001807  ],
         [ 0.00379733, -0.01328251, -0.00453949],
         [ 0.00656812, -0.02265439, -0.00774633],
         [ 0.00958988, -0.03273204, -0.01119637],
         [ 0.01274805, -0.04316805, -0.014771  ]]], dtype=float32),
 array([[ 0.01274805, -0.04316805, -0.014771  ]], dtype=float32)]
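Because both GRU layers were built from the same seed-1 initializer, the only difference between them is return_sequences, so the last row of the sequence output should equal the vector output above. A quick sanity check along those lines (reusing m1 and in_data; this is not a cell from the original notebook):

seq_out, last_out = m1.predict(in_data)
# The final timestep of the full sequence should match the single-vector output.
np.testing.assert_allclose(seq_out[0, -1], last_out[0], rtol=1e-5)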
In [19]:
# m2 reuses the GRU layer behind out2, trained to predict the next three values of the series.
m2 = Model(inp, out2)
m2.compile(loss="mse", optimizer="adam")
m2.fit(in_data, np.array([0.6, 0.7, 0.8]).reshape((1, 3)), epochs=100)
Epoch 1/100
1/1 [==============================] - 1s 893ms/step - loss: 4.8180e-08
Epoch 2/100
1/1 [==============================] - 0s 6ms/step - loss: 7.0771e-07
Epoch 3/100
1/1 [==============================] - 0s 7ms/step - loss: 1.6701e-06
... (epochs 4-98 omitted; the loss falls steadily into the e-11 range) ...
Epoch 99/100
1/1 [==============================] - 0s 5ms/step - loss: 5.8906e-11
Epoch 100/100
1/1 [==============================] - 0s 5ms/step - loss: 4.3218e-11
Out[19]:
<keras.callbacks.History at 0x12a69ff28>
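fit() returns the keras.callbacks.History object shown above. A small sketch (with a hypothetical variable name `history`) of keeping that object to read the loss curve instead of scrolling the per-epoch log:

# Keep the History returned by fit(); history.history["loss"] has one entry per epoch.
history = m2.fit(in_data, np.array([0.6, 0.7, 0.8]).reshape((1, 3)),
                 epochs=100, verbose=0)
print(history.history["loss"][0], history.history["loss"][-1])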
In [20]:
m2.predict(in_data)
Out[20]:
array([[0.6000006 , 0.70000726, 0.8000034 ]], dtype=float32)
In [21]:
m2.predict(np.array([0.4,0.5,0.6,0.7,0.8]).reshape((1,5,1)))
Out[21]:
array([[0.6888099, 0.7770717, 0.871939 ]], dtype=float32)
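If the model had learned the "predict the next three values" pattern rather than memorizing the single training pair, the shifted input above would map to roughly [0.9, 1.0, 1.1]. A hedged check of that gap (the expected array is an assumption about the intended pattern, not something the notebook states):

shifted = np.array([0.4, 0.5, 0.6, 0.7, 0.8]).reshape((1, 5, 1))
expected = np.array([[0.9, 1.0, 1.1]])  # assumed continuation of the series
pred = m2.predict(shifted)
print(np.abs(pred - expected).max())  # sizable gap: one training example does not generalize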
Content source: lukas/scikit-class