In [1]:
import numpy as np
from keras.models import Model
from keras.layers import Input
from keras.layers.recurrent import LSTM
from keras import backend as K
import json
from collections import OrderedDict


Using TensorFlow backend.

In [2]:
def format_decimal(arr, places=6):
    return [round(x * 10**places) / 10**places for x in arr]

In [3]:
# Ordered container so the exported JSON keeps test cases in insertion order.
DATA = OrderedDict()

LSTM

[recurrent.LSTM.0] units=4, activation='tanh', recurrent_activation='hard_sigmoid'

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase


In [4]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid')

# Single-layer functional model: Input -> LSTM.
layer_0 = Input(shape=data_in_shape)
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3000 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))  # wrap in a batch of size 1
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.0'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [0.021587, 0.171208, 0.42254, -0.348744, 0.040626, 0.479164, 0.684427, -0.694989, 0.064855, 0.056754, -0.076463, 0.184194, -0.793392, 0.627222, -0.855503, -0.362412, 0.855828, 0.681804, -0.326362, -0.218483, 0.302837, 0.321287, -0.052374, 0.066035, 0.80225, 0.994312, 0.829629, -0.750368, 0.311414, 0.458415, 0.51261, 0.022764, -0.828295, 0.929486, 0.472513, -0.651351, -0.39246, -0.263827, -0.29026, 0.315177, -0.824611, -0.228372, 0.696123, -0.8348, 0.96957, -0.042386, -0.39237, -0.273751, 0.318172, -0.262291, -0.419734, 0.824398, -0.960246, 0.882888, 0.226625, 0.422804, -0.061766, -0.273797, -0.740205, -0.786523, 0.340772, -0.485411, -0.942924, -0.698791, -0.459364, 0.074867, 0.122774, 0.446639, -0.23748, 0.97628, 0.836324, 0.935033, 0.293947, -0.682098, 0.954772, -0.073279, 0.501037, 0.917773, 0.673993, -0.984998, 0.610514, 0.393294, -0.12069, -0.443252, -0.812296, 0.240061, -0.601492, 0.395082, 0.919933, 0.731383, 0.489317, -0.139417, 0.662004, -0.563, -0.746144, -0.502416]
U shape: (4, 16)
U: [-0.486398, -0.924962, 0.924244, -0.099505, 0.697209, -0.988196, 0.312034, -0.82215, -0.450764, -0.702362, 0.86183, -0.505312, 0.224025, -0.210654, -0.378863, 0.270893, -0.812905, -0.668551, -0.447104, -0.95127, 0.740875, -0.871631, 0.889087, 0.883093, -0.980509, 0.98598, -0.645445, -0.873458, 0.401564, 0.718666, 0.454424, -0.149874, -0.545098, -0.362023, 0.681391, -0.626403, -0.090411, -0.773531, 0.402304, -0.775595, 0.013104, 0.861222, 0.47835, -0.616243, 0.159755, -0.29888, -0.784858, -0.419486, -0.611265, 0.750941, 0.906839, 0.756238, -0.144673, 0.857577, -0.233347, 0.151998, -0.23287, -0.323288, -0.554204, 0.631463, 0.228657, -0.40717, -0.938694, -0.797066]
b shape: (16,)
b: [-0.752966, 0.132946, 0.249025, -0.746897, -0.338058, 0.40026, -0.588537, 0.422014, -0.2607, 0.770741, -0.202932, -0.948132, 0.566542, 0.482049, -0.113059, 0.717462]

in shape: (3, 6)
in: [-0.753325, -0.20832, -0.757948, -0.844138, 0.220283, -0.381538, -0.597024, 0.401179, 0.139628, -0.718899, 0.646891, 0.326223, 0.856931, -0.130292, 0.924336, 0.209607, 0.747908, -0.765531]
out shape: (4,)
out: [0.025157, 0.340862, 0.236587, -0.102168]

[recurrent.LSTM.1] units=5, activation='sigmoid', recurrent_activation='sigmoid'

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase


In [5]:
data_in_shape = (8, 5)  # (timesteps, features)
rnn = LSTM(5, activation='sigmoid', recurrent_activation='sigmoid')

# Single-layer functional model: Input -> LSTM.
layer_0 = Input(shape=data_in_shape)
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3100 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))  # wrap in a batch of size 1
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.1'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (5, 20)
W: [0.904105, -0.877164, 0.33871, 0.812244, -0.802633, 0.595832, -0.981528, 0.378119, 0.41493, -0.11458, -0.692809, 0.875448, 0.318268, -0.627292, 0.941065, 0.916899, -0.78002, 0.54744, -0.282447, -0.214613, -0.087847, -0.404508, -0.411046, -0.425703, -0.603508, -0.541301, 0.056309, 0.082118, -0.687277, -0.231217, -0.646208, 0.668539, -0.081893, 0.495785, 0.63189, 0.027142, 0.605947, 0.346434, 0.995725, 0.061962, 0.471127, -0.310166, -0.229217, 0.682301, 0.219002, 0.503352, 0.169522, -0.959504, -0.231462, 0.500861, -0.379661, 0.986477, 0.827636, 0.832743, 0.415983, -0.240876, -0.141465, 0.532491, 0.428981, -0.15624, -0.473962, -0.681536, 0.673815, -0.180061, -0.575885, -0.913771, 0.743022, -0.636367, 0.062211, 0.037906, -0.306191, -0.537631, 0.189318, 0.951994, 0.840977, 0.699095, 0.502426, 0.422765, -0.182983, 0.412705, 0.983698, -0.124156, -0.774298, -0.073847, 0.21168, -0.355227, -0.436294, -0.676586, -0.44021, -0.947977, -0.173704, -0.207742, 0.349649, -0.747737, -0.500392, -0.401987, -0.876118, -0.913124, -0.895117, -0.499716]
U shape: (5, 20)
U: [-0.708522, -0.706998, 0.233695, 0.906727, -0.986561, 0.797093, -0.065787, 0.680991, 0.839202, 0.009629, -0.39912, -0.935261, 0.187645, 0.505894, -0.292493, 0.909563, -0.869531, -0.741938, -0.617129, -0.750299, 0.78066, -0.489417, 0.306455, -0.684443, -0.097986, 0.58979, 0.404458, 0.624373, 0.117453, -0.006815, -0.35842, 0.662326, -0.890617, 0.891066, -0.042107, 0.641752, -0.472995, -0.368807, -0.097789, 0.670207, 0.107198, 0.882032, 0.464538, -0.719207, -0.405612, -0.825646, -0.883975, 0.714731, -0.537945, 0.241298, -0.17753, -0.476467, 0.538848, -0.283935, 0.5183, -0.121804, -0.585215, 0.265924, -0.141693, 0.56808, 0.744637, 0.487378, -0.041827, -0.359161, 0.88678, 0.520241, 0.732665, 0.372201, 0.88503, -0.936812, -0.730826, -0.157315, -0.62689, 0.358158, -0.426776, 0.529963, 0.210689, 0.280205, 0.275805, 0.338371, 0.021063, -0.783356, -0.666266, -0.336966, 0.091107, -0.047504, 0.349973, -0.350952, 0.086999, 0.702717, -0.327907, 0.604804, -0.85941, -0.471312, 0.183139, 0.42747, -0.144995, -0.761184, 0.054661, -0.596663]
b shape: (20,)
b: [0.351652, 0.052999, 0.049473, -0.569237, 0.204228, 0.779938, 0.717971, 0.446785, -0.898506, 0.587256, 0.338361, -0.234015, -0.927665, -0.185907, -0.162251, 0.530251, -0.003898, 0.552709, -0.050923, -0.268882]

in shape: (8, 5)
in: [0.511747, -0.783125, -0.492879, 0.800655, 0.572511, -0.487136, 0.46024, -0.895998, -0.495468, 0.11452, 0.693938, -0.379336, 0.005202, 0.494934, -0.225437, -0.246465, -0.849792, 0.376289, -0.471726, -0.069163, 0.280505, -0.530568, -0.321166, -0.245045, -0.464593, -0.402877, -0.359471, -0.789757, 0.104967, -0.51705, -0.926441, 0.00999, -0.018231, 0.352377, -0.509285, 0.86149, -0.758643, 0.776341, -0.534127, 0.018696]
out shape: (5,)
out: [0.621899, 0.05448, 0.369449, 0.172543, 0.193786]

[recurrent.LSTM.2] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase


In [6]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=True)

# Single-layer functional model: Input -> LSTM (output per timestep).
layer_0 = Input(shape=data_in_shape)
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3110 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))  # wrap in a batch of size 1
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.2'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [0.737993, -0.987905, 0.734592, 0.869378, -0.219359, 0.16681, 0.618012, 0.167843, -0.129422, -0.743455, 0.398148, 0.849206, -0.941554, 0.796703, 0.939108, 0.23178, -0.652339, 0.019098, -0.925936, -0.892392, 0.785007, 0.794477, -0.682383, 0.542452, 0.94182, 0.523941, -0.490922, 0.052736, -0.231486, 0.352287, -0.533683, 0.677718, -0.068964, 0.957188, -0.825946, 0.288453, -0.059563, -0.153802, -0.838862, 0.91015, 0.444582, -0.760608, -0.767185, 0.527579, -0.47092, -0.403388, -0.82798, 0.396382, -0.54968, -0.110079, 0.289935, 0.643334, 0.271626, -0.742704, -0.878148, 0.445921, 0.694483, 0.305741, -0.036408, 0.793896, -0.765091, -0.143875, -0.292066, 0.645172, 0.420077, -0.865549, -0.694169, -0.987636, -0.075494, 0.153327, 0.301262, -0.547151, -0.805897, 0.224197, -0.449504, -0.740675, -0.562222, -0.078358, 0.678554, 0.588043, 0.468169, -0.552233, -0.293209, -0.255212, -0.600609, -0.920394, -0.241567, 0.638241, 0.069677, 0.550886, -0.668446, -0.206364, -0.836454, -0.679019, -0.579047, -0.194415]
U shape: (4, 16)
U: [-0.80119, -0.093136, -0.781478, 0.664981, 0.81755, 0.218071, -0.494078, 0.765253, 0.73909, -0.631062, 0.886984, -0.606791, -0.153821, -0.772063, 0.758903, -0.855796, -0.307768, 0.947849, 0.654099, 0.100526, -0.662352, -0.969299, 0.120011, -0.356979, 0.062594, 0.727154, -0.336611, -0.870729, -0.949392, -0.881842, 0.731806, 0.361095, -0.168111, 0.835201, 0.10262, -0.76506, -0.680559, 0.56447, 0.52546, 0.503997, -0.947011, -0.578522, 0.289283, 0.649655, 0.453943, 0.34202, 0.956858, 0.76988, -0.110212, -0.900381, -0.097136, 0.805348, -0.153694, 0.672428, 0.897543, 0.168938, 0.991301, 0.141932, -0.530237, -0.807775, -0.910187, -0.445946, -0.339299, -0.150569]
b shape: (16,)
b: [-0.70843, 0.498261, -0.623474, 0.535158, 0.001735, -0.057691, 0.831795, 0.950278, 0.786296, -0.004535, 0.449666, 0.784199, -0.625641, 0.054766, 0.765356, -0.434406]

in shape: (3, 6)
in: [-0.339007, -0.951627, 0.257882, 0.846312, 0.204243, -0.154216, -0.573902, -0.955183, -0.686984, 0.400696, 0.188089, -0.347584, 0.34867, 0.149636, 0.5524, -0.971856, 0.820963, 0.282971]
out shape: (3, 4)
out: [0.048062, -0.054377, 0.15781, 0.246894, 0.013608, 0.039657, 0.503141, 0.094743, -0.075479, -0.160097, 0.444001, 0.14916]

[recurrent.LSTM.3] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=False, go_backwards=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase


In [7]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=False, go_backwards=True)

# Single-layer functional model: Input -> LSTM (sequence processed in reverse).
layer_0 = Input(shape=data_in_shape)
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3120 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))  # wrap in a batch of size 1
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.3'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.399353, -0.895862, -0.41293, 0.819636, 0.787156, 0.259826, -0.355711, -0.12699, 0.624865, -0.033767, 0.153665, 0.399579, -0.044954, -0.764389, -0.129632, -0.944864, 0.635317, -0.991724, 0.731262, -0.845773, 0.244354, -0.555815, -0.851347, 0.796695, 0.28536, -0.380158, -0.928391, -0.958666, 0.718092, -0.864773, 0.895271, 0.084197, 0.221055, -0.474646, 0.552902, -0.253556, 0.516427, 0.711978, 0.357177, 0.453715, -0.83785, -0.762261, -0.664082, 0.697361, -0.80853, -0.142768, -0.443145, 0.349797, 0.966243, -0.834386, 0.893595, -0.617738, 0.58295, -0.435765, 0.549147, 0.388394, 0.832709, -0.472379, -0.39995, 0.803289, -0.892217, -0.631879, 0.632926, -0.006492, -0.08419, 0.277637, -0.309461, -0.873106, -0.017556, 0.394943, -0.196936, 0.197863, -0.987893, -0.418935, 0.62081, -0.631432, 0.603959, -0.28261, 0.669425, 0.821414, -0.811792, -0.052494, -0.478485, 0.728073, 0.02656, 0.588014, 0.198001, 0.218637, -0.611665, -0.776832, 0.921578, 0.12272, -0.159767, 0.396714, 0.430573, 0.765812]
U shape: (4, 16)
U: [0.278761, -0.085793, -0.689403, -0.817458, -0.227681, -0.564636, 0.716448, -0.000751, 0.663013, 0.268259, 0.431448, 0.576852, 0.847214, 0.343712, 0.18475, 0.892598, 0.410263, -0.456105, 0.129722, 0.32196, 0.822134, -0.828802, -0.718326, -0.654823, 0.711797, 0.256293, -0.34323, 0.226173, 0.974305, 0.532244, 0.517144, -0.341975, 0.805438, 0.809611, -0.549865, -0.943035, -0.934518, -0.875885, 0.626661, -0.938315, 0.129619, -0.022854, 0.541641, -0.558275, -0.508106, 0.233078, -0.253346, 0.6672, -0.918203, 0.223328, -0.146988, 0.010386, -0.998701, -0.643875, 0.011375, -0.081274, 0.063804, -0.26994, -0.0854, -0.032498, -0.524119, -0.988964, -0.941631, -0.483964]
b shape: (16,)
b: [0.012297, 0.389123, 0.399996, -0.507058, -0.677136, -0.133757, -0.147228, 0.742572, -0.879183, 0.562181, -0.147981, 0.45699, -0.939416, 0.814432, -0.153507, 0.336024]

in shape: (3, 6)
in: [-0.15179, -0.805714, -0.961919, 0.939672, -0.406803, -0.396507, 0.093769, 0.331408, -0.851624, -0.624355, 0.286918, 0.192192, -0.611075, -0.272506, -0.507912, 0.924403, -0.098821, 0.732296]
out shape: (4,)
out: [0.081007, 0.497764, 0.132727, 0.118248]

[recurrent.LSTM.4] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=True, go_backwards=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase


In [8]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=True, go_backwards=True)

# Single-layer functional model: Input -> LSTM (reversed, output per timestep).
layer_0 = Input(shape=data_in_shape)
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
# NOTE: same seed base (3120) as case .3, so weights and input match that case.
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3120 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))  # wrap in a batch of size 1
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.4'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.399353, -0.895862, -0.41293, 0.819636, 0.787156, 0.259826, -0.355711, -0.12699, 0.624865, -0.033767, 0.153665, 0.399579, -0.044954, -0.764389, -0.129632, -0.944864, 0.635317, -0.991724, 0.731262, -0.845773, 0.244354, -0.555815, -0.851347, 0.796695, 0.28536, -0.380158, -0.928391, -0.958666, 0.718092, -0.864773, 0.895271, 0.084197, 0.221055, -0.474646, 0.552902, -0.253556, 0.516427, 0.711978, 0.357177, 0.453715, -0.83785, -0.762261, -0.664082, 0.697361, -0.80853, -0.142768, -0.443145, 0.349797, 0.966243, -0.834386, 0.893595, -0.617738, 0.58295, -0.435765, 0.549147, 0.388394, 0.832709, -0.472379, -0.39995, 0.803289, -0.892217, -0.631879, 0.632926, -0.006492, -0.08419, 0.277637, -0.309461, -0.873106, -0.017556, 0.394943, -0.196936, 0.197863, -0.987893, -0.418935, 0.62081, -0.631432, 0.603959, -0.28261, 0.669425, 0.821414, -0.811792, -0.052494, -0.478485, 0.728073, 0.02656, 0.588014, 0.198001, 0.218637, -0.611665, -0.776832, 0.921578, 0.12272, -0.159767, 0.396714, 0.430573, 0.765812]
U shape: (4, 16)
U: [0.278761, -0.085793, -0.689403, -0.817458, -0.227681, -0.564636, 0.716448, -0.000751, 0.663013, 0.268259, 0.431448, 0.576852, 0.847214, 0.343712, 0.18475, 0.892598, 0.410263, -0.456105, 0.129722, 0.32196, 0.822134, -0.828802, -0.718326, -0.654823, 0.711797, 0.256293, -0.34323, 0.226173, 0.974305, 0.532244, 0.517144, -0.341975, 0.805438, 0.809611, -0.549865, -0.943035, -0.934518, -0.875885, 0.626661, -0.938315, 0.129619, -0.022854, 0.541641, -0.558275, -0.508106, 0.233078, -0.253346, 0.6672, -0.918203, 0.223328, -0.146988, 0.010386, -0.998701, -0.643875, 0.011375, -0.081274, 0.063804, -0.26994, -0.0854, -0.032498, -0.524119, -0.988964, -0.941631, -0.483964]
b shape: (16,)
b: [0.012297, 0.389123, 0.399996, -0.507058, -0.677136, -0.133757, -0.147228, 0.742572, -0.879183, 0.562181, -0.147981, 0.45699, -0.939416, 0.814432, -0.153507, 0.336024]

in shape: (3, 6)
in: [-0.15179, -0.805714, -0.961919, 0.939672, -0.406803, -0.396507, 0.093769, 0.331408, -0.851624, -0.624355, 0.286918, 0.192192, -0.611075, -0.272506, -0.507912, 0.924403, -0.098821, 0.732296]
out shape: (3, 4)
out: [-0.039869, 0.051783, 0.211034, 0.209157, -0.164133, 0.355645, 0.170262, -0.111856, 0.081007, 0.497764, 0.132727, 0.118248]

[recurrent.LSTM.5] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=False, go_backwards=False, stateful=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase

To test statefulness, model.predict is run twice


In [9]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=False, go_backwards=False, stateful=True)

# Stateful layers require a fixed batch size, hence batch_shape with batch=1.
layer_0 = Input(batch_shape=(1, *data_in_shape))
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3130 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
# Predict twice: stateful=True carries hidden/cell state from the first call
# into the second, so the recorded output reflects two passes over data_in.
result = model.predict(np.array([data_in]))
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.5'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.8498, 0.430774, 0.873073, 0.048923, 0.334624, -0.535482, -0.161324, 0.902336, 0.280753, 0.753689, -0.15467, -0.322404, -0.590549, 0.777274, -0.571364, 0.649152, 0.707209, -0.986659, 0.520748, -0.281583, -0.017415, 0.200441, -0.065796, 0.930076, -0.979278, -0.076096, -0.253233, 0.534729, -0.456539, -0.328327, 0.666324, -0.550168, 0.572946, 0.258097, 0.278902, 0.88237, 0.841026, 0.593731, 0.439719, 0.658633, 0.210103, -0.158248, -0.219485, 0.96215, -0.216797, 0.042074, 0.712456, 0.576437, 0.236331, 0.209346, -0.107554, -0.319436, 0.803039, 0.795168, 0.836552, 0.929391, -0.93503, 0.51693, -0.935837, 0.414736, -0.504766, -0.961246, 0.374652, -0.910804, 0.758076, 0.428739, -0.975447, -0.8947, -0.577597, 0.778909, -0.160486, 0.35176, 0.952946, 0.375849, 0.091075, 0.354457, -0.380646, 0.743773, 0.855056, 0.8086, -0.93462, -0.429751, 0.38859, 0.689388, 0.625022, -0.153817, -0.063389, -0.54276, 0.354462, 0.464064, -0.476255, -0.444326, -0.552878, 0.233093, -0.71522, 0.095086]
U shape: (4, 16)
U: [0.910033, -0.779975, 0.603546, 0.156029, 0.870504, -0.776493, 0.174083, -0.284585, -0.547467, 0.925727, 0.158879, -0.22124, 0.076485, -0.231166, 0.948932, 0.470572, 0.286061, -0.787186, -0.599283, -0.787769, 0.119607, 0.030586, 0.666647, 0.792727, 0.886104, 0.416546, -0.182666, 0.377953, 0.602132, 0.350282, 0.731257, 0.97233, -0.932709, 0.712936, 0.084777, -0.102671, 0.973234, -0.457406, 0.203686, -0.217829, 0.012293, -0.00983, -0.582936, -0.182595, -0.143616, -0.94615, -0.95671, -0.98114, 0.455458, -0.05547, 0.856833, 0.598765, 0.719789, 0.5325, -0.56002, 0.463818, 0.560565, 0.143572, 0.488289, 0.655092, 0.964739, -0.081963, -0.019162, -0.802279]
b shape: (16,)
b: [0.614807, 0.771408, 0.865982, -0.307693, 0.691855, 0.303404, -0.819509, 0.738984, -0.765253, 0.587616, -0.307639, -0.344203, 0.615247, 0.685789, 0.293608, 0.28084]

in shape: (3, 6)
in: [-0.460855, 0.880207, 0.776946, -0.841116, 0.958348, 0.53474, -0.207934, -0.251826, -0.675944, -0.37848, 0.523187, -0.75506, -0.702952, -0.372328, 0.280085, 0.207177, 0.895026, -0.528682]
out shape: (4,)
out: [0.375903, 0.458389, 0.112985, 0.199426]

[recurrent.LSTM.6] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=True, go_backwards=False, stateful=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase

To test statefulness, model.predict is run twice


In [10]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=True, go_backwards=False, stateful=True)

# Stateful layers require a fixed batch size, hence batch_shape with batch=1.
layer_0 = Input(batch_shape=(1, *data_in_shape))
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3140 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
# Predict twice: stateful=True carries hidden/cell state from the first call
# into the second, so the recorded output reflects two passes over data_in.
result = model.predict(np.array([data_in]))
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.6'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.226446, 0.727963, -0.947492, -0.480227, 0.771183, 0.280201, -0.775829, 0.626171, 0.68377, 0.166377, 0.466782, 0.160253, -0.566836, -0.640068, 0.509834, -0.724127, 0.635193, -0.966521, 0.908026, -0.71012, 0.441452, -0.409814, 0.564184, -0.538442, -0.912846, 0.79386, 0.069924, 0.461391, -0.54725, 0.302492, -0.857499, -0.253477, -0.266564, -0.112861, 0.759745, 0.600893, 0.355036, -0.527019, 0.171414, -0.676096, -0.896771, -0.999243, -0.601509, 0.130658, -0.609191, 0.762231, 0.62638, 0.3142, 0.070571, -0.063705, -0.079421, -0.069564, 0.196045, 0.16474, -0.40898, -0.091101, -0.912095, -0.872325, 0.751147, -0.167119, 0.550729, 0.135446, -0.55051, 0.965996, 0.395989, -0.689458, 0.03557, 0.885503, -0.49227, -0.798626, 0.38071, 0.289817, 0.813399, 0.885651, 0.382049, -0.955529, -0.649721, 0.924003, 0.803036, -0.437818, -0.839386, -0.673986, -0.51929, 0.368413, -0.547194, 0.748589, 0.894908, 0.395991, 0.000263, 0.114199, -0.525642, -0.065138, -0.84562, 0.775626, 0.722957, 0.760239]
U shape: (4, 16)
U: [-0.819411, 0.247027, -0.205291, 0.667737, 0.682253, -0.502593, -0.57604, 0.585538, 0.811808, -0.133528, -0.851471, 0.808487, 0.777171, 0.436317, 0.023861, -0.226522, -0.19441, -0.69482, -0.047086, 0.580698, 0.770829, 0.327904, 0.855653, -0.087931, 0.724921, -0.663294, 0.221178, -0.873578, -0.29845, -0.307426, -0.309536, -0.47405, 0.508346, 0.240965, 0.17208, -0.697697, 0.304168, 0.840282, 0.121316, -0.028624, -0.072513, -0.514004, 0.452095, 0.792413, 0.877921, -0.234592, -0.255257, -0.034971, -0.692105, 0.775637, 0.483492, -0.045051, -0.811689, -0.791119, 0.942962, -0.331172, -0.740263, 0.311253, 0.807421, 0.119114, -0.080466, -0.615076, 0.521326, 0.240488]
b shape: (16,)
b: [0.365624, 0.066587, -0.385883, -0.271312, 0.695525, 0.255661, -0.814251, -0.1385, 0.346522, -0.204546, -0.082548, 0.189536, 0.250389, -0.4052, 0.633127, 0.660295]

in shape: (3, 6)
in: [0.878902, 0.312992, 0.135887, 0.596807, 0.10873, -0.517602, 0.111939, 0.914438, -0.340345, 0.435448, -0.646174, -0.038401, 0.351549, 0.332486, 0.43771, 0.466788, 0.772019, -0.28885]
out shape: (3, 4)
out: [-0.266918, -0.099913, 0.197515, 0.016451, -0.474102, -0.073446, 0.113511, 0.045791, -0.274091, -0.060948, 0.366839, -0.118142]

[recurrent.LSTM.7] units=4, activation='tanh', recurrent_activation='hard_sigmoid', return_sequences=False, go_backwards=True, stateful=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase

To test statefulness, model.predict is run twice


In [11]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid',
           return_sequences=False, go_backwards=True, stateful=True)

# Stateful layers require a fixed batch size, hence batch_shape with batch=1.
layer_0 = Input(batch_shape=(1, *data_in_shape))
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3150 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U', 'b']  # kernel, recurrent kernel, bias
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
# Predict twice: stateful=True carries hidden/cell state from the first call
# into the second, so the recorded output reflects two passes over data_in.
result = model.predict(np.array([data_in]))
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.7'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.583267, -0.821648, 0.597127, -0.06219, 0.097115, 0.59847, -0.178556, -0.822068, -0.219792, 0.331711, -0.374065, 0.937862, -0.971386, -0.429613, -0.947444, 0.912872, -0.596752, -0.409293, -0.946332, 0.321693, -0.539954, -0.499509, -0.100364, -0.075292, -0.761363, -0.756431, 0.412417, -0.890623, 0.599306, 0.130664, -0.381465, 0.086817, 0.17249, -0.14002, 0.793208, -0.975779, -0.821004, 0.57001, 0.945065, 0.247593, 0.966075, 0.684202, 0.172511, 0.433066, -0.360727, 0.10001, -0.688926, -0.317165, 0.939906, -0.678243, -0.311967, -0.274828, -0.297329, -0.47057, 0.680525, -0.581226, 0.608076, -0.350015, 0.704833, -0.023262, -0.863598, -0.629018, -0.12579, 0.112848, 0.606962, -0.105406, 0.873613, -0.270182, -0.118657, -0.443719, -0.651996, 0.973474, 0.923547, 0.110465, 0.467221, -0.847588, -0.623232, -0.239494, -0.342345, 0.240898, 0.366699, -0.411542, -0.072619, 0.090528, -0.093493, -0.999229, -0.610555, -0.884697, 0.9016, -0.792493, 0.567794, -0.073171, -0.970286, 0.863006, 0.473766, -0.464347]
U shape: (4, 16)
U: [0.488258, 0.168768, 0.947903, -0.817885, 0.490368, -0.652774, -0.683076, 0.821928, -0.948643, 0.995002, -0.810977, -0.048727, -0.829477, -0.164787, -0.577251, -0.262461, 0.728068, 0.946692, -0.873341, 0.779233, -0.154365, -0.206354, -0.258879, -0.658548, -0.692589, -0.381312, -0.285632, 0.311995, 0.089257, -0.79229, 0.124943, -0.313979, -0.871547, -0.655183, 0.24746, 0.951564, -0.255144, 0.517897, 0.380996, -0.825304, -0.310744, -0.738195, -0.663784, 0.407865, 0.913353, 0.607378, -0.615646, -0.510737, -0.756113, 0.451846, 0.531441, -0.729966, 0.559688, -0.458308, -0.750312, 0.271233, 0.752021, -0.436684, -0.960433, -0.448696, -0.736677, 0.01241, 0.401582, -0.744658]
b shape: (16,)
b: [-0.013069, 0.192357, 0.917856, 0.538656, -0.286434, -0.035023, 0.43928, -0.979826, 0.503243, 0.676672, 0.782223, -0.735866, 0.600414, -0.725464, -0.417174, -0.395111]

in shape: (3, 6)
in: [-0.4464, -0.419362, -0.29416, 0.6744, -0.447755, -0.504842, 0.980072, 0.916908, -0.228844, -0.424255, -0.6311, 0.810461, 0.542148, -0.226868, 0.316039, -0.029251, -0.764114, -0.807565]
out shape: (4,)
out: [-0.003723, 0.087683, 0.269376, -0.119548]

[recurrent.LSTM.8] units=4, activation='tanh', recurrent_activation='hard_sigmoid', use_bias=False, return_sequences=True, go_backwards=True, stateful=True

Note: dropout (formerly dropout_W) and recurrent_dropout (formerly dropout_U) are only applied during the training phase

To test statefulness, model.predict is run twice


In [12]:
data_in_shape = (3, 6)  # (timesteps, features)
rnn = LSTM(4, activation='tanh', recurrent_activation='hard_sigmoid', use_bias=False,
           return_sequences=True, go_backwards=True, stateful=True)

# Stateful layers require a fixed batch size, hence batch_shape with batch=1.
layer_0 = Input(batch_shape=(1, *data_in_shape))
layer_1 = rnn(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# set weights to random (use seed for reproducibility)
weights = []
for i, w in enumerate(model.get_weights()):
    np.random.seed(3160 + i)  # distinct fixed seed per weight tensor
    weights.append(2 * np.random.random(w.shape) - 1)  # uniform in [-1, 1)
model.set_weights(weights)
weight_names = ['W', 'U']  # kernel and recurrent kernel only (use_bias=False)
for w_i, w_name in enumerate(weight_names):
    print('{} shape:'.format(w_name), weights[w_i].shape)
    print('{}:'.format(w_name), format_decimal(weights[w_i].ravel().tolist()))

# Random input is drawn after the last seeding above, so it is reproducible too.
data_in = 2 * np.random.random(data_in_shape) - 1
# Predict twice: stateful=True carries hidden/cell state from the first call
# into the second, so the recorded output reflects two passes over data_in.
result = model.predict(np.array([data_in]))
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

# Record input, weights, and expected output as a Keras.js test fixture.
DATA['recurrent.LSTM.8'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


W shape: (6, 16)
W: [-0.830074, -0.390926, -0.155379, -0.175033, 0.339507, -0.898578, -0.413653, -0.783312, 0.390054, 0.291915, -0.639546, -0.358026, -0.299562, 0.530356, 0.699093, -0.657777, -0.805037, -0.737004, -0.533156, -0.02555, 0.961913, 0.354405, 0.414537, -0.856747, -0.087681, -0.779066, -0.062943, -0.787575, -0.140881, 0.475163, 0.832274, 0.032463, 0.093214, 0.293965, -0.721175, 0.076291, 0.887442, -0.069078, 0.771404, -0.558691, -0.535376, 0.985132, 0.501264, 0.081917, 0.208323, -0.556763, 0.959328, -0.628823, -0.67348, -0.134795, 0.741077, 0.048041, -0.253542, -0.073037, -0.446174, 0.998574, 0.720061, 0.368172, 0.524055, 0.658088, -0.303247, 0.777888, -0.498292, -0.57091, -0.784288, 0.360542, -0.22312, -0.54481, -0.109966, -0.828631, -0.046113, 0.693034, -0.269008, -0.093768, 0.416629, 0.377931, -0.156889, 0.150481, 0.317546, -0.373843, -0.564875, -0.444552, 0.706068, -0.383373, 0.989821, -0.666674, 0.684051, -0.951821, -0.855062, 0.578138, -0.347227, 0.69753, -0.791495, -0.279348, -0.211924, -0.342932]
U shape: (4, 16)
U: [0.24583, 0.988469, -0.405046, -0.656416, -0.895596, -0.990112, 0.890743, 0.530544, -0.934244, -0.057071, 0.749248, -0.961684, 0.237511, -0.050776, -0.680631, 0.155604, 0.537174, 0.755353, -0.194927, -0.09975, 0.933139, -0.872933, -0.69254, 0.629798, -0.276511, 0.58305, 0.987048, 0.350594, -0.695788, -0.455537, -0.983671, -0.4312, -0.935086, 0.812021, 0.992143, 0.008842, 0.123215, 0.451724, -0.458078, -0.191656, 0.452465, 0.236305, 0.454661, -0.613885, 0.828452, -0.185927, 0.102275, 0.47508, 0.209108, -0.894238, -0.084557, -0.874674, 0.856274, 0.895738, -0.061186, -0.673972, -0.371994, -0.914092, 0.931769, 0.707757, -0.343335, -0.558331, 0.438789, -0.022191]

in shape: (3, 6)
in: [0.187997, 0.893431, 0.107407, 0.308148, -0.278365, -0.319295, 0.898425, 0.741769, -0.011927, -0.795301, -0.522695, -0.760068, -0.400974, 0.769344, 0.423505, -0.360857, 0.211238, 0.549148]
out shape: (3, 4)
out: [-0.086446, -0.074775, -0.210118, -0.201747, 0.103375, -0.200166, -0.240455, -0.325251, 0.132833, -0.237517, -0.211225, -0.307349]

Export for Keras.js tests


In [13]:
# Serialize the collected LSTM test fixtures (DATA is an OrderedDict built
# in the cells above) to a JSON string and emit it for Keras.js tests.
serialized = json.dumps(DATA)
print(serialized)


{"recurrent.LSTM.0": {"weights": [{"shape": [6, 16], "data": [0.021587, 0.171208, 0.42254, -0.348744, 0.040626, 0.479164, 0.684427, -0.694989, 0.064855, 0.056754, -0.076463, 0.184194, -0.793392, 0.627222, -0.855503, -0.362412, 0.855828, 0.681804, -0.326362, -0.218483, 0.302837, 0.321287, -0.052374, 0.066035, 0.80225, 0.994312, 0.829629, -0.750368, 0.311414, 0.458415, 0.51261, 0.022764, -0.828295, 0.929486, 0.472513, -0.651351, -0.39246, -0.263827, -0.29026, 0.315177, -0.824611, -0.228372, 0.696123, -0.8348, 0.96957, -0.042386, -0.39237, -0.273751, 0.318172, -0.262291, -0.419734, 0.824398, -0.960246, 0.882888, 0.226625, 0.422804, -0.061766, -0.273797, -0.740205, -0.786523, 0.340772, -0.485411, -0.942924, -0.698791, -0.459364, 0.074867, 0.122774, 0.446639, -0.23748, 0.97628, 0.836324, 0.935033, 0.293947, -0.682098, 0.954772, -0.073279, 0.501037, 0.917773, 0.673993, -0.984998, 0.610514, 0.393294, -0.12069, -0.443252, -0.812296, 0.240061, -0.601492, 0.395082, 0.919933, 0.731383, 0.489317, -0.139417, 0.662004, -0.563, -0.746144, -0.502416]}, {"shape": [4, 16], "data": [-0.486398, -0.924962, 0.924244, -0.099505, 0.697209, -0.988196, 0.312034, -0.82215, -0.450764, -0.702362, 0.86183, -0.505312, 0.224025, -0.210654, -0.378863, 0.270893, -0.812905, -0.668551, -0.447104, -0.95127, 0.740875, -0.871631, 0.889087, 0.883093, -0.980509, 0.98598, -0.645445, -0.873458, 0.401564, 0.718666, 0.454424, -0.149874, -0.545098, -0.362023, 0.681391, -0.626403, -0.090411, -0.773531, 0.402304, -0.775595, 0.013104, 0.861222, 0.47835, -0.616243, 0.159755, -0.29888, -0.784858, -0.419486, -0.611265, 0.750941, 0.906839, 0.756238, -0.144673, 0.857577, -0.233347, 0.151998, -0.23287, -0.323288, -0.554204, 0.631463, 0.228657, -0.40717, -0.938694, -0.797066]}, {"shape": [16], "data": [-0.752966, 0.132946, 0.249025, -0.746897, -0.338058, 0.40026, -0.588537, 0.422014, -0.2607, 0.770741, -0.202932, -0.948132, 0.566542, 0.482049, -0.113059, 0.717462]}], "expected": {"shape": [4], "data": [0.025157, 
0.340862, 0.236587, -0.102168]}, "input": {"shape": [3, 6], "data": [-0.753325, -0.20832, -0.757948, -0.844138, 0.220283, -0.381538, -0.597024, 0.401179, 0.139628, -0.718899, 0.646891, 0.326223, 0.856931, -0.130292, 0.924336, 0.209607, 0.747908, -0.765531]}}, "recurrent.LSTM.1": {"weights": [{"shape": [5, 20], "data": [0.904105, -0.877164, 0.33871, 0.812244, -0.802633, 0.595832, -0.981528, 0.378119, 0.41493, -0.11458, -0.692809, 0.875448, 0.318268, -0.627292, 0.941065, 0.916899, -0.78002, 0.54744, -0.282447, -0.214613, -0.087847, -0.404508, -0.411046, -0.425703, -0.603508, -0.541301, 0.056309, 0.082118, -0.687277, -0.231217, -0.646208, 0.668539, -0.081893, 0.495785, 0.63189, 0.027142, 0.605947, 0.346434, 0.995725, 0.061962, 0.471127, -0.310166, -0.229217, 0.682301, 0.219002, 0.503352, 0.169522, -0.959504, -0.231462, 0.500861, -0.379661, 0.986477, 0.827636, 0.832743, 0.415983, -0.240876, -0.141465, 0.532491, 0.428981, -0.15624, -0.473962, -0.681536, 0.673815, -0.180061, -0.575885, -0.913771, 0.743022, -0.636367, 0.062211, 0.037906, -0.306191, -0.537631, 0.189318, 0.951994, 0.840977, 0.699095, 0.502426, 0.422765, -0.182983, 0.412705, 0.983698, -0.124156, -0.774298, -0.073847, 0.21168, -0.355227, -0.436294, -0.676586, -0.44021, -0.947977, -0.173704, -0.207742, 0.349649, -0.747737, -0.500392, -0.401987, -0.876118, -0.913124, -0.895117, -0.499716]}, {"shape": [5, 20], "data": [-0.708522, -0.706998, 0.233695, 0.906727, -0.986561, 0.797093, -0.065787, 0.680991, 0.839202, 0.009629, -0.39912, -0.935261, 0.187645, 0.505894, -0.292493, 0.909563, -0.869531, -0.741938, -0.617129, -0.750299, 0.78066, -0.489417, 0.306455, -0.684443, -0.097986, 0.58979, 0.404458, 0.624373, 0.117453, -0.006815, -0.35842, 0.662326, -0.890617, 0.891066, -0.042107, 0.641752, -0.472995, -0.368807, -0.097789, 0.670207, 0.107198, 0.882032, 0.464538, -0.719207, -0.405612, -0.825646, -0.883975, 0.714731, -0.537945, 0.241298, -0.17753, -0.476467, 0.538848, -0.283935, 0.5183, -0.121804, -0.585215, 0.265924, 
-0.141693, 0.56808, 0.744637, 0.487378, -0.041827, -0.359161, 0.88678, 0.520241, 0.732665, 0.372201, 0.88503, -0.936812, -0.730826, -0.157315, -0.62689, 0.358158, -0.426776, 0.529963, 0.210689, 0.280205, 0.275805, 0.338371, 0.021063, -0.783356, -0.666266, -0.336966, 0.091107, -0.047504, 0.349973, -0.350952, 0.086999, 0.702717, -0.327907, 0.604804, -0.85941, -0.471312, 0.183139, 0.42747, -0.144995, -0.761184, 0.054661, -0.596663]}, {"shape": [20], "data": [0.351652, 0.052999, 0.049473, -0.569237, 0.204228, 0.779938, 0.717971, 0.446785, -0.898506, 0.587256, 0.338361, -0.234015, -0.927665, -0.185907, -0.162251, 0.530251, -0.003898, 0.552709, -0.050923, -0.268882]}], "expected": {"shape": [5], "data": [0.621899, 0.05448, 0.369449, 0.172543, 0.193786]}, "input": {"shape": [8, 5], "data": [0.511747, -0.783125, -0.492879, 0.800655, 0.572511, -0.487136, 0.46024, -0.895998, -0.495468, 0.11452, 0.693938, -0.379336, 0.005202, 0.494934, -0.225437, -0.246465, -0.849792, 0.376289, -0.471726, -0.069163, 0.280505, -0.530568, -0.321166, -0.245045, -0.464593, -0.402877, -0.359471, -0.789757, 0.104967, -0.51705, -0.926441, 0.00999, -0.018231, 0.352377, -0.509285, 0.86149, -0.758643, 0.776341, -0.534127, 0.018696]}}, "recurrent.LSTM.2": {"weights": [{"shape": [6, 16], "data": [0.737993, -0.987905, 0.734592, 0.869378, -0.219359, 0.16681, 0.618012, 0.167843, -0.129422, -0.743455, 0.398148, 0.849206, -0.941554, 0.796703, 0.939108, 0.23178, -0.652339, 0.019098, -0.925936, -0.892392, 0.785007, 0.794477, -0.682383, 0.542452, 0.94182, 0.523941, -0.490922, 0.052736, -0.231486, 0.352287, -0.533683, 0.677718, -0.068964, 0.957188, -0.825946, 0.288453, -0.059563, -0.153802, -0.838862, 0.91015, 0.444582, -0.760608, -0.767185, 0.527579, -0.47092, -0.403388, -0.82798, 0.396382, -0.54968, -0.110079, 0.289935, 0.643334, 0.271626, -0.742704, -0.878148, 0.445921, 0.694483, 0.305741, -0.036408, 0.793896, -0.765091, -0.143875, -0.292066, 0.645172, 0.420077, -0.865549, -0.694169, -0.987636, -0.075494, 
0.153327, 0.301262, -0.547151, -0.805897, 0.224197, -0.449504, -0.740675, -0.562222, -0.078358, 0.678554, 0.588043, 0.468169, -0.552233, -0.293209, -0.255212, -0.600609, -0.920394, -0.241567, 0.638241, 0.069677, 0.550886, -0.668446, -0.206364, -0.836454, -0.679019, -0.579047, -0.194415]}, {"shape": [4, 16], "data": [-0.80119, -0.093136, -0.781478, 0.664981, 0.81755, 0.218071, -0.494078, 0.765253, 0.73909, -0.631062, 0.886984, -0.606791, -0.153821, -0.772063, 0.758903, -0.855796, -0.307768, 0.947849, 0.654099, 0.100526, -0.662352, -0.969299, 0.120011, -0.356979, 0.062594, 0.727154, -0.336611, -0.870729, -0.949392, -0.881842, 0.731806, 0.361095, -0.168111, 0.835201, 0.10262, -0.76506, -0.680559, 0.56447, 0.52546, 0.503997, -0.947011, -0.578522, 0.289283, 0.649655, 0.453943, 0.34202, 0.956858, 0.76988, -0.110212, -0.900381, -0.097136, 0.805348, -0.153694, 0.672428, 0.897543, 0.168938, 0.991301, 0.141932, -0.530237, -0.807775, -0.910187, -0.445946, -0.339299, -0.150569]}, {"shape": [16], "data": [-0.70843, 0.498261, -0.623474, 0.535158, 0.001735, -0.057691, 0.831795, 0.950278, 0.786296, -0.004535, 0.449666, 0.784199, -0.625641, 0.054766, 0.765356, -0.434406]}], "expected": {"shape": [3, 4], "data": [0.048062, -0.054377, 0.15781, 0.246894, 0.013608, 0.039657, 0.503141, 0.094743, -0.075479, -0.160097, 0.444001, 0.14916]}, "input": {"shape": [3, 6], "data": [-0.339007, -0.951627, 0.257882, 0.846312, 0.204243, -0.154216, -0.573902, -0.955183, -0.686984, 0.400696, 0.188089, -0.347584, 0.34867, 0.149636, 0.5524, -0.971856, 0.820963, 0.282971]}}, "recurrent.LSTM.3": {"weights": [{"shape": [6, 16], "data": [-0.399353, -0.895862, -0.41293, 0.819636, 0.787156, 0.259826, -0.355711, -0.12699, 0.624865, -0.033767, 0.153665, 0.399579, -0.044954, -0.764389, -0.129632, -0.944864, 0.635317, -0.991724, 0.731262, -0.845773, 0.244354, -0.555815, -0.851347, 0.796695, 0.28536, -0.380158, -0.928391, -0.958666, 0.718092, -0.864773, 0.895271, 0.084197, 0.221055, -0.474646, 0.552902, -0.253556, 
0.516427, 0.711978, 0.357177, 0.453715, -0.83785, -0.762261, -0.664082, 0.697361, -0.80853, -0.142768, -0.443145, 0.349797, 0.966243, -0.834386, 0.893595, -0.617738, 0.58295, -0.435765, 0.549147, 0.388394, 0.832709, -0.472379, -0.39995, 0.803289, -0.892217, -0.631879, 0.632926, -0.006492, -0.08419, 0.277637, -0.309461, -0.873106, -0.017556, 0.394943, -0.196936, 0.197863, -0.987893, -0.418935, 0.62081, -0.631432, 0.603959, -0.28261, 0.669425, 0.821414, -0.811792, -0.052494, -0.478485, 0.728073, 0.02656, 0.588014, 0.198001, 0.218637, -0.611665, -0.776832, 0.921578, 0.12272, -0.159767, 0.396714, 0.430573, 0.765812]}, {"shape": [4, 16], "data": [0.278761, -0.085793, -0.689403, -0.817458, -0.227681, -0.564636, 0.716448, -0.000751, 0.663013, 0.268259, 0.431448, 0.576852, 0.847214, 0.343712, 0.18475, 0.892598, 0.410263, -0.456105, 0.129722, 0.32196, 0.822134, -0.828802, -0.718326, -0.654823, 0.711797, 0.256293, -0.34323, 0.226173, 0.974305, 0.532244, 0.517144, -0.341975, 0.805438, 0.809611, -0.549865, -0.943035, -0.934518, -0.875885, 0.626661, -0.938315, 0.129619, -0.022854, 0.541641, -0.558275, -0.508106, 0.233078, -0.253346, 0.6672, -0.918203, 0.223328, -0.146988, 0.010386, -0.998701, -0.643875, 0.011375, -0.081274, 0.063804, -0.26994, -0.0854, -0.032498, -0.524119, -0.988964, -0.941631, -0.483964]}, {"shape": [16], "data": [0.012297, 0.389123, 0.399996, -0.507058, -0.677136, -0.133757, -0.147228, 0.742572, -0.879183, 0.562181, -0.147981, 0.45699, -0.939416, 0.814432, -0.153507, 0.336024]}], "expected": {"shape": [4], "data": [0.081007, 0.497764, 0.132727, 0.118248]}, "input": {"shape": [3, 6], "data": [-0.15179, -0.805714, -0.961919, 0.939672, -0.406803, -0.396507, 0.093769, 0.331408, -0.851624, -0.624355, 0.286918, 0.192192, -0.611075, -0.272506, -0.507912, 0.924403, -0.098821, 0.732296]}}, "recurrent.LSTM.4": {"weights": [{"shape": [6, 16], "data": [-0.399353, -0.895862, -0.41293, 0.819636, 0.787156, 0.259826, -0.355711, -0.12699, 0.624865, -0.033767, 0.153665, 
0.399579, -0.044954, -0.764389, -0.129632, -0.944864, 0.635317, -0.991724, 0.731262, -0.845773, 0.244354, -0.555815, -0.851347, 0.796695, 0.28536, -0.380158, -0.928391, -0.958666, 0.718092, -0.864773, 0.895271, 0.084197, 0.221055, -0.474646, 0.552902, -0.253556, 0.516427, 0.711978, 0.357177, 0.453715, -0.83785, -0.762261, -0.664082, 0.697361, -0.80853, -0.142768, -0.443145, 0.349797, 0.966243, -0.834386, 0.893595, -0.617738, 0.58295, -0.435765, 0.549147, 0.388394, 0.832709, -0.472379, -0.39995, 0.803289, -0.892217, -0.631879, 0.632926, -0.006492, -0.08419, 0.277637, -0.309461, -0.873106, -0.017556, 0.394943, -0.196936, 0.197863, -0.987893, -0.418935, 0.62081, -0.631432, 0.603959, -0.28261, 0.669425, 0.821414, -0.811792, -0.052494, -0.478485, 0.728073, 0.02656, 0.588014, 0.198001, 0.218637, -0.611665, -0.776832, 0.921578, 0.12272, -0.159767, 0.396714, 0.430573, 0.765812]}, {"shape": [4, 16], "data": [0.278761, -0.085793, -0.689403, -0.817458, -0.227681, -0.564636, 0.716448, -0.000751, 0.663013, 0.268259, 0.431448, 0.576852, 0.847214, 0.343712, 0.18475, 0.892598, 0.410263, -0.456105, 0.129722, 0.32196, 0.822134, -0.828802, -0.718326, -0.654823, 0.711797, 0.256293, -0.34323, 0.226173, 0.974305, 0.532244, 0.517144, -0.341975, 0.805438, 0.809611, -0.549865, -0.943035, -0.934518, -0.875885, 0.626661, -0.938315, 0.129619, -0.022854, 0.541641, -0.558275, -0.508106, 0.233078, -0.253346, 0.6672, -0.918203, 0.223328, -0.146988, 0.010386, -0.998701, -0.643875, 0.011375, -0.081274, 0.063804, -0.26994, -0.0854, -0.032498, -0.524119, -0.988964, -0.941631, -0.483964]}, {"shape": [16], "data": [0.012297, 0.389123, 0.399996, -0.507058, -0.677136, -0.133757, -0.147228, 0.742572, -0.879183, 0.562181, -0.147981, 0.45699, -0.939416, 0.814432, -0.153507, 0.336024]}], "expected": {"shape": [3, 4], "data": [-0.039869, 0.051783, 0.211034, 0.209157, -0.164133, 0.355645, 0.170262, -0.111856, 0.081007, 0.497764, 0.132727, 0.118248]}, "input": {"shape": [3, 6], "data": [-0.15179, -0.805714, 
-0.961919, 0.939672, -0.406803, -0.396507, 0.093769, 0.331408, -0.851624, -0.624355, 0.286918, 0.192192, -0.611075, -0.272506, -0.507912, 0.924403, -0.098821, 0.732296]}}, "recurrent.LSTM.5": {"weights": [{"shape": [6, 16], "data": [-0.8498, 0.430774, 0.873073, 0.048923, 0.334624, -0.535482, -0.161324, 0.902336, 0.280753, 0.753689, -0.15467, -0.322404, -0.590549, 0.777274, -0.571364, 0.649152, 0.707209, -0.986659, 0.520748, -0.281583, -0.017415, 0.200441, -0.065796, 0.930076, -0.979278, -0.076096, -0.253233, 0.534729, -0.456539, -0.328327, 0.666324, -0.550168, 0.572946, 0.258097, 0.278902, 0.88237, 0.841026, 0.593731, 0.439719, 0.658633, 0.210103, -0.158248, -0.219485, 0.96215, -0.216797, 0.042074, 0.712456, 0.576437, 0.236331, 0.209346, -0.107554, -0.319436, 0.803039, 0.795168, 0.836552, 0.929391, -0.93503, 0.51693, -0.935837, 0.414736, -0.504766, -0.961246, 0.374652, -0.910804, 0.758076, 0.428739, -0.975447, -0.8947, -0.577597, 0.778909, -0.160486, 0.35176, 0.952946, 0.375849, 0.091075, 0.354457, -0.380646, 0.743773, 0.855056, 0.8086, -0.93462, -0.429751, 0.38859, 0.689388, 0.625022, -0.153817, -0.063389, -0.54276, 0.354462, 0.464064, -0.476255, -0.444326, -0.552878, 0.233093, -0.71522, 0.095086]}, {"shape": [4, 16], "data": [0.910033, -0.779975, 0.603546, 0.156029, 0.870504, -0.776493, 0.174083, -0.284585, -0.547467, 0.925727, 0.158879, -0.22124, 0.076485, -0.231166, 0.948932, 0.470572, 0.286061, -0.787186, -0.599283, -0.787769, 0.119607, 0.030586, 0.666647, 0.792727, 0.886104, 0.416546, -0.182666, 0.377953, 0.602132, 0.350282, 0.731257, 0.97233, -0.932709, 0.712936, 0.084777, -0.102671, 0.973234, -0.457406, 0.203686, -0.217829, 0.012293, -0.00983, -0.582936, -0.182595, -0.143616, -0.94615, -0.95671, -0.98114, 0.455458, -0.05547, 0.856833, 0.598765, 0.719789, 0.5325, -0.56002, 0.463818, 0.560565, 0.143572, 0.488289, 0.655092, 0.964739, -0.081963, -0.019162, -0.802279]}, {"shape": [16], "data": [0.614807, 0.771408, 0.865982, -0.307693, 0.691855, 0.303404, 
-0.819509, 0.738984, -0.765253, 0.587616, -0.307639, -0.344203, 0.615247, 0.685789, 0.293608, 0.28084]}], "expected": {"shape": [4], "data": [0.375903, 0.458389, 0.112985, 0.199426]}, "input": {"shape": [3, 6], "data": [-0.460855, 0.880207, 0.776946, -0.841116, 0.958348, 0.53474, -0.207934, -0.251826, -0.675944, -0.37848, 0.523187, -0.75506, -0.702952, -0.372328, 0.280085, 0.207177, 0.895026, -0.528682]}}, "recurrent.LSTM.6": {"weights": [{"shape": [6, 16], "data": [-0.226446, 0.727963, -0.947492, -0.480227, 0.771183, 0.280201, -0.775829, 0.626171, 0.68377, 0.166377, 0.466782, 0.160253, -0.566836, -0.640068, 0.509834, -0.724127, 0.635193, -0.966521, 0.908026, -0.71012, 0.441452, -0.409814, 0.564184, -0.538442, -0.912846, 0.79386, 0.069924, 0.461391, -0.54725, 0.302492, -0.857499, -0.253477, -0.266564, -0.112861, 0.759745, 0.600893, 0.355036, -0.527019, 0.171414, -0.676096, -0.896771, -0.999243, -0.601509, 0.130658, -0.609191, 0.762231, 0.62638, 0.3142, 0.070571, -0.063705, -0.079421, -0.069564, 0.196045, 0.16474, -0.40898, -0.091101, -0.912095, -0.872325, 0.751147, -0.167119, 0.550729, 0.135446, -0.55051, 0.965996, 0.395989, -0.689458, 0.03557, 0.885503, -0.49227, -0.798626, 0.38071, 0.289817, 0.813399, 0.885651, 0.382049, -0.955529, -0.649721, 0.924003, 0.803036, -0.437818, -0.839386, -0.673986, -0.51929, 0.368413, -0.547194, 0.748589, 0.894908, 0.395991, 0.000263, 0.114199, -0.525642, -0.065138, -0.84562, 0.775626, 0.722957, 0.760239]}, {"shape": [4, 16], "data": [-0.819411, 0.247027, -0.205291, 0.667737, 0.682253, -0.502593, -0.57604, 0.585538, 0.811808, -0.133528, -0.851471, 0.808487, 0.777171, 0.436317, 0.023861, -0.226522, -0.19441, -0.69482, -0.047086, 0.580698, 0.770829, 0.327904, 0.855653, -0.087931, 0.724921, -0.663294, 0.221178, -0.873578, -0.29845, -0.307426, -0.309536, -0.47405, 0.508346, 0.240965, 0.17208, -0.697697, 0.304168, 0.840282, 0.121316, -0.028624, -0.072513, -0.514004, 0.452095, 0.792413, 0.877921, -0.234592, -0.255257, -0.034971, -0.692105, 
0.775637, 0.483492, -0.045051, -0.811689, -0.791119, 0.942962, -0.331172, -0.740263, 0.311253, 0.807421, 0.119114, -0.080466, -0.615076, 0.521326, 0.240488]}, {"shape": [16], "data": [0.365624, 0.066587, -0.385883, -0.271312, 0.695525, 0.255661, -0.814251, -0.1385, 0.346522, -0.204546, -0.082548, 0.189536, 0.250389, -0.4052, 0.633127, 0.660295]}], "expected": {"shape": [3, 4], "data": [-0.266918, -0.099913, 0.197515, 0.016451, -0.474102, -0.073446, 0.113511, 0.045791, -0.274091, -0.060948, 0.366839, -0.118142]}, "input": {"shape": [3, 6], "data": [0.878902, 0.312992, 0.135887, 0.596807, 0.10873, -0.517602, 0.111939, 0.914438, -0.340345, 0.435448, -0.646174, -0.038401, 0.351549, 0.332486, 0.43771, 0.466788, 0.772019, -0.28885]}}, "recurrent.LSTM.7": {"weights": [{"shape": [6, 16], "data": [-0.583267, -0.821648, 0.597127, -0.06219, 0.097115, 0.59847, -0.178556, -0.822068, -0.219792, 0.331711, -0.374065, 0.937862, -0.971386, -0.429613, -0.947444, 0.912872, -0.596752, -0.409293, -0.946332, 0.321693, -0.539954, -0.499509, -0.100364, -0.075292, -0.761363, -0.756431, 0.412417, -0.890623, 0.599306, 0.130664, -0.381465, 0.086817, 0.17249, -0.14002, 0.793208, -0.975779, -0.821004, 0.57001, 0.945065, 0.247593, 0.966075, 0.684202, 0.172511, 0.433066, -0.360727, 0.10001, -0.688926, -0.317165, 0.939906, -0.678243, -0.311967, -0.274828, -0.297329, -0.47057, 0.680525, -0.581226, 0.608076, -0.350015, 0.704833, -0.023262, -0.863598, -0.629018, -0.12579, 0.112848, 0.606962, -0.105406, 0.873613, -0.270182, -0.118657, -0.443719, -0.651996, 0.973474, 0.923547, 0.110465, 0.467221, -0.847588, -0.623232, -0.239494, -0.342345, 0.240898, 0.366699, -0.411542, -0.072619, 0.090528, -0.093493, -0.999229, -0.610555, -0.884697, 0.9016, -0.792493, 0.567794, -0.073171, -0.970286, 0.863006, 0.473766, -0.464347]}, {"shape": [4, 16], "data": [0.488258, 0.168768, 0.947903, -0.817885, 0.490368, -0.652774, -0.683076, 0.821928, -0.948643, 0.995002, -0.810977, -0.048727, -0.829477, -0.164787, -0.577251, 
-0.262461, 0.728068, 0.946692, -0.873341, 0.779233, -0.154365, -0.206354, -0.258879, -0.658548, -0.692589, -0.381312, -0.285632, 0.311995, 0.089257, -0.79229, 0.124943, -0.313979, -0.871547, -0.655183, 0.24746, 0.951564, -0.255144, 0.517897, 0.380996, -0.825304, -0.310744, -0.738195, -0.663784, 0.407865, 0.913353, 0.607378, -0.615646, -0.510737, -0.756113, 0.451846, 0.531441, -0.729966, 0.559688, -0.458308, -0.750312, 0.271233, 0.752021, -0.436684, -0.960433, -0.448696, -0.736677, 0.01241, 0.401582, -0.744658]}, {"shape": [16], "data": [-0.013069, 0.192357, 0.917856, 0.538656, -0.286434, -0.035023, 0.43928, -0.979826, 0.503243, 0.676672, 0.782223, -0.735866, 0.600414, -0.725464, -0.417174, -0.395111]}], "expected": {"shape": [4], "data": [-0.003723, 0.087683, 0.269376, -0.119548]}, "input": {"shape": [3, 6], "data": [-0.4464, -0.419362, -0.29416, 0.6744, -0.447755, -0.504842, 0.980072, 0.916908, -0.228844, -0.424255, -0.6311, 0.810461, 0.542148, -0.226868, 0.316039, -0.029251, -0.764114, -0.807565]}}, "recurrent.LSTM.8": {"weights": [{"shape": [6, 16], "data": [-0.830074, -0.390926, -0.155379, -0.175033, 0.339507, -0.898578, -0.413653, -0.783312, 0.390054, 0.291915, -0.639546, -0.358026, -0.299562, 0.530356, 0.699093, -0.657777, -0.805037, -0.737004, -0.533156, -0.02555, 0.961913, 0.354405, 0.414537, -0.856747, -0.087681, -0.779066, -0.062943, -0.787575, -0.140881, 0.475163, 0.832274, 0.032463, 0.093214, 0.293965, -0.721175, 0.076291, 0.887442, -0.069078, 0.771404, -0.558691, -0.535376, 0.985132, 0.501264, 0.081917, 0.208323, -0.556763, 0.959328, -0.628823, -0.67348, -0.134795, 0.741077, 0.048041, -0.253542, -0.073037, -0.446174, 0.998574, 0.720061, 0.368172, 0.524055, 0.658088, -0.303247, 0.777888, -0.498292, -0.57091, -0.784288, 0.360542, -0.22312, -0.54481, -0.109966, -0.828631, -0.046113, 0.693034, -0.269008, -0.093768, 0.416629, 0.377931, -0.156889, 0.150481, 0.317546, -0.373843, -0.564875, -0.444552, 0.706068, -0.383373, 0.989821, -0.666674, 0.684051, 
-0.951821, -0.855062, 0.578138, -0.347227, 0.69753, -0.791495, -0.279348, -0.211924, -0.342932]}, {"shape": [4, 16], "data": [0.24583, 0.988469, -0.405046, -0.656416, -0.895596, -0.990112, 0.890743, 0.530544, -0.934244, -0.057071, 0.749248, -0.961684, 0.237511, -0.050776, -0.680631, 0.155604, 0.537174, 0.755353, -0.194927, -0.09975, 0.933139, -0.872933, -0.69254, 0.629798, -0.276511, 0.58305, 0.987048, 0.350594, -0.695788, -0.455537, -0.983671, -0.4312, -0.935086, 0.812021, 0.992143, 0.008842, 0.123215, 0.451724, -0.458078, -0.191656, 0.452465, 0.236305, 0.454661, -0.613885, 0.828452, -0.185927, 0.102275, 0.47508, 0.209108, -0.894238, -0.084557, -0.874674, 0.856274, 0.895738, -0.061186, -0.673972, -0.371994, -0.914092, 0.931769, 0.707757, -0.343335, -0.558331, 0.438789, -0.022191]}], "expected": {"shape": [3, 4], "data": [-0.086446, -0.074775, -0.210118, -0.201747, 0.103375, -0.200166, -0.240455, -0.325251, 0.132833, -0.237517, -0.211225, -0.307349]}, "input": {"shape": [3, 6], "data": [0.187997, 0.893431, 0.107407, 0.308148, -0.278365, -0.319295, 0.898425, 0.741769, -0.011927, -0.795301, -0.522695, -0.760068, -0.400974, 0.769344, 0.423505, -0.360857, 0.211238, 0.549148]}}}

In [ ]: