In [1]:
import numpy as np
from keras.models import Model
from keras.layers import Input
from keras.layers.convolutional import Cropping2D
from keras import backend as K
import json
from collections import OrderedDict


Using TensorFlow backend.

In [2]:
def format_decimal(arr, places=6):
    return [round(x * 10**places) / 10**places for x in arr]
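
A quick illustrative call (values here are made up for demonstration, not taken from the fixtures below): the helper rounds every element of a flat list to the given number of decimal places.

format_decimal([0.1234567, -1.9999999])  # -> [0.123457, -2.0]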

In [3]:
DATA = OrderedDict()

Cropping2D

[convolutional.Cropping2D.0] cropping ((1,1),(1,1)) on 3x5x4 input, data_format='channels_last'


In [4]:
data_in_shape = (3, 5, 4)
L = Cropping2D(cropping=((1,1),(1,1)), data_format='channels_last')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(250)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.0'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (3, 5, 4)
in: [-0.570441, -0.454673, -0.285321, 0.237249, 0.282682, 0.428035, 0.160547, -0.332203, 0.546391, 0.272735, 0.010827, -0.763164, -0.442696, 0.381948, -0.676994, 0.753553, -0.031788, 0.915329, -0.738844, 0.269075, 0.434091, 0.991585, -0.944288, 0.258834, 0.162138, 0.565201, -0.492094, 0.170854, -0.139788, -0.710674, 0.406968, 0.705926, -0.094137, -0.793497, -0.040684, 0.522292, 0.490496, -0.651771, 0.293113, -0.580922, -0.396436, 0.864522, -0.411659, 0.317379, 0.771844, 0.507235, 0.067782, 0.922641, -0.944205, 0.383713, -0.380187, -0.875176, 0.799314, -0.681818, -0.761323, 0.768315, -0.298568, -0.611688, -0.768656, 0.525693]
out shape: (1, 3, 4)
out: [0.162138, 0.565201, -0.492094, 0.170854, -0.139788, -0.710674, 0.406968, 0.705926, -0.094137, -0.793497, -0.040684, 0.522292]
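
As a sanity-check sketch (added here, not part of the generated notebook output): with data_format='channels_last', Cropping2D crops the first two axes of each sample and leaves the channel axis untouched, so the result above should equal a plain NumPy slice of the in-scope data_in.

# sketch only, reusing data_in / result / data_out_shape from the cell above
sliced = data_in[1:-1, 1:-1, :]  # (3, 5, 4) -> (1, 3, 4)
assert sliced.shape == data_out_shape
assert np.allclose(result[0], sliced, atol=1e-6)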

[convolutional.Cropping2D.1] cropping ((1,1),(1,1)) on 3x5x4 input, data_format='channels_first'


In [5]:
data_in_shape = (3, 5, 4)
L = Cropping2D(cropping=((1,1),(1,1)), data_format='channels_first')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(251)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.1'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (3, 5, 4)
in: [0.275222, -0.793967, -0.468107, -0.841484, -0.295362, 0.78175, 0.068787, -0.261747, -0.625733, -0.042907, 0.861141, 0.85267, 0.956439, 0.717838, -0.99869, -0.963008, 0.013277, -0.180306, 0.832137, -0.385252, -0.524308, 0.659706, -0.905127, 0.526292, 0.832569, 0.084455, 0.23838, -0.046178, -0.735871, 0.776883, -0.394643, 0.498903, 0.029584, -0.17332, 0.628159, 0.445074, 0.638899, 0.597286, -0.055729, -0.825457, -0.675992, 0.931468, 0.666841, 0.624388, -0.412075, 0.716772, -0.053248, -0.0025, -0.743212, 0.967964, -0.259998, -0.14653, -0.823771, -0.506749, -0.94293, -0.452047, -0.492538, 0.733182, -0.219022, -0.514951]
out shape: (3, 3, 2)
out: [0.78175, 0.068787, -0.042907, 0.861141, 0.717838, -0.99869, 0.084455, 0.23838, 0.776883, -0.394643, -0.17332, 0.628159, 0.716772, -0.053248, 0.967964, -0.259998, -0.506749, -0.94293]
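
The same sketch for data_format='channels_first' (again only a check against the variables from the cell above): the channel axis comes first, so the crop applies to the last two axes.

sliced = data_in[:, 1:-1, 1:-1]  # (3, 5, 4) -> (3, 3, 2)
assert sliced.shape == data_out_shape
assert np.allclose(result[0], sliced, atol=1e-6)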

[convolutional.Cropping2D.2] cropping ((4,2),(3,1)) on 8x7x6 input, data_format='channels_last'


In [6]:
data_in_shape = (8, 7, 6)
L = Cropping2D(cropping=((4,2),(3,1)), data_format='channels_last')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(252)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.2'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (8, 7, 6)
in: [-0.989173, -0.133618, -0.505338, 0.023259, 0.503982, -0.303769, -0.436321, 0.793911, 0.416102, 0.806405, -0.098342, -0.738022, -0.982676, 0.805073, 0.741244, -0.941634, -0.253526, -0.136544, -0.295772, 0.207565, -0.517246, -0.686963, -0.176235, -0.354111, -0.862411, -0.969822, 0.200074, 0.290718, -0.038623, 0.294839, 0.247968, 0.557946, -0.455596, 0.6624, 0.879529, -0.466772, 0.40423, 0.213794, 0.645662, -0.044634, -0.552595, 0.771242, -0.131944, -0.172725, 0.700856, -0.001994, 0.606737, -0.593306, 0.898062, -0.203771, 0.645788, 0.596358, -0.571654, -0.636819, -0.367376, -0.892607, -0.123297, 0.748404, -0.783012, 0.061963, -0.908546, -0.988517, 0.97308, 0.483845, 0.967956, 0.95863, 0.633963, -0.109138, -0.615821, -0.479821, 0.42827, 0.761351, 0.108265, -0.835679, -0.200177, 0.556308, -0.346135, -0.591775, 0.888818, 0.088756, 0.887542, 0.231398, -0.062874, 0.379803, 0.248259, -0.004909, -0.694836, 0.246241, 0.576698, 0.877474, 0.353439, 0.528707, 0.585634, -0.985339, 0.438776, -0.314863, 0.784171, 0.12525, 0.889265, -0.644213, 0.052514, 0.733269, 0.099278, -0.141949, 0.498542, 0.735955, 0.760313, -0.660278, -0.604273, 0.199522, 0.346309, -0.488666, 0.706332, 0.995574, 0.234642, 0.042779, 0.583139, 0.159919, 0.104808, 0.232585, 0.625624, 0.748204, -0.878399, 0.514601, -0.392989, -0.835864, 0.247256, -0.889704, -0.776558, 0.069902, -0.982291, 0.139565, 0.559765, 0.364254, -0.167329, -0.027032, -0.396669, 0.887928, 0.24142, -0.950083, -0.354758, 0.815999, -0.533224, 0.001955, -0.997966, -0.716888, 0.861882, -0.883, 0.797625, 0.602008, -0.633245, -0.710132, 0.608369, -0.784398, 0.576564, 0.633279, 0.542795, 0.90345, 0.457414, 0.242734, 0.310622, -0.82517, 0.262998, -0.571046, 0.936527, 0.878469, -0.347791, -0.859749, -0.952931, -0.129702, 0.685477, -0.796418, 0.377681, 0.858414, -0.835134, 0.014816, 0.103085, 0.850919, -0.005764, 0.715943, 0.003942, 0.746505, -0.422674, 0.819656, 0.037147, 0.078259, 0.609219, 0.240121, -0.457013, -0.947625, -0.598543, 0.754378, 0.694688, 0.001741, -0.731877, -0.442437, 0.116966, 0.360755, 0.396709, 0.018889, -0.243222, 0.03233, 0.154516, 0.04915, 0.942633, -0.225662, 0.778155, 0.927135, -0.298936, -0.73013, -0.978144, -0.838932, 0.085808, -0.636771, -0.209125, 0.739682, 0.447976, -0.737185, 0.645007, -0.038338, -0.031764, -0.667117, -0.075763, 0.701876, 0.505738, 0.997181, -0.554428, -0.862689, 0.966555, 0.401094, -0.489316, 0.197598, 0.440944, 0.437652, -0.078096, 0.271921, 0.702964, -0.154695, 0.964746, 0.089738, -0.530063, -0.374368, -0.501949, -0.435823, -0.059041, 0.059762, 0.18368, -0.879219, -0.853283, 0.463376, 0.790678, -0.906068, -0.772338, 0.117419, 0.409168, 0.219139, -0.017059, 0.552391, 0.835856, -0.710045, 0.404615, -0.182959, -0.331881, -0.395508, -0.730692, 0.252354, 0.915251, -0.79357, 0.70079, -0.011832, -0.18324, 0.070365, 0.769189, 0.383026, -0.868293, 0.800455, 0.727587, -0.62417, -0.300598, -0.63724, -0.08886, 0.176103, 0.086305, -0.782202, -0.719459, -0.153585, -0.037527, 0.923902, 0.043134, -0.500307, -0.862448, -0.591515, 0.803712, 0.066251, -0.016947, 0.412293, -0.004009, 0.370996, -0.556311, -0.286518, -0.454324, 0.069928, -0.002182, -0.425322, 0.99516, 0.537532, 0.054711, 0.22533, -0.238724, 0.495853, 0.901436, 0.405811, -0.430268, -0.542533, -0.663813, -0.007121, 0.462074, -0.349408, 0.257701, 0.493632, -0.674434, -0.235252, 0.039342, 0.166896, 0.724501, 0.41962, -0.566634, 0.811497, 0.614648, -0.109631, -0.830493, -0.022923, -0.890479, 0.487839, 0.967617, 0.629215]
out shape: (2, 3, 6)
out: [0.609218, 0.240121, -0.457013, -0.947625, -0.598543, 0.754378, 0.694688, 0.001741, -0.731877, -0.442437, 0.116966, 0.360755, 0.396709, 0.018889, -0.243222, 0.03233, 0.154516, 0.04915, 0.966555, 0.401094, -0.489316, 0.197598, 0.440944, 0.437652, -0.078096, 0.271921, 0.702964, -0.154695, 0.964746, 0.089738, -0.530063, -0.374368, -0.501949, -0.435823, -0.059041, 0.059762]
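
Asymmetric cropping maps directly to asymmetric slice bounds: ((4,2),(3,1)) removes 4 rows from the top, 2 from the bottom, 3 columns from the left, and 1 from the right. A minimal sketch against the in-scope variables:

sliced = data_in[4:-2, 3:-1, :]  # (8, 7, 6) -> (2, 3, 6)
assert sliced.shape == data_out_shape
assert np.allclose(result[0], sliced, atol=1e-6)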

[convolutional.Cropping2D.3] cropping ((4,2),(3,1)) on 8x7x6 input, data_format='channels_first'


In [7]:
data_in_shape = (8, 7, 6)
L = Cropping2D(cropping=((4,2),(3,1)), data_format='channels_first')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(253)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.3'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (8, 7, 6)
in: [-0.47588, 0.366985, 0.040173, 0.015578, -0.906159, 0.241982, -0.771299, -0.443554, -0.56404, -0.17751, 0.541277, -0.233327, 0.024369, 0.858275, 0.496191, 0.980574, -0.59522, 0.480899, 0.392553, -0.191718, 0.055121, 0.289836, -0.498339, 0.800408, 0.132679, -0.716649, 0.840092, -0.088837, -0.538209, -0.580887, -0.370128, -0.924933, -0.161736, -0.205619, 0.793729, -0.354472, 0.687519, 0.272041, -0.943352, -0.730959, -0.330419, -0.479307, 0.520387, 0.137906, 0.897598, 0.869815, 0.978562, 0.731387, 0.084559, 0.05709, -0.645057, 0.021524, -0.051466, -0.312923, 0.615681, -0.489544, -0.152496, 0.450475, 0.758223, 0.217489, 0.013859, 0.495027, 0.383425, -0.825141, 0.690579, 0.579147, 0.282404, 0.941596, -0.707396, -0.647516, 0.265602, 0.87606, 0.739125, 0.286697, 0.09401, 0.526318, -0.879911, 0.453206, -0.280944, 0.090286, 0.208239, 0.162421, -0.295482, -0.426305, 0.834307, 0.183841, -0.474458, -0.811003, -0.333046, 0.893681, -0.912594, -0.962315, -0.51711, -0.856975, 0.658169, 0.85087, 0.84138, 0.34874, -0.473288, 0.796885, 0.422969, 0.636483, 0.464166, -0.082329, -0.954513, -0.121966, -0.798025, -0.670698, 0.104121, -0.126565, -0.417565, -0.858904, 0.756732, -0.65472, -0.574664, -0.996033, 0.846295, -0.847627, -0.273482, -0.509544, 0.400331, 0.534662, -0.87151, 0.404809, -0.44717, 0.822294, 0.503082, 0.270164, -0.658415, -0.701712, 0.746818, 0.611278, 0.334617, 0.211907, -0.845499, 0.944048, -0.380413, -0.755165, 0.704526, -0.67208, 0.715308, -0.225126, 0.853273, 0.086899, 0.011703, 0.656981, 0.981653, 0.595181, -0.025823, -0.758488, -0.56116, -0.364684, 0.594148, 0.594276, 0.821631, 0.979455, 0.047796, -0.816002, -0.466124, 0.621008, 0.502493, 0.953933, -0.159216, 0.852515, -0.944342, -0.730746, -0.129779, 0.524284, -0.422867, 0.391181, 0.547067, 0.779583, -0.374275, -0.048478, -0.49625, -0.128063, -0.600403, 0.686605, -0.997366, 0.040306, -0.251598, 0.67078, 0.576913, 0.913989, 0.618136, -0.755829, -0.970582, -0.670476, 0.436509, -0.618264, 0.73819, 0.802415, -0.552254, -0.017248, -0.172653, 0.669384, 0.651401, -0.227696, -0.134427, -0.440003, 0.354427, -0.022474, -0.578134, 0.497115, -0.867034, 0.986613, -0.346709, 0.028696, -0.116495, -0.206995, -0.727949, -0.186437, 0.389951, 0.695853, 0.562059, 0.815718, 0.168899, 0.501429, -0.820591, 0.049532, -0.908312, -0.559237, -0.922854, -0.806119, -0.654867, 0.375783, 0.922612, 0.763925, -0.210298, -0.554287, 0.086046, -0.366844, -0.106961, -0.910715, 0.303629, 0.149593, 0.888866, 0.249378, -0.289456, 0.774148, 0.939393, 0.572596, 0.818565, 0.545632, 0.117497, 0.295189, -0.238975, -0.051112, -0.68902, 0.796684, 0.58131, 0.798951, 0.392432, 0.951852, 0.715108, 0.229935, -0.156429, 0.909932, 0.780446, 0.32528, -0.320843, 0.431677, 0.931402, 0.295305, 0.731226, -0.489043, -0.737128, 0.844895, 0.399776, 0.684985, -0.249303, -0.523916, 0.700168, -0.382668, 0.754608, -0.351194, 0.050378, 0.587836, -0.780805, 0.831432, -0.775832, -0.584214, -0.232651, -0.450748, 0.855845, 0.14897, -0.159432, 0.240878, -0.456529, -0.323843, -0.778939, -0.834748, 0.396857, -0.089168, -0.82026, 0.852223, -0.82394, -0.182707, 0.474993, 0.179051, 0.301964, 0.372534, -0.867402, -0.324884, 0.973617, -0.657061, -0.339828, 0.10618, 0.630653, 0.496004, -0.295136, -0.402468, 0.464663, 0.653989, 0.237709, -0.223135, 0.041715, -0.796822, 0.109474, -0.605052, 0.112833, 0.923897, 0.248119, -0.966283, 0.482448, 0.176094, 0.042929, 0.996736, 0.755495, -0.500939, -0.288351, -0.933661, -0.592465, -0.182451, -0.267614, -0.388485]
out shape: (8, 1, 2)
out: [-0.088837, -0.538209, -0.647516, 0.265602, -0.858904, 0.756732, 0.594276, 0.821631, 0.669384, 0.651401, 0.249378, -0.289456, 0.831432, -0.775832, 0.923897, 0.248119]

[convolutional.Cropping2D.4] cropping (2,3) on 8x7x6 input, data_format='channels_last'


In [8]:
data_in_shape = (8, 7, 6)
L = Cropping2D(cropping=(2,3), data_format='channels_last')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(254)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.4'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (8, 7, 6)
in: [0.024124, 0.280236, -0.680013, -0.042458, -0.164273, 0.358409, 0.511014, -0.585272, -0.481578, 0.692702, 0.64189, -0.400252, -0.922248, -0.735105, -0.533918, 0.071402, 0.310474, 0.369868, 0.767931, -0.842066, -0.091189, 0.835301, -0.480484, 0.950819, -0.002131, 0.086491, -0.480947, 0.405572, -0.083803, -0.921447, -0.291545, 0.674087, -0.560444, 0.881432, 0.076544, 0.63549, -0.185686, -0.89067, 0.709257, -0.256164, -0.873627, 0.330906, -0.583426, -0.51286, 0.751485, 0.030077, -0.998662, 0.175588, 0.375807, -0.297778, -0.037149, -0.420746, -0.208204, 0.402424, -0.554688, -0.472029, 0.114337, 0.543154, -0.462202, -0.702131, -0.755152, 0.399132, -0.863878, 0.90242, -0.598578, 0.075729, -0.984584, -0.76995, -0.822436, 0.521519, -0.437606, 0.347583, 0.021953, 0.568181, 0.066634, 0.61829, -0.481882, 0.558214, 0.732847, -0.254362, 0.151457, 0.412434, -0.319702, -0.09085, 0.555778, 0.671859, -0.635198, 0.593103, -0.005002, -0.450857, 0.774167, -0.791044, -0.492777, -0.648434, -0.617374, -0.874596, 0.239172, -0.329255, -0.124378, -0.679687, -0.681269, 0.80559, 0.103336, 0.91569, -0.226668, 0.424476, 0.709728, -0.130921, -0.579856, 0.517511, 0.99394, -0.554744, 0.707467, -0.253984, -0.74488, 0.01915, -0.937241, 0.528811, 0.254405, -0.8029, 0.815474, -0.213763, 0.50079, -0.503395, 0.210021, -0.419686, 0.456614, 0.88218, -0.621765, 0.49563, -0.766721, 0.920173, 0.307054, -0.985315, 0.295907, -0.128988, 0.084168, 0.272106, 0.293384, 0.449009, 0.834872, -0.990419, -0.126779, 0.935827, -0.91633, -0.930113, -0.220291, 0.752014, 0.017501, -0.885473, -0.125301, -0.23717, -0.864103, 0.936996, -0.481888, 0.298993, 0.395852, -0.943194, 0.753792, -0.768603, -0.243377, 0.66616, 0.259826, 0.22605, 0.400443, 0.067723, -0.065587, -0.790676, 0.966871, -0.912638, 0.355244, -0.073181, 0.657103, 0.09007, -0.04048, 0.082647, 0.920559, 0.482534, -0.143117, -0.797387, 0.917364, 0.01561, -0.30353, 0.117881, -0.866152, 0.915653, 0.762948, -0.717669, 0.797181, 0.077211, 0.31406, 0.437614, -0.355494, 0.357562, -0.417286, -0.120372, -0.362672, 0.624621, 0.792828, 0.992511, 0.019958, -0.549153, -0.6375, -0.062739, -0.944945, -0.803626, 0.91846, 0.812719, -0.430243, -0.465176, 0.401421, -0.002137, 0.78021, 0.336852, -0.25664, -0.232235, -0.109423, -0.580016, 0.668781, 0.396865, -0.611044, 0.554487, -0.925514, 0.013001, 0.262365, 0.104009, -0.319333, -0.043008, -0.372638, 0.879342, -0.694172, 0.030522, 0.383238, 0.724839, -0.261347, 0.916554, 0.783577, 0.83219, 0.26797, 0.918329, -0.624446, 0.397899, 0.778276, -0.083593, 0.546356, 0.856049, 0.534116, -0.033082, -0.402185, -0.324623, -0.6421, -0.928564, -0.089948, 0.850967, 0.751319, -0.81352, -0.501143, -0.942755, 0.736537, -0.372481, 0.710303, -0.383222, 0.838276, -0.925631, -0.207597, 0.233132, -0.142178, -0.915557, -0.188239, -0.910884, -0.589853, -0.386997, -0.54772, -0.43656, -0.720973, -0.968259, 0.068156, 0.57743, -0.165393, -0.848904, -0.168921, -0.807576, 0.563418, -0.535884, -0.231633, 0.898762, -0.313521, -0.847164, -0.200325, -0.387748, -0.118249, 0.49213, -0.641636, 0.703024, -0.633493, 0.708461, -0.901467, 0.459244, 0.640117, -0.194619, -0.700387, -0.551761, -0.062893, -0.993202, -0.272018, 0.788043, -0.899394, -0.64761, 0.51654, 0.512262, 0.416768, 0.387547, 0.728484, 0.563032, 0.477188, 0.452492, 0.053106, -0.993751, -0.584517, 0.141826, 0.146845, 0.500992, -0.491195, 0.07354, 0.022871, -0.561667, 0.277436, 0.778925, -0.842296, 0.536302, -0.338116, -0.098279, 0.823896, -0.889689, 0.898098, 0.003918]
out shape: (4, 1, 6)
out: [0.103336, 0.91569, -0.226668, 0.424476, 0.709728, -0.130921, -0.91633, -0.930113, -0.220291, 0.752014, 0.017501, -0.885473, 0.762948, -0.717669, 0.797181, 0.077211, 0.31406, 0.437614, -0.372638, 0.879342, -0.694172, 0.030522, 0.383238, 0.724839]
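
The tuple shorthand cropping=(2, 3) is interpreted as a symmetric crop per spatial axis, i.e. ((2,2),(3,3)). A sketch of the equivalent slice, assuming that expansion and reusing the variables above:

sliced = data_in[2:-2, 3:-3, :]  # (8, 7, 6) -> (4, 1, 6)
assert sliced.shape == data_out_shape
assert np.allclose(result[0], sliced, atol=1e-6)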

[convolutional.Cropping2D.5] cropping 1 on 8x7x6 input, data_format='channels_last'


In [9]:
data_in_shape = (8, 7, 6)
L = Cropping2D(cropping=1, data_format='channels_last')

layer_0 = Input(shape=data_in_shape)
layer_1 = L(layer_0)
model = Model(inputs=layer_0, outputs=layer_1)

# generate random input data (use seed for reproducibility); Cropping2D has no weights
np.random.seed(255)
data_in = 2 * np.random.random(data_in_shape) - 1
result = model.predict(np.array([data_in]))
data_out_shape = result[0].shape
data_in_formatted = format_decimal(data_in.ravel().tolist())
data_out_formatted = format_decimal(result[0].ravel().tolist())
print('')
print('in shape:', data_in_shape)
print('in:', data_in_formatted)
print('out shape:', data_out_shape)
print('out:', data_out_formatted)

DATA['convolutional.Cropping2D.5'] = {
    'input': {'data': data_in_formatted, 'shape': data_in_shape},
    'expected': {'data': data_out_formatted, 'shape': data_out_shape}
}


in shape: (8, 7, 6)
in: [-0.072127, -0.553929, -0.355552, -0.936405, 0.556627, -0.482815, -0.225337, -0.640315, 0.023246, -0.638412, -0.797304, 0.284959, -0.569771, -0.685286, 0.002481, 0.398436, 0.11345, 0.416629, -0.526713, 0.962183, 0.021732, 0.922994, 0.07991, -0.164385, 0.461494, -0.982877, -0.142158, 0.175741, -0.124041, -0.875609, -0.528708, -0.911127, 0.782257, -0.509403, 0.573973, -0.151309, -0.895619, -0.721042, 0.483952, -0.745814, -0.588825, -0.154089, 0.423904, -0.262707, -0.517175, -0.535505, -0.266104, -0.46314, -0.216526, -0.864596, 0.841716, 0.402031, 0.873588, -0.457428, -0.348313, 0.910179, -0.408277, 0.734388, -0.024081, -0.550588, 0.293097, -0.542286, -0.269648, 0.213238, 0.514513, 0.266344, -0.112736, 0.845888, 0.188158, 0.728423, 0.754239, 0.909679, -0.298907, 0.890225, -0.176317, 0.710818, 0.522503, 0.564153, -0.742255, 0.183133, -0.478304, 0.306737, -0.556934, -0.411964, 0.641192, 0.868657, 0.52633, -0.519934, 0.997507, 0.516068, -0.557744, 0.956131, 0.239102, 0.539335, -0.325715, -0.830554, 0.189992, -0.637541, -0.375955, -0.645287, 0.483284, -0.541192, -0.802786, 0.454783, -0.952301, 0.916586, 0.169218, -0.523467, -0.405046, -0.569919, 0.408988, 0.823371, -0.074026, 0.605709, 0.473457, -0.195274, 0.48094, 0.836515, -0.045046, 0.619917, -0.763448, -0.439789, -0.949154, 0.025215, 0.073811, 0.671451, -0.284162, 0.141033, 0.631935, 0.830601, -0.267386, -0.074489, -0.195287, -0.411611, 0.009787, 0.018395, 0.477619, -0.992273, -0.415425, 0.050996, -0.28197, 0.190961, 0.80039, 0.963981, -0.828833, 0.628873, 0.944476, 0.989284, 0.292435, -0.135812, 0.817257, 0.078638, -0.719863, -0.31961, 0.38714, 0.466882, 0.352137, 0.213046, -0.772962, -0.200157, -0.034438, -0.622181, -0.912614, 0.088619, -0.919166, 0.362514, -0.860038, -0.650449, -0.175857, 0.86567, -0.511448, -0.372431, -0.218084, 0.5096, 0.725654, -0.819892, 0.430089, -0.579532, -0.103142, -0.845316, 0.180956, 0.257962, 0.720379, 0.048086, 0.297728, 0.333669, 0.623466, 0.714411, -0.131609, 0.64756, 0.369304, -0.572741, -0.225815, 0.669311, -0.253786, -0.674851, -0.614704, 0.554864, 0.125116, -0.810339, 0.2588, -0.275579, -0.20522, -0.568676, 0.816795, -0.71788, 0.852799, 0.562966, -0.760958, 0.695122, 0.891104, -0.155949, 0.865, 0.401708, -0.330151, 0.965756, 0.468588, 0.835171, 0.591632, 0.056241, -0.324038, 0.938888, -0.890862, 0.957758, 0.780012, -0.560753, -0.297767, 0.725068, -0.001314, -0.927054, -0.966923, -0.383373, -0.01167, -0.206478, -0.670776, 0.119641, -0.880218, 0.63602, -0.682603, -0.542413, -0.722915, -0.599202, -0.058567, -0.376446, 0.958634, -0.769698, 0.241078, 0.700414, -0.490651, 0.377774, -0.430236, 0.234139, 0.10109, -0.652245, 0.356743, 0.056223, 0.738797, -0.165398, 0.715225, 0.684639, 0.209056, 0.21576, -0.20409, 0.066743, 0.25442, -0.463116, 0.668761, 0.676056, 0.643255, -0.189575, -0.927627, -0.625871, -0.361955, 0.704399, 0.731502, 0.680832, -0.369761, 0.018435, -0.761988, 0.966888, -0.400392, -0.109436, 0.2672, 0.992306, -0.075279, 0.700069, 0.916605, 0.135154, -0.997403, -0.270679, 0.673764, -0.946554, 0.177582, 0.987289, 0.70395, -0.278027, 0.143042, -0.149866, -0.35637, 0.402414, -0.707286, -0.003026, -0.384013, -0.422233, -0.527134, 0.334918, -0.641306, 0.266584, -0.312698, -0.094288, -0.344686, 0.19365, -0.165182, 0.094844, 0.442193, 0.660109, 0.127797, -0.928351, -0.336308, 0.852677, 0.957003, -0.803783, -0.720178, 0.779735, 0.322964, 0.343773, -0.906857, -0.971971, -0.528146, -0.849202, -0.571979, 0.425754, -0.056767, -0.288022, -0.003958, -0.270402]
out shape: (6, 5, 6)
out: [-0.216526, -0.864596, 0.841716, 0.402031, 0.873588, -0.457428, -0.348313, 0.910179, -0.408277, 0.734388, -0.024081, -0.550588, 0.293097, -0.542286, -0.269648, 0.213238, 0.514513, 0.266344, -0.112736, 0.845888, 0.188158, 0.728423, 0.754239, 0.909679, -0.298907, 0.890225, -0.176317, 0.710818, 0.522503, 0.564153, -0.557744, 0.956131, 0.239102, 0.539335, -0.325715, -0.830554, 0.189992, -0.637541, -0.375955, -0.645287, 0.483284, -0.541192, -0.802786, 0.454783, -0.952301, 0.916586, 0.169218, -0.523467, -0.405046, -0.569919, 0.408988, 0.823371, -0.074026, 0.605709, 0.473457, -0.195274, 0.48094, 0.836515, -0.045046, 0.619917, -0.195287, -0.411611, 0.009787, 0.018395, 0.477619, -0.992273, -0.415425, 0.050996, -0.28197, 0.190961, 0.80039, 0.963981, -0.828833, 0.628873, 0.944476, 0.989284, 0.292435, -0.135812, 0.817257, 0.078638, -0.719863, -0.31961, 0.38714, 0.466882, 0.352137, 0.213046, -0.772962, -0.200157, -0.034438, -0.622181, 0.725654, -0.819892, 0.430089, -0.579532, -0.103142, -0.845316, 0.180956, 0.257962, 0.720379, 0.048086, 0.297728, 0.333669, 0.623466, 0.714411, -0.131609, 0.64756, 0.369304, -0.572741, -0.225815, 0.669311, -0.253786, -0.674851, -0.614704, 0.554864, 0.125116, -0.810339, 0.2588, -0.275579, -0.20522, -0.568676, 0.468588, 0.835171, 0.591632, 0.056241, -0.324038, 0.938888, -0.890862, 0.957758, 0.780012, -0.560753, -0.297767, 0.725068, -0.001314, -0.927054, -0.966923, -0.383373, -0.01167, -0.206478, -0.670776, 0.119641, -0.880218, 0.63602, -0.682603, -0.542413, -0.722915, -0.599202, -0.058567, -0.376446, 0.958634, -0.769698, 0.715225, 0.684639, 0.209056, 0.21576, -0.20409, 0.066743, 0.25442, -0.463116, 0.668761, 0.676056, 0.643255, -0.189575, -0.927627, -0.625871, -0.361955, 0.704399, 0.731502, 0.680832, -0.369761, 0.018435, -0.761988, 0.966888, -0.400392, -0.109436, 0.2672, 0.992306, -0.075278, 0.700069, 0.916605, 0.135154]
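
Finally, a single integer is the broadest shorthand: cropping=1 expands to ((1,1),(1,1)), which is why the 8x7x6 input shrinks to 6x5x6 here. The equivalent slice, again only as a sketch against the in-scope variables:

sliced = data_in[1:-1, 1:-1, :]  # (8, 7, 6) -> (6, 5, 6)
assert sliced.shape == data_out_shape
assert np.allclose(result[0], sliced, atol=1e-6)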

export for Keras.js tests


In [10]:
print(json.dumps(DATA))


{"convolutional.Cropping2D.0": {"input": {"shape": [3, 5, 4], "data": [-0.570441, -0.454673, -0.285321, 0.237249, 0.282682, 0.428035, 0.160547, -0.332203, 0.546391, 0.272735, 0.010827, -0.763164, -0.442696, 0.381948, -0.676994, 0.753553, -0.031788, 0.915329, -0.738844, 0.269075, 0.434091, 0.991585, -0.944288, 0.258834, 0.162138, 0.565201, -0.492094, 0.170854, -0.139788, -0.710674, 0.406968, 0.705926, -0.094137, -0.793497, -0.040684, 0.522292, 0.490496, -0.651771, 0.293113, -0.580922, -0.396436, 0.864522, -0.411659, 0.317379, 0.771844, 0.507235, 0.067782, 0.922641, -0.944205, 0.383713, -0.380187, -0.875176, 0.799314, -0.681818, -0.761323, 0.768315, -0.298568, -0.611688, -0.768656, 0.525693]}, "expected": {"shape": [1, 3, 4], "data": [0.162138, 0.565201, -0.492094, 0.170854, -0.139788, -0.710674, 0.406968, 0.705926, -0.094137, -0.793497, -0.040684, 0.522292]}}, "convolutional.Cropping2D.1": {"input": {"shape": [3, 5, 4], "data": [0.275222, -0.793967, -0.468107, -0.841484, -0.295362, 0.78175, 0.068787, -0.261747, -0.625733, -0.042907, 0.861141, 0.85267, 0.956439, 0.717838, -0.99869, -0.963008, 0.013277, -0.180306, 0.832137, -0.385252, -0.524308, 0.659706, -0.905127, 0.526292, 0.832569, 0.084455, 0.23838, -0.046178, -0.735871, 0.776883, -0.394643, 0.498903, 0.029584, -0.17332, 0.628159, 0.445074, 0.638899, 0.597286, -0.055729, -0.825457, -0.675992, 0.931468, 0.666841, 0.624388, -0.412075, 0.716772, -0.053248, -0.0025, -0.743212, 0.967964, -0.259998, -0.14653, -0.823771, -0.506749, -0.94293, -0.452047, -0.492538, 0.733182, -0.219022, -0.514951]}, "expected": {"shape": [3, 3, 2], "data": [0.78175, 0.068787, -0.042907, 0.861141, 0.717838, -0.99869, 0.084455, 0.23838, 0.776883, -0.394643, -0.17332, 0.628159, 0.716772, -0.053248, 0.967964, -0.259998, -0.506749, -0.94293]}}, "convolutional.Cropping2D.2": {"input": {"shape": [8, 7, 6], "data": [-0.989173, -0.133618, -0.505338, 0.023259, 0.503982, -0.303769, -0.436321, 0.793911, 0.416102, 0.806405, -0.098342, -0.738022, -0.982676, 0.805073, 0.741244, -0.941634, -0.253526, -0.136544, -0.295772, 0.207565, -0.517246, -0.686963, -0.176235, -0.354111, -0.862411, -0.969822, 0.200074, 0.290718, -0.038623, 0.294839, 0.247968, 0.557946, -0.455596, 0.6624, 0.879529, -0.466772, 0.40423, 0.213794, 0.645662, -0.044634, -0.552595, 0.771242, -0.131944, -0.172725, 0.700856, -0.001994, 0.606737, -0.593306, 0.898062, -0.203771, 0.645788, 0.596358, -0.571654, -0.636819, -0.367376, -0.892607, -0.123297, 0.748404, -0.783012, 0.061963, -0.908546, -0.988517, 0.97308, 0.483845, 0.967956, 0.95863, 0.633963, -0.109138, -0.615821, -0.479821, 0.42827, 0.761351, 0.108265, -0.835679, -0.200177, 0.556308, -0.346135, -0.591775, 0.888818, 0.088756, 0.887542, 0.231398, -0.062874, 0.379803, 0.248259, -0.004909, -0.694836, 0.246241, 0.576698, 0.877474, 0.353439, 0.528707, 0.585634, -0.985339, 0.438776, -0.314863, 0.784171, 0.12525, 0.889265, -0.644213, 0.052514, 0.733269, 0.099278, -0.141949, 0.498542, 0.735955, 0.760313, -0.660278, -0.604273, 0.199522, 0.346309, -0.488666, 0.706332, 0.995574, 0.234642, 0.042779, 0.583139, 0.159919, 0.104808, 0.232585, 0.625624, 0.748204, -0.878399, 0.514601, -0.392989, -0.835864, 0.247256, -0.889704, -0.776558, 0.069902, -0.982291, 0.139565, 0.559765, 0.364254, -0.167329, -0.027032, -0.396669, 0.887928, 0.24142, -0.950083, -0.354758, 0.815999, -0.533224, 0.001955, -0.997966, -0.716888, 0.861882, -0.883, 0.797625, 0.602008, -0.633245, -0.710132, 0.608369, -0.784398, 0.576564, 0.633279, 0.542795, 0.90345, 0.457414, 0.242734, 0.310622, -0.82517, 0.262998, 
-0.571046, 0.936527, 0.878469, -0.347791, -0.859749, -0.952931, -0.129702, 0.685477, -0.796418, 0.377681, 0.858414, -0.835134, 0.014816, 0.103085, 0.850919, -0.005764, 0.715943, 0.003942, 0.746505, -0.422674, 0.819656, 0.037147, 0.078259, 0.609219, 0.240121, -0.457013, -0.947625, -0.598543, 0.754378, 0.694688, 0.001741, -0.731877, -0.442437, 0.116966, 0.360755, 0.396709, 0.018889, -0.243222, 0.03233, 0.154516, 0.04915, 0.942633, -0.225662, 0.778155, 0.927135, -0.298936, -0.73013, -0.978144, -0.838932, 0.085808, -0.636771, -0.209125, 0.739682, 0.447976, -0.737185, 0.645007, -0.038338, -0.031764, -0.667117, -0.075763, 0.701876, 0.505738, 0.997181, -0.554428, -0.862689, 0.966555, 0.401094, -0.489316, 0.197598, 0.440944, 0.437652, -0.078096, 0.271921, 0.702964, -0.154695, 0.964746, 0.089738, -0.530063, -0.374368, -0.501949, -0.435823, -0.059041, 0.059762, 0.18368, -0.879219, -0.853283, 0.463376, 0.790678, -0.906068, -0.772338, 0.117419, 0.409168, 0.219139, -0.017059, 0.552391, 0.835856, -0.710045, 0.404615, -0.182959, -0.331881, -0.395508, -0.730692, 0.252354, 0.915251, -0.79357, 0.70079, -0.011832, -0.18324, 0.070365, 0.769189, 0.383026, -0.868293, 0.800455, 0.727587, -0.62417, -0.300598, -0.63724, -0.08886, 0.176103, 0.086305, -0.782202, -0.719459, -0.153585, -0.037527, 0.923902, 0.043134, -0.500307, -0.862448, -0.591515, 0.803712, 0.066251, -0.016947, 0.412293, -0.004009, 0.370996, -0.556311, -0.286518, -0.454324, 0.069928, -0.002182, -0.425322, 0.99516, 0.537532, 0.054711, 0.22533, -0.238724, 0.495853, 0.901436, 0.405811, -0.430268, -0.542533, -0.663813, -0.007121, 0.462074, -0.349408, 0.257701, 0.493632, -0.674434, -0.235252, 0.039342, 0.166896, 0.724501, 0.41962, -0.566634, 0.811497, 0.614648, -0.109631, -0.830493, -0.022923, -0.890479, 0.487839, 0.967617, 0.629215]}, "expected": {"shape": [2, 3, 6], "data": [0.609218, 0.240121, -0.457013, -0.947625, -0.598543, 0.754378, 0.694688, 0.001741, -0.731877, -0.442437, 0.116966, 0.360755, 0.396709, 0.018889, -0.243222, 0.03233, 0.154516, 0.04915, 0.966555, 0.401094, -0.489316, 0.197598, 0.440944, 0.437652, -0.078096, 0.271921, 0.702964, -0.154695, 0.964746, 0.089738, -0.530063, -0.374368, -0.501949, -0.435823, -0.059041, 0.059762]}}, "convolutional.Cropping2D.3": {"input": {"shape": [8, 7, 6], "data": [-0.47588, 0.366985, 0.040173, 0.015578, -0.906159, 0.241982, -0.771299, -0.443554, -0.56404, -0.17751, 0.541277, -0.233327, 0.024369, 0.858275, 0.496191, 0.980574, -0.59522, 0.480899, 0.392553, -0.191718, 0.055121, 0.289836, -0.498339, 0.800408, 0.132679, -0.716649, 0.840092, -0.088837, -0.538209, -0.580887, -0.370128, -0.924933, -0.161736, -0.205619, 0.793729, -0.354472, 0.687519, 0.272041, -0.943352, -0.730959, -0.330419, -0.479307, 0.520387, 0.137906, 0.897598, 0.869815, 0.978562, 0.731387, 0.084559, 0.05709, -0.645057, 0.021524, -0.051466, -0.312923, 0.615681, -0.489544, -0.152496, 0.450475, 0.758223, 0.217489, 0.013859, 0.495027, 0.383425, -0.825141, 0.690579, 0.579147, 0.282404, 0.941596, -0.707396, -0.647516, 0.265602, 0.87606, 0.739125, 0.286697, 0.09401, 0.526318, -0.879911, 0.453206, -0.280944, 0.090286, 0.208239, 0.162421, -0.295482, -0.426305, 0.834307, 0.183841, -0.474458, -0.811003, -0.333046, 0.893681, -0.912594, -0.962315, -0.51711, -0.856975, 0.658169, 0.85087, 0.84138, 0.34874, -0.473288, 0.796885, 0.422969, 0.636483, 0.464166, -0.082329, -0.954513, -0.121966, -0.798025, -0.670698, 0.104121, -0.126565, -0.417565, -0.858904, 0.756732, -0.65472, -0.574664, -0.996033, 0.846295, -0.847627, -0.273482, -0.509544, 0.400331, 0.534662, 
-0.87151, 0.404809, -0.44717, 0.822294, 0.503082, 0.270164, -0.658415, -0.701712, 0.746818, 0.611278, 0.334617, 0.211907, -0.845499, 0.944048, -0.380413, -0.755165, 0.704526, -0.67208, 0.715308, -0.225126, 0.853273, 0.086899, 0.011703, 0.656981, 0.981653, 0.595181, -0.025823, -0.758488, -0.56116, -0.364684, 0.594148, 0.594276, 0.821631, 0.979455, 0.047796, -0.816002, -0.466124, 0.621008, 0.502493, 0.953933, -0.159216, 0.852515, -0.944342, -0.730746, -0.129779, 0.524284, -0.422867, 0.391181, 0.547067, 0.779583, -0.374275, -0.048478, -0.49625, -0.128063, -0.600403, 0.686605, -0.997366, 0.040306, -0.251598, 0.67078, 0.576913, 0.913989, 0.618136, -0.755829, -0.970582, -0.670476, 0.436509, -0.618264, 0.73819, 0.802415, -0.552254, -0.017248, -0.172653, 0.669384, 0.651401, -0.227696, -0.134427, -0.440003, 0.354427, -0.022474, -0.578134, 0.497115, -0.867034, 0.986613, -0.346709, 0.028696, -0.116495, -0.206995, -0.727949, -0.186437, 0.389951, 0.695853, 0.562059, 0.815718, 0.168899, 0.501429, -0.820591, 0.049532, -0.908312, -0.559237, -0.922854, -0.806119, -0.654867, 0.375783, 0.922612, 0.763925, -0.210298, -0.554287, 0.086046, -0.366844, -0.106961, -0.910715, 0.303629, 0.149593, 0.888866, 0.249378, -0.289456, 0.774148, 0.939393, 0.572596, 0.818565, 0.545632, 0.117497, 0.295189, -0.238975, -0.051112, -0.68902, 0.796684, 0.58131, 0.798951, 0.392432, 0.951852, 0.715108, 0.229935, -0.156429, 0.909932, 0.780446, 0.32528, -0.320843, 0.431677, 0.931402, 0.295305, 0.731226, -0.489043, -0.737128, 0.844895, 0.399776, 0.684985, -0.249303, -0.523916, 0.700168, -0.382668, 0.754608, -0.351194, 0.050378, 0.587836, -0.780805, 0.831432, -0.775832, -0.584214, -0.232651, -0.450748, 0.855845, 0.14897, -0.159432, 0.240878, -0.456529, -0.323843, -0.778939, -0.834748, 0.396857, -0.089168, -0.82026, 0.852223, -0.82394, -0.182707, 0.474993, 0.179051, 0.301964, 0.372534, -0.867402, -0.324884, 0.973617, -0.657061, -0.339828, 0.10618, 0.630653, 0.496004, -0.295136, -0.402468, 0.464663, 0.653989, 0.237709, -0.223135, 0.041715, -0.796822, 0.109474, -0.605052, 0.112833, 0.923897, 0.248119, -0.966283, 0.482448, 0.176094, 0.042929, 0.996736, 0.755495, -0.500939, -0.288351, -0.933661, -0.592465, -0.182451, -0.267614, -0.388485]}, "expected": {"shape": [8, 1, 2], "data": [-0.088837, -0.538209, -0.647516, 0.265602, -0.858904, 0.756732, 0.594276, 0.821631, 0.669384, 0.651401, 0.249378, -0.289456, 0.831432, -0.775832, 0.923897, 0.248119]}}, "convolutional.Cropping2D.4": {"input": {"shape": [8, 7, 6], "data": [0.024124, 0.280236, -0.680013, -0.042458, -0.164273, 0.358409, 0.511014, -0.585272, -0.481578, 0.692702, 0.64189, -0.400252, -0.922248, -0.735105, -0.533918, 0.071402, 0.310474, 0.369868, 0.767931, -0.842066, -0.091189, 0.835301, -0.480484, 0.950819, -0.002131, 0.086491, -0.480947, 0.405572, -0.083803, -0.921447, -0.291545, 0.674087, -0.560444, 0.881432, 0.076544, 0.63549, -0.185686, -0.89067, 0.709257, -0.256164, -0.873627, 0.330906, -0.583426, -0.51286, 0.751485, 0.030077, -0.998662, 0.175588, 0.375807, -0.297778, -0.037149, -0.420746, -0.208204, 0.402424, -0.554688, -0.472029, 0.114337, 0.543154, -0.462202, -0.702131, -0.755152, 0.399132, -0.863878, 0.90242, -0.598578, 0.075729, -0.984584, -0.76995, -0.822436, 0.521519, -0.437606, 0.347583, 0.021953, 0.568181, 0.066634, 0.61829, -0.481882, 0.558214, 0.732847, -0.254362, 0.151457, 0.412434, -0.319702, -0.09085, 0.555778, 0.671859, -0.635198, 0.593103, -0.005002, -0.450857, 0.774167, -0.791044, -0.492777, -0.648434, -0.617374, -0.874596, 0.239172, -0.329255, -0.124378, -0.679687, 
-0.681269, 0.80559, 0.103336, 0.91569, -0.226668, 0.424476, 0.709728, -0.130921, -0.579856, 0.517511, 0.99394, -0.554744, 0.707467, -0.253984, -0.74488, 0.01915, -0.937241, 0.528811, 0.254405, -0.8029, 0.815474, -0.213763, 0.50079, -0.503395, 0.210021, -0.419686, 0.456614, 0.88218, -0.621765, 0.49563, -0.766721, 0.920173, 0.307054, -0.985315, 0.295907, -0.128988, 0.084168, 0.272106, 0.293384, 0.449009, 0.834872, -0.990419, -0.126779, 0.935827, -0.91633, -0.930113, -0.220291, 0.752014, 0.017501, -0.885473, -0.125301, -0.23717, -0.864103, 0.936996, -0.481888, 0.298993, 0.395852, -0.943194, 0.753792, -0.768603, -0.243377, 0.66616, 0.259826, 0.22605, 0.400443, 0.067723, -0.065587, -0.790676, 0.966871, -0.912638, 0.355244, -0.073181, 0.657103, 0.09007, -0.04048, 0.082647, 0.920559, 0.482534, -0.143117, -0.797387, 0.917364, 0.01561, -0.30353, 0.117881, -0.866152, 0.915653, 0.762948, -0.717669, 0.797181, 0.077211, 0.31406, 0.437614, -0.355494, 0.357562, -0.417286, -0.120372, -0.362672, 0.624621, 0.792828, 0.992511, 0.019958, -0.549153, -0.6375, -0.062739, -0.944945, -0.803626, 0.91846, 0.812719, -0.430243, -0.465176, 0.401421, -0.002137, 0.78021, 0.336852, -0.25664, -0.232235, -0.109423, -0.580016, 0.668781, 0.396865, -0.611044, 0.554487, -0.925514, 0.013001, 0.262365, 0.104009, -0.319333, -0.043008, -0.372638, 0.879342, -0.694172, 0.030522, 0.383238, 0.724839, -0.261347, 0.916554, 0.783577, 0.83219, 0.26797, 0.918329, -0.624446, 0.397899, 0.778276, -0.083593, 0.546356, 0.856049, 0.534116, -0.033082, -0.402185, -0.324623, -0.6421, -0.928564, -0.089948, 0.850967, 0.751319, -0.81352, -0.501143, -0.942755, 0.736537, -0.372481, 0.710303, -0.383222, 0.838276, -0.925631, -0.207597, 0.233132, -0.142178, -0.915557, -0.188239, -0.910884, -0.589853, -0.386997, -0.54772, -0.43656, -0.720973, -0.968259, 0.068156, 0.57743, -0.165393, -0.848904, -0.168921, -0.807576, 0.563418, -0.535884, -0.231633, 0.898762, -0.313521, -0.847164, -0.200325, -0.387748, -0.118249, 0.49213, -0.641636, 0.703024, -0.633493, 0.708461, -0.901467, 0.459244, 0.640117, -0.194619, -0.700387, -0.551761, -0.062893, -0.993202, -0.272018, 0.788043, -0.899394, -0.64761, 0.51654, 0.512262, 0.416768, 0.387547, 0.728484, 0.563032, 0.477188, 0.452492, 0.053106, -0.993751, -0.584517, 0.141826, 0.146845, 0.500992, -0.491195, 0.07354, 0.022871, -0.561667, 0.277436, 0.778925, -0.842296, 0.536302, -0.338116, -0.098279, 0.823896, -0.889689, 0.898098, 0.003918]}, "expected": {"shape": [4, 1, 6], "data": [0.103336, 0.91569, -0.226668, 0.424476, 0.709728, -0.130921, -0.91633, -0.930113, -0.220291, 0.752014, 0.017501, -0.885473, 0.762948, -0.717669, 0.797181, 0.077211, 0.31406, 0.437614, -0.372638, 0.879342, -0.694172, 0.030522, 0.383238, 0.724839]}}, "convolutional.Cropping2D.5": {"input": {"shape": [8, 7, 6], "data": [-0.072127, -0.553929, -0.355552, -0.936405, 0.556627, -0.482815, -0.225337, -0.640315, 0.023246, -0.638412, -0.797304, 0.284959, -0.569771, -0.685286, 0.002481, 0.398436, 0.11345, 0.416629, -0.526713, 0.962183, 0.021732, 0.922994, 0.07991, -0.164385, 0.461494, -0.982877, -0.142158, 0.175741, -0.124041, -0.875609, -0.528708, -0.911127, 0.782257, -0.509403, 0.573973, -0.151309, -0.895619, -0.721042, 0.483952, -0.745814, -0.588825, -0.154089, 0.423904, -0.262707, -0.517175, -0.535505, -0.266104, -0.46314, -0.216526, -0.864596, 0.841716, 0.402031, 0.873588, -0.457428, -0.348313, 0.910179, -0.408277, 0.734388, -0.024081, -0.550588, 0.293097, -0.542286, -0.269648, 0.213238, 0.514513, 0.266344, -0.112736, 0.845888, 0.188158, 0.728423, 0.754239, 
0.909679, -0.298907, 0.890225, -0.176317, 0.710818, 0.522503, 0.564153, -0.742255, 0.183133, -0.478304, 0.306737, -0.556934, -0.411964, 0.641192, 0.868657, 0.52633, -0.519934, 0.997507, 0.516068, -0.557744, 0.956131, 0.239102, 0.539335, -0.325715, -0.830554, 0.189992, -0.637541, -0.375955, -0.645287, 0.483284, -0.541192, -0.802786, 0.454783, -0.952301, 0.916586, 0.169218, -0.523467, -0.405046, -0.569919, 0.408988, 0.823371, -0.074026, 0.605709, 0.473457, -0.195274, 0.48094, 0.836515, -0.045046, 0.619917, -0.763448, -0.439789, -0.949154, 0.025215, 0.073811, 0.671451, -0.284162, 0.141033, 0.631935, 0.830601, -0.267386, -0.074489, -0.195287, -0.411611, 0.009787, 0.018395, 0.477619, -0.992273, -0.415425, 0.050996, -0.28197, 0.190961, 0.80039, 0.963981, -0.828833, 0.628873, 0.944476, 0.989284, 0.292435, -0.135812, 0.817257, 0.078638, -0.719863, -0.31961, 0.38714, 0.466882, 0.352137, 0.213046, -0.772962, -0.200157, -0.034438, -0.622181, -0.912614, 0.088619, -0.919166, 0.362514, -0.860038, -0.650449, -0.175857, 0.86567, -0.511448, -0.372431, -0.218084, 0.5096, 0.725654, -0.819892, 0.430089, -0.579532, -0.103142, -0.845316, 0.180956, 0.257962, 0.720379, 0.048086, 0.297728, 0.333669, 0.623466, 0.714411, -0.131609, 0.64756, 0.369304, -0.572741, -0.225815, 0.669311, -0.253786, -0.674851, -0.614704, 0.554864, 0.125116, -0.810339, 0.2588, -0.275579, -0.20522, -0.568676, 0.816795, -0.71788, 0.852799, 0.562966, -0.760958, 0.695122, 0.891104, -0.155949, 0.865, 0.401708, -0.330151, 0.965756, 0.468588, 0.835171, 0.591632, 0.056241, -0.324038, 0.938888, -0.890862, 0.957758, 0.780012, -0.560753, -0.297767, 0.725068, -0.001314, -0.927054, -0.966923, -0.383373, -0.01167, -0.206478, -0.670776, 0.119641, -0.880218, 0.63602, -0.682603, -0.542413, -0.722915, -0.599202, -0.058567, -0.376446, 0.958634, -0.769698, 0.241078, 0.700414, -0.490651, 0.377774, -0.430236, 0.234139, 0.10109, -0.652245, 0.356743, 0.056223, 0.738797, -0.165398, 0.715225, 0.684639, 0.209056, 0.21576, -0.20409, 0.066743, 0.25442, -0.463116, 0.668761, 0.676056, 0.643255, -0.189575, -0.927627, -0.625871, -0.361955, 0.704399, 0.731502, 0.680832, -0.369761, 0.018435, -0.761988, 0.966888, -0.400392, -0.109436, 0.2672, 0.992306, -0.075279, 0.700069, 0.916605, 0.135154, -0.997403, -0.270679, 0.673764, -0.946554, 0.177582, 0.987289, 0.70395, -0.278027, 0.143042, -0.149866, -0.35637, 0.402414, -0.707286, -0.003026, -0.384013, -0.422233, -0.527134, 0.334918, -0.641306, 0.266584, -0.312698, -0.094288, -0.344686, 0.19365, -0.165182, 0.094844, 0.442193, 0.660109, 0.127797, -0.928351, -0.336308, 0.852677, 0.957003, -0.803783, -0.720178, 0.779735, 0.322964, 0.343773, -0.906857, -0.971971, -0.528146, -0.849202, -0.571979, 0.425754, -0.056767, -0.288022, -0.003958, -0.270402]}, "expected": {"shape": [6, 5, 6], "data": [-0.216526, -0.864596, 0.841716, 0.402031, 0.873588, -0.457428, -0.348313, 0.910179, -0.408277, 0.734388, -0.024081, -0.550588, 0.293097, -0.542286, -0.269648, 0.213238, 0.514513, 0.266344, -0.112736, 0.845888, 0.188158, 0.728423, 0.754239, 0.909679, -0.298907, 0.890225, -0.176317, 0.710818, 0.522503, 0.564153, -0.557744, 0.956131, 0.239102, 0.539335, -0.325715, -0.830554, 0.189992, -0.637541, -0.375955, -0.645287, 0.483284, -0.541192, -0.802786, 0.454783, -0.952301, 0.916586, 0.169218, -0.523467, -0.405046, -0.569919, 0.408988, 0.823371, -0.074026, 0.605709, 0.473457, -0.195274, 0.48094, 0.836515, -0.045046, 0.619917, -0.195287, -0.411611, 0.009787, 0.018395, 0.477619, -0.992273, -0.415425, 0.050996, -0.28197, 0.190961, 0.80039, 0.963981, 
-0.828833, 0.628873, 0.944476, 0.989284, 0.292435, -0.135812, 0.817257, 0.078638, -0.719863, -0.31961, 0.38714, 0.466882, 0.352137, 0.213046, -0.772962, -0.200157, -0.034438, -0.622181, 0.725654, -0.819892, 0.430089, -0.579532, -0.103142, -0.845316, 0.180956, 0.257962, 0.720379, 0.048086, 0.297728, 0.333669, 0.623466, 0.714411, -0.131609, 0.64756, 0.369304, -0.572741, -0.225815, 0.669311, -0.253786, -0.674851, -0.614704, 0.554864, 0.125116, -0.810339, 0.2588, -0.275579, -0.20522, -0.568676, 0.468588, 0.835171, 0.591632, 0.056241, -0.324038, 0.938888, -0.890862, 0.957758, 0.780012, -0.560753, -0.297767, 0.725068, -0.001314, -0.927054, -0.966923, -0.383373, -0.01167, -0.206478, -0.670776, 0.119641, -0.880218, 0.63602, -0.682603, -0.542413, -0.722915, -0.599202, -0.058567, -0.376446, 0.958634, -0.769698, 0.715225, 0.684639, 0.209056, 0.21576, -0.20409, 0.066743, 0.25442, -0.463116, 0.668761, 0.676056, 0.643255, -0.189575, -0.927627, -0.625871, -0.361955, 0.704399, 0.731502, 0.680832, -0.369761, 0.018435, -0.761988, 0.966888, -0.400392, -0.109436, 0.2672, 0.992306, -0.075278, 0.700069, 0.916605, 0.135154]}}}

In [ ]: