In [1]:
import matplotlib.pyplot as plt
import numpy as np
from utils.weights import get_weights
In [2]:
weights_dict = get_weights()
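A quick look at what `get_weights()` returns helps when reading the plots below. This is a hedged sketch that assumes it yields a dict mapping layer names to `{'kernel': ..., 'bias': ...}` numpy arrays, which is how the dict is indexed in the following cells.
In [ ]:
# sketch: inspect the structure of weights_dict
# (assumes each entry holds 'kernel' and 'bias' arrays, as used in the cells below)
for name, params in weights_dict.items():
    shapes = {k: v.shape for k, v in params.items()}
    print(name, shapes)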
In [3]:
# plot the weight distribution of convolutional layers
for i in range(4):
    conv_name = 'conv2d_%d' % (i + 1)
    weights = weights_dict[conv_name]['kernel'].flatten()
    plt.figure()
    plt.hist(weights, bins=16)
    plt.title(conv_name)
    plt.show()
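If the value ranges are hard to read off the histograms, the per-layer extrema can also be printed directly, mirroring what the dense-layer cell below does. A minimal sketch:
In [ ]:
# optional: print the value range of each conv kernel, as done for the dense layers below
for i in range(4):
    conv_name = 'conv2d_%d' % (i + 1)
    weights = weights_dict[conv_name]['kernel'].flatten()
    print(conv_name, 'max:', weights.max(), 'min:', weights.min())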
In [4]:
# plot the bias distribution of convolutional layers
for i in range(4):
    conv_name = 'conv2d_%d' % (i + 1)
    biases = weights_dict[conv_name]['bias'].flatten()
    plt.figure()
    plt.hist(biases, bins=16)
    plt.title(conv_name)
    plt.show()
In [13]:
# plot the weight distribution of dense layers
for i in range(2):
    dense_name = 'dense_%d' % (i + 1)
    weights = weights_dict[dense_name]['kernel'].flatten()
    print('max:', max(weights), 'min:', min(weights))
    plt.figure()
    plt.hist(weights, bins=32)
    plt.title(dense_name)
    plt.show()
In [12]:
import h5py
import os
# load data of output of each layer of 10000 testing samples from cifar10
data_each_layer = h5py.File('data/cifar-10_output.h5', 'r')
layers = list(data_each_layer['x_test_group'].keys())
print(layers)
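Before plotting, it can help to check the shape of each stored output. This sketch assumes each entry under `x_test_group` is an HDF5 dataset indexed by sample, as the slicing in the next cell implies.
In [ ]:
# sketch: report the dataset shape for each layer output
group = data_each_layer['x_test_group']
for layer in layers:
    print(layer, group[layer].shape)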
In [20]:
useful_layers = ['conv2d_1_input', 'conv2d_1', 'batch_normalization_1', 'conv2d_2', 'batch_normalization_2',
                 'conv2d_3', 'batch_normalization_3', 'conv2d_4', 'batch_normalization_4', 'dense_1', 'dense_2']
# plot the output of each layer
for layer in useful_layers:
    data = data_each_layer['x_test_group'][layer][0:1000].flatten()
    print('max:', max(data), 'min:', min(data))
    plt.figure()
    plt.hist(data, bins=16)
    plt.title(layer)
    plt.show()
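Once the histograms are plotted, the HDF5 file can be closed to release the handle:
In [ ]:
# close the HDF5 file after reading the layer outputs
data_each_layer.close()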
In [ ]: