Keras model visualizations


In [1]:
import numpy as np
np.random.seed(1337)
import datetime
from IPython.display import SVG
from keras.datasets import mnist
from keras import activations
from keras.layers import Dense, Dropout, Flatten, Input, concatenate, Conv1D, Conv2D, MaxPooling1D, MaxPooling2D
from keras.models import Sequential, load_model
from keras.utils import plot_model
from keras.utils.vis_utils import model_to_dot
from matplotlib import gridspec
from matplotlib.ticker import NullFormatter, NullLocator, MultipleLocator
from scipy import stats
from sklearn.metrics import auc, roc_curve
from sklearn.model_selection import train_test_split
from vis.utils import utils
from vis.visualization import visualize_activation
from vis.visualization import visualize_saliency
import keras
import matplotlib.pylab as plt
import pandas as pd
import seaborn as sns
import talos as ta
sns.set_palette('husl')
sns.set(style='ticks')


Using TensorFlow backend.

In [2]:
%matplotlib inline
plt.rcParams["figure.figsize"] = [17, 17]

For the purposes of this notebook, a simple model is constructed.


In [3]:
num_classes = 2
model = Sequential()
model.add(Conv1D(32, 5, strides=1, input_shape=(18, 1), activation='tanh'))
model.add(MaxPooling1D(pool_size=2, strides=2))
model.add(Conv1D(32, 3, strides=1, activation='tanh'))
model.add(Flatten())
model.add(Dense(300, activation='tanh'))
model.add(Dropout(rate=0.5))
model.add(Dense(300, activation='tanh'))
model.add(Dropout(rate=0.5))
model.add(Dense(num_classes, activation='softmax', name='preds'))
model.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])

Model checkpoints can be saved during training, usually in the HDF5 format.

A callback that writes the model to a new file whenever the validation loss improves could look like the following:


In [4]:
checkpoint = keras.callbacks.ModelCheckpoint(
    filepath       = 'best_model.{epoch:02d}-{val_loss:.2f}.h5',
    monitor        = 'val_loss',
    save_best_only = True
)
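
The callback has an effect only when it is passed to model.fit. A minimal sketch of this, using randomly generated stand-in data (the array names, sizes and training settings here are illustrative assumptions, not part of the original notebook):

import numpy as np
from keras.utils import to_categorical

# Stand-in data matching the (18, 1) input shape and two one-hot classes.
x_train = np.random.random((1000, 18, 1))
y_train = to_categorical(np.random.randint(2, size=1000), num_classes=2)
x_val   = np.random.random((200, 18, 1))
y_val   = to_categorical(np.random.randint(2, size=200), num_classes=2)

# Pass the checkpoint callback so that model files are written during training.
model.fit(
    x_train, y_train,
    validation_data = (x_val, y_val),
    epochs          = 10,
    batch_size      = 32,
    callbacks       = [checkpoint]
)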

To keep only the latest model during training, overwriting a single file at the end of every epoch, a callback like the following could be used:


In [5]:
checkpoint = keras.callbacks.ModelCheckpoint(
    filepath       = 'model_latest.h5',
    monitor        = 'val_loss',
    save_best_only = False  # overwrite the file every epoch, keeping only the latest model
)

A saved HDF5 file can be loaded back as a model as follows:


In [6]:
from keras.models import load_model
model = load_model('model_latest.h5')
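
Outside of callbacks, a model can also be saved and restored explicitly. A brief sketch (the filenames here are arbitrary):

# Save the full model (architecture, weights, optimizer state) to HDF5 and reload it.
model.save('model_manual.h5')
model = load_model('model_manual.h5')

# Alternatively, save and restore the weights only; this requires the same
# architecture to be rebuilt in code before calling load_weights.
model.save_weights('model_weights.h5')
model.load_weights('model_weights.h5')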

A model can be summarized as follows:


In [7]:
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv1d_1 (Conv1D)            (None, 14, 32)            192       
_________________________________________________________________
max_pooling1d_1 (MaxPooling1 (None, 7, 32)             0         
_________________________________________________________________
conv1d_2 (Conv1D)            (None, 5, 32)             3104      
_________________________________________________________________
flatten_1 (Flatten)          (None, 160)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 300)               48300     
_________________________________________________________________
dropout_1 (Dropout)          (None, 300)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 300)               90300     
_________________________________________________________________
dropout_2 (Dropout)          (None, 300)               0         
_________________________________________________________________
preds (Dense)                (None, 2)                 602       
=================================================================
Total params: 142,498
Trainable params: 142,498
Non-trainable params: 0
_________________________________________________________________
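
Beyond the tabular summary, the model configuration can be exported programmatically, for example as JSON. A brief sketch (the filename is arbitrary):

from keras.models import model_from_json

# Export the architecture (without weights) as JSON and rebuild a fresh,
# untrained model from it.
architecture_json = model.to_json()
with open('model_architecture.json', 'w') as json_file:
    json_file.write(architecture_json)
rebuilt_model = model_from_json(architecture_json)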

An SVG of the model graph can be displayed inline in Jupyter as follows:


In [8]:
SVG(model_to_dot(model).create(prog='dot', format='svg'))


Out[8]:
[SVG model graph: input → conv1d_1: Conv1D → max_pooling1d_1: MaxPooling1D → conv1d_2: Conv1D → flatten_1: Flatten → dense_1: Dense → dropout_1: Dropout → dense_2: Dense → dropout_2: Dropout → preds: Dense]
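
The same graph can also be written to an image file using plot_model, which was imported above (the filename is arbitrary):

# Write the model graph to a PNG file, annotating each layer with its
# input and output shapes.
plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=True)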

The layers of a model can be accessed:


In [9]:
model.layers


Out[9]:
[<keras.layers.convolutional.Conv1D at 0x7f240970c0b8>,
 <keras.layers.pooling.MaxPooling1D at 0x7f240970c630>,
 <keras.layers.convolutional.Conv1D at 0x7f240970c3c8>,
 <keras.layers.core.Flatten at 0x7f24096a71d0>,
 <keras.layers.core.Dense at 0x7f24096c3668>,
 <keras.layers.core.Dropout at 0x7f24096d3438>,
 <keras.layers.core.Dense at 0x7f24096d3f28>,
 <keras.layers.core.Dropout at 0x7f240962d710>,
 <keras.layers.core.Dense at 0x7f240970c4a8>]
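
Looping over model.layers gives a quick overview of layer names, output shapes and parameter counts, for example:

# Print the name, output shape and parameter count of every layer.
for layer in model.layers:
    print(layer.name, layer.output_shape, layer.count_params())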

The configuration of an individual layer can be inspected:


In [10]:
model.layers[8].get_config()


Out[10]:
{'name': 'preds',
 'trainable': True,
 'units': 2,
 'activation': 'softmax',
 'use_bias': True,
 'kernel_initializer': {'class_name': 'VarianceScaling',
  'config': {'scale': 1.0,
   'mode': 'fan_avg',
   'distribution': 'uniform',
   'seed': None}},
 'bias_initializer': {'class_name': 'Zeros', 'config': {}},
 'kernel_regularizer': None,
 'bias_regularizer': None,
 'activity_regularizer': None,
 'kernel_constraint': None,
 'bias_constraint': None}
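
A layer can also be retrieved by name rather than by index; a short sketch using the preds layer defined above:

# Retrieve the output layer by name and inspect the shapes of its weights.
preds_layer     = model.get_layer('preds')
weights, biases = preds_layer.get_weights()
print(weights.shape, biases.shape)  # (300, 2) (2,)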

The weights of individual layers can be accessed and visualized:


In [11]:
index = 0
plt.imshow(model.layers[index].get_weights()[0].squeeze(), cmap='gray')
plt.title(model.layers[index].get_config()['name']);



In [12]:
index = 4
plt.imshow(model.layers[index].get_weights()[0].squeeze(), cmap='gray')
plt.title(model.layers[index].get_config()['name']);
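
Weight distributions can also be compared across layers with histograms; a minimal sketch using matplotlib:

# Plot a histogram of the kernel weights of every layer that has weights.
for layer in model.layers:
    layer_weights = layer.get_weights()
    if layer_weights:
        plt.hist(layer_weights[0].flatten(), bins=50, alpha=0.5, label=layer.name)
plt.legend()
plt.xlabel('weight value')
plt.ylabel('count');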


