In [1]:
import numpy as np
import matplotlib.pyplot as plt
from skimage.color import gray2rgb, rgb2gray, label2rgb # since the code wants color images
In [2]:
from sklearn.datasets import fetch_openml
# fetch_mldata has been removed from scikit-learn; fetch_openml serves the same MNIST data
mnist = fetch_openml('mnist_784', version=1, as_frame=False)
# make each image color so lime_image works correctly
X_vec = np.stack([gray2rgb(iimg) for iimg in mnist.data.reshape((-1, 28, 28)).astype(np.uint8)], 0)
y_vec = mnist.target.astype(np.uint8)
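As a quick sanity check, the stacked array should contain the 70,000 MNIST digits as 28x28 RGB images:
In [ ]:
print(X_vec.shape, y_vec.shape)  # expect (70000, 28, 28, 3) and (70000,)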
In [3]:
%matplotlib inline
fig, ax1 = plt.subplots(1,1)
ax1.imshow(X_vec[0], interpolation = 'none')
ax1.set_title('Digit: {}'.format(y_vec[0]))
Out[3]:
In [4]:
from sklearn.pipeline import Pipeline
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import Normalizer
class PipeStep(object):
    """
    Wrapper for turning functions into pipeline transforms (no-fitting)
    """
    def __init__(self, step_func):
        self._step_func = step_func

    def fit(self, *args):
        return self

    def transform(self, X):
        return self._step_func(X)
makegray_step = PipeStep(lambda img_list: [rgb2gray(img) for img in img_list])
flatten_step = PipeStep(lambda img_list: [img.ravel() for img in img_list])
simple_rf_pipeline = Pipeline([
    ('Make Gray', makegray_step),
    ('Flatten Image', flatten_step),
    # ('Normalize', Normalizer()),
    # ('PCA', PCA(16)),
    ('RF', RandomForestClassifier())
])
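Each PipeStep simply applies its wrapped function during transform, so the preprocessing steps can be exercised on their own before fitting the full pipeline; a minimal check using the arrays defined above:
In [ ]:
gray_imgs = makegray_step.transform(X_vec[:2])
print(gray_imgs[0].shape)   # (28, 28): RGB collapsed back to a single channel
flat_imgs = flatten_step.transform(gray_imgs)
print(flat_imgs[0].shape)   # (784,): flattened pixels fed to the random forest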
In [5]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X_vec, y_vec,
                                                    train_size=0.55)
In [6]:
simple_rf_pipeline.fit(X_train, y_train)
Out[6]:
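A held-out accuracy score is a useful sanity check that the forest learned something before its predictions are explained; the exact number will vary from run to run since no random seed is fixed:
In [ ]:
print('Test accuracy: {:.3f}'.format(simple_rf_pipeline.score(X_test, y_test)))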
In [7]:
%load_ext autoreload
%autoreload 2
import os,sys
try:
    import lime
except ImportError:
    sys.path.append(os.path.join('..', '..'))  # add the repo root so the local lime package is importable
    import lime
In [8]:
from lime import lime_image
from lime.wrappers.scikit_image import SegmentationAlgorithm
explainer = lime_image.LimeImageExplainer(verbose = False)
segmenter = SegmentationAlgorithm('quickshift', kernel_size=1, max_dist=200, ratio=0.2)
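The SegmentationAlgorithm wrapper can be called on an image just as LIME calls it internally, so the quickshift superpixels that will be perturbed can be inspected up front; a short sketch of that check on the first test digit:
In [ ]:
segments = segmenter(X_test[0])
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4))
ax1.imshow(X_test[0], interpolation='none')
ax1.set_title('Digit: {}'.format(y_test[0]))
ax2.imshow(segments, interpolation='none')
ax2.set_title('{} quickshift superpixels'.format(len(np.unique(segments))))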
In [9]:
%%time
explanation = explainer.explain_instance(X_test[0],
                                         classifier_fn=simple_rf_pipeline.predict_proba,
                                         top_labels=10, hide_color=0, num_samples=10000,
                                         segmentation_fn=segmenter)
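Before plotting, the raw explanation can be inspected directly: lime's ImageExplanation keeps the ranked labels in top_labels and, per label, a list of (superpixel index, weight) pairs in local_exp (attribute names as exposed by lime); a small peek at the strongest weights for the top label:
In [ ]:
top_label = explanation.top_labels[0]
print('Top label for this digit:', top_label)
print(sorted(explanation.local_exp[top_label], key=lambda sw: -abs(sw[1]))[:5])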
In [10]:
temp, mask = explanation.get_image_and_mask(y_test[0], positive_only=True, num_features=10,
                                            hide_rest=False, min_weight=0.01)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4))
ax1.imshow(label2rgb(mask, temp, bg_label=0), interpolation='nearest')
ax1.set_title('Positive Regions for {}'.format(y_test[0]))
temp, mask = explanation.get_image_and_mask(y_test[0], positive_only=False, num_features=10,
                                            hide_rest=False, min_weight=0.01)
ax2.imshow(label2rgb(3 - mask, temp, bg_label=0), interpolation='nearest')
ax2.set_title('Positive/Negative Regions for {}'.format(y_test[0]))
Out[10]:
In [11]:
# now show them for each class
fig, m_axs = plt.subplots(2,5, figsize = (12,6))
for i, c_ax in enumerate(m_axs.flatten()):
    temp, mask = explanation.get_image_and_mask(i, positive_only=True, num_features=1000,
                                                hide_rest=False, min_weight=0.01)
    c_ax.imshow(label2rgb(mask, X_test[0], bg_label=0), interpolation='nearest')
    c_ax.set_title('Positive for {}\nActual {}'.format(i, y_test[0]))
    c_ax.axis('off')
In [12]:
pipe_pred_test = simple_rf_pipeline.predict(X_test)
wrong_idx = np.random.choice(np.where(pipe_pred_test!=y_test)[0])
print('Using #{} where the label was {} and the pipeline predicted {}'.format(wrong_idx, y_test[wrong_idx], pipe_pred_test[wrong_idx]))
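It also helps to see how the pipeline spreads its probability mass over the ten digits for this misclassified example (classes_ is forwarded by the pipeline to its final estimator):
In [ ]:
probs = simple_rf_pipeline.predict_proba(X_test[wrong_idx:wrong_idx + 1])[0]
for digit, p in zip(simple_rf_pipeline.classes_, probs):
    print('digit {}: {:.2f}'.format(digit, p))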
In [13]:
%%time
explanation = explainer.explain_instance(X_test[wrong_idx],
                                         classifier_fn=simple_rf_pipeline.predict_proba,
                                         top_labels=10, hide_color=0, num_samples=10000,
                                         segmentation_fn=segmenter)
In [14]:
# now show them for each class
fig, m_axs = plt.subplots(2,5, figsize = (12,6))
for i, c_ax in enumerate(m_axs.flatten()):
    temp, mask = explanation.get_image_and_mask(i, positive_only=True, num_features=10,
                                                hide_rest=False, min_weight=0.01)
    c_ax.imshow(label2rgb(mask, temp, bg_label=0), interpolation='nearest')
    c_ax.set_title('Positive for {}\nActual {}'.format(i, y_test[wrong_idx]))
    c_ax.axis('off')
In [ ]: