In [2]:
from os.path import expanduser, join

import numpy as np
from sklearn import utils

import matplotlib.pyplot as plt

from skimage.feature import peak_local_max

from localizer import config, util, visualization, models, keras_helpers

%matplotlib inline
#%matplotlib notebook

import seaborn as sns
sns.set(color_codes=True)


Using Theano backend.
Using gpu device 0: GeForce GTX 960 (CNMeM is disabled, CuDNN 4007)

In [199]:
# Hot-reload the local project modules so edits on disk take effect
# without restarting the kernel (development convenience).
import importlib

for _module in [config, util, visualization, models, keras_helpers]:
    importlib.reload(_module)

In [4]:
data_dir = join(expanduser("~"), 'deeplocalizer_data', 'data_mxnet2')

In [5]:
X_train, y_train, X_test, y_test, X_val, y_val = util.load_or_restore_data(data_dir)


Restoring mmapped data
/usr/lib/python3.5/site-packages/numpy/core/memmap.py:263: VisibleDeprecationWarning: using a non-integer number instead of an integer will result in an error in the future
  offset=offset, order=order)

In [6]:
# Report sample counts per split; each sample is a single-channel
# 100x100 patch (see output below).
for _split in (X_train, X_test, X_val):
    print(_split.shape)


(164529, 1, 100, 100)
(18725, 1, 100, 100)
(18725, 1, 100, 100)

In [7]:
fig = visualization.plot_sample_images(X_train, y_train, random=True)



In [8]:
# Downscale each 100x100 patch to the saliency network's input resolution
# (config.filtersize; 32x32 judging by the shapes printed below).
Xs_train = util.resize_data(X_train, config.filtersize)
Xs_val   = util.resize_data(X_val, config.filtersize)
Xs_test  = util.resize_data(X_test, config.filtersize)

print(Xs_train.shape)
print(Xs_test.shape)
print(Xs_val.shape)


164864/164529 [==============================] - 86s    
19456/18725 [===============================] - 9s    
19456/18725 [===============================] - 9s    
(164529, 1, 32, 32)
(18725, 1, 32, 32)
(18725, 1, 32, 32)

In [9]:
fig = visualization.plot_sample_images(Xs_train, y_train)



In [36]:
saliency_network = models.get_saliency_network(train=True, compile=False)

In [37]:
# Augmentation generator, equal class weights, and the checkpoint path for
# the best weights ("-wobn" suffix: without batch norm? — TODO confirm).
# NOTE(review): hardcoded home-relative path; consider a configurable base dir.
saliency_datagen = keras_helpers.get_datagen(Xs_train)
saliency_class_weight = [1., 1.]
saliency_weight_file = join(expanduser("~"), 'saliency-localizer-models', 'season_2015', 'saliency_weights-wobn')

In [38]:
# Train with on-the-fly augmentation. The training log below shows the
# harness checkpoints on validation improvement, reduces the learning rate
# after 4 epochs without improvement, and stops early (epoch 95).
saliency_history = keras_helpers.fit_model(saliency_network, saliency_datagen, Xs_train, y_train, Xs_val, y_val,
                                           saliency_weight_file, saliency_class_weight, batchsize=128, categorial=False)


Epoch 0
164736/164657 [==============================] - 50s - train loss: 0.0273    
18944/18853 [==============================] - 3s - test loss: 0.0153     

0 epochs since last improvement

Epoch 1
164736/164657 [==============================] - 41s - train loss: 0.0160    
18944/18853 [==============================] - 3s - test loss: 0.0119     

0 epochs since last improvement

Epoch 2
164736/164657 [==============================] - 48s - train loss: 0.0141    
18944/18853 [==============================] - 3s - test loss: 0.0104     

0 epochs since last improvement

Epoch 3
164736/164657 [==============================] - 49s - train loss: 0.0129    
18944/18853 [==============================] - 3s - test loss: 0.0097     

0 epochs since last improvement

Epoch 4
164736/164657 [==============================] - 50s - train loss: 0.0121    
18944/18853 [==============================] - 3s - test loss: 0.0088     

0 epochs since last improvement

Epoch 5
164736/164657 [==============================] - 49s - train loss: 0.0117    
18944/18853 [==============================] - 3s - test loss: 0.0083     

0 epochs since last improvement

Epoch 6
164736/164657 [==============================] - 49s - train loss: 0.0112    
18944/18853 [==============================] - 3s - test loss: 0.0081     

0 epochs since last improvement

Epoch 7
164736/164657 [==============================] - 50s - train loss: 0.0107    
18944/18853 [==============================] - 3s - test loss: 0.0080     

0 epochs since last improvement

Epoch 8
164736/164657 [==============================] - 49s - train loss: 0.0105    
18944/18853 [==============================] - 3s - test loss: 0.0077     

0 epochs since last improvement

Epoch 9
164736/164657 [==============================] - 50s - train loss: 0.0101    
18944/18853 [==============================] - 3s - test loss: 0.0074     

0 epochs since last improvement

Epoch 10
164736/164657 [==============================] - 49s - train loss: 0.0100    
18944/18853 [==============================] - 3s - test loss: 0.0071     

0 epochs since last improvement

Epoch 11
164736/164657 [==============================] - 49s - train loss: 0.0098    
18944/18853 [==============================] - 3s - test loss: 0.0069     

0 epochs since last improvement

Epoch 12
164736/164657 [==============================] - 49s - train loss: 0.0096    
18944/18853 [==============================] - 3s - test loss: 0.0068     

0 epochs since last improvement

Epoch 13
164736/164657 [==============================] - 49s - train loss: 0.0094    
18944/18853 [==============================] - 3s - test loss: 0.0070     

0 epochs since last improvement

Epoch 14
164736/164657 [==============================] - 49s - train loss: 0.0092    
18944/18853 [==============================] - 3s - test loss: 0.0064     

0 epochs since last improvement

Epoch 15
164736/164657 [==============================] - 49s - train loss: 0.0091    
18944/18853 [==============================] - 2s - test loss: 0.0062     

0 epochs since last improvement

Epoch 16
164736/164657 [==============================] - 50s - train loss: 0.0089    
18944/18853 [==============================] - 3s - test loss: 0.0063     

0 epochs since last improvement

Epoch 17
164736/164657 [==============================] - 49s - train loss: 0.0088    
18944/18853 [==============================] - 3s - test loss: 0.0061     

1 epochs since last improvement

Epoch 18
164736/164657 [==============================] - 39s - train loss: 0.0087    
18944/18853 [==============================] - 3s - test loss: 0.0074     

0 epochs since last improvement

Epoch 19
164736/164657 [==============================] - 50s - train loss: 0.0086    
18944/18853 [==============================] - 3s - test loss: 0.0060     

1 epochs since last improvement

Epoch 20
164736/164657 [==============================] - 50s - train loss: 0.0084    
18944/18853 [==============================] - 3s - test loss: 0.0058     

0 epochs since last improvement

Epoch 21
164736/164657 [==============================] - 50s - train loss: 0.0084    
18944/18853 [==============================] - 3s - test loss: 0.0065     

1 epochs since last improvement

Epoch 22
164736/164657 [==============================] - 49s - train loss: 0.0084    
18944/18853 [==============================] - 3s - test loss: 0.0055     

2 epochs since last improvement

Epoch 23
164736/164657 [==============================] - 49s - train loss: 0.0082    
18944/18853 [==============================] - 3s - test loss: 0.0055     

0 epochs since last improvement

Epoch 24
164736/164657 [==============================] - 49s - train loss: 0.0082    
18944/18853 [==============================] - 3s - test loss: 0.0054     

1 epochs since last improvement

Epoch 25
164736/164657 [==============================] - 49s - train loss: 0.0080    
18944/18853 [==============================] - 3s - test loss: 0.0052     

0 epochs since last improvement

Epoch 26
164736/164657 [==============================] - 49s - train loss: 0.0078    
18944/18853 [==============================] - 3s - test loss: 0.0052     

0 epochs since last improvement

Epoch 27
164736/164657 [==============================] - 49s - train loss: 0.0078    
18944/18853 [==============================] - 3s - test loss: 0.0052     

1 epochs since last improvement

Epoch 28
164736/164657 [==============================] - 49s - train loss: 0.0079    
18944/18853 [==============================] - 3s - test loss: 0.0051     

2 epochs since last improvement

Epoch 29
164736/164657 [==============================] - 49s - train loss: 0.0077    
18944/18853 [==============================] - 3s - test loss: 0.0050     

0 epochs since last improvement

Epoch 30
164736/164657 [==============================] - 49s - train loss: 0.0077    
18944/18853 [==============================] - 3s - test loss: 0.0051     

1 epochs since last improvement

Epoch 31
164736/164657 [==============================] - 49s - train loss: 0.0076    
18944/18853 [==============================] - 3s - test loss: 0.0049     

0 epochs since last improvement

Epoch 32
164736/164657 [==============================] - 49s - train loss: 0.0076    
18944/18853 [==============================] - 3s - test loss: 0.0051     

1 epochs since last improvement

Epoch 33
164736/164657 [==============================] - 49s - train loss: 0.0075    
18944/18853 [==============================] - 3s - test loss: 0.0049     

0 epochs since last improvement

Epoch 34
164736/164657 [==============================] - 49s - train loss: 0.0075    
18944/18853 [==============================] - 3s - test loss: 0.0049     

1 epochs since last improvement

Epoch 35
164736/164657 [==============================] - 49s - train loss: 0.0075    
18944/18853 [==============================] - 3s - test loss: 0.0049     

2 epochs since last improvement

Epoch 36
164736/164657 [==============================] - 34s - train loss: 0.0074    
18944/18853 [==============================] - 3s - test loss: 0.0046     

3 epochs since last improvement

Epoch 37
164736/164657 [==============================] - 50s - train loss: 0.0073    
18944/18853 [==============================] - 3s - test loss: 0.0047     

0 epochs since last improvement

Epoch 38
164736/164657 [==============================] - 50s - train loss: 0.0073    
18944/18853 [==============================] - 3s - test loss: 0.0045     

1 epochs since last improvement

Epoch 39
164736/164657 [==============================] - 50s - train loss: 0.0073    
18944/18853 [==============================] - 3s - test loss: 0.0046     

2 epochs since last improvement

Epoch 40
164736/164657 [==============================] - 49s - train loss: 0.0071    
18944/18853 [==============================] - 3s - test loss: 0.0044     

0 epochs since last improvement

Epoch 41
164736/164657 [==============================] - 50s - train loss: 0.0071    
18944/18853 [==============================] - 3s - test loss: 0.0045     

1 epochs since last improvement

Epoch 42
164736/164657 [==============================] - 49s - train loss: 0.0071    
18944/18853 [==============================] - 3s - test loss: 0.0045     

2 epochs since last improvement

Epoch 43
164736/164657 [==============================] - 49s - train loss: 0.0070    
18944/18853 [==============================] - 3s - test loss: 0.0044     

0 epochs since last improvement

Epoch 44
164736/164657 [==============================] - 49s - train loss: 0.0070    
18944/18853 [==============================] - 3s - test loss: 0.0046     

1 epochs since last improvement

Epoch 45
164736/164657 [==============================] - 49s - train loss: 0.0069    
18944/18853 [==============================] - 3s - test loss: 0.0042     

2 epochs since last improvement

Epoch 46
164736/164657 [==============================] - 50s - train loss: 0.0069    
18944/18853 [==============================] - 3s - test loss: 0.0044     

3 epochs since last improvement

Epoch 47
164736/164657 [==============================] - 50s - train loss: 0.0068    
18944/18853 [==============================] - 3s - test loss: 0.0042     

0 epochs since last improvement

Epoch 48
164736/164657 [==============================] - 49s - train loss: 0.0069    
18944/18853 [==============================] - 3s - test loss: 0.0042     

1 epochs since last improvement

Epoch 49
164736/164657 [==============================] - 49s - train loss: 0.0068    
18944/18853 [==============================] - 3s - test loss: 0.0042     

2 epochs since last improvement

Epoch 50
164736/164657 [==============================] - 49s - train loss: 0.0067    
18944/18853 [==============================] - 3s - test loss: 0.0042     

0 epochs since last improvement

Epoch 51
164736/164657 [==============================] - 50s - train loss: 0.0067    
18944/18853 [==============================] - 3s - test loss: 0.0041     

1 epochs since last improvement

Epoch 52
164736/164657 [==============================] - 49s - train loss: 0.0067    
18944/18853 [==============================] - 3s - test loss: 0.0042     

2 epochs since last improvement

Epoch 53
164736/164657 [==============================] - 40s - train loss: 0.0066    
18944/18853 [==============================] - 1s - test loss: 0.0042     

0 epochs since last improvement

Epoch 54
164736/164657 [==============================] - 41s - train loss: 0.0066    
18944/18853 [==============================] - 3s - test loss: 0.0040     

1 epochs since last improvement

Epoch 55
164736/164657 [==============================] - 47s - train loss: 0.0065    
18944/18853 [==============================] - 3s - test loss: 0.0040     

2 epochs since last improvement

Epoch 56
164736/164657 [==============================] - 48s - train loss: 0.0065    
18944/18853 [==============================] - 3s - test loss: 0.0041     

3 epochs since last improvement

Epoch 57
164736/164657 [==============================] - 48s - train loss: 0.0065    
18944/18853 [==============================] - 4s - test loss: 0.0040     

4 epochs since last improvement
Reduce learning rate to: 0.025000

Epoch 58
164736/164657 [==============================] - 52s - train loss: 0.0061    
18944/18853 [==============================] - 3s - test loss: 0.0038     

0 epochs since last improvement

Epoch 59
164736/164657 [==============================] - 47s - train loss: 0.0061    
18944/18853 [==============================] - 3s - test loss: 0.0037     

0 epochs since last improvement

Epoch 60
164736/164657 [==============================] - 47s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

0 epochs since last improvement

Epoch 61
164736/164657 [==============================] - 47s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

1 epochs since last improvement

Epoch 62
164736/164657 [==============================] - 48s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

2 epochs since last improvement

Epoch 63
164736/164657 [==============================] - 47s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

3 epochs since last improvement

Epoch 64
164736/164657 [==============================] - 47s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

0 epochs since last improvement

Epoch 65
164736/164657 [==============================] - 47s - train loss: 0.0060    
18944/18853 [==============================] - 3s - test loss: 0.0037     

1 epochs since last improvement

Epoch 66
164736/164657 [==============================] - 47s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0037     

0 epochs since last improvement

Epoch 67
164736/164657 [==============================] - 47s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0037     

0 epochs since last improvement

Epoch 68
164736/164657 [==============================] - 48s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0037     

1 epochs since last improvement

Epoch 69
164736/164657 [==============================] - 41s - train loss: 0.0059    
18944/18853 [==============================] - 1s - test loss: 0.0036     

0 epochs since last improvement

Epoch 70
164736/164657 [==============================] - 47s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0036     

1 epochs since last improvement

Epoch 71
164736/164657 [==============================] - 47s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0036     

0 epochs since last improvement

Epoch 72
164736/164657 [==============================] - 47s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0037     

1 epochs since last improvement

Epoch 73
164736/164657 [==============================] - 47s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0036     

2 epochs since last improvement

Epoch 74
164736/164657 [==============================] - 48s - train loss: 0.0059    
18944/18853 [==============================] - 3s - test loss: 0.0037     

3 epochs since last improvement

Epoch 75
164736/164657 [==============================] - 54s - train loss: 0.0058    
18944/18853 [==============================] - 4s - test loss: 0.0037     

4 epochs since last improvement
Reduce learning rate to: 0.006250

Epoch 76
164736/164657 [==============================] - 61s - train loss: 0.0058    
18944/18853 [==============================] - 4s - test loss: 0.0036     

1 epochs since last improvement

Epoch 77
164736/164657 [==============================] - 66s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0036     

0 epochs since last improvement

Epoch 78
164736/164657 [==============================] - 67s - train loss: 0.0058    
18944/18853 [==============================] - 4s - test loss: 0.0036     

0 epochs since last improvement

Epoch 79
164736/164657 [==============================] - 64s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0036     

0 epochs since last improvement

Epoch 80
164736/164657 [==============================] - 61s - train loss: 0.0058    
18944/18853 [==============================] - 4s - test loss: 0.0036     

1 epochs since last improvement

Epoch 81
164736/164657 [==============================] - 61s - train loss: 0.0057    
18944/18853 [==============================] - 4s - test loss: 0.0037     

2 epochs since last improvement

Epoch 82
164736/164657 [==============================] - 60s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0036     

3 epochs since last improvement

Epoch 83
164736/164657 [==============================] - 60s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0036     

4 epochs since last improvement
Reduce learning rate to: 0.001563

Epoch 84
164736/164657 [==============================] - 52s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0036     

1 epochs since last improvement

Epoch 85
164736/164657 [==============================] - 42s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0035     

2 epochs since last improvement

Epoch 86
164736/164657 [==============================] - 53s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

0 epochs since last improvement

Epoch 87
164736/164657 [==============================] - 53s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

1 epochs since last improvement

Epoch 88
164736/164657 [==============================] - 53s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

0 epochs since last improvement

Epoch 89
164736/164657 [==============================] - 53s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

1 epochs since last improvement

Epoch 90
164736/164657 [==============================] - 53s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0035     

2 epochs since last improvement

Epoch 91
164736/164657 [==============================] - 50s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

3 epochs since last improvement

Epoch 92
164736/164657 [==============================] - 51s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     

4 epochs since last improvement
Reduce learning rate to: 0.000391

Epoch 93
164736/164657 [==============================] - 50s - train loss: 0.0058    
18944/18853 [==============================] - 3s - test loss: 0.0036     

1 epochs since last improvement

Epoch 94
164736/164657 [==============================] - 51s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0036     

2 epochs since last improvement

Epoch 95
164736/164657 [==============================] - 51s - train loss: 0.0057    
18944/18853 [==============================] - 3s - test loss: 0.0035     
Epoch 00095: early stopping

3 epochs since last improvement

In [39]:
batch_error = np.array([hist[0] for hist in saliency_history.batch_hist])

In [40]:
# Plot the per-batch training loss curve; `_ =` suppresses the Line2D repr.
plt.figure(figsize=(16, 6))
_ = plt.plot(batch_error)



In [41]:
batch_error.shape[0] / (658145 / 256)


Out[41]:
48.05827287299911

In [42]:
batch_error[-1]


Out[42]:
0.0035345587

In [43]:
saliency_network.load_weights(saliency_weight_file)

In [52]:
ys_out = keras_helpers.predict_model(saliency_network, Xs_test, saliency_datagen)


18944/18981 [============================>.] - ETA: 0s

In [55]:
max(ys_out[:, 1])


Out[55]:
0.99909210205078125

In [56]:
# Side-by-side histograms: ground-truth labels (left) vs predicted
# positive-class scores (right) on the test set.
fig, (ax_labels, ax_scores) = plt.subplots(nrows=1, ncols=2, sharex=True, sharey=True, figsize=(12, 6))
ax_labels.hist(y_test[:])
ax_scores.hist(ys_out[:, 1])

plt.tight_layout()



In [57]:
# PR/ROC evaluation; ground truth is binarised at 0.8 — presumably because
# y_test holds soft labels in [0, 1]; confirm how the labels were generated.
precision, recall, average_precision, thresholds, fpr, tpr, roc_auc = keras_helpers.evaluate_model(
    y_test > 0.8, ys_out, visualize=True)



In [58]:
saliency_threshold = keras_helpers.select_threshold(precision, recall, thresholds, min_value=0.98, optimize='recall')


('Recall', 0.98021454112038142)
('Precision', 0.48490566037735849)
('Threshold', 0.52992475032806396)
('F_2', 0.81393507521773556)
('F_0.5', 0.53942017578381218)

In [59]:
saliency_threshold = 0.5

In [150]:
convolution_container = models.get_saliency_network(train=False, shape=image_filtersize.shape)

In [151]:
convolution_function = keras_helpers.get_convolution_function(saliency_network, convolution_container)

In [152]:
# Read the list of evaluation image paths (one relative path per line).
image_path = join(expanduser("~"), 'deeplocalizer_data')
with open(join(image_path, 'test.txt'), 'r') as f:
    image_files = [line.rstrip('\n') for line in f]

In [153]:
# Load and preprocess the first test image; preprocess_image presumably
# returns the full-resolution image, a downscaled copy matched to the
# network filter size, and the target size — confirm in util.preprocess_image.
imfile = image_files[0]
image, image_filtersize, targetsize = util.preprocess_image(join(image_path, imfile), config.filtersize)

In [156]:
%%%timeit
convolution_function(image_filtersize.reshape((1, 1, image_filtersize.shape[0], image_filtersize.shape[1])))


The slowest run took 8.14 times longer than the fastest. This could mean that an intermediate result is being cached.
1 loop, best of 3: 48.1 ms per loop

In [212]:
saliency = convolution_function(image_filtersize.reshape((1, 1, image_filtersize.shape[0], image_filtersize.shape[1])))[0]

In [213]:
saliency.shape


Out[213]:
(1, 1, 952, 1272)

In [214]:
saliency = gaussian_filter(saliency[0, 0], sigma=3.)


/usr/lib/python3.5/site-packages/skimage/filters/_gaussian.py:13: skimage_deprecation: Call to deprecated function ``gaussian_filter``. Use ``skimage.filters.gaussian`` instead.
  multichannel=None):

In [ ]:
# NOTE(review): these two functions take ``self`` and use
# ``self.convolution_function`` — they were pasted here without their
# enclosing class, with inconsistent indentation that would raise an
# IndentationError on execution. Indentation is normalized below; they
# still belong inside a detector class (or in a localizer/*.py module).
def get_saliency_image(self, image_fname):
    """Compute the smoothed saliency map for one image file.

    Loads and preprocesses ``image_fname``, runs the convolutional
    saliency network over the downscaled image, and Gaussian-smooths
    the result (sigma=3).

    Returns (saliency map, full-resolution image).
    """
    image, image_filtersize, targetsize = util.preprocess_image(
                image_fname, config.filtersize)
    saliency = self.convolution_function(
        image_filtersize.reshape((1, 1, image_filtersize.shape[0],
                                  image_filtersize.shape[1])))
    saliency = gaussian_filter(saliency[0, 0], sigma=3.)
    return saliency, image


def detect_tags(self, image_path, saliency_threshold=0.5):
    """Run the full detection pipeline on one image.

    Pipeline: saliency map -> candidate peaks above ``saliency_threshold``
    -> coordinates scaled to image space -> ROI extraction.

    Returns (saliencies of kept candidates, candidate coordinates, ROIs).
    """
    saliency, image = self.get_saliency_image(image_path)
    candidates = util.get_candidates(saliency, saliency_threshold)
    saliencies = util.extract_saliencies(candidates, saliency)
    candidates_img = util.scale_candidates(candidates, saliency)
    rois, mask = util.extract_rois(candidates_img, image)
    return saliencies[mask], candidates_img, rois

In [216]:
# Overlay the smoothed saliency map on the downscaled image.
_ = visualization.plot_saliency_image(image_filtersize, saliency, config.filtersize, figsize=(12, 6))

#plt.savefig('saliency.png', dpi=300, bbox_inches='tight')



In [217]:
candidates = util.get_candidates(saliency, saliency_threshold, dist=config.filtersize[0] // 4)

In [218]:
saliencies = util.extract_saliencies(candidates, saliency)

In [219]:
candidates_img = util.scale_candidates(candidates, saliency)


/usr/lib/python3.5/site-packages/scipy/ndimage/interpolation.py:549: UserWarning: From scipy 0.13.0, the output shape of zoom() is calculated with round() instead of int() - for these inputs the size of the returned array has changed.
  "the returned array has changed.", UserWarning)

In [220]:
rois, mask = util.extract_rois(candidates_img, image)

In [221]:
fig = visualization.plot_sample_images(rois, saliencies)



In [224]:
# Visualise the detected candidate ROIs drawn over the original image.
plt.figure(figsize=(16, 16))
_ = plt.imshow(visualization.get_roi_overlay(candidates_img, image))