In [1]:
%load_ext autoreload
%autoreload 2

import cPickle as pickle
import os; import sys; sys.path.append('..'); sys.path.append('../gp/')
import gp
import gp.nets as nets

from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion

from sklearn.metrics import classification_report, accuracy_score, roc_curve, auc, precision_recall_fscore_support, f1_score, precision_recall_curve, average_precision_score, zero_one_loss


from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt

%matplotlib inline


/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/font_manager.py:273: UserWarning: Matplotlib is building the font cache using fc-list. This may take a moment.
  warnings.warn('Matplotlib is building the font cache using fc-list. This may take a moment.')
Using gpu device 0: GeForce GTX TITAN (CNMeM is disabled, CuDNN 4007)
/home/d/nolearn/local/lib/python2.7/site-packages/theano/tensor/signal/downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.
  "downsample module has been moved to the theano.tensor.signal.pool module.")

In [2]:
NETS = []
NETS.append('../nets/IPMLB_FULL.p') # image + prob + binary + large border

network_path = NETS[-1]

with open(network_path, 'rb') as f:
    net = pickle.load(f)

In [17]:
p = net.get_all_params()

In [42]:
X_test, y_test = gp.Patch.load_rgba_test_only('ipmlb')


Loaded /home/d/patches//ipmlb/ in 0.0836169719696 seconds.

In [39]:
net.layers_


Out[39]:
Layers([('input', <lasagne.layers.input.InputLayer at 0x7fbd5afc7710>),
        ('conv1', <lasagne.layers.conv.Conv2DLayer at 0x7fbd5afc7290>),
        ('pool1', <lasagne.layers.pool.MaxPool2DLayer at 0x7fbd2d2b9890>),
        ('dropout1', <lasagne.layers.noise.DropoutLayer at 0x7fbd2d2b98d0>),
        ('conv2', <lasagne.layers.conv.Conv2DLayer at 0x7fbd2d2d1dd0>),
        ('pool2', <lasagne.layers.pool.MaxPool2DLayer at 0x7fbd2d2e1090>),
        ('dropout2', <lasagne.layers.noise.DropoutLayer at 0x7fbd2d2e10d0>),
        ('conv3', <lasagne.layers.conv.Conv2DLayer at 0x7fbd2c5d20d0>),
        ('pool3', <lasagne.layers.pool.MaxPool2DLayer at 0x7fbd2c5d2350>),
        ('dropout3', <lasagne.layers.noise.DropoutLayer at 0x7fbd2c5d2390>),
        ('conv4', <lasagne.layers.conv.Conv2DLayer at 0x7fbd2c577110>),
        ('pool4', <lasagne.layers.pool.MaxPool2DLayer at 0x7fbd2c577390>),
        ('dropout4', <lasagne.layers.noise.DropoutLayer at 0x7fbd2c5773d0>),
        ('hidden5', <lasagne.layers.dense.DenseLayer at 0x7fbd2c59d150>),
        ('dropout5', <lasagne.layers.noise.DropoutLayer at 0x7fbd2c59d3d0>),
        ('output', <lasagne.layers.dense.DenseLayer at 0x7fbd2c53ea10>)])

In [47]:
X_test[0][0].shape


Out[47]:
(75, 75)

In [59]:
import theano
import theano.tensor as T
from lasagne.layers import get_output

In [63]:
x = X_test[100].reshape(1,4,75,75)
layer = net.layers_['hidden5']
xs = T.tensor4('xs').astype(theano.config.floatX)
get_activity = theano.function([xs], get_output(layer, xs))

activity = get_activity(x)

In [62]:
activity


Out[62]:
array([[ 0.19787416,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  2.21678138,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         1.45486271,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  1.11663008,  2.17394733,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  5.94943094,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  4.28511095,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  5.34763718,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         2.44152641,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  1.10784411,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         6.76438665,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  3.46508098,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  1.58092773,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.27093637,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.14891471,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         3.81826019,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  1.49646664,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  3.64025331,  1.09684145,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  2.72087336,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  2.12715912,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ]], dtype=float32)

In [64]:
activity


Out[64]:
array([[ 0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  2.09685564,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  2.57681513,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.02388591,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  1.06736326,
         0.        ,  0.        ,  1.19644725,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  1.39933348,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  3.56666112,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.2191759 ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         1.52145886,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.84565276,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.33094528,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         6.03848314,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  4.306705  ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  2.26578093,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
         0.        ,  0.        ]], dtype=float32)

In [51]:
plot_conv_activity(net.layers_['hidden5'], X_test[0].reshape(1,4,75,75))


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-51-092a44a7d1bb> in <module>()
----> 1 plot_conv_activity(net.layers_['hidden5'], X_test[0].reshape(1,4,75,75))

/home/d/nolearn/local/lib/python2.7/site-packages/nolearn/lasagne/visualize.pyc in plot_conv_activity(layer, x, figsize)
     81     ncols = nrows
     82 
---> 83     figs, axes = plt.subplots(nrows + 1, ncols, figsize=figsize)
     84     axes[0, ncols // 2].imshow(1 - x[0][0], cmap='gray',
     85                                interpolation='nearest')

/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/pyplot.pyc in subplots(nrows, ncols, sharex, sharey, squeeze, subplot_kw, gridspec_kw, **fig_kw)
   1210         else:
   1211             subplot_kw['sharey'] = axarr[sys[i]]
-> 1212         axarr[i] = fig.add_subplot(gs[i // ncols, i % ncols], **subplot_kw)
   1213 
   1214     # returned axis array will be always 2-d, even if nrows=ncols=1

/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/figure.pyc in add_subplot(self, *args, **kwargs)
   1005             a = subplot_class_factory(projection_class)(self, *args, **kwargs)
   1006 
-> 1007         self._axstack.add(key, a)
   1008         self.sca(a)
   1009         a._remove_method = self.__remove_ax

/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/figure.pyc in add(self, key, a)
    125             raise ValueError("first argument, %s, is not a valid key" % key)
    126 
--> 127         a_existing = self.get(key)
    128         if a_existing is not None:
    129             Stack.remove(self, (key, a_existing))

/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/figure.pyc in get(self, key)
     89         If it is not present, return None.
     90         """
---> 91         item = dict(self._elements).get(key)
     92         if item is None:
     93             return None

/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/gridspec.pyc in __eq__(self, other)
    472         return all((self._gridspec == other._gridspec,
    473                     self.num1 == other.num1,
--> 474                     self.num2 == other.num2))
    475 
    476     def __hash__(self):

KeyboardInterrupt: 

In [2]:
# load cremi A data
import h5py
import mahotas as mh
import numpy as np
import tifffile as tif

In [3]:
input_image = np.zeros((125,1250,1250))
input_rhoana = np.zeros((125,1250,1250), dtype=np.uint64)
input_gold = np.zeros((125,1250,1250), dtype=np.uint64)
input_prob = np.zeros((125,1250,1250))

for z in range(125):
    image, prob, gold, rhoana = gp.Cremi.read_section('/home/d/data/CREMI/C/', z)
    input_image[z] = image
    input_prob[z] = prob
    input_gold[z] = gold
    input_rhoana[z] = rhoana


Loading 000000000_000000000_000000000_image.png
Loading 000000000_000000000_000000001_image.png
Loading 000000000_000000000_000000002_image.png
Loading 000000000_000000000_000000003_image.png
Loading 000000000_000000000_000000004_image.png
Loading 000000000_000000000_000000005_image.png
Loading 000000000_000000000_000000006_image.png
Loading 000000000_000000000_000000007_image.png
Loading 000000000_000000000_000000008_image.png
Loading 000000000_000000000_000000009_image.png
Loading 000000000_000000000_000000010_image.png
Loading 000000000_000000000_000000011_image.png
Loading 000000000_000000000_000000012_image.png
Loading 000000000_000000000_000000013_image.png
Loading 000000000_000000000_000000014_image.png
Loading 000000000_000000000_000000015_image.png
Loading 000000000_000000000_000000016_image.png
Loading 000000000_000000000_000000017_image.png
Loading 000000000_000000000_000000018_image.png
Loading 000000000_000000000_000000019_image.png
Loading 000000000_000000000_000000020_image.png
Loading 000000000_000000000_000000021_image.png
Loading 000000000_000000000_000000022_image.png
Loading 000000000_000000000_000000023_image.png
Loading 000000000_000000000_000000024_image.png
Loading 000000000_000000000_000000025_image.png
Loading 000000000_000000000_000000026_image.png
Loading 000000000_000000000_000000027_image.png
Loading 000000000_000000000_000000028_image.png
Loading 000000000_000000000_000000029_image.png
Loading 000000000_000000000_000000030_image.png
Loading 000000000_000000000_000000031_image.png
Loading 000000000_000000000_000000032_image.png
Loading 000000000_000000000_000000033_image.png
Loading 000000000_000000000_000000034_image.png
Loading 000000000_000000000_000000035_image.png
Loading 000000000_000000000_000000036_image.png
Loading 000000000_000000000_000000037_image.png
Loading 000000000_000000000_000000038_image.png
Loading 000000000_000000000_000000039_image.png
Loading 000000000_000000000_000000040_image.png
Loading 000000000_000000000_000000041_image.png
Loading 000000000_000000000_000000042_image.png
Loading 000000000_000000000_000000043_image.png
Loading 000000000_000000000_000000044_image.png
Loading 000000000_000000000_000000045_image.png
Loading 000000000_000000000_000000046_image.png
Loading 000000000_000000000_000000047_image.png
Loading 000000000_000000000_000000048_image.png
Loading 000000000_000000000_000000049_image.png
Loading 000000000_000000000_000000050_image.png
Loading 000000000_000000000_000000051_image.png
Loading 000000000_000000000_000000052_image.png
Loading 000000000_000000000_000000053_image.png
Loading 000000000_000000000_000000054_image.png
Loading 000000000_000000000_000000055_image.png
Loading 000000000_000000000_000000056_image.png
Loading 000000000_000000000_000000057_image.png
Loading 000000000_000000000_000000058_image.png
Loading 000000000_000000000_000000059_image.png
Loading 000000000_000000000_000000060_image.png
Loading 000000000_000000000_000000061_image.png
Loading 000000000_000000000_000000062_image.png
Loading 000000000_000000000_000000063_image.png
Loading 000000000_000000000_000000064_image.png
Loading 000000000_000000000_000000065_image.png
Loading 000000000_000000000_000000066_image.png
Loading 000000000_000000000_000000067_image.png
Loading 000000000_000000000_000000068_image.png
Loading 000000000_000000000_000000069_image.png
Loading 000000000_000000000_000000070_image.png
Loading 000000000_000000000_000000071_image.png
Loading 000000000_000000000_000000072_image.png
Loading 000000000_000000000_000000073_image.png
Loading 000000000_000000000_000000074_image.png
Loading 000000000_000000000_000000075_image.png
Loading 000000000_000000000_000000076_image.png
Loading 000000000_000000000_000000077_image.png
Loading 000000000_000000000_000000078_image.png
Loading 000000000_000000000_000000079_image.png
Loading 000000000_000000000_000000080_image.png
Loading 000000000_000000000_000000081_image.png
Loading 000000000_000000000_000000082_image.png
Loading 000000000_000000000_000000083_image.png
Loading 000000000_000000000_000000084_image.png
Loading 000000000_000000000_000000085_image.png
Loading 000000000_000000000_000000086_image.png
Loading 000000000_000000000_000000087_image.png
Loading 000000000_000000000_000000088_image.png
Loading 000000000_000000000_000000089_image.png
Loading 000000000_000000000_000000090_image.png
Loading 000000000_000000000_000000091_image.png
Loading 000000000_000000000_000000092_image.png
Loading 000000000_000000000_000000093_image.png
Loading 000000000_000000000_000000094_image.png
Loading 000000000_000000000_000000095_image.png
Loading 000000000_000000000_000000096_image.png
Loading 000000000_000000000_000000097_image.png
Loading 000000000_000000000_000000098_image.png
Loading 000000000_000000000_000000099_image.png
Loading 000000000_000000000_000000100_image.png
Loading 000000000_000000000_000000101_image.png
Loading 000000000_000000000_000000102_image.png
Loading 000000000_000000000_000000103_image.png
Loading 000000000_000000000_000000104_image.png
Loading 000000000_000000000_000000105_image.png
Loading 000000000_000000000_000000106_image.png
Loading 000000000_000000000_000000107_image.png
Loading 000000000_000000000_000000108_image.png
Loading 000000000_000000000_000000109_image.png
Loading 000000000_000000000_000000110_image.png
Loading 000000000_000000000_000000111_image.png
Loading 000000000_000000000_000000112_image.png
Loading 000000000_000000000_000000113_image.png
Loading 000000000_000000000_000000114_image.png
Loading 000000000_000000000_000000115_image.png
Loading 000000000_000000000_000000116_image.png
Loading 000000000_000000000_000000117_image.png
Loading 000000000_000000000_000000118_image.png
Loading 000000000_000000000_000000119_image.png
Loading 000000000_000000000_000000120_image.png
Loading 000000000_000000000_000000121_image.png
Loading 000000000_000000000_000000122_image.png
Loading 000000000_000000000_000000123_image.png
Loading 000000000_000000000_000000124_image.png

In [25]:


In [5]:
gp.Util.view(gold)



In [6]:
gp.Util.view(rhoana)



In [7]:
gp.Util.view(prob, color=False)



In [8]:
gp.Util.view(image, color=False)



In [9]:
gp.Util.vi(input_rhoana, input_gold)


Out[9]:
2.032825794062817

In [10]:
gp.metrics.adapted_rand(input_rhoana, input_gold)


Out[10]:
0.58514442753365858

In [11]:
net.uuid = 'IPMLB'
bigM_cremiB = gp.Legacy.create_bigM_without_mask(net, input_image, input_prob, input_rhoana, verbose=True)


1648 generated in 32.1830060482 seconds.
Grouped into 296 patches in 0.00803518295288 seconds.
1760 generated in 37.3747820854 seconds.
Grouped into 305 patches in 0.0077919960022 seconds.

In [12]:
bigM_cA_after_95, out_cA_volume_after_auto_95, cA_auto_fixes_95, cA_auto_vi_s_95 = gp.Legacy.splits_global_from_M_automatic(net, 
                                                                                                                            bigM_cremiB, 
                                                                                                                            input_image, 
                                                                                                                            input_prob, 
                                                                                                                            input_rhoana,
                                                                                                                            input_gold, 
                                                                                                                            sureness_threshold=.95)

In [13]:
gp.Util.vi(out_cA_volume_after_auto_95, input_gold)


Out[13]:
2.9363190425761205

In [14]:
gp.metrics.adapted_rand(out_cA_volume_after_auto_95, input_gold)


Out[14]:
0.77019023534857156

In [15]:
cA_auto_fixes_95


Out[15]:
[(0, 0.99999666213989258),
 (0, 0.99998331069946289),
 (0, 0.99997556209564209),
 (0, 0.99996280670166016),
 (1, 0.9999578595161438),
 (0, 0.99995410442352295),
 (1, 0.99993646144866943),
 (0, 0.999930739402771),
 (1, 0.99991261959075928),
 (1, 0.99991154670715332),
 (1, 0.99995994567871094),
 (0, 0.99990248680114746),
 (0, 0.99984288215637207),
 (1, 0.99984192848205566),
 (0, 0.99983119964599609),
 (0, 0.99978169798851013),
 (0, 0.99977171421051025),
 (0, 0.99974232912063599),
 (0, 0.99972695112228394),
 (0, 0.99972623586654663),
 (1, 0.99968212842941284),
 (0, 0.99967628717422485),
 (0, 0.99961256980895996),
 (0, 0.99960362911224365),
 (0, 0.99972695112228394),
 (1, 0.99953120946884155),
 (1, 0.99950551986694336),
 (1, 0.99963210060069563),
 (0, 0.9993402361869812),
 (0, 0.99932390451431274),
 (1, 0.99922299385070801),
 (0, 0.99921345710754395),
 (1, 0.99920111894607544),
 (0, 0.99915897846221924),
 (0, 0.99927312135696411),
 (1, 0.99912625551223755),
 (1, 0.99892324209213257),
 (0, 0.99889695644378662),
 (1, 0.99870491027832031),
 (0, 0.99869978427886963),
 (0, 0.99857998923292379),
 (1, 0.99974256753921509),
 (0, 0.99856948852539062),
 (0, 0.99854159355163574),
 (1, 0.9985346794128418),
 (0, 0.99851232767105103),
 (0, 0.99846845865249634),
 (0, 0.99846690893173218),
 (0, 0.99843794107437134),
 (1, 0.99828076362609863),
 (0, 0.99812930822372437),
 (1, 0.99942082166671753),
 (0, 0.99793628023730385),
 (0, 0.9975583553314209),
 (0, 0.9984898449944668),
 (1, 0.99747854471206665),
 (0, 0.99743109941482544),
 (1, 0.99737617480669094),
 (1, 0.99723812937736511),
 (0, 0.9972231388092041),
 (0, 0.99691510200500488),
 (0, 0.99685335159301758),
 (0, 0.99667853116989136),
 (0, 0.99654132127761841),
 (1, 0.99776554107666016),
 (0, 0.99638292193412781),
 (0, 0.99632555246353149),
 (0, 0.99607390165328979),
 (1, 0.99602895975112915),
 (0, 0.9960207291114721),
 (1, 0.99576901924525596),
 (1, 0.99574452638626099),
 (0, 0.99530667066574097),
 (0, 0.9952051043510437),
 (0, 0.99492928385734558),
 (0, 0.99492452963434086),
 (1, 0.99480384588241577),
 (0, 0.99473816156387329),
 (0, 0.99978899955749512),
 (0, 0.99450594955875027),
 (0, 0.99450039863586426),
 (1, 0.99409261669431415),
 (0, 0.99837672710418701),
 (0, 0.99391035253840276),
 (1, 0.99999916553497314),
 (0, 0.99381726980209351),
 (1, 0.99371484758184969),
 (0, 0.99352288246154785),
 (1, 0.99351251125335693),
 (1, 0.99301069974899292),
 (0, 0.99780040979385376),
 (0, 0.99300283193588257),
 (1, 0.9928784966468811),
 (1, 0.99257129430770874),
 (1, 0.99143180251121521),
 (0, 0.99108892679214478),
 (0, 0.99108457565307617),
 (1, 0.99097520112991333),
 (0, 0.9906190037727356),
 (0, 0.9903557300567627),
 (0, 0.99793750047683716),
 (1, 0.98975338512161981),
 (0, 0.98781107681097724),
 (1, 0.98768579959869385),
 (1, 0.99954909086227417),
 (1, 0.98564153909683228),
 (0, 0.98490539390318876),
 (1, 0.99485349655151367),
 (1, 0.98644739389419556),
 (0, 0.98466798484114793),
 (1, 0.9844388732802396),
 (1, 0.98360729217529297),
 (0, 0.98308038711547852),
 (0, 0.98254531621932983),
 (0, 0.98211574554443359),
 (0, 0.98199433088302612),
 (1, 0.98153071236512901),
 (1, 0.98143678903579712),
 (0, 0.98130922196595305),
 (1, 0.98130404949188232),
 (0, 0.98053759336471558),
 (0, 0.98034855373974505),
 (0, 0.98008733987808228),
 (0, 0.97971522808074951),
 (0, 0.99897974729537964),
 (0, 0.9778677225112915),
 (0, 0.97697743851374952),
 (1, 0.97685390710830688),
 (0, 0.97675049304962158),
 (0, 0.97622926909844954),
 (1, 0.97471684217453003),
 (1, 0.9722212553024292),
 (0, 0.97103351354598999),
 (1, 0.97057364619855946),
 (1, 0.9689030647277832),
 (1, 0.96697825193405151),
 (1, 0.99195978045463562),
 (1, 0.9886060357093811),
 (1, 0.96597051420691293),
 (0, 0.96571922405012722),
 (0, 0.9633527398109436),
 (0, 0.96283888816833496),
 (0, 0.96259897947311401),
 (0, 0.96242666244506836),
 (0, 0.96207985588878497),
 (0, 0.96206372976303101),
 (1, 0.96063679456710815),
 (0, 0.9800870418548584),
 (0, 0.95816323609834309),
 (0, 0.95772537716433537),
 (0, 0.95685380697250366),
 (1, 0.99776756763458252),
 (0, 0.96664708852767944),
 (1, 0.95640191132343777),
 (0, 0.95563817024230957),
 (0, 0.95254972769933588),
 (0, 0.96986830234527588),
 (1, 0.95029139518737793)]

In [4]:
ar = []
for z in range(input_rhoana.shape[0]):
    ar.append(gp.metrics.adapted_rand(input_rhoana[z], input_gold[z]))

In [8]:
data = collections.OrderedDict()
data['Initial\nSegmentation'] = ar
data['GP*\n   (sim.)'] = []#cylinder_sim_user_vi_s[-1]
# data['GP*\n   (sim.)'] = []#[v - 0.1 for v in dojo_vi_95[2]]
data['FP\n   (sim.)'] = []#dojo_vi_95[2]

gp.Legacy.plot_arand(data, '/tmp/cremi.pdf')#, output_folder+'/dojo_vi.pdf')



In [6]:
import collections

In [ ]: