In [1]:
%load_ext autoreload
%autoreload 2

import cPickle as pickle
import os; import sys; sys.path.append('..')
import gp
import gp.nets as nets

from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion

from sklearn.metrics import (classification_report, accuracy_score, roc_curve, auc,
                             precision_recall_fscore_support, f1_score,
                             precision_recall_curve, average_precision_score,
                             zero_one_loss)


from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt

%matplotlib inline


Using gpu device 0: GeForce GTX TITAN (CNMeM is disabled, CuDNN 4007)
/home/d/nolearn/local/lib/python2.7/site-packages/theano/tensor/signal/downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.
  "downsample module has been moved to the theano.tensor.signal.pool module.")

In [10]:
NETS = []
NETS.append('../nets/IP_FULL.p') # image + prob
NETS.append('../nets/IPLB_FULL.p') # image + large border
NETS.append('../nets/IPM_FULL.p') # image + prob + binary
NETS.append('../nets/IPMB_FULL.p') # image + prob + binary + border
NETS.append('../nets/IPMLB_FULL.p') # image + prob + binary + large border

network_path = NETS[3]

with open(network_path, 'rb') as f:
    net = pickle.load(f)

In [11]:
X_test, y_test = gp.Patch.load_rgba_test_only('ipmb')


Loaded /home/d/patches//ipmb/ in 0.107438087463 seconds.

In [12]:
test_prediction = net.predict(X_test)
test_prediction_prob = net.predict_proba(X_test)
print
print 'Precision/Recall:'
print classification_report(y_test, test_prediction)


Precision/Recall:
             precision    recall  f1-score   support

          0       0.96      0.93      0.94      8780
          1       0.93      0.96      0.94      8780

avg / total       0.94      0.94      0.94     17560
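
The sklearn imports above already include precision_recall_curve and average_precision_score; a minimal sketch of a precision-recall summary, assuming the positive class is column 1 of test_prediction_prob:

In [ ]:
# Sketch: precision-recall curve for the positive class and its average precision.
precision, recall, _ = precision_recall_curve(y_test, test_prediction_prob[:, 1])
print 'Average precision:', average_precision_score(y_test, test_prediction_prob[:, 1])
plt.plot(recall, precision)
plt.xlabel('Recall')
plt.ylabel('Precision')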


In [6]:
test_acc = net.score(X_test, y_test)
acc_score = accuracy_score(y_test, test_prediction)
print 'Test Accuracy:', test_acc
print 'Accuracy Score:', acc_score


Test Accuracy: 0.94589977221
Accuracy Score: 0.94589977221
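
zero_one_loss is imported above but unused; a quick sanity check (sketch) that it matches 1 - accuracy:

In [ ]:
# Sketch: the misclassification rate should equal 1 - the accuracy reported above.
print 'Zero-one loss:', zero_one_loss(y_test, test_prediction)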

In [13]:
plot_loss(net)


Out[13]:
<module 'matplotlib.pyplot' from '/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/pyplot.pyc'>

In [14]:
# ROC/AUC
fpr, tpr, _ = roc_curve(y_test, test_prediction_prob[:,1])
roc_auc = auc(fpr, tpr)
data = {}
data['CNN'] = (fpr, tpr, roc_auc)
gp.Legacy.plot_roc(data)
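
gp.Legacy.plot_roc draws the curve; if it is unavailable, a plain-matplotlib equivalent (a sketch, not the gp implementation) looks like this:

In [ ]:
# Sketch: ROC plot from the fpr/tpr/roc_auc computed above.
plt.figure()
plt.plot(fpr, tpr, label='CNN (AUC = %0.3f)' % roc_auc)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.legend(loc='lower right')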



In [15]:
net.uuid = 'IPMB'

In [16]:
gp.Stats.run_dojo_xp(net)


Finding Top 5 merge errors..
22  merge errors found.
Creating dojo bigM..

Correcting merge errors with p < .05
   Mean VI improvement -0.00428083410261
   Median VI improvement 0.0
Correcting split errors with p > .95
   Mean VI improvement 0.0471353880513
   Median VI improvement 0.0148573761925

Correcting merge errors with p < .01
   Mean VI improvement -0.00491462935149
   Median VI improvement 0.0
Correcting split errors with p > .99
   Mean VI improvement 0.0304090052117
   Median VI improvement 0.0242047279787

Correcting merge errors by simulated user (er=0)
/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/pyplot.py:516: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).
  max_open_warning, RuntimeWarning)
   Mean VI improvement 0.00594970652485
   Median VI improvement 0.0
Correcting split errors by simulated user (er=0)
   Mean VI improvement 0.127238676644
   Median VI improvement 0.0924294846021

   Re-running simulated user with er=0 .. 0.2
/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/font_manager.py:1288: UserWarning: findfont: Font family [u'normal'] not found. Falling back to Bitstream Vera Sans
  (prop.get_family(), self.defaultFamily[fontext]))

In [17]:
gp.Stats.run_cylinder_xp(net)


Original median VI 0.379410649496
Creating cylinder bigM..
4772 generated in 311.328977108 seconds.
Grouped into 1159 patches in 0.0207710266113 seconds.
4568 generated in 408.179308176 seconds.
Grouped into 1115 patches in 0.0223331451416 seconds.
4624 generated in 395.91599822 seconds.
Grouped into 1123 patches in 0.0195620059967 seconds.
4276 generated in 403.413915873 seconds.
Grouped into 1054 patches in 0.020956993103 seconds.
4220 generated in 502.228272915 seconds.
Grouped into 1040 patches in 0.0207550525665 seconds.
4348 generated in 470.495831013 seconds.
Grouped into 1061 patches in 0.0214829444885 seconds.
4372 generated in 488.86192584 seconds.
Grouped into 1065 patches in 0.0216200351715 seconds.
4528 generated in 507.229362965 seconds.
Grouped into 1106 patches in 0.0223169326782 seconds.
4392 generated in 462.02737999 seconds.
Grouped into 1070 patches in 0.0215220451355 seconds.
4232 generated in 425.532567978 seconds.
Grouped into 1031 patches in 0.0209701061249 seconds.
4492 generated in 575.247015953 seconds.
Grouped into 1100 patches in 0.022078037262 seconds.
4256 generated in 518.28203702 seconds.
Grouped into 1050 patches in 0.020763874054 seconds.
4076 generated in 415.091445923 seconds.
Grouped into 1006 patches in 0.0200901031494 seconds.
3784 generated in 245.003094912 seconds.
Grouped into 930 patches in 0.018828868866 seconds.
3820 generated in 255.350924969 seconds.
Grouped into 935 patches in 0.0185861587524 seconds.
3544 generated in 341.168306112 seconds.
Grouped into 875 patches in 0.0173788070679 seconds.
3256 generated in 325.498118877 seconds.
Grouped into 798 patches in 0.0140368938446 seconds.
2800 generated in 327.48389101 seconds.
Grouped into 680 patches in 0.0145711898804 seconds.
3132 generated in 311.000331163 seconds.
Grouped into 772 patches in 0.0153019428253 seconds.
2696 generated in 279.896919966 seconds.
Grouped into 657 patches in 0.0132629871368 seconds.
2556 generated in 291.035890818 seconds.
Grouped into 627 patches in 0.0123569965363 seconds.
2332 generated in 241.73243618 seconds.
Grouped into 567 patches in 0.0113251209259 seconds.
2168 generated in 247.352375031 seconds.
Grouped into 534 patches in 0.0104370117188 seconds.
1964 generated in 230.534354925 seconds.
Grouped into 482 patches in 0.0101850032806 seconds.
1856 generated in 216.698638916 seconds.
Grouped into 454 patches in 0.00915312767029 seconds.
1712 generated in 196.166188002 seconds.
Grouped into 419 patches in 0.00736999511719 seconds.
1556 generated in 174.357916117 seconds.
Grouped into 380 patches in 0.00770306587219 seconds.
1420 generated in 93.4859130383 seconds.
Grouped into 351 patches in 0.00707602500916 seconds.
1512 generated in 101.495519161 seconds.
Grouped into 378 patches in 0.00794196128845 seconds.
1452 generated in 104.396973848 seconds.
Grouped into 360 patches in 0.00721287727356 seconds.
1296 generated in 95.167429924 seconds.
Grouped into 320 patches in 0.00695204734802 seconds.
1028 generated in 95.6141860485 seconds.
Grouped into 253 patches in 0.00516295433044 seconds.
996 generated in 115.183759928 seconds.
Grouped into 244 patches in 0.00490808486938 seconds.
836 generated in 101.306865931 seconds.
Grouped into 203 patches in 0.00410199165344 seconds.
800 generated in 102.42248702 seconds.
Grouped into 199 patches in 0.00398302078247 seconds.
744 generated in 99.3433718681 seconds.
Grouped into 186 patches in 0.00389909744263 seconds.
768 generated in 301.467064142 seconds.
Grouped into 186 patches in 0.00393295288086 seconds.
616 generated in 517.308564901 seconds.
Grouped into 154 patches in 0.00337409973145 seconds.
632 generated in 607.68285203 seconds.
Grouped into 158 patches in 0.00292110443115 seconds.
572 generated in 358.928737164 seconds.
Grouped into 143 patches in 0.00286483764648 seconds.
512 generated in 550.509482861 seconds.
Grouped into 128 patches in 0.00252890586853 seconds.
448 generated in 225.015904903 seconds.
Grouped into 112 patches in 0.00221800804138 seconds.
372 generated in 55.4369671345 seconds.
Grouped into 93 patches in 0.00186109542847 seconds.
316 generated in 46.4778199196 seconds.
Grouped into 79 patches in 0.00164079666138 seconds.
244 generated in 44.0339210033 seconds.
Grouped into 61 patches in 0.00127100944519 seconds.
176 generated in 29.4219899178 seconds.
Grouped into 44 patches in 0.000908136367798 seconds.
120 generated in 26.7378342152 seconds.
Grouped into 30 patches in 0.000691175460815 seconds.
56 generated in 15.219340086 seconds.
Grouped into 14 patches in 0.00037693977356 seconds.
4 generated in 8.35377907753 seconds.
Grouped into 1 patches in 7.79628753662e-05 seconds.
0 generated in 0.57372713089 seconds.
Grouped into 0 patches in 3.2901763916e-05 seconds.

Correcting split errors with p > .95
30 minutes done bigM_max= 0.999910116196
60 minutes done bigM_max= 0.999073624611
90 minutes done bigM_max= 0.999971985817
120 minutes done bigM_max= 0.981018185616
150 minutes done bigM_max= 0.950831592083
   Mean VI improvement 0.0417470978762
   Median VI improvement 0.0330154203137

Correcting split errors with p > .99
30 minutes done bigM_max= 0.999910116196
60 minutes done bigM_max= 0.999073624611
90 minutes done bigM_max= 0.999971985817
   Mean VI improvement 0.0360165447822
   Median VI improvement 0.0256142080213

Correcting split errors with p >= .0
30 minutes done bigM_max= 0.999910116196
60 minutes done bigM_max= 0.999073624611
90 minutes done bigM_max= 0.999971985817
120 minutes done bigM_max= 0.981018185616
150 minutes done bigM_max= 0.950831592083
180 minutes done bigM_max= 0.882512867451
210 minutes done bigM_max= 0.781772613525
240 minutes done bigM_max= 0.666864335537
270 minutes done bigM_max= 0.486370503902
300 minutes done bigM_max= 0.29535934329
330 minutes done bigM_max= 0.162888631225
360 minutes done bigM_max= 0.0844661593437
390 minutes done bigM_max= 0.0453877933323
420 minutes done bigM_max= 0.0636901459693
450 minutes done bigM_max= 0.0118481572717
480 minutes done bigM_max= 0.0213649608195
510 minutes done bigM_max= 0.469319134951
540 minutes done bigM_max= 0.00220291502774
570 minutes done bigM_max= 0.0012714904733
600 minutes done bigM_max= 0.000691394787282
630 minutes done bigM_max= 0.000388366548577
660 minutes done bigM_max= 0.00167659425642
690 minutes done bigM_max= 9.4221359177e-05
720 minutes done bigM_max= 5.76491031178e-05
750 minutes done bigM_max= 2.57156298176e-05
780 minutes done bigM_max= 6.54716905046e-06
810 minutes done bigM_max= 1.74910200733e-06
840 minutes done bigM_max= 2.28920583822e-07
870 minutes done bigM_max= 4.36075243354e-09
   Mean VI improvement -2.89758046728
   Median VI improvement -2.97207177797

Correcting split errors by simulated user (er=0)
   Mean VI improvement 0.0813942098041
   Median VI improvement 0.0789001976473
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-17-92c57450d604> in <module>()
----> 1 gp.Stats.run_cylinder_xp(net)

/home/d/Projects/gp/gp/stats.pyc in run_cylinder_xp(cnn)
    587     # gp.Legacy.plot_vi_combined(vi_s_per_correction_auto, vi_s_per_correction, output_folder+'/cylinder_combined_vi.pdf')
    588 
--> 589     gp.Legacy.plot_vi_combined_no_interpolation(vi_s_per_correction_auto, vi_s_per_correction, output_folder+'/cylinder_combined_vi_no_interpolation.pdf', sweetspot=len(cylinder_auto_fixes_95))
    590 
    591 

TypeError: plot_vi_combined_no_interpolation() got an unexpected keyword argument 'sweetspot'

In [11]:
# load dojo data
input_image, input_prob, input_gold, input_rhoana, dojo_bbox = gp.Legacy.read_dojo_data()

In [12]:
input_image[0].max()


Out[12]:
225

In [13]:
imshow(input_prob[2], cmap='gray')


Out[13]:
<matplotlib.image.AxesImage at 0x7f75e8df7250>

In [41]:
imshow(input_rhoana[0])


Out[41]:
<matplotlib.image.AxesImage at 0x7fb72bd079d0>

In [53]:
import numpy as np

def seg_to_color(seg):
    # Map label IDs to pseudo-random RGB colors so neighboring segments are distinguishable.
    colors = np.zeros(seg.shape + (3,), dtype=np.uint8)
    colors[:, :, 0] = np.mod(107 * seg, 700).astype(np.uint8)
    colors[:, :, 1] = np.mod(509 * seg, 900).astype(np.uint8)
    colors[:, :, 2] = np.mod(200 * seg, 777).astype(np.uint8)
    return colors

# Replicate the grayscale image into an RGB canvas, then highlight pixels with
# high membrane probability (> 220) in green.
i_r = input_image[0]
rrr = np.dstack([i_r, i_r, i_r]).astype(np.uint8)
rrr[input_prob[0] > 220] = (0, 255, 0)
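
seg_to_color is only defined in the cell above; a sketch of its intended use is to pseudo-color a label image for display:

In [ ]:
# Sketch: color the first dojo segmentation slice by label ID.
imshow(seg_to_color(input_rhoana[0]))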

In [57]:
plt.figure()
imshow(rrr)


Out[57]:
<matplotlib.image.AxesImage at 0x7fb6c687f450>

In [ ]:


In [55]:
imshow(input_gold[0], cmap='gray')


Out[55]:
<matplotlib.image.AxesImage at 0x7fb6c681e9d0>

In [56]:
imshow(input_rhoana[0], cmap='gray')


Out[56]:
<matplotlib.image.AxesImage at 0x7fb72bcf0f50>

In [14]:
original_mean_VI, original_median_VI, original_VI_s = gp.Legacy.VI(input_gold, input_rhoana)
original_adapted_rand = gp.metrics.adapted_rand(input_rhoana, input_gold)
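
A quick look at the baseline numbers before any corrections (sketch; the values depend on the loaded dojo data):

In [ ]:
# Sketch: baseline segmentation quality of the dojo labels vs. ground truth.
print 'Original mean VI:', original_mean_VI
print 'Original median VI:', original_median_VI
print 'Original adapted Rand error:', original_adapted_rand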

In [64]:
merge_errors = gp.Legacy.get_top5_merge_errors(net, input_image, input_prob, input_rhoana)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-64-3647edb73200> in <module>()
----> 1 merge_errors = gp.Legacy.get_top5_merge_errors(net, input_image, input_prob, input_rhoana)

/home/d/Projects/gp/gp/legacy.py in get_top5_merge_errors(cnn, input_image, input_prob, input_rhoana, verbose)
    315                                               dilate=True,
    316                                               border_seeds=True,
--> 317                                               oversampling=False)
    318 
    319             if len(results) > 0:

/home/d/Projects/gp/gp/legacy.py in fix_single_merge(cnn, cropped_image, cropped_prob, cropped_binary, N, invert, dilate, border_seeds, erode, debug, before_merge_error, real_border, oversampling, crop)
    233 
    234           for i in range(5):
--> 235             eroded_ws2 = mh.erode(eroded_ws2)
    236 
    237           # Util.view(eroded_ws, large=True, color=False)

/home/d/nolearn/local/lib/python2.7/site-packages/mahotas/morph.pyc in erode(A, Bc, out, output)
    196     Bc = get_structuring_elem(A,Bc)
    197     output = _get_output(A, out, 'erode', output=output)
--> 198     return _morph.erode(A, Bc, output)
    199 
    200 

KeyboardInterrupt: 

In [66]:
first_rand = gp.metrics.adapted_rand(input_rhoana[0], input_gold[0])

In [78]:
net.uuid = 'IPMLB'

In [15]:
# Build the CNN probability matrix (bigM) used for split-error corrections on the first dojo slice.
M = gp.Legacy.create_bigM_without_mask(net, input_image[0:1], input_prob[0:1], input_rhoana[0:1], verbose=False)

In [25]:
# Automatically apply split-error corrections with p > .95 (sureness_threshold) and record the VI after each fix.
newM, new_rhoana, fixes, new_vi_s = gp.Legacy.splits_global_from_M_automatic(net, M, input_image[0:1], input_prob[0:1],
                                                                             input_rhoana[0:1], input_gold[0:1], sureness_threshold=.95)

In [26]:
fixes


Out[26]:
[(1, 0.99975186586380005),
 (1, 0.99943643808364868),
 (1, 0.99432837963104248),
 (0, 0.98944807052612305),
 (1, 0.9682241678237915),
 (1, 0.95969843864440918),
 (1, 0.97200888395309448),
 (0, 0.95207822322845459)]
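
Each entry in fixes appears to be a (flag, probability) pair; a sketch that summarizes them under that assumption:

In [ ]:
# Sketch: summarize the corrections, assuming (flag, probability) entries as printed above.
print 'Number of corrections:', len(fixes)
print 'Flagged as 1:', sum(1 for flag, _ in fixes if flag == 1)
print 'Lowest probability used:', min(p for _, p in fixes)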

In [27]:
gp.metrics.adapted_rand(new_rhoana[0], input_gold[0])


Out[27]:
0.069209533050430649

In [89]:
first_rand


Out[89]:
0.073737703314086689
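
Comparing the adapted Rand error of slice 0 before and after the automatic split corrections (sketch using the two values computed above):

In [ ]:
# Sketch: adapted Rand error before vs. after the automatic split fixes (lower is better).
corrected_rand = gp.metrics.adapted_rand(new_rhoana[0], input_gold[0])
print 'Before:', first_rand
print 'After: ', corrected_rand
print 'Improvement:', first_rand - corrected_rand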

In [ ]: