In [1]:
%load_ext autoreload
%autoreload 2

import cPickle as pickle
import os; import sys; sys.path.append('..')
import gp
import gp.nets as nets

from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion

from sklearn.metrics import classification_report, accuracy_score, roc_curve, auc, precision_recall_fscore_support, f1_score, precision_recall_curve, average_precision_score, zero_one_loss


from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt

%matplotlib inline


/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/font_manager.py:273: UserWarning: Matplotlib is building the font cache using fc-list. This may take a moment.
  warnings.warn('Matplotlib is building the font cache using fc-list. This may take a moment.')
Using gpu device 0: GeForce GTX TITAN (CNMeM is disabled, CuDNN 4007)
/home/d/nolearn/local/lib/python2.7/site-packages/theano/tensor/signal/downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.
  "downsample module has been moved to the theano.tensor.signal.pool module.")

In [2]:
import nolearn.lasagne.visualize

In [3]:
# Candidate trained networks -- each pickled nolearn net was trained on a
# different combination of input channels.
NETS = [
    '../nets/IP_FULL.p',     # image + prob
    '../nets/IPLB_FULL.p',   # image + large border
    '../nets/IPM_FULL.p',    # image + prob + binary
    '../nets/IPMB_FULL.p',   # image + prob + binary + border
    '../nets/IPMLB_FULL.p',  # image + prob + binary + large border
]

# Evaluate the last (most feature-rich) variant.
network_path = NETS[-1]

with open(network_path, 'rb') as f:
    net = pickle.load(f)

In [4]:
X_test, y_test = gp.Patch.load_rgba_test_only('ipmlb')


Loaded /home/d/patches//ipmlb/ in 0.0680668354034 seconds.

In [14]:
len(X_test)


Out[14]:
17560

In [5]:
# Hard label predictions and per-class probabilities on the test patches.
test_prediction = net.predict(X_test)
test_prediction_prob = net.predict_proba(X_test)
# Per-class precision/recall/F1 report.
print
print 'Precision/Recall:'
print classification_report(y_test, test_prediction)


Precision/Recall:
             precision    recall  f1-score   support

          0       0.96      0.92      0.94      8780
          1       0.92      0.96      0.94      8780

avg / total       0.94      0.94      0.94     17560


In [6]:
# Sanity check: nolearn's net.score and sklearn's accuracy_score should
# agree (and do, per the output below).
test_acc = net.score(X_test, y_test)
acc_score = accuracy_score(y_test, test_prediction)
print 'Test Accuracy:', test_acc
print 'Accuracy Score:', acc_score


Test Accuracy: 0.940432801822
Accuracy Score: 0.940432801822

In [7]:
plot_loss(net)


Out[7]:
<module 'matplotlib.pyplot' from '/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/pyplot.pyc'>

In [8]:
# ROC curve and area-under-curve, computed from the positive-class
# probability column.
fpr, tpr, _ = roc_curve(y_test, test_prediction_prob[:, 1])
roc_auc = auc(fpr, tpr)

data = {'CNN': (fpr, tpr, roc_auc)}
gp.Legacy.plot_roc(data)



In [5]:
net.uuid = 'IPMLB'

In [6]:
gp.Stats.run_dojo_xp(net)


a
Finding Top 5 merge errors..
19  merge errors found.
Creating dojo bigM..

Correcting merge errors with p < .05
   Mean VI improvement 0.000689478277265
   Median VI improvement 0.0
Correcting split errors with p > .95
   Mean VI improvement 0.0418435697434
   Median VI improvement 0.0376444300922

Correcting merge errors by simulated user (er=0)
/home/d/nolearn/local/lib/python2.7/site-packages/matplotlib/pyplot.py:516: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).
  max_open_warning, RuntimeWarning)
   Mean VI improvement 0.00498339154038
   Median VI improvement 0.00813268853006
Correcting split errors by simulated user (er=0)
   Mean VI improvement 0.123369068111
   Median VI improvement 0.136681301308
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-6-2ef512bb0396> in <module>()
----> 1 gp.Stats.run_dojo_xp(net)

/home/d/Projects/gp/gp/stats.py in run_dojo_xp(cnn)
    270     data['Random\nRecommen-\ndations'] = random_recommendations
    271     data['Automatic\nCorrections\n(p=.95)'] = dojo_vi_95[2]
--> 272     data['Automatic\nCorrections\n(p=.99)'] = dojo_vi_99[2]
    273 
    274 

NameError: global name 'dojo_vi_99' is not defined

In [8]:
gp.Stats.run_dojo_xp(net)


a
Finding Top 5 merge errors..
11  merge errors found.
Creating dojo bigM..

Correcting merge errors with p < .05
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-8-2ef512bb0396> in <module>()
----> 1 gp.Stats.run_dojo_xp(net)

/home/d/Projects/gp/gp/stats.py in run_dojo_xp(cnn)
     74       #
     75       print 'Correcting merge errors with p < .05'
---> 76       bigM_dojo_05, corrected_rhoana_05 = gp.Legacy.perform_auto_merge_correction(cnn, bigM_dojo, input_image, input_prob, input_rhoana, merge_errors, .05)
     77 
     78       print '   Mean VI improvement', original_mean_VI-gp.Legacy.VI(input_gold, corrected_rhoana_05)[0]

/home/d/Projects/gp/gp/legacy.py in perform_auto_merge_correction(cnn, big_M, input_image, input_prob, input_rhoana, merge_errors, p)
    526             # print 'adding', label1, 'to', z
    527 
--> 528             new_m = Legacy.add_new_label_to_M(cnn, new_m, input_image[z], input_prob[z], new_rhoana, label1)
    529             new_m = Legacy.add_new_label_to_M(cnn, new_m, input_image[z], input_prob[z], new_rhoana, label2)
    530 

/home/d/Projects/gp/gp/legacy.py in add_new_label_to_M(cnn, m, input_image, input_prob, input_rhoana, label1)
    691       prediction = Patch.grab_group_test_and_unify(cnn, input_image, input_prob, input_rhoana, label1, l_neighbor, oversampling=False)
    692 
--> 693       m[label1,l_neighbor] = prediction
    694       m[l_neighbor,label1] = prediction
    695 

IndexError: index 2716 is out of bounds for axis 0 with size 2716

In [13]:
gp.Stats.run_cylinder_xp(net)


Original median VI 0.379410649496
Creating cylinder bigM..
4772 generated in 485.587388992 seconds.
Grouped into 1159 patches in 0.0223789215088 seconds.
4568 generated in 472.964470863 seconds.
Grouped into 1115 patches in 0.0217130184174 seconds.
4624 generated in 506.134793997 seconds.
Grouped into 1123 patches in 0.021656036377 seconds.
4276 generated in 473.910904169 seconds.
Grouped into 1054 patches in 0.0167679786682 seconds.
4220 generated in 318.505452156 seconds.
Grouped into 1040 patches in 0.0194969177246 seconds.
4348 generated in 314.940897942 seconds.
Grouped into 1061 patches in 0.0204341411591 seconds.
4372 generated in 280.479432106 seconds.
Grouped into 1065 patches in 0.0206258296967 seconds.
4528 generated in 287.444966793 seconds.
Grouped into 1106 patches in 0.0209999084473 seconds.
4392 generated in 277.018268824 seconds.
Grouped into 1070 patches in 0.0208740234375 seconds.
4232 generated in 313.791292906 seconds.
Grouped into 1031 patches in 0.0196208953857 seconds.
4492 generated in 534.986439943 seconds.
Grouped into 1100 patches in 0.021388053894 seconds.
4256 generated in 543.023267984 seconds.
Grouped into 1050 patches in 0.0204620361328 seconds.
4076 generated in 478.590109825 seconds.
Grouped into 1006 patches in 0.0164959430695 seconds.
3784 generated in 479.029245853 seconds.
Grouped into 930 patches in 0.0180399417877 seconds.
3820 generated in 418.119191885 seconds.
Grouped into 935 patches in 0.0153200626373 seconds.
3544 generated in 550.532541037 seconds.
Grouped into 875 patches in 0.0169169902802 seconds.
3256 generated in 459.519010067 seconds.
Grouped into 798 patches in 0.0156590938568 seconds.
2800 generated in 319.776812077 seconds.
Grouped into 680 patches in 0.0113310813904 seconds.
3132 generated in 314.387834072 seconds.
Grouped into 772 patches in 0.0154190063477 seconds.
2696 generated in 176.01072216 seconds.
Grouped into 657 patches in 0.0126891136169 seconds.
2556 generated in 159.71511507 seconds.
Grouped into 627 patches in 0.01229596138 seconds.
2332 generated in 158.054984808 seconds.
Grouped into 567 patches in 0.00915503501892 seconds.
2168 generated in 228.887804985 seconds.
Grouped into 534 patches in 0.0103299617767 seconds.
1964 generated in 223.064239979 seconds.
Grouped into 482 patches in 0.00959396362305 seconds.
1856 generated in 175.630971909 seconds.
Grouped into 454 patches in 0.00895810127258 seconds.
1712 generated in 211.616394043 seconds.
Grouped into 419 patches in 0.00811004638672 seconds.
1556 generated in 188.037892103 seconds.
Grouped into 380 patches in 0.00620293617249 seconds.
1420 generated in 163.845782995 seconds.
Grouped into 351 patches in 0.00686502456665 seconds.
1512 generated in 170.670491934 seconds.
Grouped into 378 patches in 0.00722408294678 seconds.
1452 generated in 177.842138052 seconds.
Grouped into 360 patches in 0.00693082809448 seconds.
1296 generated in 170.718465805 seconds.
Grouped into 320 patches in 0.00641918182373 seconds.
1028 generated in 132.760032177 seconds.
Grouped into 253 patches in 0.005126953125 seconds.
996 generated in 148.537099838 seconds.
Grouped into 244 patches in 0.00480794906616 seconds.
836 generated in 93.3061048985 seconds.
Grouped into 203 patches in 0.00416493415833 seconds.
800 generated in 113.014451027 seconds.
Grouped into 199 patches in 0.00325202941895 seconds.
744 generated in 106.82289505 seconds.
Grouped into 186 patches in 0.00358200073242 seconds.
768 generated in 113.217721939 seconds.
Grouped into 186 patches in 0.00367879867554 seconds.
616 generated in 103.045521021 seconds.
Grouped into 154 patches in 0.00300407409668 seconds.
632 generated in 102.869172096 seconds.
Grouped into 158 patches in 0.0030210018158 seconds.
572 generated in 94.6305019855 seconds.
Grouped into 143 patches in 0.00276494026184 seconds.
512 generated in 85.1142728329 seconds.
Grouped into 128 patches in 0.0024950504303 seconds.
448 generated in 66.5238518715 seconds.
Grouped into 112 patches in 0.00183486938477 seconds.
372 generated in 62.9190318584 seconds.
Grouped into 93 patches in 0.00183701515198 seconds.
316 generated in 49.5694658756 seconds.
Grouped into 79 patches in 0.00170183181763 seconds.
244 generated in 43.8118669987 seconds.
Grouped into 61 patches in 0.00118708610535 seconds.
176 generated in 34.7993199825 seconds.
Grouped into 44 patches in 0.000893115997314 seconds.
120 generated in 22.37490201 seconds.
Grouped into 30 patches in 0.000763893127441 seconds.
56 generated in 14.7497940063 seconds.
Grouped into 14 patches in 0.000308990478516 seconds.
4 generated in 8.11764883995 seconds.
Grouped into 1 patches in 6.48498535156e-05 seconds.
0 generated in 0.500217914581 seconds.
Grouped into 0 patches in 3.31401824951e-05 seconds.

Correcting split errors with p > .95
30 minutes done bigM_max= 0.999928116798
60 minutes done bigM_max= 0.999382019043
90 minutes done bigM_max= 0.997102558613
120 minutes done bigM_max= 0.99008256197
150 minutes done bigM_max= 0.984094202518
   Mean VI improvement 0.0403123303246
   Median VI improvement 0.0246614190212

Correcting split errors with p > .99
30 minutes done bigM_max= 0.999928116798
60 minutes done bigM_max= 0.999382019043
90 minutes done bigM_max= 0.997102558613
120 minutes done bigM_max= 0.99008256197
   Mean VI improvement 0.0329803506597
   Median VI improvement 0.0219774746577

Correcting split errors with p >= .0
30 minutes done bigM_max= 0.999928116798
60 minutes done bigM_max= 0.999382019043
90 minutes done bigM_max= 0.997102558613
120 minutes done bigM_max= 0.99008256197
150 minutes done bigM_max= 0.984094202518
180 minutes done bigM_max= 0.91908800602
210 minutes done bigM_max= 0.817092716694
240 minutes done bigM_max= 0.693986177444
270 minutes done bigM_max= 0.516857028008
300 minutes done bigM_max= 0.300107359886
330 minutes done bigM_max= 0.151971906424
360 minutes done bigM_max= 0.0728044956923
390 minutes done bigM_max= 0.0663990119329
420 minutes done bigM_max= 0.14493673205
450 minutes done bigM_max= 0.198690955131
480 minutes done bigM_max= 0.00528933526948
510 minutes done bigM_max= 0.00382569636453
540 minutes done bigM_max= 0.00151537824422
570 minutes done bigM_max= 0.0171242092277
600 minutes done bigM_max= 0.000678467819736
630 minutes done bigM_max= 0.000225595955271
660 minutes done bigM_max= 0.000116377959785
690 minutes done bigM_max= 5.02195907757e-05
720 minutes done bigM_max= 2.03927393159e-05
750 minutes done bigM_max= 6.70668243689e-06
780 minutes done bigM_max= 0.232427805662
810 minutes done bigM_max= 4.32282964535e-07
840 minutes done bigM_max= 3.00586771313e-07
870 minutes done bigM_max= 1.90562482461e-10
   Mean VI improvement -2.89497784854
   Median VI improvement -2.9805384045

Correcting split errors by simulated user (er=0)
   Mean VI improvement 0.0813859180456
   Median VI improvement 0.0785471115219
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-13-92c57450d604> in <module>()
----> 1 gp.Stats.run_cylinder_xp(net)

/home/d/Projects/gp/gp/stats.py in run_cylinder_xp(cnn)
    587     # gp.Legacy.plot_vi_combined(vi_s_per_correction_auto, vi_s_per_correction, output_folder+'/cylinder_combined_vi.pdf')
    588 
--> 589     gp.Legacy.plot_vi_combined_no_interpolation(vi_s_per_correction_auto, vi_s_per_correction, output_folder+'/cylinder_combined_vi_no_interpolation.pdf', sweetspot=len(cylinder_auto_fixes_95))
    590 
    591 

TypeError: plot_vi_combined_no_interpolation() got an unexpected keyword argument 'sweetspot'

In [ ]:


In [ ]:


In [36]:
# Load the Dojo benchmark data: raw EM image, membrane probability maps,
# gold-standard labels, initial Rhoana segmentation, and a bounding box.
input_image, input_prob, input_gold, input_rhoana, dojo_bbox = gp.Legacy.read_dojo_data()

In [40]:
input_image[0].max()


Out[40]:
225

In [38]:
imshow(input_prob[2], cmap='gray')


Out[38]:
<matplotlib.image.AxesImage at 0x7fb6b90e42d0>

In [41]:
imshow(input_rhoana[0])


Out[41]:
<matplotlib.image.AxesImage at 0x7fb72bd079d0>

In [53]:
import numpy as np

def seg_to_color(slice):
    """Map an integer label image to deterministic pseudo-random RGB colors.

    Each channel hashes the label id with a fixed multiplier/modulus pair,
    so equal labels always receive equal colors.  The moduli (700/900/777)
    exceed 255, so values wrap when cast to uint8 -- acceptable for a
    visualization.

    Note: the parameter name shadows the ``slice`` builtin; it is kept
    unchanged for backward compatibility with existing callers.

    :param slice: integer label array (1-D or higher; the original
        required >= 2-D only because of a redundant ``[:,:]`` index).
    :returns: uint8 color array of shape ``slice.shape + (3,)``.
    """
    colors = np.zeros(slice.shape + (3,), dtype=np.uint8)
    # Per-channel multiply/modulo hash; scalar multipliers preserve the
    # original's dtype-promotion behavior.
    colors[..., 0] = np.mod(107 * slice, 700).astype(np.uint8)
    colors[..., 1] = np.mod(509 * slice, 900).astype(np.uint8)
    colors[..., 2] = np.mod(200 * slice, 777).astype(np.uint8)
    return colors

# RGB view of section 0: replicate the grayscale EM image into all three
# channels, then paint high-probability pixels green.
i_r = input_image[0]
# np.dstack replaces the original per-pixel Python double loop: one
# vectorized copy instead of O(rows*cols) interpreter-level assignments,
# with identical resulting values and dtype.
rrr = np.dstack((i_r, i_r, i_r)).astype(np.uint8)

# Highlight pixels whose probability value exceeds 220 (same magic
# threshold as the original notebook) in pure green.
rrr[input_prob[0] > 220] = (0, 255, 0)

In [57]:
# Display the green-overlay visualization built above in a fresh figure.
plt.figure()
imshow(rrr)


Out[57]:
<matplotlib.image.AxesImage at 0x7fb6c687f450>

In [ ]:


In [55]:
imshow(input_gold[0], cmap='gray')


Out[55]:
<matplotlib.image.AxesImage at 0x7fb6c681e9d0>

In [56]:
imshow(input_rhoana[0], cmap='gray')


Out[56]:
<matplotlib.image.AxesImage at 0x7fb72bcf0f50>

In [62]:
# Baseline quality of the uncorrected Rhoana segmentation against gold:
# variation of information (mean / median / per-slice) and adapted Rand.
original_mean_VI, original_median_VI, original_VI_s = gp.Legacy.VI(input_gold, input_rhoana)
original_adapted_rand = gp.metrics.adapted_rand(input_rhoana, input_gold)

In [64]:
merge_errors = gp.Legacy.get_top5_merge_errors(net, input_image, input_prob, input_rhoana)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-64-3647edb73200> in <module>()
----> 1 merge_errors = gp.Legacy.get_top5_merge_errors(net, input_image, input_prob, input_rhoana)

/home/d/Projects/gp/gp/legacy.py in get_top5_merge_errors(cnn, input_image, input_prob, input_rhoana, verbose)
    315                                               dilate=True,
    316                                               border_seeds=True,
--> 317                                               oversampling=False)
    318 
    319             if len(results) > 0:

/home/d/Projects/gp/gp/legacy.py in fix_single_merge(cnn, cropped_image, cropped_prob, cropped_binary, N, invert, dilate, border_seeds, erode, debug, before_merge_error, real_border, oversampling, crop)
    233 
    234           for i in range(5):
--> 235             eroded_ws2 = mh.erode(eroded_ws2)
    236 
    237           # Util.view(eroded_ws, large=True, color=False)

/home/d/nolearn/local/lib/python2.7/site-packages/mahotas/morph.pyc in erode(A, Bc, out, output)
    196     Bc = get_structuring_elem(A,Bc)
    197     output = _get_output(A, out, 'erode', output=output)
--> 198     return _morph.erode(A, Bc, output)
    199 
    200 

KeyboardInterrupt: 

In [66]:
first_rand = gp.metrics.adapted_rand(input_rhoana[0], input_gold[0])

In [78]:
net.uuid = 'IPMLB'

In [80]:
M = gp.Legacy.create_bigM_without_mask(net, input_image[0:1], input_prob[0:1], input_rhoana[0:1], verbose=False)

In [85]:
# Automatically apply split corrections on section 0 for CNN sureness
# >= .95; returns the updated matrix, corrected segmentation, the list of
# applied fixes, and per-step VI scores.
newM, new_rhoana, fixes, new_vi_s = gp.Legacy.splits_global_from_M_automatic(net, M, input_image[0:1], input_prob[0:1], 
                                                                             input_rhoana[0:1], input_gold[0:1], sureness_threshold=.95)

In [87]:
fixes


Out[87]:
[(1, 0.99998939037322998),
 (1, 0.99904423952102661),
 (0, 0.99768221378326416),
 (1, 0.99105918407440186),
 (1, 0.98917317390441895),
 (1, 0.97869938611984253),
 (0, 0.97463154792785645),
 (1, 0.96067178249359131),
 (1, 0.96036547422409058)]

In [88]:
gp.metrics.adapted_rand(new_rhoana[0], input_gold[0])


Out[88]:
0.067449536773076524

In [89]:
first_rand


Out[89]:
0.073737703314086689

In [ ]: