In [1]:
%pylab inline
import warnings
warnings.filterwarnings("ignore")
import nolearn
from nolearn.lasagne import NeuralNet
import readdata
import lasagne
from lasagne import layers
from sklearn import metrics
import detectobjects as det
import os.path
from scipy import misc
import cv2
from progress_bar import ProgressBar
import shapefeatures
from sklearn import ensemble


Populating the interactive namespace from numpy and matplotlib
Using gpu device 0: GRID K520 (CNMeM is disabled)

In [2]:
opts = {'img_dir': '../data/tuberculosis-subset2/',
        'annotation_dir': '../data/tuberculosis-subset2/',
        'detection_probability_threshold': 0.5,
        'detection_overlap_threshold': 0.3, 
        'gauss': 1,
        'patch_size': (160,160),
        'image_downsample' : 8,
        'detection_step': 5,
        'patch_creation_step': 40,
        'object_class': None,
        'negative_training_discard_rate': .9
       }
opts['patch_stride_training'] = int(opts['patch_size'][0]*.25)
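
A note on the sizes implied by these options: each 160×160 patch is downsampled by a factor of 8 before classification, so the CNN defined further down sees 20×20 RGB inputs (the 3x20x20 input row in its layer table). A quick check of that arithmetic, added here for illustration:


In [ ]:
# Effective CNN input resolution implied by opts (illustrative check only)
net_input = opts['patch_size'][0] // opts['image_downsample']   # 160 // 8 = 20
print 'network input: 3 x %d x %d' % (net_input, net_input)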

In [3]:
reload(readdata)


Out[3]:
<module 'readdata' from 'readdata.pyc'>

In [6]:
trainfiles, valfiles, testfiles = readdata.create_sets(opts['img_dir'], train_set_proportion=.5, 
                                                  test_set_proportion=.5,
                                                  val_set_proportion=0)

train_y, train_X = readdata.create_patches(trainfiles, opts['annotation_dir'], opts['img_dir'],
                                           opts['patch_size'][0], opts['patch_stride_training'],
                                           grayscale=False, progressbar=True,
                                           downsample=opts['image_downsample'],
                                           objectclass=opts['object_class'],
                                           negative_discard_rate=opts['negative_training_discard_rate'])
test_y, test_X = readdata.create_patches(testfiles, opts['annotation_dir'], opts['img_dir'],
                                         opts['patch_size'][0], opts['patch_stride_training'],
                                         grayscale=False, progressbar=True,
                                         downsample=opts['image_downsample'],
                                         objectclass=opts['object_class'],
                                         negative_discard_rate=opts['negative_training_discard_rate'])

# Cut down on disproportionately large numbers of negative patches
train_X, train_y = readdata.balance(train_X, train_y, mult_neg=100)
#test_X, test_y = readdata.balance(test_X, test_y, mult_neg=100)

# Create rotated and flipped versions of the positive patches
train_X, train_y = readdata.augment_positives(train_X, train_y)
test_X, test_y = readdata.augment_positives(test_X, test_y)

print '\n'
print '%d positive training examples, %d negative training examples' % (sum(train_y), len(train_y)-sum(train_y))
print '%d positive testing examples, %d negative testing examples' % (sum(test_y), len(test_y)-sum(test_y))
print '%d patches (%.1f%% positive)' % (len(train_y)+len(test_y), 100.*((sum(train_y)+sum(test_y))/(len(train_y)+len(test_y))))


 

5704 positive training examples, 71300 negative training examples
5992 positive testing examples, 103643 negative testing examples
186639 patches (6.3% positive)
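
readdata.balance and readdata.augment_positives are helpers from this repository and their code is not shown here. The counts printed above are consistent with an 8-fold expansion of the positive set (4 rotations × 2 mirror states per patch). As a rough illustration of that idea for channel-first patches — an assumed sketch, not the project's actual implementation:


In [ ]:
# Assumed sketch of rotate-and-flip augmentation for positive patches (channels-first arrays).
# The real logic lives in readdata.augment_positives.
def augment_positives_sketch(X, y):
    pos = np.where(y == 1)[0]
    extra = []
    for idx in pos:
        patch = X[idx]                                # shape (channels, H, W)
        for flipped in (patch, patch[:, :, ::-1]):    # original and horizontal mirror
            for k in (0, 1, 2, 3):                    # 0/90/180/270 degree rotations
                if flipped is patch and k == 0:
                    continue                          # the unmodified patch is already in X
                extra.append(np.rot90(flipped, k, axes=(1, 2)))
    X_aug = np.concatenate([X, np.stack(extra)], axis=0)
    y_aug = np.concatenate([y, np.ones(len(extra), dtype=int)])
    return X_aug, y_aug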

View a random selection of positive and negative patches to check that they look right.


In [7]:
N_samples_to_display = 10
pos_indices = np.where(train_y)[0]
pos_indices = pos_indices[np.random.permutation(len(pos_indices))]
for i in range(N_samples_to_display):
    plt.subplot(2,N_samples_to_display,i+1)
    example_pos = train_X[pos_indices[i],:,:,:]
    example_pos = np.swapaxes(example_pos,0,2)
    plt.imshow(example_pos)
    plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')    

neg_indices = np.where(train_y==0)[0]
neg_indices = neg_indices[np.random.permutation(len(neg_indices))]
for i in range(N_samples_to_display,2*N_samples_to_display):
    plt.subplot(2,N_samples_to_display,i+1)
    example_neg = train_X[neg_indices[i],:,:,:]
    example_neg = np.swapaxes(example_neg,0,2)
    plt.imshow(example_neg)
    plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')
plt.gcf().set_size_inches(1.5*N_samples_to_display,3)


CNN training


In [8]:
def CNN(n_epochs):
    net1 = NeuralNet(
        layers=[
        ('input', layers.InputLayer),
        ('conv1', layers.Conv2DLayer),      #Convolutional layer.  Params defined below
        ('pool1', layers.MaxPool2DLayer),   # Max-pooling: downsamples the feature maps, reducing computation and adding some translation invariance
        ('conv2', layers.Conv2DLayer),
        ('hidden3', layers.DenseLayer),
        ('output', layers.DenseLayer),
        ],
        
    input_shape=(None, 3, opts['patch_size'][0]/opts['image_downsample'], 
                 opts['patch_size'][0]/opts['image_downsample']),
    conv1_num_filters=7, 
    conv1_filter_size=(3, 3), 
    conv1_nonlinearity=lasagne.nonlinearities.rectify,
        
    pool1_pool_size=(2, 2),
        
    conv2_num_filters=12, 
    conv2_filter_size=(2, 2),    
    conv2_nonlinearity=lasagne.nonlinearities.rectify,
        
    hidden3_num_units=100,
    output_num_units=2, 
    output_nonlinearity=lasagne.nonlinearities.softmax,

    update_learning_rate=0.0001,
    update_momentum=0.9,

    max_epochs=n_epochs,
    verbose=1,
    )
    return net1

cnn = CNN(500).fit(train_X, train_y)


# Neural Network with 77646 learnable parameters

## Layer information

  #  name     size
---  -------  -------
  0  input    3x20x20
  1  conv1    7x18x18
  2  pool1    7x9x9
  3  conv2    12x8x8
  4  hidden3  100
  5  output   2

  epoch    train loss    valid loss    train/val    valid acc  dur
-------  ------------  ------------  -----------  -----------  -----
      1       0.59214       0.19764      2.99607      0.92833  2.29s
      2       0.16736       0.14530      1.15182      0.94706  2.20s
      3       0.13442       0.11801      1.13905      0.96003  2.25s
      4       0.11727       0.10621      1.10413      0.96623  2.20s
      5       0.10796       0.09973      1.08246      0.96894  2.25s
      6       0.10156       0.09476      1.07173      0.97095  2.20s
      7       0.09703       0.09073      1.06939      0.97295  2.25s
      8       0.09385       0.08713      1.07710      0.97366  2.20s
      9       0.09058       0.08494      1.06639      0.97430  2.24s
     10       0.08812       0.08308      1.06060      0.97456  2.20s
     11       0.08640       0.08194      1.05444      0.97488  2.25s
     12       0.08608       0.08136      1.05795      0.97463  2.22s
     13       0.08625       0.08322      1.03643      0.97321  2.27s
     14       0.08941       0.08173      1.09389      0.97424  2.21s
     15       0.08494       0.08047      1.05552      0.97417  2.27s
     16       0.08145       0.08032      1.01406      0.97450  2.21s
     17       0.08124       0.08000      1.01543      0.97469  2.25s
     18       0.08027       0.07740      1.03707      0.97618  2.20s
     19       0.07567       0.07772      0.97366      0.97624  2.25s
     20       0.07554       0.08058      0.93753      0.97424  2.20s
     21       0.07800       0.07850      0.99361      0.97508  2.25s
     22       0.07465       0.07651      0.97566      0.97611  2.21s
     23       0.07261       0.07706      0.94230      0.97566  2.26s
     24       0.07266       0.07640      0.95108      0.97605  2.21s
     25       0.07171       0.07752      0.92511      0.97559  2.25s
     26       0.07102       0.07550      0.94065      0.97624  2.21s
     27       0.07021       0.07521      0.93351      0.97650  2.25s
     28       0.06944       0.07547      0.92012      0.97663  2.20s
     29       0.06805       0.07388      0.92114      0.97727  2.25s
     30       0.06770       0.07507      0.90180      0.97669  2.21s
     31       0.06736       0.07387      0.91195      0.97695  2.26s
     32       0.06692       0.07253      0.92261      0.97701  2.21s
     33       0.06579       0.07267      0.90530      0.97727  2.25s
     34       0.06570       0.07247      0.90655      0.97695  2.20s
     35       0.06530       0.07225      0.90378      0.97753  2.24s
     36       0.06498       0.07289      0.89140      0.97727  2.20s
     37       0.06466       0.07292      0.88672      0.97727  2.25s
     38       0.06438       0.07199      0.89434      0.97779  2.20s
     39       0.06359       0.07095      0.89622      0.97772  2.26s
     40       0.06297       0.07086      0.88870      0.97785  2.20s
     41       0.06236       0.06890      0.90503      0.97863  2.23s
     42       0.06125       0.06833      0.89632      0.97869  2.19s
     43       0.06061       0.06783      0.89349      0.97882  2.24s
     44       0.06018       0.06699      0.89843      0.97921  2.20s
     45       0.05955       0.06572      0.90613      0.98011  2.22s
     46       0.05901       0.06521      0.90495      0.98031  2.20s
     47       0.05856       0.06496      0.90153      0.98050  2.23s
     48       0.05812       0.06533      0.88967      0.97986  2.20s
     49       0.05791       0.06523      0.88789      0.97998  2.24s
     50       0.05730       0.06524      0.87835      0.98018  2.20s
     51       0.05692       0.06465      0.88041      0.97986  2.24s
     52       0.05655       0.06563      0.86173      0.97966  2.20s
     53       0.05623       0.06445      0.87233      0.98037  2.25s
     54       0.05572       0.06437      0.86566      0.97992  2.20s
     55       0.05533       0.06378      0.86761      0.98044  2.26s
     56       0.05500       0.06382      0.86179      0.98031  2.21s
     57       0.05460       0.06252      0.87341      0.98102  2.25s
     58       0.05424       0.06240      0.86920      0.98108  2.20s
     59       0.05394       0.06203      0.86964      0.98115  2.24s
     60       0.05371       0.06170      0.87044      0.98108  2.20s
     61       0.05340       0.06178      0.86424      0.98115  2.25s
     62       0.05315       0.06126      0.86754      0.98153  2.20s
     63       0.05275       0.06081      0.86738      0.98160  2.26s
     64       0.05245       0.06058      0.86568      0.98147  2.20s
     65       0.05216       0.05965      0.87452      0.98199  2.24s
     66       0.05192       0.05998      0.86570      0.98160  2.20s
     67       0.05164       0.05941      0.86929      0.98205  2.25s
     68       0.05131       0.05902      0.86943      0.98212  2.20s
     69       0.05114       0.05900      0.86676      0.98224  2.24s
     70       0.05086       0.05901      0.86190      0.98237  2.20s
     71       0.05069       0.05867      0.86391      0.98218  2.24s
     72       0.05034       0.05879      0.85635      0.98257  2.19s
     73       0.05021       0.05873      0.85495      0.98250  2.26s
     74       0.05000       0.05875      0.85103      0.98270  2.20s
     75       0.04975       0.05874      0.84692      0.98270  2.25s
     76       0.04959       0.05814      0.85299      0.98283  2.20s
     77       0.04934       0.05808      0.84961      0.98276  2.24s
     78       0.04917       0.05779      0.85095      0.98289  2.20s
     79       0.04894       0.05808      0.84266      0.98276  2.25s
     80       0.04875       0.05785      0.84273      0.98283  2.21s
     81       0.04853       0.05756      0.84315      0.98295  2.25s
     82       0.04838       0.05670      0.85317      0.98354  2.21s
     83       0.04815       0.05654      0.85153      0.98347  2.25s
     84       0.04799       0.05667      0.84684      0.98315  2.21s
     85       0.04780       0.05638      0.84795      0.98341  2.25s
     86       0.04758       0.05620      0.84655      0.98366  2.21s
     87       0.04748       0.05620      0.84487      0.98347  2.25s
     88       0.04727       0.05557      0.85065      0.98386  2.20s
     89       0.04707       0.05576      0.84408      0.98392  2.25s
     90       0.04691       0.05597      0.83810      0.98354  2.21s
     91       0.04676       0.05539      0.84417      0.98392  2.25s
     92       0.04654       0.05583      0.83364      0.98366  2.20s
     93       0.04648       0.05567      0.83490      0.98360  2.25s
     94       0.04635       0.05539      0.83670      0.98418  2.22s
     95       0.04610       0.05526      0.83432      0.98399  2.25s
     96       0.04601       0.05471      0.84109      0.98418  2.20s
     97       0.04578       0.05515      0.83015      0.98418  2.26s
     98       0.04570       0.05473      0.83513      0.98438  2.20s
     99       0.04550       0.05436      0.83702      0.98431  2.25s
    100       0.04527       0.05449      0.83093      0.98425  2.21s
    101       0.04510       0.05436      0.82968      0.98425  2.25s
    102       0.04492       0.05418      0.82912      0.98431  2.21s
    103       0.04476       0.05411      0.82715      0.98444  2.26s
    104       0.04457       0.05394      0.82630      0.98457  2.21s
    105       0.04449       0.05426      0.82007      0.98444  2.26s
    106       0.04446       0.05394      0.82424      0.98470  2.21s
    107       0.04425       0.05383      0.82208      0.98470  2.25s
    108       0.04412       0.05366      0.82225      0.98463  2.21s
    109       0.04399       0.05355      0.82162      0.98463  2.26s
    110       0.04387       0.05346      0.82054      0.98483  2.23s
    111       0.04363       0.05335      0.81791      0.98483  2.28s
    112       0.04354       0.05340      0.81530      0.98483  2.22s
    113       0.04334       0.05363      0.80819      0.98457  2.28s
    114       0.04332       0.05328      0.81305      0.98457  2.23s
    115       0.04309       0.05319      0.81020      0.98463  2.26s
    116       0.04288       0.05283      0.81171      0.98489  2.22s
    117       0.04271       0.05283      0.80842      0.98483  2.26s
    118       0.04265       0.05282      0.80738      0.98483  2.23s
    119       0.04266       0.05280      0.80790      0.98470  2.28s
    120       0.04245       0.05254      0.80796      0.98489  2.22s
    121       0.04224       0.05257      0.80357      0.98496  2.26s
    122       0.04211       0.05234      0.80447      0.98496  2.19s
    123       0.04191       0.05228      0.80168      0.98509  2.26s
    124       0.04188       0.05232      0.80038      0.98470  2.21s
    125       0.04178       0.05224      0.79977      0.98515  2.26s
    126       0.04161       0.05230      0.79555      0.98476  2.22s
    127       0.04146       0.05231      0.79251      0.98470  2.26s
    128       0.04140       0.05241      0.78986      0.98444  2.22s
    129       0.04131       0.05232      0.78946      0.98476  2.26s
    130       0.04123       0.05235      0.78764      0.98457  2.21s
    131       0.04105       0.05215      0.78704      0.98483  2.26s
    132       0.04094       0.05214      0.78522      0.98463  2.21s
    133       0.04077       0.05200      0.78403      0.98483  2.26s
    134       0.04069       0.05203      0.78216      0.98489  2.24s
    135       0.04052       0.05211      0.77762      0.98509  2.28s
    136       0.04049       0.05214      0.77666      0.98496  2.21s
    137       0.04035       0.05182      0.77859      0.98515  2.34s
    138       0.04011       0.05175      0.77512      0.98509  2.21s
    139       0.04008       0.05193      0.77181      0.98496  2.34s
    140       0.03994       0.05186      0.77021      0.98515  2.23s
    141       0.03985       0.05170      0.77081      0.98521  2.26s
    142       0.03968       0.05158      0.76923      0.98528  2.86s
    143       0.03955       0.05162      0.76626      0.98521  3.80s
    144       0.03945       0.05150      0.76603      0.98509  3.74s
    145       0.03931       0.05161      0.76169      0.98528  3.80s
    146       0.03923       0.05160      0.76034      0.98534  3.77s
    147       0.03911       0.05159      0.75798      0.98528  3.80s
    148       0.03902       0.05139      0.75923      0.98521  3.75s
    149       0.03887       0.05145      0.75565      0.98521  3.81s
    150       0.03880       0.05143      0.75446      0.98528  3.56s
    151       0.03872       0.05138      0.75366      0.98515  3.47s
    152       0.03860       0.05147      0.75005      0.98521  3.78s
    153       0.03851       0.05138      0.74950      0.98521  3.74s
    154       0.03840       0.05127      0.74895      0.98534  3.77s
    155       0.03829       0.05121      0.74777      0.98528  3.74s
    156       0.03818       0.05141      0.74273      0.98541  3.78s
    157       0.03812       0.05114      0.74544      0.98541  3.74s
    158       0.03799       0.05126      0.74106      0.98547  3.78s
    159       0.03785       0.05118      0.73946      0.98541  3.59s
    160       0.03772       0.05109      0.73835      0.98534  3.40s
    161       0.03768       0.05104      0.73816      0.98554  3.80s
    162       0.03753       0.05093      0.73685      0.98547  3.76s
    163       0.03746       0.05105      0.73382      0.98547  3.80s
    164       0.03735       0.05097      0.73271      0.98554  3.76s
    165       0.03727       0.05110      0.72937      0.98541  3.81s
    166       0.03713       0.05094      0.72882      0.98547  3.76s
    167       0.03706       0.05098      0.72700      0.98560  3.80s
    168       0.03700       0.05090      0.72676      0.98554  3.59s
    169       0.03687       0.05098      0.72318      0.98554  3.46s
    170       0.03677       0.05102      0.72076      0.98567  3.77s
    171       0.03666       0.05089      0.72037      0.98554  3.75s
    172       0.03660       0.05074      0.72140      0.98554  3.77s
    173       0.03650       0.05091      0.71697      0.98541  3.74s
    174       0.03636       0.05091      0.71428      0.98547  3.77s
    175       0.03630       0.05069      0.71614      0.98560  3.75s
    176       0.03624       0.05067      0.71529      0.98560  3.78s
    177       0.03613       0.05070      0.71259      0.98554  3.59s
    178       0.03604       0.05064      0.71157      0.98580  3.37s
    179       0.03594       0.05056      0.71093      0.98580  3.80s
    180       0.03586       0.05043      0.71109      0.98586  3.75s
    181       0.03583       0.05048      0.70989      0.98567  3.80s
    182       0.03566       0.05040      0.70752      0.98586  3.76s
    183       0.03561       0.05043      0.70601      0.98592  3.80s
    184       0.03551       0.05037      0.70499      0.98592  3.75s
    185       0.03543       0.05041      0.70272      0.98580  3.80s
    186       0.03532       0.05025      0.70291      0.98592  3.60s
    187       0.03527       0.05016      0.70315      0.98599  3.33s
    188       0.03520       0.05024      0.70074      0.98592  3.77s
    189       0.03510       0.05021      0.69910      0.98605  3.75s
    190       0.03503       0.05034      0.69586      0.98592  3.77s
    191       0.03498       0.05031      0.69538      0.98573  3.75s
    192       0.03487       0.05029      0.69343      0.98580  3.74s
    193       0.03480       0.05045      0.68984      0.98580  3.75s
    194       0.03476       0.05046      0.68891      0.98592  3.76s
    195       0.03472       0.05030      0.69034      0.98580  3.62s
    196       0.03459       0.05017      0.68936      0.98586  3.18s
    197       0.03452       0.05007      0.68934      0.98599  3.37s
    198       0.03438       0.05002      0.68734      0.98599  3.16s
    199       0.03433       0.05003      0.68607      0.98599  3.22s
    200       0.03427       0.05011      0.68380      0.98612  3.22s
    201       0.03421       0.05012      0.68246      0.98599  3.25s
    202       0.03405       0.04988      0.68268      0.98618  2.47s
    203       0.03404       0.05010      0.67954      0.98599  2.17s
    204       0.03392       0.05040      0.67305      0.98567  2.12s
    205       0.03386       0.05027      0.67353      0.98586  2.18s
    206       0.03382       0.05019      0.67379      0.98592  2.14s
    207       0.03372       0.05000      0.67445      0.98625  2.19s
    208       0.03361       0.05014      0.67034      0.98605  2.13s
    209       0.03356       0.05000      0.67116      0.98605  2.19s
    210       0.03346       0.05012      0.66757      0.98612  2.13s
    211       0.03337       0.05012      0.66576      0.98612  2.18s
    212       0.03336       0.05019      0.66461      0.98592  2.13s
    213       0.03325       0.05010      0.66371      0.98586  2.18s
    214       0.03321       0.05009      0.66303      0.98599  2.13s
    215       0.03308       0.05011      0.66011      0.98599  2.17s
    216       0.03305       0.05000      0.66090      0.98618  2.13s
    217       0.03301       0.04982      0.66260      0.98618  2.17s
    218       0.03291       0.04985      0.66011      0.98618  2.12s
    219       0.03282       0.04980      0.65902      0.98618  2.16s
    220       0.03264       0.04995      0.65341      0.98625  2.13s
    221       0.03263       0.04973      0.65622      0.98618  2.18s
    222       0.03255       0.04975      0.65433      0.98638  2.13s
    223       0.03245       0.04973      0.65257      0.98618  2.18s
    224       0.03239       0.05022      0.64493      0.98592  2.13s
    225       0.03231       0.04993      0.64718      0.98612  2.17s
    226       0.03232       0.05010      0.64521      0.98599  2.12s
    227       0.03227       0.04991      0.64657      0.98618  2.16s
    228       0.03214       0.05019      0.64036      0.98631  2.13s
    229       0.03206       0.04995      0.64180      0.98605  2.17s
    230       0.03199       0.05030      0.63600      0.98605  2.13s
    231       0.03197       0.04985      0.64133      0.98625  2.17s
    232       0.03192       0.05028      0.63471      0.98612  2.13s
    233       0.03179       0.05038      0.63100      0.98612  2.18s
    234       0.03179       0.04998      0.63600      0.98625  2.13s
    235       0.03169       0.05014      0.63205      0.98618  2.17s
    236       0.03158       0.04975      0.63479      0.98612  2.13s
    237       0.03157       0.04990      0.63275      0.98612  2.18s
    238       0.03149       0.04964      0.63440      0.98631  2.13s
    239       0.03141       0.04956      0.63387      0.98644  2.18s
    240       0.03138       0.04968      0.63160      0.98631  2.13s
    241       0.03124       0.05001      0.62474      0.98599  2.18s
    242       0.03121       0.05016      0.62233      0.98605  2.13s
    243       0.03114       0.04989      0.62431      0.98631  2.19s
    244       0.03107       0.04994      0.62218      0.98631  2.14s
    245       0.03100       0.04983      0.62206      0.98625  2.18s
    246       0.03092       0.04971      0.62205      0.98638  2.14s
    247       0.03089       0.04975      0.62087      0.98631  2.17s
    248       0.03078       0.04956      0.62104      0.98638  2.13s
    249       0.03067       0.04962      0.61810      0.98638  2.17s
    250       0.03065       0.04946      0.61966      0.98625  2.13s
    251       0.03057       0.04950      0.61765      0.98631  2.18s
    252       0.03051       0.04936      0.61813      0.98651  2.13s
    253       0.03044       0.04970      0.61253      0.98638  2.17s
    254       0.03038       0.04957      0.61293      0.98631  2.14s
    255       0.03030       0.04940      0.61330      0.98638  2.18s
    256       0.03030       0.04972      0.60938      0.98631  2.13s
    257       0.03019       0.04975      0.60674      0.98631  2.17s
    258       0.03010       0.04958      0.60715      0.98631  2.13s
    259       0.03006       0.04972      0.60467      0.98631  2.17s
    260       0.02999       0.04964      0.60414      0.98625  2.14s
    261       0.02990       0.04976      0.60089      0.98625  2.17s
    262       0.02987       0.04970      0.60091      0.98631  2.12s
    263       0.02978       0.04978      0.59820      0.98631  2.17s
    264       0.02981       0.04999      0.59644      0.98599  2.14s
    265       0.02963       0.04955      0.59805      0.98618  2.18s
    266       0.02955       0.04962      0.59538      0.98612  2.13s
    267       0.02956       0.04987      0.59285      0.98599  2.18s
    268       0.02947       0.04988      0.59085      0.98618  2.13s
    269       0.02940       0.04969      0.59166      0.98605  2.19s
    270       0.02933       0.04964      0.59078      0.98612  2.14s
    271       0.02926       0.05006      0.58456      0.98605  2.18s
    272       0.02930       0.04975      0.58887      0.98612  2.13s
    273       0.02918       0.04992      0.58465      0.98612  2.18s
    274       0.02913       0.05003      0.58229      0.98605  2.13s
    275       0.02902       0.04983      0.58246      0.98651  2.17s
    276       0.02901       0.04957      0.58508      0.98631  2.14s
    277       0.02888       0.04999      0.57775      0.98612  2.18s
    278       0.02886       0.04969      0.58086      0.98638  2.12s
    279       0.02884       0.04981      0.57899      0.98612  2.17s
    280       0.02871       0.04983      0.57612      0.98612  2.14s
    281       0.02870       0.04993      0.57481      0.98631  2.18s
    282       0.02861       0.04971      0.57553      0.98663  2.14s
    283       0.02858       0.05004      0.57118      0.98618  2.17s
    284       0.02850       0.05000      0.57006      0.98663  2.14s
    285       0.02840       0.04997      0.56832      0.98644  2.16s
    286       0.02832       0.04997      0.56676      0.98663  2.12s
    287       0.02837       0.05023      0.56476      0.98638  2.15s
    288       0.02821       0.05000      0.56410      0.98651  2.13s
    289       0.02822       0.05043      0.55969      0.98625  2.18s
    290       0.02812       0.05008      0.56152      0.98638  2.14s
    291       0.02809       0.05015      0.56007      0.98663  2.19s
    292       0.02805       0.05056      0.55475      0.98638  2.13s
    293       0.02802       0.05003      0.55992      0.98670  2.17s
    294       0.02791       0.05021      0.55591      0.98670  2.13s
    295       0.02786       0.05036      0.55322      0.98651  2.17s
    296       0.02783       0.05026      0.55377      0.98657  2.13s
    297       0.02776       0.05004      0.55479      0.98676  2.18s
    298       0.02774       0.05030      0.55158      0.98683  2.13s
    299       0.02762       0.05031      0.54889      0.98663  2.18s
    300       0.02758       0.05000      0.55173      0.98696  2.13s
    301       0.02758       0.05041      0.54710      0.98689  2.17s
    302       0.02753       0.05022      0.54813      0.98696  2.13s
    303       0.02745       0.05059      0.54265      0.98670  2.18s
    304       0.02741       0.05029      0.54506      0.98696  2.14s
    305       0.02732       0.05021      0.54406      0.98702  2.17s
    306       0.02725       0.05057      0.53878      0.98689  2.13s
    307       0.02720       0.05033      0.54039      0.98689  2.18s
    308       0.02705       0.05036      0.53723      0.98689  2.13s
    309       0.02708       0.05047      0.53659      0.98702  2.17s
    310       0.02702       0.05036      0.53647      0.98689  2.13s
    311       0.02697       0.05057      0.53331      0.98709  2.17s
    312       0.02695       0.05057      0.53284      0.98689  2.14s
    313       0.02687       0.05063      0.53068      0.98689  2.18s
    314       0.02678       0.05065      0.52874      0.98689  2.13s
    315       0.02678       0.05048      0.53047      0.98702  2.17s
    316       0.02671       0.05082      0.52560      0.98689  2.13s
    317       0.02664       0.05054      0.52705      0.98728  2.17s
    318       0.02654       0.05057      0.52491      0.98702  2.13s
    319       0.02652       0.05053      0.52481      0.98722  2.18s
    320       0.02646       0.05085      0.52033      0.98715  2.14s
    321       0.02647       0.05102      0.51887      0.98696  2.18s
    322       0.02635       0.05065      0.52018      0.98722  2.13s
    323       0.02630       0.05088      0.51684      0.98689  2.18s
    324       0.02627       0.05103      0.51485      0.98702  2.14s
    325       0.02623       0.05086      0.51562      0.98702  2.18s
    326       0.02616       0.05072      0.51579      0.98715  2.16s
    327       0.02609       0.05092      0.51237      0.98709  2.21s
    328       0.02613       0.05105      0.51189      0.98702  2.16s
    329       0.02609       0.05110      0.51054      0.98722  2.21s
    330       0.02599       0.05120      0.50767      0.98709  2.17s
    331       0.02590       0.05088      0.50895      0.98722  2.19s
    332       0.02588       0.05103      0.50721      0.98696  2.16s
    333       0.02581       0.05091      0.50694      0.98722  2.21s
    334       0.02577       0.05080      0.50740      0.98722  2.15s
    335       0.02577       0.05121      0.50313      0.98728  2.19s
    336       0.02569       0.05122      0.50159      0.98722  2.16s
    337       0.02567       0.05104      0.50298      0.98722  2.22s
    338       0.02558       0.05105      0.50098      0.98741  2.19s
    339       0.02556       0.05115      0.49977      0.98728  2.21s
    340       0.02551       0.05075      0.50269      0.98722  2.15s
    341       0.02543       0.05122      0.49650      0.98754  2.19s
    342       0.02541       0.05135      0.49488      0.98728  2.17s
    343       0.02536       0.05087      0.49849      0.98728  2.21s
    344       0.02528       0.05099      0.49583      0.98722  2.18s
    345       0.02524       0.05139      0.49122      0.98741  2.22s
    346       0.02522       0.05148      0.48994      0.98722  2.17s
    347       0.02517       0.05138      0.48993      0.98741  2.23s
    348       0.02514       0.05107      0.49228      0.98754  2.18s
    349       0.02505       0.05110      0.49020      0.98747  2.23s
    350       0.02505       0.05128      0.48838      0.98715  2.17s
    351       0.02497       0.05118      0.48777      0.98735  2.18s
    352       0.02492       0.05142      0.48461      0.98715  2.14s
    353       0.02488       0.05113      0.48670      0.98735  2.19s
    354       0.02484       0.05079      0.48906      0.98735  2.15s
    355       0.02480       0.05081      0.48817      0.98741  2.20s
    356       0.02473       0.05095      0.48547      0.98754  2.17s
    357       0.02469       0.05077      0.48633      0.98754  2.21s
    358       0.02466       0.05074      0.48604      0.98735  2.16s
    359       0.02462       0.05119      0.48093      0.98754  2.21s
    360       0.02457       0.05112      0.48071      0.98747  2.18s
    361       0.02454       0.05081      0.48292      0.98741  2.21s
    362       0.02449       0.05101      0.48002      0.98760  2.17s
    363       0.02446       0.05088      0.48073      0.98754  2.22s
    364       0.02439       0.05090      0.47920      0.98754  2.19s
    365       0.02435       0.05092      0.47826      0.98767  2.21s
    366       0.02428       0.05093      0.47668      0.98760  2.17s
    367       0.02419       0.05091      0.47522      0.98760  2.22s
    368       0.02420       0.05080      0.47635      0.98754  2.17s
    369       0.02414       0.05082      0.47495      0.98773  2.21s
    370       0.02412       0.05089      0.47391      0.98760  2.16s
    371       0.02403       0.05084      0.47277      0.98773  2.22s
    372       0.02408       0.05083      0.47368      0.98780  2.17s
    373       0.02398       0.05103      0.47000      0.98780  2.21s
    374       0.02397       0.05083      0.47153      0.98767  2.18s
    375       0.02386       0.05086      0.46920      0.98773  2.24s
    376       0.02385       0.05067      0.47075      0.98780  2.17s
    377       0.02377       0.05050      0.47082      0.98767  2.21s
    378       0.02376       0.05051      0.47043      0.98799  2.15s
    379       0.02370       0.05053      0.46903      0.98793  2.21s
    380       0.02364       0.05064      0.46685      0.98786  2.16s
    381       0.02371       0.05078      0.46701      0.98773  2.17s
    382       0.02361       0.05057      0.46695      0.98799  2.13s
    383       0.02360       0.05059      0.46643      0.98812  2.21s
    384       0.02353       0.05073      0.46377      0.98793  2.16s
    385       0.02347       0.05065      0.46340      0.98793  2.19s
    386       0.02343       0.05053      0.46376      0.98812  2.15s
    387       0.02335       0.05037      0.46363      0.98818  2.19s
    388       0.02333       0.05035      0.46337      0.98786  2.15s
    389       0.02327       0.05090      0.45712      0.98793  2.17s
    390       0.02325       0.05008      0.46436      0.98806  2.14s
    391       0.02317       0.05077      0.45638      0.98799  2.19s
    392       0.02323       0.05025      0.46221      0.98806  2.16s
    393       0.02313       0.05025      0.46024      0.98799  2.20s
    394       0.02312       0.05043      0.45844      0.98786  2.17s
    395       0.02305       0.05032      0.45820      0.98818  2.22s
    396       0.02299       0.05050      0.45533      0.98825  2.17s
    397       0.02293       0.05019      0.45691      0.98838  2.20s
    398       0.02289       0.05009      0.45687      0.98818  2.15s
    399       0.02284       0.05038      0.45345      0.98812  2.17s
    400       0.02279       0.05004      0.45551      0.98838  2.14s
    401       0.02276       0.05039      0.45167      0.98831  2.17s
    402       0.02277       0.05051      0.45075      0.98825  2.15s
    403       0.02279       0.05034      0.45278      0.98806  2.18s
    404       0.02269       0.05040      0.45013      0.98838  2.15s
    405       0.02261       0.05063      0.44647      0.98812  2.18s
    406       0.02260       0.05042      0.44821      0.98812  2.15s
    407       0.02257       0.05037      0.44814      0.98844  2.16s
    408       0.02246       0.05036      0.44606      0.98825  2.17s
    409       0.02248       0.05038      0.44619      0.98818  2.18s
    410       0.02246       0.05040      0.44566      0.98825  2.16s
    411       0.02240       0.05037      0.44478      0.98818  2.19s
    412       0.02240       0.05059      0.44287      0.98825  2.17s
    413       0.02232       0.05066      0.44066      0.98799  2.13s
    414       0.02226       0.05069      0.43912      0.98793  2.11s
    415       0.02227       0.05058      0.44020      0.98793  2.12s
    416       0.02222       0.05036      0.44118      0.98812  2.11s
    417       0.02220       0.05075      0.43749      0.98793  2.15s
    418       0.02214       0.05055      0.43797      0.98793  2.11s
    419       0.02211       0.05061      0.43685      0.98812  2.12s
    420       0.02200       0.05056      0.43508      0.98818  2.11s
    421       0.02193       0.05074      0.43223      0.98780  2.15s
    422       0.02200       0.05069      0.43406      0.98786  2.13s
    423       0.02195       0.05041      0.43540      0.98818  2.17s
    424       0.02178       0.05063      0.43029      0.98786  2.13s
    425       0.02184       0.05060      0.43161      0.98806  2.17s
    426       0.02172       0.05047      0.43043      0.98806  2.13s
    427       0.02178       0.05045      0.43171      0.98806  2.18s
    428       0.02168       0.05086      0.42628      0.98780  2.13s
    429       0.02170       0.05045      0.43026      0.98825  2.17s
    430       0.02164       0.05095      0.42464      0.98786  2.13s
    431       0.02168       0.05055      0.42883      0.98812  2.16s
    432       0.02160       0.05046      0.42801      0.98812  2.13s
    433       0.02150       0.05033      0.42716      0.98818  2.17s
    434       0.02147       0.05025      0.42721      0.98812  2.12s
    435       0.02140       0.05040      0.42460      0.98812  2.17s
    436       0.02136       0.05028      0.42489      0.98831  2.13s
    437       0.02135       0.05075      0.42078      0.98786  2.16s
    438       0.02130       0.05035      0.42304      0.98806  2.12s
    439       0.02131       0.05045      0.42239      0.98812  2.18s
    440       0.02119       0.05047      0.41985      0.98806  2.14s
    441       0.02115       0.05051      0.41883      0.98825  2.16s
    442       0.02106       0.05037      0.41805      0.98838  2.12s
    443       0.02103       0.05030      0.41807      0.98825  2.17s
    444       0.02104       0.05035      0.41791      0.98844  2.13s
    445       0.02105       0.05071      0.41520      0.98812  2.15s
    446       0.02105       0.05044      0.41732      0.98831  2.13s
    447       0.02099       0.05039      0.41659      0.98831  2.18s
    448       0.02087       0.05060      0.41245      0.98806  2.13s
    449       0.02089       0.05048      0.41393      0.98844  2.17s
    450       0.02079       0.05014      0.41462      0.98864  2.13s
    451       0.02075       0.05050      0.41095      0.98838  2.17s
    452       0.02077       0.05009      0.41455      0.98864  2.12s
    453       0.02069       0.05040      0.41048      0.98857  2.16s
    454       0.02068       0.05061      0.40854      0.98844  2.14s
    455       0.02056       0.05044      0.40767      0.98870  2.18s
    456       0.02060       0.05040      0.40868      0.98851  2.13s
    457       0.02048       0.05074      0.40366      0.98818  2.18s
    458       0.02052       0.05037      0.40728      0.98877  2.13s
    459       0.02042       0.05001      0.40826      0.98864  2.18s
    460       0.02035       0.05093      0.39944      0.98825  2.13s
    461       0.02036       0.05055      0.40284      0.98838  2.18s
    462       0.02029       0.05059      0.40104      0.98851  2.13s
    463       0.02027       0.05021      0.40358      0.98864  2.18s
    464       0.02028       0.05050      0.40152      0.98870  2.14s
    465       0.02023       0.05027      0.40251      0.98864  2.16s
    466       0.02015       0.05025      0.40100      0.98889  2.13s
    467       0.02013       0.05088      0.39561      0.98870  2.16s
    468       0.02016       0.05128      0.39316      0.98844  2.13s
    469       0.02009       0.05076      0.39580      0.98883  2.17s
    470       0.01999       0.05078      0.39365      0.98870  2.13s
    471       0.01997       0.05092      0.39210      0.98870  2.16s
    472       0.01998       0.05071      0.39400      0.98864  2.12s
    473       0.01987       0.05077      0.39143      0.98864  2.18s
    474       0.01986       0.05057      0.39274      0.98870  2.12s
    475       0.01982       0.05039      0.39330      0.98889  2.17s
    476       0.01973       0.05095      0.38729      0.98870  2.13s
    477       0.01979       0.05131      0.38571      0.98857  2.17s
    478       0.01976       0.05148      0.38377      0.98844  2.13s
    479       0.01970       0.05111      0.38540      0.98870  2.18s
    480       0.01973       0.05120      0.38539      0.98857  2.14s
    481       0.01969       0.05154      0.38196      0.98851  2.18s
    482       0.01956       0.05115      0.38233      0.98883  2.13s
    483       0.01961       0.05090      0.38535      0.98857  2.17s
    484       0.01958       0.05136      0.38123      0.98877  2.14s
    485       0.01946       0.05108      0.38097      0.98864  2.18s
    486       0.01946       0.05107      0.38106      0.98870  2.14s
    487       0.01940       0.05109      0.37980      0.98870  2.18s
    488       0.01933       0.05118      0.37758      0.98844  2.13s
    489       0.01933       0.05095      0.37932      0.98851  2.17s
    490       0.01933       0.05100      0.37910      0.98844  2.13s
    491       0.01932       0.05126      0.37684      0.98870  2.17s
    492       0.01924       0.05105      0.37684      0.98870  2.13s
    493       0.01918       0.05121      0.37460      0.98870  2.18s
    494       0.01918       0.05115      0.37503      0.98851  2.13s
    495       0.01918       0.05103      0.37596      0.98877  2.17s
    496       0.01909       0.05105      0.37389      0.98870  2.13s
    497       0.01906       0.05131      0.37141      0.98864  2.17s
    498       0.01898       0.05109      0.37151      0.98851  2.13s
    499       0.01898       0.05101      0.37219      0.98864  2.18s
    500       0.01901       0.05087      0.37362      0.98857  2.96s
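
As a side check (added for illustration), the 77,646 learnable parameters reported above can be reproduced from the layer table:


In [ ]:
# Reproduce the reported parameter count from the layer shapes above
conv1   = 7 * (3 * 3 * 3) + 7          # 7 filters of 3x3 over 3 input channels, plus biases -> 196
conv2   = 12 * (7 * 2 * 2) + 12        # 12 filters of 2x2 over 7 channels, plus biases -> 348
hidden3 = (12 * 8 * 8) * 100 + 100     # dense layer on the flattened 12x8x8 conv2 output -> 76900
output  = 100 * 2 + 2                  # two-class softmax -> 202
print conv1 + conv2 + hidden3 + output  # 77646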

Make predictions and evaluate on test data


In [12]:
y_pred = cnn.predict_proba(test_X)

false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(test_y, y_pred[:,1])
roc_auc = metrics.auc(false_positive_rate, true_positive_rate)

precision, recall, thresholds = metrics.precision_recall_curve(test_y, y_pred[:,1])
average_precision = metrics.average_precision_score(test_y, y_pred[:, 1])

subplot(121)
plt.title('ROC: AUC = %0.2f'% roc_auc)
plt.plot(false_positive_rate, true_positive_rate, 'b')
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.ylim([-.05, 1.05])
plt.xlim([-.05, 1.0])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')

subplot(122)
plt.plot(recall, precision)
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall: AP={0:0.2f}'.format(average_precision))
plt.legend(loc="lower left")

plt.gcf().set_size_inches(10,4)

plt.savefig('figs/tuberculosis-patchevaluation.png', bbox_inches='tight')



In [10]:
# Use a separate variable so the test-set predictions (y_pred) computed above are not overwritten
y_pred_train = cnn.predict_proba(train_X)

false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(train_y, y_pred_train[:,1])
roc_auc = metrics.auc(false_positive_rate, true_positive_rate)

precision, recall, thresholds = metrics.precision_recall_curve(train_y, y_pred_train[:,1])
average_precision = metrics.average_precision_score(train_y, y_pred_train[:, 1])

subplot(121)
plt.title('ROC: AUC = %0.2f'% roc_auc)
plt.plot(false_positive_rate, true_positive_rate, 'b')
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.ylim([-.05, 1.05])
plt.xlim([-.05, 1.0])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')

subplot(122)
plt.plot(recall, precision)
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall: AP={0:0.2f}'.format(average_precision))
plt.legend(loc="lower left")

plt.gcf().set_size_inches(10,4)

plt.savefig('figs/tuberculosis-patchevaluation-train.png', bbox_inches='tight')  # training-set figure; keep the test-set figure above intact



In [13]:
false_positive_rate, true_positive_rate, thresholds = metrics.roc_curve(test_y, y_pred[:,1])
plt.plot(thresholds, true_positive_rate, label='True positive rate')
plt.plot(thresholds, false_positive_rate, label='False positive rate')
plt.xlabel('Threshold')
plt.ylim([0,1.01])
plt.legend(loc='upper left')


Out[13]:
<matplotlib.legend.Legend at 0x7f846c4a04d0>

Examine mistakes to understand network performance: false positives.

Find the negative-labelled patches with the highest prediction scores.


In [ ]:
neg_indices = np.where(test_y==0)[0]
neg_scores = y_pred[neg_indices,1]
neg_indices = neg_indices[neg_scores.argsort()]
neg_indices = neg_indices[::-1]

neg_scores = y_pred[neg_indices,1]

N_samples_to_display = 12
offset = 55
for i in range(N_samples_to_display,2*N_samples_to_display):
    plt.subplot(2,N_samples_to_display,i+1)
    example_neg = test_X[neg_indices[i+offset],:,:,:]
    example_neg = np.swapaxes(example_neg,0,2)
    plt.imshow(example_neg)
    plt.title('%.3f' % neg_scores[i+offset])
    plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')    

plt.gcf().set_size_inches(1.5*N_samples_to_display,3) 

plt.savefig('figs/tuberculosis-falsedetections.png', bbox_inches='tight')

See highest-scored test patches


In [ ]:
prob_range = [.95,1.]

tmp_scores = y_pred.copy()[:,1]
tmp_scores[tmp_scores<prob_range[0]] = -1
tmp_scores[tmp_scores>prob_range[1]] = -1

pos_indices = tmp_scores.argsort()
pos_indices = pos_indices[::-1]

N_samples_to_display = 12
offset = 0
for i in range(N_samples_to_display,2*N_samples_to_display):
    plt.subplot(2,N_samples_to_display,i+1)
    example_neg = test_X[pos_indices[i+offset],:,:,:]
    example_neg = np.swapaxes(example_neg,0,2)
    plt.imshow(example_neg)
    plt.title('%.3f' % (tmp_scores[pos_indices[i+offset]]))
    plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')    

plt.gcf().set_size_inches(1.5*N_samples_to_display,3) 

plt.savefig('figs/tuberculosis-detectedpatches.png', bbox_inches='tight')

See lowest scored test patches


In [ ]:
pos_indices = y_pred[:,1].argsort()

N_samples_to_display = 12

for i in range(N_samples_to_display,2*N_samples_to_display):
    plt.subplot(2,N_samples_to_display,i+1)
    example_neg = test_X[pos_indices[i],:,:,:]
    example_neg = np.swapaxes(example_neg,0,2)
    plt.imshow(example_neg)
    plt.title('%.3f' % (y_pred[pos_indices[i],1]))
    plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')    

plt.gcf().set_size_inches(1.5*N_samples_to_display,3) 

plt.savefig('figs/tuberculosis-testpatches-lowprob.png', bbox_inches='tight')

Example of objects detected in an entire image

The white boxes show the ground-truth annotations for this image; the red boxes are detections made by the convnet.
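
det.detect (from detectobjects.py) produces the red boxes; its implementation is not included in this notebook. Conceptually it slides a patch-sized window across the image, scores each window with the CNN, and keeps windows scoring above detection_probability_threshold, merging overlapping detections using detection_overlap_threshold. A hypothetical sketch of that idea — the step size, pixel scaling, and omitted box merging are assumptions, not the project's actual code:


In [ ]:
# Assumed sliding-window detection sketch; detectobjects.detect is the real implementation.
def sliding_window_detect_sketch(im, net, opts):
    ps = opts['patch_size'][0]
    size = ps // opts['image_downsample']                      # resolution the CNN was trained on
    step = opts['detection_step'] * opts['image_downsample']   # assumed: step expressed in downsampled pixels
    boxes, patches = [], []
    for y0 in range(0, im.shape[0] - ps + 1, step):
        for x0 in range(0, im.shape[1] - ps + 1, step):
            patch = cv2.resize(im[y0:y0+ps, x0:x0+ps], (size, size))
            patches.append(np.rollaxis(patch, 2))              # HWC -> CHW, matching the training patches
            boxes.append((x0, y0, x0 + ps, y0 + ps))
    X = np.asarray(patches, dtype=np.float32)                  # assumed: same scaling as readdata.create_patches
    p = net.predict_proba(X)[:, 1]
    # Merging of overlapping boxes (detection_overlap_threshold) is omitted for brevity.
    return [b for b, keep in zip(boxes, p >= opts['detection_probability_threshold']) if keep]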



In [30]:
reload(det)

fname = testfiles[0]
imfile = opts['img_dir'] + fname
opts['detection_probability_threshold'] = 0.95
found = det.detect(imfile, cnn, opts)

im = misc.imread(imfile)

plt.box(False)
plt.xticks([])
plt.yticks([])

annofile = opts['annotation_dir'] + fname[:-3] + 'xml'
bboxes = readdata.get_bounding_boxes_for_single_image(annofile)
for bb in bboxes:
    bb = bb.astype(int)
    cv2.rectangle(im, (bb[0],bb[2]), (bb[1],bb[3]), (255,255,255), 8)  

for f in found:
    f = f.astype(int)
    cv2.rectangle(im, (f[0],f[1]), (f[2],f[3]), (255,0,0), 8)

plt.gcf().set_size_inches(10,10)
plt.title('Detected objects in %s' % (imfile))
plt.imshow(im)

#cv2.imwrite('detectionimages/detected-' + os.path.basename(imfile),im)


Out[30]:
<matplotlib.image.AxesImage at 0x7f845cbe6410>

Evaluation: compare with a classifier based on morphological feature extraction


In [ ]:
featureset = [3,7,11,12,15,17]
centiles = [0,25,50,75,100]

pb = ProgressBar(train_X.shape[0])
train_X_f = []
for i in range(train_X.shape[0]):
    if i % 100 == 0:
        pb.step(i)
    graypatch = cv2.cvtColor(np.swapaxes(train_X[i,:,:,:],0,2).astype('uint8'), cv2.COLOR_BGR2GRAY)
    train_X_f.append(shapefeatures.extract(graypatch,attributes=featureset,centiles=centiles, momentfeatures=True))
train_X_f = np.vstack(train_X_f)

pb = ProgressBar(test_X.shape[0])
test_X_f = []
for i in range(test_X.shape[0]):
    if i % 100 == 0:
        pb.step(i)
    graypatch = cv2.cvtColor(np.swapaxes(test_X[i,:,:,:],0,2).astype('uint8'), cv2.COLOR_BGR2GRAY)
    test_X_f.append(shapefeatures.extract(graypatch,attributes=featureset,centiles=centiles, momentfeatures=True))
test_X_f = np.vstack(test_X_f)

In [ ]:
clf = ensemble.ExtraTreesClassifier(n_estimators=100, max_depth=5, n_jobs=-1)
clf.fit(train_X_f, train_y)
y_pred_CLF = clf.predict_proba(test_X_f)

In [ ]:
false_positive_rate_CNN, true_positive_rate_CNN, thresholds_CNN = metrics.roc_curve(test_y, y_pred[:,1])
roc_auc_CNN = metrics.auc(false_positive_rate_CNN, true_positive_rate_CNN)

precision_CNN, recall_CNN, thresholds_CNN = metrics.precision_recall_curve(test_y, y_pred[:,1])
average_precision_CNN = metrics.average_precision_score(test_y, y_pred[:, 1])

false_positive_rate_CLF, true_positive_rate_CLF, thresholds_CLF = metrics.roc_curve(test_y, y_pred_CLF[:,1])
roc_auc_CLF = metrics.auc(false_positive_rate_CLF, true_positive_rate_CLF)

precision_CLF, recall_CLF, thresholds_CLF = metrics.precision_recall_curve(test_y, y_pred_CLF[:,1])
average_precision_CLF = metrics.average_precision_score(test_y, y_pred_CLF[:, 1])

subplot(211)
plt.title('ROC' )
plt.plot(false_positive_rate_CNN, true_positive_rate_CNN, 'b', label='CNN: AUC=%.2f' % (roc_auc_CNN))
plt.plot(false_positive_rate_CLF, true_positive_rate_CLF, 'k--', label='ERT: AUC=%.2f' % (roc_auc_CLF))
plt.legend(loc='lower right')
plt.ylim([-.05, 1.05])
plt.xlim([-.05, 1.0])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')

subplot(212)
plt.plot(recall_CNN, precision_CNN, label='CNN: AP=%.2f' % (average_precision_CNN))
plt.plot(recall_CLF, precision_CLF,'k--', label='ERT: AP=%.2f' % (average_precision_CLF))
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall')
plt.legend(loc="lower left")

plt.gcf().set_size_inches(4,10)

plt.savefig('figs/tuberculosis-patchevaluation.png', bbox_inches='tight')

In [ ]:
results = {
"false_positive_rate_CNN": false_positive_rate_CNN,
"true_positive_rate_CNN": true_positive_rate_CNN,
"false_positive_rate_CLF": false_positive_rate_CLF,
"true_positive_rate_CLF": true_positive_rate_CLF,
"roc_auc_CNN": roc_auc_CNN,
"roc_auc_CLF": roc_auc_CLF,
"recall_CNN": recall_CNN,
"precision_CNN": precision_CNN,
"average_precision_CNN": average_precision_CNN,
"recall_CLF": recall_CLF,
"precision_CLF": precision_CLF,
"average_precision_CLF": average_precision_CLF,
"opts": opts
}
import pickle
with open('tuberculosis-results.pkl', 'wb') as f:
    pickle.dump(results, f)
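
To reload these results later (for example, to redraw the comparison figure without retraining), something along these lines would work:


In [ ]:
import pickle
with open('tuberculosis-results.pkl', 'rb') as f:
    results = pickle.load(f)
print results['roc_auc_CNN'], results['roc_auc_CLF']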