In [11]:
%load_ext autoreload
%autoreload 2

import os; import sys; sys.path.append('..')
import gp
import gp.nets as nets

from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion

from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt
%matplotlib inline


The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload

In [2]:
# Directory name of the patch dataset to load (a small RGB cylinder set).
PATCH_PATH = 'cylinder2_rgb_small'

In [3]:
# Load the RGB patch dataset, already split into train/test arrays
# (per the loader's log, read from the user's patches_local directory).
X_train, y_train, X_test, y_test = gp.Patch.load_rgb(PATCH_PATH)


Loaded /n/home05/haehn/patches_local//cylinder2_rgb_small/ in 0.363067865372 seconds.

In [4]:
# FIXME: this raises IndexError (see traceback below) — view_rgba indexes
# patch[3] in gp/util.py, i.e. it expects a 4-channel RGBA patch, but the
# RGB patches loaded above have only 3 channels. Either fix view_rgba to
# handle 3-channel input or use an RGB-aware viewer here.
gp.Util.view_rgba(X_train[100], y_train[100])


---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-4-70e621d6e8ce> in <module>()
----> 1 gp.Util.view_rgba(X_train[100], y_train[100])

/n/home05/haehn/Projects/gp/gp/util.py in view_rgba(patch, text)
    144     prob = patch[1]
    145     binary = patch[2]
--> 146     border = patch[3]
    147 
    148     ax1.axis('off')

IndexError: index 3 is out of bounds for axis 0 with size 3

In [5]:
# Instantiate the project's RGB CNN (per its printed config: image, prob,
# merged_array stacked as RGB input, with dropout layers).
cnn = nets.RGBNet()


CNN configuration: 
    Our CNN with image, prob, merged_array as RGB.

    This includes dropout.
    

In [6]:
# Train the network on the RGB patches. NOTE(review): fit appears to return
# the fitted network itself (cnn is usable for scoring afterwards), so the
# rebinding is presumably a no-op — confirm against nolearn's fit contract.
# Training early-stops and reloads the best-validation-loss parameters.
cnn = cnn.fit(X_train, y_train)


# Neural Network with 7133490 learnable parameters

## Layer information

  #  name      size
---  --------  --------
  0  input     3x75x75
  1  conv1     64x73x73
  2  pool1     64x36x36
  3  dropout1  64x36x36
  4  conv2     48x34x34
  5  pool2     48x17x17
  6  dropout2  48x17x17
  7  hidden3   512
  8  dropout3  512
  9  output    2

  epoch    trn loss    val loss    trn/val    valid acc  dur
-------  ----------  ----------  ---------  -----------  ------
      1     0.29937     0.21259    1.40821      0.92022  15.90s
      2     0.20038     0.17638    1.13608      0.93562  15.75s
      3     0.15791     0.14031    1.12545      0.94796  15.76s
      4     0.12245     0.10854    1.12817      0.96173  15.76s
      5     0.09646     0.08579    1.12430      0.97200  15.79s
      6     0.07258     0.07912    0.91729      0.97452  15.86s
      7     0.05239     0.06379    0.82127      0.98145  15.86s
      8     0.04547     0.05172    0.87917      0.98559  15.86s
      9     0.03323     0.04989    0.66613      0.98514  15.86s
     10     0.02975     0.04372    0.68057      0.98938  15.87s
     11     0.02608     0.04134    0.63080      0.99028  15.90s
     12     0.01858     0.05568    0.33368      0.98847  15.87s
     13     0.01710     0.04162    0.41092      0.99208  15.87s
     14     0.01654     0.04136    0.40003      0.99163  15.93s
     15     0.01311     0.03858    0.33990      0.99352  15.94s
     16     0.01402     0.05295    0.26477      0.99001  15.89s
     17     0.01176     0.03891    0.30218      0.99316  15.91s
     18     0.01012     0.05242    0.19314      0.99064  15.87s
     19     0.01094     0.03951    0.27677      0.99370  15.88s
     20     0.00863     0.04539    0.19019      0.99235  15.86s
     21     0.01090     0.05353    0.20355      0.99100  15.90s
     22     0.00819     0.05317    0.15398      0.99001  15.86s
     23     0.00853     0.05060    0.16862      0.99208  15.89s
     24     0.00588     0.05640    0.10427      0.99253  15.88s
     25     0.00569     0.05531    0.10293      0.99154  15.89s
     26     0.00587     0.07140    0.08228      0.99073  15.86s
     27     0.00709     0.05651    0.12555      0.99181  15.89s
     28     0.00596     0.05082    0.11728      0.99037  15.89s
     29     0.00612     0.05019    0.12183      0.99244  15.86s
     30     0.00484     0.05454    0.08867      0.99136  15.89s
     31     0.00555     0.05473    0.10137      0.99118  15.86s
     32     0.00520     0.05487    0.09480      0.99262  15.90s
     33     0.00399     0.04745    0.08399      0.99226  15.86s
     34     0.00400     0.06617    0.06038      0.99028  15.91s
     35     0.00372     0.05087    0.07312      0.99262  15.87s
     36     0.00455     0.06931    0.06566      0.99046  15.87s
     37     0.00358     0.04773    0.07495      0.99280  15.86s
     38     0.00394     0.06474    0.06090      0.99091  16.10s
     39     0.00468     0.06436    0.07264      0.99082  15.86s
     40     0.00335     0.05129    0.06539      0.99244  16.08s
     41     0.00325     0.06404    0.05073      0.99118  15.88s
     42     0.00550     0.07417    0.07413      0.98938  16.05s
     43     0.00394     0.05609    0.07018      0.99262  15.91s
     44     0.00228     0.06334    0.03603      0.99028  16.05s
     45     0.00236     0.05546    0.04248      0.99316  15.92s
     46     0.00357     0.06844    0.05210      0.99064  16.00s
     47     0.00430     0.07451    0.05777      0.99028  15.97s
     48     0.00273     0.06735    0.04056      0.99172  15.92s
     49     0.00269     0.06786    0.03960      0.99046  16.05s
     50     0.00364     0.07324    0.04969      0.99082  15.88s
     51     0.00297     0.06718    0.04423      0.99190  16.09s
     52     0.00345     0.08094    0.04263      0.99100  15.88s
     53     0.00281     0.06642    0.04232      0.99244  16.10s
     54     0.00344     0.06943    0.04951      0.99082  15.87s
     55     0.00316     0.07488    0.04222      0.99172  16.10s
     56     0.00214     0.07204    0.02977      0.99172  15.87s
     57     0.00160     0.06594    0.02433      0.99226  16.10s
     58     0.00138     0.07606    0.01816      0.99154  15.87s
     59     0.00266     0.04879    0.05458      0.99388  16.10s
     60     0.00244     0.07398    0.03304      0.99082  15.86s
     61     0.00167     0.06409    0.02609      0.99280  16.10s
     62     0.00312     0.05823    0.05356      0.99388  15.88s
     63     0.00262     0.07875    0.03330      0.98901  16.10s
     64     0.00183     0.06961    0.02628      0.99208  15.87s
     65     0.00149     0.06934    0.02144      0.99208  16.08s
Early stopping.
Best valid loss was 0.038577 at epoch 15.
Loaded parameters to layer 'conv1' (shape 64x3x3x3).
Loaded parameters to layer 'conv1' (shape 64).
Loaded parameters to layer 'conv2' (shape 48x64x3x3).
Loaded parameters to layer 'conv2' (shape 48).
Loaded parameters to layer 'hidden3' (shape 13872x512).
Loaded parameters to layer 'hidden3' (shape 512).
Loaded parameters to layer 'output' (shape 512x2).
Loaded parameters to layer 'output' (shape 2).

In [7]:
# Evaluate accuracy on the held-out test split.
test_accuracy = cnn.score(X_test, y_test)

In [8]:
# Display the held-out test accuracy (rich last-expression output).
test_accuracy


Out[8]:
0.905511811023622

In [12]:
# Plot training vs. validation loss over epochs. plot_loss returns the
# matplotlib.pyplot module (its repr leaked into the cell output here);
# the trailing ';' suppresses that noise while keeping the inline figure.
plot_loss(cnn);


Out[12]:
<module 'matplotlib.pyplot' from '/n/home05/haehn/nolearncox/lib/python2.7/site-packages/matplotlib-1.5.2-py2.7-linux-x86_64.egg/matplotlib/pyplot.pyc'>

In [ ]: