In [1]:
%load_ext autoreload
%autoreload 2
import cPickle as pickle
import os
import sys
sys.path.append('..')  # make the gp package importable from the parent directory
import gp
import gp.nets as nets
from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion
from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt
%matplotlib inline
Using gpu device 0: GeForce GTX TITAN X (CNMeM is disabled, CuDNN 4007)
/n/home05/haehn/nolearncox/lib/python2.7/site-packages/theano/tensor/signal/downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.
"downsample module has been moved to the theano.tensor.signal.pool module.")
In [2]:
PATCH_PATH = 'cylinder2_rgb_small'
In [3]:
X_train, y_train, X_test, y_test = gp.Patch.load_rgb(PATCH_PATH)
Loaded /n/home05/haehn/patches_local//cylinder2_rgb_small/ in 0.00415086746216 seconds.
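A quick sanity check of what load_rgb returned is useful before training; a minimal sketch, assuming the patches come back as NumPy arrays in channel-first (N, 3, 75, 75) layout, which the input layer printed during fit() below implies:

import numpy as np

print X_train.shape, X_train.dtype   # expected (N, 3, 75, 75) given the 3x75x75 input layer
print y_train.shape, y_train.dtype   # expected (N,) binary labels for the 2-unit output
print 'train class balance:', np.bincount(y_train.astype('int64'))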
In [4]:
gp.Util.view_rgba(X_train[100], y_train[100])  # inspect a sample training patch and its label
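If gp.Util is not at hand, roughly the same view is possible with the matplotlib imports from the first cell; a sketch assuming the patch is a channel-first array whose values imshow can display directly:

plt.imshow(X_train[100].transpose(1, 2, 0))  # move channels last for imshow
plt.title('label: %s' % (y_train[100],))
plt.show()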
In [5]:
cnn = nets.RGBNetPlus()
CNN configuration:
Our CNN with image, prob, merged_array as RGB.
This includes dropout. This also includes more layers.
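gp.nets does not print the architecture until fit() runs, but the layer table and the 170,898-parameter count below pin it down almost completely. A minimal reconstruction as a nolearn NeuralNet: the layer names, filter counts, and filter/pool sizes follow from the printed shapes (and reproduce the parameter count exactly), while the dropout rates, learning rate, momentum, and epoch budget are assumptions; the real class evidently also wires up the early-stopping handler seen at the end of the training log.

from lasagne import layers, nonlinearities
from nolearn.lasagne import NeuralNet

sketch = NeuralNet(
    layers=[
        (layers.InputLayer,     {'name': 'input',    'shape': (None, 3, 75, 75)}),
        (layers.Conv2DLayer,    {'name': 'conv1',    'num_filters': 64, 'filter_size': (3, 3)}),
        (layers.MaxPool2DLayer, {'name': 'pool1',    'pool_size': (2, 2)}),
        (layers.DropoutLayer,   {'name': 'dropout1', 'p': 0.2}),  # rate is an assumption
        (layers.Conv2DLayer,    {'name': 'conv2',    'num_filters': 48, 'filter_size': (3, 3)}),
        (layers.MaxPool2DLayer, {'name': 'pool2',    'pool_size': (2, 2)}),
        (layers.DropoutLayer,   {'name': 'dropout2', 'p': 0.2}),
        (layers.Conv2DLayer,    {'name': 'conv3',    'num_filters': 48, 'filter_size': (3, 3)}),
        (layers.MaxPool2DLayer, {'name': 'pool3',    'pool_size': (2, 2)}),
        (layers.DropoutLayer,   {'name': 'dropout3', 'p': 0.2}),
        (layers.Conv2DLayer,    {'name': 'conv4',    'num_filters': 48, 'filter_size': (3, 3)}),
        (layers.MaxPool2DLayer, {'name': 'pool4',    'pool_size': (2, 2)}),
        (layers.DropoutLayer,   {'name': 'dropout4', 'p': 0.2}),
        (layers.DenseLayer,     {'name': 'hidden5',  'num_units': 512}),  # 48x2x2 = 192 inputs
        (layers.DropoutLayer,   {'name': 'dropout5', 'p': 0.5}),
        (layers.DenseLayer,     {'name': 'output',   'num_units': 2,
                                 'nonlinearity': nonlinearities.softmax}),
    ],
    update_learning_rate=0.001,  # assumption
    update_momentum=0.9,         # assumption
    max_epochs=500,              # assumption; the run below stops early at epoch 195
    verbose=1,
)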
In [6]:
cnn = cnn.fit(X_train, y_train)  # train; nolearn's fit returns the net itself
# Neural Network with 170898 learnable parameters

## Layer information

  #  name      size
---  --------  --------
  0  input     3x75x75
  1  conv1     64x73x73
  2  pool1     64x36x36
  3  dropout1  64x36x36
  4  conv2     48x34x34
  5  pool2     48x17x17
  6  dropout2  48x17x17
  7  conv3     48x15x15
  8  pool3     48x7x7
  9  dropout3  48x7x7
 10  conv4     48x5x5
 11  pool4     48x2x2
 12  dropout4  48x2x2
 13  hidden5   512
 14  dropout5  512
 15  output    2
  epoch    trn loss    val loss    trn/val    valid acc  dur
-------  ----------  ----------  ---------  -----------  ------
1 0.35907 0.27164 1.32185 0.90086 16.79s
2 0.23818 0.23006 1.03528 0.91086 16.71s
3 0.21420 0.20216 1.05954 0.92392 16.73s
4 0.20568 0.19130 1.07521 0.92914 16.73s
5 0.19666 0.18994 1.03538 0.92941 16.78s
6 0.18830 0.19670 0.95729 0.92256 16.82s
7 0.17907 0.17843 1.00359 0.93733 16.82s
8 0.17531 0.18581 0.94351 0.93031 16.83s
9 0.16684 0.15658 1.06552 0.94192 16.84s
10 0.16091 0.15927 1.01031 0.94003 16.84s
11 0.15783 0.15120 1.04388 0.94507 16.89s
12 0.14923 0.14641 1.01930 0.94769 16.91s
13 0.14590 0.13803 1.05702 0.95093 16.91s
14 0.14074 0.13239 1.06307 0.95210 16.90s
15 0.13380 0.13970 0.95776 0.94841 16.91s
16 0.13282 0.12554 1.05804 0.95732 16.91s
17 0.12575 0.12066 1.04216 0.95489 16.90s
18 0.12554 0.12431 1.00990 0.95534 16.92s
19 0.11948 0.11605 1.02956 0.96092 16.91s
20 0.11644 0.11147 1.04460 0.96380 16.92s
21 0.11208 0.11267 0.99475 0.96038 16.91s
22 0.10789 0.11551 0.93405 0.95795 16.90s
23 0.10817 0.11000 0.98344 0.96236 16.90s
24 0.10499 0.09585 1.09531 0.96759 16.90s
25 0.10411 0.09017 1.15457 0.96957 16.90s
26 0.09723 0.09729 0.99935 0.96677 16.90s
27 0.09703 0.09926 0.97756 0.96596 16.89s
28 0.09322 0.09865 0.94495 0.96623 16.91s
29 0.09483 0.09618 0.98595 0.96804 16.89s
30 0.08995 0.08449 1.06465 0.97335 16.90s
31 0.08950 0.08385 1.06730 0.97416 16.88s
32 0.08604 0.08195 1.04996 0.97335 16.89s
33 0.08268 0.08217 1.00623 0.97353 16.88s
34 0.08470 0.07941 1.06659 0.97389 16.89s
35 0.08425 0.07910 1.06510 0.97479 16.88s
36 0.07652 0.07337 1.04289 0.97650 16.89s
37 0.07779 0.07073 1.09981 0.97893 16.89s
38 0.07671 0.07825 0.98032 0.97659 16.90s
39 0.07470 0.07190 1.03893 0.97776 16.90s
40 0.07096 0.06797 1.04396 0.97938 16.91s
41 0.07152 0.06560 1.09026 0.97992 16.90s
42 0.06900 0.06714 1.02780 0.97983 16.91s
43 0.07036 0.06532 1.07709 0.98100 16.91s
44 0.06866 0.06749 1.01728 0.97911 16.90s
45 0.06758 0.06636 1.01833 0.97866 16.90s
46 0.06621 0.06277 1.05472 0.98019 16.91s
47 0.06748 0.07434 0.90761 0.97488 16.90s
48 0.06196 0.05428 1.14140 0.98415 16.90s
49 0.06563 0.06494 1.01053 0.98019 16.89s
50 0.06613 0.05811 1.13793 0.98379 16.90s
51 0.06085 0.06448 0.94372 0.97848 16.90s
52 0.06018 0.06325 0.95152 0.97938 16.89s
53 0.06030 0.05190 1.16175 0.98415 16.90s
54 0.05897 0.06341 0.92999 0.98100 16.91s
55 0.05858 0.06215 0.94258 0.98064 16.89s
56 0.05563 0.05647 0.98510 0.98361 16.90s
57 0.05802 0.06159 0.94192 0.98127 16.91s
58 0.05528 0.05557 0.99468 0.98361 16.91s
59 0.05775 0.05529 1.04452 0.98235 16.90s
60 0.05889 0.05524 1.06609 0.98361 16.90s
61 0.05263 0.04933 1.06701 0.98586 16.90s
62 0.05416 0.05686 0.95255 0.98325 16.91s
63 0.05362 0.05606 0.95658 0.98388 16.90s
64 0.05079 0.06002 0.84629 0.98190 16.90s
65 0.05269 0.04889 1.07776 0.98496 16.90s
66 0.05372 0.05547 0.96851 0.98226 16.90s
67 0.04968 0.05179 0.95932 0.98424 16.90s
68 0.04813 0.05547 0.86768 0.98379 16.90s
69 0.05022 0.05019 1.00065 0.98532 16.90s
70 0.04942 0.05300 0.93239 0.98469 16.90s
71 0.04943 0.05424 0.91141 0.98262 16.91s
72 0.04788 0.05358 0.89355 0.98505 16.91s
73 0.04825 0.05537 0.87139 0.98235 16.91s
74 0.04456 0.04543 0.98075 0.98784 16.91s
75 0.04877 0.05316 0.91743 0.98568 16.92s
76 0.04439 0.04577 0.96978 0.98721 16.92s
77 0.04745 0.04573 1.03757 0.98730 16.91s
78 0.04775 0.05274 0.90533 0.98469 16.91s
79 0.04481 0.06363 0.70432 0.98118 16.90s
80 0.04604 0.05342 0.86181 0.98577 16.91s
81 0.04585 0.05229 0.87672 0.98550 16.91s
82 0.04812 0.05190 0.92728 0.98550 16.90s
83 0.04559 0.05015 0.90919 0.98640 16.91s
84 0.04470 0.04700 0.95113 0.98685 16.91s
85 0.04039 0.04882 0.82733 0.98550 16.91s
86 0.04333 0.04883 0.88742 0.98442 16.91s
87 0.04432 0.04694 0.94427 0.98811 16.91s
88 0.04144 0.05768 0.71849 0.98478 16.91s
89 0.04185 0.05600 0.74735 0.98442 16.91s
90 0.03998 0.04491 0.89005 0.98811 16.91s
91 0.03874 0.05094 0.76052 0.98676 16.91s
92 0.04023 0.04240 0.94880 0.98829 16.90s
93 0.04279 0.05177 0.82667 0.98658 16.92s
94 0.03938 0.05313 0.74109 0.98568 16.92s
95 0.04132 0.05807 0.71164 0.98379 16.90s
96 0.03899 0.04701 0.82943 0.98685 16.92s
97 0.04133 0.05198 0.79498 0.98559 16.91s
98 0.03998 0.04789 0.83467 0.98910 16.90s
99 0.04122 0.05087 0.81029 0.98613 16.92s
100 0.03683 0.05104 0.72154 0.98640 16.91s
101 0.04023 0.04340 0.92700 0.98802 16.91s
102 0.03871 0.05131 0.75453 0.98748 16.91s
103 0.03700 0.05438 0.68036 0.98622 16.91s
104 0.03949 0.05719 0.69041 0.98541 16.91s
105 0.03738 0.04718 0.79236 0.98793 16.91s
106 0.03485 0.05103 0.68282 0.98739 16.91s
107 0.03740 0.05788 0.64616 0.98532 16.91s
108 0.03738 0.05382 0.69450 0.98703 16.91s
109 0.03452 0.05019 0.68774 0.98874 16.90s
110 0.03600 0.06529 0.55144 0.98460 16.90s
111 0.03540 0.05446 0.65009 0.98739 16.89s
112 0.03773 0.05332 0.70750 0.98658 16.90s
113 0.03422 0.06353 0.53867 0.98487 16.90s
114 0.03604 0.05904 0.61034 0.98631 16.90s
115 0.03713 0.04555 0.81507 0.98865 16.90s
116 0.03682 0.05547 0.66367 0.98658 16.90s
117 0.03295 0.05391 0.61123 0.98739 16.90s
118 0.03564 0.05779 0.61674 0.98586 16.90s
119 0.03534 0.04903 0.72077 0.98920 16.90s
120 0.03662 0.05949 0.61564 0.98523 16.91s
121 0.03579 0.04620 0.77468 0.98658 16.89s
122 0.03687 0.04388 0.84034 0.98757 16.90s
123 0.03199 0.04083 0.78334 0.98956 16.90s
124 0.03429 0.05442 0.63001 0.98559 16.91s
125 0.03533 0.03956 0.89303 0.98965 16.91s
126 0.03236 0.05060 0.63959 0.98775 16.89s
127 0.03306 0.04551 0.72644 0.98920 16.89s
128 0.03414 0.05368 0.63596 0.98721 16.90s
129 0.03170 0.06362 0.49835 0.98505 16.90s
130 0.03342 0.04983 0.67061 0.98739 16.90s
131 0.03471 0.05127 0.67700 0.98748 16.90s
132 0.03280 0.05516 0.59470 0.98595 16.90s
133 0.03269 0.05760 0.56751 0.98532 16.89s
134 0.03706 0.04881 0.75916 0.98793 16.89s
135 0.03327 0.04655 0.71459 0.98929 16.90s
136 0.03009 0.04519 0.66575 0.98947 16.90s
137 0.02972 0.05638 0.52720 0.98811 16.90s
138 0.03136 0.04839 0.64804 0.98883 16.90s
139 0.03246 0.05271 0.61578 0.98784 16.91s
140 0.03421 0.04283 0.79868 0.98983 16.90s
141 0.02843 0.04799 0.59243 0.98938 16.92s
142 0.03330 0.05237 0.63592 0.98901 16.89s
143 0.03421 0.05659 0.60448 0.98667 16.89s
144 0.03106 0.04603 0.67477 0.98739 16.90s
145 0.03035 0.03719 0.81604 0.99109 16.90s
146 0.03039 0.04672 0.65050 0.98766 16.91s
147 0.02885 0.04781 0.60343 0.98865 16.92s
148 0.03009 0.05008 0.60071 0.98730 16.90s
149 0.03039 0.04181 0.72694 0.98983 16.90s
150 0.03004 0.04937 0.60851 0.98793 16.90s
151 0.03086 0.05139 0.60059 0.98793 16.89s
152 0.03115 0.05099 0.61095 0.98865 16.90s
153 0.02948 0.05301 0.55618 0.98865 16.91s
154 0.02900 0.04592 0.63162 0.98983 16.90s
155 0.02907 0.04346 0.66877 0.99073 16.90s
156 0.02982 0.04861 0.61351 0.98883 16.90s
157 0.02993 0.05154 0.58069 0.98739 16.89s
158 0.03041 0.05694 0.53413 0.98694 16.90s
159 0.03189 0.06130 0.52029 0.98541 16.90s
160 0.02906 0.06067 0.47905 0.98622 16.89s
161 0.03232 0.05348 0.60429 0.98947 16.90s
162 0.02920 0.05066 0.57638 0.98929 16.90s
163 0.02924 0.05857 0.49926 0.98784 16.90s
164 0.02617 0.05508 0.47505 0.98703 16.90s
165 0.03115 0.04722 0.65972 0.98920 16.90s
166 0.02807 0.04529 0.61987 0.98901 16.90s
167 0.02908 0.04506 0.64547 0.98874 16.90s
168 0.02948 0.04968 0.59338 0.98766 16.89s
169 0.02823 0.04722 0.59782 0.98847 16.92s
170 0.03042 0.05159 0.58964 0.98775 16.90s
171 0.02894 0.04226 0.68483 0.98956 16.90s
172 0.02828 0.04817 0.58708 0.98766 16.90s
173 0.02942 0.05413 0.54349 0.98721 16.89s
174 0.02551 0.05017 0.50851 0.98901 16.89s
175 0.03126 0.04555 0.68635 0.99001 16.89s
176 0.02767 0.05217 0.53039 0.98956 16.90s
177 0.02707 0.04628 0.58499 0.98965 16.90s
178 0.02508 0.04235 0.59229 0.99109 16.90s
179 0.03145 0.04671 0.67320 0.98983 16.88s
180 0.02544 0.06000 0.42406 0.98820 16.89s
181 0.02719 0.06617 0.41093 0.98415 16.89s
182 0.02708 0.05175 0.52326 0.98721 16.90s
183 0.02615 0.04828 0.54165 0.98856 16.89s
184 0.02648 0.04425 0.59834 0.99001 16.89s
185 0.02902 0.05346 0.54288 0.98757 16.89s
186 0.02859 0.05057 0.56545 0.98838 16.90s
187 0.02733 0.05420 0.50415 0.98865 16.88s
188 0.02620 0.05425 0.48297 0.98811 16.89s
189 0.02502 0.04190 0.59708 0.99064 16.89s
190 0.02858 0.04849 0.58944 0.98947 16.89s
191 0.02982 0.04680 0.63709 0.98856 16.89s
192 0.02554 0.04829 0.52893 0.98901 16.89s
193 0.02522 0.04863 0.51856 0.98947 16.88s
194 0.02569 0.04843 0.53038 0.98775 16.91s
195 0.02640 0.04595 0.57451 0.98910 16.88s
Early stopping.
Best valid loss was 0.037194 at epoch 145.
Loaded parameters to layer 'conv1' (shape 64x3x3x3).
Loaded parameters to layer 'conv1' (shape 64).
Loaded parameters to layer 'conv2' (shape 48x64x3x3).
Loaded parameters to layer 'conv2' (shape 48).
Loaded parameters to layer 'conv3' (shape 48x48x3x3).
Loaded parameters to layer 'conv3' (shape 48).
Loaded parameters to layer 'conv4' (shape 48x48x3x3).
Loaded parameters to layer 'conv4' (shape 48).
Loaded parameters to layer 'hidden5' (shape 192x512).
Loaded parameters to layer 'hidden5' (shape 512).
Loaded parameters to layer 'output' (shape 512x2).
Loaded parameters to layer 'output' (shape 2).
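The "Early stopping." message and the parameter reloads above come from an on_epoch_finished handler that snapshots the best weights and restores them once validation loss stops improving; the 50-epoch gap between the best epoch (145) and the stop (195) suggests a patience of 50. A sketch of such a handler in the style nolearn commonly uses (the actual one lives in gp.nets; this version is illustrative):

import numpy as np

class EarlyStopping(object):
    def __init__(self, patience=50):
        self.patience = patience
        self.best_valid = np.inf
        self.best_valid_epoch = 0
        self.best_weights = None

    def __call__(self, nn, train_history):
        current_valid = train_history[-1]['valid_loss']
        current_epoch = train_history[-1]['epoch']
        if current_valid < self.best_valid:
            # new best: remember the epoch and snapshot all parameters
            self.best_valid = current_valid
            self.best_valid_epoch = current_epoch
            self.best_weights = nn.get_all_params_values()
        elif self.best_valid_epoch + self.patience < current_epoch:
            # no improvement for `patience` epochs: restore and stop
            print 'Early stopping.'
            print 'Best valid loss was {:.6f} at epoch {}.'.format(
                self.best_valid, self.best_valid_epoch)
            nn.load_params_from(self.best_weights)  # prints the 'Loaded parameters...' lines
            raise StopIteration()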
In [7]:
test_accuracy = cnn.score(X_test, y_test)  # mean accuracy on the held-out test set
In [8]:
test_accuracy
Out[8]:
0.9195725534308211
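A single accuracy number hides where the 0.92 test score loses ground relative to the 0.991 best validation accuracy; a minimal follow-up, assuming scikit-learn is available alongside nolearn in this environment:

from sklearn.metrics import confusion_matrix

y_pred = cnn.predict(X_test)            # hard 0/1 predictions from the softmax output
print confusion_matrix(y_test, y_pred)  # rows: true class, columns: predicted class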
In [8]:
plot_loss(cnn)
Out[8]:
<module 'matplotlib.pyplot' from '/n/home05/haehn/nolearncox/lib/python2.7/site-packages/matplotlib-1.5.2-py2.7-linux-x86_64.egg/matplotlib/pyplot.pyc'>
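plot_loss simply reads cnn.train_history_, a list of per-epoch dicts; the same curves can be drawn by hand when the figure needs customizing:

import matplotlib.pyplot as plt

train_loss = [row['train_loss'] for row in cnn.train_history_]
valid_loss = [row['valid_loss'] for row in cnn.train_history_]
plt.plot(train_loss, label='train loss')
plt.plot(valid_loss, label='valid loss')
plt.yscale('log')
plt.legend()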
In [9]:
# store the trained CNN; pickling a nolearn net recurses deeply through
# the Theano graph, so the recursion limit has to be raised first
sys.setrecursionlimit(1000000000)
with open(os.path.expanduser('~/Projects/gp/nets/RGBPlus.p'), 'wb') as f:
    pickle.dump(cnn, f, -1)
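Reloading later is the mirror image, with the caveat that a pickled nolearn net only unpickles against compatible theano/lasagne versions:

with open(os.path.expanduser('~/Projects/gp/nets/RGBPlus.p'), 'rb') as f:
    cnn = pickle.load(f)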