PyGreentea Network Generator

Load the dependencies


In [1]:
%matplotlib inline

from __future__ import print_function
import h5py
import numpy as np
from numpy import float32, int32, uint8, dtype
import sys
import matplotlib.pyplot as plt


pygt_path = '../PyGreentea'
import sys, os
sys.path.append(os.path.join(os.path.dirname(os.getcwd()), pygt_path))

import math

import PyGreentea as pygt

Load the default network template


In [2]:
# Start from PyGreentea's default network configuration; individual fields
# (memory limits, shapes, loss function) are overridden in the cells below.
netconf = pygt.netgen.NetConf()

Set the memory limits for the GPU


In [3]:
# We use cuDNN, so the convolution buffer is not counted against the global limit:
netconf.ignore_conv_buffer = True
# 4 GB total, ignore convolution buffer. Let's keep 0.5 GB for implementation dependent buffers.
netconf.mem_global_limit = 3.5 * 1024 * 1024 * 1024
# 3.5 GB convolution buffer limit (same 0.5 GB headroom as above)
netconf.mem_buf_limit = 3.5 * 1024 * 1024 * 1024

Explore possible network input/output shapes for the chosen settings


In [4]:
# We test memory usage for training
mode = pygt.netgen.caffe_pb2.TRAIN
# The minimum we're interested in
shape_min = [100,100,100]
# And maximum
shape_max = [200,200,200]
# Constraints tie dim 1 to dim 0 and dim 2 to dim 1, so only cubic shapes
# (Z == Y == X) are searched — see the all-equal shapes in the output below.
# NOTE(review): the original comment claimed Z and Y were independent; that
# does not match these lambdas — confirm which behavior was intended.
constraints = [None, lambda x: x[0], lambda x: x[1]]

# Single 3^3 convolution per U-Net level on both the down and up paths.
netconf.u_netconfs[0].unet_conv_down = [[[3]]]
netconf.u_netconfs[0].unet_conv_up = [[[3]]]

# Compute (can be quite intensive)
inshape, outshape, fmaps = pygt.netgen.compute_valid_io_shapes(netconf,mode,shape_min,shape_max,constraints=constraints)


-- Invalid: [100] => []
-- Invalid: [101] => []
++++ Valid: [102] => [58]
-- Invalid: [103] => []
-- Invalid: [104] => []
-- Invalid: [105] => []
-- Invalid: [106] => []
-- Invalid: [107] => []
-- Invalid: [108] => []
-- Invalid: [109] => []
++++ Valid: [110] => [66]
-- Invalid: [111] => []
-- Invalid: [112] => []
-- Invalid: [113] => []
-- Invalid: [114] => []
-- Invalid: [115] => []
-- Invalid: [116] => []
-- Invalid: [117] => []
++++ Valid: [118] => [74]
-- Invalid: [119] => []
-- Invalid: [120] => []
-- Invalid: [121] => []
-- Invalid: [122] => []
-- Invalid: [123] => []
-- Invalid: [124] => []
-- Invalid: [125] => []
++++ Valid: [126] => [82]
-- Invalid: [127] => []
-- Invalid: [128] => []
-- Invalid: [129] => []
-- Invalid: [130] => []
-- Invalid: [131] => []
-- Invalid: [132] => []
-- Invalid: [133] => []
++++ Valid: [134] => [90]
-- Invalid: [135] => []
-- Invalid: [136] => []
-- Invalid: [137] => []
-- Invalid: [138] => []
-- Invalid: [139] => []
-- Invalid: [140] => []
-- Invalid: [141] => []
++++ Valid: [142] => [98]
-- Invalid: [143] => []
-- Invalid: [144] => []
-- Invalid: [145] => []
-- Invalid: [146] => []
-- Invalid: [147] => []
-- Invalid: [148] => []
-- Invalid: [149] => []
++++ Valid: [150] => [106]
-- Invalid: [151] => []
-- Invalid: [152] => []
-- Invalid: [153] => []
-- Invalid: [154] => []
-- Invalid: [155] => []
-- Invalid: [156] => []
-- Invalid: [157] => []
++++ Valid: [158] => [114]
-- Invalid: [159] => []
-- Invalid: [160] => []
-- Invalid: [161] => []
-- Invalid: [162] => []
-- Invalid: [163] => []
-- Invalid: [164] => []
-- Invalid: [165] => []
++++ Valid: [166] => [122]
-- Invalid: [167] => []
-- Invalid: [168] => []
-- Invalid: [169] => []
-- Invalid: [170] => []
-- Invalid: [171] => []
-- Invalid: [172] => []
-- Invalid: [173] => []
++++ Valid: [174] => [130]
-- Invalid: [175] => []
-- Invalid: [176] => []
-- Invalid: [177] => []
-- Invalid: [178] => []
-- Invalid: [179] => []
-- Invalid: [180] => []
-- Invalid: [181] => []
++++ Valid: [182] => [138]
-- Invalid: [183] => []
-- Invalid: [184] => []
-- Invalid: [185] => []
-- Invalid: [186] => []
-- Invalid: [187] => []
-- Invalid: [188] => []
-- Invalid: [189] => []
++++ Valid: [190] => [146]
-- Invalid: [191] => []
-- Invalid: [192] => []
-- Invalid: [193] => []
-- Invalid: [194] => []
-- Invalid: [195] => []
-- Invalid: [196] => []
-- Invalid: [197] => []
++++ Valid: [198] => [154]
-- Invalid: [199] => []
-- Invalid: [200] => []
++++ Valid: [102, 102] => [58, 58]
++++ Valid: [110, 110] => [66, 66]
++++ Valid: [118, 118] => [74, 74]
++++ Valid: [126, 126] => [82, 82]
++++ Valid: [134, 134] => [90, 90]
++++ Valid: [142, 142] => [98, 98]
++++ Valid: [150, 150] => [106, 106]
++++ Valid: [158, 158] => [114, 114]
++++ Valid: [166, 166] => [122, 122]
++++ Valid: [174, 174] => [130, 130]
++++ Valid: [182, 182] => [138, 138]
++++ Valid: [190, 190] => [146, 146]
++++ Valid: [198, 198] => [154, 154]
++++ Valid: [102, 102, 102] => [58, 58, 58]
++++ Valid: [110, 110, 110] => [66, 66, 66]
++++ Valid: [118, 118, 118] => [74, 74, 74]
++++ Valid: [126, 126, 126] => [82, 82, 82]
++++ Valid: [134, 134, 134] => [90, 90, 90]
++++ Valid: [142, 142, 142] => [98, 98, 98]
++++ Valid: [150, 150, 150] => [106, 106, 106]
++++ Valid: [158, 158, 158] => [114, 114, 114]
++++ Valid: [166, 166, 166] => [122, 122, 122]
++++ Valid: [174, 174, 174] => [130, 130, 130]
++++ Valid: [182, 182, 182] => [138, 138, 138]
++++ Valid: [190, 190, 190] => [146, 146, 146]
++++ Valid: [198, 198, 198] => [154, 154, 154]
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
64 in [1, 1]
128 in [1, 1]
96 in [64, 128]
79 in [64, 95]
71 in [64, 78]
75 in [72, 78]
73 in [72, 74]
74 in [74, 74]
73 in [74, 73]
Current shape: 0, [102, 102, 102], 73
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
64 in [1, 1]
48 in [32, 64]
56 in [49, 64]
60 in [57, 64]
58 in [57, 59]
57 in [57, 57]
56 in [57, 56]
Current shape: 1, [110, 110, 110], 56
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
64 in [1, 1]
48 in [32, 64]
39 in [32, 47]
43 in [40, 47]
45 in [44, 47]
44 in [44, 44]
43 in [44, 43]
Current shape: 2, [118, 118, 118], 43
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
64 in [1, 1]
48 in [32, 64]
39 in [32, 47]
35 in [32, 38]
33 in [32, 34]
34 in [34, 34]
Current shape: 3, [126, 126, 126], 34
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
24 in [16, 32]
28 in [25, 32]
26 in [25, 27]
27 in [27, 27]
Current shape: 4, [134, 134, 134], 27
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
24 in [16, 32]
19 in [16, 23]
21 in [20, 23]
22 in [22, 23]
21 in [22, 21]
Current shape: 5, [142, 142, 142], 21
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
32 in [1, 1]
24 in [16, 32]
19 in [16, 23]
17 in [16, 18]
18 in [18, 18]
17 in [18, 17]
Current shape: 6, [150, 150, 150], 17
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
12 in [8, 16]
14 in [13, 16]
15 in [15, 16]
14 in [15, 14]
Current shape: 7, [158, 158, 158], 14
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
12 in [8, 16]
14 in [13, 16]
13 in [13, 13]
12 in [13, 12]
Current shape: 8, [166, 166, 166], 12
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
12 in [8, 16]
9 in [8, 11]
10 in [10, 11]
11 in [11, 11]
10 in [11, 10]
Current shape: 9, [174, 174, 174], 10
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
16 in [1, 1]
12 in [8, 16]
9 in [8, 11]
8 in [8, 8]
Current shape: 10, [182, 182, 182], 8
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
6 in [4, 8]
7 in [7, 8]
8 in [8, 8]
7 in [8, 7]
Current shape: 11, [190, 190, 190], 7
2 in [1, 1]
4 in [1, 1]
8 in [1, 1]
6 in [4, 8]
7 in [7, 8]
6 in [7, 6]
Current shape: 12, [198, 198, 198], 6

Visualization


In [5]:
# Trade-off plot: bigger output volumes leave memory for fewer feature maps.
fig, ax = plt.subplots()
output_volumes = [np.prod(shape) for shape in outshape]
ax.scatter(output_volumes, fmaps, alpha=0.5)
ax.set_xlabel('Combined output size')
ax.set_ylabel('Feature maps')
plt.show()


Pick parameters, actually generate and store the network


In [6]:
# Pick the first (smallest) valid configuration found by the search above.
netconf.input_shape = inshape[0]
netconf.output_shape = outshape[0]
netconf.fmap_start = fmaps[0]

print('Input shape: %s' % netconf.input_shape)
print('Output shape: %s' % netconf.output_shape)
print('Feature maps: %s' % netconf.fmap_start)

# Generate one training net per loss function; the test net is the same either way.
netconf.loss_function = "euclid"
train_net_conf_euclid, test_net_conf = pygt.netgen.create_nets(netconf)
netconf.loss_function = "malis"
train_net_conf_malis, test_net_conf = pygt.netgen.create_nets(netconf)

# Serialize each prototxt to disk.
prototxt_files = [
    ('net_train_euclid.prototxt', train_net_conf_euclid),
    ('net_train_malis.prototxt', train_net_conf_malis),
    ('net_test.prototxt', test_net_conf),
]
for path, net_conf in prototxt_files:
    with open(path, 'w') as f:
        print(net_conf, file=f)


Input shape: [102, 102, 102]
Output shape: [58, 58, 58]
Feature maps: 73
Shape: [0]
f: 1 w: [102, 102, 102] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [1]
f: 73 w: [100, 100, 100] d: [1, 1, 1]
WM: 7884
CM: 114610464
AM: 584000000
Shape: [2]
f: 73 w: [50, 50, 50] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [3]
f: 219 w: [48, 48, 48] d: [1, 1, 1]
WM: 1726596
CM: 985500000
AM: 193757184
Shape: [4]
f: 219 w: [24, 24, 24] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [5]
f: 657 w: [22, 22, 22] d: [1, 1, 1]
WM: 15539364
CM: 326965248
AM: 55965888
Shape: [6]
f: 657 w: [11, 11, 11] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [7]
f: 1971 w: [9, 9, 9] d: [1, 1, 1]
WM: 139854276
CM: 94442436
AM: 11494872
Shape: [8]
f: 1971 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [9]
f: 657 w: [18, 18, 18] d: [1, 1, 1]
WM: 5179788
CM: 367835904
AM: 0
Shape: [10]
f: 1314 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [11]
f: 657 w: [16, 16, 16] d: [1, 1, 1]
WM: 93236184
CM: 827630784
AM: 21528576
Shape: [12]
f: 657 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [13]
f: 219 w: [32, 32, 32] d: [1, 1, 1]
WM: 575532
CM: 688914432
AM: 0
Shape: [14]
f: 438 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [15]
f: 219 w: [30, 30, 30] d: [1, 1, 1]
WM: 10359576
CM: 1550057472
AM: 47304000
Shape: [16]
f: 219 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [17]
f: 73 w: [60, 60, 60] d: [1, 1, 1]
WM: 63948
CM: 1513728000
AM: 0
Shape: [18]
f: 146 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [19]
f: 73 w: [58, 58, 58] d: [1, 1, 1]
WM: 1151064
CM: 3405888000
AM: 113945408
Shape: [20]
f: 3 w: [58, 58, 58] d: [1, 1, 1]
WM: 876
CM: 56972704
AM: 0
Max. memory requirements: 6104205920 B
Weight memory: 267695088 B
Max. conv buffer: 3405888000 B
Shape: [0]
f: 1 w: [102, 102, 102] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [1]
f: 73 w: [100, 100, 100] d: [1, 1, 1]
WM: 7884
CM: 114610464
AM: 584000000
Shape: [2]
f: 73 w: [50, 50, 50] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [3]
f: 219 w: [48, 48, 48] d: [1, 1, 1]
WM: 1726596
CM: 985500000
AM: 193757184
Shape: [4]
f: 219 w: [24, 24, 24] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [5]
f: 657 w: [22, 22, 22] d: [1, 1, 1]
WM: 15539364
CM: 326965248
AM: 55965888
Shape: [6]
f: 657 w: [11, 11, 11] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [7]
f: 1971 w: [9, 9, 9] d: [1, 1, 1]
WM: 139854276
CM: 94442436
AM: 11494872
Shape: [8]
f: 1971 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [9]
f: 657 w: [18, 18, 18] d: [1, 1, 1]
WM: 5179788
CM: 367835904
AM: 0
Shape: [10]
f: 1314 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [11]
f: 657 w: [16, 16, 16] d: [1, 1, 1]
WM: 93236184
CM: 827630784
AM: 21528576
Shape: [12]
f: 657 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [13]
f: 219 w: [32, 32, 32] d: [1, 1, 1]
WM: 575532
CM: 688914432
AM: 0
Shape: [14]
f: 438 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [15]
f: 219 w: [30, 30, 30] d: [1, 1, 1]
WM: 10359576
CM: 1550057472
AM: 47304000
Shape: [16]
f: 219 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [17]
f: 73 w: [60, 60, 60] d: [1, 1, 1]
WM: 63948
CM: 1513728000
AM: 0
Shape: [18]
f: 146 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [19]
f: 73 w: [58, 58, 58] d: [1, 1, 1]
WM: 1151064
CM: 3405888000
AM: 113945408
Shape: [20]
f: 3 w: [58, 58, 58] d: [1, 1, 1]
WM: 876
CM: 56972704
AM: 0
Max. memory requirements: 4888894504 B
Weight memory: 267695088 B
Max. conv buffer: 3405888000 B
Shape: [0]
f: 1 w: [102, 102, 102] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [1]
f: 73 w: [100, 100, 100] d: [1, 1, 1]
WM: 7884
CM: 114610464
AM: 584000000
Shape: [2]
f: 73 w: [50, 50, 50] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [3]
f: 219 w: [48, 48, 48] d: [1, 1, 1]
WM: 1726596
CM: 985500000
AM: 193757184
Shape: [4]
f: 219 w: [24, 24, 24] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [5]
f: 657 w: [22, 22, 22] d: [1, 1, 1]
WM: 15539364
CM: 326965248
AM: 55965888
Shape: [6]
f: 657 w: [11, 11, 11] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [7]
f: 1971 w: [9, 9, 9] d: [1, 1, 1]
WM: 139854276
CM: 94442436
AM: 11494872
Shape: [8]
f: 1971 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [9]
f: 657 w: [18, 18, 18] d: [1, 1, 1]
WM: 5179788
CM: 367835904
AM: 0
Shape: [10]
f: 1314 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [11]
f: 657 w: [16, 16, 16] d: [1, 1, 1]
WM: 93236184
CM: 827630784
AM: 21528576
Shape: [12]
f: 657 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [13]
f: 219 w: [32, 32, 32] d: [1, 1, 1]
WM: 575532
CM: 688914432
AM: 0
Shape: [14]
f: 438 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [15]
f: 219 w: [30, 30, 30] d: [1, 1, 1]
WM: 10359576
CM: 1550057472
AM: 47304000
Shape: [16]
f: 219 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [17]
f: 73 w: [60, 60, 60] d: [1, 1, 1]
WM: 63948
CM: 1513728000
AM: 0
Shape: [18]
f: 146 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [19]
f: 73 w: [58, 58, 58] d: [1, 1, 1]
WM: 1151064
CM: 3405888000
AM: 113945408
Shape: [20]
f: 3 w: [58, 58, 58] d: [1, 1, 1]
WM: 876
CM: 56972704
AM: 0
Max. memory requirements: 6104205920 B
Weight memory: 267695088 B
Max. conv buffer: 3405888000 B
Shape: [0]
f: 1 w: [102, 102, 102] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [1]
f: 73 w: [100, 100, 100] d: [1, 1, 1]
WM: 7884
CM: 114610464
AM: 584000000
Shape: [2]
f: 73 w: [50, 50, 50] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [3]
f: 219 w: [48, 48, 48] d: [1, 1, 1]
WM: 1726596
CM: 985500000
AM: 193757184
Shape: [4]
f: 219 w: [24, 24, 24] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [5]
f: 657 w: [22, 22, 22] d: [1, 1, 1]
WM: 15539364
CM: 326965248
AM: 55965888
Shape: [6]
f: 657 w: [11, 11, 11] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [7]
f: 1971 w: [9, 9, 9] d: [1, 1, 1]
WM: 139854276
CM: 94442436
AM: 11494872
Shape: [8]
f: 1971 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [9]
f: 657 w: [18, 18, 18] d: [1, 1, 1]
WM: 5179788
CM: 367835904
AM: 0
Shape: [10]
f: 1314 w: [18, 18, 18] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [11]
f: 657 w: [16, 16, 16] d: [1, 1, 1]
WM: 93236184
CM: 827630784
AM: 21528576
Shape: [12]
f: 657 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [13]
f: 219 w: [32, 32, 32] d: [1, 1, 1]
WM: 575532
CM: 688914432
AM: 0
Shape: [14]
f: 438 w: [32, 32, 32] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [15]
f: 219 w: [30, 30, 30] d: [1, 1, 1]
WM: 10359576
CM: 1550057472
AM: 47304000
Shape: [16]
f: 219 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [17]
f: 73 w: [60, 60, 60] d: [1, 1, 1]
WM: 63948
CM: 1513728000
AM: 0
Shape: [18]
f: 146 w: [60, 60, 60] d: [1, 1, 1]
WM: 0
CM: 0
AM: 0
Shape: [19]
f: 73 w: [58, 58, 58] d: [1, 1, 1]
WM: 1151064
CM: 3405888000
AM: 113945408
Shape: [20]
f: 3 w: [58, 58, 58] d: [1, 1, 1]
WM: 876
CM: 56972704
AM: 0
Max. memory requirements: 4888894504 B
Weight memory: 267695088 B
Max. conv buffer: 3405888000 B

In [ ]:


In [ ]:


In [ ]:


In [ ]: