In [98]:
import mxnet as mx

In [99]:
inputdata = mx.sym.Variable("input")

In [100]:
def conv_layer(data_in, filter_size, pooling_size, is_conv, is_pooling):
    """
    Build one convolution block (Convolution -> BatchNorm -> ReLU),
    optionally followed by a max-pooling layer.

    :param data_in: input symbol (NCHW)
    :param filter_size: convolution kernel size (square)
    :param pooling_size: pooling kernel size (square)
    :param is_conv: whether to apply the convolution block
    :param is_pooling: whether to apply the pooling layer
    :return: a single convolution layer symbol
    """
    layer = data_in
    if is_conv:
        # "Same" padding for odd kernels so the spatial size is preserved
        # (1x1 -> pad 0, 3x3 -> pad 1, 5x5 -> pad 2); this keeps the
        # branch outputs concatenable in the inception layer below.
        pad = (filter_size - 1) // 2
        conv = mx.sym.Convolution(data = layer,
                                  kernel = (filter_size, filter_size),
                                  num_filter = 64,
                                  pad = (pad, pad),
                                  stride = (1, 1))
        # batch normalization
        batch_layer = mx.sym.BatchNorm(conv)
        # activation layer
        layer = mx.sym.Activation(batch_layer,
                                  act_type = 'relu')
    # pooling layer
    if is_pooling:
        # pad analogously so an odd pooling kernel also preserves size
        pool_pad = (pooling_size - 1) // 2
        layer = mx.sym.Pooling(layer,
                               kernel = (pooling_size, pooling_size),
                               pad = (pool_pad, pool_pad),
                               pool_type = 'max')

    return layer

In [101]:
output = conv_layer(inputdata, 3, 2,True,True)

In [102]:
mx.viz.plot_network(output)


Out[102]:
[Network graph: input -> Convolution 3x3/1x1, 64 -> BatchNorm -> Activation relu -> Pooling max 2x2/1]
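
As a quick sanity check, we can let MXNet infer the block's output shape. The (1, 3, 28, 28) NCHW input shape below is just an assumed example, not something the symbol fixes:

In [ ]:
# Infer shapes for an assumed NCHW input of (batch=1, channels=3, 28x28).
# The 3x3 convolution preserves the spatial size ("same" padding) and the
# 2x2/1 max pooling shrinks it by one pixel per dimension.
_, out_shapes, _ = output.infer_shape(input=(1, 3, 28, 28))
print(out_shapes)  # [(1, 64, 27, 27)]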

In [103]:
def get_conv_sym(n_layer, inputdata):

    """
    Stack n_layer convolution blocks and finish with a fully
    connected output layer.

    :param n_layer: number of convolution blocks
    :param inputdata: input symbol (NCHW)
    :return: symbol of a convolutional neural network
    """
    # Convolution expects 4-D NCHW input, so the data must not be
    # flattened before the convolutional stack.
    layer = inputdata
    for i in range(n_layer):
        layer = conv_layer(layer, 3, 2, True, True)
    # flatten only before the fully connected classifier
    layer = mx.sym.Flatten(layer)
    l = mx.sym.FullyConnected(layer, num_hidden = 10)
    return l

In [104]:
sym = get_conv_sym(5, inputdata)

In [105]:
mx.viz.plot_network(sym)


Out[105]:
[Network graph: input -> 5 x (Convolution 3x3/1x1, 64 -> BatchNorm -> Activation relu -> Pooling max 2x2/1) -> Flatten -> FullyConnected 10]
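
The same shape check works on the full stack. Assuming an MNIST-like (1, 1, 28, 28) input, each of the five 2x2/1 poolings trims one pixel per dimension (28 -> 23), and the FullyConnected layer maps the flattened features to 10 outputs:

In [ ]:
# Assumed MNIST-like input; the exact input size is an example only.
_, out_shapes, _ = sym.infer_shape(input=(1, 1, 28, 28))
print(out_shapes)  # [(1, 10)]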

In [106]:
def inception_layer(inputdata):
    """
    Inception-style block: four parallel branches over the same input,
    concatenated along the channel dimension.
    """
    # branch 1: 1x1 convolution
    l1 = conv_layer(inputdata, 1, 1, True, False)
    # branch 2: 1x1 convolution followed by 3x3 convolution
    l21 = conv_layer(inputdata, 1, 1, True, False)
    l2 = conv_layer(l21, 3, 2, True, False)
    # branch 3: 1x1 convolution followed by 5x5 convolution
    l31 = conv_layer(inputdata, 1, 1, True, False)
    l3 = conv_layer(l31, 5, 2, True, False)
    # branch 4: 3x3 max pooling followed by 1x1 convolution
    l41 = conv_layer(inputdata, 1, 3, False, True)
    l4 = conv_layer(l41, 1, 1, True, False)
    # merge the branches along the channel axis
    l = mx.sym.Concat(l1, l2, l3, l4)
    return l

In [107]:
inception = inception_layer(inputdata)
mx.viz.plot_network(inception)


Out[107]:
[Network graph: four branches from input (1x1 conv; 1x1 then 3x3 conv; 1x1 then 5x5 conv; 3x3/1 max pool then 1x1 conv), each convolution followed by BatchNorm and ReLU, merged by Concat]
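
Because every branch preserves the spatial size (the padding in conv_layer is chosen per kernel), the Concat is well defined and simply stacks channels: 4 x 64 = 256. A shape check with an assumed (1, 3, 32, 32) input:

In [ ]:
# All four branches keep the 32x32 spatial size, so Concat stacks
# their 64 channels each into 256.
_, out_shapes, _ = inception.infer_shape(input=(1, 3, 32, 32))
print(out_shapes)  # [(1, 256, 32, 32)]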