In [6]:
KERAS_MODEL_FILEPATH = '../../demos/data/densenet121/densenet121.h5'

In [2]:
import warnings  # used below to warn about channels_first with the TensorFlow backend

from keras.models import Model
from keras.layers.core import Dense, Dropout, Activation, Reshape
from keras.layers.convolutional import Conv2D, Conv2DTranspose, UpSampling2D
from keras.layers.pooling import AveragePooling2D, MaxPooling2D
from keras.layers.pooling import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers.merge import concatenate
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2
from keras.utils.layer_utils import convert_all_kernels_in_model, convert_dense_weights_data_format
from keras.utils.data_utils import get_file
from keras.engine.topology import get_source_inputs
from keras.applications.imagenet_utils import _obtain_input_shape
from keras.applications.imagenet_utils import decode_predictions
import keras.backend as K

DENSENET_121_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32.h5'
DENSENET_161_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48.h5'
DENSENET_169_WEIGHTS_PATH = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32.h5'
DENSENET_121_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-121-32-no-top.h5'
DENSENET_161_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-161-48-no-top.h5'
DENSENET_169_WEIGHTS_PATH_NO_TOP = r'https://github.com/titu1994/DenseNet/releases/download/v3.0/DenseNet-BC-169-32-no-top.h5'

def preprocess_input(x, data_format=None):
    """Preprocesses a tensor encoding a batch of images.

    # Arguments
        x: input Numpy tensor, 4D.
        data_format: data format of the image tensor.

    # Returns
        Preprocessed tensor.
    """
    if data_format is None:
        data_format = K.image_data_format()
    assert data_format in {'channels_last', 'channels_first'}

    if data_format == 'channels_first':
        if x.ndim == 3:
            # 'RGB'->'BGR'
            x = x[::-1, ...]
            # Zero-center by mean pixel
            x[0, :, :] -= 103.939
            x[1, :, :] -= 116.779
            x[2, :, :] -= 123.68
        else:
            x = x[:, ::-1, ...]
            x[:, 0, :, :] -= 103.939
            x[:, 1, :, :] -= 116.779
            x[:, 2, :, :] -= 123.68
    else:
        # 'RGB'->'BGR'
        x = x[..., ::-1]
        # Zero-center by mean pixel
        x[..., 0] -= 103.939
        x[..., 1] -= 116.779
        x[..., 2] -= 123.68

    x *= 0.017  # scale values (~1/58.8; the pretrained weights expect this scaling)

    return x
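
# Minimal usage sketch (assumption: inputs are float32 RGB arrays in [0, 255],
# e.g. produced by keras.preprocessing.image.img_to_array); the random batch
# below merely stands in for real images.
import numpy as np
_demo = np.random.uniform(0., 255., size=(2, 224, 224, 3)).astype('float32')
_demo = preprocess_input(_demo)  # RGB->BGR, mean subtraction, scaling by 0.017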


def DenseNet(input_shape=None, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1, nb_layers_per_block=-1,
             bottleneck=False, reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, subsample_initial_block=False,
             include_top=True, weights=None, input_tensor=None,
             classes=10, activation='softmax'):
    '''Instantiate the DenseNet architecture,
        optionally loading weights pre-trained
        on ImageNet. Note that when using TensorFlow,
        for best performance you should set
        `image_data_format='channels_last'` in your Keras config
        at ~/.keras/keras.json.
        The model and the weights are compatible with both
        TensorFlow and Theano. The dimension ordering
        convention used by the model is the one
        specified in your Keras config file.
        # Arguments
            input_shape: optional shape tuple, only to be specified
                if `include_top` is False (otherwise the input shape
                has to be `(32, 32, 3)` with `channels_last` dim ordering
                or `(3, 32, 32)` with `channels_first` dim ordering).
                It should have exactly 3 input channels,
                and width and height should be no smaller than 8.
                E.g. `(200, 200, 3)` would be one valid value.
            depth: number of layers in the DenseNet
            nb_dense_block: number of dense blocks to add to end (generally = 3)
            growth_rate: number of filters to add per dense block
            nb_filter: initial number of filters. -1 indicates initial
                number of filters is 2 * growth_rate
            nb_layers_per_block: number of layers in each dense block.
                Can be -1, a positive integer, or a list.
                If -1, calculates nb_layers_per_block from the network depth.
                If a positive integer, a fixed number of layers per dense block.
                If a list, nb_layers is used as provided. Note that the list
                length must equal nb_dense_block.
            bottleneck: flag to add bottleneck blocks in between dense blocks
            reduction: reduction factor of transition blocks.
                Note : reduction value is inverted to compute compression.
            dropout_rate: dropout rate
            weight_decay: weight decay rate
            subsample_initial_block: Set to True to subsample the initial convolution and
                add a MaxPool2D before the dense blocks are added.
            include_top: whether to include the fully-connected
                layer at the top of the network.
            weights: one of `None` (random initialization) or
                'imagenet' (pre-training on ImageNet).
            input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
                to use as image input for the model.
            classes: optional number of classes to classify images
                into, only to be specified if `include_top` is True, and
                if no `weights` argument is specified.
            activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'.
                Note that if sigmoid is used, classes must be 1.
        # Returns
            A Keras model instance.
        '''

    if weights not in {'imagenet', None}:
        raise ValueError('The `weights` argument should be either '
                         '`None` (random initialization) or `imagenet` '
                         '(pre-training on ImageNet).')

    if weights == 'imagenet' and include_top and classes != 1000:
        raise ValueError('If using `weights` as ImageNet with `include_top`'
                         ' as true, `classes` should be 1000')

    if activation not in ['softmax', 'sigmoid']:
        raise ValueError('activation must be one of "softmax" or "sigmoid"')

    if activation == 'sigmoid' and classes != 1:
        raise ValueError('sigmoid activation can only be used when classes = 1')

    # Determine proper input shape
    input_shape = _obtain_input_shape(input_shape,
                                      default_size=32,
                                      min_size=8,
                                      data_format=K.image_data_format(),
                                      require_flatten=include_top)

    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    x = __create_dense_net(classes, img_input, include_top, depth, nb_dense_block,
                           growth_rate, nb_filter, nb_layers_per_block, bottleneck, reduction,
                           dropout_rate, weight_decay, subsample_initial_block, activation)

    # Ensure that the model takes into account
    # any potential predecessors of `input_tensor`.
    if input_tensor is not None:
        inputs = get_source_inputs(input_tensor)
    else:
        inputs = img_input
    # Create model.
    model = Model(inputs, x, name='densenet')

    # load weights
    if weights == 'imagenet':
        weights_loaded = False

        if (depth == 121) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \
                (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block):
            if include_top:
                weights_path = get_file('DenseNet-BC-121-32.h5',
                                        DENSENET_121_WEIGHTS_PATH,
                                        cache_subdir='models',
                                        md5_hash='a439dd41aa672aef6daba4ee1fd54abd')
            else:
                weights_path = get_file('DenseNet-BC-121-32-no-top.h5',
                                        DENSENET_121_WEIGHTS_PATH_NO_TOP,
                                        cache_subdir='models',
                                        md5_hash='55e62a6358af8a0af0eedf399b5aea99')
            model.load_weights(weights_path)
            weights_loaded = True

        if (depth == 161) and (nb_dense_block == 4) and (growth_rate == 48) and (nb_filter == 96) and \
                (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block):
            if include_top:
                weights_path = get_file('DenseNet-BC-161-48.h5',
                                        DENSENET_161_WEIGHTS_PATH,
                                        cache_subdir='models',
                                        md5_hash='6c326cf4fbdb57d31eff04333a23fcca')
            else:
                weights_path = get_file('DenseNet-BC-161-48-no-top.h5',
                                        DENSENET_161_WEIGHTS_PATH_NO_TOP,
                                        cache_subdir='models',
                                        md5_hash='1a9476b79f6b7673acaa2769e6427b92')
            model.load_weights(weights_path)
            weights_loaded = True

        if (depth == 169) and (nb_dense_block == 4) and (growth_rate == 32) and (nb_filter == 64) and \
                (bottleneck is True) and (reduction == 0.5) and (dropout_rate == 0.0) and (subsample_initial_block):
            if include_top:
                weights_path = get_file('DenseNet-BC-169-32.h5',
                                        DENSENET_169_WEIGHTS_PATH,
                                        cache_subdir='models',
                                        md5_hash='914869c361303d2e39dec640b4e606a6')
            else:
                weights_path = get_file('DenseNet-BC-169-32-no-top.h5',
                                        DENSENET_169_WEIGHTS_PATH_NO_TOP,
                                        cache_subdir='models',
                                        md5_hash='89c19e8276cfd10585d5fadc1df6859e')
            model.load_weights(weights_path)
            weights_loaded = True

        if weights_loaded:
            if K.backend() == 'theano':
                convert_all_kernels_in_model(model)

            if K.image_data_format() == 'channels_first' and K.backend() == 'tensorflow':
                warnings.warn('You are using the TensorFlow backend, yet you '
                              'are using the Theano '
                              'image data format convention '
                              '(`image_data_format="channels_first"`). '
                              'For best performance, set '
                              '`image_data_format="channels_last"` in '
                              'your Keras config '
                              'at ~/.keras/keras.json.')

            print("Weights for the model were loaded successfully")

    return model
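

# Sketch: the defaults above build the CIFAR-sized DenseNet-40 of the paper
# (depth=40 gives 12 layers per dense block with 3 blocks and growth rate 12).
# Left commented out so that running this cell does not build extra graphs:
# densenet_40 = DenseNet(input_shape=(32, 32, 3), depth=40, nb_dense_block=3,
#                        growth_rate=12, weights=None, classes=10)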


def DenseNetFCN(input_shape, nb_dense_block=5, growth_rate=16, nb_layers_per_block=4,
                reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, init_conv_filters=48,
                include_top=True, weights=None, input_tensor=None, classes=1, activation='softmax',
                upsampling_conv=128, upsampling_type='deconv'):
    '''Instantiate the DenseNet FCN architecture.
        Note that when using TensorFlow,
        for best performance you should set
        `image_data_format='channels_last'` in your Keras config
        at ~/.keras/keras.json.
        # Arguments
            nb_dense_block: number of dense blocks to add to end
            growth_rate: number of filters to add per dense block
            nb_layers_per_block: number of layers in each dense block.
                Can be a positive integer or a list.
                If positive integer, a set number of layers per dense block.
                If list, nb_layer is used as provided. Note that list size must
                be (nb_dense_block + 1)
            reduction: reduction factor of transition blocks.
                Note : reduction value is inverted to compute compression.
            dropout_rate: dropout rate
            init_conv_filters: number of filters in the initial convolution layer
            include_top: whether to include the fully-connected
                layer at the top of the network.
            weights: `None` (random initialization); no pretrained weights
                are provided for this architecture.
            input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
                to use as image input for the model.
            input_shape: shape tuple; must be supplied for fully
                convolutional models. It should have exactly 3 input channels,
                and width and height should be no smaller than
                2 ** nb_dense_block. E.g. `(224, 224, 3)` would be one
                valid value.
            classes: optional number of classes to classify images
                into, only to be specified if `include_top` is True, and
                if no `weights` argument is specified.
            activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'.
                Note that if sigmoid is used, classes must be 1.
            upsampling_conv: number of convolutional filters used in the
                upsampling path (must be a multiple of 4 and greater than 12)
            upsampling_type: Can be one of 'upsampling', 'deconv' and
                'subpixel'. Defines type of upsampling algorithm used.
        # Returns
            A Keras model instance.
    '''

    if weights is not None:
        raise ValueError('The `weights` argument should be '
                         '`None` (random initialization) as no '
                         'model weights are provided.')

    upsampling_type = upsampling_type.lower()

    if upsampling_type not in ['upsampling', 'deconv', 'subpixel']:
        raise ValueError('Parameter "upsampling_type" must be one of "upsampling", '
                         '"deconv" or "subpixel".')

    if input_shape is None:
        raise ValueError('For fully convolutional models, input shape must be supplied.')

    if type(nb_layers_per_block) is not list and nb_dense_block < 1:
        raise ValueError('Number of dense blocks must be greater than 0. '
                         'Argument value was %d.' % (nb_dense_block))

    if activation not in ['softmax', 'sigmoid']:
        raise ValueError('activation must be one of "softmax" or "sigmoid"')

    if activation == 'sigmoid' and classes != 1:
        raise ValueError('sigmoid activation can only be used when classes = 1')

    # Determine proper input shape
    min_size = 2 ** nb_dense_block

    if K.image_data_format() == 'channels_first':
        if input_shape is not None:
            if ((input_shape[1] is not None and input_shape[1] < min_size) or
                    (input_shape[2] is not None and input_shape[2] < min_size)):
                raise ValueError('Input size must be at least ' +
                                 str(min_size) + 'x' + str(min_size) +
                                 ', got `input_shape=' + str(input_shape) + '`')
        else:
            input_shape = (classes, None, None)
    else:
        if input_shape is not None:
            if ((input_shape[0] is not None and input_shape[0] < min_size) or
                    (input_shape[1] is not None and input_shape[1] < min_size)):
                raise ValueError('Input size must be at least ' +
                                 str(min_size) + 'x' + str(min_size) +
                                 ', got `input_shape=' + str(input_shape) + '`')
        else:
            input_shape = (None, None, classes)

    if input_tensor is None:
        img_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            img_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            img_input = input_tensor

    x = __create_fcn_dense_net(classes, img_input, include_top, nb_dense_block,
                               growth_rate, reduction, dropout_rate, weight_decay,
                               nb_layers_per_block, upsampling_conv, upsampling_type,
                               init_conv_filters, input_shape, activation)

    # Ensure that the model takes into account
    # any potential predecessors of `input_tensor`.
    if input_tensor is not None:
        inputs = get_source_inputs(input_tensor)
    else:
        inputs = img_input
    # Create model.
    model = Model(inputs, x, name='fcn-densenet')

    return model
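

# Sketch: a FC-DenseNet for dense prediction (assumed here: a 12-class
# segmentation task at 224x224; any spatial size divisible by
# 2 ** nb_dense_block works). Commented out to avoid building extra graphs:
# fcn = DenseNetFCN(input_shape=(224, 224, 3), nb_dense_block=5, growth_rate=16,
#                   nb_layers_per_block=4, classes=12, upsampling_type='deconv')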


def DenseNetImageNet121(input_shape=None,
                        bottleneck=True,
                        reduction=0.5,
                        dropout_rate=0.0,
                        weight_decay=1e-4,
                        include_top=True,
                        weights='imagenet',
                        input_tensor=None,
                        classes=1000,
                        activation='softmax'):
    return DenseNet(input_shape, depth=121, nb_dense_block=4, growth_rate=32, nb_filter=64,
                    nb_layers_per_block=[6, 12, 24, 16], bottleneck=bottleneck, reduction=reduction,
                    dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True,
                    include_top=include_top, weights=weights, input_tensor=input_tensor,
                    classes=classes, activation=activation)


def DenseNetImageNet169(input_shape=None,
                        bottleneck=True,
                        reduction=0.5,
                        dropout_rate=0.0,
                        weight_decay=1e-4,
                        include_top=True,
                        weights='imagenet',
                        input_tensor=None,
                        classes=1000,
                        activation='softmax'):
    return DenseNet(input_shape, depth=169, nb_dense_block=4, growth_rate=32, nb_filter=64,
                    nb_layers_per_block=[6, 12, 32, 32], bottleneck=bottleneck, reduction=reduction,
                    dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True,
                    include_top=include_top, weights=weights, input_tensor=input_tensor,
                    classes=classes, activation=activation)


def DenseNetImageNet201(input_shape=None,
                        bottleneck=True,
                        reduction=0.5,
                        dropout_rate=0.0,
                        weight_decay=1e-4,
                        include_top=True,
                        weights=None,
                        input_tensor=None,
                        classes=1000,
                        activation='softmax'):
    return DenseNet(input_shape, depth=201, nb_dense_block=4, growth_rate=32, nb_filter=64,
                    nb_layers_per_block=[6, 12, 48, 32], bottleneck=bottleneck, reduction=reduction,
                    dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True,
                    include_top=include_top, weights=weights, input_tensor=input_tensor,
                    classes=classes, activation=activation)


def DenseNetImageNet264(input_shape=None,
                        bottleneck=True,
                        reduction=0.5,
                        dropout_rate=0.0,
                        weight_decay=1e-4,
                        include_top=True,
                        weights=None,
                        input_tensor=None,
                        classes=1000,
                        activation='softmax'):
    return DenseNet(input_shape, depth=264, nb_dense_block=4, growth_rate=32, nb_filter=64,
                    nb_layers_per_block=[6, 12, 64, 48], bottleneck=bottleneck, reduction=reduction,
                    dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True,
                    include_top=include_top, weights=weights, input_tensor=input_tensor,
                    classes=classes, activation=activation)


def DenseNetImageNet161(input_shape=None,
                        bottleneck=True,
                        reduction=0.5,
                        dropout_rate=0.0,
                        weight_decay=1e-4,
                        include_top=True,
                        weights='imagenet',
                        input_tensor=None,
                        classes=1000,
                        activation='softmax'):
    return DenseNet(input_shape, depth=161, nb_dense_block=4, growth_rate=48, nb_filter=96,
                    nb_layers_per_block=[6, 12, 36, 24], bottleneck=bottleneck, reduction=reduction,
                    dropout_rate=dropout_rate, weight_decay=weight_decay, subsample_initial_block=True,
                    include_top=include_top, weights=weights, input_tensor=input_tensor,
                    classes=classes, activation=activation)


def __conv_block(ip, nb_filter, bottleneck=False, dropout_rate=None, weight_decay=1e-4):
    ''' Apply BatchNorm, ReLU, an optional 1x1 bottleneck Conv2D, a 3x3 Conv2D and optional dropout
    Args:
        ip: Input keras tensor
        nb_filter: number of filters
        bottleneck: add bottleneck block
        dropout_rate: dropout rate
        weight_decay: weight decay factor
    Returns: keras tensor with batch_norm, relu and convolution2d added (optional bottleneck)
    '''
    concat_axis = 1 if K.image_data_format() == 'channels_first' else -1

    x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip)
    x = Activation('relu')(x)

    if bottleneck:
        inter_channel = nb_filter * 4  # Obtained from https://github.com/liuzhuang13/DenseNet/blob/master/densenet.lua

        x = Conv2D(inter_channel, (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False,
                   kernel_regularizer=l2(weight_decay))(x)
        x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x)
        x = Activation('relu')(x)

    x = Conv2D(nb_filter, (3, 3), kernel_initializer='he_normal', padding='same', use_bias=False)(x)
    if dropout_rate:
        x = Dropout(dropout_rate)(x)

    return x
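

# For reference: with bottleneck=True and growth_rate k=32 this block is
# BN -> ReLU -> 1x1 Conv(4k=128) -> BN -> ReLU -> 3x3 Conv(k=32). From 64 input
# channels the 1x1 conv has 64*128 = 8192 weights and the 3x3 conv
# 3*3*128*32 = 36864, matching the parameter counts in the summary below.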


def __dense_block(x, nb_layers, nb_filter, growth_rate, bottleneck=False, dropout_rate=None, weight_decay=1e-4,
                  grow_nb_filters=True, return_concat_list=False):
    ''' Build a dense_block where the output of each conv_block is fed to subsequent ones
    Args:
        x: keras tensor
        nb_layers: the number of layers of conv_block to append to the model.
        nb_filter: number of filters
        growth_rate: growth rate
        bottleneck: bottleneck block
        dropout_rate: dropout rate
        weight_decay: weight decay factor
        grow_nb_filters: flag to decide to allow number of filters to grow
        return_concat_list: return the list of feature maps along with the actual output
    Returns: keras tensor with nb_layers of conv_block appended
    '''
    concat_axis = 1 if K.image_data_format() == 'channels_first' else -1

    x_list = [x]

    for i in range(nb_layers):
        cb = __conv_block(x, growth_rate, bottleneck, dropout_rate, weight_decay)
        x_list.append(cb)

        x = concatenate([x, cb], axis=concat_axis)

        if grow_nb_filters:
            nb_filter += growth_rate

    if return_concat_list:
        return x, nb_filter, x_list
    else:
        return x, nb_filter
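

# Channel arithmetic: each conv_block adds growth_rate feature maps, so a block
# with L layers maps c0 input channels to c0 + L * growth_rate. In DenseNet-121's
# first block: 64 + 6 * 32 = 256 channels (see concatenate_64 in the summary below).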


def __transition_block(ip, nb_filter, compression=1.0, weight_decay=1e-4):
    ''' Apply BatchNorm, ReLU, 1x1 Conv2D (with optional compression) and AveragePooling2D
    Args:
        ip: keras tensor
        nb_filter: number of filters
        compression: calculated as 1 - reduction. Reduces the number of feature maps
                    in the transition block.
        weight_decay: weight decay factor
    Returns: keras tensor, after applying batch_norm, relu, 1x1 conv and average pooling
    '''
    concat_axis = 1 if K.image_data_format() == 'channels_first' else -1

    x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(ip)
    x = Activation('relu')(x)
    x = Conv2D(int(nb_filter * compression), (1, 1), kernel_initializer='he_normal', padding='same', use_bias=False,
               kernel_regularizer=l2(weight_decay))(x)
    x = AveragePooling2D((2, 2), strides=(2, 2))(x)

    return x
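

# With reduction=0.5 (compression=0.5) the 1x1 conv halves the channel count and
# the 2x2 average pooling halves the spatial size, e.g. 56x56x256 -> 28x28x128
# between DenseNet-121's first and second dense blocks in the summary below.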


def __transition_up_block(ip, nb_filters, type='deconv', weight_decay=1E-4):
    ''' Upscaling block (factor = 2) via upsampling, subpixel convolution or transposed convolution
    Args:
        ip: keras tensor
        nb_filters: number of filters
        type: can be 'upsampling', 'subpixel', 'deconv'. Determines type of upsampling performed
        weight_decay: weight decay factor
    Returns: keras tensor, after applying upsampling operation.
    '''

    if type == 'upsampling':
        x = UpSampling2D()(ip)
    elif type == 'subpixel':
        x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay),
                   use_bias=False, kernel_initializer='he_normal')(ip)
        # NOTE: SubPixelUpscaling is not defined in this notebook; it is a custom
        # pixel-shuffle layer that must be provided separately (e.g. from the
        # repository the weight files above come from).
        x = SubPixelUpscaling(scale_factor=2)(x)
        x = Conv2D(nb_filters, (3, 3), activation='relu', padding='same', kernel_regularizer=l2(weight_decay),
                   use_bias=False, kernel_initializer='he_normal')(x)
    else:
        x = Conv2DTranspose(nb_filters, (3, 3), activation='relu', padding='same', strides=(2, 2),
                            kernel_initializer='he_normal', kernel_regularizer=l2(weight_decay))(ip)

    return x


def __create_dense_net(nb_classes, img_input, include_top, depth=40, nb_dense_block=3, growth_rate=12, nb_filter=-1,
                       nb_layers_per_block=-1, bottleneck=False, reduction=0.0, dropout_rate=None, weight_decay=1e-4,
                       subsample_initial_block=False, activation='softmax'):
    ''' Build the DenseNet model
    Args:
        nb_classes: number of classes
        img_input: input Keras tensor (output of `layers.Input()`)
        include_top: flag to include the final Dense layer
        depth: number of layers
        nb_dense_block: number of dense blocks to add to end (generally = 3)
        growth_rate: number of filters to add per dense block
        nb_filter: initial number of filters. Default -1 indicates initial number of filters is 2 * growth_rate
        nb_layers_per_block: number of layers in each dense block.
                Can be -1, a positive integer, or a list.
                If -1, calculates nb_layers_per_block from the depth of the network.
                If a positive integer, a fixed number of layers per dense block.
                If a list, nb_layers is used as provided. Note that the list
                length must equal nb_dense_block.
        bottleneck: add bottleneck blocks
        reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression
        dropout_rate: dropout rate
        weight_decay: weight decay rate
        subsample_initial_block: Set to True to subsample the initial convolution and
                add a MaxPool2D before the dense blocks are added.
        activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'.
                Note that if sigmoid is used, classes must be 1.
    Returns: keras tensor with the full DenseNet graph appended (classifier output if include_top is True)
    '''

    concat_axis = 1 if K.image_data_format() == 'channels_first' else -1

    if reduction != 0.0:
        assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0'

    # layers in each dense block
    if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple:
        nb_layers = list(nb_layers_per_block)  # Convert tuple to list

        assert len(nb_layers) == (nb_dense_block), 'If list, nb_layer is used as provided. ' \
                                                   'Note that list size must be (nb_dense_block)'
        final_nb_layer = nb_layers[-1]
        nb_layers = nb_layers[:-1]
    else:
        if nb_layers_per_block == -1:
            assert (depth - 4) % 3 == 0, 'Depth must be 3 N + 4 if nb_layers_per_block == -1'
            count = int((depth - 4) / 3)
            nb_layers = [count for _ in range(nb_dense_block)]
            final_nb_layer = count
        else:
            final_nb_layer = nb_layers_per_block
            nb_layers = [nb_layers_per_block] * nb_dense_block

    # compute initial nb_filter if -1, else accept users initial nb_filter
    if nb_filter <= 0:
        nb_filter = 2 * growth_rate

    # compute compression factor
    compression = 1.0 - reduction

    # Initial convolution
    if subsample_initial_block:
        initial_kernel = (7, 7)
        initial_strides = (2, 2)
    else:
        initial_kernel = (3, 3)
        initial_strides = (1, 1)

    x = Conv2D(nb_filter, initial_kernel, kernel_initializer='he_normal', padding='same',
               strides=initial_strides, use_bias=False, kernel_regularizer=l2(weight_decay))(img_input)

    if subsample_initial_block:
        x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x)
        x = Activation('relu')(x)
        x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)

    # Add dense blocks
    for block_idx in range(nb_dense_block - 1):
        x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, bottleneck=bottleneck,
                                     dropout_rate=dropout_rate, weight_decay=weight_decay)
        # add transition_block
        x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay)
        nb_filter = int(nb_filter * compression)

    # The last dense_block does not have a transition_block
    x, nb_filter = __dense_block(x, final_nb_layer, nb_filter, growth_rate, bottleneck=bottleneck,
                                 dropout_rate=dropout_rate, weight_decay=weight_decay)

    x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x)
    x = Activation('relu')(x)
    x = GlobalAveragePooling2D()(x)

    if include_top:
        x = Dense(nb_classes, activation=activation)(x)

    return x


def __create_fcn_dense_net(nb_classes, img_input, include_top, nb_dense_block=5, growth_rate=12,
                           reduction=0.0, dropout_rate=None, weight_decay=1e-4,
                           nb_layers_per_block=4, nb_upsampling_conv=128, upsampling_type='upsampling',
                           init_conv_filters=48, input_shape=None, activation='softmax'):
    ''' Build the DenseNet model
    Args:
        nb_classes: number of classes
        img_input: input Keras tensor (output of `layers.Input()`)
        include_top: flag to include the final Dense layer
        nb_dense_block: number of dense blocks to add to end
        growth_rate: number of filters to add per dense block
        reduction: reduction factor of transition blocks. Note : reduction value is inverted to compute compression
        dropout_rate: dropout rate
        weight_decay: weight decay
        nb_layers_per_block: number of layers in each dense block.
            Can be a positive integer or a list.
            If positive integer, a set number of layers per dense block.
            If list, nb_layer is used as provided. Note that list size must
            be (nb_dense_block + 1)
        nb_upsampling_conv: number of convolutional layers in upsampling via subpixel convolution
        upsampling_type: Can be one of 'upsampling', 'deconv' and 'subpixel'. Defines
            type of upsampling algorithm used.
        input_shape: Only used for shape inference in fully convolutional networks.
        activation: Type of activation at the top layer. Can be one of 'softmax' or 'sigmoid'.
                    Note that if sigmoid is used, classes must be 1.
    Returns: keras tensor with the full FCN-DenseNet graph appended
    '''

    concat_axis = 1 if K.image_data_format() == 'channels_first' else -1

    if concat_axis == 1:  # channels_first dim ordering
        _, rows, cols = input_shape
    else:
        rows, cols, _ = input_shape

    if reduction != 0.0:
        assert reduction <= 1.0 and reduction > 0.0, 'reduction value must lie between 0.0 and 1.0'

    # check if upsampling_conv has minimum number of filters
    # minimum is set to 12, as at least 3 color channels are needed for correct upsampling
    assert nb_upsampling_conv > 12 and nb_upsampling_conv % 4 == 0, 'Parameter `upsampling_conv` number of channels must ' \
                                                                    'be a positive number divisible by 4 and greater ' \
                                                                    'than 12'

    # layers in each dense block
    if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple:
        nb_layers = list(nb_layers_per_block)  # Convert tuple to list

        assert len(nb_layers) == (nb_dense_block + 1), 'If list, nb_layer is used as provided. ' \
                                                       'Note that list size must be (nb_dense_block + 1)'

        bottleneck_nb_layers = nb_layers[-1]
        rev_layers = nb_layers[::-1]
        nb_layers.extend(rev_layers[1:])
    else:
        bottleneck_nb_layers = nb_layers_per_block
        nb_layers = [nb_layers_per_block] * (2 * nb_dense_block + 1)

    # compute compression factor
    compression = 1.0 - reduction

    # Initial convolution
    x = Conv2D(init_conv_filters, (7, 7), kernel_initializer='he_normal', padding='same', name='initial_conv2D',
               use_bias=False, kernel_regularizer=l2(weight_decay))(img_input)
    x = BatchNormalization(axis=concat_axis, epsilon=1.1e-5)(x)
    x = Activation('relu')(x)

    nb_filter = init_conv_filters

    skip_list = []

    # Add dense blocks and transition down block
    for block_idx in range(nb_dense_block):
        x, nb_filter = __dense_block(x, nb_layers[block_idx], nb_filter, growth_rate, dropout_rate=dropout_rate,
                                     weight_decay=weight_decay)

        # Skip connection
        skip_list.append(x)

        # add transition_block
        x = __transition_block(x, nb_filter, compression=compression, weight_decay=weight_decay)

        nb_filter = int(nb_filter * compression)  # track the channel reduction applied in the transition block

    # The last dense_block does not have a transition_down_block
    # return the concatenated feature maps without the concatenation of the input
    _, nb_filter, concat_list = __dense_block(x, bottleneck_nb_layers, nb_filter, growth_rate,
                                              dropout_rate=dropout_rate, weight_decay=weight_decay,
                                              return_concat_list=True)

    skip_list = skip_list[::-1]  # reverse the skip list

    # Add dense blocks and transition up block
    for block_idx in range(nb_dense_block):
        n_filters_keep = growth_rate * nb_layers[nb_dense_block + block_idx]

        # upsampling block must upsample only the feature maps (concat_list[1:]),
        # not the concatenation of the input with the feature maps (concat_list[0]).
        l = concatenate(concat_list[1:], axis=concat_axis)

        t = __transition_up_block(l, nb_filters=n_filters_keep, type=upsampling_type, weight_decay=weight_decay)

        # concatenate the skip connection with the transition block
        x = concatenate([t, skip_list[block_idx]], axis=concat_axis)

        # Don't allow the number of feature maps to grow in the upsampling dense blocks
        x_up, nb_filter, concat_list = __dense_block(x, nb_layers[nb_dense_block + block_idx + 1], nb_filter=growth_rate,
                                                     growth_rate=growth_rate, dropout_rate=dropout_rate,
                                                     weight_decay=weight_decay, return_concat_list=True,
                                                     grow_nb_filters=False)

    if include_top:
        x = Conv2D(nb_classes, (1, 1), activation='linear', padding='same', use_bias=False)(x_up)

        if K.image_data_format() == 'channels_first':
            channel, row, col = input_shape
        else:
            row, col, channel = input_shape

        x = Reshape((row * col, nb_classes))(x)
        x = Activation(activation)(x)
        x = Reshape((row, col, nb_classes))(x)
    else:
        x = x_up

    return x


Using TensorFlow backend.
/home/leon/miniconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6
  return f(*args, **kwds)

In [8]:
input_shape = (224, 224, 3)

model = DenseNetImageNet121(input_shape=input_shape, include_top=True, weights='imagenet')


Weights for the model were loaded successfully
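
In [ ]:
# Sketch (not executed here): classify a single image with the loaded model.
# 'elephant.jpg' is a hypothetical file name; any RGB image works.
import numpy as np
from keras.preprocessing import image

img = image.load_img('elephant.jpg', target_size=(224, 224))
x = image.img_to_array(img)                      # float32 RGB array in [0, 255]
x = preprocess_input(np.expand_dims(x, axis=0))  # RGB->BGR, mean-subtract, scale
preds = model.predict(x)
print(decode_predictions(preds, top=3))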

In [9]:
model.summary()


__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_2 (InputLayer)            (None, 224, 224, 3)  0                                            
__________________________________________________________________________________________________
conv2d_121 (Conv2D)             (None, 112, 112, 64) 9408        input_2[0][0]                    
__________________________________________________________________________________________________
batch_normalization_122 (BatchN (None, 112, 112, 64) 256         conv2d_121[0][0]                 
__________________________________________________________________________________________________
activation_122 (Activation)     (None, 112, 112, 64) 0           batch_normalization_122[0][0]    
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D)  (None, 56, 56, 64)   0           activation_122[0][0]             
__________________________________________________________________________________________________
batch_normalization_123 (BatchN (None, 56, 56, 64)   256         max_pooling2d_2[0][0]            
__________________________________________________________________________________________________
activation_123 (Activation)     (None, 56, 56, 64)   0           batch_normalization_123[0][0]    
__________________________________________________________________________________________________
conv2d_122 (Conv2D)             (None, 56, 56, 128)  8192        activation_123[0][0]             
__________________________________________________________________________________________________
batch_normalization_124 (BatchN (None, 56, 56, 128)  512         conv2d_122[0][0]                 
__________________________________________________________________________________________________
activation_124 (Activation)     (None, 56, 56, 128)  0           batch_normalization_124[0][0]    
__________________________________________________________________________________________________
conv2d_123 (Conv2D)             (None, 56, 56, 32)   36864       activation_124[0][0]             
__________________________________________________________________________________________________
concatenate_59 (Concatenate)    (None, 56, 56, 96)   0           max_pooling2d_2[0][0]            
                                                                 conv2d_123[0][0]                 
__________________________________________________________________________________________________
batch_normalization_125 (BatchN (None, 56, 56, 96)   384         concatenate_59[0][0]             
__________________________________________________________________________________________________
activation_125 (Activation)     (None, 56, 56, 96)   0           batch_normalization_125[0][0]    
__________________________________________________________________________________________________
conv2d_124 (Conv2D)             (None, 56, 56, 128)  12288       activation_125[0][0]             
__________________________________________________________________________________________________
batch_normalization_126 (BatchN (None, 56, 56, 128)  512         conv2d_124[0][0]                 
__________________________________________________________________________________________________
activation_126 (Activation)     (None, 56, 56, 128)  0           batch_normalization_126[0][0]    
__________________________________________________________________________________________________
conv2d_125 (Conv2D)             (None, 56, 56, 32)   36864       activation_126[0][0]             
__________________________________________________________________________________________________
concatenate_60 (Concatenate)    (None, 56, 56, 128)  0           concatenate_59[0][0]             
                                                                 conv2d_125[0][0]                 
__________________________________________________________________________________________________
batch_normalization_127 (BatchN (None, 56, 56, 128)  512         concatenate_60[0][0]             
__________________________________________________________________________________________________
activation_127 (Activation)     (None, 56, 56, 128)  0           batch_normalization_127[0][0]    
__________________________________________________________________________________________________
conv2d_126 (Conv2D)             (None, 56, 56, 128)  16384       activation_127[0][0]             
__________________________________________________________________________________________________
batch_normalization_128 (BatchN (None, 56, 56, 128)  512         conv2d_126[0][0]                 
__________________________________________________________________________________________________
activation_128 (Activation)     (None, 56, 56, 128)  0           batch_normalization_128[0][0]    
__________________________________________________________________________________________________
conv2d_127 (Conv2D)             (None, 56, 56, 32)   36864       activation_128[0][0]             
__________________________________________________________________________________________________
concatenate_61 (Concatenate)    (None, 56, 56, 160)  0           concatenate_60[0][0]             
                                                                 conv2d_127[0][0]                 
__________________________________________________________________________________________________
batch_normalization_129 (BatchN (None, 56, 56, 160)  640         concatenate_61[0][0]             
__________________________________________________________________________________________________
activation_129 (Activation)     (None, 56, 56, 160)  0           batch_normalization_129[0][0]    
__________________________________________________________________________________________________
conv2d_128 (Conv2D)             (None, 56, 56, 128)  20480       activation_129[0][0]             
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 56, 56, 128)  512         conv2d_128[0][0]                 
__________________________________________________________________________________________________
activation_130 (Activation)     (None, 56, 56, 128)  0           batch_normalization_130[0][0]    
__________________________________________________________________________________________________
conv2d_129 (Conv2D)             (None, 56, 56, 32)   36864       activation_130[0][0]             
__________________________________________________________________________________________________
concatenate_62 (Concatenate)    (None, 56, 56, 192)  0           concatenate_61[0][0]             
                                                                 conv2d_129[0][0]                 
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 56, 56, 192)  768         concatenate_62[0][0]             
__________________________________________________________________________________________________
activation_131 (Activation)     (None, 56, 56, 192)  0           batch_normalization_131[0][0]    
__________________________________________________________________________________________________
conv2d_130 (Conv2D)             (None, 56, 56, 128)  24576       activation_131[0][0]             
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 56, 56, 128)  512         conv2d_130[0][0]                 
__________________________________________________________________________________________________
activation_132 (Activation)     (None, 56, 56, 128)  0           batch_normalization_132[0][0]    
__________________________________________________________________________________________________
conv2d_131 (Conv2D)             (None, 56, 56, 32)   36864       activation_132[0][0]             
__________________________________________________________________________________________________
concatenate_63 (Concatenate)    (None, 56, 56, 224)  0           concatenate_62[0][0]             
                                                                 conv2d_131[0][0]                 
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 56, 56, 224)  896         concatenate_63[0][0]             
__________________________________________________________________________________________________
activation_133 (Activation)     (None, 56, 56, 224)  0           batch_normalization_133[0][0]    
__________________________________________________________________________________________________
conv2d_132 (Conv2D)             (None, 56, 56, 128)  28672       activation_133[0][0]             
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 56, 56, 128)  512         conv2d_132[0][0]                 
__________________________________________________________________________________________________
activation_134 (Activation)     (None, 56, 56, 128)  0           batch_normalization_134[0][0]    
__________________________________________________________________________________________________
conv2d_133 (Conv2D)             (None, 56, 56, 32)   36864       activation_134[0][0]             
__________________________________________________________________________________________________
concatenate_64 (Concatenate)    (None, 56, 56, 256)  0           concatenate_63[0][0]             
                                                                 conv2d_133[0][0]                 
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 56, 56, 256)  1024        concatenate_64[0][0]             
__________________________________________________________________________________________________
activation_135 (Activation)     (None, 56, 56, 256)  0           batch_normalization_135[0][0]    
__________________________________________________________________________________________________
conv2d_134 (Conv2D)             (None, 56, 56, 128)  32768       activation_135[0][0]             
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 28, 28, 128)  0           conv2d_134[0][0]                 
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 28, 28, 128)  512         average_pooling2d_4[0][0]        
__________________________________________________________________________________________________
activation_136 (Activation)     (None, 28, 28, 128)  0           batch_normalization_136[0][0]    
__________________________________________________________________________________________________
conv2d_135 (Conv2D)             (None, 28, 28, 128)  16384       activation_136[0][0]             
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 28, 28, 128)  512         conv2d_135[0][0]                 
__________________________________________________________________________________________________
activation_137 (Activation)     (None, 28, 28, 128)  0           batch_normalization_137[0][0]    
__________________________________________________________________________________________________
conv2d_136 (Conv2D)             (None, 28, 28, 32)   36864       activation_137[0][0]             
__________________________________________________________________________________________________
concatenate_65 (Concatenate)    (None, 28, 28, 160)  0           average_pooling2d_4[0][0]        
                                                                 conv2d_136[0][0]                 
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 28, 28, 160)  640         concatenate_65[0][0]             
__________________________________________________________________________________________________
activation_138 (Activation)     (None, 28, 28, 160)  0           batch_normalization_138[0][0]    
__________________________________________________________________________________________________
conv2d_137 (Conv2D)             (None, 28, 28, 128)  20480       activation_138[0][0]             
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 28, 28, 128)  512         conv2d_137[0][0]                 
__________________________________________________________________________________________________
activation_139 (Activation)     (None, 28, 28, 128)  0           batch_normalization_139[0][0]    
__________________________________________________________________________________________________
conv2d_138 (Conv2D)             (None, 28, 28, 32)   36864       activation_139[0][0]             
__________________________________________________________________________________________________
concatenate_66 (Concatenate)    (None, 28, 28, 192)  0           concatenate_65[0][0]             
                                                                 conv2d_138[0][0]                 
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 28, 28, 192)  768         concatenate_66[0][0]             
__________________________________________________________________________________________________
activation_140 (Activation)     (None, 28, 28, 192)  0           batch_normalization_140[0][0]    
__________________________________________________________________________________________________
conv2d_139 (Conv2D)             (None, 28, 28, 128)  24576       activation_140[0][0]             
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 28, 28, 128)  512         conv2d_139[0][0]                 
__________________________________________________________________________________________________
activation_141 (Activation)     (None, 28, 28, 128)  0           batch_normalization_141[0][0]    
__________________________________________________________________________________________________
conv2d_140 (Conv2D)             (None, 28, 28, 32)   36864       activation_141[0][0]             
__________________________________________________________________________________________________
concatenate_67 (Concatenate)    (None, 28, 28, 224)  0           concatenate_66[0][0]             
                                                                 conv2d_140[0][0]                 
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 28, 28, 224)  896         concatenate_67[0][0]             
__________________________________________________________________________________________________
activation_142 (Activation)     (None, 28, 28, 224)  0           batch_normalization_142[0][0]    
__________________________________________________________________________________________________
conv2d_141 (Conv2D)             (None, 28, 28, 128)  28672       activation_142[0][0]             
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 28, 28, 128)  512         conv2d_141[0][0]                 
__________________________________________________________________________________________________
activation_143 (Activation)     (None, 28, 28, 128)  0           batch_normalization_143[0][0]    
__________________________________________________________________________________________________
conv2d_142 (Conv2D)             (None, 28, 28, 32)   36864       activation_143[0][0]             
__________________________________________________________________________________________________
concatenate_68 (Concatenate)    (None, 28, 28, 256)  0           concatenate_67[0][0]             
                                                                 conv2d_142[0][0]                 
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 28, 28, 256)  1024        concatenate_68[0][0]             
__________________________________________________________________________________________________
activation_144 (Activation)     (None, 28, 28, 256)  0           batch_normalization_144[0][0]    
__________________________________________________________________________________________________
conv2d_143 (Conv2D)             (None, 28, 28, 128)  32768       activation_144[0][0]             
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 28, 28, 128)  512         conv2d_143[0][0]                 
__________________________________________________________________________________________________
activation_145 (Activation)     (None, 28, 28, 128)  0           batch_normalization_145[0][0]    
__________________________________________________________________________________________________
conv2d_144 (Conv2D)             (None, 28, 28, 32)   36864       activation_145[0][0]             
__________________________________________________________________________________________________
concatenate_69 (Concatenate)    (None, 28, 28, 288)  0           concatenate_68[0][0]             
                                                                 conv2d_144[0][0]                 
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 28, 28, 288)  1152        concatenate_69[0][0]             
__________________________________________________________________________________________________
activation_146 (Activation)     (None, 28, 28, 288)  0           batch_normalization_146[0][0]    
__________________________________________________________________________________________________
conv2d_145 (Conv2D)             (None, 28, 28, 128)  36864       activation_146[0][0]             
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 28, 28, 128)  512         conv2d_145[0][0]                 
__________________________________________________________________________________________________
activation_147 (Activation)     (None, 28, 28, 128)  0           batch_normalization_147[0][0]    
__________________________________________________________________________________________________
conv2d_146 (Conv2D)             (None, 28, 28, 32)   36864       activation_147[0][0]             
__________________________________________________________________________________________________
concatenate_70 (Concatenate)    (None, 28, 28, 320)  0           concatenate_69[0][0]             
                                                                 conv2d_146[0][0]                 
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 28, 28, 320)  1280        concatenate_70[0][0]             
__________________________________________________________________________________________________
activation_148 (Activation)     (None, 28, 28, 320)  0           batch_normalization_148[0][0]    
__________________________________________________________________________________________________
conv2d_147 (Conv2D)             (None, 28, 28, 128)  40960       activation_148[0][0]             
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 28, 28, 128)  512         conv2d_147[0][0]                 
__________________________________________________________________________________________________
activation_149 (Activation)     (None, 28, 28, 128)  0           batch_normalization_149[0][0]    
__________________________________________________________________________________________________
conv2d_148 (Conv2D)             (None, 28, 28, 32)   36864       activation_149[0][0]             
__________________________________________________________________________________________________
concatenate_71 (Concatenate)    (None, 28, 28, 352)  0           concatenate_70[0][0]             
                                                                 conv2d_148[0][0]                 
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 28, 28, 352)  1408        concatenate_71[0][0]             
__________________________________________________________________________________________________
activation_150 (Activation)     (None, 28, 28, 352)  0           batch_normalization_150[0][0]    
__________________________________________________________________________________________________
conv2d_149 (Conv2D)             (None, 28, 28, 128)  45056       activation_150[0][0]             
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 28, 28, 128)  512         conv2d_149[0][0]                 
__________________________________________________________________________________________________
activation_151 (Activation)     (None, 28, 28, 128)  0           batch_normalization_151[0][0]    
__________________________________________________________________________________________________
conv2d_150 (Conv2D)             (None, 28, 28, 32)   36864       activation_151[0][0]             
__________________________________________________________________________________________________
concatenate_72 (Concatenate)    (None, 28, 28, 384)  0           concatenate_71[0][0]             
                                                                 conv2d_150[0][0]                 
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 28, 28, 384)  1536        concatenate_72[0][0]             
__________________________________________________________________________________________________
activation_152 (Activation)     (None, 28, 28, 384)  0           batch_normalization_152[0][0]    
__________________________________________________________________________________________________
conv2d_151 (Conv2D)             (None, 28, 28, 128)  49152       activation_152[0][0]             
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 28, 28, 128)  512         conv2d_151[0][0]                 
__________________________________________________________________________________________________
activation_153 (Activation)     (None, 28, 28, 128)  0           batch_normalization_153[0][0]    
__________________________________________________________________________________________________
conv2d_152 (Conv2D)             (None, 28, 28, 32)   36864       activation_153[0][0]             
__________________________________________________________________________________________________
concatenate_73 (Concatenate)    (None, 28, 28, 416)  0           concatenate_72[0][0]             
                                                                 conv2d_152[0][0]                 
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 28, 28, 416)  1664        concatenate_73[0][0]             
__________________________________________________________________________________________________
activation_154 (Activation)     (None, 28, 28, 416)  0           batch_normalization_154[0][0]    
__________________________________________________________________________________________________
conv2d_153 (Conv2D)             (None, 28, 28, 128)  53248       activation_154[0][0]             
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 28, 28, 128)  512         conv2d_153[0][0]                 
__________________________________________________________________________________________________
activation_155 (Activation)     (None, 28, 28, 128)  0           batch_normalization_155[0][0]    
__________________________________________________________________________________________________
conv2d_154 (Conv2D)             (None, 28, 28, 32)   36864       activation_155[0][0]             
__________________________________________________________________________________________________
concatenate_74 (Concatenate)    (None, 28, 28, 448)  0           concatenate_73[0][0]             
                                                                 conv2d_154[0][0]                 
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 28, 28, 448)  1792        concatenate_74[0][0]             
__________________________________________________________________________________________________
activation_156 (Activation)     (None, 28, 28, 448)  0           batch_normalization_156[0][0]    
__________________________________________________________________________________________________
conv2d_155 (Conv2D)             (None, 28, 28, 128)  57344       activation_156[0][0]             
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 28, 28, 128)  512         conv2d_155[0][0]                 
__________________________________________________________________________________________________
activation_157 (Activation)     (None, 28, 28, 128)  0           batch_normalization_157[0][0]    
__________________________________________________________________________________________________
conv2d_156 (Conv2D)             (None, 28, 28, 32)   36864       activation_157[0][0]             
__________________________________________________________________________________________________
concatenate_75 (Concatenate)    (None, 28, 28, 480)  0           concatenate_74[0][0]             
                                                                 conv2d_156[0][0]                 
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 28, 28, 480)  1920        concatenate_75[0][0]             
__________________________________________________________________________________________________
activation_158 (Activation)     (None, 28, 28, 480)  0           batch_normalization_158[0][0]    
__________________________________________________________________________________________________
conv2d_157 (Conv2D)             (None, 28, 28, 128)  61440       activation_158[0][0]             
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 28, 28, 128)  512         conv2d_157[0][0]                 
__________________________________________________________________________________________________
activation_159 (Activation)     (None, 28, 28, 128)  0           batch_normalization_159[0][0]    
__________________________________________________________________________________________________
conv2d_158 (Conv2D)             (None, 28, 28, 32)   36864       activation_159[0][0]             
__________________________________________________________________________________________________
concatenate_76 (Concatenate)    (None, 28, 28, 512)  0           concatenate_75[0][0]             
                                                                 conv2d_158[0][0]                 
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 28, 28, 512)  2048        concatenate_76[0][0]             
__________________________________________________________________________________________________
activation_160 (Activation)     (None, 28, 28, 512)  0           batch_normalization_160[0][0]    
__________________________________________________________________________________________________
conv2d_159 (Conv2D)             (None, 28, 28, 256)  131072      activation_160[0][0]             
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 14, 14, 256)  0           conv2d_159[0][0]                 
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 14, 14, 256)  1024        average_pooling2d_5[0][0]        
__________________________________________________________________________________________________
activation_161 (Activation)     (None, 14, 14, 256)  0           batch_normalization_161[0][0]    
__________________________________________________________________________________________________
conv2d_160 (Conv2D)             (None, 14, 14, 128)  32768       activation_161[0][0]             
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 14, 14, 128)  512         conv2d_160[0][0]                 
__________________________________________________________________________________________________
activation_162 (Activation)     (None, 14, 14, 128)  0           batch_normalization_162[0][0]    
__________________________________________________________________________________________________
conv2d_161 (Conv2D)             (None, 14, 14, 32)   36864       activation_162[0][0]             
__________________________________________________________________________________________________
concatenate_77 (Concatenate)    (None, 14, 14, 288)  0           average_pooling2d_5[0][0]        
                                                                 conv2d_161[0][0]                 
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 14, 14, 288)  1152        concatenate_77[0][0]             
__________________________________________________________________________________________________
activation_163 (Activation)     (None, 14, 14, 288)  0           batch_normalization_163[0][0]    
__________________________________________________________________________________________________
conv2d_162 (Conv2D)             (None, 14, 14, 128)  36864       activation_163[0][0]             
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 14, 14, 128)  512         conv2d_162[0][0]                 
__________________________________________________________________________________________________
activation_164 (Activation)     (None, 14, 14, 128)  0           batch_normalization_164[0][0]    
__________________________________________________________________________________________________
conv2d_163 (Conv2D)             (None, 14, 14, 32)   36864       activation_164[0][0]             
__________________________________________________________________________________________________
concatenate_78 (Concatenate)    (None, 14, 14, 320)  0           concatenate_77[0][0]             
                                                                 conv2d_163[0][0]                 
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 14, 14, 320)  1280        concatenate_78[0][0]             
__________________________________________________________________________________________________
activation_165 (Activation)     (None, 14, 14, 320)  0           batch_normalization_165[0][0]    
__________________________________________________________________________________________________
conv2d_164 (Conv2D)             (None, 14, 14, 128)  40960       activation_165[0][0]             
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 14, 14, 128)  512         conv2d_164[0][0]                 
__________________________________________________________________________________________________
activation_166 (Activation)     (None, 14, 14, 128)  0           batch_normalization_166[0][0]    
__________________________________________________________________________________________________
conv2d_165 (Conv2D)             (None, 14, 14, 32)   36864       activation_166[0][0]             
__________________________________________________________________________________________________
concatenate_79 (Concatenate)    (None, 14, 14, 352)  0           concatenate_78[0][0]             
                                                                 conv2d_165[0][0]                 
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 14, 14, 352)  1408        concatenate_79[0][0]             
__________________________________________________________________________________________________
activation_167 (Activation)     (None, 14, 14, 352)  0           batch_normalization_167[0][0]    
__________________________________________________________________________________________________
conv2d_166 (Conv2D)             (None, 14, 14, 128)  45056       activation_167[0][0]             
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 14, 14, 128)  512         conv2d_166[0][0]                 
__________________________________________________________________________________________________
activation_168 (Activation)     (None, 14, 14, 128)  0           batch_normalization_168[0][0]    
__________________________________________________________________________________________________
conv2d_167 (Conv2D)             (None, 14, 14, 32)   36864       activation_168[0][0]             
__________________________________________________________________________________________________
concatenate_80 (Concatenate)    (None, 14, 14, 384)  0           concatenate_79[0][0]             
                                                                 conv2d_167[0][0]                 
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 14, 14, 384)  1536        concatenate_80[0][0]             
__________________________________________________________________________________________________
activation_169 (Activation)     (None, 14, 14, 384)  0           batch_normalization_169[0][0]    
__________________________________________________________________________________________________
conv2d_168 (Conv2D)             (None, 14, 14, 128)  49152       activation_169[0][0]             
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 14, 14, 128)  512         conv2d_168[0][0]                 
__________________________________________________________________________________________________
activation_170 (Activation)     (None, 14, 14, 128)  0           batch_normalization_170[0][0]    
__________________________________________________________________________________________________
conv2d_169 (Conv2D)             (None, 14, 14, 32)   36864       activation_170[0][0]             
__________________________________________________________________________________________________
concatenate_81 (Concatenate)    (None, 14, 14, 416)  0           concatenate_80[0][0]             
                                                                 conv2d_169[0][0]                 
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 14, 14, 416)  1664        concatenate_81[0][0]             
__________________________________________________________________________________________________
activation_171 (Activation)     (None, 14, 14, 416)  0           batch_normalization_171[0][0]    
__________________________________________________________________________________________________
conv2d_170 (Conv2D)             (None, 14, 14, 128)  53248       activation_171[0][0]             
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 14, 14, 128)  512         conv2d_170[0][0]                 
__________________________________________________________________________________________________
activation_172 (Activation)     (None, 14, 14, 128)  0           batch_normalization_172[0][0]    
__________________________________________________________________________________________________
conv2d_171 (Conv2D)             (None, 14, 14, 32)   36864       activation_172[0][0]             
__________________________________________________________________________________________________
concatenate_82 (Concatenate)    (None, 14, 14, 448)  0           concatenate_81[0][0]             
                                                                 conv2d_171[0][0]                 
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 14, 14, 448)  1792        concatenate_82[0][0]             
__________________________________________________________________________________________________
activation_173 (Activation)     (None, 14, 14, 448)  0           batch_normalization_173[0][0]    
__________________________________________________________________________________________________
conv2d_172 (Conv2D)             (None, 14, 14, 128)  57344       activation_173[0][0]             
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 14, 14, 128)  512         conv2d_172[0][0]                 
__________________________________________________________________________________________________
activation_174 (Activation)     (None, 14, 14, 128)  0           batch_normalization_174[0][0]    
__________________________________________________________________________________________________
conv2d_173 (Conv2D)             (None, 14, 14, 32)   36864       activation_174[0][0]             
__________________________________________________________________________________________________
concatenate_83 (Concatenate)    (None, 14, 14, 480)  0           concatenate_82[0][0]             
                                                                 conv2d_173[0][0]                 
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 14, 14, 480)  1920        concatenate_83[0][0]             
__________________________________________________________________________________________________
activation_175 (Activation)     (None, 14, 14, 480)  0           batch_normalization_175[0][0]    
__________________________________________________________________________________________________
conv2d_174 (Conv2D)             (None, 14, 14, 128)  61440       activation_175[0][0]             
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 14, 14, 128)  512         conv2d_174[0][0]                 
__________________________________________________________________________________________________
activation_176 (Activation)     (None, 14, 14, 128)  0           batch_normalization_176[0][0]    
__________________________________________________________________________________________________
conv2d_175 (Conv2D)             (None, 14, 14, 32)   36864       activation_176[0][0]             
__________________________________________________________________________________________________
concatenate_84 (Concatenate)    (None, 14, 14, 512)  0           concatenate_83[0][0]             
                                                                 conv2d_175[0][0]                 
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 14, 14, 512)  2048        concatenate_84[0][0]             
__________________________________________________________________________________________________
activation_177 (Activation)     (None, 14, 14, 512)  0           batch_normalization_177[0][0]    
__________________________________________________________________________________________________
conv2d_176 (Conv2D)             (None, 14, 14, 128)  65536       activation_177[0][0]             
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 14, 14, 128)  512         conv2d_176[0][0]                 
__________________________________________________________________________________________________
activation_178 (Activation)     (None, 14, 14, 128)  0           batch_normalization_178[0][0]    
__________________________________________________________________________________________________
conv2d_177 (Conv2D)             (None, 14, 14, 32)   36864       activation_178[0][0]             
__________________________________________________________________________________________________
concatenate_85 (Concatenate)    (None, 14, 14, 544)  0           concatenate_84[0][0]             
                                                                 conv2d_177[0][0]                 
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 14, 14, 544)  2176        concatenate_85[0][0]             
__________________________________________________________________________________________________
activation_179 (Activation)     (None, 14, 14, 544)  0           batch_normalization_179[0][0]    
__________________________________________________________________________________________________
conv2d_178 (Conv2D)             (None, 14, 14, 128)  69632       activation_179[0][0]             
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 14, 14, 128)  512         conv2d_178[0][0]                 
__________________________________________________________________________________________________
activation_180 (Activation)     (None, 14, 14, 128)  0           batch_normalization_180[0][0]    
__________________________________________________________________________________________________
conv2d_179 (Conv2D)             (None, 14, 14, 32)   36864       activation_180[0][0]             
__________________________________________________________________________________________________
concatenate_86 (Concatenate)    (None, 14, 14, 576)  0           concatenate_85[0][0]             
                                                                 conv2d_179[0][0]                 
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 14, 14, 576)  2304        concatenate_86[0][0]             
__________________________________________________________________________________________________
activation_181 (Activation)     (None, 14, 14, 576)  0           batch_normalization_181[0][0]    
__________________________________________________________________________________________________
conv2d_180 (Conv2D)             (None, 14, 14, 128)  73728       activation_181[0][0]             
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 14, 14, 128)  512         conv2d_180[0][0]                 
__________________________________________________________________________________________________
activation_182 (Activation)     (None, 14, 14, 128)  0           batch_normalization_182[0][0]    
__________________________________________________________________________________________________
conv2d_181 (Conv2D)             (None, 14, 14, 32)   36864       activation_182[0][0]             
__________________________________________________________________________________________________
concatenate_87 (Concatenate)    (None, 14, 14, 608)  0           concatenate_86[0][0]             
                                                                 conv2d_181[0][0]                 
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 14, 14, 608)  2432        concatenate_87[0][0]             
__________________________________________________________________________________________________
activation_183 (Activation)     (None, 14, 14, 608)  0           batch_normalization_183[0][0]    
__________________________________________________________________________________________________
conv2d_182 (Conv2D)             (None, 14, 14, 128)  77824       activation_183[0][0]             
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 14, 14, 128)  512         conv2d_182[0][0]                 
__________________________________________________________________________________________________
activation_184 (Activation)     (None, 14, 14, 128)  0           batch_normalization_184[0][0]    
__________________________________________________________________________________________________
conv2d_183 (Conv2D)             (None, 14, 14, 32)   36864       activation_184[0][0]             
__________________________________________________________________________________________________
concatenate_88 (Concatenate)    (None, 14, 14, 640)  0           concatenate_87[0][0]             
                                                                 conv2d_183[0][0]                 
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 14, 14, 640)  2560        concatenate_88[0][0]             
__________________________________________________________________________________________________
activation_185 (Activation)     (None, 14, 14, 640)  0           batch_normalization_185[0][0]    
__________________________________________________________________________________________________
conv2d_184 (Conv2D)             (None, 14, 14, 128)  81920       activation_185[0][0]             
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 14, 14, 128)  512         conv2d_184[0][0]                 
__________________________________________________________________________________________________
activation_186 (Activation)     (None, 14, 14, 128)  0           batch_normalization_186[0][0]    
__________________________________________________________________________________________________
conv2d_185 (Conv2D)             (None, 14, 14, 32)   36864       activation_186[0][0]             
__________________________________________________________________________________________________
concatenate_89 (Concatenate)    (None, 14, 14, 672)  0           concatenate_88[0][0]             
                                                                 conv2d_185[0][0]                 
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 14, 14, 672)  2688        concatenate_89[0][0]             
__________________________________________________________________________________________________
activation_187 (Activation)     (None, 14, 14, 672)  0           batch_normalization_187[0][0]    
__________________________________________________________________________________________________
conv2d_186 (Conv2D)             (None, 14, 14, 128)  86016       activation_187[0][0]             
__________________________________________________________________________________________________
batch_normalization_188 (BatchN (None, 14, 14, 128)  512         conv2d_186[0][0]                 
__________________________________________________________________________________________________
activation_188 (Activation)     (None, 14, 14, 128)  0           batch_normalization_188[0][0]    
__________________________________________________________________________________________________
conv2d_187 (Conv2D)             (None, 14, 14, 32)   36864       activation_188[0][0]             
__________________________________________________________________________________________________
concatenate_90 (Concatenate)    (None, 14, 14, 704)  0           concatenate_89[0][0]             
                                                                 conv2d_187[0][0]                 
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 14, 14, 704)  2816        concatenate_90[0][0]             
__________________________________________________________________________________________________
activation_189 (Activation)     (None, 14, 14, 704)  0           batch_normalization_189[0][0]    
__________________________________________________________________________________________________
conv2d_188 (Conv2D)             (None, 14, 14, 128)  90112       activation_189[0][0]             
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 14, 14, 128)  512         conv2d_188[0][0]                 
__________________________________________________________________________________________________
activation_190 (Activation)     (None, 14, 14, 128)  0           batch_normalization_190[0][0]    
__________________________________________________________________________________________________
conv2d_189 (Conv2D)             (None, 14, 14, 32)   36864       activation_190[0][0]             
__________________________________________________________________________________________________
concatenate_91 (Concatenate)    (None, 14, 14, 736)  0           concatenate_90[0][0]             
                                                                 conv2d_189[0][0]                 
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 14, 14, 736)  2944        concatenate_91[0][0]             
__________________________________________________________________________________________________
activation_191 (Activation)     (None, 14, 14, 736)  0           batch_normalization_191[0][0]    
__________________________________________________________________________________________________
conv2d_190 (Conv2D)             (None, 14, 14, 128)  94208       activation_191[0][0]             
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 14, 14, 128)  512         conv2d_190[0][0]                 
__________________________________________________________________________________________________
activation_192 (Activation)     (None, 14, 14, 128)  0           batch_normalization_192[0][0]    
__________________________________________________________________________________________________
conv2d_191 (Conv2D)             (None, 14, 14, 32)   36864       activation_192[0][0]             
__________________________________________________________________________________________________
concatenate_92 (Concatenate)    (None, 14, 14, 768)  0           concatenate_91[0][0]             
                                                                 conv2d_191[0][0]                 
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 14, 14, 768)  3072        concatenate_92[0][0]             
__________________________________________________________________________________________________
activation_193 (Activation)     (None, 14, 14, 768)  0           batch_normalization_193[0][0]    
__________________________________________________________________________________________________
conv2d_192 (Conv2D)             (None, 14, 14, 128)  98304       activation_193[0][0]             
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 14, 14, 128)  512         conv2d_192[0][0]                 
__________________________________________________________________________________________________
activation_194 (Activation)     (None, 14, 14, 128)  0           batch_normalization_194[0][0]    
__________________________________________________________________________________________________
conv2d_193 (Conv2D)             (None, 14, 14, 32)   36864       activation_194[0][0]             
__________________________________________________________________________________________________
concatenate_93 (Concatenate)    (None, 14, 14, 800)  0           concatenate_92[0][0]             
                                                                 conv2d_193[0][0]                 
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 14, 14, 800)  3200        concatenate_93[0][0]             
__________________________________________________________________________________________________
activation_195 (Activation)     (None, 14, 14, 800)  0           batch_normalization_195[0][0]    
__________________________________________________________________________________________________
conv2d_194 (Conv2D)             (None, 14, 14, 128)  102400      activation_195[0][0]             
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 14, 14, 128)  512         conv2d_194[0][0]                 
__________________________________________________________________________________________________
activation_196 (Activation)     (None, 14, 14, 128)  0           batch_normalization_196[0][0]    
__________________________________________________________________________________________________
conv2d_195 (Conv2D)             (None, 14, 14, 32)   36864       activation_196[0][0]             
__________________________________________________________________________________________________
concatenate_94 (Concatenate)    (None, 14, 14, 832)  0           concatenate_93[0][0]             
                                                                 conv2d_195[0][0]                 
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 14, 14, 832)  3328        concatenate_94[0][0]             
__________________________________________________________________________________________________
activation_197 (Activation)     (None, 14, 14, 832)  0           batch_normalization_197[0][0]    
__________________________________________________________________________________________________
conv2d_196 (Conv2D)             (None, 14, 14, 128)  106496      activation_197[0][0]             
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 14, 14, 128)  512         conv2d_196[0][0]                 
__________________________________________________________________________________________________
activation_198 (Activation)     (None, 14, 14, 128)  0           batch_normalization_198[0][0]    
__________________________________________________________________________________________________
conv2d_197 (Conv2D)             (None, 14, 14, 32)   36864       activation_198[0][0]             
__________________________________________________________________________________________________
concatenate_95 (Concatenate)    (None, 14, 14, 864)  0           concatenate_94[0][0]             
                                                                 conv2d_197[0][0]                 
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 14, 14, 864)  3456        concatenate_95[0][0]             
__________________________________________________________________________________________________
activation_199 (Activation)     (None, 14, 14, 864)  0           batch_normalization_199[0][0]    
__________________________________________________________________________________________________
conv2d_198 (Conv2D)             (None, 14, 14, 128)  110592      activation_199[0][0]             
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 14, 14, 128)  512         conv2d_198[0][0]                 
__________________________________________________________________________________________________
activation_200 (Activation)     (None, 14, 14, 128)  0           batch_normalization_200[0][0]    
__________________________________________________________________________________________________
conv2d_199 (Conv2D)             (None, 14, 14, 32)   36864       activation_200[0][0]             
__________________________________________________________________________________________________
concatenate_96 (Concatenate)    (None, 14, 14, 896)  0           concatenate_95[0][0]             
                                                                 conv2d_199[0][0]                 
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 14, 14, 896)  3584        concatenate_96[0][0]             
__________________________________________________________________________________________________
activation_201 (Activation)     (None, 14, 14, 896)  0           batch_normalization_201[0][0]    
__________________________________________________________________________________________________
conv2d_200 (Conv2D)             (None, 14, 14, 128)  114688      activation_201[0][0]             
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 14, 14, 128)  512         conv2d_200[0][0]                 
__________________________________________________________________________________________________
activation_202 (Activation)     (None, 14, 14, 128)  0           batch_normalization_202[0][0]    
__________________________________________________________________________________________________
conv2d_201 (Conv2D)             (None, 14, 14, 32)   36864       activation_202[0][0]             
__________________________________________________________________________________________________
concatenate_97 (Concatenate)    (None, 14, 14, 928)  0           concatenate_96[0][0]             
                                                                 conv2d_201[0][0]                 
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 14, 14, 928)  3712        concatenate_97[0][0]             
__________________________________________________________________________________________________
activation_203 (Activation)     (None, 14, 14, 928)  0           batch_normalization_203[0][0]    
__________________________________________________________________________________________________
conv2d_202 (Conv2D)             (None, 14, 14, 128)  118784      activation_203[0][0]             
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 14, 14, 128)  512         conv2d_202[0][0]                 
__________________________________________________________________________________________________
activation_204 (Activation)     (None, 14, 14, 128)  0           batch_normalization_204[0][0]    
__________________________________________________________________________________________________
conv2d_203 (Conv2D)             (None, 14, 14, 32)   36864       activation_204[0][0]             
__________________________________________________________________________________________________
concatenate_98 (Concatenate)    (None, 14, 14, 960)  0           concatenate_97[0][0]             
                                                                 conv2d_203[0][0]                 
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 14, 14, 960)  3840        concatenate_98[0][0]             
__________________________________________________________________________________________________
activation_205 (Activation)     (None, 14, 14, 960)  0           batch_normalization_205[0][0]    
__________________________________________________________________________________________________
conv2d_204 (Conv2D)             (None, 14, 14, 128)  122880      activation_205[0][0]             
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 14, 14, 128)  512         conv2d_204[0][0]                 
__________________________________________________________________________________________________
activation_206 (Activation)     (None, 14, 14, 128)  0           batch_normalization_206[0][0]    
__________________________________________________________________________________________________
conv2d_205 (Conv2D)             (None, 14, 14, 32)   36864       activation_206[0][0]             
__________________________________________________________________________________________________
concatenate_99 (Concatenate)    (None, 14, 14, 992)  0           concatenate_98[0][0]             
                                                                 conv2d_205[0][0]                 
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 14, 14, 992)  3968        concatenate_99[0][0]             
__________________________________________________________________________________________________
activation_207 (Activation)     (None, 14, 14, 992)  0           batch_normalization_207[0][0]    
__________________________________________________________________________________________________
conv2d_206 (Conv2D)             (None, 14, 14, 128)  126976      activation_207[0][0]             
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 14, 14, 128)  512         conv2d_206[0][0]                 
__________________________________________________________________________________________________
activation_208 (Activation)     (None, 14, 14, 128)  0           batch_normalization_208[0][0]    
__________________________________________________________________________________________________
conv2d_207 (Conv2D)             (None, 14, 14, 32)   36864       activation_208[0][0]             
__________________________________________________________________________________________________
concatenate_100 (Concatenate)   (None, 14, 14, 1024) 0           concatenate_99[0][0]             
                                                                 conv2d_207[0][0]                 
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 14, 14, 1024) 4096        concatenate_100[0][0]            
__________________________________________________________________________________________________
activation_209 (Activation)     (None, 14, 14, 1024) 0           batch_normalization_209[0][0]    
__________________________________________________________________________________________________
conv2d_208 (Conv2D)             (None, 14, 14, 512)  524288      activation_209[0][0]             
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 7, 7, 512)    0           conv2d_208[0][0]                 
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 7, 7, 512)    2048        average_pooling2d_6[0][0]        
__________________________________________________________________________________________________
activation_210 (Activation)     (None, 7, 7, 512)    0           batch_normalization_210[0][0]    
__________________________________________________________________________________________________
conv2d_209 (Conv2D)             (None, 7, 7, 128)    65536       activation_210[0][0]             
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 7, 7, 128)    512         conv2d_209[0][0]                 
__________________________________________________________________________________________________
activation_211 (Activation)     (None, 7, 7, 128)    0           batch_normalization_211[0][0]    
__________________________________________________________________________________________________
conv2d_210 (Conv2D)             (None, 7, 7, 32)     36864       activation_211[0][0]             
__________________________________________________________________________________________________
concatenate_101 (Concatenate)   (None, 7, 7, 544)    0           average_pooling2d_6[0][0]        
                                                                 conv2d_210[0][0]                 
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 7, 7, 544)    2176        concatenate_101[0][0]            
__________________________________________________________________________________________________
activation_212 (Activation)     (None, 7, 7, 544)    0           batch_normalization_212[0][0]    
__________________________________________________________________________________________________
conv2d_211 (Conv2D)             (None, 7, 7, 128)    69632       activation_212[0][0]             
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 7, 7, 128)    512         conv2d_211[0][0]                 
__________________________________________________________________________________________________
activation_213 (Activation)     (None, 7, 7, 128)    0           batch_normalization_213[0][0]    
__________________________________________________________________________________________________
conv2d_212 (Conv2D)             (None, 7, 7, 32)     36864       activation_213[0][0]             
__________________________________________________________________________________________________
concatenate_102 (Concatenate)   (None, 7, 7, 576)    0           concatenate_101[0][0]            
                                                                 conv2d_212[0][0]                 
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 7, 7, 576)    2304        concatenate_102[0][0]            
__________________________________________________________________________________________________
activation_214 (Activation)     (None, 7, 7, 576)    0           batch_normalization_214[0][0]    
__________________________________________________________________________________________________
conv2d_213 (Conv2D)             (None, 7, 7, 128)    73728       activation_214[0][0]             
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 7, 7, 128)    512         conv2d_213[0][0]                 
__________________________________________________________________________________________________
activation_215 (Activation)     (None, 7, 7, 128)    0           batch_normalization_215[0][0]    
__________________________________________________________________________________________________
conv2d_214 (Conv2D)             (None, 7, 7, 32)     36864       activation_215[0][0]             
__________________________________________________________________________________________________
concatenate_103 (Concatenate)   (None, 7, 7, 608)    0           concatenate_102[0][0]            
                                                                 conv2d_214[0][0]                 
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 7, 7, 608)    2432        concatenate_103[0][0]            
__________________________________________________________________________________________________
activation_216 (Activation)     (None, 7, 7, 608)    0           batch_normalization_216[0][0]    
__________________________________________________________________________________________________
conv2d_215 (Conv2D)             (None, 7, 7, 128)    77824       activation_216[0][0]             
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 7, 7, 128)    512         conv2d_215[0][0]                 
__________________________________________________________________________________________________
activation_217 (Activation)     (None, 7, 7, 128)    0           batch_normalization_217[0][0]    
__________________________________________________________________________________________________
conv2d_216 (Conv2D)             (None, 7, 7, 32)     36864       activation_217[0][0]             
__________________________________________________________________________________________________
concatenate_104 (Concatenate)   (None, 7, 7, 640)    0           concatenate_103[0][0]            
                                                                 conv2d_216[0][0]                 
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 7, 7, 640)    2560        concatenate_104[0][0]            
__________________________________________________________________________________________________
activation_218 (Activation)     (None, 7, 7, 640)    0           batch_normalization_218[0][0]    
__________________________________________________________________________________________________
conv2d_217 (Conv2D)             (None, 7, 7, 128)    81920       activation_218[0][0]             
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 7, 7, 128)    512         conv2d_217[0][0]                 
__________________________________________________________________________________________________
activation_219 (Activation)     (None, 7, 7, 128)    0           batch_normalization_219[0][0]    
__________________________________________________________________________________________________
conv2d_218 (Conv2D)             (None, 7, 7, 32)     36864       activation_219[0][0]             
__________________________________________________________________________________________________
concatenate_105 (Concatenate)   (None, 7, 7, 672)    0           concatenate_104[0][0]            
                                                                 conv2d_218[0][0]                 
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 7, 7, 672)    2688        concatenate_105[0][0]            
__________________________________________________________________________________________________
activation_220 (Activation)     (None, 7, 7, 672)    0           batch_normalization_220[0][0]    
__________________________________________________________________________________________________
conv2d_219 (Conv2D)             (None, 7, 7, 128)    86016       activation_220[0][0]             
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 7, 7, 128)    512         conv2d_219[0][0]                 
__________________________________________________________________________________________________
activation_221 (Activation)     (None, 7, 7, 128)    0           batch_normalization_221[0][0]    
__________________________________________________________________________________________________
conv2d_220 (Conv2D)             (None, 7, 7, 32)     36864       activation_221[0][0]             
__________________________________________________________________________________________________
concatenate_106 (Concatenate)   (None, 7, 7, 704)    0           concatenate_105[0][0]            
                                                                 conv2d_220[0][0]                 
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 7, 7, 704)    2816        concatenate_106[0][0]            
__________________________________________________________________________________________________
activation_222 (Activation)     (None, 7, 7, 704)    0           batch_normalization_222[0][0]    
__________________________________________________________________________________________________
conv2d_221 (Conv2D)             (None, 7, 7, 128)    90112       activation_222[0][0]             
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 7, 7, 128)    512         conv2d_221[0][0]                 
__________________________________________________________________________________________________
activation_223 (Activation)     (None, 7, 7, 128)    0           batch_normalization_223[0][0]    
__________________________________________________________________________________________________
conv2d_222 (Conv2D)             (None, 7, 7, 32)     36864       activation_223[0][0]             
__________________________________________________________________________________________________
concatenate_107 (Concatenate)   (None, 7, 7, 736)    0           concatenate_106[0][0]            
                                                                 conv2d_222[0][0]                 
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 7, 7, 736)    2944        concatenate_107[0][0]            
__________________________________________________________________________________________________
activation_224 (Activation)     (None, 7, 7, 736)    0           batch_normalization_224[0][0]    
__________________________________________________________________________________________________
conv2d_223 (Conv2D)             (None, 7, 7, 128)    94208       activation_224[0][0]             
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 7, 7, 128)    512         conv2d_223[0][0]                 
__________________________________________________________________________________________________
activation_225 (Activation)     (None, 7, 7, 128)    0           batch_normalization_225[0][0]    
__________________________________________________________________________________________________
conv2d_224 (Conv2D)             (None, 7, 7, 32)     36864       activation_225[0][0]             
__________________________________________________________________________________________________
concatenate_108 (Concatenate)   (None, 7, 7, 768)    0           concatenate_107[0][0]            
                                                                 conv2d_224[0][0]                 
__________________________________________________________________________________________________
batch_normalization_226 (BatchN (None, 7, 7, 768)    3072        concatenate_108[0][0]            
__________________________________________________________________________________________________
activation_226 (Activation)     (None, 7, 7, 768)    0           batch_normalization_226[0][0]    
__________________________________________________________________________________________________
conv2d_225 (Conv2D)             (None, 7, 7, 128)    98304       activation_226[0][0]             
__________________________________________________________________________________________________
batch_normalization_227 (BatchN (None, 7, 7, 128)    512         conv2d_225[0][0]                 
__________________________________________________________________________________________________
activation_227 (Activation)     (None, 7, 7, 128)    0           batch_normalization_227[0][0]    
__________________________________________________________________________________________________
conv2d_226 (Conv2D)             (None, 7, 7, 32)     36864       activation_227[0][0]             
__________________________________________________________________________________________________
concatenate_109 (Concatenate)   (None, 7, 7, 800)    0           concatenate_108[0][0]            
                                                                 conv2d_226[0][0]                 
__________________________________________________________________________________________________
batch_normalization_228 (BatchN (None, 7, 7, 800)    3200        concatenate_109[0][0]            
__________________________________________________________________________________________________
activation_228 (Activation)     (None, 7, 7, 800)    0           batch_normalization_228[0][0]    
__________________________________________________________________________________________________
conv2d_227 (Conv2D)             (None, 7, 7, 128)    102400      activation_228[0][0]             
__________________________________________________________________________________________________
batch_normalization_229 (BatchN (None, 7, 7, 128)    512         conv2d_227[0][0]                 
__________________________________________________________________________________________________
activation_229 (Activation)     (None, 7, 7, 128)    0           batch_normalization_229[0][0]    
__________________________________________________________________________________________________
conv2d_228 (Conv2D)             (None, 7, 7, 32)     36864       activation_229[0][0]             
__________________________________________________________________________________________________
concatenate_110 (Concatenate)   (None, 7, 7, 832)    0           concatenate_109[0][0]            
                                                                 conv2d_228[0][0]                 
__________________________________________________________________________________________________
batch_normalization_230 (BatchN (None, 7, 7, 832)    3328        concatenate_110[0][0]            
__________________________________________________________________________________________________
activation_230 (Activation)     (None, 7, 7, 832)    0           batch_normalization_230[0][0]    
__________________________________________________________________________________________________
conv2d_229 (Conv2D)             (None, 7, 7, 128)    106496      activation_230[0][0]             
__________________________________________________________________________________________________
batch_normalization_231 (BatchN (None, 7, 7, 128)    512         conv2d_229[0][0]                 
__________________________________________________________________________________________________
activation_231 (Activation)     (None, 7, 7, 128)    0           batch_normalization_231[0][0]    
__________________________________________________________________________________________________
conv2d_230 (Conv2D)             (None, 7, 7, 32)     36864       activation_231[0][0]             
__________________________________________________________________________________________________
concatenate_111 (Concatenate)   (None, 7, 7, 864)    0           concatenate_110[0][0]            
                                                                 conv2d_230[0][0]                 
__________________________________________________________________________________________________
batch_normalization_232 (BatchN (None, 7, 7, 864)    3456        concatenate_111[0][0]            
__________________________________________________________________________________________________
activation_232 (Activation)     (None, 7, 7, 864)    0           batch_normalization_232[0][0]    
__________________________________________________________________________________________________
conv2d_231 (Conv2D)             (None, 7, 7, 128)    110592      activation_232[0][0]             
__________________________________________________________________________________________________
batch_normalization_233 (BatchN (None, 7, 7, 128)    512         conv2d_231[0][0]                 
__________________________________________________________________________________________________
activation_233 (Activation)     (None, 7, 7, 128)    0           batch_normalization_233[0][0]    
__________________________________________________________________________________________________
conv2d_232 (Conv2D)             (None, 7, 7, 32)     36864       activation_233[0][0]             
__________________________________________________________________________________________________
concatenate_112 (Concatenate)   (None, 7, 7, 896)    0           concatenate_111[0][0]            
                                                                 conv2d_232[0][0]                 
__________________________________________________________________________________________________
batch_normalization_234 (BatchN (None, 7, 7, 896)    3584        concatenate_112[0][0]            
__________________________________________________________________________________________________
activation_234 (Activation)     (None, 7, 7, 896)    0           batch_normalization_234[0][0]    
__________________________________________________________________________________________________
conv2d_233 (Conv2D)             (None, 7, 7, 128)    114688      activation_234[0][0]             
__________________________________________________________________________________________________
batch_normalization_235 (BatchN (None, 7, 7, 128)    512         conv2d_233[0][0]                 
__________________________________________________________________________________________________
activation_235 (Activation)     (None, 7, 7, 128)    0           batch_normalization_235[0][0]    
__________________________________________________________________________________________________
conv2d_234 (Conv2D)             (None, 7, 7, 32)     36864       activation_235[0][0]             
__________________________________________________________________________________________________
concatenate_113 (Concatenate)   (None, 7, 7, 928)    0           concatenate_112[0][0]            
                                                                 conv2d_234[0][0]                 
__________________________________________________________________________________________________
batch_normalization_236 (BatchN (None, 7, 7, 928)    3712        concatenate_113[0][0]            
__________________________________________________________________________________________________
activation_236 (Activation)     (None, 7, 7, 928)    0           batch_normalization_236[0][0]    
__________________________________________________________________________________________________
conv2d_235 (Conv2D)             (None, 7, 7, 128)    118784      activation_236[0][0]             
__________________________________________________________________________________________________
batch_normalization_237 (BatchN (None, 7, 7, 128)    512         conv2d_235[0][0]                 
__________________________________________________________________________________________________
activation_237 (Activation)     (None, 7, 7, 128)    0           batch_normalization_237[0][0]    
__________________________________________________________________________________________________
conv2d_236 (Conv2D)             (None, 7, 7, 32)     36864       activation_237[0][0]             
__________________________________________________________________________________________________
concatenate_114 (Concatenate)   (None, 7, 7, 960)    0           concatenate_113[0][0]            
                                                                 conv2d_236[0][0]                 
__________________________________________________________________________________________________
batch_normalization_238 (BatchN (None, 7, 7, 960)    3840        concatenate_114[0][0]            
__________________________________________________________________________________________________
activation_238 (Activation)     (None, 7, 7, 960)    0           batch_normalization_238[0][0]    
__________________________________________________________________________________________________
conv2d_237 (Conv2D)             (None, 7, 7, 128)    122880      activation_238[0][0]             
__________________________________________________________________________________________________
batch_normalization_239 (BatchN (None, 7, 7, 128)    512         conv2d_237[0][0]                 
__________________________________________________________________________________________________
activation_239 (Activation)     (None, 7, 7, 128)    0           batch_normalization_239[0][0]    
__________________________________________________________________________________________________
conv2d_238 (Conv2D)             (None, 7, 7, 32)     36864       activation_239[0][0]             
__________________________________________________________________________________________________
concatenate_115 (Concatenate)   (None, 7, 7, 992)    0           concatenate_114[0][0]            
                                                                 conv2d_238[0][0]                 
__________________________________________________________________________________________________
batch_normalization_240 (BatchN (None, 7, 7, 992)    3968        concatenate_115[0][0]            
__________________________________________________________________________________________________
activation_240 (Activation)     (None, 7, 7, 992)    0           batch_normalization_240[0][0]    
__________________________________________________________________________________________________
conv2d_239 (Conv2D)             (None, 7, 7, 128)    126976      activation_240[0][0]             
__________________________________________________________________________________________________
batch_normalization_241 (BatchN (None, 7, 7, 128)    512         conv2d_239[0][0]                 
__________________________________________________________________________________________________
activation_241 (Activation)     (None, 7, 7, 128)    0           batch_normalization_241[0][0]    
__________________________________________________________________________________________________
conv2d_240 (Conv2D)             (None, 7, 7, 32)     36864       activation_241[0][0]             
__________________________________________________________________________________________________
concatenate_116 (Concatenate)   (None, 7, 7, 1024)   0           concatenate_115[0][0]            
                                                                 conv2d_240[0][0]                 
__________________________________________________________________________________________________
batch_normalization_242 (BatchN (None, 7, 7, 1024)   4096        concatenate_116[0][0]            
__________________________________________________________________________________________________
activation_242 (Activation)     (None, 7, 7, 1024)   0           batch_normalization_242[0][0]    
__________________________________________________________________________________________________
global_average_pooling2d_2 (Glo (None, 1024)         0           activation_242[0][0]             
__________________________________________________________________________________________________
dense_2 (Dense)                 (None, 1000)         1025000     global_average_pooling2d_2[0][0] 
==================================================================================================
Total params: 8,062,504
Trainable params: 7,978,856
Non-trainable params: 83,648
__________________________________________________________________________________________________
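
A quick way to read the rows above: each unit in this final dense block is BN → ReLU → 1×1 conv (to 4·k = 128 channels) → BN → ReLU → 3×3 conv (k = 32 channels), with both convolutions bias-free. So with C input channels, the first BN contributes 4·C parameters (gamma, beta, moving mean, moving variance), the 1×1 conv C·128, the second BN 4·128, and the 3×3 conv 3·3·128·32. A minimal arithmetic check (plain Python, matching the unit fed by concatenate_105 at C = 672) is sketched below:

In [ ]:
growth_rate = 32                     # k for DenseNet-121 (the "-32" in the weights filename)
bottleneck_width = 4 * growth_rate   # 128 channels after the 1x1 conv

c_in = 672  # channels entering the unit (see concatenate_105 above)

bn1   = 4 * c_in                                 # 2688  -> batch_normalization_220
conv1 = c_in * bottleneck_width                  # 86016 -> conv2d_219 (1x1, no bias)
bn2   = 4 * bottleneck_width                     # 512   -> batch_normalization_221
conv2 = 3 * 3 * bottleneck_width * growth_rate   # 36864 -> conv2d_220 (3x3, no bias)

print(bn1, conv1, bn2, conv2)  # matches the rows printed in the summary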

In [10]:
model.save(KERAS_MODEL_FILEPATH)
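
Not part of the original notebook, but a natural follow-up: reload the saved HDF5 file and push a single image through it to confirm the weights round-trip. This is a minimal sketch; the image filename is a hypothetical placeholder, and `preprocess_input` / `decode_predictions` are the helpers imported and defined earlier in this notebook.

In [ ]:
# Round-trip sanity check on the saved file.
# NOTE: 'elephant.jpg' is a placeholder path, not part of the original notebook.
import numpy as np
from keras.models import load_model
from keras.preprocessing import image

reloaded = load_model(KERAS_MODEL_FILEPATH)

img = image.load_img('elephant.jpg', target_size=(224, 224))  # placeholder image
x = preprocess_input(image.img_to_array(img))                 # RGB->BGR, mean-center, scale (defined above)
preds = reloaded.predict(np.expand_dims(x, axis=0))           # add the batch dimension
print(decode_predictions(preds, top=3))                       # top-3 ImageNet labels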
