def build_fc_supermask(args):
    """Build a 300-100-10 fully-connected supermask network.

    Optional signed-constant and dynamic-scaling behavior is forwarded to
    every MaskedDense layer via keyword arguments.
    """
    extra = {}
    if args.signed_constant:
        extra['signed_constant'] = True
        extra['const_multiplier'] = args.signed_constant_multiplier
    if args.dynamic_scaling:
        extra['dynamic_scaling'] = True

    def masked_fc(units, act, name):
        # One masked dense layer; mask options are shared across all layers.
        return MaskedDense(units,
                           kernel_initializer=glorot_normal,
                           sigmoid_bias=args.sigmoid_bias,
                           round_mask=args.round_mask,
                           activation=act,
                           kernel_regularizer=l2reg(args.l2),
                           name=name,
                           **extra)

    return SequentialNetwork([
        Flatten(),
        masked_fc(300, relu, 'fc_1'),
        masked_fc(100, relu, 'fc_2'),
        masked_fc(10, None, 'fc_3'),
    ])
def build_network_fc(args):
    """Small 100-50-5 fully-connected baseline network."""
    def fc(units, act, name):
        # can also try kernel_initializer=tfkeras.initializers.TruncatedNormal(mean=0.0, stddev=0.1)
        return Dense(units, kernel_initializer=he_normal, activation=act,
                     kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        Flatten(),
        fc(100, relu, 'fc_1'),
        fc(50, relu, 'fc_2'),
        fc(5, None, 'fc_3'),
    ])
def build_conv2_supermask(args):
    """Conv-2 supermask network: two masked 3x3 convs, one pool, then a
    256-256-10 masked fully-connected head.

    Optional signed-constant / dynamic-scaling flags are forwarded to every
    masked layer.
    """
    extra = {}
    if args.signed_constant:
        extra['signed_constant'] = True
        extra['const_multiplier'] = args.signed_constant_multiplier
    if args.dynamic_scaling:
        extra['dynamic_scaling'] = True

    def masked_conv(filters, name):
        return MaskedConv2D(filters,
                            3,
                            kernel_initializer=glorot_normal,
                            sigmoid_bias=args.sigmoid_bias,
                            round_mask=args.round_mask,
                            padding='same',
                            kernel_regularizer=l2reg(args.l2),
                            name=name,
                            **extra)

    def masked_fc(units, act, name):
        return MaskedDense(units,
                           kernel_initializer=glorot_normal,
                           sigmoid_bias=args.sigmoid_bias,
                           round_mask=args.round_mask,
                           activation=act,
                           kernel_regularizer=l2reg(args.l2),
                           name=name,
                           **extra)

    return SequentialNetwork([
        masked_conv(64, 'conv2D_1'),
        Activation('relu'),
        masked_conv(64, 'conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        masked_fc(256, relu, 'fc_1'),
        masked_fc(256, relu, 'fc_2'),
        masked_fc(10, None, 'fc_3'),
    ])
def build_network_fc_special(args):
    """100-50-5 fully-connected network with batch norm after each hidden layer.

    Fix: the second BatchNormalization was named 'batch_norm_1', duplicating
    the first one's name; renamed to 'batch_norm_2' so layer/variable names
    stay unique (duplicate names break checkpoint save/restore by name).
    """
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        BatchNormalization(momentum=0, name='batch_norm_1'),
        Activation('relu'),
        Dense(50, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        BatchNormalization(momentum=0, name='batch_norm_2'),  # was duplicate 'batch_norm_1'
        Activation('relu'),
        Dense(5, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])
def build_vgg_mini(args):
    """Small VGG-style convnet: three conv/pool stages, then a 512-10 head."""
    def conv(filters, name):
        return Conv2D(filters, (3, 3), kernel_initializer=he_normal, padding='same',
                      activation=relu, kernel_regularizer=l2reg(args.l2), name=name)

    layers = []
    for i, filters in enumerate((64, 128, 256), start=1):
        layers.append(conv(filters, 'conv2D_%d' % i))
        layers.append(MaxPooling2D((2, 2), (2, 2)))
    layers += [
        Flatten(),
        Dense(512, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2'),
    ]
    return SequentialNetwork(layers)
def build_lenet_conv(args):
    """Slightly modified LeNet: two 5x5 conv/pool stages, dropout, 400-10 head."""
    def conv(filters, name):
        return Conv2D(filters, 5, kernel_initializer=he_normal, padding='same',
                      kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        conv(20, 'conv2D_1'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        conv(40, 'conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        Dropout(0.25),
        Dense(400, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2'),
    ])
    def __init__(self, kernel_size, filters, first_stride, name_prefix='', identity=True, resize=1, l2=0, l2_shortcut=0, *args, **kwargs):
        """Two-convolution residual block.

        Args:
            kernel_size: kernel size of both main-path convolutions.
            filters: output channels of the second (and shortcut) convolution.
            first_stride: stride of the first convolution, and of the shortcut
                convolution when `identity` is False.
            name_prefix: string prepended to every sub-layer's name.
            identity: if True the shortcut is the raw input; if False a 1x1
                strided projection convolution is created instead.
            resize: multiplier applied to the first convolution's filter count.
            l2: L2 coefficient for the two main-path convolutions.
            l2_shortcut: L2 coefficient for the projection shortcut.
        """
        super(ResidualBlock, self).__init__(*args, **kwargs)
        self.identity = identity
        # First conv: filter count scaled by `resize`, possibly strided.
        self.conv1 = self.track_layer(Conv2D(int(filters * resize), kernel_size, strides=first_stride, kernel_initializer=he_normal,
            padding='same', kernel_regularizer=l2reg(l2), name=name_prefix+'conv2D_1'))
        self.bn1 = self.track_layer(BatchNormalization(momentum=0.0, name=name_prefix+'batch_norm_1'))
        self.act1 = self.track_layer(Activation('relu'))

        # Second conv always restores `filters` channels at stride 1.
        self.conv2 = self.track_layer(Conv2D(filters, kernel_size, strides=(1, 1), kernel_initializer=he_normal,
            padding='same', kernel_regularizer=l2reg(l2), name=name_prefix+'conv2D_2'))
        self.bn2 = self.track_layer(BatchNormalization(momentum=0.0, name=name_prefix+'batch_norm_2'))

        if not self.identity:
            # 1x1 projection shortcut to match shape when channels/stride change.
            self.conv_shortcut = self.track_layer(Conv2D(filters, (1, 1), strides=first_stride, kernel_initializer=he_normal,
                padding='same', kernel_regularizer=l2reg(l2_shortcut), name=name_prefix+'shortcut_conv'))

        self.add_layer = self.track_layer(tfkeras.layers.Add())
        self.act2 = self.track_layer(Activation('relu')) # TODO need relu on last block?
def build_resnet(args):
    """Three-stage ResNet-20-style classifier (16/32/64 filters, 10-way output).

    NOTE(review): a second `build_resnet` defined later in this file shadows
    this definition at import time — rename one of them if this classifier
    variant is still needed.
    """
    return SequentialNetwork([
        # pre-blocks: stem conv + batch norm + relu
        Conv2D(16, 3, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        # set 1
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1A_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # set 2 (first block downsamples via stride 2 and a projection shortcut)
        ResidualBlock(3, 32, first_stride=(2, 2), name_prefix='2A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2B_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2C_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        # set 3
        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='3A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # post-blocks: global pooling + linear classifier (separate l2_special coefficient)
        GlobalAveragePooling2D(),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2_special), name='fc_last')
    ])
def build_resnet(args):
    """ResNet-style encoder/decoder that ends in a sigmoid 'mask' output.

    NOTE(review): this definition shadows the earlier `build_resnet`
    classifier above — only this one is visible to callers. The transposed
    convolutions interleaved below look experimental (several alternative
    kernel sizes are left commented out); confirm the intended upsampling
    schedule before relying on output spatial dimensions.
    """
    return SequentialNetwork([
        # pre-blocks: stem conv + batch norm + relu
        Conv2D(16, 3, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        # set 1
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1A_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),

        # set 2
        ResidualBlock(3, 32, first_stride=(2, 2), name_prefix='2A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2B_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2C_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),

        # upsample 2x back toward input resolution
        tf.layers.Conv2DTranspose(16, 2, strides=(2, 2)),

        # tf.layers.Conv2DTranspose(1, 15, padding='valid'),

        # set 3
        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='3A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # post-blocks
        # GlobalAveragePooling2D(),

        tf.layers.Conv2DTranspose(16, 1, strides=(2, 2)),

        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='4A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='4B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='4C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),

        # final upsampling + channel reduction down to a single-channel map
        # tf.layers.Conv2DTranspose(32, 1, strides=(2, 2)),
        tf.layers.Conv2DTranspose(16, 1, strides=(2, 2)),
        tf.layers.Conv2DTranspose(8, 1, strides=(1, 1)),
        tf.layers.Conv2DTranspose(1, 1, strides=(1, 1)),

        # tf.layers.Conv2DTranspose(1, 5, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 11, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 15, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 21, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 42, padding='valid', name='probs'),

        Activation('sigmoid', name='mask')
        # Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2_special), name='fc_last')
    ])
def build_all_cnn(args):
    """All-convolutional network (All-CNN style): conv/BN/relu blocks with
    strided convolutions in place of pooling, ending in a 10-channel 1x1 conv
    and global average pooling.

    The two strided convolutions intentionally carry no BN, relu, or
    regularizer, matching the original layer list.
    """
    def conv_block(filters, size, idx):
        # Regularized conv followed by batch norm and relu.
        return [
            Conv2D(filters, size, kernel_initializer=he_normal, padding='same',
                   kernel_regularizer=l2reg(args.l2), name='conv2D_%d' % idx),
            BatchNormalization(momentum=0.0, name='batch_norm_%d' % idx),
            Activation('relu'),
        ]

    layers = []
    layers += conv_block(96, (3, 3), 1)
    layers += conv_block(96, (3, 3), 2)
    layers.append(Conv2D(96, (3, 3), kernel_initializer=he_normal, padding='same', strides=(2, 2), name='conv2D_strided_1'))
    layers.append(Dropout(0.5))
    layers += conv_block(192, (3, 3), 3)
    layers += conv_block(192, (3, 3), 4)
    layers.append(Conv2D(192, (3, 3), kernel_initializer=he_normal, padding='same', strides=(2, 2), name='conv2D_strided_2'))
    layers.append(Dropout(0.5))
    layers += conv_block(192, (3, 3), 5)
    layers += conv_block(192, (1, 1), 6)
    layers.append(Conv2D(10, (1, 1), kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_7'))
    layers.append(GlobalAveragePooling2D())
    return SequentialNetwork(layers)
def build_frozen_conv2_lottery(args, init_values, mask_values):
    """Conv-2 network whose layers are frozen to `init_values` under
    `mask_values`.

    Each layer consumes two consecutive slots from both sequences —
    presumably (kernel, bias) pairs; verify against the Freeze* layer
    signatures.
    """
    def frozen_conv(filters, i, name):
        return FreezeConv2D(filters, 3, init_values[i], init_values[i + 1],
                            mask_values[i], mask_values[i + 1],
                            kernel_initializer=glorot_normal, padding='same',
                            kernel_regularizer=l2reg(args.l2), name=name)

    def frozen_fc(units, i, act, name):
        return FreezeDense(units, init_values[i], init_values[i + 1],
                           mask_values[i], mask_values[i + 1],
                           kernel_initializer=glorot_normal, activation=act,
                           kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        frozen_conv(64, 0, 'conv2D_1'),
        Activation('relu'),
        frozen_conv(64, 2, 'conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        frozen_fc(256, 4, relu, 'fc_1'),
        frozen_fc(256, 6, relu, 'fc_2'),
        frozen_fc(10, 8, None, 'fc_3'),
    ])
def build_conv2_lottery(args):
    """Plain Conv-2 network used as the lottery-ticket baseline."""
    def conv(filters, name):
        return Conv2D(filters, 3, kernel_initializer=glorot_normal, padding='same',
                      kernel_regularizer=l2reg(args.l2), name=name)

    def fc(units, act, name):
        return Dense(units, kernel_initializer=glorot_normal, activation=act,
                     kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        conv(64, 'conv2D_1'),
        Activation('relu'),
        conv(64, 'conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        fc(256, relu, 'fc_1'),
        fc(256, relu, 'fc_2'),
        fc(10, None, 'fc_3'),
    ])
def build_fc_lottery(args):
    """LeNet-300-100 style fully-connected lottery-ticket baseline."""
    def fc(units, act, name):
        return Dense(units, kernel_initializer=glorot_normal, activation=act,
                     kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        Flatten(),
        fc(300, relu, 'fc_1'),
        fc(100, relu, 'fc_2'),
        fc(10, None, 'fc_3'),
    ])
def build_masked_conv6_lottery(args, mask_values):
    """Conv-6 network with fixed masks applied to every layer.

    Each layer consumes two consecutive entries of `mask_values` —
    presumably (kernel mask, bias mask); verify against the Masked* layer
    signatures used here (these take masks positionally, unlike the
    supermask builders above).
    """
    def masked_conv(filters, i, name):
        return MaskedConv2D(filters, 3, mask_values[i], mask_values[i + 1],
                            kernel_initializer=glorot_normal, padding='same',
                            kernel_regularizer=l2reg(args.l2), name=name)

    def masked_fc(units, i, act, name):
        return MaskedDense(units, mask_values[i], mask_values[i + 1],
                           kernel_initializer=glorot_normal, activation=act,
                           kernel_regularizer=l2reg(args.l2), name=name)

    return SequentialNetwork([
        masked_conv(64, 0, 'conv2D_1'),
        Activation('relu'),
        masked_conv(64, 2, 'conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        masked_conv(128, 4, 'conv2D_3'),
        Activation('relu'),
        masked_conv(128, 6, 'conv2D_4'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        masked_conv(256, 8, 'conv2D_5'),
        Activation('relu'),
        masked_conv(256, 10, 'conv2D_6'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        masked_fc(256, 12, relu, 'fc_1'),
        masked_fc(256, 14, relu, 'fc_2'),
        masked_fc(10, 16, None, 'fc_3'),
    ])
# Example no. 15 — scraper artifact (original marker: "Exemplo n.º 15" with vote count "0"); kept as a comment so the file parses
    def __init__(self,
                 rpn_params,
                 bsamp_params,
                 nms_params,
                 l2=0,
                 im_h=64,
                 im_w=64,
                 coordconv=False,
                 clip=True,
                 filtersame=False):
        """Build the region-proposal sub-networks (backbone, box mover, box scorer).

        Args:
            rpn_params: RPN hyperparameters; `rpn_hidden_dim` and
                `num_anchors` are read here.
            bsamp_params: box-sampler parameters (stored only; not used here).
            nms_params: non-max-suppression parameters (stored only; not used
                here).
            l2: L2 regularization coefficient for all convolutions.
            im_h, im_w: input image height/width; also feed the CoordConv
                coordinate-channel dimensions.
            coordconv: if True, prepend AddCoords coordinate channels to the
                backbone and intermediate convolutions.
            clip: stored flag — presumably controls box clipping elsewhere;
                not used in this constructor.
            filtersame: if True use 'same' padding (feature map stays 16 wide
                after two 2x pools of a 64-wide input); 'valid' padding with
                5x5 kernels shrinks it to 13.
        """
        super(RegionProposalSampler, self).__init__()
        self.rpn_params = rpn_params
        self.bsamp_params = bsamp_params
        self.nms_params = nms_params
        self.im_h = im_h
        self.im_w = im_w
        self.clip = clip

        _pad = 'same' if filtersame else 'valid'
        _dim = 16 if filtersame else 13
        if coordconv:
            # CoordConv variant: identical structure to the else-branch below,
            # but AddCoords layers inject x/y coordinate channels first.
            self.l(
                'bottom_conv',
                SequentialNetwork(
                    [
                        AddCoords(x_dim=im_w,
                                  y_dim=im_h,
                                  with_r=False,
                                  skiptile=True),  # (batch, 64, 64, 4 or 5)
                        Conv2D(32, (5, 5),
                               padding=_pad,
                               kernel_initializer=he_normal,
                               kernel_regularizer=l2reg(l2)),
                        ReLu,
                        MaxPooling2D(pool_size=2, strides=2),
                        Conv2D(64, (5, 5),
                               padding=_pad,
                               kernel_initializer=he_normal,
                               kernel_regularizer=l2reg(l2)),
                        ReLu,
                        MaxPooling2D(pool_size=2, strides=2),
                    ],
                    name='bottom_conv'))

            self.l(
                'another_conv',
                SequentialNetwork([
                    AddCoords(
                        x_dim=_dim, y_dim=_dim, with_r=False, skiptile=True),
                    Conv2D(rpn_params.rpn_hidden_dim, (3, 3),
                           padding='same',
                           kernel_initializer=he_normal,
                           kernel_regularizer=l2reg(l2)), ReLu
                ],
                                  name='another_conv'))

            # Predicts 4 box-regression outputs per anchor; zero-initialized
            # kernel so initial box adjustments are zero.
            self.l(
                'box_mover',
                SequentialNetwork([
                    Conv2D(rpn_params.rpn_hidden_dim, (3, 3),
                           padding='same',
                           kernel_initializer=he_normal,
                           kernel_regularizer=l2reg(l2)), ReLu,
                    AddCoords(
                        x_dim=_dim, y_dim=_dim, with_r=False, skiptile=True),
                    Conv2D(4 * rpn_params.num_anchors, (1, 1),
                           kernel_initializer=tf.zeros_initializer,
                           bias_initializer=tf.constant_initializer([0.]),
                           kernel_regularizer=l2reg(l2))
                ],
                                  name='box_mover'))  # (13,13,4*k)

        else:
            # Plain variant: same backbone / box-mover stack without AddCoords.
            self.l(
                'bottom_conv',
                SequentialNetwork([
                    Conv2D(32, (5, 5),
                           padding=_pad,
                           kernel_initializer=he_normal,
                           kernel_regularizer=l2reg(l2)),
                    ReLu,
                    MaxPooling2D(pool_size=2, strides=2),
                    Conv2D(64, (5, 5),
                           padding=_pad,
                           kernel_initializer=he_normal,
                           kernel_regularizer=l2reg(l2)),
                    ReLu,
                    MaxPooling2D(pool_size=2, strides=2),
                ],
                                  name='bottom_conv'))

            self.l(
                'another_conv',
                SequentialNetwork([
                    Conv2D(rpn_params.rpn_hidden_dim, (3, 3),
                           padding='same',
                           kernel_initializer=he_normal,
                           kernel_regularizer=l2reg(l2)), ReLu
                ],
                                  name='another_conv'))

            self.l('box_mover',
                   SequentialNetwork([
                       Conv2D(rpn_params.rpn_hidden_dim, (3, 3),
                              padding='same',
                              kernel_initializer=he_normal,
                              kernel_regularizer=l2reg(l2)), ReLu,
                       Conv2D(4 * rpn_params.num_anchors, (1, 1),
                              kernel_initializer=tf.zeros_initializer,
                              bias_initializer=tf.constant_initializer([0.]),
                              kernel_regularizer=l2reg(l2))
                   ],
                                     name='box_mover'))  # (13,13,4*k)

        # Two output channels per anchor — presumably objectness logits; confirm.
        self.l('box_scorer',
               SequentialNetwork([
                   Conv2D(2 * rpn_params.num_anchors, (1, 1),
                          kernel_initializer=he_normal,
                          kernel_regularizer=l2reg(l2)),
               ],
                                 name='box_scorer'))  # (13,13,2*k)

        return
def build_fc_adjustable(args):
    """Build a fully-connected network whose depth is set by args.num_layers.

    Supports 3, 4, or 5 Dense layers (hidden widths preserved from the
    original per-depth configurations), always ending in a linear 10-unit
    output layer named after the total depth.

    Raises:
        ValueError: if args.num_layers is not 3, 4, or 5. (The original code
            silently fell off the end and returned None here.)
    """
    # Hidden-layer widths per supported depth; the 10-way output is appended.
    hidden_widths = {
        3: [455, 67],
        4: [734, 175, 42],
        5: [977, 311, 99, 31],
    }
    if args.num_layers not in hidden_widths:
        raise ValueError('unsupported num_layers: %r (expected 3, 4, or 5)' % (args.num_layers,))

    layers = [Flatten()]
    for i, units in enumerate(hidden_widths[args.num_layers], start=1):
        layers.append(Dense(units, kernel_initializer=he_normal, activation=relu,
                            kernel_regularizer=l2reg(args.l2), name='fc_%d' % i))
    layers.append(Dense(10, kernel_initializer=he_normal, activation=None,
                        kernel_regularizer=l2reg(args.l2), name='fc_%d' % args.num_layers))
    return SequentialNetwork(layers)
def build_linknet_2(args):
    """Two conv-bn-relu blocks, a pool, then a 400-10 fully-connected head.

    NOTE: the dense layers use l2reg(0), i.e. args.l2 is ignored here,
    matching the original.
    """
    layers = list(conv_bn_relu(32, 3, stride=1, name="block1_conv1"))
    layers.extend(conv_bn_relu(32, 3, stride=1, name="block1_conv2"))
    layers.extend([
        MaxPooling2D((2, 2), strides=(2, 2), padding="same", name="block1_pool"),
        Activation('relu'),
        Flatten(),
        Dense(400, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(0), name='fc_1'),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(0), name='fc_2'),
    ])
    return SequentialNetwork(layers)