def build_network_fc_special(args):
    """Build a small fully-connected network: Flatten -> 100 -> 50 -> 5.

    Each hidden Dense layer is followed by BatchNormalization (momentum=0)
    and a ReLU; the final 5-unit Dense layer is left linear (no activation).

    Args:
        args: namespace with an ``l2`` attribute used as the L2
            regularization strength for every Dense layer.

    Returns:
        A SequentialNetwork wrapping the layer list.
    """
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        BatchNormalization(momentum=0, name='batch_norm_1'),
        Activation('relu'),
        Dense(50, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        # Fixed: this layer was also named 'batch_norm_1', colliding with the
        # first batch-norm layer; duplicate layer names break name-based
        # variable lookup/checkpointing in Keras-style networks.
        BatchNormalization(momentum=0, name='batch_norm_2'),
        Activation('relu'),
        Dense(5, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])
    def __init__(self, kernel_size, filters, first_stride, name_prefix='', identity=True, resize=1, l2=0, l2_shortcut=0, *args, **kwargs):
        """Two-convolution residual block.

        When ``identity`` is False, a 1x1 strided convolution is added on
        the shortcut path so the skip connection matches the main path's
        output shape; otherwise the shortcut is a plain identity.
        """
        super(ResidualBlock, self).__init__(*args, **kwargs)
        self.identity = identity

        # Main path, first convolution (width optionally scaled by ``resize``).
        first_conv = Conv2D(
            int(filters * resize), kernel_size,
            strides=first_stride,
            kernel_initializer=he_normal,
            padding='same',
            kernel_regularizer=l2reg(l2),
            name=name_prefix + 'conv2D_1')
        self.conv1 = self.track_layer(first_conv)
        self.bn1 = self.track_layer(
            BatchNormalization(momentum=0.0, name=name_prefix + 'batch_norm_1'))
        self.act1 = self.track_layer(Activation('relu'))

        # Main path, second convolution restores the nominal filter count.
        second_conv = Conv2D(
            filters, kernel_size,
            strides=(1, 1),
            kernel_initializer=he_normal,
            padding='same',
            kernel_regularizer=l2reg(l2),
            name=name_prefix + 'conv2D_2')
        self.conv2 = self.track_layer(second_conv)
        self.bn2 = self.track_layer(
            BatchNormalization(momentum=0.0, name=name_prefix + 'batch_norm_2'))

        # Projection shortcut, only built when shapes differ.
        if not identity:
            projection = Conv2D(
                filters, (1, 1),
                strides=first_stride,
                kernel_initializer=he_normal,
                padding='same',
                kernel_regularizer=l2reg(l2_shortcut),
                name=name_prefix + 'shortcut_conv')
            self.conv_shortcut = self.track_layer(projection)

        self.add_layer = self.track_layer(tfkeras.layers.Add())
        self.act2 = self.track_layer(Activation('relu'))  # TODO need relu on last block?
def build_all_cnn(args):
    """Build the All-CNN architecture: three groups of convolutions with
    strided convolutions (instead of pooling) for downsampling, finishing
    with 1x1 convolutions down to 10 channels and global average pooling.

    Args:
        args: namespace whose ``l2`` attribute sets the L2 penalty on the
            non-strided convolutions (strided convs are unregularized,
            matching the original configuration).

    Returns:
        A SequentialNetwork of the layer sequence.
    """
    def reg_conv(filters, size, tag):
        # Regularized convolution used everywhere except the strided layers.
        return Conv2D(filters, size, kernel_initializer=he_normal,
                      padding='same', kernel_regularizer=l2reg(args.l2),
                      name=tag)

    def bn_relu(tag):
        # BatchNormalization (momentum pinned to 0) followed by ReLU.
        return [BatchNormalization(momentum=0.0, name=tag), Activation('relu')]

    layers = []
    #layers.append(Dropout(0.1))
    # Group 1: two 96-filter 3x3 convs, then a strided conv downsamples 2x.
    layers.append(reg_conv(96, (3, 3), 'conv2D_1'))
    layers.extend(bn_relu('batch_norm_1'))
    layers.append(reg_conv(96, (3, 3), 'conv2D_2'))
    layers.extend(bn_relu('batch_norm_2'))
    layers.append(Conv2D(96, (3, 3), kernel_initializer=he_normal,
                         padding='same', strides=(2, 2),
                         name='conv2D_strided_1'))
    layers.append(Dropout(0.5))
    # Group 2: two 192-filter 3x3 convs plus strided downsampling.
    layers.append(reg_conv(192, (3, 3), 'conv2D_3'))
    layers.extend(bn_relu('batch_norm_3'))
    layers.append(reg_conv(192, (3, 3), 'conv2D_4'))
    layers.extend(bn_relu('batch_norm_4'))
    layers.append(Conv2D(192, (3, 3), kernel_initializer=he_normal,
                         padding='same', strides=(2, 2),
                         name='conv2D_strided_2'))
    layers.append(Dropout(0.5))
    # Group 3: a 3x3 conv, then 1x1 convs; the final 10 channel maps are
    # averaged globally to produce per-class scores.
    layers.append(reg_conv(192, (3, 3), 'conv2D_5'))
    layers.extend(bn_relu('batch_norm_5'))
    layers.append(reg_conv(192, (1, 1), 'conv2D_6'))
    layers.extend(bn_relu('batch_norm_6'))
    layers.append(reg_conv(10, (1, 1), 'conv2D_7'))
    layers.append(GlobalAveragePooling2D())
    return SequentialNetwork(layers)
# Ejemplo n.º 4
# 0
def conv_bn_relu(num_channel,
                 kernel_size,
                 stride,
                 name,
                 padding='same',
                 activation='relu'):
    """Return a [Conv2D, BatchNormalization, Activation] layer list.

    The convolution is square (kernel_size x kernel_size), he_normal
    initialized, and all three layers share the ``name`` prefix (conv and
    BN are suffixed; the activation layer is left unnamed).
    """
    conv = Conv2D(filters=num_channel,
                  kernel_size=(kernel_size, kernel_size),
                  strides=stride,
                  padding=padding,
                  kernel_initializer="he_normal",
                  name=name + "_conv")
    bn = BatchNormalization(name=name + '_bn')
    act = Activation(activation)  # , name=name + '_relu'
    return [conv, bn, act]
def build_resnet(args):
    """Build an experimental ResNet variant with Conv2DTranspose upsampling
    stages interleaved between the residual sets, ending in a sigmoid
    activation named 'mask' — presumably a dense-prediction/segmentation
    head rather than a classifier (TODO confirm against caller).

    NOTE(review): a second ``build_resnet`` later in this file redefines
    this name; if both are in the same module, this version is shadowed.

    Args:
        args: namespace providing ``l2`` (weight-decay strength) and
            ``resize_more`` / ``resize_less`` width multipliers forwarded
            to ``ResidualBlock``'s ``resize`` argument.
    """
    return SequentialNetwork([
        # pre-blocks
        Conv2D(16, 3, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        # set 1
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1A_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),

        # set 2
        ResidualBlock(3, 32, first_stride=(2, 2), name_prefix='2A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2B_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2C_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),

        # Learned 2x upsampling back toward input resolution — undoes the
        # stride-2 downsampling of set 2 before entering set 3.
        tf.layers.Conv2DTranspose(16, 2, strides=(2, 2)),

        # tf.layers.Conv2DTranspose(1, 15, padding='valid'),

        # set 3
        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='3A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # post-blocks
        # GlobalAveragePooling2D(),

        # NOTE(review): 1x1 kernel with stride 2 — upsamples 2x but leaves
        # gaps (every other output pixel gets no input); verify intended.
        tf.layers.Conv2DTranspose(16, 1, strides=(2, 2)),

        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='4A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='4B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='4C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),

        # Final upsampling + channel reduction down to a single-channel map.
        # tf.layers.Conv2DTranspose(32, 1, strides=(2, 2)),
        tf.layers.Conv2DTranspose(16, 1, strides=(2, 2)),
        tf.layers.Conv2DTranspose(8, 1, strides=(1, 1)),
        tf.layers.Conv2DTranspose(1, 1, strides=(1, 1)),

        # tf.layers.Conv2DTranspose(1, 5, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 11, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 15, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 21, padding='valid'),
        # tf.layers.Conv2DTranspose(1, 42, padding='valid', name='probs'),

        # Per-pixel probabilities in [0, 1].
        Activation('sigmoid', name='mask')
        # Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2_special), name='fc_last')
    ])
# Ejemplo n.º 6
# 0
    def __init__(self, n_filters, stride=1, downsample=None, name=None):
        """Residual block used by the LinkNet-style encoder.

        Args:
            n_filters: number of output channels for both convolutions.
            stride: stride of the first convolution (via conv_bn_relu).
            downsample: optional list of layers (e.g. 1x1 Conv2D + BN)
                applied on the shortcut when the main path changes shape;
                None means an identity shortcut.
            name: scope prefix for layer names.
                NOTE(review): the default None would make
                ``name + '/cvbnrelu'`` below raise TypeError — callers in
                this file always pass a name; confirm no other callers.
        """
        super(ResidualBlockLinkNet, self).__init__(name=name)

        if downsample is None:
            self.shortcut = None
        else:
            # NOTE(review): ``track_layers`` (plural) vs ``track_layer``
            # used below — presumably a helper that registers a *list*
            # of layers; confirm it exists on the base class.
            self.shortcut = self.track_layers(downsample)

        # Main path, part 1: conv + BN + ReLU triple carrying the stride.
        self.conv_bn_relu = self.track_layers(
            conv_bn_relu(n_filters,
                         kernel_size=3,
                         stride=stride,
                         name=name + '/cvbnrelu'))
        # Main path, part 2: stride-1 convolution followed by BN.
        self.conv1 = self.track_layer(
            Conv2D(n_filters, (3, 3), name=name + '_conv2', padding='same'))
        self.bn1 = self.track_layer(BatchNormalization(name=name + '_bn'))
        # Merge shortcut with main path, then the final activation.
        self.add_layer = self.track_layer(tfkeras.layers.Add())
        self.act1 = self.track_layer(Activation('relu'))
# Ejemplo n.º 7
# 0
def encoder(m, n, blocks, stride, name='encoder'):
    """Build one encoder stage as a list of ``blocks`` residual blocks.

    The first block may change channel count (``m`` -> ``n``) and spatial
    resolution (``stride``); when it does, a 1x1 Conv2D + BatchNormalization
    projection is supplied for its shortcut.  All remaining blocks are
    shape-preserving (stride 1, no downsample).
    """
    needs_projection = stride != 1 or m != n
    if needs_projection:
        downsample = [
            Conv2D(n, (1, 1),
                   strides=(stride, stride),
                   name=name + '_conv_downsample'),
            BatchNormalization(name=name + '_batchnorm_downsample')
        ]
    else:
        downsample = None

    stage = [
        ResidualBlockLinkNet(n,
                             stride,
                             downsample,
                             name=name + '/residualBlock0')
    ]
    stage += [
        ResidualBlockLinkNet(n,
                             stride=1,
                             name='{}/residualBlock{}'.format(name, i))
        for i in range(1, blocks)
    ]
    return stage
# Ejemplo n.º 8
# 0
def deconv_bn_relu(num_channels,
                   kernel_size,
                   name,
                   transposed_conv,
                   activation='relu'):
    """Return a 2x-upsampling layer list: upsample + BN + activation.

    When ``transposed_conv`` is true, a single strided Conv2DTranspose
    (fixed 4x4 kernel) performs learned upsampling; otherwise a
    nearest-neighbour UpSampling2D is followed by a regular convolution
    of the requested ``kernel_size``.
    """
    if transposed_conv:
        # Learned 2x upsampling in one layer.
        upsample = [
            tf.layers.Conv2DTranspose(num_channels,
                                      kernel_size=(4, 4),
                                      strides=(2, 2),
                                      padding="same")
        ]
    else:
        # Fixed 2x upsampling, then a conv to mix channels.
        upsample = [
            tf.keras.layers.UpSampling2D(),
            Conv2D(num_channels,
                   kernel_size=(kernel_size, kernel_size),
                   kernel_initializer="he_normal",
                   padding='same'),
        ]
    return upsample + [
        BatchNormalization(name=name + '_bn'),
        Activation(activation),
    ]
def build_resnet(args):
    """Build a standard CIFAR-style ResNet classifier: three sets of three
    residual blocks (16/32/64 filters), global average pooling, and a
    10-unit linear output layer.

    NOTE(review): this redefines ``build_resnet`` from earlier in this
    file (the mask-output variant); if both live in one module, only this
    definition survives.

    Args:
        args: namespace providing ``l2`` (weight decay for convs/blocks),
            ``l2_special`` (weight decay for the final Dense layer), and
            ``resize_more`` / ``resize_less`` width multipliers forwarded
            to ``ResidualBlock``'s ``resize`` argument.
    """
    return SequentialNetwork([
        # pre-blocks
        Conv2D(16, 3, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        # set 1
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1A_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # set 2 (first block downsamples 2x, hence the projection shortcut)
        ResidualBlock(3, 32, first_stride=(2, 2), name_prefix='2A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2B_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2C_', identity=True, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        # set 3 (same pattern at 64 filters)
        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='3A_', identity=False, resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3B_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3C_', identity=True, resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # post-blocks: pool spatial dims away, then linear class scores.
        GlobalAveragePooling2D(),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2_special), name='fc_last')
    ])