Example #1
def DepthwiseConvBlock(input_tensor,
                       kernel_size,
                       strides,
                       name,
                       data_format,
                       is_training=False):
    if data_format == 'channels_first':
        axis = 1
    else:
        axis = -1

    x = keras.layers.DepthwiseConv2D(kernel_size=kernel_size,
                                     strides=strides,
                                     padding='same',
                                     use_bias=False,
                                     name='{}_dconv'.format(name),
                                     data_format=data_format)(input_tensor)

    x = tf.layers.batch_normalization(x,
                                      training=is_training,
                                      name='{}_bn'.format(name),
                                      axis=axis)
    if ACTIVATION == 'swish':
        x = swish(x, name='{}_swish'.format(name))
    elif ACTIVATION == 'mish':
        x = mish(x, name='{}_mish'.format(name))
    else:
        x = keras.layers.ReLU(name='{}_relu'.format(name))(x)
    return x
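A minimal usage sketch (assuming TF 1.x graph mode and that the module-level ACTIVATION constant and the swish/mish helpers referenced above are defined; the shape, stride and block name are illustrative):

import tensorflow as tf

# Hypothetical NHWC feature map: [batch, height, width, channels]
inputs = tf.placeholder(tf.float32, shape=[None, 56, 56, 64], name='inputs')

# 3x3 depthwise conv -> batch norm -> activation, stride 1
out = DepthwiseConvBlock(inputs,
                         kernel_size=(3, 3),
                         strides=(1, 1),
                         name='dw_block1',
                         data_format='channels_last',
                         is_training=True)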
Example #2
def GhostConv(name, x, filters, kernel_size, dw_size, ratio, mode, padding='SAME', strides=1,
              data_format='channels_first', use_bias=False, is_training=False, activation='relu', momentum=0.9):
    if data_format == 'channels_first':
        axis = 1
    else:
        axis = -1

    with tf.variable_scope(name):
        init_channels = math.ceil(filters / ratio)

        x = CBR(x, init_channels, kernel_size, strides=strides, training=is_training, momentum=momentum, mode=mode,
                name=name, padding='same', data_format=data_format, activation=activation, bn=True, use_bias=use_bias)

        if ratio == 1:
            return x
        dw1 = GhostDepthConv(x, [dw_size, dw_size], channel_mult=ratio - 1, stride=1, data_format=data_format,
                             name=name)
        dw1 = tf.layers.batch_normalization(dw1, training=is_training, name=name + 'BN_2', axis=axis)
        if activation == 'relu':
            dw1 = tf.nn.relu(dw1, name=name + 'relu_2')
        elif activation == 'mish':
            dw1 = mish(dw1, name='mish_2')
        elif activation == 'swish':
            dw1 = swish(dw1, name=name + 'swish_2')
        else:
            pass

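        # Keep only (filters - init_channels) ghost channels so that the concat
        # below yields exactly `filters` output channels.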
        if data_format == 'channels_first':
            dw1 = dw1[:, :filters - init_channels, :, :]
        else:
            dw1 = dw1[:, :, :, :filters - init_channels]
        x = tf.concat([x, dw1], axis=axis)
        return x
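A usage sketch for GhostConv under the same assumptions (TF 1.x graph mode; module-level CBR, GhostDepthConv, swish, mish and an imported math module); the shapes and ratio are illustrative:

import tensorflow as tf

# Hypothetical NCHW feature map: [batch, channels, height, width]
inputs = tf.placeholder(tf.float32, shape=[None, 32, 28, 28], name='inputs')

# The primary 1x1 conv produces ceil(64 / 2) = 32 channels; the 3x3 depthwise
# "ghost" branch generates the remaining 32, giving 64 channels after concat.
out = GhostConv('ghost1', inputs, filters=64, kernel_size=1, dw_size=3,
                ratio=2, mode='train', data_format='channels_first',
                is_training=True, activation='relu')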
Example #3
def channelAttention(input_tensor, out_dim, se_dim):
    with tf.variable_scope('channel_attention'):
        # Channel descriptors from global average pooling and global max pooling,
        # later fed through a shared two-layer 1x1-conv MLP (reuse=True below)
        x1 = tf.reduce_mean(input_tensor, axis=[1, 2], keepdims=True)
        x2 = tf.reduce_max(input_tensor, axis=[1, 2], keepdims=True)
        # out_dim = input_tensor.shape[-1]

        x1 = tf.layers.conv2d(x1,
                              se_dim, (1, 1),
                              use_bias=False,
                              name='dense1',
                              reuse=None)
        if ACTIVATION == 'swish':
            x1 = swish(x1, 'swish')
        elif ACTIVATION == 'mish':
            x1 = mish(x1, 'mish')
        else:
            x1 = tf.nn.relu(x1)
        x1 = tf.layers.conv2d(x1,
                              out_dim, (1, 1),
                              use_bias=False,
                              name='dense2',
                              reuse=None)

        x2 = tf.layers.conv2d(x2,
                              se_dim, (1, 1),
                              use_bias=False,
                              name='dense1',
                              reuse=True)
        if ACTIVATION == 'swish':
            x2 = swish(x2, 'swish')
        elif ACTIVATION == 'mish':
            x2 = mish(x2, 'mish')
        else:
            x2 = tf.nn.relu(x2)
        x2 = tf.layers.conv2d(x2,
                              out_dim, (1, 1),
                              use_bias=False,
                              name='dense2',
                              reuse=True)

        x = x1 + x2
        x = tf.nn.sigmoid(x)

        x = input_tensor * x
    return x
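A usage sketch for channelAttention, assuming channels_last input (the reductions run over axes [1, 2]) and the module-level ACTIVATION/swish/mish as above; se_dim is the bottleneck width of the shared 1x1-conv MLP and the values here are illustrative:

import tensorflow as tf

# Hypothetical NHWC feature map with 128 channels
features = tf.placeholder(tf.float32, shape=[None, 14, 14, 128], name='features')

# Average- and max-pooled descriptors share the dense1/dense2 weights
# (reuse=True above) and produce a per-channel sigmoid gate on the input.
refined = channelAttention(features, out_dim=128, se_dim=128 // 16)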
Example #4
def ConvBatchNormRelu(inputs,
                      filters,
                      kernel_size,
                      strides,
                      training,
                      momentum,
                      mode,
                      name=None,
                      padding='same',
                      data_format='channels_last',
                      activation=None,
                      bn=True,
                      use_bias=False):
    axis = 1 if data_format == 'channels_first' else -1

    if mode != "savePb":  # BN without bias
        x = tf.layers.Conv2D(
            filters,
            kernel_size=kernel_size,
            strides=strides,
            data_format=data_format,
            name=name + '_CBR_Conv2D',
            padding=padding,
            use_bias=use_bias,
            kernel_initializer=conv_kernel_initializer)(inputs)
        if bn:
            x = tf.layers.batch_normalization(x,
                                              axis=axis,
                                              training=training,
                                              momentum=momentum,
                                              name=name + '_CBR_bn')

    else:  # bias without BN
        x = tf.layers.Conv2D(
            filters,
            kernel_size=kernel_size,
            strides=strides,
            data_format=data_format,
            name=name + '_CBR_Conv2D',
            padding=padding,
            use_bias=True,
            kernel_initializer=conv_kernel_initializer)(inputs)

    if activation == 'relu':
        x = tf.nn.relu(x, name=name + '_relu')
    elif activation == 'swish':
        x = swish(x, name=name + '_swish')
    elif activation == 'mish':
        x = mish(x, name=name + '_mish')
    else:
        pass

    return x
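
# Squeeze-and-Excitation block: global average pooling, a 1x1-conv bottleneck of
# width se_dim, a second 1x1 conv back to out_dim, and a sigmoid gate that
# rescales the input channels.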
def Squeeze_excitation_layer(input_x, out_dim, se_dim, name, data_format):
    with tf.variable_scope(name):

        if data_format == 'channels_first':
            squeeze = tf.reduce_mean(input_x, [2, 3],
                                     name='gap',
                                     keepdims=True)
        else:
            squeeze = tf.reduce_mean(input_x, [1, 2],
                                     name='gap',
                                     keepdims=True)

        # excitation = tf.layers.dense(inputs=squeeze, use_bias=True, units=se_dim,
        #                              kernel_initializer=DENSE_KERNEL_INITIALIZER, name=name + '_fully_connected1')
        excitation = tf.layers.Conv2D(se_dim, (1, 1),
                                      strides=(1, 1),
                                      kernel_initializer=kernel_initializer,
                                      padding='same',
                                      data_format=data_format)(squeeze)

        if ACTIVATION == 'swish':
            excitation = swish(excitation, name='swish')
        elif ACTIVATION == 'mish':
            excitation = mish(excitation, 'mish')
        else:
            excitation = tf.nn.relu(excitation, name='relu')

        # excitation = tf.layers.dense(inputs=excitation, use_bias=True, units=out_dim,
        #                              kernel_initializer=DENSE_KERNEL_INITIALIZER, name=name + '_fully_connected2')
        excitation = tf.layers.Conv2D(out_dim,
                                      1,
                                      strides=1,
                                      kernel_initializer=kernel_initializer,
                                      padding='same',
                                      data_format=data_format)(excitation)

        excitation = tf.nn.sigmoid(excitation, name='sigmoid')

        if data_format == 'channels_first':
            excitation = tf.reshape(excitation, [-1, out_dim, 1, 1])
        else:
            excitation = tf.reshape(excitation, [-1, 1, 1, out_dim])

        scale = input_x * excitation

        return scale
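A combined usage sketch: ConvBatchNormRelu builds a conv stem and Squeeze_excitation_layer re-weights its channels. It assumes the module-level conv_kernel_initializer, kernel_initializer, ACTIVATION, swish and mish are defined; shapes, names and the reduction ratio are illustrative.

import tensorflow as tf

# Hypothetical NHWC input batch
images = tf.placeholder(tf.float32, shape=[None, 224, 224, 3], name='images')

# Conv -> BN -> ReLU stem (mode != "savePb", so BN is applied and bias is off)
stem = ConvBatchNormRelu(images, filters=32, kernel_size=3, strides=2,
                         training=True, momentum=0.9, mode='train',
                         name='stem', activation='relu')

# Channel re-weighting with a reduction ratio of 4 (se_dim = 32 // 4)
stem = Squeeze_excitation_layer(stem, out_dim=32, se_dim=8,
                                name='stem_se', data_format='channels_last')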