Example #1
    def depthwise_block(x, strides, **metaparameters):
        """ Construct a Depthwise Separable Convolution block
            x         : input to the block
            strides   : strides
            n_filters : number of filters
            alpha     : width multiplier
        """
        n_filters = metaparameters['n_filters']
        alpha     = metaparameters['alpha']
        del metaparameters['n_filters']
            
        # Apply the width filter to the number of feature maps
        filters = int(n_filters * alpha)

        # When downsampling (strides of 2), zero-pad so the 'valid' depthwise convolution halves the spatial dimensions exactly
        if strides == (2, 2):
            x = ZeroPadding2D(padding=((0, 1), (0, 1)))(x)
            padding = 'valid'
        else:
            padding = 'same'

        # Depthwise Convolution
        x = Composable.DepthwiseConv2D(x, (3, 3), strides, padding=padding, use_bias=False, 
                                       **metaparameters)
        x = BatchNormalization()(x)
        x = Composable.ReLU(x)

        # Pointwise Convolution
        x = Composable.Conv2D(x, filters, (1, 1), strides=(1, 1), padding='same', use_bias=False, 
                              **metaparameters)
        x = BatchNormalization()(x)
        x = Composable.ReLU(x)
        return x
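For reference, here is a minimal, self-contained sketch of the same depthwise separable pattern built from plain tf.keras layers, without the Composable wrapper or the metaparameters dictionary. The function name depthwise_block_plain, the regularizer-free layer settings, and the 224x224x3 input shape are illustrative assumptions, not part of the original code.

    import tensorflow as tf
    from tensorflow.keras import layers

    def depthwise_block_plain(x, n_filters, alpha=1.0, strides=(1, 1)):
        # Width multiplier scales the number of pointwise filters
        filters = int(n_filters * alpha)

        # When downsampling, zero-pad so the 'valid' depthwise conv halves the spatial size
        if strides == (2, 2):
            x = layers.ZeroPadding2D(padding=((0, 1), (0, 1)))(x)
            padding = 'valid'
        else:
            padding = 'same'

        # Depthwise 3x3 convolution
        x = layers.DepthwiseConv2D((3, 3), strides=strides, padding=padding, use_bias=False)(x)
        x = layers.BatchNormalization()(x)
        x = layers.ReLU()(x)

        # Pointwise 1x1 convolution
        x = layers.Conv2D(filters, (1, 1), strides=(1, 1), padding='same', use_bias=False)(x)
        x = layers.BatchNormalization()(x)
        x = layers.ReLU()(x)
        return x

    # Hypothetical usage on a 224x224 RGB input, downsampling to 112x112
    inputs = tf.keras.Input(shape=(224, 224, 3))
    outputs = depthwise_block_plain(inputs, n_filters=64, alpha=1.0, strides=(2, 2))
    model = tf.keras.Model(inputs, outputs)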
Example #2
    def shuffle_block(x, **metaparameters):
        ''' Construct a Shuffle block
            x           : input to the block
            n_partitions: number of groups to partition feature maps (channels) into.
            n_filters   : number of filters
            reduction   : dimensionality reduction factor (e.g., 0.25)
            reg         : kernel regularizer
        '''
        n_partitions = metaparameters['n_partitions']
        n_filters = metaparameters['n_filters']
        reduction = metaparameters['reduction']
        del metaparameters['n_filters']
        del metaparameters['n_partitions']
        if 'reg' in metaparameters:
            reg = metaparameters['reg']
        else:
            reg = ShuffleNet.reg
        if 'init_weights' in metaparameters:
            init_weights = metaparameters['init_weights']
        else:
            init_weights = ShuffleNet.init_weights

        # identity shortcut
        shortcut = x

        # pointwise group convolution, with dimensionality reduction
        x = ShuffleNet.pw_group_conv(x, n_partitions,
                                     int(reduction * n_filters),
                                     **metaparameters)
        x = Composable.ReLU(x)

        # channel shuffle layer
        x = ShuffleNet.channel_shuffle(x, n_partitions)

        # Depthwise 3x3 Convolution
        x = Composable.DepthwiseConv2D(x, (3, 3),
                                       strides=1,
                                       padding='same',
                                       use_bias=False,
                                       **metaparameters)
        x = BatchNormalization()(x)

        # pointwise group convolution, with dimensionality restoration
        x = ShuffleNet.pw_group_conv(x, n_partitions, n_filters,
                                     **metaparameters)

        # Add the identity shortcut (input added to output)
        x = Add()([shortcut, x])
        x = Composable.ReLU(x)
        return x
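The block above delegates the channel shuffle to ShuffleNet.channel_shuffle, which is not shown here. Below is a rough, self-contained sketch of how such a shuffle is commonly implemented with plain tf.keras layers (reshape the channels into groups, swap the group and per-group axes, flatten back); the function name channel_shuffle_plain and the input shape are illustrative, and the repository's actual implementation may differ.

    import tensorflow as tf
    from tensorflow.keras import layers

    def channel_shuffle_plain(x, n_partitions):
        # Reshape channels into (groups, channels-per-group), swap the two axes,
        # then flatten back so channels from different groups are interleaved.
        _, h, w, c = x.shape
        channels_per_group = c // n_partitions
        x = layers.Reshape((h, w, n_partitions, channels_per_group))(x)
        x = layers.Permute((1, 2, 4, 3))(x)
        x = layers.Reshape((h, w, c))(x)
        return x

    # Hypothetical usage: shuffle 8 channels across 2 groups
    inputs = tf.keras.Input(shape=(56, 56, 8))
    model = tf.keras.Model(inputs, channel_shuffle_plain(inputs, n_partitions=2))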
Example #3
    def attention_block(x, strides=(1, 1), **metaparameters):
        """ Construct an Attention Residual Block
            x         : input to the block
            strides   : strides
            n_filters : number of filters
            alpha     : width multiplier
            expansion : number of filters for the 1x1 expansion convolution
            squeeze   : whether to include squeeze
            activation: type of activation function
        """
        n_filters = metaparameters['n_filters']
        expansion = metaparameters['expansion']
        if 'alpha' in metaparameters:
            alpha = metaparameters['alpha']
        else:
            alpha = MobileNetV3.alpha
        if 'squeeze' in metaparameters:
            squeeze = metaparameters['squeeze']
        else:
            squeeze = False
        if 'activation' in metaparameters:
            activation = metaparameters['activation']
        else:
            activation = ReLU6
        del metaparameters['n_filters']
        del metaparameters['kernel_size']

        # Remember input
        shortcut = x

        # Apply the width filter to the number of feature maps for the pointwise convolution
        filters = int(n_filters * alpha)

        n_channels = int(x.shape[3])

        # Dimensionality Expansion
        # 1x1 linear convolution
        x = Composable.Conv2D(x,
                              expansion, (1, 1),
                              padding='same',
                              use_bias=False,
                              **metaparameters)
        x = BatchNormalization()(x)
        x = activation(x)

        # Depthwise Convolution
        x = Composable.DepthwiseConv2D(x, (3, 3),
                                       strides,
                                       padding='same',
                                       use_bias=False,
                                       **metaparameters)
        x = BatchNormalization()(x)
        x = activation(x)

        # Add squeeze (dimensionality reduction)
        if squeeze:
            x = MobileNetV3.squeeze(x, **metaparameters)

        # Linear Pointwise Convolution
        x = Composable.Conv2D(x,
                              filters, (1, 1),
                              strides=(1, 1),
                              padding='same',
                              use_bias=False,
                              **metaparameters)
        x = BatchNormalization()(x)

        # Number of input filters matches the number of output filters
        if n_channels == filters and strides == (1, 1):
            x = Add()([shortcut, x])
        return x
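For comparison, here is a compact, self-contained sketch of the same inverted-residual pattern (expand, depthwise, optional squeeze, linear projection, conditional identity shortcut) using plain tf.keras layers. The function name inverted_residual_plain, the ReLU6 activations, the squeeze-and-excitation details, and the input shape are assumptions for illustration; MobileNetV3.squeeze and the Composable helpers used above are not reproduced here.

    import tensorflow as tf
    from tensorflow.keras import layers

    def inverted_residual_plain(x, n_filters, expansion, strides=(1, 1), squeeze=False):
        shortcut = x
        n_channels = x.shape[-1]

        # Dimensionality expansion: 1x1 convolution to 'expansion' feature maps
        x = layers.Conv2D(expansion, (1, 1), padding='same', use_bias=False)(x)
        x = layers.BatchNormalization()(x)
        x = layers.ReLU(6.0)(x)

        # Depthwise 3x3 convolution
        x = layers.DepthwiseConv2D((3, 3), strides=strides, padding='same', use_bias=False)(x)
        x = layers.BatchNormalization()(x)
        x = layers.ReLU(6.0)(x)

        # Optional squeeze-and-excitation style channel attention (assumed form)
        if squeeze:
            se = layers.GlobalAveragePooling2D()(x)
            se = layers.Dense(expansion // 4, activation='relu')(se)
            se = layers.Dense(expansion, activation='sigmoid')(se)
            x = layers.Multiply()([x, layers.Reshape((1, 1, expansion))(se)])

        # Linear pointwise projection (no activation)
        x = layers.Conv2D(n_filters, (1, 1), strides=(1, 1), padding='same', use_bias=False)(x)
        x = layers.BatchNormalization()(x)

        # Identity shortcut only when spatial size and channel count are preserved
        if n_channels == n_filters and strides == (1, 1):
            x = layers.Add()([shortcut, x])
        return x

    # Hypothetical usage
    inputs = tf.keras.Input(shape=(112, 112, 16))
    outputs = inverted_residual_plain(inputs, n_filters=16, expansion=64, squeeze=True)
    model = tf.keras.Model(inputs, outputs)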