Example no. 1
    def __init__(self,
                 in_channels,
                 out_channels,
                 groups,
                 downsample,
                 ignore_group,
                 data_format="channels_last",
                 **kwargs):
        super(ShuffleUnit, self).__init__(**kwargs)
        self.data_format = data_format
        self.downsample = downsample
        mid_channels = out_channels // 4

        if downsample:
            out_channels -= in_channels

        self.compress_conv1 = conv1x1(in_channels=in_channels,
                                      out_channels=mid_channels,
                                      groups=(1 if ignore_group else groups),
                                      data_format=data_format,
                                      name="compress_conv1")
        self.compress_bn1 = BatchNorm(
            # in_channels=mid_channels,
            data_format=data_format,
            name="compress_bn1")
        self.c_shuffle = ChannelShuffle(channels=mid_channels,
                                        groups=groups,
                                        data_format=data_format,
                                        name="c_shuffle")
        self.dw_conv2 = depthwise_conv3x3(
            channels=mid_channels,
            strides=(2 if self.downsample else 1),
            data_format=data_format,
            name="dw_conv2")
        self.dw_bn2 = BatchNorm(
            # in_channels=mid_channels,
            data_format=data_format,
            name="dw_bn2")
        self.expand_conv3 = conv1x1(in_channels=mid_channels,
                                    out_channels=out_channels,
                                    groups=groups,
                                    data_format=data_format,
                                    name="expand_conv3")
        self.expand_bn3 = BatchNorm(
            # in_channels=out_channels,
            data_format=data_format,
            name="expand_bn3")
        if downsample:
            self.avgpool = AvgPool2d(pool_size=3,
                                     strides=2,
                                     padding=1,
                                     data_format=data_format,
                                     name="avgpool")
        self.activ = nn.ReLU()
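
This ShuffleUnit follows the original ShuffleNet design: a grouped 1x1 compression, a channel shuffle, a depthwise 3x3, and a grouped 1x1 expansion; on downsampling units the branch width is reduced by in_channels because the average-pooled identity is concatenated onto the branch output. The ChannelShuffle layer used above is a library class; the operation itself is just a reshape/transpose pair. A minimal stand-alone sketch in plain TensorFlow, assuming channels_last data (an illustration, not the library's implementation):

import tensorflow as tf


def channel_shuffle(x, groups):
    """Interleave channels across groups (channels_last layout assumed)."""
    height, width, channels = x.shape[1], x.shape[2], x.shape[3]
    assert channels % groups == 0
    # Split the channel axis into (groups, channels_per_group), swap the two
    # sub-axes, and flatten back so grouped convolutions can mix information.
    x = tf.reshape(x, [-1, height, width, groups, channels // groups])
    x = tf.transpose(x, [0, 1, 2, 4, 3])
    return tf.reshape(x, [-1, height, width, channels])


# Example: 8 channels shuffled across 2 groups.
y = channel_shuffle(tf.random.normal([1, 4, 4, 8]), groups=2)
print(y.shape)  # (1, 4, 4, 8)
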
Example no. 2
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 dilation,
                 activate,
                 data_format="channels_last",
                 **kwargs):
        super(DRNConv, self).__init__(**kwargs)
        self.activate = activate

        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding=padding,
                           dilation=dilation,
                           use_bias=False,
                           data_format=data_format,
                           name="conv")
        self.bn = BatchNorm(data_format=data_format, name="bn")
        if self.activate:
            self.activ = nn.ReLU()
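
DRNConv is the standard convolution / batch normalization / optional ReLU composite used throughout the dilated residual networks. The same pattern can be reproduced with stock tf.keras layers; the sketch below is an illustration, not the library code, and uses Keras' "same" padding shorthand instead of an explicit pixel count:

import tensorflow as tf
from tensorflow.keras import layers


class ConvBNBlock(tf.keras.layers.Layer):
    """Conv2D -> BatchNorm -> optional ReLU, mirroring the DRNConv pattern."""

    def __init__(self, out_channels, kernel_size, strides=1, dilation=1,
                 activate=True, **kwargs):
        super().__init__(**kwargs)
        self.activate = activate
        self.conv = layers.Conv2D(out_channels, kernel_size, strides=strides,
                                  dilation_rate=dilation, padding="same",
                                  use_bias=False)
        self.bn = layers.BatchNormalization()
        self.relu = layers.ReLU()

    def call(self, x, training=None):
        x = self.conv(x)
        x = self.bn(x, training=training)
        return self.relu(x) if self.activate else x


block = ConvBNBlock(out_channels=64, kernel_size=3)
print(block(tf.random.normal([1, 32, 32, 16])).shape)  # (1, 32, 32, 64)
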
Example no. 3
    def __init__(self,
                 in_channels,
                 out_channels,
                 strides,
                 bottleneck,
                 data_format="channels_last",
                 **kwargs):
        super(PyrUnit, self).__init__(**kwargs)
        assert (out_channels >= in_channels)
        self.data_format = data_format
        self.resize_identity = (strides != 1)
        self.identity_pad_width = out_channels - in_channels

        if bottleneck:
            self.body = PyrBottleneck(in_channels=in_channels,
                                      out_channels=out_channels,
                                      strides=strides,
                                      data_format=data_format,
                                      name="body")
        else:
            self.body = PyrBlock(in_channels=in_channels,
                                 out_channels=out_channels,
                                 strides=strides,
                                 data_format=data_format,
                                 name="body")
        self.bn = BatchNorm(data_format=data_format, name="bn")
        if self.resize_identity:
            self.identity_pool = AvgPool2d(pool_size=2,
                                           strides=strides,
                                           ceil_mode=True,
                                           data_format=data_format,
                                           name="identity_pool")
Example no. 4
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 use_ibn=False,
                 return_preact=False,
                 data_format="channels_last",
                 **kwargs):
        super(IBNPreConvBlock, self).__init__(**kwargs)
        self.use_ibn = use_ibn
        self.return_preact = return_preact

        if self.use_ibn:
            self.ibn = IBN(channels=in_channels,
                           first_fraction=0.6,
                           inst_first=False,
                           data_format=data_format,
                           name="ibn")
        else:
            self.bn = BatchNorm(data_format=data_format, name="bn")
        self.activ = nn.ReLU()
        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding=padding,
                           use_bias=False,
                           data_format=data_format,
                           name="conv")
Example no. 5
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 dilation=1,
                 groups=1,
                 use_bias=False,
                 use_bn=True,
                 bn_eps=1e-5,
                 activation=(lambda: nn.ReLU()),
                 data_format="channels_last",
                 **kwargs):
        super(MixConvBlock, self).__init__(**kwargs)
        self.activate = (activation is not None)
        self.use_bn = use_bn

        self.conv = MixConv(in_channels=in_channels,
                            out_channels=out_channels,
                            kernel_size=kernel_size,
                            strides=strides,
                            padding=padding,
                            dilation=dilation,
                            groups=groups,
                            use_bias=use_bias,
                            axis=get_channel_axis(data_format),
                            data_format=data_format,
                            name="conv")
        if self.use_bn:
            self.bn = BatchNorm(epsilon=bn_eps,
                                data_format=data_format,
                                name="bn")
        if self.activate:
            self.activ = get_activation_layer(activation)
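
MixConvBlock wraps the library's MixConv, which partitions the channels into groups and convolves each group with its own kernel size (the MixNet idea) before the shared BatchNorm and activation. The essence of the mixed convolution, sketched with stock Keras depthwise convolutions and an even channel split (the library version also supports regular grouped convolutions and uneven splits):

import tensorflow as tf
from tensorflow.keras import layers


def mix_depthwise_conv(x, kernel_sizes=(3, 5, 7), strides=1):
    """Split channels evenly, run one DepthwiseConv2D per kernel size,
    then concatenate along the channel axis (channels_last assumed)."""
    chunks = tf.split(x, num_or_size_splits=len(kernel_sizes), axis=-1)
    outs = [layers.DepthwiseConv2D(kernel_size=k, strides=strides,
                                   padding="same", use_bias=False)(c)
            for k, c in zip(kernel_sizes, chunks)]
    return tf.concat(outs, axis=-1)


y = mix_depthwise_conv(tf.random.normal([1, 32, 32, 48]))
print(y.shape)  # (1, 32, 32, 48)
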
Example no. 6
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 dilation=1,
                 groups=1,
                 use_bias=False,
                 use_ibn=False,
                 activate=True,
                 data_format="channels_last",
                 **kwargs):
        super(IBNConvBlock, self).__init__(**kwargs)
        self.activate = activate
        self.use_ibn = use_ibn

        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding=padding,
                           dilation=dilation,
                           groups=groups,
                           use_bias=use_bias,
                           data_format=data_format,
                           name="conv")
        if self.use_ibn:
            self.ibn = IBN(channels=out_channels,
                           data_format=data_format,
                           name="ibn")
        else:
            self.bn = BatchNorm(data_format=data_format, name="bn")
        if self.activate:
            self.activ = nn.ReLU()
Example no. 7
    def __init__(self,
                 in_channels,
                 data_format="channels_last",
                 **kwargs):
        super(PreActivation, self).__init__(**kwargs)
        assert (in_channels is not None)
        self.bn = BatchNorm(
            data_format=data_format,
            name="bn")
        self.activ = PReLU2()
Example no. 8
    def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(DenseBlock, self).__init__(**kwargs)
        self.fc = nn.Dense(units=out_channels,
                           input_dim=in_channels,
                           name="fc")
        self.bn = BatchNorm(data_format=data_format, name="bn")
        self.activ = nn.ReLU()
Example no. 9
def dpn_batch_norm(channels, data_format="channels_last", **kwargs):
    """
    DPN specific Batch normalization layer.

    Parameters:
    ----------
    channels : int
        Number of channels in input data.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    assert (channels is not None)
    return BatchNorm(epsilon=0.001, data_format=data_format, **kwargs)
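
The factory only pins the batch-norm epsilon at 0.001; the channels argument is asserted but otherwise unused, presumably kept so the signature matches the other normalization helpers. With the stock Keras layer the equivalent construction would simply be (illustrative, not the library's BatchNorm wrapper):

import tensorflow as tf

# DPN fixes the batch-norm epsilon at 1e-3 regardless of the channel count.
bn = tf.keras.layers.BatchNormalization(epsilon=0.001, name="dpn_bn")
print(bn(tf.random.normal([2, 8, 8, 64]), training=False).shape)  # (2, 8, 8, 64)
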
Example no. 10
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 scale_factor,
                 size,
                 bn_eps,
                 data_format="channels_last",
                 **kwargs):
        super(SBBlock, self).__init__(**kwargs)
        self.use_scale = (scale_factor > 1)

        if self.use_scale:
            self.down_scale = AvgPool2d(pool_size=scale_factor,
                                        strides=scale_factor,
                                        data_format=data_format,
                                        name="down_scale")
            self.up_scale = InterpolationBlock(scale_factor=scale_factor,
                                               out_size=size,
                                               data_format=data_format,
                                               name="up_scale")

        use_fdw = (scale_factor > 0)
        if use_fdw:
            fdwconv3x3_class = fdwconv3x3_block if kernel_size == 3 else fdwconv5x5_block
            self.conv1 = fdwconv3x3_class(
                in_channels=in_channels,
                out_channels=in_channels,
                bn_eps=bn_eps,
                activation=(lambda: PReLU2(
                    in_channels, data_format=data_format, name="activ")),
                data_format=data_format,
                name="conv1")
        else:
            self.conv1 = dwconv3x3_block(
                in_channels=in_channels,
                out_channels=in_channels,
                bn_eps=bn_eps,
                activation=(lambda: PReLU2(
                    in_channels, data_format=data_format, name="activ")),
                data_format=data_format,
                name="conv1")

        self.conv2 = conv1x1(in_channels=in_channels,
                             out_channels=out_channels,
                             data_format=data_format,
                             name="conv2")

        self.bn = BatchNorm(epsilon=bn_eps, data_format=data_format, name="bn")
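
SBBlock (from SINet) can run its depthwise convolution at a reduced resolution: when scale_factor > 1 the input is average-pooled by scale_factor, passed through the (factorized) depthwise 3x3 or 5x5 and the 1x1 projection, then interpolated back to size before the final BatchNorm. The resolution round-trip on its own, sketched in plain TensorFlow with a bilinear resize standing in for InterpolationBlock (channels_last assumed):

import tensorflow as tf


def scale_round_trip(x, scale_factor, inner_fn):
    """Average-pool down by scale_factor, apply inner_fn at low resolution,
    then resize back to the original spatial size."""
    size = (x.shape[1], x.shape[2])
    x = tf.keras.layers.AveragePooling2D(pool_size=scale_factor,
                                         strides=scale_factor)(x)
    x = inner_fn(x)
    return tf.image.resize(x, size, method="bilinear")


# Identity stand-in for the convolution stack run at 1/4 resolution.
y = scale_round_trip(tf.random.normal([1, 64, 64, 32]), scale_factor=4,
                     inner_fn=lambda t: t)
print(y.shape)  # (1, 64, 64, 32)
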
Example no. 11
    def __init__(self,
                 channels,
                 out_size,
                 bn_eps,
                 data_format="channels_last",
                 **kwargs):
        super(SBDecodeBlock, self).__init__(**kwargs)
        assert (channels is not None)
        self.data_format = data_format

        self.up = InterpolationBlock(scale_factor=2,
                                     out_size=out_size,
                                     data_format=data_format,
                                     name="up")
        self.bn = BatchNorm(epsilon=bn_eps, data_format=data_format, name="bn")
Example no. 12
    def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(PreResInitBlock, self).__init__(**kwargs)
        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=7,
                           strides=2,
                           padding=3,
                           use_bias=False,
                           data_format=data_format,
                           name="conv")
        self.bn = BatchNorm(data_format=data_format, name="bn")
        self.activ = nn.ReLU()
        self.pool = MaxPool2d(pool_size=3,
                              strides=2,
                              padding=1,
                              data_format=data_format,
                              name="pool")
Example no. 13
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 data_format="channels_last",
                 **kwargs):
        super(InceptConv, self).__init__(**kwargs)
        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding=padding,
                           use_bias=False,
                           data_format=data_format,
                           name="conv")
        self.bn = BatchNorm(epsilon=1e-3, data_format=data_format, name="bn")
        self.activ = nn.ReLU()
Example no. 14
    def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(ShuffleInitBlock, self).__init__(**kwargs)
        self.conv = conv3x3(in_channels=in_channels,
                            out_channels=out_channels,
                            strides=2,
                            data_format=data_format,
                            name="conv")
        self.bn = BatchNorm(
            # in_channels=out_channels,
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU()
        self.pool = MaxPool2d(pool_size=3,
                              strides=2,
                              padding=1,
                              data_format=data_format,
                              name="pool")
Example no. 15
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 activate,
                 data_format="channels_last",
                 **kwargs):
        super(DwsConvBlock, self).__init__(**kwargs)
        self.activate = activate

        if self.activate:
            self.activ = nn.ReLU()
        self.conv = DwsConv(in_channels=in_channels,
                            out_channels=out_channels,
                            kernel_size=kernel_size,
                            strides=strides,
                            padding=padding,
                            data_format=data_format,
                            name="conv")
        self.bn = BatchNorm(data_format=data_format, name="bn")
Example no. 16
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 num_blocks,
                 data_format="channels_last",
                 **kwargs):
        super(PolyConv, self).__init__(**kwargs)
        self.conv = Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=kernel_size,
                           strides=strides,
                           padding=padding,
                           use_bias=False,
                           data_format=data_format,
                           name="conv")
        self.bns = []
        for i in range(num_blocks):
            self.bns.append(
                BatchNorm(data_format=data_format, name="bn{}".format(i + 1)))
        self.activ = nn.ReLU()
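
PolyConv shares one convolution's weights across several blocks of a PolyNet stage while keeping a separate BatchNorm per block, so the forward pass has to select self.bns[index] for the branch being evaluated. A hypothetical stand-alone version of the same idea with stock Keras layers (the index-taking call signature is what the stored list of BatchNorms implies):

import tensorflow as tf
from tensorflow.keras import layers


class SharedConvPerBranchBN(tf.keras.layers.Layer):
    """One shared Conv2D, one BatchNormalization per branch index."""

    def __init__(self, out_channels, kernel_size, num_blocks, **kwargs):
        super().__init__(**kwargs)
        self.conv = layers.Conv2D(out_channels, kernel_size, padding="same",
                                  use_bias=False)
        self.bns = [layers.BatchNormalization(name="bn{}".format(i + 1))
                    for i in range(num_blocks)]
        self.relu = layers.ReLU()

    def call(self, x, index, training=None):
        x = self.conv(x)                            # weights shared by every branch
        x = self.bns[index](x, training=training)   # branch-specific statistics
        return self.relu(x)


layer = SharedConvPerBranchBN(out_channels=32, kernel_size=3, num_blocks=3)
print(layer(tf.random.normal([1, 16, 16, 8]), index=1).shape)  # (1, 16, 16, 32)
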
Example no. 17
    def __init__(self,
                 in_channels,
                 out_channels,
                 downsample,
                 use_se,
                 use_residual,
                 data_format="channels_last",
                 **kwargs):
        super(ShuffleUnit, self).__init__(**kwargs)
        self.data_format = data_format
        self.downsample = downsample
        self.use_se = use_se
        self.use_residual = use_residual
        mid_channels = out_channels // 2

        self.compress_conv1 = conv1x1(
            in_channels=(in_channels if self.downsample else mid_channels),
            out_channels=mid_channels,
            data_format=data_format,
            name="compress_conv1")
        self.compress_bn1 = BatchNorm(
            # in_channels=mid_channels,
            data_format=data_format,
            name="compress_bn1")
        self.dw_conv2 = depthwise_conv3x3(
            channels=mid_channels,
            strides=(2 if self.downsample else 1),
            data_format=data_format,
            name="dw_conv2")
        self.dw_bn2 = BatchNorm(
            # in_channels=mid_channels,
            data_format=data_format,
            name="dw_bn2")
        self.expand_conv3 = conv1x1(in_channels=mid_channels,
                                    out_channels=mid_channels,
                                    data_format=data_format,
                                    name="expand_conv3")
        self.expand_bn3 = BatchNorm(
            # in_channels=mid_channels,
            data_format=data_format,
            name="expand_bn3")
        if self.use_se:
            self.se = SEBlock(channels=mid_channels,
                              data_format=data_format,
                              name="se")
        if downsample:
            self.dw_conv4 = depthwise_conv3x3(channels=in_channels,
                                              strides=2,
                                              data_format=data_format,
                                              name="dw_conv4")
            self.dw_bn4 = BatchNorm(
                # in_channels=in_channels,
                data_format=data_format,
                name="dw_bn4")
            self.expand_conv5 = conv1x1(in_channels=in_channels,
                                        out_channels=mid_channels,
                                        data_format=data_format,
                                        name="expand_conv5")
            self.expand_bn5 = BatchNorm(
                # in_channels=mid_channels,
                data_format=data_format,
                name="expand_bn5")

        self.activ = nn.ReLU()
        self.c_shuffle = ChannelShuffle(channels=out_channels,
                                        groups=2,
                                        data_format=data_format,
                                        name="c_shuffle")