Example #1
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        self.use_res_connect = self.stride == 1 and inp == oup

        self.conv = nn.Sequential(
            # pw
            Conv2dWithTime(inp, inp * expand_ratio, 1, 1, 0, bias=False),
            BatchNorm2dWithTime(inp * expand_ratio),
            ReLU6WithTime(inplace=True),
            # dw
            Conv2dWithTime(inp * expand_ratio,
                           inp * expand_ratio,
                           3,
                           stride,
                           1,
                           groups=inp * expand_ratio,
                           bias=False),
            BatchNorm2dWithTime(inp * expand_ratio),
            ReLU6WithTime(inplace=True),
            # pw-linear
            Conv2dWithTime(inp * expand_ratio, oup, 1, 1, 0, bias=False),
            BatchNorm2dWithTime(oup),
        )
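Note: the `*WithTime` layers used throughout these examples (Conv2dWithTime, BatchNorm2dWithTime, ReLU6WithTime, ...) are not part of torch.nn; they appear to be thin wrappers around the standard modules that record per-layer inference time. A minimal sketch of what such a wrapper might look like; the `elapsed` attribute and the `layer_type` class attribute are assumptions, not the actual API of whatever library these snippets come from:

import time
import torch.nn as nn

class Conv2dWithTime(nn.Conv2d):
    # Hypothetical sketch: an nn.Conv2d that times each forward pass.
    layer_type = 'conv'  # assumed; referenced by the naming helpers in Examples #4-#5

    def forward(self, x):
        start = time.perf_counter()
        out = super().forward(x)
        self.elapsed = time.perf_counter() - start  # assumed attribute name
        return out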
Example #2
 def _conv_bn(in_channels, out_channels, stride):
     return nn.Sequential(
         Conv2dWithTime(in_channels=in_channels, out_channels=out_channels,
                        kernel_size=3, stride=stride, padding=1, bias=False),
         BatchNorm2dWithTime(out_channels),
         ReLUWithTime(inplace=True)
     )
Example #3
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        self.use_res_connect = self.stride == 1 and inp == oup

        # self.conv = nn.Sequential(
        #     # pw
        #     Conv2dWithTime(inp, inp * expand_ratio, 1, 1, 0, bias=False),
        #     BatchNorm2dWithTime(inp * expand_ratio),
        #     ReLU6WithTime(inplace=True),
        #     # dw
        #     Conv2dWithTime(inp * expand_ratio, inp * expand_ratio, 3,
        #                    stride, 1, groups=inp * expand_ratio, bias=False),
        #     BatchNorm2dWithTime(inp * expand_ratio),
        #     ReLU6WithTime(inplace=True),
        #     # pw-linear
        #     Conv2dWithTime(inp * expand_ratio, oup, 1, 1, 0, bias=False),
        #     BatchNorm2dWithTime(oup),
        # )
        self.pw = nn.Sequential(
            Conv2dWithTime(inp, inp * expand_ratio, 1, 1, 0, bias=False),
            BatchNorm2dWithTime(inp * expand_ratio),
            ReLU6WithTime(inplace=True))
        self.dw = nn.Sequential(
            Conv2dWithTime(inp * expand_ratio,
                           inp * expand_ratio,
                           3,
                           stride,
                           1,
                           groups=inp * expand_ratio,
                           bias=False),
            BatchNorm2dWithTime(inp * expand_ratio),
            ReLU6WithTime(inplace=True))
        self.g = inp if self.use_res_connect else 1
        self.pw_linear = nn.Sequential(
            Conv2dWithTime(inp * expand_ratio,
                           oup,
                           1,
                           1,
                           0,
                           groups=self.g,
                           bias=False), BatchNorm2dWithTime(oup))
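The forward method for this split variant is not included in the snippet; presumably it chains the three stages and applies the shortcut when use_res_connect holds, roughly as in this inferred sketch:

    def forward(self, x):
        out = self.pw_linear(self.dw(self.pw(x)))
        # add the shortcut only when stride == 1 and inp == oup
        return x + out if self.use_res_connect else out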
Example #4
 def _conv_bn(in_channels, out_channels, stride, layer_name):
     conv = Conv2dWithTime(in_channels=in_channels, out_channels=out_channels,
                           kernel_size=3, stride=stride, padding=1, bias=False)
     bn = BatchNorm2dWithTime(out_channels)
     relu = ReLU6WithTime(inplace=True)
     conv.name = layer_name + '_' + conv.layer_type
     bn.name = layer_name + '_' + bn.layer_type
     relu.name = layer_name + '_' + relu.layer_type
     return nn.Sequential(
         conv, bn, relu
     )
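A hypothetical call, building a named stem block (channel sizes are illustrative only):

 stem = _conv_bn(in_channels=3, out_channels=32, stride=2, layer_name='stem')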
Example #5
 def _conv_dw(in_channels, out_channels, stride, layer_name):
     conv_dw = Conv2dWithTime(in_channels=in_channels, out_channels=in_channels,
                              kernel_size=3, stride=stride, padding=1, groups=in_channels,
                              bias=False)
     bn = BatchNorm2dWithTime(in_channels)
     relu_1 = ReLU6WithTime(inplace=True)
     conv_pw = Conv2dWithTime(in_channels=in_channels, out_channels=out_channels,
                              kernel_size=1, stride=1, padding=0)
     relu_2 = ReLU6WithTime(inplace=True)
     conv_dw.name = layer_name + '_' + conv_dw.layer_type
     bn.name = layer_name + '_' + bn.layer_type
     relu_1.name = layer_name + '_' + relu_1.layer_type + '_after_dw'
     conv_pw.name = layer_name + '_' + conv_pw.layer_type
     relu_2.name = layer_name + '_' + relu_2.layer_type + '_after_pw'
     return nn.Sequential(
         conv_dw, bn, relu_1, conv_pw, relu_2
     )
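_conv_dw factors a standard convolution into a 3x3 depthwise step (groups=in_channels, so each channel is filtered independently) followed by a 1x1 pointwise step, the depthwise-separable pattern of MobileNet v1. A hypothetical call mirroring the one above:

 block = _conv_dw(in_channels=32, out_channels=64, stride=1, layer_name='dw1')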
Example #6
    def _make_grouped_conv1x1(in_channels,
                              out_channels,
                              groups,
                              batch_norm=True,
                              relu=False):

        modules = OrderedDict()

        conv = conv1x1(in_channels, out_channels, groups=groups)
        modules['conv1x1'] = conv

        if batch_norm:
            modules['batch_norm'] = BatchNorm2dWithTime(out_channels)
        if relu:
            modules['relu'] = ReLU6WithTime(True)
        if len(modules) > 1:
            return nn.Sequential(modules)
        else:
            return conv
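This helper relies on OrderedDict and a conv1x1 function that the snippet does not show; presumably the file imports OrderedDict from collections and defines conv1x1 roughly as in this sketch (signature and bias setting are assumptions):

from collections import OrderedDict

def conv1x1(in_channels, out_channels, groups=1):
    # assumed helper: a 1x1 grouped convolution built on the timed wrapper
    return Conv2dWithTime(in_channels, out_channels, kernel_size=1,
                          stride=1, groups=groups, bias=False)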
Example #7
    def __init__(self,
                 in_channels,
                 out_channels,
                 groups=3,
                 grouped_conv=True,
                 combine='add'):

        super(ShuffleNetUnit, self).__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.grouped_conv = grouped_conv
        self.combine = combine
        self.groups = groups
        self.bottleneck_channels = self.out_channels // 4

        # define the type of ShuffleUnit
        if self.combine == 'add':
            # ShuffleUnit Figure 2b
            self.depthwise_stride = 1
            self._combine_func = self._add
        elif self.combine == 'concat':
            # ShuffleUnit Figure 2c
            self.depthwise_stride = 2
            self._combine_func = self._concat
            self.avg_pool = AvgPool2dWithTime(kernel_size=3,
                                              stride=2,
                                              padding=1)

            # ensure output of concat has the same channels as
            # original output channels.
            self.out_channels -= self.in_channels
        else:
            raise ValueError("Cannot combine tensors with \"{}\". "
                             "Only \"add\" and \"concat\" are "
                             "supported.".format(self.combine))

        # Use a 1x1 grouped or non-grouped convolution to reduce input channels
        # to bottleneck channels, as in a ResNet bottleneck module.
        # NOTE: Do not use group convolution for the first conv1x1 in Stage 2.
        self.first_1x1_groups = self.groups if grouped_conv else 1

        self.g_conv_1x1_compress = self._make_grouped_conv1x1(
            self.in_channels,
            self.bottleneck_channels,
            self.first_1x1_groups,
            batch_norm=True,
            relu=True)

        # 3x3 depthwise convolution followed by batch normalization
        self.depthwise_conv3x3 = conv3x3(self.bottleneck_channels,
                                         self.bottleneck_channels,
                                         stride=self.depthwise_stride,
                                         groups=self.bottleneck_channels)
        self.bn_after_depthwise = BatchNorm2dWithTime(self.bottleneck_channels)

        # Use 1x1 grouped convolution to expand from
        # bottleneck_channels to out_channels
        self.g_conv_1x1_expand = self._make_grouped_conv1x1(
            self.bottleneck_channels,
            self.out_channels,
            self.groups,
            batch_norm=True,
            relu=False)
        self.relu = ReLU6WithTime(True)
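The _add and _concat combine functions referenced in __init__ are not shown in the snippet; in typical ShuffleNet implementations they are simple static methods along these lines (assuming torch is imported):

    @staticmethod
    def _add(x, out):
        # residual combine, ShuffleUnit Figure 2b
        return x + out

    @staticmethod
    def _concat(x, out):
        # channel-wise concatenation, ShuffleUnit Figure 2c
        return torch.cat((x, out), 1)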
Example #8
def conv_1x1_bn(inp, oup):
    return nn.Sequential(Conv2dWithTime(inp, oup, 1, 1, 0, bias=False),
                         BatchNorm2dWithTime(oup), ReLU6WithTime(inplace=True))
Example #9
def conv_bn(inp, oup, stride):
    return nn.Sequential(Conv2dWithTime(inp, oup, 3, stride, 1, bias=False),
                         BatchNorm2dWithTime(oup), ReLU6WithTime(inplace=True))
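Examples #8 and #9 follow the usual MobileNetV2 convention: conv_bn builds the 3x3 stem and conv_1x1_bn the final 1x1 expansion. A hypothetical use with MobileNetV2's standard channel sizes:

stem = conv_bn(3, 32, stride=2)    # 3x3 stem, downsamples by 2
head = conv_1x1_bn(320, 1280)      # final 1x1 expansion before pooling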