# Beispiel #1
# 0
    def _make_layers(self):
        """Build the backbone as an ``nn.Sequential``.

        The stem is a 3 -> cfg[0] SpConvBlock; every subsequent ``cfg``
        entry becomes an SpMbBlock.  The layer whose 1-based index equals
        ``self.dummy_layer`` is created with ``dummy=True`` (index 0
        selects the stem itself).

        Returns:
            nn.Sequential containing the stem and all mobile blocks.
        """
        layers = []

        in_planes = self.cfg[0]
        # The stem is the "dummy" layer iff dummy_layer == 0; both former
        # branches were otherwise identical, so fold them into one call.
        layers.append(
            SpConvBlock(3, in_planes, kernel_size=3, stride=1, padding=1,
                        activation=self.activation,
                        dummy=(self.dummy_layer == 0)))

        for i, x in enumerate(self.cfg[1:]):
            # Exactly one block (1-based index dummy_layer) gets dummy=True.
            add_dummy = (i + 1) == self.dummy_layer
            # Downsample (stride 2) at these fixed block positions.
            st = 2 if (i + 1) in (2, 4, 6, 12) else 1
            # cfg entries are either an int (out_planes) or a sequence whose
            # first element is out_planes.
            out_planes = x if isinstance(x, int) else x[0]

            layers.append(SpMbBlock(in_planes, out_planes, stride=st,
                                    activation=self.activation,
                                    dummy=add_dummy))
            in_planes = out_planes
        return nn.Sequential(*layers)
class SpMbBlock(nn.Module):
    '''Depthwise conv + Pointwise conv.

    A 3x3 depthwise convolution (BN + activation) followed by a
    splitting-aware 1x1 pointwise SpConvBlock.
    '''

    def __init__(self, in_planes, out_planes, stride=1, activation='swish',
                 dummy=False):
        # `dummy` was missing here although _make_layers calls
        # SpMbBlock(..., dummy=add_dummy); accept it (default False keeps
        # existing callers working) and forward it to the pointwise conv,
        # matching the other SpMbBlock variant in this file.
        super(SpMbBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes,
                               in_planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=in_planes,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)

        self.sp_conv = SpConvBlock(in_planes,
                                   out_planes,
                                   kernel_size=1,
                                   stride=1,
                                   padding=0,
                                   activation=activation,
                                   dummy=dummy)

        self.nn_act = Swish() if activation == 'swish' else nn.ReLU(
            inplace=True)

    def forward(self, x):
        # depthwise conv -> BN -> activation, then the pointwise block
        out = self.nn_act(self.bn1(self.conv1(x)))
        out = self.sp_conv(out)
        return out

    def sp_forward(self, x):
        # Same trunk as forward, but the splitting-aware pointwise pass.
        out = self.nn_act(self.bn1(self.conv1(x)))
        out = self.sp_conv.sp_forward(out)
        return out

    def reset_yv_(self):
        # Delegate state reset to the splitting-aware conv.
        self.sp_conv.reset_yv_()
 def __init__(self, inp, oup, kernel_size, stride, padding):
     """Thin wrapper: all work is delegated to one SpConvBlock."""
     super(ConvBlock, self).__init__()
     self.sp_conv = SpConvBlock(inp,
                                oup,
                                kernel_size=kernel_size,
                                stride=stride,
                                padding=padding)
# Beispiel #4
# 0
 def __init__(self, in_planes, out_planes, stride=1, activation='swish', dummy = False):
     # Depthwise 3x3 conv (+BN, activation) followed by a splitting-aware
     # 1x1 pointwise SpConvBlock.  `dummy` is forwarded to the pointwise
     # conv only.
     super(SpMbBlock, self).__init__()
     self.conv1 = nn.Conv2d(in_planes, in_planes, kernel_size=3, stride=stride, padding=1, groups=in_planes, bias=False)
     self.bn1 = nn.BatchNorm2d(in_planes)
     self.sp_conv = SpConvBlock(in_planes, out_planes, kernel_size=1, stride=1, padding=0, activation=activation, dummy = dummy)
     # NOTE(review): self.dummy is set to an empty list rather than the
     # `dummy` flag — looks suspicious; confirm its intended use elsewhere.
     self.dummy = []
     self.nn_act = Swish() if activation == 'swish' else nn.ReLU(inplace=True)
    def __init__(self,
                 in_channels,
                 dw_channels,
                 out_channels,
                 stride,
                 identity,
                 no_expand=False):
        """Inverted residual block built from SpConvBlocks.

        Args:
            in_channels: channels entering the block.
            dw_channels: width of the depthwise stage (overridden to
                ``in_channels`` when ``no_expand`` is True).
            out_channels: channels produced by the final pointwise conv.
            stride: depthwise stride; must be 1 or 2.
            identity: if True, add a 1x1-conv residual from input to output.
            no_expand: skip the first (expansion) pointwise layer.
        """
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        self.no_expand = no_expand
        self.identity = identity

        if self.no_expand:
            # Without expansion the depthwise stage runs at the input width.
            dw_channels = in_channels
            assert not self.identity, 'no residual for the first mb block'
        if self.identity:
            # 1x1 projection so the residual matches out_channels.
            self.residual = nn.Conv2d(in_channels,
                                      out_channels,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0,
                                      bias=False)

        if not self.no_expand:
            # first pointwise layer
            self.t_pw_conv = SpConvBlock(in_channels,
                                         dw_channels,
                                         kernel_size=1,
                                         stride=1,
                                         padding=0)

        # depthwise layer with stride
        self.dw_conv = nn.Conv2d(dw_channels,
                                 dw_channels,
                                 3,
                                 stride,
                                 1,
                                 groups=dw_channels,
                                 bias=False)
        self.dw_bn = nn.BatchNorm2d(dw_channels)

        # splitting aware conv block, no activation
        self.b_pw_conv = SpConvBlock(dw_channels,
                                     out_channels,
                                     kernel_size=1,
                                     stride=1,
                                     padding=0,
                                     activation=None)

        self.act_func = Swish()
class ConvBlock(nn.Module):
    """Plain convolution block that simply wraps a single SpConvBlock."""

    def __init__(self, inp, oup, kernel_size, stride, padding):
        super(ConvBlock, self).__init__()
        self.sp_conv = SpConvBlock(inp,
                                   oup,
                                   kernel_size=kernel_size,
                                   stride=stride,
                                   padding=padding)

    def forward(self, x):
        # Standard forward: delegate straight to the wrapped conv.
        return self.sp_conv(x)

    def sp_forward(self, x):
        # Splitting-aware forward path of the wrapped conv.
        return self.sp_conv.sp_forward(x)

    def reset_yv_(self):
        # Delegate state reset to the wrapped conv.
        self.sp_conv.reset_yv_()
class SpMbBlock(nn.Module):
    """Depthwise 3x3 conv + splitting-aware pointwise 1x1 conv."""

    def __init__(self, in_planes, out_planes, stride=1):
        super(SpMbBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes,
                               in_planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=in_planes,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)

        self.sp_conv = SpConvBlock(in_planes,
                                   out_planes,
                                   kernel_size=1,
                                   stride=1,
                                   padding=0)

    def forward(self, x):
        # depthwise -> BN -> swish, then the pointwise block
        return self.sp_conv(swish(self.bn1(self.conv1(x))))

    def sp_forward(self, x):
        # same trunk, but via the splitting-aware pointwise pass
        return self.sp_conv.sp_forward(swish(self.bn1(self.conv1(x))))

    def reset_yv_(self):
        # Delegate state reset to the splitting-aware conv.
        self.sp_conv.reset_yv_()
    def _make_layers(self):
        """Assemble stem + mobile blocks from ``self.cfg`` into an nn.Sequential."""
        stem_planes = self.cfg[0]
        # Stem: 3-channel input, stride-2 3x3 conv.
        blocks = [SpConvBlock(3, stem_planes, kernel_size=3, stride=2, padding=1)]

        prev = stem_planes
        for entry in self.cfg[1:]:
            # An int entry means stride 1; otherwise entry is (out_planes, stride).
            if isinstance(entry, int):
                planes, step = entry, 1
            else:
                planes, step = entry[0], entry[1]
            blocks.append(SpMbBlock(prev, planes, step))
            prev = planes
        return nn.Sequential(*blocks)
class InvertedResidual(nn.Module):
    """Inverted residual block (expand -> depthwise -> project) built from
    SpConvBlocks, with an optional 1x1-conv residual connection.
    """

    def __init__(self,
                 in_channels,
                 dw_channels,
                 out_channels,
                 stride,
                 identity,
                 no_expand=False):
        """
        Args:
            in_channels: channels entering the block.
            dw_channels: width of the depthwise stage (overridden to
                ``in_channels`` when ``no_expand`` is True).
            out_channels: channels produced by the final pointwise conv.
            stride: depthwise stride; must be 1 or 2.
            identity: if True, add a 1x1-conv residual from input to output.
            no_expand: skip the first (expansion) pointwise layer.
        """
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        self.no_expand = no_expand
        self.identity = identity

        if self.no_expand:
            # Without expansion the depthwise stage runs at the input width.
            dw_channels = in_channels
            assert not self.identity, 'no residual for the first mb block'
        if self.identity:
            # 1x1 projection so the residual matches out_channels.
            self.residual = nn.Conv2d(in_channels,
                                      out_channels,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0,
                                      bias=False)

        if not self.no_expand:
            # first pointwise layer
            self.t_pw_conv = SpConvBlock(in_channels,
                                         dw_channels,
                                         kernel_size=1,
                                         stride=1,
                                         padding=0)

        # depthwise layer with stride
        self.dw_conv = nn.Conv2d(dw_channels,
                                 dw_channels,
                                 3,
                                 stride,
                                 1,
                                 groups=dw_channels,
                                 bias=False)
        self.dw_bn = nn.BatchNorm2d(dw_channels)

        # splitting aware conv block, no activation
        self.b_pw_conv = SpConvBlock(dw_channels,
                                     out_channels,
                                     kernel_size=1,
                                     stride=1,
                                     padding=0,
                                     activation=None)

        self.act_func = Swish()

    def forward(self, x):
        """Standard forward: (expand) -> depthwise+BN+Swish -> project."""
        if not self.no_expand:
            out = self.t_pw_conv(x)
        else:
            out = x

        out = self.act_func(self.dw_bn(self.dw_conv(out)))
        out = self.b_pw_conv(out)
        if self.identity:
            # Residual uses the 1x1 projection of the ORIGINAL input.
            return out + self.residual(x)
        return out

    def sp_forward(self, x):
        """Same path as forward, but the pointwise convs use their
        splitting-aware sp_forward pass; the depthwise stage is unchanged."""
        if not self.no_expand:
            out = self.t_pw_conv.sp_forward(x)
        else:
            out = x
        out = self.act_func(self.dw_bn(self.dw_conv(out)))
        out = self.b_pw_conv.sp_forward(out)
        if self.identity:
            return out + self.residual(x)
        return out

    def reset_yv_(self, ):
        # Reset the splitting-aware state of every SpConvBlock in the block.
        if not self.no_expand:
            self.t_pw_conv.reset_yv_()
        self.b_pw_conv.reset_yv_()
    def __init__(self, in_planes, out_planes, stride=1):
        # Depthwise 3x3 conv (stride applied here) + BN, followed by a
        # splitting-aware 1x1 pointwise SpConvBlock.
        super(SpMbBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, in_planes, kernel_size=3, stride=stride, padding=1, groups=in_planes, bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)

        self.sp_conv = SpConvBlock(in_planes, out_planes, kernel_size=1, stride=1, padding=0)