    def __init__(self,
                 inplanes,
                 outplanes,
                 innerplanes,
                 stride=1,
                 dilation=1,
                 group=1,
                 downsample=None,
                 cfg_prune=None):
        super().__init__()
        # In original resnet, stride=2 is on 1x1.
        # In fb.torch resnet, stride=2 is on 3x3.
        (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1,
                                                                       stride)
        self.stride = stride
        if cfg_prune is None:
            self.conv1 = nn.Conv2d(inplanes,
                                   innerplanes,
                                   kernel_size=3,
                                   stride=str1x1,
                                   padding=1,
                                   bias=False)
            if cfg.REID.BN_COMPLETE:
                self.bn1 = nn.BatchNorm2d(innerplanes)
            else:
                self.bn1 = mynn.AffineChannel2d(innerplanes)

            self.conv2 = nn.Conv2d(innerplanes,
                                   innerplanes,
                                   kernel_size=3,
                                   stride=str3x3,
                                   bias=False,
                                   padding=1,
                                   dilation=dilation,
                                   groups=group)
            if cfg.REID.BN_COMPLETE:
                self.bn2 = nn.BatchNorm2d(innerplanes)
            else:
                self.bn2 = mynn.AffineChannel2d(innerplanes)
        else:
            self.conv1 = nn.Conv2d(inplanes,
                                   cfg_prune,
                                   kernel_size=3,
                                   stride=str1x1,
                                   padding=1,
                                   bias=False)
            self.bn1 = mynn.AffineChannel2d(cfg_prune)

            self.conv2 = nn.Conv2d(cfg_prune,
                                   innerplanes,
                                   kernel_size=3,
                                   stride=str3x3,
                                   bias=False,
                                   padding=1,
                                   dilation=dilation,
                                   groups=group)
            self.bn2 = mynn.AffineChannel2d(innerplanes)

        self.downsample = downsample
        self.relu = nn.ReLU(inplace=True)
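mynn.AffineChannel2d, used throughout these examples in place of nn.BatchNorm2d, is the Detectron-style "frozen BatchNorm": a fixed per-channel affine transform whose statistics have been folded into a scale and a shift. A minimal, self-contained sketch of that idea (the real mynn implementation may differ in details such as initialization):

import torch
import torch.nn as nn

class AffineChannel2d(nn.Module):
    """Per-channel affine y = weight * x + bias, i.e. BatchNorm2d with frozen statistics."""

    def __init__(self, num_features):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(num_features))
        self.bias = nn.Parameter(torch.zeros(num_features))

    def forward(self, x):
        # x has shape (N, C, H, W); broadcast the per-channel scale and shift.
        return x * self.weight.view(1, -1, 1, 1) + self.bias.view(1, -1, 1, 1)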
Example #2
    def __init__(self, inplanes, outplanes, innerplanes, stride=1, dilation=1, group=1,
                 downsample=None, attention=False):
        super().__init__()
        # In original resnet, stride=2 is on 1x1.
        # In fb.torch resnet, stride=2 is on 3x3.
        (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1, stride)
        self.stride = stride
        self.attention = attention
        self.conv1 = nn.Conv2d(
            inplanes, innerplanes, kernel_size=1, stride=str1x1, bias=False)
        self.bn1 = mynn.AffineChannel2d(innerplanes)

        self.conv2 = nn.Conv2d(
            innerplanes, innerplanes, kernel_size=3, stride=str3x3, bias=False,
            padding=1 * dilation, dilation=dilation, groups=group)
        self.bn2 = mynn.AffineChannel2d(innerplanes)

        self.conv3 = nn.Conv2d(
            innerplanes, outplanes, kernel_size=1, stride=1, bias=False)
        self.bn3 = mynn.AffineChannel2d(outplanes)

        self.downsample = downsample
        self.relu = nn.ReLU(inplace=True)
        if self.attention:
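            # CBAM on the block output channels (innerplanes * 4 matches
            # outplanes for the standard expansion-4 bottleneck), reduction 16.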
            self.cbam = CBAM(innerplanes*4, 16)
def conv_bn(inp, oup, stride):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        mynn.AffineChannel2d(oup),
        # nn.BatchNorm2d(oup),
        nn.ReLU6(inplace=True),
    )
Example #4
def add_stage(inplanes, planes, nblocks, stride=1, dilation=1):
    """Make a stage consist of `nblocks` residual blocks.
    Returns:
        - stage module: an nn.Sequentail module of residual blocks
        - final output dimension
    """
    block_func = globals()[cfg.RESNETS.TRANS_FUNC]
    downsample = None
    if stride != 1 or inplanes != planes * block_func.expansion:
        downsample = nn.Sequential(
            nn.Conv2d(inplanes,
                      planes * block_func.expansion,
                      kernel_size=1,
                      stride=stride,
                      bias=False),
            mynn.AffineChannel2d(planes * block_func.expansion),
        )

    layers = []
    layers.append(
        block_func(inplanes,
                   planes,
                   stride,
                   dilation=dilation,
                   group=cfg.RESNETS.NUM_GROUPS,
                   downsample=downsample))
    inplanes = planes * block_func.expansion
    for i in range(1, nblocks):
        layers.append(block_func(inplanes, planes, dilation=dilation))

    return nn.Sequential(*layers), inplanes
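A hypothetical usage sketch (assuming the vanilla ResNet-50 setting, where the configured transform is the expansion-4 bottleneck and cfg.RESNETS.NUM_GROUPS * cfg.RESNETS.WIDTH_PER_GROUP == 64), mirroring how res2 is built in the constructor of the next example:

# Illustration only: build the conv2_x stage of a ResNet-50
# (3 bottleneck blocks, stride 1, dilation 1).
res2, dim_in = add_stage(inplanes=64, planes=64, nblocks=3)
# dim_in is now planes * block_func.expansion == 256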
Example #5
    def __init__(self, block_counts):
        super().__init__()
        self.block_counts = block_counts
        self.convX = len(block_counts) + 1
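        # Conv-layer count: 3 convs per bottleneck block, plus conv1 and the
        # final fc (+2). When only res2-res4 are built here (convX == 4, with
        # res5 living in a separate head), the 3 res5 blocks are still counted
        # so the canonical depth (e.g. 50) is reported.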
        self.num_layers = (sum(block_counts) + 3 * (self.convX == 4)) * 3 + 2

        self.res1 = nn.Sequential(
            OrderedDict([('conv1', nn.Conv2d(3, 64, 7, stride=2, padding=3, bias=False)),
                         ('bn1', mynn.AffineChannel2d(64)),
                         ('relu', nn.ReLU(inplace=True)),
                         # ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True))]))
                         ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))]))
        dim_in = 64
        dim_bottleneck = cfg.RESNETS.NUM_GROUPS * cfg.RESNETS.WIDTH_PER_GROUP
        self.res2, dim_in = add_stage(dim_in, dim_bottleneck, block_counts[0])
        self.res3, dim_in = add_stage(dim_in, dim_bottleneck * 2, block_counts[1], stride=2)
        self.res4, dim_in = add_stage(dim_in, dim_bottleneck * 4, block_counts[2], stride=2)
        if len(block_counts) == 4:
            if cfg.RESNETS.RES5_DILATION != 1:
                stride = 1
            else:
                stride = 2
            self.res5, dim_in = add_stage(dim_in, dim_bottleneck * 8, block_counts[3],
                                          stride, cfg.RESNETS.RES5_DILATION)
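            # Effective stride: stem (2) * maxpool (2) * res3/res4/res5 (2 each)
            # = 32. With RES5_DILATION != 1, res5 keeps stride 1, so the scale
            # becomes 1/32 * dilation (e.g. 1/16 for dilation 2).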
            self.spatial_scale = 1 / 32 * cfg.RESNETS.RES5_DILATION
        else:
            self.spatial_scale = 1 / 16  # final feature scale wrt. original image scale

        self.dim_out = dim_in

        self._init_modules()
Example #6
def basic_bn_stem():
    return nn.Sequential(OrderedDict([
        ('conv1', nn.Conv2d(3, 64, 7, stride=2, padding=3, bias=False)),
        ('bn1', mynn.AffineChannel2d(64)),
        ('relu', nn.ReLU(inplace=True)),
        # ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True))]))
        ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))]))
def basic_bn_shortcut(inplanes, outplanes, stride):
    if cfg.RESNETS.SSN:
        return nn.Sequential(
            nn.Conv2d(inplanes,
                      outplanes,
                      kernel_size=1,
                      stride=stride,
                      bias=False),
            SSN2d(outplanes, using_moving_average=True),
        )
    else:
        if cfg.REID.BN_COMPLETE:
            return nn.Sequential(
                nn.Conv2d(inplanes,
                          outplanes,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(outplanes),
            )
        else:
            return nn.Sequential(
                nn.Conv2d(inplanes,
                          outplanes,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                mynn.AffineChannel2d(outplanes),
            )
Example #8
def basic_bn_shortcut(inplanes, outplanes, stride):
    return nn.Sequential(
        nn.Conv2d(inplanes,
                  outplanes,
                  kernel_size=1,
                  stride=stride,
                  bias=False),
        mynn.AffineChannel2d(outplanes),
    )
Example #9
def basic_bn_stem():
    #if cfg.LESION.LESION_ENABLED:
        #input_dim = cfg.LESION.SLICE_NUM
    #else:
    input_dim = 3
    return nn.Sequential(OrderedDict([
        ('conv1', nn.Conv2d(input_dim, 64, 7, stride=2, padding=3, bias=False)),
        ('bn1', mynn.AffineChannel2d(64)),
        ('relu', nn.ReLU(inplace=True)),
        ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))]))
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        self.use_res_connect = self.stride == 1 and inp == oup

        self.conv = nn.Sequential(
            # pw
            nn.Conv2d(inp, inp * expand_ratio, 1, 1, 0, bias=False),
            mynn.AffineChannel2d(inp * expand_ratio),
            # nn.BatchNorm2d(inp * expand_ratio),
            nn.ReLU6(inplace=True),
            # dw
            nn.Conv2d(inp * expand_ratio, inp * expand_ratio, 3, stride, 1, groups=inp * expand_ratio, bias=False),
            mynn.AffineChannel2d(inp * expand_ratio),
            # nn.BatchNorm2d(inp * expand_ratio),
            nn.ReLU6(inplace=True),
            # pw-linear
            nn.Conv2d(inp * expand_ratio, oup, 1, 1, 0, bias=False),
            mynn.AffineChannel2d(oup),
            # nn.BatchNorm2d(oup),
        )
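    def forward(self, x):
        # Hedged sketch: the forward pass is not included in this example. The
        # standard MobileNetV2 behaviour is to add the skip connection only
        # when stride == 1 and inp == oup (see use_res_connect above).
        if self.use_res_connect:
            return x + self.conv(x)
        return self.conv(x)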
def basic_bn_stem():
    if cfg.RESNETS.SSN:
        return nn.Sequential(
            OrderedDict([
                ('conv1', nn.Conv2d(3, 64, 7, stride=2, padding=3,
                                    bias=False)),
                ('bn1', SSN2d(64, using_moving_average=True)),
                ('relu', nn.ReLU(inplace=True)),
                # ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True))]))
                ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=1))
            ]))
    else:
        if cfg.REID.BN_COMPLETE:
            return nn.Sequential(
                OrderedDict([
                    ('conv1',
                     nn.Conv2d(3, 64, 7, stride=2, padding=3, bias=False)),
                    ('bn1', nn.BatchNorm2d(64)),
                    ('relu', nn.ReLU(inplace=True)),
                    # ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True))]))
                    ('maxpool', nn.MaxPool2d(kernel_size=3,
                                             stride=2,
                                             padding=1))
                ]))
        else:
            return nn.Sequential(
                OrderedDict([
                    ('conv1',
                     nn.Conv2d(3, 64, 7, stride=2, padding=3, bias=False)),
                    ('bn1', mynn.AffineChannel2d(64)),
                    ('relu', nn.ReLU(inplace=True)),
                    # ('maxpool', nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True))]))
                    ('maxpool', nn.MaxPool2d(kernel_size=3,
                                             stride=2,
                                             padding=1))
                ]))
Example #12
    def __init__(self,
                 inplanes,
                 outplanes,
                 innerplanes,
                 stride=1,
                 dilation=1,
                 group=1,
                 downsample=None):
        super().__init__()
        # In original resnet, stride=2 is on 1x1.
        # In fb.torch resnet, stride=2 is on 3x3.
        (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1,
                                                                       stride)
        self.stride = stride

        self.conv1 = nn.Conv2d(inplanes,
                               innerplanes,
                               kernel_size=1,
                               stride=str1x1,
                               bias=False)
        self.bn1 = mynn.AffineChannel2d(innerplanes)

        # Use DCN for stages c3-c5.
        if cfg.USE_DCN and innerplanes in cfg.INNERPLANES_LIST:
            deformable_groups = 1
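            # 18 = 2 * 3 * 3: one (x, y) offset for every position of the
            # 3x3 kernel, per deformable group.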
            offset_channels = 18
            conv_op = DeformConv
            # self.conv2_deformable
            self.conv2_offset = nn.Conv2d(innerplanes,
                                          deformable_groups * offset_channels,
                                          kernel_size=3,
                                          stride=str3x3,
                                          padding=dilation,
                                          dilation=dilation)
            self.conv2 = conv_op(innerplanes,
                                 innerplanes,
                                 kernel_size=3,
                                 stride=str3x3,
                                 padding=dilation,
                                 dilation=dilation,
                                 deformable_groups=deformable_groups,
                                 bias=False)
            # print(innerplanes, str3x3)
        else:
            # In DCN mode, stages not using DCN (e.g. c2) keep a regular conv
            # so their pretrained parameters still load.
            self.conv2 = nn.Conv2d(innerplanes,
                                   innerplanes,
                                   kernel_size=3,
                                   stride=str3x3,
                                   bias=False,
                                   padding=1 * dilation,
                                   dilation=dilation,
                                   groups=group)

        self.bn2 = mynn.AffineChannel2d(innerplanes)

        self.conv3 = nn.Conv2d(innerplanes,
                               outplanes,
                               kernel_size=1,
                               stride=1,
                               bias=False)
        self.bn3 = mynn.AffineChannel2d(outplanes)

        self.downsample = downsample
        self.relu = nn.ReLU(inplace=True)

        if cfg.USE_DCN and innerplanes in cfg.INNERPLANES_LIST:
            self._init_weights()
    def __init__(self,
                 inplanes,
                 outplanes,
                 innerplanes,
                 stride=1,
                 dilation=1,
                 group=1,
                 downsample=None):
        super().__init__()
        # In original resnet, stride=2 is on 1x1.
        # In fb.torch resnet, stride=2 is on 3x3.
        (str1x1, str3x3) = (stride, 1) if cfg.RESNETS.STRIDE_1X1 else (1,
                                                                       stride)
        self.stride = stride

        self.conv1 = nn.Conv2d(inplanes,
                               innerplanes,
                               kernel_size=1,
                               stride=str1x1,
                               bias=False)
        if cfg.RESNETS.SSN:
            self.bn1 = SSN2d(innerplanes, using_moving_average=True)
        else:
            if cfg.REID.BN_COMPLETE:
                self.bn1 = nn.BatchNorm2d(innerplanes)
            else:
                self.bn1 = mynn.AffineChannel2d(innerplanes)

        self.conv2 = nn.Conv2d(innerplanes,
                               innerplanes,
                               kernel_size=3,
                               stride=str3x3,
                               bias=False,
                               padding=1 * dilation,
                               dilation=dilation,
                               groups=group)
        if cfg.RESNETS.SSN:
            self.bn2 = SSN2d(innerplanes, using_moving_average=True)
        else:
            if cfg.REID.BN_COMPLETE:
                self.bn2 = nn.BatchNorm2d(innerplanes)
            else:
                self.bn2 = mynn.AffineChannel2d(innerplanes)

        self.conv3 = nn.Conv2d(innerplanes,
                               outplanes,
                               kernel_size=1,
                               stride=1,
                               bias=False)
        if cfg.RESNETS.SSN:
            self.bn3 = SSN2d(outplanes,
                             using_moving_average=True,
                             last_gamma=True)
        else:
            if cfg.REID.BN_COMPLETE:
                self.bn3 = nn.BatchNorm2d(outplanes)
            else:
                self.bn3 = mynn.AffineChannel2d(outplanes)

        self.downsample = downsample
        self.relu = nn.ReLU(inplace=True)

        if cfg.REID.ZERO_GAMMA:
            self._init_weights()
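The _init_weights helper is not shown in this example; a minimal sketch of what the ZERO_GAMMA trick conventionally does (zero-initialize the last normalization scale so each residual branch starts as an identity mapping; the real helper may differ):

    def _init_weights(self):
        # Zero the final scale ("gamma"). Works for both nn.BatchNorm2d and
        # AffineChannel2d, since both expose a per-channel `weight` parameter.
        nn.init.zeros_(self.bn3.weight)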