Example #1
import torch.nn as nn


class BasicConv(nn.Module):
    def __init__(self,
                 input_ch,
                 output_ch,
                 kernel_size,
                 padding=0,
                 stride=1,
                 use_batchnorm=True,
                 groups=1,
                 attention=None,
                 group_size=2):
        super(BasicConv, self).__init__()
        # Conv -> (optional BatchNorm) -> ReLU; the conv bias is only kept
        # when BatchNorm is disabled, since BN would cancel it out anyway.
        layers = [
            nn.Conv2d(input_ch,
                      output_ch,
                      kernel_size,
                      stride,
                      padding,
                      bias=not use_batchnorm,
                      groups=groups)
        ]
        if use_batchnorm:
            layers.append(nn.BatchNorm2d(output_ch))
        layers.append(nn.ReLU(True))
        self.conv = nn.Sequential(*layers)

        # Optional attention stage appended after the activation.
        # SqueezeExcitationBlock, CBAM and TAM are assumed to be defined
        # elsewhere in this repository.
        if attention == 'SE':
            self.conv.add_module("Attention",
                                 SqueezeExcitationBlock(output_ch))
        elif attention == 'CBAM':
            self.conv.add_module("Attention", CBAM(output_ch))
        elif attention == 'TAM':
            self.conv.add_module("Attention", TAM(output_ch, group_size))
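
The excerpt stops at the constructor; the forward method and the usage sketch below are assumptions, not part of the original snippet (attention is left at None so no external attention module is needed, and the tensor sizes are arbitrary):

    def forward(self, x):
        # Apply the Conv-(BN)-ReLU(-attention) stack.
        return self.conv(x)


# Usage sketch with assumed, arbitrary sizes.
import torch

block = BasicConv(input_ch=16, output_ch=32, kernel_size=3, padding=1)
features = block(torch.randn(1, 16, 64, 64))  # -> torch.Size([1, 32, 64, 64])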
Example #2
import torch.nn as nn


class ResidualBlock(nn.Module):
    def __init__(self,
                 input_ch,
                 output_ch,
                 bottle_neck_ch=0,
                 pre_activation=False,
                 first_conv_stride=1,
                 n_groups=1,
                 attention='CBAM',
                 group_size=2):
        super(ResidualBlock, self).__init__()
        # Residual block with an optional bottleneck and an optional attention
        # stage (SE / CBAM / TAM are assumed to be defined elsewhere in this
        # repository). A single ReLU instance is reused below, which is safe
        # because ReLU is stateless.
        act = nn.ReLU(inplace=True)
        norm = nn.BatchNorm2d

        if pre_activation:
            # Pre-activation ordering: BN -> ReLU -> Conv.
            if bottle_neck_ch:
                block = [
                    norm(input_ch), act,
                    nn.Conv2d(input_ch, bottle_neck_ch, 1, bias=False)
                ]  # Caffe version has stride 2 here

                block += [
                    norm(bottle_neck_ch), act,
                    nn.Conv2d(bottle_neck_ch,
                              bottle_neck_ch,
                              3,
                              padding=1,
                              stride=first_conv_stride,
                              groups=n_groups,
                              bias=False)
                ]  # PyTorch version has stride 2 here

                block += [
                    norm(bottle_neck_ch),
                    # Final 1x1 expansion back to output_ch (no activation
                    # precedes it in this variant).
                    nn.Conv2d(bottle_neck_ch, output_ch, 1, bias=False)
                ]

            else:
                block = [
                    norm(input_ch), act,
                    nn.Conv2d(input_ch,
                              output_ch,
                              3,
                              stride=first_conv_stride,
                              groups=n_groups,
                              padding=1,
                              bias=False)
                ]

                block += [
                    norm(output_ch), act,
                    nn.Conv2d(output_ch, output_ch, 3, padding=1, bias=False)
                ]

        else:
            # Post-activation ordering: Conv -> BN -> ReLU.
            if bottle_neck_ch:
                block = [
                    nn.Conv2d(input_ch, bottle_neck_ch, 1,
                              bias=False),  # Caffe version has stride 2 here
                    norm(bottle_neck_ch),
                    act
                ]
                block += [
                    nn.Conv2d(bottle_neck_ch,
                              bottle_neck_ch,
                              3,
                              padding=1,
                              stride=first_conv_stride,
                              groups=n_groups,
                              bias=False),  # PyTorch version has stride 2 here
                    norm(bottle_neck_ch),
                    act
                ]
                block += [
                    nn.Conv2d(bottle_neck_ch, output_ch, 1, bias=False),
                    norm(output_ch)
                ]

            else:
                block = [
                    nn.Conv2d(input_ch,
                              output_ch,
                              3,
                              stride=first_conv_stride,
                              groups=n_groups,
                              padding=1,
                              bias=False),
                    norm(output_ch), act
                ]
                block += [
                    nn.Conv2d(output_ch, output_ch, 3, padding=1, bias=False),
                    norm(output_ch)
                ]

        if attention == 'CBAM':
            block += [CBAM(output_ch)]

        elif attention == 'SE':
            block += [SqueezeExcitationBlock(output_ch)]

        elif attention == 'TAM':
            block += [TAM(output_ch, group_size)]

        if input_ch != output_ch:
            # Projection shortcut: a strided 1x1 conv matches the identity
            # path to the main path's channel count and spatial size.
            side_block = [
                nn.Conv2d(input_ch,
                          output_ch,
                          1,
                          stride=first_conv_stride,
                          bias=False),
                norm(output_ch)
            ]
            self.side_block = nn.Sequential(*side_block)
            self.varying_size = True

        else:
            self.varying_size = False

        self.block = nn.Sequential(*block)
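
The forward pass is also not shown in this excerpt; a minimal sketch under the usual residual-sum convention follows (whether a final activation comes after the sum is an assumption; pre-activation blocks typically omit it):

    def forward(self, x):
        # Project the identity path when the channel count changes.
        identity = self.side_block(x) if self.varying_size else x
        return self.block(x) + identity


# Usage sketch: a post-activation bottleneck block; attention=None avoids
# depending on the external attention modules.
import torch

res_block = ResidualBlock(input_ch=64,
                          output_ch=128,
                          bottle_neck_ch=32,
                          first_conv_stride=2,
                          attention=None)
out = res_block(torch.randn(1, 64, 56, 56))  # -> torch.Size([1, 128, 28, 28])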