Example #1
    def __init__(self, nclass, **kwargs):
        super(DFANet, self).__init__()
        # Lightweight Xception-A backbone, the first of DFANet's three encoder
        # paths. XceptionA, Enc, FCAttention, ConvBnRelu and dsn are helper
        # modules defined elsewhere in the repository.
        self.backbone = XceptionA()

        # Second encoder path: stage encoders followed by fully-connected attention.
        self.enc2_2 = Enc(240, 48, 4, **kwargs)
        self.enc3_2 = Enc(144, 96, 6, **kwargs)
        self.enc4_2 = Enc(288, 192, 4, **kwargs)
        self.fca_2 = FCAttention(192, **kwargs)

        # Third encoder path.
        self.enc2_3 = Enc(240, 48, 4, **kwargs)
        self.enc3_3 = Enc(144, 96, 6, **kwargs)
        self.enc3_4 = Enc(288, 192, 4, **kwargs)
        self.fca_3 = FCAttention(192, **kwargs)

        # 1x1 reductions of the stage-2 features from each path, fused in the decoder.
        self.enc2_1_reduce = ConvBnRelu(48, 32, 1, **kwargs)
        self.enc2_2_reduce = ConvBnRelu(48, 32, 1, **kwargs)
        self.enc2_3_reduce = ConvBnRelu(48, 32, 1, **kwargs)
        self.conv_fusion = ConvBnRelu(32, 32, 1, **kwargs)

        # 1x1 reductions of the attention outputs and the final per-pixel classifier.
        self.fca_1_reduce = ConvBnRelu(192, 32, 1, **kwargs)
        self.fca_2_reduce = ConvBnRelu(192, 32, 1, **kwargs)
        self.fca_3_reduce = ConvBnRelu(192, 32, 1, **kwargs)
        self.conv_out = nn.Conv2d(32, nclass, 1)

        # Auxiliary (deep-supervision) heads used during training.
        self.dsn1 = dsn(192, nclass)
        self.dsn2 = dsn(192, nclass)

        # Non-backbone modules; the training script typically assigns these a
        # separate (higher) learning rate than the pretrained backbone.
        self.__setattr__('exclusive', [
            'enc2_2', 'enc3_2', 'enc4_2', 'fca_2', 'enc2_3', 'enc3_3',
            'enc3_4', 'fca_3', 'enc2_1_reduce', 'enc2_2_reduce',
            'enc2_3_reduce', 'conv_fusion', 'fca_1_reduce', 'fca_2_reduce',
            'fca_3_reduce', 'conv_out'
        ])
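
The ConvBnRelu and dsn helpers used throughout this example are defined elsewhere in the repository and are not shown here. As a rough, hypothetical sketch of what such helpers typically look like in segmentation codebases (the exact signatures, channel reduction and dropout below are assumptions, not the repository's code):

import torch.nn as nn

# Hypothetical sketch of ConvBnRelu(in_ch, out_ch, kernel_size): a convolution
# followed by BatchNorm and ReLU. The real helper may accept extra options
# (e.g. a norm_layer) via **kwargs; they are simply ignored in this sketch.
class ConvBnRelu(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size,
                 stride=1, padding=0, **kwargs):
        super(ConvBnRelu, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size,
                              stride=stride, padding=padding, bias=False)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        return self.relu(self.bn(self.conv(x)))

# Hypothetical sketch of dsn(in_channels, nclass): a small auxiliary
# classifier head used for deep supervision during training.
def dsn(in_channels, nclass):
    return nn.Sequential(
        nn.Conv2d(in_channels, in_channels // 2, 3, padding=1, bias=False),
        nn.BatchNorm2d(in_channels // 2),
        nn.ReLU(inplace=True),
        nn.Dropout2d(0.1),
        nn.Conv2d(in_channels // 2, nclass, 1))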
Example #2
    def __init__(self, nclass, type="dfv1"):
        super(DFSeg, self).__init__()

        if type == "dfv1":
            self.backbone = dfnetv1()
        else:
            self.backbone = dfnetv2()

        # 1x1 convolutions that compress backbone stage outputs to a common 128 channels.
        self.cc5 = nn.Conv2d(128, 128, 1)
        self.cc4 = nn.Conv2d(256, 128, 1)
        self.cc3 = nn.Conv2d(128, 128, 1)

        # Pyramid pooling over the deepest backbone features.
        self.ppm = PSPModule(512, 128)

        # Decoder fusion nodes that merge adjacent stages top-down.
        self.fn4 = FusionNode(128)
        self.fn3 = FusionNode(128)

        # Final classification head.
        self.fc = dsn(128, nclass)
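
PSPModule in this example performs pyramid pooling over the deepest backbone features (512 channels in, 128 out). A minimal, hypothetical sketch of such a module in the PSPNet style follows; the pool sizes and the fusion layer are assumptions, not the repository's implementation:

import torch
import torch.nn as nn
import torch.nn.functional as F

# Hypothetical pyramid pooling sketch: pool the input at several grid sizes,
# project each branch to a fraction of the input width, upsample back,
# concatenate with the input and fuse with a 1x1 convolution.
class PSPModule(nn.Module):
    def __init__(self, in_channels, out_channels, sizes=(1, 2, 3, 6)):
        super(PSPModule, self).__init__()
        branch_channels = in_channels // len(sizes)
        self.stages = nn.ModuleList([
            nn.Sequential(
                nn.AdaptiveAvgPool2d(size),
                nn.Conv2d(in_channels, branch_channels, 1, bias=False))
            for size in sizes
        ])
        # The input and all pooled branches are concatenated before fusion.
        self.bottleneck = nn.Conv2d(
            in_channels + branch_channels * len(sizes), out_channels, 1)

    def forward(self, x):
        h, w = x.shape[2:]
        feats = [x]
        for stage in self.stages:
            feats.append(F.interpolate(stage(x), size=(h, w),
                                       mode='bilinear', align_corners=False))
        return self.bottleneck(torch.cat(feats, dim=1))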
Example #3
    def __init__(self,
                 block,
                 layers,
                 num_features=19,
                 k_up=3,
                 efficient=True,
                 use_bn=True,
                 spp_grids=(8, 4, 2, 1),
                 spp_square_grid=False):
        super(SwiftNetResNet, self).__init__()
        self.inplanes = 64
        self.efficient = efficient
        self.nclass = num_features
        self.use_bn = use_bn
        # Standard ResNet stem: 7x7 stride-2 convolution, optional BN, ReLU, 3x3 max pool.
        self.conv1 = nn.Conv2d(3,
                               64,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64) if self.use_bn else lambda x: x
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Encoder stages; after each of the first three, a lateral _Upsample
        # module is collected for the decoder.
        upsamples = []
        self.layer1 = self._make_layer(block, 64, layers[0])
        upsamples += [
            _Upsample(num_features,
                      self.inplanes,
                      num_features,
                      use_bn=self.use_bn,
                      k=k_up)
        ]
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        upsamples += [
            _Upsample(num_features,
                      self.inplanes,
                      num_features,
                      use_bn=self.use_bn,
                      k=k_up)
        ]
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        upsamples += [
            _Upsample(num_features,
                      self.inplanes,
                      num_features,
                      use_bn=self.use_bn,
                      k=k_up)
        ]
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)

        # Parameter group fine-tuned from ImageNet-pretrained weights.
        self.fine_tune = [
            self.conv1, self.maxpool, self.layer1, self.layer2, self.layer3,
            self.layer4
        ]
        if self.use_bn:
            self.fine_tune += [self.bn1]

        num_levels = 3
        self.spp_size = num_features
        bt_size = self.spp_size

        level_size = self.spp_size // num_levels

        # Auxiliary (deep-supervision) head.
        self.dsn = dsn(256, self.nclass)

        # Spatial pyramid pooling over the deepest encoder features.
        self.spp = SpatialPyramidPooling(self.inplanes,
                                         num_levels,
                                         bt_size=bt_size,
                                         level_size=level_size,
                                         out_size=self.spp_size,
                                         grids=spp_grids,
                                         square_grid=spp_square_grid,
                                         bn_momentum=0.01 / 2,
                                         use_bn=self.use_bn)
        # The decoder runs from the deepest stage upward, hence the reversal.
        self.upsample = nn.ModuleList(list(reversed(upsamples)))

        # Parameter group trained from scratch (random initialization).
        self.random_init = [self.spp, self.upsample]

        self.num_features = num_features

        # Kaiming initialization for convolutions; unit weight / zero bias for BatchNorm.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight,
                                        mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
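
The _make_layer helper called above is not shown in this snippet. It presumably follows the standard torchvision ResNet recipe; the repository's version likely also threads through the efficient and use_bn options, which are omitted in this hypothetical sketch of the method:

    def _make_layer(self, block, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # 1x1 projection so the residual shortcut matches the new shape.
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion))

        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

With a ResNet-18-style encoder, construction would typically look like SwiftNetResNet(BasicBlock, [2, 2, 2, 2], num_features=19), where BasicBlock is the standard two-convolution residual block.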