Example #1
 def __init__(self, nclass, capacity=512, attention=False, drop=.4,
              norm_layer=nn.BatchNorm, norm_kwargs=None, height=120, width=120):
     super(_CAHead, self).__init__()
     self.up_kwargs = {'height': height, 'width': width}
     self.attention = attention
     self.gamma = 1.0
     # the context flows below run at half the head's output resolution
     height = height // 2
     width = width // 2
     with self.name_scope():
         # Chained Context Aggregation Module
         self.gp = GlobalFlow(capacity, 2048, norm_layer, norm_kwargs,
                              height=height, width=width)
         self.cp1 = _ContextFlow(capacity, stride=2, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs, height=height, width=width)
         self.cp2 = _ContextFlow(capacity, stride=4, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs, height=height, width=width)
         self.cp3 = _ContextFlow(capacity, stride=8, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs, height=height, width=width)
         self.cp4 = _ContextFlow(capacity, stride=16, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs, height=height, width=width)
         if self.attention:
             self.selection = _FeatureSelection(256, in_channels=capacity, norm_layer=norm_layer,
                                                norm_kwargs=norm_kwargs)
         else:
             self.proj = ConvBlock(256, 3, 1, 1, in_channels=capacity, norm_layer=norm_layer,
                                   norm_kwargs=norm_kwargs)
         self.drop = nn.Dropout(drop) if drop else None
         # decoder
         self.decoder = ConvBlock(48, 3, 1, 1, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.conv3x3 = ConvBlock(256, 3, 1, 1, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         # segmentation head
         self.seg_head = FCNHead(nclass, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
Example #2
 def __init__(self, channels, in_channels, norm_layer=nn.BatchNorm, norm_kwargs=None):
     super(_FeatureSelection, self).__init__()
     with self.name_scope():
         self.conv3x3 = ConvBlock(channels, 3, 1, 1, in_channels=in_channels,
                                  norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         # squeeze-and-excitation style gate: global pool + sigmoid 1x1 conv
         self.gap = nn.GlobalAvgPool2D()
         self.conv1x1 = ConvBlock(channels, 1, in_channels=channels, norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs, activation='sigmoid')
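
The forward pass isn't shown, but the layers spell out squeeze-and-excitation style channel gating. A minimal self-contained sketch of that pattern, with plain Conv2D standing in for ConvBlock and batch norm omitted (the exact fusion is an assumption, not this repo's code):

    import mxnet as mx
    from mxnet.gluon import nn

    class FeatureSelectionSketch(nn.HybridBlock):
        def __init__(self, channels, in_channels):
            super(FeatureSelectionSketch, self).__init__()
            self.conv3x3 = nn.Conv2D(channels, 3, 1, 1, in_channels=in_channels)
            self.gap = nn.GlobalAvgPool2D()
            self.conv1x1 = nn.Conv2D(channels, 1, in_channels=channels,
                                     activation='sigmoid')

        def hybrid_forward(self, F, x):
            x = self.conv3x3(x)            # refine features
            w = self.conv1x1(self.gap(x))  # (N, C, 1, 1) gates in (0, 1)
            return F.broadcast_mul(x, w)   # channel-wise re-weighting

    net = FeatureSelectionSketch(256, 512)
    net.initialize()
    print(net(mx.nd.zeros((1, 512, 30, 30))).shape)  # (1, 256, 30, 30)
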
Example #3
 def __init__(self, channels, norm_layer=nn.BatchNorm, norm_kwargs=None, reduction=1):
     super(_FFModule, self).__init__()
     with self.name_scope():
         # feature fusion: 1x1 projection followed by a channel-attention gate
         self.proj = ConvBlock(channels, 1, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.gvp = nn.GlobalAvgPool2D()
         self.conv1x1_1 = ConvBlock(channels // reduction, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
         self.conv1x1_2 = ConvBlock(channels, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs, activation='sigmoid')
Example #4
 def __init__(self, channels, stride, groups=4, norm_layer=nn.BatchNorm,
              norm_kwargs=None, height=60, width=60):
     super(_ContextFlowShuffle, self).__init__()
     self.stride = stride
     self.groups = groups
     self.up_kwargs = {'height': height, 'width': width}
     with self.name_scope():
         # two grouped 3x3 convs; the name suggests a channel shuffle in between
         self.conv1 = ConvBlock(channels, 3, 1, 1, groups=groups, norm_layer=norm_layer,
                                norm_kwargs=norm_kwargs, activation='relu')
         self.conv2 = ConvBlock(channels, 3, 1, 1, groups=groups, norm_layer=norm_layer,
                                norm_kwargs=norm_kwargs, activation='relu')
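
The class name points at ShuffleNet-style channel shuffling between the two grouped convolutions; the shuffle itself isn't in this snippet. The standard Gluon idiom for it, as a hedged sketch:

    import mxnet as mx

    def channel_shuffle(F, x, groups):
        x = F.reshape(x, (0, -4, groups, -1, -2))  # (N, C, H, W) -> (N, g, C/g, H, W)
        x = F.swapaxes(x, 1, 2)                    # swap the group and channel axes
        return F.reshape(x, (0, -3, -2))           # merge back to (N, C, H, W)

    x = mx.nd.arange(8).reshape((1, 8, 1, 1))
    print(channel_shuffle(mx.nd, x, groups=4).reshape((8,)))
    # [0. 2. 4. 6. 1. 3. 5. 7.] -- channels interleaved across the 4 groups
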
Example #5
 def __init__(self, channels, inter_channels=64, norm_layer=nn.BatchNorm,
              norm_kwargs=None):
     super(_SpatialPath, self).__init__()
     with self.name_scope():
         # three stride-2 convs in a row: output is 1/8 of the input resolution
         self.conv7x7 = ConvBlock(inter_channels, 7, 2, 3, norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs)
         self.conv3x3_1 = ConvBlock(inter_channels, 3, 2, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
         self.conv3x3_2 = ConvBlock(inter_channels, 3, 2, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
         self.conv1x1 = ConvBlock(channels, 1, norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs)
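
The stride-2 convolutions take the input down to 1/8 resolution, which is the point of a spatial path: a shallow branch that keeps spatial detail. A quick shape check with bare Conv2D layers standing in for ConvBlock:

    import mxnet as mx
    from mxnet.gluon import nn

    path = nn.HybridSequential()
    path.add(nn.Conv2D(64, 7, 2, 3),   # 1/2
             nn.Conv2D(64, 3, 2, 1),   # 1/4
             nn.Conv2D(64, 3, 2, 1),   # 1/8
             nn.Conv2D(128, 1))        # channel projection only
    path.initialize()
    print(path(mx.nd.zeros((1, 3, 480, 480))).shape)  # (1, 128, 60, 60)
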
Example #6
 def __init__(self, nclass, low_channels=256, high_channels=128,
              norm_layer=nn.BatchNorm, norm_kwargs=None, drop=.1):
     super(_BoundaryAttention, self).__init__()
     with self.name_scope():
         self.conv1x1 = ConvBlock(low_channels, 1, in_channels=high_channels, norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs, activation='sigmoid')
         self.fconv1x1 = ConvBlock(high_channels, 1, in_channels=low_channels,
                                   norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.fconv3x3 = ConvBlock(high_channels, 3, 1, 1, in_channels=high_channels,
                                   norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.cconv3x3 = ConvBlock(high_channels, 3, 1, 1, in_channels=high_channels,
                                   norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.drop = nn.Dropout(drop) if drop else None
         self.cconv1x1 = nn.Conv2D(nclass, 1, in_channels=high_channels)
Example #7
 def __init__(self,
              nclass,
              decoder_capacity,
              input_height,
              input_width,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None):
     super(_LadderHead, self).__init__()
     with self.name_scope():
         self.conv_c4 = ConvBlock(decoder_capacity,
                                  1,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs,
                                  activation='relu')
         self.fusion_c3 = _LadderFusion(decoder_capacity,
                                        input_height // 16,
                                        input_width // 16,
                                        norm_layer=norm_layer,
                                        norm_kwargs=norm_kwargs)
         self.fusion_c2 = _LadderFusion(decoder_capacity,
                                        input_height // 8,
                                        input_width // 8,
                                        norm_layer=norm_layer,
                                        norm_kwargs=norm_kwargs)
         self.fusion_c1 = _LadderFusion(decoder_capacity,
                                        input_height // 4,
                                        input_width // 4,
                                        norm_layer=norm_layer,
                                        norm_kwargs=norm_kwargs)
         self.seg_head = FCNHead(nclass,
                                 decoder_capacity,
                                 norm_layer,
                                 norm_kwargs,
                                 activation='relu')
Example #8
 def __init__(self,
              in_channels,
              channels,
              kernel_size,
              strides=1,
              padding=0,
              dilation=1,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None,
              activation='relu'):
     super(SeparableConvBlock, self).__init__()
     # depthwise: groups=in_channels gives one spatial filter per channel
     self.conv = nn.Conv2D(in_channels,
                           kernel_size,
                           strides,
                           padding,
                           dilation,
                           groups=in_channels,
                           use_bias=False,
                           in_channels=in_channels)
     # pointwise: a 1x1 conv that mixes channels, with BN and activation
     self.pointwise = ConvBlock(channels,
                                1,
                                use_bias=False,
                                in_channels=in_channels,
                                norm_layer=norm_layer,
                                norm_kwargs=norm_kwargs,
                                activation=activation)
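
This is the standard depthwise-separable factorization: groups=in_channels applies one spatial filter per channel, and the 1x1 pointwise conv mixes channels. For in_channels=128, channels=256, kernel_size=3 that is 128*9 + 128*256 = 33,920 weights versus 128*256*9 = 294,912 for a dense 3x3 conv, roughly 8.7x fewer. A quick check with bare Conv2D layers (BN and activation omitted):

    import mxnet as mx
    from mxnet.gluon import nn

    def n_params(block):
        block.initialize()
        return sum(p.data().size for p in block.collect_params().values())

    sep = nn.HybridSequential()
    sep.add(nn.Conv2D(128, 3, padding=1, groups=128, use_bias=False, in_channels=128),
            nn.Conv2D(256, 1, use_bias=False, in_channels=128))
    dense = nn.Conv2D(256, 3, padding=1, use_bias=False, in_channels=128)

    print(n_params(sep), n_params(dense))  # 33920 294912
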
Example #9
    def __init__(self,
                 capacity,
                 height,
                 width,
                 norm_layer=nn.BatchNorm,
                 norm_kwargs=None):
        super(_LadderFusion, self).__init__()
        with self.name_scope():
            self.conv1x1 = ConvBlock(capacity,
                                     1,
                                     norm_layer=norm_layer,
                                     norm_kwargs=norm_kwargs,
                                     activation='relu')
            self.conv3x3 = ConvBlock(capacity,
                                     3,
                                     1,
                                     1,
                                     norm_layer=norm_layer,
                                     norm_kwargs=norm_kwargs,
                                     activation='relu')

        self.up_kwargs = {'height': height, 'width': width}
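
up_kwargs pins a fixed target size, which suggests the deep feature map is bilinearly resized before being fused with the lateral one. A plausible forward, sketched with plain layers (the real hybrid_forward isn't shown; the add-then-smooth fusion is an assumption):

    import mxnet as mx
    from mxnet.gluon import nn

    class LadderFusionSketch(nn.HybridBlock):
        def __init__(self, capacity, height, width):
            super(LadderFusionSketch, self).__init__()
            self.up_kwargs = {'height': height, 'width': width}
            self.conv1x1 = nn.Conv2D(capacity, 1, activation='relu')
            self.conv3x3 = nn.Conv2D(capacity, 3, 1, 1, activation='relu')

        def hybrid_forward(self, F, deep, lateral):
            x = F.contrib.BilinearResize2D(self.conv1x1(deep), **self.up_kwargs)
            return self.conv3x3(x + lateral)   # fuse, then smooth

    fuse = LadderFusionSketch(128, 60, 60)
    fuse.initialize()
    print(fuse(mx.nd.zeros((1, 256, 30, 30)),
               mx.nd.zeros((1, 128, 60, 60))).shape)  # (1, 128, 60, 60)
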
Example #10
    def __init__(self,
                 in_channels,
                 mid_channels,
                 strides,
                 dilation=1,
                 norm_layer=nn.BatchNorm,
                 norm_kwargs=None,
                 activation='relu'):
        super(Block, self).__init__()
        # projection shortcut: matches the residual branch's shape when striding
        if strides > 1:
            self.down = ConvBlock(mid_channels * self.expansion,
                                  1,
                                  strides=strides,
                                  use_bias=False,
                                  in_channels=in_channels,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs,
                                  activation=None)
        else:
            self.down = None

        self.residual = nn.HybridSequential()
        self.residual.add(
            # for the 3x3 kernel, padding=dilation preserves size at stride 1
            SeparableConvBlock(in_channels,
                               mid_channels,
                               3,
                               strides,
                               dilation,
                               dilation,
                               norm_layer=norm_layer,
                               norm_kwargs=norm_kwargs,
                               activation=activation),
            SeparableConvBlock(mid_channels,
                               mid_channels,
                               3,
                               1,
                               1,
                               norm_layer=norm_layer,
                               norm_kwargs=norm_kwargs,
                               activation=activation),
            SeparableConvBlock(mid_channels,
                               mid_channels * self.expansion,
                               3,
                               1,
                               1,
                               norm_layer=norm_layer,
                               norm_kwargs=norm_kwargs,
                               activation=None))
        self.act = Activation(activation)
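
What's missing here is the usual residual wiring: the shortcut goes through `down` when the block strides (so shapes match), otherwise it is the identity, and the activation is applied after the add. A self-contained sketch of that pattern with ordinary convolutions in place of the separable blocks:

    import mxnet as mx
    from mxnet.gluon import nn

    class ResidualSketch(nn.HybridBlock):
        def __init__(self, channels, strides):
            super(ResidualSketch, self).__init__()
            # 1x1 projection shortcut only when the shape changes
            self.down = (nn.Conv2D(channels, 1, strides, use_bias=False)
                         if strides > 1 else None)
            self.residual = nn.HybridSequential()
            self.residual.add(nn.Conv2D(channels, 3, strides, 1),
                              nn.Conv2D(channels, 3, 1, 1))
            self.act = nn.Activation('relu')

        def hybrid_forward(self, F, x):
            identity = self.down(x) if self.down is not None else x
            return self.act(self.residual(x) + identity)

    blk = ResidualSketch(64, strides=2)
    blk.initialize()
    print(blk(mx.nd.zeros((1, 32, 60, 60))).shape)  # (1, 64, 30, 30)
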
Example #11
 def __init__(self, nclass, height, width, norm_layer=nn.BatchNorm, norm_kwargs=None):
     super(_BiSeNetHead, self).__init__()
     self.up_kwargs = {'height': height, 'width': width}
     with self.name_scope():
         self.spatial_path = _SpatialPath(128, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.global_flow = GlobalFlow(128, in_channels=512, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs, height=height, width=width)
         self.refine_c4 = _ARModule(128, in_channels=512, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
         self.refine_c3 = _ARModule(128, in_channels=256, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
         self.proj = ConvBlock(128, 3, 1, 1, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
         self.fusion = _FFModule(256, norm_layer, norm_kwargs, reduction=1)
         self.seg = FCNHead(nclass, in_channels=256, norm_layer=norm_layer, norm_kwargs=norm_kwargs,
                            drop_out=.0, reduction=4)
Example #12
 def __init__(self,
              nclass,
              in_channels,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None,
              height=60,
              width=60):
     super(_DeepLabHead, self).__init__()
     self.up_kwargs = {'height': height, 'width': width}
     with self.name_scope():
         self.aspp = ASPP(256,
                          in_channels,
                          norm_layer,
                          norm_kwargs,
                          height // 2,
                          width // 2,
                          atrous_rates=(12, 24, 36))
         self.conv_c1 = ConvBlock(48,
                                  3,
                                  1,
                                  1,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs)
         # in_channels=304: 256 ASPP channels + 48 low-level channels after concat
         self.conv3x3 = ConvBlock(256,
                                  3,
                                  1,
                                  1,
                                  in_channels=304,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs)
         self.drop = nn.Dropout(0.5)
         self.head = FCNHead(nclass,
                             256,
                             norm_layer,
                             norm_kwargs,
                             reduction=1)
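
in_channels=304 is the giveaway: following the DeepLabV3+ decoder recipe, the 256-channel ASPP output is concatenated with the 48-channel projected low-level features, 256 + 48 = 304. The channel arithmetic:

    import mxnet as mx

    aspp_out = mx.nd.zeros((1, 256, 60, 60))  # ASPP output
    low = mx.nd.zeros((1, 48, 60, 60))        # projected low-level features
    fused = mx.nd.concat(aspp_out, low, dim=1)
    print(fused.shape)  # (1, 304, 60, 60) -- matches conv3x3's in_channels
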
Example #13
 def __init__(self,
              nclass,
              num_scale=2,
              height=60,
              width=60,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None):
     super(_AttentionHead, self).__init__()
     self.num_scale = num_scale
     self.up_kwargs = {'height': height, 'width': width}
     with self.name_scope():
         self.seg_head = FCNHead(nclass,
                                 norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs)
         self.conv3x3 = ConvBlock(512,
                                  3,
                                  1,
                                  1,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs,
                                  activation='relu')
         self.conv1x1 = nn.Conv2D(num_scale, 1, in_channels=512)
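
conv1x1 emits num_scale channels, one weight map per input scale, which reads as the attention-to-scale scheme: softmax the maps across the scale axis, then take a weighted sum of the per-scale score maps. A sketch of that weighting step (an assumption about this head's forward; 21 classes is just an example):

    import mxnet as mx

    num_scale, nclass = 2, 21
    scores = [mx.nd.zeros((1, nclass, 60, 60)) for _ in range(num_scale)]
    logits = mx.nd.zeros((1, num_scale, 60, 60))   # from conv1x1
    weights = mx.nd.softmax(logits, axis=1)        # sums to 1 across scales
    fused = sum(weights[:, i:i + 1] * scores[i] for i in range(num_scale))
    print(fused.shape)  # (1, 21, 60, 60)
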
Example #14
 def __init__(self,
              block,
              layers,
              channels,
              classes=1000,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None,
              **kwargs):
     super(Xception, self).__init__()
     self.in_channels = 8
     self.norm_kwargs = norm_kwargs if norm_kwargs is not None else {}
     with self.name_scope():
         self.conv1 = ConvBlock(self.in_channels,
                                3,
                                2,
                                1,
                                use_bias=False,
                                norm_layer=norm_layer,
                                norm_kwargs=norm_kwargs,
                                activation='relu')
         self.maxpool = nn.MaxPool2D(pool_size=3, strides=2, padding=1)
         self.layer1 = self._make_layer(block,
                                        norm_layer,
                                        layers[0],
                                        channels[0],
                                        strides=2)
         self.layer2 = self._make_layer(block,
                                        norm_layer,
                                        layers[1],
                                        channels[1],
                                        strides=2)
         self.layer3 = self._make_layer(block,
                                        norm_layer,
                                        layers[2],
                                        channels[2],
                                        strides=2)
         self.avgpool = nn.GlobalAvgPool2D()
         self.flat = nn.Flatten()
         # self.in_channels is presumably updated by the _make_layer calls above
         self.fc = nn.Dense(in_units=self.in_channels, units=classes)
Example #15
 def __init__(self,
              nclass,
              input_height,
              input_width,
              capacity=128,
              norm_layer=nn.BatchNorm,
              norm_kwargs=None):
     super(_SwiftNetHead, self).__init__()
     with self.name_scope():
         self.conv1x1 = ConvBlock(capacity,
                                  1,
                                  norm_layer=norm_layer,
                                  norm_kwargs=norm_kwargs)
         self.fusion_32x = _LateralFusion(capacity,
                                          input_height // 32,
                                          input_width // 32,
                                          norm_layer=norm_layer,
                                          norm_kwargs=norm_kwargs)
         self.fusion_16x = _LateralFusion(capacity,
                                          input_height // 16,
                                          input_width // 16,
                                          norm_layer=norm_layer,
                                          norm_kwargs=norm_kwargs)
         self.fusion_8x = _LateralFusion(capacity,
                                         input_height // 8,
                                         input_width // 8,
                                         norm_layer=norm_layer,
                                         norm_kwargs=norm_kwargs)
         self.final = _LateralFusion(capacity,
                                     input_height // 4,
                                     input_width // 4,
                                     True,
                                     norm_layer=norm_layer,
                                     norm_kwargs=norm_kwargs)
         self.seg_head = FCNHead(nclass,
                                 capacity,
                                 norm_layer,
                                 norm_kwargs,
                                 reduction=1)