# MXNet Gluon decoder heads; these fragments assume `from mxnet.gluon import nn`
# and the helper blocks (ConvBlock/ConvModule2d, LateralFusion, FCNHead,
# PyramidPooling/PPModule) defined elsewhere in the repo.
def __init__(self, nclass, decoder_capacity, input_height, input_width,
             norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_LadderHead, self).__init__()
    with self.name_scope():
        # 1x1 projection of the deepest (1/32) backbone features
        self.conv_c4 = ConvBlock(decoder_capacity, 1, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs, activation='relu')
        # top-down lateral fusions at fixed 1/16, 1/8, and 1/4 resolutions
        self.fusion_c3 = LateralFusion(decoder_capacity, input_height // 16,
                                       input_width // 16, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        self.fusion_c2 = LateralFusion(decoder_capacity, input_height // 8,
                                       input_width // 8, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        self.fusion_c1 = LateralFusion(decoder_capacity, input_height // 4,
                                       input_width // 4, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        # final per-pixel classifier
        self.seg_head = FCNHead(nclass, decoder_capacity, norm_layer, norm_kwargs)
def __init__(self, nclass, input_height, input_width, capacity=256,
             norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_NextHead, self).__init__()
    with self.name_scope():
        # 1x1 projection of the deepest (1/32) backbone features
        self.conv_c4 = ConvBlock(capacity, 1, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs)
        # top-down fusions at fixed 1/16 and 1/8 resolutions
        self.fusion_16x = LateralFusion(capacity, input_height // 16,
                                        input_width // 16, norm_layer=norm_layer,
                                        norm_kwargs=norm_kwargs)
        self.fusion_8x = LateralFusion(capacity, input_height // 8,
                                       input_width // 8, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        # final per-pixel classifier
        self.seg_head = FCNHead(nclass, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
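# A minimal forward-pass sketch for _NextHead (not in the original source), to
# show the assumed top-down fusion order. It assumes LateralFusion is called as
# fusion(coarse, skip) and returns the upsampled, fused feature map; the stage
# names c2/c3/c4 (stride-8/16/32 backbone outputs) are hypothetical.
def hybrid_forward(self, F, c2, c3, c4):
    out = self.conv_c4(c4)          # project 1/32 features to `capacity` channels
    out = self.fusion_16x(out, c3)  # upsample to 1/16 and fuse the stride-16 skip
    out = self.fusion_8x(out, c2)   # upsample to 1/8 and fuse the stride-8 skip
    return self.seg_head(out)       # class logits at 1/8 of the input resolution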
def __init__(self, nclass, in_channels, input_height, input_width, capacity=256,
             norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_SwiftNetHead, self).__init__()
    with self.name_scope():
        # pyramid pooling over the 1/32 feature map for global context
        self.ppool = PyramidPooling(in_channels, input_height // 32,
                                    input_width // 32, norm_layer, norm_kwargs)
        # 1x1 projection down to the decoder capacity
        self.conv_c4 = ConvBlock(capacity, 1, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs)
        # top-down lateral fusions at fixed 1/16, 1/8, and 1/4 resolutions
        self.fusion_c3 = LateralFusion(capacity, input_height // 16,
                                       input_width // 16, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        self.fusion_c2 = LateralFusion(capacity, input_height // 8,
                                       input_width // 8, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        self.fusion_c1 = LateralFusion(capacity, input_height // 4,
                                       input_width // 4, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        # final per-pixel classifier
        self.seg_head = FCNHead(nclass, capacity, norm_layer, norm_kwargs)
def __init__(self, nclass, in_channels, capacity=256,
             norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_SwiftNetHead, self).__init__()
    with self.name_scope():
        # size-agnostic variant: pooling/fusion modules infer spatial dims at runtime
        self.ppool = PPModule(in_channels, norm_layer, norm_kwargs)
        self.conv_c4 = ConvModule2d(capacity, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
        self.fusion_c3 = LateralFusion(capacity, norm_layer, norm_kwargs)
        self.fusion_c2 = LateralFusion(capacity, norm_layer, norm_kwargs)
        self.fusion_c1 = LateralFusion(capacity, norm_layer, norm_kwargs)
        self.seg_head = FCNHead(nclass, capacity, norm_layer, norm_kwargs)
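# A hypothetical forward pass for the size-agnostic _SwiftNetHead (a sketch, not
# taken from the source). It assumes PPModule returns context-enriched features
# at 1/32 resolution and that LateralFusion is called as fusion(coarse, skip);
# c1..c4 are the assumed stride-4/8/16/32 backbone outputs.
def hybrid_forward(self, F, c1, c2, c3, c4):
    out = self.conv_c4(self.ppool(c4))  # pool global context, then 1x1 projection
    out = self.fusion_c3(out, c3)       # 1/32 -> 1/16
    out = self.fusion_c2(out, c2)       # 1/16 -> 1/8
    out = self.fusion_c1(out, c1)       # 1/8  -> 1/4
    return self.seg_head(out)           # class logits at 1/4 of the input resolution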
def __init__(self, capacity=256, norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_FPNBranch, self).__init__()
    with self.name_scope():
        # 1x1 projection of the deepest features, then three lateral fusions
        # covering the 1/16, 1/8, and 1/4 pyramid levels
        self.conv = ConvModule2d(capacity, 1, norm_layer=norm_layer,
                                 norm_kwargs=norm_kwargs)
        self.lateral16x = LateralFusion(capacity, norm_layer=norm_layer,
                                        norm_kwargs=norm_kwargs)
        self.lateral8x = LateralFusion(capacity, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
        self.lateral4x = LateralFusion(capacity, norm_layer=norm_layer,
                                       norm_kwargs=norm_kwargs)
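# Hedged usage sketch for _FPNBranch (assumed, not from the source): unlike the
# heads above it defines no seg_head, so a separate classifier is expected
# downstream. Gluon's deferred shape inference lets the 1x1 convolutions pick up
# their input channel counts on the first call.
branch = _FPNBranch(capacity=128)
branch.initialize()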
def __init__(self, nclass, decoder_capacity, norm_layer=nn.BatchNorm, norm_kwargs=None):
    super(_LadderHead, self).__init__()
    with self.name_scope():
        # size-agnostic variant of _LadderHead: no fixed input_height/input_width
        self.conv_c4 = ConvModule2d(decoder_capacity, 1, norm_layer=norm_layer,
                                    norm_kwargs=norm_kwargs)
        self.fusion_c3 = LateralFusion(decoder_capacity, norm_layer, norm_kwargs)
        self.fusion_c2 = LateralFusion(decoder_capacity, norm_layer, norm_kwargs)
        self.fusion_c1 = LateralFusion(decoder_capacity, norm_layer, norm_kwargs)
        # final per-pixel classifier
        self.seg_head = FCNHead(nclass, decoder_capacity, norm_layer, norm_kwargs)
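# End-to-end wiring sketch for the compact _LadderHead. Everything here is an
# assumption for illustration: the `backbone` callable, its four stage outputs,
# and the head's call signature are hypothetical, not taken from the source.
head = _LadderHead(nclass=19, decoder_capacity=128)
head.initialize()
c1, c2, c3, c4 = backbone(x)   # stride-4/8/16/32 feature maps (hypothetical)
logits = head(c1, c2, c3, c4)  # assumes the head's forward takes all four stages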