Example No. 1
    def __init__(self, num_anchors, num_classes, in_channels_list, focal_init=False, lite=False, large_kernel=False):
        super().__init__()
        self.num_classes = num_classes
        num_anchors = tuplify(num_anchors, len(in_channels_list))
        kernel_size = 5 if (lite and large_kernel) else 3
        self.loc_heads = nn.ModuleList([
            nn.Sequential(
                Norm("default", c),
                Conv2d(c, n * 4, kernel_size=kernel_size,
                       depthwise_separable=lite, mid_norm_layer='default')
            )
            for c, n in zip(in_channels_list, num_anchors)
        ])
        self.cls_heads = nn.ModuleList([
            nn.Sequential(
                Norm("default", c),
                Conv2d(c, n * num_classes, kernel_size=kernel_size,
                       depthwise_separable=lite, mid_norm_layer='default')
            )
            for c, n in zip(in_channels_list, num_anchors)
        ])

        if focal_init:
            for p in self.cls_heads:
                get_last_conv(p).bias.data.fill_(inverse_sigmoid(0.01))
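
The `focal_init` branch above applies the focal-loss bias trick from RetinaNet: the bias of the last classification conv is set so that the predicted foreground probability starts near a small prior (0.01 here), which keeps the huge number of easy background anchors from destabilizing early training. A minimal sketch of the math, assuming `inverse_sigmoid` is the standard logit function (its definition is not shown in the snippet):

import math

def inverse_sigmoid(p):
    # logit: the inverse of the sigmoid, log(p / (1 - p))
    return math.log(p / (1 - p))

# Filling the bias with inverse_sigmoid(0.01) ~= -4.6 makes
# sigmoid(bias) ~= 0.01 for every class at initialization.
print(round(inverse_sigmoid(0.01), 3))  # -4.595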
Example No. 2
    def __init__(self, num_anchors, num_classes=2, in_channels=245, f_channels=256):
        super().__init__()
        self.num_classes = num_classes
        self.conv = DWConv2d(
            in_channels, f_channels, kernel_size=5,
            norm='default', act='default')
        self.loc_conv = Conv2d(
            f_channels, num_anchors * 4, kernel_size=1)
        self.cls_conv = Conv2d(
            f_channels, num_anchors * num_classes, kernel_size=1)

        bias_init_constant(self.cls_conv, inverse_sigmoid(0.01))
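
Example No. 2 routes the feature map through a depthwise-separable 5x5 conv (`DWConv2d`) before the two 1x1 prediction convs, which keeps the head cheap for mobile-style backbones. A rough plain-PyTorch sketch of what such a block typically looks like (an assumption about `DWConv2d`, not the library's actual implementation):

import torch.nn as nn

def dw_separable(in_channels, out_channels, kernel_size=5):
    # Per-channel (depthwise) spatial conv followed by a 1x1 pointwise conv,
    # with norm and activation roughly standing in for norm='default', act='default'.
    return nn.Sequential(
        nn.Conv2d(in_channels, in_channels, kernel_size,
                  padding=kernel_size // 2, groups=in_channels, bias=False),
        nn.Conv2d(in_channels, out_channels, 1, bias=False),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True),
    )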
Example No. 3
    def __init__(self, num_anchors, num_classes, in_channels_list, f_channels=256, expand_ratio=4, num_layers=4):
        super().__init__()
        self.expand_ratio = expand_ratio
        self.projects = nn.ModuleList([
            MBConv(c, c, f_channels, kernel_size=5)
            for c in in_channels_list
        ])
        self.num_classes = num_classes
        self.loc_head = self._make_head(
            num_layers, f_channels, num_anchors * 4)
        self.cls_head = self._make_head(
            num_layers, f_channels, num_anchors * num_classes)

        bias_init_constant(self.cls_head[-1][1], inverse_sigmoid(0.01))
Example No. 4
    def __init__(self, num_anchors, num_classes, f_channels=256, num_layers=4, lite=False):
        super().__init__()
        self.num_classes = num_classes
        self.loc_head = _make_head(
            f_channels, num_layers, num_anchors * 4, lite=lite)
        self.cls_head = _make_head(
            f_channels, num_layers, num_anchors * num_classes, lite=lite)

        weight_init_normal(self.loc_head, 0, 0.01)
        weight_init_normal(self.cls_head, 0, 0.01)

        fc = self.cls_head[-1]
        if lite:
            fc = fc[-1]
        bias_init_constant(fc, inverse_sigmoid(0.01))
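
Example No. 4 builds the location and classification towers with a module-level `_make_head` helper that is not shown here. A plausible sketch of such a helper for the non-`lite` case (an assumption based on the standard RetinaNet head layout, not the library's code):

import torch.nn as nn

def make_head_sketch(f_channels, num_layers, out_channels):
    # `num_layers` 3x3 conv + ReLU blocks at constant width, then a final
    # 3x3 conv producing the per-anchor predictions; the resulting head is
    # shared across all pyramid levels.
    layers = []
    for _ in range(num_layers):
        layers += [nn.Conv2d(f_channels, f_channels, 3, padding=1),
                   nn.ReLU(inplace=True)]
    layers.append(nn.Conv2d(f_channels, out_channels, 3, padding=1))
    return nn.Sequential(*layers)

Under that layout, `self.cls_head[-1]` is the final prediction conv, which is why its bias (or the last conv inside it in the `lite` case) receives the focal initialization.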
Example No. 5
    def __init__(self, num_anchors, num_classes, in_channels_list, focal_init=False):
        super().__init__()
        self.num_classes = num_classes
        num_anchors = tuplify(num_anchors, len(in_channels_list))
        self.loc_heads = nn.ModuleList([
            nn.Sequential(
                Norm("default", c),
                Conv2d(c, n * 4, kernel_size=1)
            )
            for c, n in zip(in_channels_list, num_anchors)
        ])
        self.cls_heads = nn.ModuleList([
            nn.Sequential(
                Norm("default", c),
                Conv2d(c, n * num_classes, kernel_size=1)
            )
            for c, n in zip(in_channels_list, num_anchors)
        ])

        if focal_init:
            for p in self.cls_heads:
                get_last_conv(p).bias.data.fill_(inverse_sigmoid(0.01))