Example No. 1
    def __init__(
        self,
        in_channels,
        heads,
        head_conv=64,
        final_kernel=1,
        bn=False,
        init_bias=-2.19,
        **kwargs,
    ):
        super(SepHead, self).__init__(**kwargs)

        self.heads = heads
        # Build a separate conv branch for each output head; each entry maps
        # a head name to (number of output channels, number of conv layers).
        for head in self.heads:
            classes, num_conv = self.heads[head]

            fc = Sequential()
            for i in range(num_conv - 1):
                fc.add(
                    nn.Conv2d(in_channels,
                              head_conv,
                              kernel_size=final_kernel,
                              stride=1,
                              padding=final_kernel // 2,
                              bias=True))
                if bn:
                    fc.add(nn.BatchNorm2d(head_conv))
                fc.add(nn.ReLU())

            fc.add(
                nn.Conv2d(head_conv,
                          classes,
                          kernel_size=final_kernel,
                          stride=1,
                          padding=final_kernel // 2,
                          bias=True))

            if 'hm' in head:
                # Heatmap head: bias the last conv so the initial foreground
                # probability is low (focal-loss style initialization).
                fc[-1].bias.data.fill_(init_bias)
            else:
                # Other heads: Kaiming-initialize all conv layers.
                for m in fc.modules():
                    if isinstance(m, nn.Conv2d):
                        kaiming_init(m)

            self.__setattr__(head, fc)  # register the branch under the head's name
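
For context, here is a minimal, self-contained sketch of the same per-head pattern written with stock torch.nn only. The Sequential/.add container and kaiming_init used above are assumed to come from the surrounding codebase (det3d/mmcv-style helpers), and the head names and channel sizes below are illustrative assumptions rather than values taken from the snippet.

import torch
from torch import nn

# Illustrative settings (assumptions): 'hm' is a heatmap head, the others are regression heads.
in_channels, head_conv, final_kernel, init_bias = 64, 64, 1, -2.19
heads = {'hm': (2, 2), 'reg': (2, 2), 'height': (1, 2)}  # head name -> (output channels, num_conv)

branches = nn.ModuleDict()
for head, (classes, num_conv) in heads.items():
    layers = []
    for _ in range(num_conv - 1):
        layers += [
            nn.Conv2d(in_channels, head_conv, final_kernel,
                      padding=final_kernel // 2, bias=True),
            nn.ReLU(),
        ]
    last = nn.Conv2d(head_conv, classes, final_kernel,
                     padding=final_kernel // 2, bias=True)
    if 'hm' in head:
        last.bias.data.fill_(init_bias)  # low initial foreground probability for the heatmap head
    layers.append(last)
    branches[head] = nn.Sequential(*layers)

x = torch.randn(1, in_channels, 128, 128)
outputs = {name: branch(x) for name, branch in branches.items()}  # one prediction map per head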
Example No. 2
    def _make_layer(self, inplanes, planes, num_blocks, stride=1):

        block = Sequential(
            nn.ZeroPad2d(1),
            nn.Conv2d(inplanes, planes, 3, stride=stride, bias=False),
            build_norm_layer(self._norm_cfg, planes)[1],
            nn.ReLU(),
        )

        for j in range(num_blocks):
            block.add(nn.Conv2d(planes, planes, 3, padding=1, bias=False))
            block.add(build_norm_layer(self._norm_cfg, planes)[1])
            block.add(nn.ReLU())

        return block, planes
Example No. 3
    def _make_layer(self, inplanes, planes, num_blocks, stride=1):
        # e.g. inplanes=128, planes=128, num_blocks=5, stride=1
        block = Sequential(
            nn.ZeroPad2d(1),   # pad so the 3x3 conv keeps the input and output feature maps the same size (e.g. 128)
            nn.Conv2d(inplanes, planes, 3, stride=stride, bias=False),
            build_norm_layer(self._norm_cfg, planes)[1],
            # nn.BatchNorm2d(planes, eps=1e-3, momentum=0.01),
            nn.ReLU(),
        )

        for j in range(num_blocks):
            block.add(nn.Conv2d(planes, planes, 3, padding=1, bias=False))
            block.add(build_norm_layer(self._norm_cfg, planes)[1])  # e.g. nn.BatchNorm2d(planes, eps=1e-3, momentum=0.01)
            block.add(nn.ReLU())

        return block, planes
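
Examples No. 2 and No. 3 build the same downsampling block. As a hedged, self-contained sketch, assuming build_norm_layer(self._norm_cfg, planes) resolves to a BatchNorm2d like the commented-out line above, the block can be reproduced with stock torch.nn as follows (inplanes=128, planes=128, num_blocks=5, stride=1 is the case noted in Example No. 3):

import torch
from torch import nn

def make_layer(inplanes, planes, num_blocks, stride=1):
    # Entry stage: explicit zero padding so the strided 3x3 conv produces the expected spatial size.
    layers = [
        nn.ZeroPad2d(1),
        nn.Conv2d(inplanes, planes, 3, stride=stride, bias=False),
        nn.BatchNorm2d(planes, eps=1e-3, momentum=0.01),
        nn.ReLU(),
    ]
    # num_blocks additional conv-BN-ReLU stages at stride 1.
    for _ in range(num_blocks):
        layers += [
            nn.Conv2d(planes, planes, 3, padding=1, bias=False),
            nn.BatchNorm2d(planes, eps=1e-3, momentum=0.01),
            nn.ReLU(),
        ]
    return nn.Sequential(*layers), planes

block, out_planes = make_layer(128, 128, 5, stride=1)
y = block(torch.randn(1, 128, 128, 128))  # stays [1, 128, 128, 128] because stride=1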