    def __init__(self,
                 num_classes: int,
                 in_channels: int = 256,
                 out_channels: int = 128):
        super(FPNSegmentationHead, self).__init__()
        # One 3x3 conv block per pyramid level.
        self.conv1 = modules.conv3x3(in_channels, out_channels)
        self.conv2 = modules.conv3x3(in_channels, out_channels)
        self.conv3 = modules.conv3x3(in_channels, out_channels)
        self.conv4 = modules.conv3x3(in_channels, out_channels)
        # Final block fuses the four concatenated levels into class logits.
        self.conv3x3 = modules.conv3x3(out_channels * 4, num_classes)
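Only the constructor appears in the snippet. A hypothetical forward pass (the level names p2..p5, the alias F for torch.nn.functional, and the bilinear upsampling are assumptions, not from the source) would run one block per level, bring everything to the finest resolution, and fuse:

    def forward(self, p2, p3, p4, p5):
        feats = [self.conv1(p2), self.conv2(p3), self.conv3(p4), self.conv4(p5)]
        size = feats[0].shape[-2:]
        # Upsample the coarser levels to the finest one before concatenating.
        feats = [F.interpolate(f, size=size, mode='bilinear', align_corners=False)
                 for f in feats]
        return self.conv3x3(torch.cat(feats, dim=1))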
Example 2
    def __init__(self, in_channels, out_channels, bottleneck, group, anytime):
        super(AnytimeGrouppedBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.bottleneck = bottleneck
        self.group = group
        self.anytime = anytime

        # Each anytime step owns an equal, integral share of the groups.
        assert group % anytime == 0

        bottleneck *= group
        self.conv1 = conv1x1(in_channels, bottleneck)
        self.conv2 = conv3x3(bottleneck, bottleneck, group=group)
        # self.conv3 = conv1x1(bottleneck, out_channels*anytime, group=anytime)
        self.conv3 = conv1x1(bottleneck, out_channels)

        # Separate BatchNorm per anytime step: at step i, bn2/bn3 normalize
        # the first bottleneck * (i + 1) // anytime channels.
        self.bn1 = nn.ModuleList()
        for i in range(anytime):
            self.bn1.append(nn.BatchNorm2d(in_channels))
        self.bn2 = nn.ModuleList()
        for i in range(anytime):
            self.bn2.append(nn.BatchNorm2d(bottleneck * (i + 1) // anytime))
        self.bn3 = nn.ModuleList()
        for i in range(anytime):
            self.bn3.append(nn.BatchNorm2d(bottleneck * (i + 1) // anytime))

        self.relu = nn.ReLU(inplace=True)
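A quick construction check; the numbers below just trace the arithmetic in __init__ (conv1x1/conv3x3 and the forward pass are defined elsewhere in the repository):

block = AnytimeGrouppedBlock(in_channels=64, out_channels=64,
                             bottleneck=4, group=8, anytime=4)
# group % anytime == 0 holds (8 % 4 == 0), so each step owns 2 groups.
# The grouped 3x3 conv runs on bottleneck * group = 32 channels, and
# bn2/bn3 hold one BatchNorm per step, over 8, 16, 24 and 32 channels.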
Example 3
    def __init__(self,
                 num_blocks=3,
                 num_classes=10,
                 width=64,
                 bottleneck=4,
                 cardinality=8,
                 anytime=8,
                 init=False):
        super(AnytimeResNeXt, self).__init__()
        self.num_blocks = num_blocks
        self.anytime = anytime
        self.cardinality = cardinality
        self.width = width
        self.num_classes = num_classes
        self.bottleneck = bottleneck

        w, b, c = width, bottleneck, cardinality
        # Stem conv, then two transitions that halve the spatial size and
        # widen the channel dimension with ChannelPadding.
        self.transitions = nn.ModuleList([conv3x3(3, w)])
        for i in range(2):
            self.transitions.append(
                nn.Sequential(nn.AvgPool2d(2), ChannelPadding(w << i)))

        self.stages = nn.ModuleList()
        for i in range(3):
            self.stages.append(nn.ModuleList())
            for j in range(num_blocks):
                self.stages[i].append(
                    AnytimeGrouppedBlock(w << i, w << i, b << i, c, anytime))

        # One classifier head per anytime exit.
        self.output_layers = nn.ModuleList()
        for i in range(anytime):
            layer = nn.Sequential(nn.BatchNorm2d(w << 2),
                                  nn.ReLU(inplace=True), nn.AvgPool2d(8),
                                  View(-1), nn.Linear(w << 2, num_classes))
            self.output_layers.append(layer)

        # He-style initialization: N(0, sqrt(2 / fan_out)) for conv weights.
        if init:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                    m.weight.data.normal_(0, math.sqrt(2. / n))
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()
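A minimal usage sketch, assuming 32x32 CIFAR-style inputs (the AvgPool2d(8) and Linear(w << 2, num_classes) in the exits imply that input size; the forward pass itself is not shown in the snippet):

model = AnytimeResNeXt(num_blocks=3, num_classes=10, init=True)
# Stages run at widths 64, 128 and 256 (w << i); the two AvgPool2d(2)
# transitions bring 32x32 down to 8x8, which AvgPool2d(8) collapses to
# 1x1 before each of the `anytime` Linear(256, 10) exit classifiers.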
Example 4
    def __init__(self, in_channels, out_channels, bottleneck, group):
        super(GrouppedBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.bottleneck = bottleneck
        self.group = group

        # Expand to group * bottleneck channels, run a grouped 3x3, project back.
        bottleneck *= group
        self.conv1 = conv1x1(in_channels, bottleneck)
        self.conv2 = conv3x3(bottleneck, bottleneck, group=group)
        self.conv3 = conv1x1(bottleneck, out_channels)

        self.bn1 = nn.BatchNorm2d(in_channels)
        self.bn2 = nn.BatchNorm2d(bottleneck)
        self.bn3 = nn.BatchNorm2d(bottleneck)

        self.relu = nn.ReLU(inplace=True)
Example 5
    def test_return(self):
        conv: nn.Module = modules.conv3x3(2, 3)
        self.assertEqual(len(conv), 3)  # conv3x3 returns (Conv2d, BatchNorm2d, ReLU)
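The test above pins down the shape of modules.conv3x3: a three-element Sequential of Conv2d, BatchNorm2d and ReLU. A minimal sketch consistent with that (the padding and bias settings are assumptions):

import torch.nn as nn

def conv3x3(in_channels: int, out_channels: int) -> nn.Sequential:
    # padding=1 keeps the spatial size; bias is dropped because the
    # following BatchNorm cancels it (assumption, common practice).
    return nn.Sequential(
        nn.Conv2d(in_channels, out_channels, 3, padding=1, bias=False),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True))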
Example 6
    def __init__(self,
                 num_blocks=3,
                 num_classes=10,
                 width=64,
                 bottleneck=4,
                 cardinality=8,
                 pretrained=None,
                 num_copies=None):
        super(ResNeXt, self).__init__()
        self.num_blocks = num_blocks
        self.num_classes = num_classes
        self.width = width
        self.bottleneck = bottleneck
        self.cardinality = cardinality

        w, b, c = width, bottleneck, cardinality
        self.transitions = nn.ModuleList([conv3x3(3, w)])
        for i in range(2):
            self.transitions.append(
                nn.Sequential(nn.AvgPool2d(2), ChannelPadding(w << i)))
        self.stages = nn.ModuleList()
        for i in range(3):
            self.stages.append(nn.ModuleList())
            for j in range(num_blocks):
                self.stages[i].append(GrouppedBlock(w << i, w << i, b << i, c))
        self.output_layer = nn.Sequential(nn.BatchNorm2d(w << 2),
                                          nn.ReLU(inplace=True),
                                          nn.AvgPool2d(8), View(-1),
                                          nn.Linear(w << 2, num_classes))

        if pretrained is not None:
            assert num_copies > 0 and c % num_copies == 0

            # Draw num_copies nonnegative mixing weights per block that sum
            # to 1: sort uniform samples together with a trailing 1, then
            # take successive differences.
            weights = np.concatenate([
                np.random.uniform(size=(3, num_blocks, num_copies - 1)),
                np.ones((3, num_blocks, 1))
            ], axis=2)
            weights = np.sort(weights)
            for i in range(3):
                for j in range(num_blocks):
                    for k in reversed(range(1, num_copies)):
                        weights[i, j, k] -= weights[i, j, k - 1]
            self.weights = weights
            print(weights)

            c2 = c // num_copies  # groups per copy; integer division for slicing
            own_state = self.state_dict()
            pretrained_state = torch.load(pretrained, map_location='cpu')
            for name, param in pretrained_state.items():
                if 'transitions' in name or 'output_layer' in name or 'bn1' in name:
                    own_state[name].copy_(param)
                else:
                    _, i, j, _, _ = name.split('.')
                    i, j = int(i), int(j)
                    u = c2 * (b << i)
                    if 'conv3' not in name:
                        # conv1/conv2: replicate the pretrained filters into
                        # every copy along the output channels.
                        for k in range(num_copies):
                            own_state[name][k * u:(k + 1) * u].copy_(param)
                    else:
                        # conv3: split along the input channels and rescale so
                        # the copies sum back to the pretrained projection.
                        for k in range(num_copies):
                            own_state[name][:, k * u:(k + 1) * u].copy_(
                                param * weights[i, j, k])
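A worked trace of the copy logic, using hypothetical values cardinality=8, num_copies=2, bottleneck=4 at stage i=0:

# c2 = 8 // 2 = 4 groups per copy, u = c2 * (4 << 0) = 16 channels.
# conv1/conv2: copy k receives the pretrained filters verbatim in output
#   rows [k*16, (k+1)*16), so both copies start identical.
# conv3: input columns [k*16, (k+1)*16) get param * weights[i, j, k]; since
#   weights[i, j, :] sums to 1, the copies together reproduce the pretrained
#   projection exactly at initialization.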
Example 7
    def __init__(self, in_channels: int, out_channels: int):
        super(FPNCenter, self).__init__()
        self.conv = modules.conv3x3(in_channels, out_channels)
Example 8
    def __init__(self, in_channels, channels, out_channels):
        super(Decoder, self).__init__()
        self.conv1 = modules.conv3x3(in_channels, channels)
        self.conv2 = modules.conv3x3(channels, out_channels)
        # Spatial (sSE) and channel (cSE) squeeze-and-excitation attention.
        self.s_att = attentions.sSE(out_channels)
        self.c_att = attentions.cSE(out_channels, 16)
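The forward pass is not shown; a plausible sketch, assuming sSE/cSE follow the usual concurrent spatial/channel squeeze-and-excitation pattern and are fused by addition (an assumption, not from the source):

    def forward(self, x):
        x = self.conv2(self.conv1(x))
        # scSE fusion: sum the spatially-gated and channel-gated maps.
        return self.s_att(x) + self.c_att(x)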