Example #1
    def __init__(self, block, num_blocks, size, head7x7=False):
        super(RefineDmeResnet, self).__init__()
        self.inplanes = 64
        self.head7x7 = head7x7

        if self.head7x7:
            # Classic ResNet stem: a single 7x7, stride-2 convolution.
            self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False)
            self.bn1 = nn.BatchNorm2d(64)
        else:
            # Lighter stem: three 3x3 convolutions, the last two grouped.
            self.conv1 = nn.Conv2d(3, 32, 3, 2, 1, bias=False)
            self.bn1 = nn.BatchNorm2d(32)
            self.conv2 = nn.Conv2d(32, 32, 3, 1, 1, groups=8, bias=False)
            self.bn2 = nn.BatchNorm2d(32)
            self.conv3 = nn.Conv2d(32, 64, 3, 1, 1, groups=16, bias=False)
            self.bn3 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # Bottom-up layers
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.inchannel = block.expansion * 512
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.smooth1 = nn.Conv2d(self.inchannel,
                                 512,
                                 kernel_size=3,
                                 stride=1,
                                 padding=1)
        self.fpn = FpnAdapter([128, 256, 512, 256], 4)
        self._init_modules()
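
A minimal instantiation sketch, assuming a `Bottleneck`-style block (expansion 4) and ResNet-50 stage counts; the block class and the `size` keys understood by `add_extras` live elsewhere in the repository, so both are hypothetical here:

    # Hypothetical arguments: Bottleneck and [3, 4, 6, 3] mirror ResNet-50;
    # size selects the extra-layer configuration inside add_extras.
    model = RefineDmeResnet(Bottleneck, [3, 4, 6, 3], size=320, head7x7=False)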
Example #2
    def __init__(self, block, layers, size, num_classes=1000):
        self.inplanes = 64
        super(DetNet, self).__init__()
        self.inchannel = 1024
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.smooth1 = nn.Conv2d(self.inchannel,
                                 512,
                                 kernel_size=3,
                                 stride=1,
                                 padding=1)
        self.conv1 = nn.Conv2d(3,
                               64,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        # DetNet's last two stages are built with _make_new_layer, which
        # keeps the stride fixed instead of downsampling further.
        self.layer4 = self._make_new_layer(256, layers[3])
        self.layer5 = self._make_new_layer(256, layers[4])
        self.avgpool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(1024, num_classes)
        self.fpn = FpnAdapter([512, 1024, 512, 256], 4)
        # Kaiming-style initialization: conv weights ~ N(0, sqrt(2/n)),
        # BatchNorm scale 1 and bias 0.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        self._init_modules()
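
Unlike the four-stage ResNet variants, DetNet consumes five stage counts, since `layer4` and `layer5` are built with `_make_new_layer` (the dilated stages that hold the stride fixed). A hedged sketch, assuming the `[3, 4, 6, 3, 3]` counts of DetNet-59:

    # Hypothetical arguments; [3, 4, 6, 3, 3] is the DetNet-59 configuration.
    model = DetNet(Bottleneck, [3, 4, 6, 3, 3], size=300, num_classes=1000)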
Example #3
    def __init__(self, block, num_blocks, size):
        super(RefineResnet, self).__init__()
        self.inplanes = 64

        self.conv1 = nn.Conv2d(3,
                               64,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)

        # Bottom-up layers
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.inchannel = block.expansion * 512
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.smooth1 = nn.Conv2d(self.inchannel,
                                 512,
                                 kernel_size=3,
                                 stride=1,
                                 padding=1)
        self.fpn = FpnAdapter([512, 1024, 512, 256], 4)
        self._init_modules()
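
The same pattern over a plain ResNet backbone; a hedged sketch, assuming a `Bottleneck` block with expansion 4, which makes `self.inchannel = 2048` and matches the `smooth1` input width:

    # Hypothetical arguments; with expansion 4, inchannel = 4 * 512 = 2048.
    model = RefineResnet(Bottleneck, [3, 4, 6, 3], size=320)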
Example #4
    def __init__(self,
                 block,
                 block_drop,
                 num_blocks,
                 size,
                 head7x7=False,
                 baseWidth=4,
                 cardinality=32,
                 drop_prob=0.1,
                 block_size=7,
                 nr_steps=2e3):
        super(RefineSeResneXtDrop, self).__init__()
        self.inplanes = 32
        self.cardinality = cardinality
        self.baseWidth = baseWidth

        # DropBlock whose drop probability is linearly annealed from 0 to
        # drop_prob over nr_steps scheduler steps.
        self.dropblock = LinearScheduler(
            DropBlock2D(drop_prob=drop_prob, block_size=block_size),
            start_value=0.,
            stop_value=drop_prob,
            nr_steps=nr_steps  # 2e3
        )
        self.head7x7 = head7x7

        if self.head7x7:
            self.conv1 = nn.Conv2d(3, 32, 7, 2, 3, bias=False)
            self.bn1 = nn.BatchNorm2d(32)
        else:
            self.conv1 = nn.Conv2d(3, 32, 3, 2, 1, bias=False)
            self.bn1 = nn.BatchNorm2d(32)
            self.conv2 = nn.Conv2d(32, 32, 3, 1, 1, groups=8, bias=False)
            self.bn2 = nn.BatchNorm2d(32)
            self.conv3 = nn.Conv2d(32, 32, 3, 1, 1, groups=16, bias=False)
            self.bn3 = nn.BatchNorm2d(32)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # Bottom-up layers
        self.layer1 = self._make_layer(block, 32, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 64, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block_drop,
                                       128,
                                       num_blocks[2],
                                       stride=2)
        self.layer4 = self._make_layer(block_drop,
                                       256,
                                       num_blocks[3],
                                       stride=2)
        self.inchannel = block.expansion * 256
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.smooth1 = nn.Conv2d(self.inchannel,
                                 256,
                                 kernel_size=3,
                                 stride=1,
                                 padding=1)
        self.fpn = FpnAdapter([256, 512, 256, 256], 4)
        self._init_modules()
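
The `LinearScheduler`/`DropBlock2D` pair matches the `dropblock` package, where the scheduler ramps the drop probability from `start_value` to `stop_value` over `nr_steps` calls to `step()`. Assuming that API, the schedule is advanced once per training iteration:

    # Assuming the dropblock package's LinearScheduler API: call step()
    # once per batch so the drop probability anneals from 0 to drop_prob.
    model.dropblock.step()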
Example #5
    def __init__(self, size, channel_size='48'):
        super(VGG16Extractor, self).__init__()
        self.vgg = nn.ModuleList(vgg(base[str(size)], 3))
        self.extras = nn.ModuleList(add_extras(str(size)))
        # L2 normalization with learnable per-channel scales, applied (per
        # the attribute names) to the conv4_3- and conv5_3-level features.
        self.L2Norm_4_3 = L2Norm(512, 10)
        self.L2Norm_5_3 = L2Norm(1024, 8)
        # self.last_layer_trans = last_layer_trans()
        # self.trans_layers = nn.ModuleList(trans_layers(str(size)))
        # self.latent_layers = nn.ModuleList(latent_layers((str(size))))
        # self.up_layers = nn.ModuleList(up_layers(str(size)))
        self.fpn = FpnAdapter([512, 1024, 256, 256], 4)
        self._init_modules()
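
A hedged sketch; `size` indexes the `base` configuration table passed to `vgg()`, with 300 assumed here as the usual SSD-style input resolution. Note that `channel_size` is accepted but never used in this constructor:

    # Hypothetical: 300 assumes an SSD300-style key exists in `base`.
    model = VGG16Extractor(300)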
Example #6
    def __init__(self, block, num_blocks, size):
        super(RefineForResnet18, self).__init__()
        self.inplanes = 32

        self.conv1 = nn.Conv2d(
            3, 32, kernel_size=7, stride=4, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(32)

        # Bottom-up layers
        self.layer1 = self._make_layer(block, 32, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 64, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 128, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 256, num_blocks[3], stride=2)
        self.inchannel = block.expansion * 256
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.fpn = FpnAdapter([64, 128, 256, 64], 4)
        self._init_modules()
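
Note the stride-4 `conv1` (no separate max-pool in this snippet) and the halved channel plan, sized for ResNet-18. A hedged sketch, assuming a `BasicBlock` with expansion 1, so `self.inchannel = 256`:

    # Hypothetical arguments; BasicBlock with [2, 2, 2, 2] is ResNet-18.
    model = RefineForResnet18(BasicBlock, [2, 2, 2, 2], size=320)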
Example #7
    def __init__(self,
                 block,
                 block_drop,
                 num_blocks,
                 size,
                 drop_prob=0.1,
                 block_size=7,
                 nr_steps=2e3):
        super(RefineResnetDrop, self).__init__()
        self.inplanes = 64
        self.dropblock = LinearScheduler(
            DropBlock2D(drop_prob=drop_prob, block_size=block_size),
            start_value=0.,
            stop_value=drop_prob,
            nr_steps=nr_steps  # 2e3
        )
        self.conv1 = nn.Conv2d(3,
                               64,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)

        # Bottom-up layers
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block_drop,
                                       256,
                                       num_blocks[2],
                                       stride=2)
        self.layer4 = self._make_layer(block_drop,
                                       512,
                                       num_blocks[3],
                                       stride=2)
        self.inchannel = block.expansion * 512
        self.extras = nn.ModuleList(add_extras(str(size), self.inchannel))
        self.smooth1 = nn.Conv2d(self.inchannel,
                                 512,
                                 kernel_size=3,
                                 stride=1,
                                 padding=1)
        self.fpn = FpnAdapter([512, 1024, 512, 256], 4)
        self._init_modules()
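
As in the SeResneXt variant above, only the two deepest stages go through `block_drop`, keeping DropBlock away from the large early feature maps. A hedged sketch, with `BottleneckDrop` standing in for whatever DropBlock-aware bottleneck the repository pairs with this model:

    # Hypothetical block classes; BottleneckDrop is a placeholder name.
    model = RefineResnetDrop(Bottleneck, BottleneckDrop, [3, 4, 6, 3],
                             size=320, drop_prob=0.1, block_size=7)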