Example #1
    def __init__(self, block, num_blocks, num_classes=10, norm_type='bn', pretrained=False, imagenet=False):
        super(ResNet, self).__init__()
        self.in_planes = 64
        self.num_blocks = num_blocks
        self.norm_type = norm_type

        if num_classes == 1000 or imagenet:
            self.convbnrelu_1 = nn.Sequential(
                ConvBlock(3, 64, 7, 2, 3, bn=norm_type, relu=True),  # 112
                nn.MaxPool2d(3, 2, 1),  # 56
            )
        else:
            self.convbnrelu_1 = ConvBlock(3, 64, 3, 1, 1, bn=norm_type, relu=True)  # 32
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)   # 32 (CIFAR) / 56 (ImageNet)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)  # 16 / 28
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)  # 8 / 14
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)  # 4 / 7
        self.linear = nn.Linear(512 * block.expansion, num_classes)

        if num_classes == 1000 and pretrained:
            assert sum(num_blocks) == 8, 'only implemented for resnet18'
            layers = [self.convbnrelu_1[0].conv, self.convbnrelu_1[0].bn]
            for blocklayers in [self.layer1, self.layer2, self.layer3, self.layer4]:
                for blocklayer in blocklayers:
                    b1 = blocklayer.convbnrelu_1
                    b2 = blocklayer.convbn_2
                    b3 = blocklayer.shortcut
                    layers += [b1.conv, b1.bn, b2.conv, b2.bn]
                    if not isinstance(b3, nn.Sequential):
                        layers += [b3.conv, b3.bn]
            layers += [self.linear]

            self._load_pretrained_from_torch(layers)
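
The snippets rely on a ConvBlock helper that is never shown. A minimal sketch, assuming it takes positional (in, out, kernel, stride, padding) arguments plus bn= and relu= keywords, defaults to a 3x3 convolution with ReLU, and exposes .conv and .bn the way the pretrained-loading code indexes them; this is inferred from the call sites, not the real implementation:

import torch.nn as nn

class ConvBlock(nn.Module):
    # hypothetical reconstruction of the unshown helper, inferred from how it is called above
    def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, padding=1,
                 bn='bn', relu=True):
        super().__init__()
        self.conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride, padding)
        # only 'bn' and 'none' are handled here; other norm types would slot in the same way
        self.bn = nn.BatchNorm2d(out_planes) if bn == 'bn' else nn.Sequential()
        self.relu = nn.ReLU(inplace=True) if relu else nn.Sequential()

    def forward(self, x):
        return self.relu(self.bn(self.conv(x)))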
Example #2
    def __init__(self, in_planes, planes, stride=1, norm_type='bn'):
        super(BasicBlock, self).__init__()

        self.convbnrelu_1 = ConvBlock(in_planes,
                                      planes,
                                      3,
                                      stride,
                                      1,
                                      bn=norm_type,
                                      relu=True)
        self.convbn_2 = ConvBlock(planes,
                                  planes,
                                  3,
                                  1,
                                  1,
                                  bn=norm_type,
                                  relu=False)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = ConvBlock(in_planes,
                                      self.expansion * planes,
                                      1,
                                      stride,
                                      0,
                                      bn=norm_type,
                                      relu=False)
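
BasicBlock uses self.expansion and needs a forward pass, neither of which appears in the fragment. In the standard BasicBlock, expansion is a class attribute equal to 1 and the shortcut is added before the final ReLU; a hedged sketch of the missing pieces (class-body fragment, torch assumed imported):

    expansion = 1  # assumed class attribute; referenced above but not shown

    def forward(self, x):
        out = self.convbnrelu_1(x)
        out = self.convbn_2(out)
        out = out + self.shortcut(x)  # residual connection
        return torch.relu(out)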
Example #3
    def __init__(self, in_channels, num_classes, norm_type='bn', pretrained=False, imagenet=False):
        super(AlexNetNormal, self).__init__()

        params = []

        if num_classes == 1000 or imagenet:  # imagenet1000
            if pretrained:
                norm_type = 'none'
            self.features = nn.Sequential(
                ConvBlock(3, 64, 11, 4, 2, bn=norm_type),
                nn.MaxPool2d(kernel_size=3, stride=2),
                ConvBlock(64, 192, 5, 1, 2, bn=norm_type),
                nn.MaxPool2d(kernel_size=3, stride=2),
                ConvBlock(192, 384, 3, 1, 1, bn=norm_type),
                ConvBlock(384, 256, 3, 1, 1, bn=norm_type),
                ConvBlock(256, 256, 3, 1, 1, bn=norm_type),
                nn.MaxPool2d(kernel_size=3, stride=2),
                nn.AdaptiveAvgPool2d((6, 6))
            )

            self.classifier = nn.Sequential(
                nn.Dropout(),
                nn.Linear(256 * 6 * 6, 4096),
                nn.ReLU(inplace=True),
                nn.Dropout(),
                nn.Linear(4096, 4096),
                nn.ReLU(inplace=True),
                nn.Linear(4096, num_classes),
            )

            for layer in self.features:
                if isinstance(layer, ConvBlock):
                    params.append(layer.conv.weight)
                    params.append(layer.conv.bias)

            for layer in self.classifier:
                if isinstance(layer, nn.Linear):
                    params.append(layer.weight)
                    params.append(layer.bias)

            if pretrained:
                self._load_pretrained_from_torch(params)
        else:
            self.features = nn.Sequential(
                ConvBlock(in_channels, 64, 5, 1, 2, bn=norm_type),
                nn.MaxPool2d(kernel_size=2, stride=2),  # 16x16
                ConvBlock(64, 192, 5, 1, 2, bn=norm_type),
                nn.MaxPool2d(kernel_size=2, stride=2),  # 8x8
                ConvBlock(192, 384, bn=norm_type),
                ConvBlock(384, 256, bn=norm_type),
                ConvBlock(256, 256, bn=norm_type),
                nn.MaxPool2d(kernel_size=2, stride=2),  # 4x4
            )

            self.classifier = nn.Linear(4 * 4 * 256, num_classes)
Example #4
    def __init__(self, in_channels, num_classes, norm_type='bn'):
        super(AlexNetNormal, self).__init__()

        self.features = nn.Sequential(
            ConvBlock(in_channels, 64, 5, 1, 2, bn=norm_type),
            nn.MaxPool2d(kernel_size=2, stride=2),  # 16x16
            ConvBlock(64, 192, 5, 1, 2, bn=norm_type),
            nn.MaxPool2d(kernel_size=2, stride=2),  # 8x8
            ConvBlock(192, 384, bn=norm_type),
            ConvBlock(384, 256, bn=norm_type),
            ConvBlock(256, 256, bn=norm_type),
            nn.MaxPool2d(kernel_size=2, stride=2),  # 4x4
        )

        self.classifier = nn.Linear(4 * 4 * 256, num_classes)
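
Neither AlexNetNormal snippet shows forward. With 32x32 inputs, the three 2x2 max-pools leave a 256x4x4 feature map that must be flattened before the linear classifier (the ImageNet branch is pooled to 256x6x6 instead). A minimal sketch under that assumption:

    def forward(self, x):
        x = self.features(x)       # 256x4x4 for 32x32 inputs, 256x6x6 in the ImageNet branch
        x = x.view(x.size(0), -1)  # flatten to match the classifier's input size
        return self.classifier(x)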
Example #5
    # helper: build either a passport block or a plain conv block from one layer's kwargs
    def convblock_(*args, **kwargs):
        if passport_kwargs['flag']:
            return PassportBlock(*args,
                                 **kwargs,
                                 passport_kwargs=passport_kwargs)
        else:
            return ConvBlock(*args, **kwargs, bn=passport_kwargs['norm_type'])
    def __init__(self, in_channels, num_classes, passport_kwargs):
        super().__init__()

        maxpoolidx = [1, 3, 7]  # layer indices that are 2x2 max-pools

        layers = []

        inp = in_channels
        oups = {0: 64, 2: 192, 4: 384, 5: 256, 6: 256}  # output channels per conv layer
        kp = {0: (5, 2), 2: (5, 2), 4: (3, 1), 5: (3, 1), 6: (3, 1)}  # (kernel, padding) per conv layer

        for layeridx in range(8):
            if layeridx in maxpoolidx:
                layers.append(nn.MaxPool2d(2, 2))
            else:
                k = kp[layeridx][0]
                p = kp[layeridx][1]
                normtype = passport_kwargs[str(layeridx)]['norm_type']
                if passport_kwargs[str(layeridx)]['flag']:
                    layers.append(
                        PassportPrivateBlock(inp, oups[layeridx], k, 1, p,
                                             passport_kwargs[str(layeridx)]))
                else:
                    layers.append(
                        ConvBlock(inp, oups[layeridx], k, 1, p, normtype))

                inp = oups[layeridx]

        self.features = nn.Sequential(*layers)

        self.classifier = nn.Linear(4 * 4 * 256, num_classes)
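
passport_kwargs is indexed per layer with string keys, and each entry must provide at least 'flag' and 'norm_type'. A plausible layout, shown only as an illustration (the real dictionary may carry additional passport-specific fields):

passport_kwargs_example = {
    str(i): {
        'flag': i in (4, 5, 6),  # hypothetical choice: passport only the last three convs
        'norm_type': 'bn',
    }
    for i in range(8) if i not in (1, 3, 7)  # indices 1, 3, 7 are max-pools and are never looked up
}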
Example #7
    def __init__(self, block, num_blocks, num_classes=10, norm_type='bn'):
        super(ResNet, self).__init__()
        self.in_planes = 64
        self.num_blocks = num_blocks
        self.norm_type = norm_type

        self.convbnrelu_1 = ConvBlock(3, 64, 3, 1, 1, bn=norm_type, relu=True)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512 * block.expansion, num_classes)
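
Constructors like this are usually wrapped in small factory functions. A hypothetical example for the CIFAR variant above (the factory name and the usual forward pass are assumptions, not shown in the snippets):

def ResNet18(num_classes=10, norm_type='bn'):
    # ResNet and BasicBlock refer to the classes sketched above
    return ResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes, norm_type=norm_type)

# usage sketch: ResNet18()(torch.randn(1, 3, 32, 32)) would yield logits of shape (1, 10),
# assuming the usual forward (stem, four layers, global average pooling, linear head).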
Example #8
    def __init__(self, in_channels, num_classes, passport_kwargs, pretrained=False, imagenet=False):
        super(AlexNetPassport, self).__init__()

        maxpoolidx = [1, 3, 7]

        layers = []
        params = []

        inp = in_channels
        oups = {
            0: 64,
            2: 192,
            4: 384,
            5: 256,
            6: 256
        }
        kp = {
            # (kernel, padding); the ImageNet stem also carries its stride: (kernel, stride, padding)
            0: (5, 2) if num_classes != 1000 else (11, 4, 2),
            2: (5, 2),
            4: (3, 1),
            5: (3, 1),
            6: (3, 1)
        }

        for layeridx in range(8):
            if layeridx in maxpoolidx:
                ks = 2 if num_classes != 1000 else 3
                layers.append(nn.MaxPool2d(ks, 2))
            else:
                if len(kp[layeridx]) == 2:
                    k, p = kp[layeridx]
                    s = 1
                else:
                    k, s, p = kp[layeridx]
                normtype = passport_kwargs[str(layeridx)]['norm_type']
                if passport_kwargs[str(layeridx)]['flag']:
                    layers.append(PassportBlock(inp, oups[layeridx], k, s, p, passport_kwargs[str(layeridx)]))
                else:
                    layers.append(ConvBlock(inp, oups[layeridx], k, s, p, normtype))

                inp = oups[layeridx]

        if num_classes == 1000 or imagenet:
            layers.append(nn.AdaptiveAvgPool2d((6, 6)))

        self.features = nn.Sequential(*layers)

        if num_classes == 1000 or imagenet:
            self.classifier = nn.Sequential(
                nn.Dropout(),
                nn.Linear(256 * 6 * 6, 4096),
                nn.ReLU(inplace=True),
                nn.Dropout(),
                nn.Linear(4096, 4096),
                nn.ReLU(inplace=True),
                nn.Linear(4096, num_classes),
            )
        else:
            self.classifier = nn.Linear(4 * 4 * 256, num_classes)

        if num_classes == 1000 and pretrained:
            assert normtype == 'none', 'torchvision pretrained AlexNet has no normalization layers'
            layers = []
            for layer in self.features:
                if isinstance(layer, (ConvBlock, PassportBlock)):
                    layers.append(layer)

            for layer in self.classifier:
                if isinstance(layer, nn.Linear):
                    layers.append(layer)

            self._load_pretrained_from_torch(layers)
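
Several snippets call self._load_pretrained_from_torch, which is not shown. A minimal stand-alone sketch, assuming it copies torchvision's pretrained weights position by position into the parameter list collected in Example #3 (Examples #1 and #8 collect modules instead, so the real method likely handles both cases):

import torch
import torchvision

def load_pretrained_from_torch_sketch(params):
    # hypothetical stand-in for the unshown method: copy torchvision's pretrained AlexNet
    # parameters into the collected list in order (conv weights/biases, then linear ones)
    reference = torchvision.models.alexnet(pretrained=True)  # newer torchvision: weights=...
    with torch.no_grad():
        for own, ref in zip(params, reference.parameters()):
            own.copy_(ref)  # shapes must match, hence norm_type='none' for the pretrained path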