Code example #1: multi-stage convolutional feature extractor
    def __init__(self, opt):
        self.in_planes = opt["in_planes"]
        self.out_planes = opt["out_planes"]
        self.num_stages = opt["num_stages"]
        self.average_end = opt.get("average_end", False)

        if isinstance(self.out_planes, int):
            self.out_planes = [self.out_planes] * self.num_stages
        assert isinstance(self.out_planes, list)
        assert len(self.out_planes) == self.num_stages

        num_planes = [self.in_planes] + self.out_planes
        use_post_relu = opt.get("userelu", True)
        use_pre_relu = opt.get("use_pre_relu", False)

        self.use_pool = opt.get("use_pool")
        if self.use_pool is None:
            self.use_pool = [True] * self.num_stages
        assert len(self.use_pool) == self.num_stages

        feature_blocks = []
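        # Stack the stages: the first block's pre-ReLU and the last block's
        # post-ReLU are configurable; intermediate blocks always get a
        # pre-ReLU and never a post-ReLU.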
        for i in range(self.num_stages):
            feature_blocks.append(
                ConvBlock(
                    num_planes[i],
                    num_planes[i + 1],
                    pre_relu=(use_pre_relu if (i == 0) else True),
                    post_relu=(use_post_relu if i == (self.num_stages - 1) else False),
                    pool=self.use_pool[i],
                )
            )

        all_feat_names = ["conv" + str(s + 1) for s in range(self.num_stages)]

        if self.average_end:
            feature_blocks.append(tools.GlobalPooling(pool_type="avg"))
            all_feat_names.append("GlobalAvgPooling")

        super().__init__(all_feat_names, feature_blocks)

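        # He-style initialization: conv weights ~ N(0, 2 / fan_out); batch
        # norm starts as the identity transform.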
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2.0 / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
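
This __init__ is evidently the constructor of a sequential feature-extractor class: it ends by handing all_feat_names and feature_blocks to super().__init__. A minimal usage sketch, assuming the owning class is named ConvNet (a hypothetical name) and that ConvBlock and tools come from the surrounding repo:

    # Hypothetical usage; "ConvNet" is an assumed name for the class that
    # owns the __init__ above.
    opt = {
        "in_planes": 3,                          # RGB input
        "out_planes": [64, 64, 128, 128],        # one width per stage (a bare int broadcasts)
        "num_stages": 4,
        "userelu": False,                        # no ReLU after the last stage
        "use_pool": [True, True, True, False],   # pool in the first three stages only
        "average_end": True,                     # append a global-average-pooling head
    }
    net = ConvNet(opt)
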
Code example #2: ResNet feature extractor
    def __init__(self, arch, pool="avg"):
        assert (arch == "resnet10" or arch == "resnet18" or arch == "resnet34"
                or arch == "resnet50" or arch == "resnet101"
                or arch == "resnet152")

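        # "resnet10" is not a stock torchvision model (the smallest stock
        # ResNet is resnet18), so it is assembled manually from BasicBlocks
        # with a single block per stage.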
        if arch == "resnet10":
            net = resnet_utils.ResNet(block=resnet_utils.BasicBlock,
                                      layers=[1, 1, 1, 1],
                                      num_classes=10)
        else:
            net = models.__dict__[arch](num_classes=10)

        all_feat_names = []
        feature_blocks = []

        # 1st conv before any network block
        conv1 = nn.Sequential()
        conv1.add_module("Conv", net.conv1)
        conv1.add_module("bn", net.bn1)
        conv1.add_module("relu", net.relu)
        conv1.add_module("maxpool", net.maxpool)
        feature_blocks.append(conv1)
        all_feat_names.append("conv1")

        # 1st block.
        feature_blocks.append(net.layer1)
        all_feat_names.append("block1")

        # 2nd block.
        feature_blocks.append(net.layer2)
        all_feat_names.append("block2")

        # 3rd block.
        feature_blocks.append(net.layer3)
        all_feat_names.append("block3")

        # 4th block.
        feature_blocks.append(net.layer4)
        all_feat_names.append("block4")

        assert pool == "none" or pool == "avg" or pool == "max"
        if pool == "max" or pool == "avg":
            feature_blocks.append(tools.GlobalPooling(pool_type=pool))
            all_feat_names.append("GlobalPooling")

        super().__init__(all_feat_names, feature_blocks)
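
Only conv1 through layer4 (plus the optional pooling head) are kept; the ResNet's avgpool/fc classifier head is never appended to feature_blocks, so num_classes=10 has no effect on the extracted features. A minimal usage sketch, assuming models is torchvision.models and the owning class is named ResNetExtractor (hypothetical):

    # Hypothetical usage; "ResNetExtractor" is an assumed class name.
    extractor = ResNetExtractor("resnet18", pool="avg")
    # Exposed feature names: conv1, block1, block2, block3, block4, GlobalPooling
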
Code example #3: VGG feature extractor
    def __init__(self, arch, pool="avg"):
        assert (arch == "vgg11" or arch == "vgg11_bn" or arch == "vgg13"
                or arch == "vgg13_bn" or arch == "vgg16" or arch == "vgg16_bn"
                or arch == "vgg19" or arch == "vgg19_bn")

        net = models.__dict__[arch](num_classes=10)

        all_feat_names = []
        feature_blocks = []
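        # net.features[:-1] keeps the convolutional stack but drops its final
        # max-pool; the fully connected classifier head is discarded entirely.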
        features = net.features[:-1]
        feature_blocks.append(features)
        all_feat_names.append("features")

        assert pool == "none" or pool == "avg" or pool == "max"
        if pool == "max" or pool == "avg":
            feature_blocks.append(tools.GlobalPooling(pool_type=pool))
            all_feat_names.append("GlobalPooling")

        super().__init__(all_feat_names, feature_blocks)
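
Unlike the ResNet version, the whole VGG convolutional stack is exposed as a single feature block named "features". A minimal usage sketch, with VGGExtractor again a hypothetical name for the owning class:

    # Hypothetical usage; "VGGExtractor" is an assumed class name.
    extractor = VGGExtractor("vgg16_bn", pool="max")
    # Exposed feature names: features, GlobalPooling
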
Code example #4: Wide-ResNet feature extractor
    def __init__(
        self,
        depth,
        widen_factor=1,
        drop_rate=0.0,
        pool="avg",
        extra_block=False,
        block_strides=(2, 2, 2, 2),
        extra_block_width_mult=1,
        num_layers=None,
    ):
        nChannels = [
            16,
            16 * widen_factor,
            32 * widen_factor,
            64 * widen_factor,
            64 * widen_factor * extra_block_width_mult,
        ]

        assert not ((depth is None) and (num_layers is None))

        num_blocks = 4 if extra_block else 3
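        # Wide-ResNet convention: depth = 6 * n + 4, with n residual blocks
        # per stage (e.g. depth 28 gives n = 4).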
        if depth is not None:
            assert (depth - 4) % 6 == 0
            n = (depth - 4) // 6
            num_layers = [n for _ in range(num_blocks)]
        else:
            assert isinstance(num_layers, (list, tuple))
        assert len(num_layers) == num_blocks

        block = BasicBlock

        all_feat_names = []
        feature_blocks = []

        # 1st conv before any network block
        conv1 = nn.Sequential()
        conv1.add_module(
            "Conv",
            nn.Conv2d(3, nChannels[0], kernel_size=3, padding=1, bias=False))
        conv1.add_module("BN", nn.BatchNorm2d(nChannels[0]))
        conv1.add_module("ReLU", nn.ReLU(inplace=True))
        feature_blocks.append(conv1)
        all_feat_names.append("conv1")

        # 1st block.
        block1 = nn.Sequential()
        block1.add_module(
            "Block",
            NetworkBlock(num_layers[0], nChannels[0], nChannels[1], block,
                         block_strides[0], drop_rate),
        )
        block1.add_module("BN", nn.BatchNorm2d(nChannels[1]))
        block1.add_module("ReLU", nn.ReLU(inplace=True))
        feature_blocks.append(block1)
        all_feat_names.append("block1")

        # 2nd block.
        block2 = nn.Sequential()
        block2.add_module(
            "Block",
            NetworkBlock(num_layers[1], nChannels[1], nChannels[2], block,
                         block_strides[1], drop_rate),
        )
        block2.add_module("BN", nn.BatchNorm2d(nChannels[2]))
        block2.add_module("ReLU", nn.ReLU(inplace=True))
        feature_blocks.append(block2)
        all_feat_names.append("block2")

        # 3rd block.
        block3 = nn.Sequential()
        block3.add_module(
            "Block",
            NetworkBlock(num_layers[2], nChannels[2], nChannels[3], block,
                         block_strides[2], drop_rate),
        )
        block3.add_module("BN", nn.BatchNorm2d(nChannels[3]))
        block3.add_module("ReLU", nn.ReLU(inplace=True))
        feature_blocks.append(block3)
        all_feat_names.append("block3")

        # extra block.
        if extra_block:
            block4 = nn.Sequential()
            block4.add_module(
                "Block",
                NetworkBlock(
                    num_layers[3],
                    nChannels[3],
                    nChannels[4],
                    block,
                    block_strides[3],
                    drop_rate,
                ),
            )
            block4.add_module("BN", nn.BatchNorm2d(nChannels[4]))
            block4.add_module("ReLU", nn.ReLU(inplace=True))
            feature_blocks.append(block4)
            all_feat_names.append("block4")

        # Optional global pooling (avg or max) head; "none" leaves block4's
        # spatial map intact.
        assert pool in ("none", "avg", "max")
        if pool in ("avg", "max"):
            feature_blocks.append(tools.GlobalPooling(pool_type=pool))
            all_feat_names.append("GlobalPooling")

        super().__init__(all_feat_names, feature_blocks)

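        # Same He-style initialization as in code example #1: conv weights
        # ~ N(0, 2 / fan_out); batch norm starts as the identity transform.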
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2.0 / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
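
Given depth, the constructor derives the per-stage block count from the Wide-ResNet rule depth = 6 * n + 4; alternatively, depth=None with an explicit num_layers list can be passed. A minimal usage sketch for a WRN-28-10 (WideResNet is a hypothetical name for the owning class, whose definition the snippet omits):

    # Hypothetical usage; "WideResNet" is an assumed class name.
    net = WideResNet(depth=28, widen_factor=10, drop_rate=0.3, pool="avg")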