Code Example #1: CIFARWideResNet (Wide ResNet for CIFAR, PyTorch)
import torch.nn as nn


class CIFARWideResNet(nn.Module):
    """Wide ResNet feature extractor and classifier for CIFAR-scale inputs."""

    def __init__(self,
                 block,
                 layers,
                 channels,
                 drop_rate,
                 classes=10,
                 **kwargs):
        super(CIFARWideResNet, self).__init__(**kwargs)
        # One residual stage per entry in `layers`; `channels` holds the input
        # channel count plus the width of the stem and of each stage.
        assert len(layers) == len(channels) - 2
        self.features = list()
        # Normalize the raw input without learnable affine parameters.
        self.features.append(_bn_no_affine(channels[0]))
        # Stem: 3x3 convolution from the input channels to the first width.
        self.features.append(
            nn.Conv2d(channels[0], channels[1], 3, 1, 1, bias=False))
        self.features.append(nn.BatchNorm2d(channels[1]))

        # Stack the residual stages; spatial downsampling (stride 2) starts
        # with the second stage.
        in_channels = channels[1]
        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.append(
                self._make_layer(block, num_layer, in_channels,
                                 channels[i + 2], drop_rate, stride))
            in_channels = channels[i + 2]
        # Final normalization and activation for the feature extractor.
        self.features.append(nn.BatchNorm2d(channels[-1]))
        self.features.append(nn.ReLU(inplace=True))
        self.features = nn.Sequential(*self.features)

        # Linear classification head.
        self.output = nn.Linear(channels[-1], classes)
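
The constructor above relies on a helper, _bn_no_affine, that is not part of the listing. Below is a minimal sketch of such a helper, assuming it simply wraps nn.BatchNorm2d with its learnable affine parameters disabled; the name and signature are inferred from the call sites above, not from a full source listing.

def _bn_no_affine(num_channels):
    """BatchNorm over num_channels feature maps with no learnable scale/shift."""
    # affine=False removes the learnable gamma/beta parameters; the layer
    # still normalizes its input using batch / running statistics.
    return nn.BatchNorm2d(num_channels, affine=False)

Placed at the very start of features, this standardizes the raw input pixels without adding parameters. The _make_layer method (also not shown) is expected to build one stage of num_layer residual blocks.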
Code Example #2: ResNetV2 (pre-activation ResNet, PyTorch)
import torch.nn as nn


class ResNetV2(nn.Module):
    """Pre-activation ResNet (ResNet v2) feature extractor and classifier."""

    def __init__(self,
                 block,
                 layers,
                 channels,
                 classes=1000,
                 thumbnail=False,
                 last_gamma=False,
                 **kwargs):
        super(ResNetV2, self).__init__(**kwargs)
        # One residual stage per entry in `layers`; `channels` holds the input
        # channel count plus the width of the stem and of each stage.
        assert len(layers) == len(channels) - 2

        self.features = list()
        # Normalize the raw input without learnable affine parameters.
        self.features.append(_bn_no_affine(channels[0]))
        if thumbnail:
            # Small-image stem (e.g. 32x32 inputs): a single 3x3 convolution.
            self.features.append(_conv3x3(channels[0], channels[1], 1))
        else:
            # ImageNet-style stem: 7x7/2 convolution, BN, ReLU, 3x3/2 max pool.
            self.features.append(
                nn.Conv2d(channels[0], channels[1], 7, 2, 3, bias=False))
            self.features.append(nn.BatchNorm2d(channels[1]))
            self.features.append(nn.ReLU(inplace=True))
            self.features.append(nn.MaxPool2d(3, 2, 1))

        # Stack the residual stages; spatial downsampling (stride 2) starts
        # with the second stage.
        for i, num_layer in enumerate(layers):
            stride = 1 if i == 0 else 2
            self.features.append(
                self._make_layer(block,
                                 num_layer,
                                 channels[i + 1],
                                 channels[i + 2],
                                 stride,
                                 last_gamma=last_gamma))
        # Final normalization and activation (pre-activation blocks leave
        # their output un-normalized).
        self.features.append(nn.BatchNorm2d(channels[-1]))
        self.features.append(nn.ReLU(inplace=True))
        self.features = nn.Sequential(*self.features)

        # Linear classification head.
        self.output = nn.Linear(channels[-1], classes)
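
The thumbnail branch of the stem calls a _conv3x3 helper that is likewise not shown. A minimal sketch consistent with the call site (in_channels, out_channels, stride), assuming the usual padded, bias-free 3x3 convolution:

def _conv3x3(in_channels, out_channels, stride):
    """3x3 convolution with padding 1 and no bias."""
    return nn.Conv2d(in_channels, out_channels, kernel_size=3,
                     stride=stride, padding=1, bias=False)

The last_gamma flag is only forwarded to _make_layer; presumably it controls whether the final BatchNorm scale in each residual block is zero-initialized so that every block starts out close to an identity mapping.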