Example #1
import torch.nn as nn
# BasicBlock and Bottleneck as in torchvision's ResNet (assumed source;
# the snippet references them but does not define them).
from torchvision.models.resnet import BasicBlock, Bottleneck


class MyResNet(nn.Module):
    def __init__(self,
                 block,
                 layers,
                 num_classes=10,
                 zero_init_residual=False):
        super(MyResNet, self).__init__()
        self.inplanes = 64
        self.conv1 = nn.Conv2d(1,
                               64,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight,
                                        mode='fan_out',
                                        nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch,
        # so that the residual branch starts with zeros, and each residual block behaves like an identity.
        # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

        self.classifier = nn.Sequential(
            nn.Dropout(p=0.5),
            nn.Linear(512 * block.expansion, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(256, num_classes),
        )
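The snippet calls self._make_layer but does not include it. A minimal sketch of that helper, modeled on torchvision's ResNet (the downsample branch and the block.expansion bookkeeping follow that implementation; the original may differ):

    def _make_layer(self, block, planes, blocks, stride=1):
        # Downsample the identity path whenever the spatial size or the
        # channel count changes, so the residual addition lines up.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion))
        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

With torchvision's BasicBlock, MyResNet(BasicBlock, [2, 2, 2, 2]) would give a ResNet-18-style layout over the single-channel input this constructor expects.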
Example #2
import torch.nn as nn


class VGG(nn.Module):
    def __init__(self, num_classes=10):
        super(VGG, self).__init__()
        self.l1 = self.two_conv_pool(1, 64, 64)
        self.l2 = self.two_conv_pool(64, 128, 128)
        self.l3 = self.three_conv_pool(128, 256, 256, 256)
        self.l4 = self.three_conv_pool(256, 256, 256, 256)

        self.classifier = nn.Sequential(
            nn.Dropout(p=0.5),
            nn.Linear(256, 512),
            nn.BatchNorm1d(512),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(512, num_classes),
        )
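two_conv_pool and three_conv_pool are not shown. A plausible reading, given the VGG naming, is stacked 3x3 conv-BN-ReLU units followed by a 2x2 max pool; the kernel size and BatchNorm placement here are assumptions, not the original code:

    def two_conv_pool(self, in_channels, f1, f2):
        # Two 3x3 conv-BN-ReLU units, then halve the spatial size.
        return nn.Sequential(
            nn.Conv2d(in_channels, f1, kernel_size=3, padding=1),
            nn.BatchNorm2d(f1),
            nn.ReLU(inplace=True),
            nn.Conv2d(f1, f2, kernel_size=3, padding=1),
            nn.BatchNorm2d(f2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2))

    def three_conv_pool(self, in_channels, f1, f2, f3):
        # Same pattern with a third conv-BN-ReLU unit before the pool.
        return nn.Sequential(
            nn.Conv2d(in_channels, f1, kernel_size=3, padding=1),
            nn.BatchNorm2d(f1),
            nn.ReLU(inplace=True),
            nn.Conv2d(f1, f2, kernel_size=3, padding=1),
            nn.BatchNorm2d(f2),
            nn.ReLU(inplace=True),
            nn.Conv2d(f2, f3, kernel_size=3, padding=1),
            nn.BatchNorm2d(f3),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2))

For a 28x28 single-channel input this arithmetic works out: four 2x2 poolings shrink the map 28 -> 14 -> 7 -> 3 -> 1, so flattening the final 256-channel map yields exactly the 256 features that nn.Linear(256, 512) in the classifier expects.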
Example #3
    def head_layer(self):
        cin = self.channels  # * 2
        return nn.Sequential(
            nn.AdaptiveAvgPool2d(1),  # AdaptiveConcatPool2d(1),
            # nn.Dropout2d(0.5),
            Flatten(),
            init_default(nn.Linear(cin, self.classes),
                         nn.init.kaiming_normal_))
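Flatten and init_default are not part of torch.nn; they look like fastai helpers. A self-contained approximation (not the original definitions):

import torch.nn as nn

class Flatten(nn.Module):
    # Collapse pooled (N, C, 1, 1) features to (N, C) for the linear head.
    def forward(self, x):
        return x.view(x.size(0), -1)

def init_default(m, func=nn.init.kaiming_normal_):
    # Initialize the weight with func and zero the bias, returning the
    # module so the call can be inlined, as fastai's init_default does.
    if hasattr(m, 'weight'):
        func(m.weight)
    if getattr(m, 'bias', None) is not None:
        nn.init.constant_(m.bias, 0.)
    return m

Recent PyTorch versions also ship nn.Flatten(), which could replace the custom class.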
Example #4
import torch.nn as nn


class ConvNet(nn.Module):
    def __init__(self, num_classes=10):
        super(ConvNet, self).__init__()
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=5, stride=1, padding=2),
            nn.BatchNorm2d(16),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.layer2 = nn.Sequential(
            nn.Conv2d(16, 32, kernel_size=5, stride=1, padding=2),
            nn.BatchNorm2d(32),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.fc = nn.Linear(7 * 7 * 32, num_classes)
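The forward pass is omitted; a minimal sketch, assuming a 28x28 single-channel input (e.g. MNIST), which is what makes the 7 * 7 * 32 flatten size work out (each 2x2 pool halves the map: 28 -> 14 -> 7):

    def forward(self, x):
        out = self.layer1(x)                # (N, 16, 14, 14)
        out = self.layer2(out)              # (N, 32, 7, 7)
        out = out.reshape(out.size(0), -1)  # (N, 7 * 7 * 32)
        return self.fc(out)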