Example 1
    def __init__(self, init_scheme=nn.init.xavier_normal_):
        super(MaskedLeNet, self).__init__(init_scheme=init_scheme)
        # Classic LeNet-5 layout for 3-channel 32x32 inputs, with every
        # learnable layer swapped for its maskable (prunable) counterpart.
        self.conv1 = MaskedConv2d(3, 6, kernel_size=5)
        self.conv2 = MaskedConv2d(6, 16, kernel_size=5)
        self.fc1 = MaskedLinear(16 * 5 * 5, 120)  # 16 maps of 5x5 after pooling
        self.fc2 = MaskedLinear(120, 84)
        self.fc3 = MaskedLinear(84, 10)
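
The `MaskedLinear` and `MaskedConv2d` classes themselves are not part of these excerpts. As a minimal sketch of what such layers typically look like, assuming they gate their weights with a fixed binary mask buffer (the usual setup in pruning code; names and details here are illustrative, not the repository's actual implementation):

import torch
import torch.nn as nn
import torch.nn.functional as F

class MaskedLinear(nn.Linear):
    """nn.Linear whose weight is elementwise-gated by a binary mask."""

    def __init__(self, in_features, out_features, bias=True):
        super().__init__(in_features, out_features, bias=bias)
        # All-ones mask = dense layer; pruning writes zeros into it.
        self.register_buffer("mask", torch.ones_like(self.weight))

    def forward(self, x):
        return F.linear(x, self.weight * self.mask, self.bias)

class MaskedConv2d(nn.Conv2d):
    """nn.Conv2d with the same weight-masking scheme."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.register_buffer("mask", torch.ones_like(self.weight))

    def forward(self, x):
        return F.conv2d(x, self.weight * self.mask, self.bias,
                        self.stride, self.padding, self.dilation, self.groups)

Registering the mask as a buffer (rather than a parameter) keeps it out of the optimizer while still moving it with the module across devices and checkpoints.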
Example 2
    def __init__(self, in_planes, planes, dropout_rate, stride=1):
        super(wide_basic, self).__init__()
        # Pre-activation wide-basic block: BN -> ReLU -> conv, twice, with
        # dropout in between; any downsampling happens in conv2.
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.conv1 = MaskedConv2d(in_planes,
                                  planes,
                                  kernel_size=3,
                                  padding=1,
                                  bias=True)
        self.dropout = nn.Dropout(p=dropout_rate)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv2 = MaskedConv2d(planes,
                                  planes,
                                  kernel_size=3,
                                  stride=stride,
                                  padding=1,
                                  bias=True)

        # 1x1 projection shortcut when the shape changes; identity otherwise.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes,
                          planes,
                          kernel_size=1,
                          stride=stride,
                          bias=True))
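
The excerpt stops before the block's forward pass. Assuming the standard pre-activation ordering used by wide residual networks (and `torch.nn.functional` imported as `F`), the missing method would look roughly like this sketch:

    def forward(self, x):
        # BN -> ReLU -> conv1 -> dropout, then BN -> ReLU -> conv2,
        # and finally the residual connection.
        out = self.dropout(self.conv1(F.relu(self.bn1(x))))
        out = self.conv2(F.relu(self.bn2(out)))
        out += self.shortcut(x)
        return out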
Example 3
    def _make_layers(self, cfg, use_bn):
        # Build a VGG-style feature stack from a config list: integers are
        # output channel counts, 'M' inserts a 2x2 max-pooling stage.
        layers = []
        in_channels = 3
        for i, x in enumerate(cfg):
            if x == 'M':
                layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
            else:
                if i == 0:
                    # Only the very first conv downsamples with stride 2.
                    layers += [
                        MaskedConv2d(in_channels,
                                     x,
                                     kernel_size=3,
                                     padding=1,
                                     stride=2)
                    ]
                else:
                    layers += [
                        MaskedConv2d(in_channels, x, kernel_size=3, padding=1)
                    ]
                if use_bn:
                    layers += [nn.BatchNorm2d(x)]
                layers += [nn.ReLU(inplace=True)]
                in_channels = x
        layers += [nn.AvgPool2d(kernel_size=1, stride=1)]  # size-preserving pool
        return nn.Sequential(*layers)
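
To see how `_make_layers` consumes its config, here is a hypothetical `cfg` in the familiar VGG notation; the actual configs used by this model are not part of the excerpt:

# Hypothetical VGG-11-style config: integers are channel widths, 'M' pools.
cfg = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
# Inside the model's __init__ one would then build:
# self.features = self._make_layers(cfg, use_bn=True)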
Example 4
    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        # Bottleneck: 1x1 reduce -> 3x3 (carries the stride) -> 1x1 expand.
        self.conv1 = MaskedConv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = MaskedConv2d(planes,
                                  planes,
                                  kernel_size=3,
                                  stride=stride,
                                  padding=1,
                                  bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = MaskedConv2d(planes,
                                  self.expansion * planes,
                                  kernel_size=1,
                                  bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)

        # Projection shortcut whenever spatial size or channel count changes.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes,
                          self.expansion * planes,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(self.expansion * planes))
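
`self.expansion` is referenced but not defined in the excerpt; in standard ResNet code it is a class attribute (`expansion = 4` for Bottleneck). A sketch of that attribute and the matching forward pass, assuming the conventional post-activation ordering (not shown in the source):

    expansion = 4  # standard Bottleneck class attribute, assumed here

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)  # identity or 1x1 projection
        return F.relu(out)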
Example 5
def conv3x3(in_planes, out_planes, stride=1):
    # 3x3 masked convolution with padding, used throughout the network.
    return MaskedConv2d(in_planes,
                        out_planes,
                        kernel_size=3,
                        stride=stride,
                        padding=1,
                        bias=True)

# Constructor of MaskedResNet_64 (a separate excerpt from the same file;
# the class statement itself is not included):
    def __init__(self, block, num_blocks, num_classes=200):
        super(MaskedResNet_64, self).__init__()
        self.in_planes = 64

        # Stride-2 stem: the "_64" suffix and num_classes=200 suggest
        # 64x64 inputs such as Tiny ImageNet.
        self.conv1 = MaskedConv2d(3, 64, kernel_size=3, stride=2, padding=1,
                                  bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = MaskedLinear(512 * block.expansion, num_classes)
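
`_make_layer` is called four times above but not shown. Assuming the standard CIFAR-style ResNet helper, it would be along these lines (a sketch under that assumption, not the excerpted source):

    def _make_layer(self, block, planes, num_blocks, stride):
        # The first block of a stage may downsample; the rest use stride 1.
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for s in strides:
            layers.append(block(self.in_planes, planes, s))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)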