Example #1
import torch.nn as nn

# 'Layer' is the repo's binarized-layer module; its import path is not shown
# in these excerpts.

class Bottleneck_1w1a(nn.Module):
    expansion = 4  # standard ResNet bottleneck expansion (assumed; not in the excerpt)

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck_1w1a, self).__init__()
        # 1x1 binarized conv: reduce channels
        self.conv1 = Layer.BNNConv2d_1w1a(in_planes,
                                          planes,
                                          kernel_size=1,
                                          bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # 3x3 binarized conv: spatial filtering (carries the stride)
        self.conv2 = Layer.BNNConv2d_1w1a(planes,
                                          planes,
                                          kernel_size=3,
                                          stride=stride,
                                          padding=1,
                                          bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # 1x1 binarized conv: expand channels back up
        self.conv3 = Layer.BNNConv2d_1w1a(planes,
                                          self.expansion * planes,
                                          kernel_size=1,
                                          bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)

        # projection shortcut when the spatial size or channel count changes
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                Layer.BNNConv2d_1w1a(in_planes,
                                     self.expansion * planes,
                                     kernel_size=1,
                                     stride=stride,
                                     bias=False),
                nn.BatchNorm2d(self.expansion * planes))
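The excerpt stops at the constructor. A minimal forward sketch for this block, assuming the common BNN pattern of a Hardtanh nonlinearity after each BatchNorm (the activation is an assumption; it does not appear above):

import torch.nn.functional as F

# Hypothetical Bottleneck_1w1a.forward (would live inside the class above);
# the Hardtanh activation is assumed, not taken from the excerpt.
def forward(self, x):
    out = F.hardtanh(self.bn1(self.conv1(x)))
    out = F.hardtanh(self.bn2(self.conv2(out)))
    out = self.bn3(self.conv3(out))
    out += self.shortcut(x)        # residual connection
    return F.hardtanh(out)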
Example #2

class Block_1w1a(nn.Module):
    def __init__(self, in_planes, out_planes, stride=1):
        super(Block_1w1a, self).__init__()
        # 3x3 depthwise binarized conv (groups == channels)
        self.conv1 = Layer.BNNConv2d_1w1a(in_planes,
                                          in_planes,
                                          kernel_size=3,
                                          stride=stride,
                                          padding=1,
                                          groups=in_planes,
                                          bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)
        # 1x1 pointwise binarized conv mixes channels
        self.conv2 = Layer.BNNConv2d_1w1a(in_planes,
                                          out_planes,
                                          kernel_size=1,
                                          stride=1,
                                          padding=0,
                                          bias=False)
        self.bn2 = nn.BatchNorm2d(out_planes)
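Only the constructor is shown here as well; a matching forward sketch under the same Hardtanh assumption:

# Hypothetical Block_1w1a.forward (depthwise separable wiring; activation assumed).
def forward(self, x):
    out = F.hardtanh(self.bn1(self.conv1(x)))    # depthwise stage
    out = F.hardtanh(self.bn2(self.conv2(out)))  # pointwise stage
    return out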
Example #3
class VGG_SMALL_1W1A_MOVE(nn.Module):
    def __init__(self, num_classes=10):
        super(VGG_SMALL_1W1A_MOVE, self).__init__()
        self.num_classes = num_classes
        # full-precision stem conv
        self.conv0 = nn.Conv2d(3, 128, kernel_size=3, padding=1, bias=False)
        self.bn0 = nn.BatchNorm2d(128)
        # each binarized conv below is preceded by a MoveBlock (repo-local layer)
        self.move1 = MoveBlock(128)
        self.conv1 = Layer.BNNConv2d_1w1a(128,
                                          128,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.pooling = nn.MaxPool2d(kernel_size=2, stride=2)
        self.bn1 = nn.BatchNorm2d(128)
        # self.nonlinear = nn.ReLU(inplace=True)
        # self.nonlinear = nn.Hardtanh(inplace=True)
        self.move2 = MoveBlock(128)
        self.conv2 = Layer.BNNConv2d_1w1a(128,
                                          256,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn2 = nn.BatchNorm2d(256)
        self.move3 = MoveBlock(256)
        self.conv3 = Layer.BNNConv2d_1w1a(256,
                                          256,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn3 = nn.BatchNorm2d(256)
        self.move4 = MoveBlock(256)
        self.conv4 = Layer.BNNConv2d_1w1a(256,
                                          512,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn4 = nn.BatchNorm2d(512)
        self.move5 = MoveBlock(512)
        self.conv5 = Layer.BNNConv2d_1w1a(512,
                                          512,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn5 = nn.BatchNorm2d(512)

        # classifier over the flattened 512 x 4 x 4 feature map
        self.fc = nn.Linear(512 * 4 * 4, self.num_classes)
        self._initialize_weights()
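The classifier expects a 512 x 4 x 4 feature map. Assuming the usual 32 x 32 CIFAR input (consistent with num_classes=10), that size follows from applying the 2x2, stride-2 pooling three times in the forward pass, which the excerpt omits:

# Sanity check of the flatten size fed to self.fc (32x32 input assumed).
size = 32
for _ in range(3):   # self.pooling applied three times in the unshown forward
    size //= 2       # MaxPool2d(kernel_size=2, stride=2) halves H and W
assert 512 * size * size == 512 * 4 * 4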
Example #4
class VGG_SMALL_1W1A_DENSE(nn.Module):
    def __init__(self, num_classes=10):
        super(VGG_SMALL_1W1A_DENSE, self).__init__()
        self.num_classes = num_classes
        # full-precision stem conv
        self.conv0 = nn.Conv2d(3, 128, kernel_size=3, padding=1, bias=False)
        self.bn0 = nn.BatchNorm2d(128)
        self.conv1 = Layer.BNNConv2d_1w1a(128,
                                          128,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.pooling = nn.MaxPool2d(kernel_size=2, stride=2)
        self.bn1 = nn.BatchNorm2d(128)
        # self.nonlinear = nn.ReLU(inplace=True)
        # self.nonlinear = nn.Hardtanh(inplace=True)
        self.conv2 = Layer.BNNConv2d_1w1a(128,
                                          256,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn2 = nn.BatchNorm2d(256)
        self.conv3 = Layer.BNNConv2d_1w1a(256,
                                          256,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn3 = nn.BatchNorm2d(256)
        self.conv4 = Layer.BNNConv2d_1w1a(256,
                                          512,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn4 = nn.BatchNorm2d(512)
        self.conv5 = Layer.BNNConv2d_1w1a(512,
                                          512,
                                          kernel_size=3,
                                          padding=1,
                                          bias=False)
        self.bn5 = nn.BatchNorm2d(512)

        # binarized classifier head: binarized dense layer, BatchNorm1d, then a
        # learnable scale-and-shift (ScaleAndShift is a repo-local layer)
        self.fc = Layer.BNNDense_1w1a(512 * 4 * 4, self.num_classes)
        self.bn6 = nn.BatchNorm1d(self.num_classes)
        self.scaleshift = ScaleAndShift(self.num_classes)
        self._initialize_weights()
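What distinguishes this variant is the head: the float nn.Linear of the previous example is replaced by a binarized dense layer followed by BatchNorm1d and ScaleAndShift. A sketch of how the three modules would typically be chained (the wiring is assumed; the excerpt omits forward):

# Hypothetical tail of VGG_SMALL_1W1A_DENSE.forward (wiring assumed).
def _head(self, out):
    out = out.view(out.size(0), -1)  # flatten the 512 x 4 x 4 features
    out = self.fc(out)               # binarized dense layer
    out = self.bn6(out)              # normalize the class scores
    return self.scaleshift(out)      # learnable affine on the output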
Example #5
class BasicBlock_1w1a(nn.Module):
    expansion = 1  # standard ResNet basic-block expansion (assumed; not in the excerpt)

    def __init__(self, in_planes, planes, stride=1, option='A'):
        super(BasicBlock_1w1a, self).__init__()
        self.conv1 = Layer.BNNConv2d_1w1a(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = Layer.BNNConv2d_1w1a(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != planes:
            if option == 'A':
                """
                For CIFAR-10, the ResNet paper uses option A: subsample the
                identity and zero-pad the extra channels, adding no parameters.
                LambdaLayer is a repo-local helper that wraps the lambda as an
                nn.Module.
                """
                self.shortcut = LambdaLayer(lambda x:
                                            F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes//4, planes//4), "constant", 0))
            elif option == 'B':
                # option B: learned 1x1 projection shortcut
                self.shortcut = nn.Sequential(
                    Layer.BNNConv2d_1w1a(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(self.expansion * planes)
                )
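The option-A lambda is easy to misread: the slice x[:, :, ::2, ::2] halves both spatial dimensions, and the last pair of the pad tuple adds planes//4 zero channels on each side of the channel axis. A small runnable check (the tensor sizes are illustrative):

import torch
import torch.nn.functional as F

x = torch.randn(1, 16, 32, 32)   # e.g. the first CIFAR ResNet stage
planes = 32                      # width of the next stage
y = F.pad(x[:, :, ::2, ::2],
          (0, 0, 0, 0, planes // 4, planes // 4), "constant", 0)
print(y.shape)                   # torch.Size([1, 32, 16, 16])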
Example #6

class InvertedResidual_1w1a(nn.Module):
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual_1w1a, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = int(inp * expand_ratio)
        # identity residual only when the input and output shapes match
        self.use_res_connect = self.stride == 1 and inp == oup

        if expand_ratio == 1:
            self.conv = nn.Sequential(
                # dw: 3x3 depthwise binarized conv
                Layer.BNNConv2d_1w1a(hidden_dim,
                                     hidden_dim,
                                     3,
                                     stride,
                                     1,
                                     groups=hidden_dim,
                                     bias=False),
                nn.BatchNorm2d(hidden_dim),
                # pw-linear: 1x1 pointwise projection
                Layer.BNNConv2d_1w1a(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw: 1x1 pointwise expansion
                Layer.BNNConv2d_1w1a(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                # dw: 3x3 depthwise binarized conv
                Layer.BNNConv2d_1w1a(hidden_dim,
                                     hidden_dim,
                                     3,
                                     stride,
                                     1,
                                     groups=hidden_dim,
                                     bias=False),
                nn.BatchNorm2d(hidden_dim),
                # pw-linear: 1x1 pointwise projection
                Layer.BNNConv2d_1w1a(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
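The forward is not in the excerpt; the standard MobileNetV2 wiring, which adds the input back only when use_res_connect holds, would be:

# Hypothetical InvertedResidual_1w1a.forward (standard MobileNetV2 wiring).
def forward(self, x):
    if self.use_res_connect:
        return x + self.conv(x)  # identity residual: stride 1 and inp == oup
    return self.conv(x)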
def conv_1x1_bn_1w1a(inp, oup):
    # helper: 1x1 binarized conv followed by BatchNorm (no activation)
    return nn.Sequential(Layer.BNNConv2d_1w1a(inp, oup, 1, 1, 0, bias=False),
                         nn.BatchNorm2d(oup))