Example #1
    def __init__(self, in_planes, planes, stride=1, wbit=32, abit=32):
        super(Bottleneck, self).__init__()
        Conv2d = Layer.DoreafaConv2dv1(wbit, abit)
        # 1x1 reduce
        self.conv1 = Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # 3x3 spatial convolution; carries the stride
        self.conv2 = Conv2d(planes,
                            planes,
                            kernel_size=3,
                            stride=stride,
                            padding=1,
                            bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # 1x1 expand to expansion * planes channels
        self.conv3 = Conv2d(planes,
                            self.expansion * planes,
                            kernel_size=1,
                            bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                Conv2d(in_planes,
                       self.expansion * planes,
                       kernel_size=1,
                       stride=stride,
                       bias=False),
                nn.BatchNorm2d(self.expansion * planes))
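Only the constructor is shown. In the standard ResNet Bottleneck, expansion is a class attribute (typically 4), and a forward pass with a residual add follows. A minimal sketch of both, assuming the usual conv-bn-relu ordering and torch.nn.functional imported as F (an assumption, not the original code):

    expansion = 4  # assumed: standard ResNet bottleneck expansion factor

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))    # 1x1 reduce
        out = F.relu(self.bn2(self.conv2(out)))  # 3x3 spatial conv
        out = self.bn3(self.conv3(out))          # 1x1 expand, no ReLU before the add
        out += self.shortcut(x)                  # identity or projection shortcut
        return F.relu(out)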
Example #2

    def __init__(self, inp, oup, stride, expand_ratio, wbit=32, abit=32):
        super(InvertedResidual, self).__init__()
        Conv2d = Layer.DoreafaConv2dv1(wbit, abit)
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = int(inp * expand_ratio)
        self.use_res_connect = self.stride == 1 and inp == oup

        if expand_ratio == 1:
            self.conv = nn.Sequential(
                # dw
                Conv2d(hidden_dim,
                       hidden_dim,
                       3,
                       stride,
                       1,
                       groups=hidden_dim,
                       bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw
                Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # dw
                Conv2d(hidden_dim,
                       hidden_dim,
                       3,
                       stride,
                       1,
                       groups=hidden_dim,
                       bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear
                Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
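Here too only the constructor appears. A minimal sketch of the matching forward, assuming the standard MobileNetV2 behaviour implied by use_res_connect (an assumption, not the original code):

    def forward(self, x):
        if self.use_res_connect:
            # residual add only when stride == 1 and input/output channels match
            return x + self.conv(x)
        return self.conv(x)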
Example #3
    def __init__(self, in_planes, planes, stride=1, wbit=32, abit=32, option='A'):
        super(BasicBlock, self).__init__()
        Conv2d = Layer.DoreafaConv2dv1(wbit, abit)
        self.conv1 = Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != planes:
            if option == 'A':
                """
                For CIFAR-10, the ResNet paper uses option A: subsample the
                identity spatially and zero-pad the extra channels.
                """
                self.shortcut = LambdaLayer(lambda x:
                                            F.pad(x[:, :, ::2, ::2], (0, 0, 0, 0, planes // 4, planes // 4), "constant", 0))
            elif option == 'B':
                self.shortcut = nn.Sequential(
                    Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                    nn.BatchNorm2d(self.expansion * planes)
                )
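Option 'A' relies on a LambdaLayer helper that the snippet does not define. In the CIFAR ResNet code this pattern comes from, it is a thin nn.Module wrapper around an arbitrary function; a minimal sketch:

import torch.nn as nn

class LambdaLayer(nn.Module):
    """Wrap an arbitrary function as a module so it can sit inside nn.Sequential."""
    def __init__(self, lambd):
        super(LambdaLayer, self).__init__()
        self.lambd = lambd

    def forward(self, x):
        return self.lambd(x)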
Example #4
    def __init__(self, wbit=32, abit=32, num_classes=10):
        super(VGG_SMALL, self).__init__()
        self.num_classes = num_classes
        Conv2d = Layer.DoreafaConv2dv1(wbit, abit)
        self.conv0 = Conv2d(3, 128, kernel_size=3, padding=1, bias=False)
        self.bn0 = nn.BatchNorm2d(128)
        self.conv1 = Conv2d(128, 128, kernel_size=3, padding=1, bias=False)
        self.pooling = nn.MaxPool2d(kernel_size=2, stride=2)
        self.bn1 = nn.BatchNorm2d(128)
        self.nonlinear = nn.ReLU(inplace=True)
        # self.nonlinear = nn.Hardtanh(inplace=True)
        self.conv2 = Conv2d(128, 256, kernel_size=3, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(256)
        self.conv3 = Conv2d(256, 256, kernel_size=3, padding=1, bias=False)
        self.bn3 = nn.BatchNorm2d(256)
        self.conv4 = Conv2d(256, 512, kernel_size=3, padding=1, bias=False)
        self.bn4 = nn.BatchNorm2d(512)
        self.conv5 = Conv2d(512, 512, kernel_size=3, padding=1, bias=False)
        self.bn5 = nn.BatchNorm2d(512)

        self.fc = nn.Linear(512 * 4 * 4, self.num_classes)
        self._initialize_weights()
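The constructor calls _initialize_weights(), which is not shown. A minimal sketch of a typical implementation for a ReLU ConvNet like this one (Kaiming init for convolutions, unit scale and zero shift for BatchNorm); the details are an assumption, not the original code:

    def _initialize_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # Kaiming init is the usual choice ahead of ReLU nonlinearities
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.ones_(m.weight)
                nn.init.zeros_(m.bias)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.zeros_(m.bias)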
Example #5

def conv_1x1_bn(inp, oup, wbit, abit):
    Conv2d = Layer.DoreafaConv2dv1(wbit, abit)
    return nn.Sequential(Conv2d(inp, oup, 1, 1, 0, bias=False),
                         nn.BatchNorm2d(oup), nn.ReLU6(inplace=True))
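Every example above obtains its convolution class from Layer.DoreafaConv2dv1(wbit, abit), a factory that bakes the bit widths into a Conv2d-like constructor. The Layer module itself is not part of these snippets; the sketch below shows one way such a factory could be built around DoReFa-style weight quantization. The names and quantizer details are illustrative assumptions, not the original implementation (activation quantization is omitted for brevity):

import torch
import torch.nn as nn
import torch.nn.functional as F

def quantize_k(x, k):
    # uniform k-bit quantizer on [0, 1] with a straight-through estimator
    n = float(2 ** k - 1)
    xq = torch.round(x * n) / n
    return x + (xq - x).detach()

class DoreafaConv2d(nn.Conv2d):
    def __init__(self, wbit, abit, *args, **kwargs):
        super(DoreafaConv2d, self).__init__(*args, **kwargs)
        self.wbit = wbit
        self.abit = abit  # kept for interface parity; unused in this sketch

    def forward(self, x):
        w = self.weight
        if self.wbit < 32:
            # DoReFa weight quantization: squash with tanh, rescale to [0, 1],
            # quantize to wbit levels, then map back to [-1, 1]
            w = torch.tanh(w)
            w = w / (2 * w.abs().max()) + 0.5
            w = 2 * quantize_k(w, self.wbit) - 1
        return F.conv2d(x, w, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)

def DoreafaConv2dv1(wbit, abit):
    # returns a constructor with the bit widths pre-bound,
    # matching the `Conv2d = Layer.DoreafaConv2dv1(wbit, abit)` usage above
    def make(*args, **kwargs):
        return DoreafaConv2d(wbit, abit, *args, **kwargs)
    return make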