def __init__(self,
             inplanes,
             planes,
             bitW,
             bitA,
             stride=1,
             downsample=None,
             is_last=False):
    super(Bottleneck, self).__init__()
    self.bitW = bitW
    self.bitA = bitA
    # 1x1 quantized convolution that reduces the channel count.
    self.conv1 = QConv2d(inplanes, planes, bitW, kernel_size=1, bias=False)
    self.bn1 = nn.BatchNorm2d(planes)
    # 3x3 quantized convolution; carries the block's stride.
    self.conv2 = QConv2d(planes,
                         planes,
                         bitW,
                         kernel_size=3,
                         stride=stride,
                         padding=1,
                         bias=False)
    self.bn2 = nn.BatchNorm2d(planes)
    # 1x1 quantized convolution that expands back to planes * 4.
    self.conv3 = QConv2d(planes,
                         planes * 4,
                         bitW,
                         kernel_size=1,
                         bias=False)
    self.bn3 = nn.BatchNorm2d(planes * 4)
    self.downsample = downsample
    # Quantized activation with bitA bits.
    self.QReLU = QReLU(k=bitA)
    self.stride = stride
    self.is_last = is_last
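# Editorial sketch (assumption, not part of the original snippet): a forward() that
# would typically accompany this quantized Bottleneck. The ordering follows the
# standard ResNet-v1 bottleneck with QReLU in place of ReLU; the handling of
# `is_last` (returning the pre-activation of the final block) is one common
# convention and may differ in the original repository.
def forward(self, x):
    residual = x

    out = self.QReLU(self.bn1(self.conv1(x)))
    out = self.QReLU(self.bn2(self.conv2(out)))
    out = self.bn3(self.conv3(out))

    # Project the shortcut when the block changes spatial size or channel width.
    if self.downsample is not None:
        residual = self.downsample(x)

    out += residual
    preact = out
    out = self.QReLU(out)
    if self.is_last:
        return out, preact
    return out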
Example #2
    def _make_layer(self, block, planes, blocks, stride=1, is_last=False):
        downsample_booster = None
        downsample_residual = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Full-precision 1x1 projection used for the booster shortcut.
            downsample_booster = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
            # Quantized 1x1 projection used for the residual shortcut.
            downsample_residual = nn.Sequential(
                QConv2d(self.inplanes, planes * block.expansion, self.bitW,
                        kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = nn.ModuleList([])
        layers.append(block(self.inplanes, planes, self.bitW, self.bitA,
                            stride, downsample_booster, downsample_residual))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks - 1):
            layers.append(block(self.inplanes, planes, self.bitW, self.bitA))
        # Only the final block receives the is_last flag.
        layers.append(block(self.inplanes, planes, self.bitW, self.bitA,
                            is_last=is_last))

        return layers
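# Editorial note: because this variant returns an nn.ModuleList rather than an
# nn.Sequential, the enclosing network's forward() has to step through the blocks
# explicitly, e.g.
#
#     for blk in self.layer1:
#         x = blk(x)
#
# whereas the nn.Sequential variant in the next example can be called directly as
# self.layer1(x). The explicit loop also makes it easy to tap per-block outputs
# (e.g. for auxiliary heads), which appears to be the intent here (assumption).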
Example #3
    def _make_layer(self, block, planes, blocks, stride=1, is_last=False):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Quantized 1x1 projection so the shortcut matches the main branch.
            downsample = nn.Sequential(
                QConv2d(self.inplanes,
                        planes * block.expansion,
                        self.bitW,
                        kernel_size=1,
                        stride=stride,
                        bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = []
        layers.append(
            block(self.inplanes, planes, self.bitW, self.bitA, stride,
                  downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks - 1):
            layers.append(block(self.inplanes, planes, self.bitW, self.bitA))

        layers.append(
            block(self.inplanes, planes, self.bitW, self.bitA,
                  is_last=is_last))

        return nn.Sequential(*layers)
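# Editorial sketch (assumption, not from the original snippets): _make_layer is
# normally invoked from the ResNet constructor. The block counts below are the
# standard ResNet-50 configuration (layers = [3, 4, 6, 3]) and are purely
# illustrative:
#
#     self.layer1 = self._make_layer(block, 64, layers[0])
#     self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
#     self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
#     self.layer4 = self._make_layer(block, 512, layers[3], stride=2, is_last=True)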
Example #4
def __init__(self, inplanes, planes, bitW, bitA, stride=1,
             downsample_booster=None, downsample_residual=None, is_last=False):
    super(Bottleneck, self).__init__()
    self.bitW = bitW
    self.bitA = bitA
    self.conv1 = QConv2d(inplanes, planes, bitW, kernel_size=1, bias=False)
    self.bn1 = nn.BatchNorm2d(planes)
    self.conv2 = QConv2d(planes, planes, bitW, kernel_size=3, stride=stride,
                         padding=1, bias=False)
    self.bn2 = nn.BatchNorm2d(planes)
    self.conv3 = QConv2d(planes, planes * 4, bitW, kernel_size=1, bias=False)
    self.bn3 = nn.BatchNorm2d(planes * 4)
    self.QReLU = QReLU(k=bitA)
    # Two shortcut branches: a full-precision "booster" projection and a quantized residual projection.
    self.downsample_booster = downsample_booster
    self.downsample = downsample_residual
    self.stride = stride
    # Auxiliary 1x1 projection on the expanded output (expansion is the Bottleneck class attribute, 4).
    self.auxiliary = nn.Sequential(
        conv1x1(planes * self.expansion, planes * self.expansion),
        nn.BatchNorm2d(planes * self.expansion))
    self.is_last = is_last
def conv1x1(in_planes, out_planes, bitW=32, stride=1):
    "1x1 convolution"
    return QConv2d(in_planes,
                   out_planes,
                   bitW=bitW,
                   kernel_size=1,
                   stride=stride,
                   bias=False)
def conv3x3(in_planes, out_planes, bitW, stride=1):
    "3x3 convolution with padding"
    return QConv2d(in_planes,
                   out_planes,
                   bitW,
                   kernel_size=3,
                   stride=stride,
                   padding=1,
                   bias=False)
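# Editorial stand-in (assumption): QConv2d and QReLU are not defined anywhere in
# these snippets. A minimal DoReFa-style sketch that matches the call signatures
# used above could look like the following; the actual modules in the source
# repository may quantize weights and activations differently.
import torch
import torch.nn as nn
import torch.nn.functional as F


def _quantize_k(x, k):
    # Uniform k-bit quantization on [0, 1] with a straight-through gradient estimate.
    n = float(2 ** k - 1)
    x_q = torch.round(x * n) / n
    return x + (x_q - x).detach()


class QReLU(nn.Module):
    # Activation quantizer: clamp to [0, 1], then quantize to k bits (k=32 -> plain ReLU).
    def __init__(self, k=32):
        super().__init__()
        self.k = k

    def forward(self, x):
        if self.k == 32:
            return F.relu(x)
        return _quantize_k(torch.clamp(x, 0, 1), self.k)


class QConv2d(nn.Conv2d):
    # Conv2d whose weights are quantized to bitW bits in [-1, 1] (bitW=32 -> full precision).
    def __init__(self, in_planes, out_planes, bitW=32, **kwargs):
        super().__init__(in_planes, out_planes, **kwargs)
        self.bitW = bitW

    def forward(self, x):
        if self.bitW == 32:
            return F.conv2d(x, self.weight, self.bias, self.stride,
                            self.padding, self.dilation, self.groups)
        w = torch.tanh(self.weight)
        w = w / (2 * w.abs().max()) + 0.5
        w_q = 2 * _quantize_k(w, self.bitW) - 1
        return F.conv2d(x, w_q, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)


if __name__ == "__main__":
    # Quick shape check: 4-bit weights and activations, stride-2 3x3 conv.
    x = torch.randn(1, 16, 8, 8)
    conv = QConv2d(16, 32, bitW=4, kernel_size=3, stride=2, padding=1, bias=False)
    act = QReLU(k=4)
    print(act(conv(x)).shape)  # torch.Size([1, 32, 4, 4])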