Example #1
import mindspore.nn as nn
import mindspore.ops.operations as P
from mindspore.nn import Conv2dBnFoldQuant, FakeQuantWithMinMaxObserver
from mindspore.compression.quant import create_quant_config

# Module-level quantization settings (assumed defaults, not part of the
# original snippet): _fake toggles the fake-quant observers, _ema_decay is the
# EMA decay used by the min/max observers, and _quant_config describes the
# weight/activation quantizers. Substitute the project's own config if needed.
_fake = True
_ema_decay = 0.999
_quant_config = create_quant_config()


class ResidualBlock(nn.Cell):
    """ResNet bottleneck block built from quantization-aware cells."""

    expansion = 4  # assumed standard bottleneck expansion: out_channel = expansion * hidden channels

    def __init__(self, in_channel, out_channel, stride=1):
        super(ResidualBlock, self).__init__()

        channel = out_channel // self.expansion
        # 1x1 reduction and 3x3 convolution, both fused Conv+BN+ReLU quant
        # cells (ConvBNReLU is defined below).
        self.conv1 = ConvBNReLU(in_channel, channel, kernel_size=1, stride=1)
        self.conv2 = ConvBNReLU(channel, channel, kernel_size=3, stride=stride)
        # 1x1 expansion conv with folded BatchNorm; with fake quantization
        # enabled, an explicit observer is appended to the block output.
        self.conv3 = nn.SequentialCell([
            Conv2dBnFoldQuant(channel,
                              out_channel,
                              fake=_fake,
                              quant_config=_quant_config,
                              kernel_size=1,
                              stride=1,
                              pad_mode='same',
                              padding=0),
            FakeQuantWithMinMaxObserver(
                ema=True, ema_decay=_ema_decay, symmetric=False)
        ]) if _fake else Conv2dBnFoldQuant(channel,
                                           out_channel,
                                           fake=_fake,
                                           quant_config=_quant_config,
                                           kernel_size=1,
                                           stride=1,
                                           pad_mode='same',
                                           padding=0)

        # A 1x1 projection shortcut is needed whenever the spatial resolution
        # or the channel count changes.
        self.down_sample = (stride != 1 or in_channel != out_channel)
        self.down_sample_layer = None

        if self.down_sample:
            self.down_sample_layer = nn.SequentialCell([
                Conv2dBnFoldQuant(in_channel,
                                  out_channel,
                                  quant_config=_quant_config,
                                  kernel_size=1,
                                  stride=stride,
                                  pad_mode='same',
                                  padding=0),
                FakeQuantWithMinMaxObserver(
                    ema=True, ema_decay=_ema_decay, symmetric=False)
            ]) if _fake else Conv2dBnFoldQuant(in_channel,
                                               out_channel,
                                               fake=_fake,
                                               quant_config=_quant_config,
                                               kernel_size=1,
                                               stride=stride,
                                               pad_mode='same',
                                               padding=0)
        # Quantization-aware elementwise add for the residual connection,
        # followed by the final ReLU.
        self.add = nn.TensorAddQuant()
        self.relu = P.ReLU()
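
    # The original example shows only __init__; this construct() is a minimal
    # sketch of the standard bottleneck forward pass (assumed, not part of the
    # original snippet).
    def construct(self, x):
        identity = x
        out = self.conv1(x)
        out = self.conv2(out)
        out = self.conv3(out)
        if self.down_sample:
            identity = self.down_sample_layer(identity)
        out = self.add(out, identity)
        out = self.relu(out)
        return out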


class ConvBNReLU(nn.Cell):
    """Conv2d with folded BatchNorm and ReLU, optionally fake-quantized."""

    def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
        super(ConvBNReLU, self).__init__()
        padding = (kernel_size - 1) // 2
        conv = Conv2dBnFoldQuant(in_planes, out_planes, kernel_size, stride, pad_mode='pad', padding=padding,
                                 group=groups, fake=_fake, quant_config=_quant_config)
        # With fake quantization enabled, wrap the ReLU in ActQuant so the
        # activation output is observed as well.
        layers = [conv, nn.ActQuant(nn.ReLU())] if _fake else [conv, nn.ReLU()]
        self.features = nn.SequentialCell(layers)
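
    # construct() is not shown in the original example; this forward pass is an
    # assumed one-line completion.
    def construct(self, x):
        return self.features(x)


# Usage sketch (assumed shapes): a bottleneck block mapping 256 -> 256 channels
# on a 56x56 feature map.
# block = ResidualBlock(256, 256, stride=1)
# out = block(Tensor(np.zeros((1, 256, 56, 56), np.float32)))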