def __init__(self, in_channels, out_channels, has_bias, has_bn):
    """Build the final quantized dense layer plus an output fake-quant node.

    Args:
        in_channels: input feature size of the dense layer.
        out_channels: output feature size of the dense layer.
        has_bias: whether the dense layer carries a bias term.
        has_bn: accepted for interface compatibility; not used in this body.
    """
    super(LastQuantLayer, self).__init__()

    # Quantization-aware dense layer using the module-level quant_config,
    # with INT8 weight/activation quantization.
    self.dense_inner = nn.DenseQuant(
        in_channels,
        out_channels,
        has_bias=has_bias,
        quant_config=quant_config,
        quant_dtype=QuantDtype.INT8,
    )

    # Fake-quant on the activations: per-tensor, symmetric, narrow-range INT8
    # in LEARNED_SCALE mode, with EMA-tracked min/max initialized to [-16, 16].
    self.fake_quant_act = nn.FakeQuantWithMinMaxObserver(
        min_init=-16,
        max_init=16,
        ema=True,
        quant_dtype=QuantDtype.INT8,
        per_channel=False,
        symmetric=True,
        narrow_range=True,
        mode="LEARNED_SCALE",
    )
def __init__(self, block, layer_nums, in_channels, out_channels, strides, num_classes):
    """Construct a 4-stage ResNet with a quantization-aware classification head.

    Args:
        block: residual block class forwarded to ``self._make_layer``.
        layer_nums (list): number of blocks in each of the 4 stages.
        in_channels (list): input channel count of each stage.
        out_channels (list): output channel count of each stage.
        strides (list): stride of each stage's first block.
        num_classes (int): output size of the final dense layer.

    Raises:
        ValueError: if ``layer_nums``, ``in_channels``, ``out_channels`` or
            ``strides`` do not all have length 4.
    """
    super(ResNet, self).__init__()
    # Fix: the original check skipped `strides`, so a short strides list
    # failed later with an unhelpful IndexError instead of this ValueError.
    if not len(layer_nums) == len(in_channels) == len(out_channels) == len(strides) == 4:
        raise ValueError(
            "the length of layer_nums, in_channels, out_channels, strides list must be 4!"
        )

    # Stem: 7x7 stride-2 conv+BN+ReLU followed by a 3x3 stride-2 max-pool.
    self.conv1 = ConvBNReLU(3, 64, kernel_size=7, stride=2)
    self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="same")

    # Four residual stages; per-stage width/depth/stride come from the lists.
    self.layer1 = self._make_layer(block, layer_nums[0],
                                   in_channel=in_channels[0],
                                   out_channel=out_channels[0],
                                   stride=strides[0])
    self.layer2 = self._make_layer(block, layer_nums[1],
                                   in_channel=in_channels[1],
                                   out_channel=out_channels[1],
                                   stride=strides[1])
    self.layer3 = self._make_layer(block, layer_nums[2],
                                   in_channel=in_channels[2],
                                   out_channel=out_channels[2],
                                   stride=strides[2])
    self.layer4 = self._make_layer(block, layer_nums[3],
                                   in_channel=in_channels[3],
                                   out_channel=out_channels[3],
                                   stride=strides[3])

    # ReduceMean + Flatten for pooling before the head; the reduction axes are
    # supplied at call time (presumably the spatial dims — confirm in construct).
    self.mean = P.ReduceMean(keep_dims=True)
    self.flatten = nn.Flatten()

    # Quantization-aware classification head plus fake-quant on its output.
    self.end_point = nn.DenseQuant(out_channels[3], num_classes, has_bias=True,
                                   quant_config=_quant_config)
    self.output_fake = nn.FakeQuantWithMinMaxObserver(ema=True, ema_decay=_ema_decay)

    # init weights
    self._initialize_weights()