Example #1
def test_qat_mobile_per_channel_ff():
    # Build a float MobileNetV2 and a dummy NCHW input used for export tracing.
    network = mobilenetV2(num_classes=1000)
    img = Tensor(np.ones((1, 3, 224, 224)).astype(np.float32))
    # Convert to a fake-quantized network with folded BatchNorm; the two-element
    # lists configure [weights, activations]: per-layer quantization for both,
    # symmetric quantization for weights only.
    network = qat.convert_quant_network(network, bn_fold=True, per_channel=[False, False], symmetric=[True, False])
    # A real test would load a trained checkpoint here; initializing the
    # parameters stands in for it (mocked).
    network.init_parameters_data()
    # Export the fake-quantized network to a protobuf file.
    qat.export(network, img, file_name="quant.pb")
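These examples are excerpted from a larger test module, so the surrounding imports are not shown. Below is a minimal sketch of what a self-contained version would need; the exact module paths are an assumption (they differ between MindSpore releases), and mobilenetV2 is a model constructor defined elsewhere in the test suite.

# Assumed imports for these examples; module paths vary across MindSpore
# releases, so treat them as placeholders rather than exact paths.
import numpy as np
from mindspore import Tensor
from mindspore.train.quant import quant as qat  # functional API used in Example #1 (older releases)
from mindspore.compression.quant import QuantizationAwareTraining, OptimizeOption  # class API used in Examples #2 and #3
from mindspore.compression.export import quant_export  # export helper used in Examples #2 and #3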
Example #2
def test_qat_mobile_per_channel_tf():
    # Same flow as Example #1, but using the QuantizationAwareTraining class API
    # and per-channel quantization for weights ([True, False]).
    network = mobilenetV2(num_classes=1000)
    img = Tensor(np.ones((1, 3, 224, 224)).astype(np.float32))
    quantizer = QuantizationAwareTraining(bn_fold=True,
                                          per_channel=[True, False],
                                          symmetric=[True, False])
    network = quantizer.quantize(network)
    # A real test would load a trained checkpoint here; initializing the
    # parameters stands in for it (mocked).
    network.init_parameters_data()
    # Export the fake-quantized network to a protobuf file.
    quant_export.export(network, img, file_name="quant.pb")
Example #3
def test_lsq_mobile_per_channel_ff():
    # LSQ (learned scale quantization) variant: the LEARNED_SCALE optimize
    # option trains the quantization scales instead of deriving them from
    # observed min/max statistics.
    network = mobilenetV2(num_classes=1000)
    img = Tensor(np.ones((1, 3, 224, 224)).astype(np.float32))
    quantizer = QuantizationAwareTraining(
        bn_fold=True,
        per_channel=[False, False],   # per-layer quantization for weights and activations
        symmetric=[True, True],       # symmetric, narrow-range quantization for both
        narrow_range=[True, True],
        freeze_bn=0,                  # no BN freezing and no quant delay
        quant_delay=0,
        one_conv_fold=True,
        optimize_option=OptimizeOption.LEARNED_SCALE)
    network = quantizer.quantize(network)
    # A real test would load a trained checkpoint here; initializing the
    # parameters stands in for it (mocked).
    network.init_parameters_data()
    # Export the fake-quantized network to a protobuf file.
    quant_export.export(network, img, file_name="quant.pb")
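In all three examples the "mock" comment marks the point where a trained checkpoint would normally be restored; init_parameters_data() only stands in for that step. Below is a minimal sketch of the real step using MindSpore's checkpoint utilities; the checkpoint filename is a placeholder, and the last two lines would replace the mocked initialization inside the test body.

# Restore trained quantization-aware weights instead of the mocked
# initialization; "mobilenetv2_qat.ckpt" is a placeholder path.
from mindspore import load_checkpoint, load_param_into_net  # in older releases: mindspore.train.serialization

param_dict = load_checkpoint("mobilenetv2_qat.ckpt")
load_param_into_net(network, param_dict)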