Example #1
import numpy as np
from mindspore import Tensor
# quantization-aware training helpers; the module path may differ across MindSpore versions
from mindspore.train.quant import quant as qat

def test_qat_mobile_per_channel_ff():
    # mobilenetV2 is the fusion model definition from the example's source package
    network = mobilenetV2(num_classes=1000)
    img = Tensor(np.ones((1, 3, 224, 224)).astype(np.float32))
    network = qat.convert_quant_network(network, bn_fold=True, per_channel=[False, False], symmetric=[True, False])
    # a trained checkpoint should be loaded here; parameters are mocked in this test
    network.init_parameters_data()
    qat.export(network, img, file_name="quant.pb")
Example #2
import numpy as np
from mindspore import Tensor
from mindspore.train.quant import quant as qat  # path may differ across MindSpore versions

def test_qat_lenet():
    # LeNet5 is the fusion model definition from the example's source package
    img = Tensor(np.ones((32, 1, 32, 32)).astype(np.float32))
    net = LeNet5()
    net = qat.convert_quant_network(
        net, bn_fold=True, per_channel=[True, False], symmetric=[True, False])
    # a trained checkpoint should be loaded here; parameters are mocked in this test
    net.init_parameters_data()
    qat.export(net, img, file_name="quant.pb")
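In Examples #1 and #2 the checkpoint load is only mocked: init_parameters_data() fills the network with default values. In a real export the trained parameters would be restored before calling qat.export, roughly as in the sketch below; the checkpoint path is hypothetical, and the serialization helpers are the same ones used in Examples #4 and #5.

from mindspore.train.serialization import load_checkpoint, load_param_into_net

# restore the trained quantization-aware weights instead of mocking them
param_dict = load_checkpoint("checkpoint_lenet.ckpt")  # hypothetical checkpoint path
load_param_into_net(net, param_dict)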
Example #3
import numpy as np
from mindspore import Tensor, context
from mindspore.common import dtype as mstype
from mindspore.train.quant import quant  # path may differ across MindSpore versions

def export_lenet():
    # quant_cfg and device_target are defined by the surrounding script (config module and CLI argument)
    context.set_context(mode=context.GRAPH_MODE, device_target=device_target)
    cfg = quant_cfg
    # define the fusion network
    network = LeNet5Fusion(cfg.num_classes)
    # convert the fusion network into a quantization-aware network
    network = quant.convert_quant_network(network,
                                          quant_delay=0,
                                          bn_fold=False,
                                          freeze_bn=10000,
                                          per_channel=[True, False],
                                          symmetric=[True, False])

    # export the quantized network to MINDIR
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]),
                    mstype.float32)
    quant.export(network,
                 inputs,
                 file_name="lenet_quant",
                 file_format='MINDIR')
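Example #3 reads quant_cfg and device_target from its surrounding script. A minimal, purely hypothetical stand-in that makes the snippet self-contained could look like the following; the field names are inferred from the attributes the function accesses, and the MNIST-like values are assumptions.

from types import SimpleNamespace

# hypothetical placeholders for the script-level config and CLI argument
quant_cfg = SimpleNamespace(num_classes=10, image_height=32, image_width=32)
device_target = "Ascend"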
Example #4
import argparse
import numpy as np
import mindspore
from mindspore import Tensor, context
from mindspore.train.quant import quant  # path may differ across MindSpore versions
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from src.config import mnist_cfg as cfg
from src.lenet_fusion import LeNet5 as LeNet5Fusion

parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
parser.add_argument('--device_target', type=str, default="Ascend",
                    choices=['Ascend', 'GPU'],
                    help='device on which the code will run (default: Ascend)')
parser.add_argument('--data_path', type=str, default="./MNIST_Data",
                    help='path where the dataset is saved')
parser.add_argument('--ckpt_path', type=str, default="",
                    help='path to the trained checkpoint file (required for export)')
parser.add_argument('--dataset_sink_mode', type=bool, default=True,
                    help='whether to enable dataset sink mode (True or False)')
args = parser.parse_args()

if __name__ == "__main__":
    context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)

    # define the fusion network
    network = LeNet5Fusion(cfg.num_classes)
    # convert the fusion network into a quantization-aware network
    network = quant.convert_quant_network(network, quant_delay=0, bn_fold=False, freeze_bn=10000)
    # load the quantization-aware network checkpoint
    param_dict = load_checkpoint(args.ckpt_path)
    load_param_into_net(network, param_dict)

    # export the network to AIR format
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]), mindspore.float32)
    quant.export(network, inputs, file_name="lenet_quant", file_format='AIR')
Example #5
import argparse
import numpy as np
import mindspore
from mindspore import Tensor, context
from mindspore.train.quant import quant  # path may differ across MindSpore versions
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from src.mobilenetV2 import mobilenetV2
from src.config import config_ascend_quant

parser = argparse.ArgumentParser(description='Image classification')
parser.add_argument('--checkpoint_path', type=str, default=None, help='Checkpoint file path')
parser.add_argument('--device_target', type=str, default=None, help='Run device target')
args_opt = parser.parse_args()

if __name__ == '__main__':
    cfg = None
    if args_opt.device_target == "Ascend":
        cfg = config_ascend_quant
        context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False)
    else:
        raise ValueError("Unsupported device target: {}.".format(args_opt.device_target))

    # define the fusion network
    network = mobilenetV2(num_classes=cfg.num_classes)
    # convert the fusion network into a quantization-aware network
    network = quant.convert_quant_network(network, bn_fold=True, per_channel=[True, False], symmetric=[True, False])
    # load the quantization-aware network checkpoint
    param_dict = load_checkpoint(args_opt.checkpoint_path)
    load_param_into_net(network, param_dict)

    # export the network to MINDIR format
    print("============== Starting export ==============")
    inputs = Tensor(np.ones([1, 3, cfg.image_height, cfg.image_width]), mindspore.float32)
    quant.export(network, inputs, file_name="mobilenet_quant", file_format='MINDIR')
    print("============== End export ==============")