Example #1
def export_lenet(optim_option="QAT"):
    context.set_context(mode=context.GRAPH_MODE, device_target=device_target)
    cfg = quant_cfg
    # define fusion network
    network = LeNet5Fusion(cfg.num_classes)
    # convert fusion network to quantization aware network
    if optim_option == "LEARNED_SCALE":
        quant_optim_options = OptimizeOption.LEARNED_SCALE
        quantizer = QuantizationAwareTraining(
            bn_fold=False,
            per_channel=[True, False],
            symmetric=[True, True],
            narrow_range=[True, True],
            freeze_bn=0,
            quant_delay=0,
            one_conv_fold=True,
            optimize_option=quant_optim_options)
    else:
        quantizer = QuantizationAwareTraining(quant_delay=0,
                                              bn_fold=False,
                                              freeze_bn=10000,
                                              per_channel=[True, False],
                                              symmetric=[True, False])
    network = quantizer.quantize(network)

    # export network
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]),
                    mstype.float32)
    export(network,
           inputs,
           file_name="lenet_quant",
           file_format='MINDIR',
           quant_mode='AUTO')
Example #2
def output_file_formats(ckpt_path, net_work, batch_size, output_file_name,
                        output_format):
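    # load the trained weights into the network before export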
    load_checkpoint(ckpt_path, net=net_work)
    input_data = np.random.uniform(0.0, 1.0,
                                   size=batch_size).astype(np.float32)
    export(net_work,
           Tensor(input_data),
           file_name=output_file_name,
           file_format=output_format)
Example #3
def main():
    network = LeNet5()
    # load the parameter into net
    load_checkpoint(args.ckpt_path, net=network)
    input_x = np.random.uniform(0.0, 1.0, size=[1, 1, 32,
                                                32]).astype(np.float32)
    export(network,
           Tensor(input_x),
           file_name=args.file_name,
           file_format=args.file_format)
Example #4
def export_net(model_ckpt_path):
    bert_net_cfg.batch_size = 1
    poetrymodel = BertPoetryModel(bert_net_cfg, False, 3191, dropout_prob=0.0)
    poetrymodel.set_train(False)
    param_dict = load_checkpoint(model_ckpt_path)
    load_param_into_net(poetrymodel, param_dict)
    input_id = np.ones(shape=(1, 128))
    token_type_id = np.ones(shape=(1, 128))
    pad_mask = np.ones(shape=(1, 128))
    export(poetrymodel,
           Tensor(input_id, mstype.int32),
           Tensor(token_type_id, mstype.int32),
           Tensor(pad_mask, mstype.float32),
           file_name='poetry.pb',
           file_format='MINDIR')
Example #5
def run_export(args):
    """ export """
    device_id = int(os.getenv('DEVICE_ID', '0'))
    context.set_context(mode=context.GRAPH_MODE,
                        device_target="Ascend",
                        device_id=device_id)
    net = RCAN(args)
    param_dict = load_checkpoint(args.ckpt_path)
    load_param_into_net(net, param_dict)
    net.set_train(False)
    print('load mindspore net and checkpoint successfully.')
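    # dummy input: a zero batch of 3-channel images at the fixed export resolution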
    inputs = Tensor(np.zeros([args.batch_size, 3, 678, 1020], np.float32))
    export(net, inputs, file_name=args.file_name, file_format=args.file_format)
    print('export successfully!')
Example #6
def test_export():
    """distributed inference after distributed training"""
    context.set_context(mode=context.GRAPH_MODE)
    init(backend_name="hccl")
    context.set_auto_parallel_context(
        full_batch=True,
        parallel_mode="semi_auto_parallel",
        strategy_ckpt_load_file="./train_strategy.ckpt",
        device_num=8)

    predict_data = create_predict_data()
    network = Net(matmul_size=(96, 16))
    model = Model(network)
    predict_layout = model.infer_predict_layout(Tensor(predict_data))
    ckpt_file_list = create_ckpt_file_list()
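    # merge the sharded checkpoints into the network according to the inferred layout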
    load_distributed_checkpoint(network, ckpt_file_list, predict_layout)
    export(network,
           Tensor(predict_data),
           file_name='net',
           file_format='MINDIR')
Example #7
def export_lenet():
    context.set_context(mode=context.GRAPH_MODE, device_target=device_target)
    cfg = quant_cfg
    # define fusion network
    network = LeNet5Fusion(cfg.num_classes)
    # convert fusion network to quantization aware network
    quantizer = QuantizationAwareTraining(quant_delay=0,
                                          bn_fold=False,
                                          freeze_bn=10000,
                                          per_channel=[True, False],
                                          symmetric=[True, False])
    network = quantizer.quantize(network)

    # export network
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]),
                    mstype.float32)
    export(network,
           inputs,
           file_name="lenet_quant.mindir",
           file_format='MINDIR',
           quant_mode='AUTO')
Example #8
def test_maskrcnn_export():
    """
    Export Mask R-CNN to AIR format.
    """
    net = Mask_Rcnn_Resnet50(config=config)
    net.set_train(False)

    bs = config.test_batch_size

    img = Tensor(np.zeros([bs, 3, 768, 1280], np.float16))
    img_metas = Tensor(np.zeros([bs, 4], np.float16))
    gt_bboxes = Tensor(np.zeros([bs, 128, 4], np.float16))
    gt_labels = Tensor(np.zeros([bs, 128], np.int32))
    gt_num = Tensor(np.zeros([bs, 128], np.bool_))
    gt_mask = Tensor(np.zeros([bs, 128], np.bool_))

    input_data = [img, img_metas, gt_bboxes, gt_labels, gt_num, gt_mask]
    export(net, *input_data, file_name="maskrcnn", file_format="AIR")
    file_name = "maskrcnn.air"
    assert os.path.exists(file_name)
    os.remove(file_name)
Example #9
args = parser.parse_args()

context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)
if args.device_target == "Ascend":
    context.set_context(device_id=args.device_id)

if __name__ == '__main__':
    widedeep_config = WideDeepConfig()
    widedeep_config.argparse_init()

    net_builder = ModelBuilder()
    _, eval_net = net_builder.get_net(widedeep_config)

    param_dict = load_checkpoint(args.ckpt_file)
    load_param_into_net(eval_net, param_dict)
    eval_net.set_train(False)

    ids = Tensor(
        np.ones([widedeep_config.eval_batch_size,
                 widedeep_config.field_size]).astype(np.int32))
    wts = Tensor(
        np.ones([widedeep_config.eval_batch_size,
                 widedeep_config.field_size]).astype(np.float32))
    label = Tensor(
        np.ones([widedeep_config.eval_batch_size, 1]).astype(np.float32))
    input_tensor_list = [ids, wts, label]
    export(eval_net,
           *input_tensor_list,
           file_name=args.file_name,
           file_format=args.file_format)
Example #10
            quant_dtype=(QuantDtype.INT4, QuantDtype.INT8),
            freeze_bn=0,
            quant_delay=0,
            one_conv_fold=True,
            optimize_option=quant_optim_options)
    else:
        # define fusion network
        network = mobilenetV2(num_classes=cfg.num_classes)
        # convert fusion network to quantization aware network
        quantizer = QuantizationAwareTraining(bn_fold=True,
                                              per_channel=[True, False],
                                              symmetric=[True, False])
    network = quantizer.quantize(network)
    # load checkpoint
    param_dict = load_checkpoint(args_opt.checkpoint_path)
    load_param_into_net(network, param_dict)

    # export network
    print("============== Starting export ==============")
    inputs = Tensor(np.ones([1, 3, cfg.image_height, cfg.image_width]),
                    mindspore.float32)
    export(network,
           inputs,
           file_name="mobilenetv2_quant",
           file_format=args_opt.file_format,
           quant_mode='QUANT',
           mean=0.,
           std_dev=48.106)

    print("============== End export ==============")
Example #11
if __name__ == '__main__':

    if config.dataset == 'MR':
        instance = MovieReview(root_dir=config.data_path,
                               maxlen=config.word_len,
                               split=0.9)
    elif config.dataset == 'SUBJ':
        instance = Subjectivity(root_dir=config.data_path,
                                maxlen=config.word_len,
                                split=0.9)
    elif config.dataset == 'SST2':
        instance = SST2(root_dir=config.data_path,
                        maxlen=config.word_len,
                        split=0.9)
    else:
        raise ValueError("dataset is not support.")

    net = TextCNN(vocab_len=instance.get_dict_len(),
                  word_len=config.word_len,
                  num_classes=config.num_classes,
                  vec_length=config.vec_length)

    param_dict = load_checkpoint(config.ckpt_file)
    load_param_into_net(net, param_dict)

    input_arr = Tensor(np.ones([config.batch_size, config.word_len], np.int32))
    export(net,
           input_arr,
           file_name=config.file_name,
           file_format=config.file_format)
Example #12
                        help='checkpoint of deeplabv3 (Default: None)')
    parser.add_argument('--model',
                        type=str.lower,
                        default='deeplab_v3_s8',
                        choices=['deeplab_v3_s16', 'deeplab_v3_s8'],
                        help='Select model structure (Default: deeplab_v3_s8)')
    parser.add_argument('--num_classes',
                        type=int,
                        default=21,
                        help='the number of classes (Default: 21)')
    args = parser.parse_args()

    if args.model == 'deeplab_v3_s16':
        network = net_factory.nets_map['deeplab_v3_s16']('eval',
                                                         args.num_classes, 16,
                                                         True)
    else:
        network = net_factory.nets_map['deeplab_v3_s8']('eval',
                                                        args.num_classes, 8,
                                                        True)
    param_dict = load_checkpoint(args.checkpoint)

    # load the parameter into net
    load_param_into_net(network, param_dict)
    input_data = np.random.uniform(0.0, 1.0, size=[32, 3, 513,
                                                   513]).astype(np.float32)
    export(network,
           Tensor(input_data),
           file_name=args.model + '-300_11.air',
           file_format='AIR')
Example #13
if args.file_format == "AIR" and args.device_target != "Ascend":
    raise ValueError("export AIR must on Ascend")

if __name__ == "__main__":
    input_size = m.ceil(config.captcha_height / 64) * 64 * 3
    captcha_width = config.captcha_width
    captcha_height = config.captcha_height
    batch_size = config.batch_size
    hidden_size = config.hidden_size
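    # default dummy image in float32; the Ascend branch below rebuilds it in float16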
    image = Tensor(
        np.zeros([batch_size, 3, captcha_height, captcha_width], np.float32))
    if args.device_target == 'Ascend':
        net = StackedRNN(input_size=input_size,
                         batch_size=batch_size,
                         hidden_size=hidden_size)
        image = Tensor(
            np.zeros([batch_size, 3, captcha_height, captcha_width],
                     np.float16))
    elif args.device_target == 'GPU':
        net = StackedRNNForGPU(input_size=input_size,
                               batch_size=batch_size,
                               hidden_size=hidden_size)
    else:
        net = StackedRNNForCPU(input_size=input_size,
                               batch_size=batch_size,
                               hidden_size=hidden_size)
    param_dict = load_checkpoint(args.ckpt_file)
    load_param_into_net(net, param_dict)
    net.set_train(False)
    export(net, image, file_name=args.file_name, file_format=args.file_format)
Example #14
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

parser = argparse.ArgumentParser(description='CNNCTC_export')
parser.add_argument('--ckpt_file',
                    type=str,
                    default='./ckpts/cnn_ctc.ckpt',
                    help='CNN&CTC ckpt file.')
parser.add_argument('--output_file',
                    type=str,
                    default='cnn_ctc',
                    help='CNN&CTC output air name.')
args_opt = parser.parse_args()

if __name__ == '__main__':
    cfg = Config_CNNCTC()
    ckpt_path = cfg.CKPT_PATH

    if args_opt.ckpt_file != "":
        ckpt_path = args_opt.ckpt_file

    net = CNNCTC_Model(cfg.NUM_CLASS, cfg.HIDDEN_SIZE, cfg.FINAL_FEATURE_WIDTH)

    load_checkpoint(ckpt_path, net=net)

    bs = cfg.TEST_BATCH_SIZE

    input_data = Tensor(np.zeros([bs, 3, cfg.IMG_H, cfg.IMG_W]),
                        mstype.float32)

    export(net, input_data, file_name=args_opt.output_file, file_format="AIR")
Example #15
                    help="output file name.")
parser.add_argument("--file_format",
                    type=str,
                    choices=["AIR", "ONNX", "MINDIR"],
                    default="AIR",
                    help="file format")
parser.add_argument("--device_target",
                    type=str,
                    default="Ascend",
                    choices=["Ascend", "GPU", "CPU"],
                    help="device target (default: Ascend)")
args = parser.parse_args()

context.set_context(mode=context.GRAPH_MODE,
                    device_target=args.device_target,
                    device_id=args.device_id)

if __name__ == '__main__':
    net = get_network(num_classes=config.num_classes,
                      platform=args.device_target)

    param_dict = load_checkpoint(args.ckpt_file)
    load_param_into_net(net, param_dict)
    input_shp = [args.batch_size, 3, args.height, args.width]
    input_array = Tensor(
        np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net,
           input_array,
           file_name=args.file_name,
           file_format=args.file_format)
Example #16
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
ssd export mindir.
"""
import argparse
import numpy as np
from mindspore import context, Tensor, load_checkpoint, load_param_into_net, export
from src.ssd import SSD300, ssd_mobilenet_v2
from src.config import config

def get_export_args():
    parser = argparse.ArgumentParser(description='SSD export')
    parser.add_argument("--checkpoint_path", type=str, required=True, help="Checkpoint file path.")
    parser.add_argument("--run_platform", type=str, default="Ascend", choices=("Ascend", "GPU", "CPU"),
                        help="run platform, support Ascend, GPU and CPU.")
    return parser.parse_args()

if __name__ == '__main__':
    args_opt = get_export_args()
    context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.run_platform)
    net = SSD300(ssd_mobilenet_v2(), config, is_training=False)

    param_dict = load_checkpoint(args_opt.checkpoint_path)
    load_param_into_net(net, param_dict)
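    # dummy input shaped [batch=1, channels=3] plus the configured image shape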
    input_shp = [1, 3] + config.img_shape
    input_array = Tensor(np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net, input_array, file_name=config.export_file, file_format=config.export_format)
Example #17
    args.image_size = config.image_size
    args.num_classes = config.num_classes
    args.backbone = config.backbone

    args.image_size = list(map(int, config.image_size.split(',')))
    args.image_height = args.image_size[0]
    args.image_width = args.image_size[1]
    args.export_format = config.export_format
    args.export_file = config.export_file
    return args


if __name__ == '__main__':
    args_export = parse_args()
    context.set_context(mode=context.GRAPH_MODE,
                        device_target=args_export.platform)

    net = get_network(args_export.backbone,
                      num_classes=args_export.num_classes,
                      platform=args_export.platform)

    param_dict = load_checkpoint(args_export.pretrained)
    load_param_into_net(net, param_dict)
    input_shp = [1, 3, args_export.image_height, args_export.image_width]
    input_array = Tensor(
        np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net,
           input_array,
           file_name=args_export.export_file,
           file_format=args_export.export_format)
Example #18
        feature_size = [1, 2708, 1433]
        biases_size = [1, 2708, 2708]
        num_classes = 7

    hid_units = GatConfig.hid_units
    n_heads = GatConfig.n_heads

    feature = np.random.uniform(0.0, 1.0, size=feature_size).astype(np.float32)
    biases = np.random.uniform(0.0, 1.0, size=biases_size).astype(np.float64)

    feature_size = feature.shape[2]
    num_nodes = feature.shape[1]

    gat_net = GAT(feature_size,
                  num_classes,
                  num_nodes,
                  hid_units,
                  n_heads,
                  attn_drop=0.0,
                  ftr_drop=0.0)

    gat_net.set_train(False)
    load_checkpoint(args.ckpt_file, net=gat_net)
    gat_net.add_flags_recursive(fp16=True)

    export(gat_net,
           Tensor(feature),
           Tensor(biases),
           file_name=args.file_name,
           file_format=args.file_format)
Example #19
from mindspore import Tensor, context, load_checkpoint, load_param_into_net, export
from src.lenet import LeNet5

if os.path.exists(config.data_path_local):
    ckpt_file = config.ckpt_path_local
else:
    ckpt_file = os.path.join(config.data_path, 'checkpoint_lenet-10_1875.ckpt')

context.set_context(mode=context.GRAPH_MODE,
                    device_target=config.device_target)
if config.device_target == "Ascend":
    context.set_context(device_id=get_device_id())

if __name__ == "__main__":

    # define fusion network
    network = LeNet5(config.num_classes)
    # load network checkpoint
    param_dict = load_checkpoint(ckpt_file)
    load_param_into_net(network, param_dict)

    # export network
    inputs = Tensor(
        np.ones(
            [config.batch_size, 1, config.image_height, config.image_width]),
        mindspore.float32)
    export(network,
           inputs,
           file_name=config.file_name,
           file_format=config.file_format)
Example #20
                    device_target=args_opt.device_target)
if args_opt.device_target == "Ascend":
    context.set_context(device_id=args_opt.device_id)

if __name__ == '__main__':
    if args_opt.dataset_name == 'cifar10':
        width_multiplier = 1
        cifar_stem = True
        projection_dimension = 128
        image_height = 32
        image_width = 32
    else:
        raise ValueError("dataset is not support.")

    base_net = resnet(1,
                      width_multiplier=width_multiplier,
                      cifar_stem=cifar_stem)
    net = SimCLR(base_net, projection_dimension,
                 base_net.end_point.in_channels)

    param_dict = load_checkpoint(args_opt.ckpt_file)
    load_param_into_net(net, param_dict)

    input_arr = Tensor(
        np.zeros([args_opt.batch_size, 3, image_height, image_width]),
        ms.float32)
    export(net,
           input_arr,
           file_name=args_opt.file_name,
           file_format=args_opt.file_format)
Example #21
    type=str,
    default="",
    help='if mode is test, must provide the path of the trained ckpt file')
args = parser.parse_args()

if __name__ == "__main__":
    context.set_context(mode=context.GRAPH_MODE,
                        device_target=args.device_target)

    # define fusion network
    network = LeNet5Fusion(cfg.num_classes)
    # convert fusion network to quantization aware network
    quantizer = QuantizationAwareTraining(quant_delay=0,
                                          bn_fold=False,
                                          freeze_bn=10000,
                                          per_channel=[True, False],
                                          symmetric=[True, False])
    network = quantizer.quantize(network)
    # load quantization aware network checkpoint
    param_dict = load_checkpoint(args.ckpt_path)
    load_param_into_net(network, param_dict)

    # export network
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]),
                    mindspore.float32)
    export(network,
           inputs,
           file_name="lenet_quant",
           file_format='MINDIR',
           quant_mode='AUTO')
Example #22
parser.add_argument("--batch_size", type=int, default=1, help="batch size")
parser.add_argument("--ckpt_file", type=str, required=True, help="Checkpoint file path.")
parser.add_argument('--width', type=int, default=224, help='input width')
parser.add_argument('--height', type=int, default=224, help='input height')
parser.add_argument("--file_name", type=str, default="resnet_thor", help="output file name.")
parser.add_argument("--file_format", type=str, choices=["AIR", "ONNX", "MINDIR"], default="AIR", help="file format")
parser.add_argument("--device_target", type=str, default="Ascend",
                    choices=["Ascend", "GPU", "CPU"], help="device target (default: Ascend)")
args = parser.parse_args()

context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, device_id=args.device_id)

if __name__ == '__main__':

    context.set_context(mode=context.GRAPH_MODE, save_graphs=False)

    # define net
    net = resnet(class_num=config.class_num)
    net.add_flags_recursive(thor=False)

    # load checkpoint
    param_dict = load_checkpoint(args.ckpt_file)
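    # remove "damping" entries from the checkpoint; they are not needed for export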
    keys = list(param_dict.keys())
    for key in keys:
        if "damping" in key:
            param_dict.pop(key)
    load_param_into_net(net, param_dict)

    inputs = np.random.uniform(0.0, 1.0, size=[args.batch_size, 3, args.height, args.width]).astype(np.float32)
    export(net, Tensor(inputs), file_name=args.file_name, file_format=args.file_format)
Example #23
"""export"""
import argparse
import numpy as np

from mindspore import context, Tensor, load_checkpoint, load_param_into_net, export
from src.resnet_thor import resnet50 as resnet
from src.config import config

parser = argparse.ArgumentParser(description='checkpoint export')
parser.add_argument('--checkpoint_path', type=str, default=None, help='Checkpoint file path')
args_opt = parser.parse_args()

if __name__ == '__main__':

    context.set_context(mode=context.GRAPH_MODE, save_graphs=False)

    # define net
    net = resnet(class_num=config.class_num)
    net.add_flags_recursive(thor=False)

    # load checkpoint
    param_dict = load_checkpoint(args_opt.checkpoint_path)
    keys = list(param_dict.keys())
    for key in keys:
        if "damping" in key:
            param_dict.pop(key)
    load_param_into_net(net, param_dict)

    inputs = np.random.uniform(0.0, 1.0, size=[1, 3, 224, 224]).astype(np.float32)
    export(net, Tensor(inputs), file_name='resnet-42_5004.air', file_format='AIR')
Example #24
context.set_context(mode=context.GRAPH_MODE,
                    device_target=args.device_target,
                    device_id=args.device_id)

if __name__ == "__main__":
    config = ConfigGCN()

    if args.dataset == "cora":
        input_dim = 1433
        class_num = 7
        adj = Tensor(np.zeros((2708, 2708), np.float64))
        feature = Tensor(np.zeros((2708, 1433), np.float32))
    else:
        input_dim = 3703
        class_num = 6
        adj = Tensor(np.zeros((3312, 3312), np.float64))
        feature = Tensor(np.zeros((3312, 3703), np.float32))

    gcn_net = GCN(config, input_dim, class_num)

    gcn_net.set_train(False)
    load_checkpoint(args.ckpt_file, net=gcn_net)
    gcn_net.add_flags_recursive(fp16=True)

    export(gcn_net,
           adj,
           feature,
           file_name=args.file_name,
           file_format=args.file_format)
Example #25
from src.lenet import LeNet5

parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
parser.add_argument(
    '--device_target',
    type=str,
    default="Ascend",
    choices=['Ascend', 'GPU'],
    help='device where the code will be executed (default: Ascend)')
parser.add_argument(
    '--ckpt_path',
    type=str,
    default="",
    help='if mode is test, must provide the path of the trained ckpt file')
args = parser.parse_args()

if __name__ == "__main__":
    context.set_context(mode=context.GRAPH_MODE,
                        device_target=args.device_target)

    # define fusion network
    network = LeNet5(cfg.num_classes)
    # load network checkpoint
    param_dict = load_checkpoint(args.ckpt_path)
    load_param_into_net(network, param_dict)

    # export network
    inputs = Tensor(np.ones([1, 1, cfg.image_height, cfg.image_width]),
                    mindspore.float32)
    export(network, inputs, file_name=cfg.air_name, file_format='AIR')
Example #26
        np.zeros(
            [config.test_batch_size, 3, config.img_height, config.img_width]),
        ms.float16)
    img_metas = Tensor(
        np.random.uniform(0.0, 1.0, size=[config.test_batch_size, 4]),
        ms.float16)
    gt_bboxes = Tensor(
        np.random.uniform(0.0,
                          1.0,
                          size=[config.test_batch_size, config.num_gts]),
        ms.float16)
    gt_label = Tensor(
        np.random.uniform(0.0,
                          1.0,
                          size=[config.test_batch_size, config.num_gts]),
        ms.int32)
    gt_num = Tensor(
        np.random.uniform(0.0,
                          1.0,
                          size=[config.test_batch_size, config.num_gts]),
        ms.bool_)

    export(net,
           img,
           img_metas,
           gt_bboxes,
           gt_label,
           gt_num,
           file_name=args.output_file,
           file_format=args.file_format)
Example #27
                    help="device target")
args = parser.parse_args()

context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)
if args.device_target == "Ascend":
    context.set_context(device_id=args.device_id)

if __name__ == "__main__":
    if cfg['model'] == 'unet_medical':
        net = UNetMedical(n_channels=cfg['num_channels'],
                          n_classes=cfg['num_classes'])
    elif cfg['model'] == 'unet_nested':
        net = NestedUNet(in_channel=cfg['num_channels'],
                         n_class=cfg['num_classes'])
    elif cfg['model'] == 'unet_simple':
        net = UNet(in_channel=cfg['num_channels'], n_class=cfg['num_classes'])
    else:
        raise ValueError("Unsupported model: {}".format(cfg['model']))
    # return a parameter dict for model
    param_dict = load_checkpoint(args.ckpt_file)
    # load the parameter into net
    load_param_into_net(net, param_dict)
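    # dummy input sized to the configured channel count and the requested height/width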
    input_data = Tensor(
        np.ones(
            [args.batch_size, cfg["num_channels"], args.height,
             args.width]).astype(np.float32))
    export(net,
           input_data,
           file_name=args.file_name,
           file_format=args.file_format)
Example #28
from src.config import cifar_cfg, imagenet_cfg
from src.googlenet import GoogleNet

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Classification')
    parser.add_argument('--dataset_name',
                        type=str,
                        default='cifar10',
                        choices=['imagenet', 'cifar10'],
                        help='dataset name.')
    args_opt = parser.parse_args()

    if args_opt.dataset_name == 'cifar10':
        cfg = cifar_cfg
    elif args_opt.dataset_name == 'imagenet':
        cfg = imagenet_cfg
    else:
        raise ValueError("dataset is not support.")

    net = GoogleNet(num_classes=cfg.num_classes)

    assert cfg.checkpoint_path is not None, "cfg.checkpoint_path is None."
    param_dict = load_checkpoint(cfg.checkpoint_path)
    load_param_into_net(net, param_dict)

    input_arr = Tensor(np.random.uniform(0.0, 1.0, size=[1, 3, 224, 224]),
                       ms.float32)
    export(net, input_arr, file_name=cfg.onnx_filename, file_format="ONNX")
    export(net, input_arr, file_name=cfg.air_filename, file_format="AIR")
Example #29
                    required=True,
                    help='inceptionv3 ckpt file.')
parser.add_argument('--output_file',
                    type=str,
                    default='inceptionv3.air',
                    help='inceptionv3 output air name.')
parser.add_argument('--file_format',
                    type=str,
                    choices=["AIR", "ONNX", "MINDIR"],
                    default='AIR',
                    help='file format')
parser.add_argument('--width', type=int, default=299, help='input width')
parser.add_argument('--height', type=int, default=299, help='input height')
args = parser.parse_args()

if __name__ == '__main__':
    net = InceptionV3(num_classes=cfg.num_classes, is_training=False)
    param_dict = load_checkpoint(args.ckpt_file)
    load_param_into_net(net, param_dict)

    input_arr = Tensor(
        np.random.uniform(0.0,
                          1.0,
                          size=[cfg.batch_size, 3, args.width, args.height]),
        ms.float32)

    export(net,
           input_arr,
           file_name=args.output_file,
           file_format=args.file_format)
Example #30
if __name__ == '__main__':
    net = FasterRcnn_Infer(config=config)

    param_dict = load_checkpoint(args.ckpt_file)

    # prefix checkpoint keys with "network." so they match the inference wrapper's parameter names
    param_dict_new = {}
    for key, value in param_dict.items():
        param_dict_new["network." + key] = value

    load_param_into_net(net, param_dict_new)

    device_type = "Ascend" if context.get_context(
        "device_target") == "Ascend" else "Others"
    if device_type == "Ascend":
        net.to_float(mstype.float16)

    img = Tensor(
        np.zeros(
            [config.test_batch_size, 3, config.img_height, config.img_width]),
        ms.float32)
    img_metas = Tensor(
        np.random.uniform(0.0, 1.0, size=[config.test_batch_size, 4]),
        ms.float32)

    export(net,
           img,
           img_metas,
           file_name=args.file_name,
           file_format=args.file_format)