Code Example #1
def export(net, *inputs, file_name, file_format='GEIR'):
    """
    Exports MindSpore predict model to file in specified format.

    Args:
        net (Cell): MindSpore network.
        inputs (Tensor): Inputs of the `net`.
        file_name (str): File name of model to export.
        file_format (str): MindSpore currently supports 'GEIR', 'ONNX', 'LITE' and 'BINARY' format for exported model.

            - GEIR: Graph Engine Intermediate Representation. An intermediate representation format of
              Ascend models.
            - ONNX: Open Neural Network eXchange. An open format built to represent machine learning models.
            - LITE: Huawei model format for mobile. A lightweight model format used only by MindSpore Lite.
            - BINARY: Binary format for model. An intermediate representation format for models.
    """
    logger.info("exporting model file:%s format:%s.", file_name, file_format)
    check_input_data(*inputs, data_class=Tensor)

    supported_formats = ['GEIR', 'ONNX', 'LITE', 'BINARY']
    if file_format not in supported_formats:
        raise ValueError(
            f'Illegal file format {file_format}, it must be one of {supported_formats}'
        )
    # switch network mode to infer when it is training
    is_training = net.training
    if is_training:
        net.set_train(mode=False)
    # export model
    if file_format == 'GEIR':
        _executor.compile(net, *inputs, phase='export')
        _executor.export(net, file_name, file_format)
    elif file_format == 'ONNX':  # file_format is 'ONNX'
        # NOTICE: the phase name `export_onnx` is used in the compile pipeline to detect that ONNX is being
        #         exported, do not change it to other values.
        phase_name = 'export_onnx'
        graph_id, _ = _executor.compile(net,
                                        *inputs,
                                        phase=phase_name,
                                        do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(graph_id)
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)
    elif file_format == 'BINARY':  # file_format is 'BINARY'
        phase_name = 'export_binary'
        graph_id, _ = _executor.compile(net,
                                        *inputs,
                                        phase=phase_name,
                                        do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(graph_id, 'binary_ir')
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)
    elif file_format == 'LITE':  # file_format is 'LITE'
        context.set_context(save_ms_model=True, save_ms_model_path=file_name)
        net(*inputs)
    # restore network training mode
    if is_training:
        net.set_train(mode=True)
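
A minimal usage sketch for this older API, assuming a MindSpore release of the same era: the import path `mindspore.train.serialization` for `export` is an assumption, and a single `nn.Dense` layer stands in for a real network.

import numpy as np
from mindspore import Tensor, nn
from mindspore.train.serialization import export  # assumed location of the function above

# Any Cell can serve as `net`; a single Dense layer keeps the sketch short.
net = nn.Dense(3, 2)
dummy_input = Tensor(np.ones([1, 3]).astype(np.float32))

# file_name and file_format are keyword-only because they follow *inputs.
export(net, dummy_input, file_name="dense.onnx", file_format="ONNX")
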
Code Example #2
def _export(net, file_name, file_format, *inputs):
    """
    It is an internal conversion function that exports the MindSpore prediction model to a file in the specified format.
    """
    logger.info("exporting model file:%s format:%s.", file_name, file_format)
    check_input_data(*inputs, data_class=Tensor)

    if file_format == 'GEIR':
        logger.warning(
            f"Format 'GEIR' is deprecated, it would be removed in future release, use 'AIR' instead."
        )
        file_format = 'AIR'

    supported_formats = ['AIR', 'ONNX', 'MINDIR']
    if file_format not in supported_formats:
        raise ValueError(
            f'Illegal file format {file_format}, it must be one of {supported_formats}'
        )
    # When dumping an ONNX file, switch the network to inference mode if it is training (NOTE: ONNX is designed only for prediction)
    is_dump_onnx_in_training = net.training and file_format == 'ONNX'
    if is_dump_onnx_in_training:
        net.set_train(mode=False)

    net.init_parameters_data()
    if file_format == 'AIR':
        phase_name = 'export.air'
        graph_id, _ = _executor.compile(net, *inputs, phase=phase_name)
        if not file_name.endswith('.air'):
            file_name += ".air"
        _executor.export(file_name, graph_id)
    elif file_format == 'ONNX':
        phase_name = 'export.onnx'
        graph_id, _ = _executor.compile(net,
                                        *inputs,
                                        phase=phase_name,
                                        do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(net, graph_id)
        if not file_name.endswith('.onnx'):
            file_name += ".onnx"
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)
    elif file_format == 'MINDIR':
        phase_name = 'export.mindir'
        graph_id, _ = _executor.compile(net,
                                        *inputs,
                                        phase=phase_name,
                                        do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(net, graph_id, 'mind_ir')
        if not file_name.endswith('.mindir'):
            file_name += ".mindir"
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)

    if is_dump_onnx_in_training:
        net.set_train(mode=True)
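
Because _export takes file_name and file_format positionally before *inputs, the public entry point mainly needs to re-order the arguments before delegating. A hypothetical sketch of such a wrapper (the real public export may do additional work, e.g. for quantized networks, which is omitted here):

def export(net, *inputs, file_name, file_format='AIR'):
    """Hypothetical public wrapper: keyword-only file_name/file_format, then delegate to _export."""
    _export(net, file_name, file_format, *inputs)
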
Code Example #3
def export(net, *inputs, file_name, file_format='AIR'):
    """
    Export the MindSpore prediction model to a file in the specified format.

    Args:
        net (Cell): MindSpore network.
        inputs (Tensor): Inputs of the `net`.
        file_name (str): File name of the model to be exported.
        file_format (str): MindSpore currently supports 'AIR', 'ONNX' and 'MINDIR' format for exported model.

            - AIR: Ascend Intermediate Representation. An intermediate representation format of Ascend models.
              Recommended suffix for output file is '.air'.
            - ONNX: Open Neural Network eXchange. An open format built to represent machine learning models.
              Recommended suffix for output file is '.onnx'.
            - MINDIR: MindSpore Native Intermediate Representation for Anf. An intermediate representation format
              for MindSpore models.
              Recommended suffix for output file is '.mindir'.
    """
    logger.info("exporting model file:%s format:%s.", file_name, file_format)
    check_input_data(*inputs, data_class=Tensor)

    if file_format == 'GEIR':
        logger.warning(f"Format 'GEIR' is deprecated, it would be removed in future release, use 'AIR' instead.")
        file_format = 'AIR'

    supported_formats = ['AIR', 'ONNX', 'MINDIR']
    if file_format not in supported_formats:
        raise ValueError(f'Illegal file format {file_format}, it must be one of {supported_formats}')
    # When dumping an ONNX file, switch the network to inference mode if it is training (NOTE: ONNX is designed only for prediction)
    is_dump_onnx_in_training = net.training and file_format == 'ONNX'
    if is_dump_onnx_in_training:
        net.set_train(mode=False)
    # export model
    net.init_parameters_data()
    if file_format == 'AIR':
        phase_name = 'export.air'
        graph_id, _ = _executor.compile(net, *inputs, phase=phase_name)
        _executor.export(file_name, graph_id)
    elif file_format == 'ONNX':  # file_format is 'ONNX'
        phase_name = 'export.onnx'
        graph_id, _ = _executor.compile(net, *inputs, phase=phase_name, do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(graph_id)
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)
    elif file_format == 'MINDIR':  # file_format is 'MINDIR'
        phase_name = 'export.mindir'
        graph_id, _ = _executor.compile(net, *inputs, phase=phase_name, do_convert=False)
        onnx_stream = _executor._get_func_graph_proto(graph_id, 'mind_ir')
        with open(file_name, 'wb') as f:
            os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
            f.write(onnx_stream)
    # restore network training mode
    if is_dump_onnx_in_training:
        net.set_train(mode=True)
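
A minimal end-to-end sketch of calling this export. The import path `mindspore.train.serialization` is an assumption, SimpleNet is a placeholder Cell, and the input shape is arbitrary; only the export call itself corresponds to the API shown above.

import numpy as np
from mindspore import Tensor, nn
from mindspore.train.serialization import export  # assumed import path for the function above

class SimpleNet(nn.Cell):
    """Toy network used only to illustrate the export call."""
    def __init__(self):
        super(SimpleNet, self).__init__()
        self.dense = nn.Dense(16, 4)

    def construct(self, x):
        return self.dense(x)

net = SimpleNet()
dummy_input = Tensor(np.ones([1, 16]).astype(np.float32))

# This version of export() writes to file_name exactly as given, so include the suffix yourself.
export(net, dummy_input, file_name="simple_net.mindir", file_format="MINDIR")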