Example #1
def run_model(onnx_model, data_inputs):
    # type: (onnx.ModelProto, List[np.ndarray]) -> List[np.ndarray]
    """
    Convert ONNX model to an ngraph model and perform computation on input data.

    :param onnx_model: ONNX ModelProto describing an ONNX model
    :param data_inputs: list of numpy ndarrays with input data
    :return: list of numpy ndarrays with computed output
    """
    ng_model_function = import_onnx_model(onnx_model)
    runtime = get_runtime()
    computation = runtime.computation(ng_model_function)
    return computation(*data_inputs)
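
A minimal usage sketch, not part of the original example: assuming `model` is an already-loaded onnx.ModelProto whose graph takes two float32 inputs of shape [1], run_model can be called like this.

# Hypothetical usage; `model` and its input shapes are assumptions.
import numpy as np

inputs = [np.array([1.0], dtype=np.float32), np.array([2.0], dtype=np.float32)]
outputs = run_model(model, inputs)
print(outputs[0])  # numpy ndarray holding the first graph output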
Example #2
def test_simple_graph():
    node1 = make_node("Add", ["A", "B"], ["X"], name="add_node1")
    node2 = make_node("Add", ["X", "C"], ["Y"], name="add_node2")
    graph = make_graph(
        [node1, node2],
        "test_graph",
        [
            make_tensor_value_info("A", onnx.TensorProto.FLOAT, [1]),
            make_tensor_value_info("B", onnx.TensorProto.FLOAT, [1]),
            make_tensor_value_info("C", onnx.TensorProto.FLOAT, [1]),
        ],
        [make_tensor_value_info("Y", onnx.TensorProto.FLOAT, [1])],
    )
    model = make_model(graph, producer_name="ngraph ONNX Importer")

    ng_model_function = import_onnx_model(model)

    runtime = get_runtime()
    computation = runtime.computation(ng_model_function)
    assert np.array_equal(computation(1, 2, 3)[0], np.array([6.0], dtype=np.float32))
    assert np.array_equal(computation(4, 5, 6)[0], np.array([15.0], dtype=np.float32))
Example #3
def run_import(test_self: Any, device: Text) -> None:
    # model_test and model_marker are captured from the enclosing
    # test-runner scope that defines this callback.
    model = ModelImportRunner._load_onnx_model(model_test.model_dir,
                                               model_test.model)
    model_marker[0] = model
    assert import_onnx_model(model)