Example #1
def test_identity():
    np.random.seed(133391)
    shape = [2, 4]
    input_data = np.random.randn(*shape).astype(np.float32)

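    # run a standalone Identity node and verify the output matches the input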
    identity_node = make_node("Identity", inputs=["x"], outputs=["y"])
    ng_results = run_node(identity_node, [input_data])
    assert np.array_equal(ng_results, [input_data])

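    # Identity embedded in a larger graph: Y = Abs(Identity(A + B))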
    node1 = make_node("Add",
                      inputs=["A", "B"],
                      outputs=["add1"],
                      name="add_node1")
    node2 = make_node("Identity",
                      inputs=["add1"],
                      outputs=["identity1"],
                      name="identity_node1")
    node3 = make_node("Abs",
                      inputs=["identity1"],
                      outputs=["Y"],
                      name="abs_node1")

    graph = make_graph(
        [node1, node2, node3],
        "test_graph",
        [
            make_tensor_value_info("A", onnx.TensorProto.FLOAT, shape),
            make_tensor_value_info("B", onnx.TensorProto.FLOAT, shape),
        ],
        [make_tensor_value_info("Y", onnx.TensorProto.FLOAT, shape)],
    )
    model = make_model(graph, producer_name="ngraph ONNX Importer")
    ng_model_function = import_onnx_model(model)
    runtime = get_runtime()
    computation = runtime.computation(ng_model_function)
    ng_results = computation(input_data, input_data)
    expected_result = np.abs(input_data + input_data)

    assert np.array_equal(ng_results[0], expected_result)
Example #2
def test_reshape_opset5():
    original_shape = [2, 3, 4]
    test_cases = {
        "reordered_dims": np.array([4, 2, 3], dtype=np.int64),
        "reduced_dims": np.array([3, 8], dtype=np.int64),
        "extended_dims": np.array([3, 2, 2, 2], dtype=np.int64),
        "one_dim": np.array([24], dtype=np.int64),
        "negative_dim": np.array([6, -1, 2], dtype=np.int64),
    }
    input_data = np.random.random_sample(original_shape).astype(np.float32)

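    # From opset 5 onwards Reshape takes the target shape as a second input
    # rather than an attribute, so each case supplies it via a Constant node.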
    for _, shape in test_cases.items():
        const_node = make_node(
            "Constant",
            inputs=[],
            outputs=["const_shape"],
            value=onnx.helper.make_tensor(
                name="const_tensor", data_type=onnx.TensorProto.INT64, dims=shape.shape, vals=shape.flatten()
            ),
        )
        reshape_node = onnx.helper.make_node("Reshape", inputs=["data", "const_shape"], outputs=["reshaped"])

        graph = make_graph(
            [const_node, reshape_node],
            "test_graph",
            [make_tensor_value_info("data", onnx.TensorProto.FLOAT, input_data.shape)],
            [make_tensor_value_info("reshaped", onnx.TensorProto.FLOAT, ())],
        )

        model = make_model(graph, producer_name="ngraph ONNX Importer")
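        # declare the model as opset 5 so the importer uses the two-input Reshape definition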
        model.opset_import[0].version = 5
        ng_model_function = import_onnx_model(model)
        runtime = get_runtime()
        computation = runtime.computation(ng_model_function)
        ng_results = computation(input_data)
        expected_output = np.reshape(input_data, shape)
        assert np.array_equal(ng_results[0], expected_output)
Example #3
def test_cast_errors():
    from onnx.onnx_cpp2py_export.checker import ValidationError

    np.random.seed(133391)
    input_data = np.ceil(np.random.rand(2, 3, 4) * 16)

    # missing 'to' attribute
    node = onnx.helper.make_node("Cast", inputs=["A"], outputs=["B"])
    input_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.FLOAT, value.shape)
        for name, value in zip(node.input, [input_data])
    ]
    output_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.FLOAT16, value.shape)
        for name, value in zip(node.output, ())
    ]  # type: ignore

    graph = make_graph([node], "compute_graph", input_tensors, output_tensors)
    model = make_model(graph, producer_name="NgraphBackend")
    with pytest.raises(ValidationError):
        import_onnx_model(model)

    # unsupported data type representation
    node = onnx.helper.make_node("Cast",
                                 inputs=["A"],
                                 outputs=["B"],
                                 to=1.2345)
    input_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.FLOAT, value.shape)
        for name, value in zip(node.input, [input_data])
    ]
    output_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.INT32, value.shape)
        for name, value in zip(node.output, ())
    ]  # type: ignore

    graph = make_graph([node], "compute_graph", input_tensors, output_tensors)
    model = make_model(graph, producer_name="NgraphBackend")
    with pytest.raises(ValidationError):
        import_onnx_model(model)

    # unsupported input tensor data type:
    node = onnx.helper.make_node("Cast",
                                 inputs=["A"],
                                 outputs=["B"],
                                 to=onnx.TensorProto.INT32)
    input_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.COMPLEX64, value.shape)
        for name, value in zip(node.input, [input_data])
    ]
    output_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.INT32, value.shape)
        for name, value in zip(node.output, ())
    ]  # type: ignore

    graph = make_graph([node], "compute_graph", input_tensors, output_tensors)
    model = make_model(graph, producer_name="NgraphBackend")
    with pytest.raises((RuntimeError, NgraphTypeError)):
        import_onnx_model(model)

    # unsupported output tensor data type:
    node = onnx.helper.make_node("Cast",
                                 inputs=["A"],
                                 outputs=["B"],
                                 to=onnx.TensorProto.COMPLEX128)
    input_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.FLOAT, value.shape)
        for name, value in zip(node.input, [input_data])
    ]
    output_tensors = [
        make_tensor_value_info(name, onnx.TensorProto.COMPLEX128, value.shape)
        for name, value in zip(node.output, ())
    ]  # type: ignore

    graph = make_graph([node], "compute_graph", input_tensors, output_tensors)
    model = make_model(graph, producer_name="NgraphBackend")
    with pytest.raises(RuntimeError):
        import_onnx_model(model)
Example #4
def import_and_compute_conv(x, weights, transpose=False, **attributes):
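    """Build an ONNX model with a single Conv node (or ConvTranspose when ``transpose`` is True),
    import it into nGraph, run it on ``x`` and ``weights``, and return the first output."""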
    x, weights = np.array(x), np.array(weights)
    onnx_model = make_onnx_model_for_conv_op(x.shape, weights.shape, transpose=transpose, **attributes)
    ng_model_function = import_onnx_model(onnx_model)
    computation = get_runtime().computation(ng_model_function)
    return computation(x, weights)[0]