Example #1
def test_mobilenet(enable_usmp, target_kind):
    ir_mod, params = testing.mobilenet.get_workload(batch_size=1)
    data_shape = [
        int(x) for x in ir_mod["main"].checked_type.arg_types[0].shape
    ]
    data = np.random.uniform(size=data_shape).astype("float32")
    inputs = {"data": data}
    ref_outputs = generate_ref_data(ir_mod, inputs, params)

    with tvm.transform.PassContext(opt_level=3,
                                   config={
                                       "tir.disable_vectorize": True,
                                       "tir.usmp.enable": enable_usmp
                                   }):
        mod = tvm.relay.build(
            ir_mod,
            params=params,
            target=target_kind,
            executor=backend.Executor("aot", {"interface-api": "packed"}),
        )

    temp_dir = tvm.contrib.utils.TempDirectory()
    test_so_path = temp_dir / "test.so"
    mod.export_library(test_so_path, cc="gcc", options=["-std=c11"])
    loaded_mod = tvm.runtime.load_module(test_so_path)
    runner = tvm.runtime.executor.AotModule(loaded_mod["default"](tvm.cpu(0)))
    runner.set_input(**inputs)
    runner.run()
    assert (runner.get_output(0).asnumpy() == list(ref_outputs.values())[0]).all()
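These snippets omit their imports. A plausible preamble for the AOT examples on this page, assuming a recent TVM tree where generate_ref_data is exposed through tvm.testing.aot (older trees keep it in a test-only helper module), is sketched below:

import re
import textwrap

import numpy as np
import pytest

import tvm
import tvm.contrib.utils
import tvm.runtime.executor
from tvm import relay
from tvm.ir import IRModule
from tvm.relay import backend, testing
# Assumption: generate_ref_data is provided by TVM's AOT test utilities; its
# exact module path has moved between TVM releases.
from tvm.testing.aot import generate_ref_data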
Example #2
def test_pass_wrong_device_arg():
    """Ensure an error is generated if the incorrect number of devices are passed"""
    x = tvm.relay.var("x", tvm.relay.TensorType([1], dtype="float32"))
    expr = tvm.relay.add(
        x, tvm.relay.Constant(tvm.nd.array(np.array([1], dtype="float32"))))
    with tvm.transform.PassContext(opt_level=3,
                                   config={"tir.disable_vectorize": True}):
        mod = tvm.relay.build(
            tvm.IRModule.from_expr(tvm.relay.Function([x], expr)),
            target="c",
            executor=backend.Executor("aot", {"interface-api": "packed"}),
        )

    temp_dir = tvm.contrib.utils.TempDirectory()
    test_so_path = temp_dir / "test.so"
    mod.export_library(test_so_path,
                       cc="gcc",
                       options=["-std=c11", "-g3", "-O0"])
    loaded_mod = tvm.runtime.load_module(test_so_path)

    with pytest.raises(tvm.TVMError) as error:
        tvm.runtime.executor.AotModule(loaded_mod["default"](tvm.cpu(0),
                                                             tvm.cpu(0)))

    assert (
        "Check failed: devices_.size() == 1 (2 vs. 1) : Expect exactly 1 device passed."
        in str(error.value))
Example #3
def test_conv2d(enable_usmp, target_kind):
    RELAY_MODEL = textwrap.dedent(
        """\
        #[version = "0.0.5"]
        def @main(%data : Tensor[(1, 3, 64, 64), uint8], %weight : Tensor[(3, 3, 5, 5), int8]) {
            %1 = nn.conv2d(
                 %data,
                 %weight,
                 padding=[2, 2],
                 channels=3,
                 kernel_size=[5, 5],
                 data_layout="NCHW",
                 kernel_layout="OIHW",
                 out_dtype="int32");
            %2 = cast(nn.max_pool2d(%1, pool_size=[3, 3]), dtype="int8");
            %3 = nn.conv2d(
                 %2,
                 %weight,
                 padding=[2, 2],
                 channels=3,
                 kernel_size=[5, 5],
                 data_layout="NCHW",
                 kernel_layout="OIHW",
                 out_dtype="int32");
            %4 = nn.max_pool2d(%3, pool_size=[3, 3]);
            %4
        }
    """
    )
    ir_mod = tvm.parser.fromtext(RELAY_MODEL)

    main_func = ir_mod["main"]
    shape_dict = {p.name_hint: p.checked_type.concrete_shape for p in main_func.params}
    type_dict = {p.name_hint: p.checked_type.dtype for p in main_func.params}

    weight_data = np.ones(shape_dict["weight"]).astype(type_dict["weight"])
    input_data = np.ones(shape_dict["data"]).astype(type_dict["data"])

    params = {"weight": weight_data}
    inputs = {"data": input_data}
    ref_outputs = generate_ref_data(ir_mod, inputs, params)

    with tvm.transform.PassContext(
        opt_level=3, config={"tir.disable_vectorize": True, "tir.usmp.enable": enable_usmp}
    ):
        mod = tvm.relay.build(
            ir_mod,
            params=params,
            target=target_kind,
            executor=backend.Executor("aot", {"interface-api": "packed"}),
        )

    temp_dir = tvm.contrib.utils.TempDirectory()
    test_so_path = temp_dir / "test.so"
    mod.export_library(test_so_path, cc="gcc", options=["-std=c11"])
    loaded_mod = tvm.runtime.load_module(test_so_path)
    runner = tvm.runtime.executor.AotModule(loaded_mod["default"](tvm.cpu(0)))
    runner.set_input(**inputs)
    runner.run()
    assert (runner.get_output(0).asnumpy() == list(ref_outputs.values())[0]).all()
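Examples #1 and #3 take enable_usmp and target_kind as arguments; in TVM's test suite these come from pytest parametrization. A minimal sketch of how such a test might be wired up (the parameter values here are assumptions, not taken from this page) is:

@pytest.mark.parametrize("enable_usmp", [True, False])
@pytest.mark.parametrize("target_kind", ["c", "llvm"])
def test_conv2d(enable_usmp, target_kind):
    # Body as in Example #3: build with the AOT executor, export the shared
    # library, load and run it, then compare against the reference outputs.
    ...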
Example #4
def test_error_c_interface():
    """Checks that an error occurs when using the packed API in combination with C interface"""

    two = relay.add(relay.const(1), relay.const(1))
    func = relay.Function([], two)

    with pytest.raises(
            tvm.TVMError,
            match=re.escape(
                'Need unpacked-api == false (got: 0) and interface-api == "packed" (got: c) when '
                "targeting c++ runtime"),
    ):
        tvm.relay.build(
            IRModule.from_expr(func),
            target="llvm",
            executor=backend.Executor("aot", {"interface-api": "c"}),
        )
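The error above is raised because the default C++ runtime only supports the packed interface API. A hedged sketch of a build that avoids it, assuming the C runtime (backend.Runtime("crt")) is available in this TVM build, pairs the C interface with that runtime and the unpacked calling convention:

tvm.relay.build(
    IRModule.from_expr(func),
    target="c",
    runtime=backend.Runtime("crt"),
    executor=backend.Executor("aot", {"interface-api": "c", "unpacked-api": True}),
)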
Example #5
def test_error_c_interface():
    interface_api = "c"
    use_unpacked_api = False
    test_runner = AOT_DEFAULT_RUNNER

    two = relay.add(relay.const(1), relay.const(1))
    func = relay.Function([], two)

    with pytest.raises(
            tvm.TVMError,
            match=re.escape(
                'Need unpacked-api == false (got: 0) and interface-api == "packed" (got: c) when '
                "targeting c++ runtime"),
    ):
        tvm.relay.build(
            IRModule.from_expr(func),
            target="llvm",
            executor=backend.Executor("aot", {"interface-api": "c"}),
        )
Example #6
def test_tvmc_import_package_mlf_aot(tflite_mobilenet_v1_1_quant,
                                     tflite_compile_model):
    pytest.importorskip("tflite")

    tflite_compiled_model_mlf = tflite_compile_model(
        tflite_mobilenet_v1_1_quant,
        target="c",
        executor=backend.Executor("aot"),
        output_format="mlf",
        pass_context_configs=["tir.disable_vectorize=1"],
    )

    # Compile and export a model to an MLF archive so it can be imported.
    exported_tvmc_package = tflite_compiled_model_mlf
    archive_path = exported_tvmc_package.package_path

    # Import the MLF archive; the TVMCPackage constructor calls the import_package method.
    tvmc_package = TVMCPackage(archive_path)

    assert tvmc_package.lib_name is None, ".lib_name must not be set in the MLF archive."
    assert tvmc_package.lib_path is None, ".lib_path must not be set in the MLF archive."
    assert tvmc_package.graph is None, ".graph must not be set in the MLF archive for AOT executor."
    assert tvmc_package.params is not None, ".params must be set in the MLF archive."
    assert tvmc_package.type == "mlf", ".type must be set to 'mlf' in the MLF format."
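TVMCPackage here comes from tvm.driver.tvmc.model, and the two fixture arguments (tflite_mobilenet_v1_1_quant, tflite_compile_model) are supplied by TVM's tvmc test suite rather than a public API. A rough sketch of loading an MLF archive outside of pytest, with a placeholder archive path, might look like:

from tvm.driver.tvmc.model import TVMCPackage

# Placeholder path to an MLF archive, e.g. one produced by
# `tvmc compile --output-format mlf ...` (assumed path, not from this page).
archive_path = "module.tar"
tvmc_package = TVMCPackage(package_path=archive_path)
assert tvmc_package.type == "mlf"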