Example 1
# Imports assumed from the enclosing test module.
import os
from os import path

from tvm import autotvm
from tvm.driver import tvmc


def _tuner_test_helper(model,
                       tuner_name,
                       tmpdir_name,
                       early_stopping=1,
                       prior_records=None):
    tvmc_model = tvmc.frontends.load_model(model)
    log_file = os.path.join(tmpdir_name, "log_{}.txt".format(tuner_name))

    tvmc.tune(
        tvmc_model,
        target="llvm",
        tuning_records=log_file,
        prior_records=prior_records,
        tuner=tuner_name,
        trials=4,
        early_stopping=early_stopping,
    )

    # testing whether the log file was produced
    assert path.exists(log_file), "tuning log file should exist"

    with autotvm.apply_history_best(log_file) as best:
        assert isinstance(
            best, autotvm.task.dispatcher.ApplyHistoryBest
        ), "unable to load the best results of tuning"

    return log_file
Example 2
# Imports assumed from the enclosing test module.
import os
from os import path

from tvm import auto_scheduler
from tvm.driver import tvmc


def _autoscheduler_test_helper(model,
                               tmpdir_name,
                               early_stopping=1,
                               prior_records=None):
    tvmc_model = tvmc.frontends.load_model(model)
    log_file = os.path.join(tmpdir_name, "autoscheduler.json")

    hardware_params = auto_scheduler.HardwareParams(num_cores=4, target="llvm")

    tvmc.tune(
        tvmc_model,
        target="llvm",
        tuning_records=log_file,
        prior_records=prior_records,
        early_stopping=early_stopping,
        enable_autoscheduler=True,
        trials=2,
        hardware_params=hardware_params,
    )

    # testing whether the log file was produced
    assert path.exists(log_file), "autoscheduler log file should exist"

    with auto_scheduler.ApplyHistoryBest(log_file) as best:
        assert isinstance(
            best, auto_scheduler.dispatcher.ApplyHistoryBest
        ), "unable to load the best results of tuning"

    return log_file
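A minimal sketch of how the two helpers above might be driven from pytest. The onnx_mnist fixture name and the tuner list are illustrative assumptions, not part of the original snippets.

import pytest


@pytest.mark.parametrize("tuner", ["random", "ga", "gridsearch", "xgb"])
def test_tune_with_autotvm_tuner(onnx_mnist, tmpdir_factory, tuner):
    # onnx_mnist is a hypothetical fixture returning a path to a model file.
    tmpdir = tmpdir_factory.mktemp("tuning")
    log_file = _tuner_test_helper(onnx_mnist, tuner, str(tmpdir))
    assert log_file.endswith("log_{}.txt".format(tuner))


def test_tune_with_autoscheduler(onnx_mnist, tmpdir_factory):
    tmpdir = tmpdir_factory.mktemp("tuning")
    _autoscheduler_test_helper(onnx_mnist, str(tmpdir))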
Example 3
# Imports assumed from the enclosing test module; the location of
# BenchmarkResult (tvm.runtime.module) may vary between TVM versions.
from os import path

import pytest

from tvm.driver import tvmc
from tvm.driver.tvmc.model import TVMCModel, TVMCPackage, TVMCResult
from tvm.runtime.module import BenchmarkResult


def test_tvmc_workflow(keras_simple):
    pytest.importorskip("tensorflow")

    tvmc_model = tvmc.load(keras_simple)
    tuning_records = tvmc.tune(tvmc_model, target="llvm", enable_autoscheduler=True, trials=2)
    tvmc_package = tvmc.compile(tvmc_model, tuning_records=tuning_records, target="llvm")
    result = tvmc.run(tvmc_package, device="cpu")
    assert type(tvmc_model) is TVMCModel
    assert type(tvmc_package) is TVMCPackage
    assert type(result) is TVMCResult
    assert path.exists(tuning_records)
    assert type(result.outputs) is dict
    assert type(result.times) is BenchmarkResult
    assert "output_0" in result.outputs.keys()
Example 4
# Imports assumed from the enclosing test module.
import os
from os import path

import pytest

from tvm.driver import tvmc
from tvm.driver.tvmc.model import TVMCModel


def test_save_load_model(keras_simple, tmpdir_factory):
    pytest.importorskip("onnx")

    tmpdir = tmpdir_factory.mktemp("data")
    tvmc_model = tvmc.load(keras_simple)

    # Create tuning artifacts
    tvmc.tune(tvmc_model, target="llvm", trials=2)

    # Create package artifacts
    tvmc.compile(tvmc_model, target="llvm")

    # Save the model to disk
    model_path = os.path.join(tmpdir, "saved_model.tar")
    tvmc_model.save(model_path)

    # Load the model into a new TVMCModel
    new_tvmc_model = TVMCModel(model_path=model_path)

    # Check that the two models match.
    assert str(new_tvmc_model.mod) == str(tvmc_model.mod)
    # Check that tuning records and the compiled package are recoverable.
    assert path.exists(new_tvmc_model.default_package_path())
    assert path.exists(new_tvmc_model.default_tuning_records_path())
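A hedged sketch of how the recovered package could be exercised after reloading; constructing TVMCPackage directly from default_package_path() is an assumption about the TVMC Python API rather than part of the original test.

from tvm.driver.tvmc.model import TVMCPackage


def rerun_saved_package(tvmc_model):
    # Assumes the model has already been compiled, so default_package_path()
    # points at a valid package archive (see test_save_load_model above).
    package = TVMCPackage(package_path=tvmc_model.default_package_path())
    result = tvmc.run(package, device="cpu")
    assert isinstance(result.outputs, dict)
    return result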