Example #1
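# Note: this excerpt assumes `import textwrap`, `import tvm`, `from tvm import autotvm`,
# and `from tvm.autotvm.tuner import RandomTuner` from the surrounding test module.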
def test_tuning_cpu():
    ir_mod = tvm.parser.fromtext(
        textwrap.dedent("""
        #[version = "0.0.5"]
        def @main(%a : Tensor[(1, 3, 32, 32), float32], %b : Tensor[(3, 3, 5, 5), float32]) {
               nn.conv2d(%a, %b, data_layout="NCHW", kernel_layout="OIHW")
        }
        """))
    tasks = autotvm.task.relay_integration.extract_from_program(
        ir_mod, {}, tvm.target.create("llvm"))
    assert len(tasks) == 1, f"Extracted != 1 task from program: {tasks!r}"

    task = tasks[0]

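    # Build and run candidate configurations on the local machine.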
    measure_option = autotvm.measure_option(autotvm.LocalBuilder(),
                                            autotvm.LocalRunner())

    results = []

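    # Run 20 random trials; the callback collects every MeasureResult.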
    tuner = RandomTuner(task)
    tuner.tune(
        n_trial=20,
        measure_option=measure_option,
        callbacks=[lambda _tuner, _inputs, rs: results.extend(rs)],
    )

    assert len(results) == 20

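    # At least one trial should have compiled and run without error.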
    successful_results = [
        r for r in results if r.error_no == autotvm.MeasureErrorNo.NO_ERROR
    ]
    assert len(successful_results) > 0, f"No successful tuning runs: {results!r}"
Example #2
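    # Note: `get_sample_task` is a helper assumed to be defined elsewhere in the surrounding test module.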
    def runner(target):
        # init task
        task, target = get_sample_task(target, None)
        logging.info("task config space: %s", task.config_space)

        measure_option = autotvm.measure_option(autotvm.LocalBuilder(),
                                                autotvm.LocalRunner())

        results = []

        tuner = RandomTuner(task)
        tuner.tune(
            n_trial=20,
            measure_option=measure_option,
            callbacks=[lambda _tuner, _inputs, rs: results.extend(rs)],
        )

        assert len(results) == 20

        successful_results = [
            r for r in results if r.error_no == autotvm.MeasureErrorNo.NO_ERROR
            # We filter records before building if we know they won't work ahead of time.
            # We can't guarantee we get one good record, so we count these as successes too.
            or r.error_no == autotvm.MeasureErrorNo.INSTANTIATION_ERROR
        ]
        assert len(successful_results) > 0, f"No successful tuning runs: {results!r}"
Example #3
def test_tuning_gpu(target, ctx):
    # init task
    task, target = get_sample_task(target, None)
    logging.info("task config space: %s", task.config_space)

    measure_option = autotvm.measure_option(autotvm.LocalBuilder(),
                                            autotvm.LocalRunner())

    results = []

    tuner = RandomTuner(task)
    tuner.tune(
        n_trial=20,
        measure_option=measure_option,
        callbacks=[lambda _tuner, _inputs, rs: results.extend(rs)],
    )

    assert len(results) == 20

    successful_results = [
        r for r in results if r.error_no == autotvm.MeasureErrorNo.NO_ERROR
    ]
    assert len(successful_results) > 0, f"No successful tuning runs: {results!r}"
Example #4
def test_tuning(target, ctx):
    # init task
    task, target = get_sample_task(target, None)
    logging.info("%s", task.config_space)

    measure_option = autotvm.measure_option(autotvm.LocalBuilder(),
                                            autotvm.LocalRunner())

    tuner = RandomTuner(task)
    tuner.tune(n_trial=20, measure_option=measure_option)
Example #5
    def check(target, target_host):
        ctx = tvm.context(target, 0)
        if not ctx.exist:
            logging.info("Skip test because %s is not available" % target)
            return

        # init task
        task, target = get_sample_task(target, target_host)
        logging.info("%s", task.config_space)

        measure_option = autotvm.measure_option(autotvm.LocalBuilder(),
                                                autotvm.LocalRunner())

        tuner = RandomTuner(task)
        tuner.tune(n_trial=20, measure_option=measure_option)
Example #6
    def check(target, target_host):
        ctx = tvm.context(target, 0)
        if not ctx.exist:
            logging.info("Skip test because %s is not available" % target)
            return

        # init task
        task, target = get_sample_task(target, target_host)
        logging.info("%s", task.config_space)

        measure_option = autotvm.measure_option(
            autotvm.LocalBuilder(),
            autotvm.LocalRunner())

        tuner = RandomTuner(task)
        tuner.tune(n_trial=20, measure_option=measure_option)
Example #7
    def check(target, target_host):
        ctx = tvm.context(target, 0)
        if not ctx.exist:
            logging.info("Skip test because %s is not available" % target)
            return

        # init task
        task, target = get_sample_task(target, target_host)
        logging.info("%s", task.config_space)

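        # Note: this snippet uses the older keyword-style measure_option API; recent
        # TVM releases expect builder/runner objects, as in the examples above.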
        measure_option = autotvm.measure_option(mode='local',
                                                timeout=4,
                                                number=2)

        tuner = RandomTuner(task)
        tuner.tune(n_trial=10, measure_option=measure_option)
Example #8
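    # Note: `MockedLocalBuilder` is assumed to be a stub builder defined elsewhere in the test module.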
    def runner(target, dev):
        task, target = get_sample_task(target, None)
        logging.info("task config space: %s", task.config_space)

        # Note: we use the MockedLocalBuilder here instead of autotvm.LocalBuilder()
        measure_option = autotvm.measure_option(MockedLocalBuilder(),
                                                autotvm.LocalRunner())

        results = []

        tuner = RandomTuner(task)
        tuner.tune(
            n_trial=1,
            measure_option=measure_option,
            callbacks=[lambda _tuner, _inputs, rs: results.extend(rs)],
        )

        assert len(results) == 1
Example #9
def tune_kernels(
    tasks,
    measure_option,
    tuner,
    n_trial,
    early_stopping,
    log_filename,
    use_transfer_learning,
):
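    # Tune each extracted task in turn, logging every measured record to log_filename.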
    for i, tsk in enumerate(reversed(tasks)):
        prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))

        # create tuner
        if tuner == "random" or n_trial >= len(tsk.config_space):
            tuner_obj = RandomTuner(tsk)
        elif tuner == "xgb" or tuner == "xgb-rank":
            tuner_obj = XGBTuner(tsk, loss_type="rank")
            # use history data to pre-train the cost model
            if use_transfer_learning:
                if os.path.isfile(log_filename):
                    tuner_obj.load_history(
                        autotvm.record.load_from_file(log_filename))
        elif tuner == "ga":
            tuner_obj = GATuner(tsk, pop_size=100)
        elif tuner == "gridsearch":
            tuner_obj = GridSearchTuner(tsk)
        else:
            raise ValueError("Invalid tuner: " + tuner)

        # do tuning
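        # Never request more trials than there are candidate configurations.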
        tsk_trial = min(n_trial, len(tsk.config_space))
        tuner_obj.tune(
            n_trial=tsk_trial,
            early_stopping=early_stopping,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(tsk_trial, prefix=prefix),
                autotvm.callback.log_to_file(log_filename),
            ],
        )