Example #1
def create_measure(device, flag="t4", use_android=False):
    if device in ('arm', 'aarch64'):
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(
                build_func='ndk' if use_android else 'default'),
            runner=autotvm.RPCRunner(
                "pi",
                host='0.0.0.0',
                port=9190,
                number=5,
                timeout=10,
            ))
    elif 'x86' in device:
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.LocalRunner(number=5, repeat=1, min_repeat_ms=1000),
        )
    elif device == 'gpu':
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(timeout=1000),
            runner=autotvm.RPCRunner(
                flag,  # change the device key to your key
                '0.0.0.0',
                9190,
                number=20,
                repeat=3,
                timeout=1000,
                min_repeat_ms=150))
    else:
        raise ValueError("Unsupported device: " + device)
    return measure_option
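# Usage sketch (not from the original source): the dict returned above is meant to be
# handed to a tuner's tune() call; `task` and the log filename below are assumptions.
measure_option = create_measure('x86')
tuner = autotvm.tuner.XGBTuner(task)  # `task` is an autotvm task created elsewhere
tuner.tune(n_trial=200,
           measure_option=measure_option,
           callbacks=[autotvm.callback.log_to_file('tune.log')])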
Example #2
def test_autotvm(hexagon_session):
    """Top level test function for testing autotvm"""
    logfilename = "./hexagon.autotvm.log"

    options = {
        "log_filename": logfilename,
        "early_stopping": None,
        "measure_option": autotvm.measure_option(
            builder=autotvm.LocalBuilder(timeout=15),
            runner=autotvm.RPCRunner(
                module_loader=HexagonModuleLoader(hexagon_session),
                key=hexagon_session._remote_kw["key"],
                host=hexagon_session._remote_kw["host"],
                port=hexagon_session._remote_kw["port"],
                number=3,
                timeout=15,
                min_repeat_ms=150,
                # cooldown_interval=150
            ),
        ),
    }
    target_hexagon = tvm.target.hexagon("v68")
    task = autotvm.task.create(
        "demo_template", args=[], target=target_hexagon, target_host=target_hexagon
    )
    tune_tasks([task], **options)
Example #3
def tune_and_evaluate():
    df = pd.read_csv(args.layer_info)
    df = df[df['filename'] == args.layer]

    filenames = df.filename

    for net_fname in filenames:
        print('Tuning: ', net_fname)

        #### TUNING OPTION ####
        log_file = "models/%s/logs/%s.log" % (args.model, args.log_file)

        tuning_opt = {
            'log_filename':
            log_file,
            'n_trial':
            args.n_trials,
            'measure_option':
            autotvm.measure_option(builder=autotvm.LocalBuilder(timeout=10),
                                   runner=autotvm.RPCRunner(
                                       args.device_key,
                                       '0.0.0.0',
                                       9190,
                                       number=20,
                                       repeat=3,
                                       timeout=4,
                                       min_repeat_ms=150)),
        }

        in_c = int(df.loc[df.filename == net_fname, 'in_channels'])
        in_x = int(df.loc[df.filename == net_fname, 'input_spatial_x'])
        out_c = int(df.loc[df.filename == net_fname, 'out_channels'])

        input_shape = (1, in_c, in_x, in_x)
        print(input_shape)
        # extract workloads from relay program
        print("\tExtract tasks...")
        net, params = get_network(net_fname, input_shape)
        tasks = autotvm.task.extract_from_program(net['main'],
                                                  target=target,
                                                  target_host=target_host,
                                                  params=params,
                                                  ops=(relay.op.nn.conv2d, ))

        # run tuning tasks
        print("\tTuning...")
        tune_tasks(tasks, **tuning_opt)
Example #4
def tuning_model(model_path):
    dtype='float32'
    ox, shape_dict = get_model(model_path)
    input_name = list(shape_dict.keys())[0]
    device_key = None
    if args.target == 'gpu':
        device_key = 'V100'
    use_android = False

    log_file = get_logfile()

    other_option = {
        'model_path': model_path,
        'dtype': dtype,
        'input_name': input_name,
        'device_key': device_key,
        'use_android': use_android
    }

    if args.target == 'x86' or args.target == 'cpu':
        measure_option = autotvm.measure_option(
                builder=autotvm.LocalBuilder(),
                runner=autotvm.LocalRunner(
                    number=10, repeat=1,
                    min_repeat_ms=1000
                )
        )
    elif args.target == 'gpu':
        measure_option = autotvm.measure_option(
                builder=autotvm.LocalBuilder(timeout=10),
                runner=autotvm.RPCRunner(
                    device_key,
                    '0.0.0.0', 9190,
                    number=20, repeat=3, timeout=4, min_repeat_ms=150)
        )
    else:
        raise ValueError("Unsupported target: " + args.target)
    n_trial = 200

    tuning_option = {
        'log_filename': log_file,
        'tuner': 'xgb',
        'n_trial': n_trial,
        'early_stopping': 80,
        'measure_option': measure_option
    }

    graph, lib, params = tuning(tuning_option, **other_option)
    return graph, lib, params
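# Hypothetical follow-up (not part of the original example): with the pre-0.8 style
# (graph, lib, params) triple returned above, inference could be set up roughly as
# follows in older TVM releases; the CPU context is an assumption.
from tvm.contrib import graph_runtime

ctx = tvm.cpu(0)
module = graph_runtime.create(graph, lib, ctx)
module.set_input(**params)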
Example #5
    def __init__(self, task, target, device_key):
        self.task = task
        self.target = target
        self.device_key = device_key
        self.best_config = None
        self.best_latency = None

        self.early_stopping = None
        self.record = None
        self.tuner = 'xgb'
        self.n_trial = 30

        self.measure_option = autotvm.measure_option(
                builder=autotvm.LocalBuilder(build_func="default"),
                runner=autotvm.RPCRunner(
                    device_key,
                    host="115.145.179.79",
                    port=9090,
                    number=5,
                    timeout=10,
                ),
            )
Example #6
    def __init__(self, host='115.145.178.78', port=3306, user='******', password='******', db='modeldb'):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.db = pymysql.connect(host=host, port=port, user=user, password=password, db=db)
        self.cursor = self.db.cursor()
        
        self.device_key_list = ['xu4']
        #self.target_list = [tvm.target.Target("llvm -device=arm_cpu -mtriple=armv7l-linux-gnueabihf -mattr=+neon")]
        self.target_list = ["llvm -device=arm_cpu -mtriple=armv7l-linux-gnueabihf -mattr=+neon"]

        self.measure_option = autotvm.measure_option(
                builder=autotvm.LocalBuilder(build_func="default"),
                runner=autotvm.RPCRunner(
                    self.device_key_list[0],
                    host="115.145.179.79",
                    port=9090,
                    number=5,
                    timeout=10,
                ),
            )
Example #7
        task = autotvm.task.create(
            group_conv2d,
            args=(N, CI, H, W, CO, KH, KW, strides, padding, dilation, groups),
            target=tvm.target.vta(),
            target_host=env.target_host,
            template_key="direct",
        )
        print(task.config_space)

        # Tune
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        )

        # Run Tuner
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space),
                                              prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
Example #8
def tune_and_compile(graph: Graph, batch_size, target, target_host, device=None):
    #
    # this function is adopted and modified from tvm tutorial
    #
    log_dir = "./tvm_schedule_configs"
    os.makedirs(log_dir, exist_ok=True)
    log_file = os.path.join(log_dir, f"{graph.name}_{device}_{batch_size}.log")
    tuning_option = {
        'log_filename': log_file,
        'tuner': 'ga',
        'n_trial': 2000,
        'early_stopping': 600,
        'measure_option': autotvm.measure_option(
            builder=autotvm.LocalBuilder(timeout=10),
#            runner=autotvm.LocalRunner(number=20, repeat=3, timeout=4, min_repeat_ms=150),
            runner=autotvm.RPCRunner(
                'v100',  # change the device key to your key
                '0.0.0.0', 9190,
                number=20, repeat=3, timeout=4),
        )
    }

    # You can skip the implementation of this function for this tutorial.
    def tune_tasks(tasks,
                   measure_option,
                   tuner,
                   n_trial,
                   early_stopping,
                   log_filename,
                   use_transfer_learning=True):
        # create tmp log file
        tmp_log_file = log_filename + ".tmp"
        for i, tsk in enumerate(reversed(tasks)):
            prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))

            # create tuner
            if tuner == 'xgb' or tuner == 'xgb-rank':
                tuner_obj = XGBTuner(tsk, loss_type='rank')
            elif tuner == 'ga':
                tuner_obj = GATuner(tsk, pop_size=100)
            elif tuner == 'random':
                tuner_obj = RandomTuner(tsk)
            elif tuner == 'gridsearch':
                tuner_obj = GridSearchTuner(tsk)
            else:
                raise ValueError("Invalid tuner: " + tuner)

            if use_transfer_learning:
                if os.path.isfile(tmp_log_file):
                    tuner_obj.load_history(autotvm.record.load_from_file(tmp_log_file))

            # do tuning
            # print(f"tsk.config_space {tsk.config_space}")
            tuner_obj.tune(n_trial=min(n_trial, len(tsk.config_space)),
                           early_stopping=early_stopping,
                           measure_option=measure_option,
                           callbacks=[
                               autotvm.callback.progress_bar(n_trial, prefix=prefix),
                               autotvm.callback.log_to_file(tmp_log_file)])

        # pick best records to a cache file
        autotvm.record.pick_best(tmp_log_file, log_filename)
        os.remove(tmp_log_file)
    mod, params = graph2relay(graph, batch_size)
    input_shape = (batch_size,) + tuple(graph.enter_node.output_shape)
    out_shape = (batch_size,) + tuple(graph.blocks[-1].exit_node.output_shape)
    # print(input_shape, out_shape)

    tasks = autotvm.task.extract_from_program(mod["main"], target=target, target_host=target_host,
                                              params=params, ops=(relay.op.nn.conv2d,))

    # run tuning tasks
    if os.path.exists(log_file):
        print(f"Tuned config found, use {log_file} as config")
    else:
        print("Tuning...")
        tune_tasks(tasks, **tuning_option)

    # compile kernels with history best records
    with autotvm.apply_history_best(log_file):
        # print("Compile...")
        with relay.build_config(opt_level=3):  # note: opt_level=3 can be problematic here
            graph, lib, params = relay.build_module.build(mod, target=target, target_host=target_host, params=params)

        return graph, lib, params
Example #9
dtype = 'float32'

device_key = 'tx2'

tuning_option = {
    'log_filename': log_file,

    'tuner': 'xgb',
    'n_trial': 200,
    'early_stopping': 600,

    'measure_option': autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=10),
        #runner=autotvm.LocalRunner(number=20, repeat=3, timeout=4, min_repeat_ms=150),
        runner=autotvm.RPCRunner(
            device_key,  # change the device key to your key
            '0.0.0.0', 9192,
            number=5, repeat=3, timeout=100, min_repeat_ms=150)
    ),
}

def tune_tasks(tasks,
              measure_option,
              tuner='xgb',
              n_trial=200,
              early_stopping=None,
              log_filename='tuning.log',
              use_transfer_learning=True,
              try_winograd=True):
    if try_winograd:
        for i in range(len(tasks)):
            try:    # try winograd template
Example #10
    tuning_opt = {
        'log_filename':
        'resnet-18.log',
        'tuner':
        'random',
        'n_trial':
        1e9,
        'early_stopping':
        None,
        'measure_option':
        autotvm.measure_option(builder=autotvm.LocalBuilder(
            build_func=vta.vta_autotvm_build_func),
                               runner=autotvm.RPCRunner(
                                   env.TARGET,
                                   tracker_host,
                                   tracker_port,
                                   number=4,
                                   repeat=3,
                                   timeout=60,
                                   check_correctness=True))
    }
    tune_tasks(tasks, **tuning_opt)

    # compile kernels with history best records
    with autotvm.tophub.context(target,
                                extra_files=[tuning_opt['log_filename']]):

        # ResNet parameters
        input_shape = (1, 3, 224, 224)
        dtype = 'float32'

        # Compile network
Example #11
dtype = 'float32'

source = 'darknet'

tuning_option = {
    'log_filename': log_file,

    'tuner': 'xgb',
    'n_trial': 200,
    'early_stopping': 100,

    'measure_option': autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=1000),
        runner=autotvm.RPCRunner(
            'jetbot', '0.0.0.0', 9190,
            number=5,
            timeout=1000,
        )
    ),
}


def get_tf_yolov3_tiny(
        model_path=("/hdd02/zhangyiyang/Tensorflow-YOLOv3/"
                    "weights/raw-yolov3-tiny.pb"),
        outputs=['yolov3_tiny/concat_6'],):
    input_shape = (1, 416, 416, 3)

    with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
        graph_def = tf.compat.v1.GraphDef()
        graph_def.ParseFromString(f.read())
Example #12
        # Workload parameters
        N = wl.batch
        CI = wl.in_filter
        CO = wl.out_filter

        task = autotvm.task.create(dense,
                                   args=(N, CI, CO),
                                   target=tvm.target.vta(),
                                   target_host=env.target_host,
                                   template_key='direct')
        print(task.config_space)

        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(
                build_func=vta.vta_autotvm_build_func),
            runner=autotvm.RPCRunner(
                env.TARGET,
                tracker_host,
                int(tracker_port),
                number=4,
                repeat=3,
                timeout=10000,
                check_correctness=True))

        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(n_trial=len(task.config_space),
                   measure_option=measure_option,
                   callbacks=[autotvm.callback.log_to_file('dense.log')])

        print("\nBest tuner config:")
        print(tuner.best_config)
Example #13
#    titanx       2      2     0
#    gfx900       1      1     0
#    ----------------------------------
#
# Finally, we need to change the tuning option to use RPCRunner. Use the code below
# to replace the corresponding part above.

tuning_option = {
    "log_filename":
    log_file,
    "tuner":
    "xgb",
    "n_trial":
    2000,
    "early_stopping":
    600,
    "measure_option":
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=10),
        runner=autotvm.RPCRunner(
            "1080ti",  # change the device key to your key
            "0.0.0.0",
            9190,
            number=20,
            repeat=3,
            timeout=4,
            min_repeat_ms=150,
        ),
    ),
}
Example #14
print('network:', network)
log_file = "%s.log" % network
dtype='float32'

tuning_option = {
    'log_filename': log_file,

    'tuner': 'xgb',
    'n_trial': NTRIAL,
    'early_stopping': 600,

    'measure_option': autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=10),
        #runner=autotvm.LocalRunner(number=20, repeat=3, timeout=4, min_repeat_ms=150),
        runner=autotvm.RPCRunner(
            '1080ti',
            '0.0.0.0', 9123,
            number=5, timeout=10000)
    ),
}
def conv_5x5(net, out_channels, prefix, activation=True, kernel=(5,5)):
    # 5x5 Convolution
    weight = relay.var(prefix + "_weight")
    net = relay.nn.conv2d(
        net, weight,
        strides=(1,1), padding=(0, 0),
        channels=out_channels,
        kernel_size=kernel)

    # BatchNorm
    base_name = prefix + "_bn"
    gamma = relay.var(base_name + "_gamma")
Example #15
stop_pack = "nn.global_avg_pool2d"

# Tuning option
log_file = "%s.%s.log" % (device, network)
tuning_option = {
    "log_filename": log_file,
    "tuner": "random",
    "n_trial": 1000,
    "early_stopping": None,
    "measure_option": autotvm.measure_option(
        builder=autotvm.LocalBuilder(),
        runner=autotvm.RPCRunner(
            env.TARGET,
            host=tracker_host,
            port=tracker_port,
            number=5,
            timeout=60,
            module_loader=vta.module_loader(),
            # check_correctness=True, # TODO: re-enable when check_correctness works again.
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device is under-powered or your conv2d operators are large, consider
Example #16
    "log_filename":
    log_file,
    "tuner":
    "xgb",
    "n_trial":
    1500,
    "early_stopping":
    800,
    "measure_option":
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(
            build_func="ndk" if use_android else "default"),
        runner=autotvm.RPCRunner(
            device_key,
            host="127.0.0.1",
            port=9190,
            number=5,
            timeout=10,
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device runs very slowly or your conv2d operators have many GFLOPs, consider
#   setting the timeout larger.
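# Illustrative sketch only (all values below are assumptions, not from the source):
# per the note above, a larger time budget simply means raising these knobs, assuming
# the truncated dict above is bound to `tuning_option` as in the other examples.
tuning_option["n_trial"] = 4000          # run more trials per task
tuning_option["early_stopping"] = 2000   # tolerate more non-improving trials
tuning_option["measure_option"] = autotvm.measure_option(
    builder=autotvm.LocalBuilder(
        build_func="ndk" if use_android else "default"),
    runner=autotvm.RPCRunner(
        device_key,
        host="127.0.0.1",
        port=9190,
        number=5,
        timeout=60,                      # larger timeout for slow devices / big conv2d ops
    ),
)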
Example #17
        task = autotvm.task.create(
            dense,
            args=(N, CI, CO),
            target=tvm.target.vta(),
            target_host=env.target_host,
            template_key="direct",
        )
        print(task.config_space)

        # Tune
        measure_option = autotvm.measure_option(
            builder=autotvm.LocalBuilder(),
            runner=autotvm.RPCRunner(
                env.TARGET,
                host=tracker_host,
                port=int(tracker_port),
                number=5,
                timeout=60,
                check_correctness=True,
            ),
        )

        # Run Tuner
        tuner = autotvm.tuner.RandomTuner(task)
        tuner.tune(
            n_trial=len(task.config_space),
            early_stopping=None,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(len(task.config_space), prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
Example #18
def drive_tune(args):
    """Invoke auto-tuning with command line arguments

    Parameters
    ----------
    args: argparse.Namespace
        Arguments from command line parser.
    """

    # Validate extra arguments before importing the model, so that obvious errors
    # are reported early.
    if args.rpc_tracker:
        parsed_url = urlparse("//%s" % args.rpc_tracker)
        rpc_hostname = parsed_url.hostname
        rpc_port = parsed_url.port or 9090
        logger.info("RPC tracker hostname: %s", rpc_hostname)
        logger.info("RPC tracker port: %s", rpc_port)

        if not args.rpc_key:
            raise common.TVMCException(
                "need to provide an RPC tracker key (--rpc-key) for remote tuning"
            )

    target = common.target_from_cli(args.target)
    mod, params = frontends.load_model(args.FILE, args.model_format)

    # min_repeat_ms should be:
    # a. the value provided by the user, if any, or
    # b. 0ms in case target is "cpu"; otherwise 1000ms
    if args.min_repeat_ms is not None:
        min_repeat_ms = args.min_repeat_ms
    else:
        min_repeat_ms = 0 if target.keys[0] == "cpu" else 1000
        logger.debug("Default --min-repeat-ms for this target is %s",
                     min_repeat_ms)

    tasks = get_tuning_tasks(
        mod=mod,
        params=params,
        target=target,
        target_host=args.target_host,
        alter_layout=args.desired_layout,
    )

    if args.rpc_tracker:

        runner = autotvm.RPCRunner(
            key=args.rpc_key,
            host=rpc_hostname,
            port=rpc_port,
            number=args.number,
            repeat=args.repeat,
            n_parallel=args.parallel,
            timeout=args.timeout,
            min_repeat_ms=min_repeat_ms,
        )
    else:
        logger.info("starting localhost tuning")
        runner = autotvm.LocalRunner(
            number=args.number,
            repeat=args.repeat,
            timeout=args.timeout,
            min_repeat_ms=min_repeat_ms,
        )

    tuning_option = {
        "tuner":
        args.tuner,
        "trials":
        args.trials,
        "early_stopping":
        args.early_stopping,
        "measure_option":
        autotvm.measure_option(
            builder=autotvm.LocalBuilder(build_func="default"), runner=runner),
        "tuning_records":
        args.tuning_records,
    }
    logger.debug(" tuning options: %s", tuning_option)

    tune_tasks(tasks, args.output, **tuning_option)
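# Hypothetical invocation sketch (argument values are assumptions, not from the
# source): drive_tune() expects an argparse.Namespace, so a programmatic call could
# be wired up like this.
import argparse

args = argparse.Namespace(
    FILE="model.onnx",        # model file to tune (assumed)
    model_format=None,
    target="llvm",
    target_host=None,
    desired_layout=None,
    rpc_tracker=None,         # None -> local tuning; "host:port" for remote
    rpc_key=None,
    min_repeat_ms=None,       # None -> derived from the target, as above
    number=10,
    repeat=1,
    parallel=4,
    timeout=10,
    tuner="xgb",
    trials=1000,
    early_stopping=None,
    tuning_records=None,
    output="tuning_records.json",
)
drive_tune(args)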
Example #19
# Set up tuning options

tuning_option = {
    'log_filename':
    log_file,
    'tuner':
    tuner,
    'early_stopping':
    None,
    'n_trial':
    trials,
    'measure_option':
    autotvm.measure_option(builder=autotvm.LocalBuilder(build_func='default'),
                           runner=autotvm.RPCRunner(
                               device_key,
                               host='fleet.cs.washington.edu',
                               port=9190,
                               number=1,
                               timeout=20)),
}


def tune_kernels(tasks,
                 measure_option,
                 tuner=tuner,
                 n_trial=trials,
                 early_stopping=None,
                 log_filename=log_file):

    for i, tsk in enumerate(tasks):
        prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))
Example #20
                                 params=params,
                                 ops=(tvm.relay.op.nn.conv2d,),
                                 target=target,
                                 target_host=env.target_host)

    # Perform Autotuning
    print("Tuning...")
    tuning_opt = {
        'log_filename': opt.log_filename,
        'tuner': opt.tuner,
        'n_trial': 1e9,
        'early_stopping': None,
        'measure_option': autotvm.measure_option(
                builder=autotvm.LocalBuilder(build_func=vta.vta_autotvm_build_func),
                runner=autotvm.RPCRunner(env.TARGET, tracker_host, tracker_port,
                    number=4, min_repeat_ms=150, repeat=opt.measurements, timeout=60,
                    check_correctness=True))
    }
    tune_tasks(tasks, **tuning_opt)

    # Compile kernels with history best records
    with autotvm.tophub.context(target, extra_files=[opt.log_filename]): 

        # Compile network
        print("Compiling network with best tuning parameters...")
        with relay.build_config(opt_level=3, disabled_pass={"AlterOpLayout"}):
            if target.device_name != "vta":
                graph, lib, params = relay.build(
                    relay_prog, target=target,
                    params=params, target_host=env.target_host)
            else:
Example #21
#    ----------------------------------
#    1080ti       4      4     0
#    titanx       2      2     0
#    gfx900       1      1     0
#    ----------------------------------
#
# Finally, we need to change the tuning option to use RPCRunner. Use the code below
# to replace the corresponding part above.

tuning_option = {
    'log_filename':
    log_file,
    'tuner':
    'xgb',
    'n_trial':
    2000,
    'early_stopping':
    600,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=10),
        runner=autotvm.RPCRunner(
            '1080ti',  # change the device key to your key
            'localhost',
            9190,
            number=20,
            repeat=3,
            timeout=4),
    ),
}
Example #22
#### TUNING OPTION ####
network = "resnet-18"
log_file = "%s.%s.log" % (device_key, network)
dtype = "float32"

tuning_option = {
    "log_filename": log_file,
    "tuner": "xgb",
    "n_trial": 1000,
    "early_stopping": 450,
    "measure_option": autotvm.measure_option(
        builder=autotvm.LocalBuilder(build_func="ndk" if use_android else "default"),
        runner=autotvm.RPCRunner(
            device_key,
            host="0.0.0.0",
            port=9190,
            number=10,
            timeout=5,
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device runs very slowly or your conv2d operators have many GFLOPs, consider
#   setting the timeout larger.
Example #23
#    1080ti       4      4     0
#    titanx       2      2     0
#    gfx900       1      1     0
#    ----------------------------------
#
# Finally, we need to change the tuning option to use RPCRunner. Use the code below
# to replace the corresponding part above.

tuning_option = {
    'log_filename':
    log_file,
    'tuner':
    'xgb',
    'n_trial':
    2000,
    'early_stopping':
    600,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=10),
        runner=autotvm.RPCRunner(
            '1080ti',  # change the device key to your key
            '0.0.0.0',
            9190,
            number=20,
            repeat=3,
            timeout=4,
            min_repeat_ms=150),
    ),
}
Example #24
        opt.log_filename,
        "tuner":
        opt.tuner,
        "n_trial":
        1e9,
        "early_stopping":
        None,
        "measure_option":
        autotvm.measure_option(
            builder=autotvm.LocalBuilder(
                build_func=vta.vta_autotvm_build_func),
            runner=autotvm.RPCRunner(
                env.TARGET,
                tracker_host,
                tracker_port,
                number=4,
                min_repeat_ms=150,
                repeat=opt.measurements,
                timeout=60,
                # check_correctness=True, # TODO: re-enable when check_correctness works again.
            ),
        ),
    }
    tune_tasks(tasks, **tuning_opt)

    # Compile kernels with history best records
    with autotvm.tophub.context(target, extra_files=[opt.log_filename]):

        # Compile network
        print("Compiling network with best tuning parameters...")
        if target.device_name != "vta":
            with tvm.transform.PassContext(opt_level=3,
Example #25
    'log_filename':
    log_file,
    'tuner':
    'xgb',
    'n_trial':
    1000,
    'early_stopping':
    450,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(
            build_func='ndk' if use_android else 'default'),
        runner=autotvm.RPCRunner(
            device_key,
            host='localhost',
            port=9190,
            number=10,
            timeout=5,
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device runs very slowly or your conv2d operators have many GFLOPs, consider
#   setting the timeout larger.
Example #26
    'log_filename':
    log_file,
    'tuner':
    'xgb',
    'n_trial':
    10,
    'early_stopping':
    450,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(
            build_func='ndk' if use_android else 'default'),
        runner=autotvm.RPCRunner(
            device_key,
            host=tracker_host,
            port=tracker_port,
            number=1,
            timeout=5,
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device runs very slowly or your conv2d operators have many GFLOPs, consider
#   setting the timeout larger.
Example #27
log_file = "%s.%s.log" % (device, network)
tuning_option = {
    'log_filename':
    log_file,
    'tuner':
    'random',
    'n_trial':
    1000,
    'early_stopping':
    None,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(),
        runner=autotvm.RPCRunner(env.TARGET,
                                 host=tracker_host,
                                 port=tracker_port,
                                 number=5,
                                 timeout=60,
                                 check_correctness=True),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device is under-powered or your conv2d operators are large, consider
#   setting a longer timeout.
#
Example #28
    'log_filename':
    log_file,
    'tuner':
    'xgb',
    'n_trial':
    1000,
    'early_stopping':
    800,
    'measure_option':
    autotvm.measure_option(
        builder=autotvm.LocalBuilder(
            build_func='ndk' if use_android else 'default'),
        runner=autotvm.RPCRunner(
            device_key,
            host='fleet',
            port=9190,
            number=5,
            timeout=10,
        ),
    ),
}

####################################################################
#
# .. note:: How to set tuning options
#
#   In general, the default values provided here work well.
#   If you have enough time budget, you can set :code:`n_trial` and :code:`early_stopping`
#   to larger values, which makes the tuning run longer.
#   If your device runs very slowly or your conv2d operators have many GFLOPs, consider
#   setting the timeout larger.
Example #29
network = 'new-mobilenetv2-128_S'
log_file = "%s,%s.log" % (device_key, network)
dtype = 'float32'


tuning_option = {
    'log_filename': log_file,
    'tuner': 'xgb',
    'n_trial': 1500,
    'early_stopping': 800,

    'measure_option': autotvm.measure_option(
        builder=autotvm.LocalBuilder(build_func='default'),
        runner=autotvm.RPCRunner(
            device_key, host='0.0.0.0', port=9090,
            number=5,
            timeout=10,
        )
    ),
}


# You can skip the implementation of this function for this tutorial.
def tune_tasks(tasks,
               measure_option,
               tuner='xgb',
               n_trial=1000,
               early_stopping=None,
               log_filename='tuning.log',
               use_transfer_learning=False,
               try_winograd=True,
Example #30
        builder=autotvm.LocalBuilder(timeout=10),
        runner=autotvm.LocalRunner(number=20, repeat=3, timeout=4, min_repeat_ms=150)),
}
tuning_rpc_option = {
    'log_filename': log_file,

    'tuner': 'xgb',
    'n_trial': 1000,
    'early_stopping': 1000,

    'measure_option': autotvm.measure_option(
        builder=autotvm.LocalBuilder(timeout=1000),
        runner=autotvm.RPCRunner(
            'nano', host='0.0.0.0', port=9190,
            number=2,
            repeat=3,
            timeout=2000,
            # min_repeat_ms=150
        ),
    ),
}
def prune_old_tasks(tasks, log_file):
    tmp_log_file = log_file + ".tmp"
    if os.path.isfile(tmp_log_file):
        autotvm.record.pick_best(tmp_log_file, log_file)
    if os.path.isfile(log_file):
        new_tasks = []
        history = autotvm.record.ApplyHistoryBest(log_file)
        for task in tasks:
            if history._query_inside(task.target, task.workload) is None:
                new_tasks.append(task)
        return new_tasks
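# Usage sketch (illustrative, not from the original source): filter out tasks that the
# existing log already covers before launching another tuning run. `mod`, `params`,
# `target`, and `log_file` are assumed to be defined as in the examples above.
tasks = autotvm.task.extract_from_program(mod["main"], target=target,
                                          params=params,
                                          ops=(relay.op.nn.conv2d,))
tasks = prune_old_tasks(tasks, log_file)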