Example #1
def nnvm_run(opt_level: int = 2,
             nthreads: Optional[int] = None,
             iname=MODEL_INPUT,
             oname=MODEL_OUTPUT,
             init_method='std',
             nwarmup: int = 10,
             nloops: int = 100) -> Result:
    """ Compile Model using TVM and run it multiple times """
    r = Result()
    r.desc = 'tvm running time'
    try:
        graph, lib, params, i, o = tvm_import(opt_level, iname, oname)

        if nthreads is None:
            nthreads = 20

        get_global_func('runtime.config_threadpool')(1, nthreads)
        m = graph_runtime.create(graph, lib, ctx=tvm.cpu(0))
        print('compiled')

        perfs: List[float] = []
        for it in range(nwarmup + nloops):
            i_data = common_init(init_method,
                                 shape=i.shape.as_list(),
                                 dtype=i.dtype.as_numpy_dtype())
            m.set_input(iname, tvm.nd.array(i_data))
            m.set_input(**params)

            b = perf_counter()
            m.run()
            e = perf_counter()
            o_data = m.get_output(
                0, tvm.nd.empty(o.shape.as_list(), o.dtype.name)).asnumpy()
            print('tvm', e - b)

            if it >= nwarmup:
                perfs.append(e - b)

        r.set_perfs(perfs)
        r.last_data = o_data
    except KeyboardInterrupt:
        raise
    except Exception as e:
        r.err = e

    return r
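The 'runtime.config_threadpool' lookup above is a real TVM packed function; here is a minimal standalone sketch of the same call, assuming only `import tvm` (the thread count of 4 is arbitrary):

import tvm

# First argument is the affinity mode (1, as in the example above),
# second is the number of worker threads to use.
config_threadpool = tvm.get_global_func('runtime.config_threadpool')
config_threadpool(1, 4)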
Example #2
def create(graph_json_str, libmod, ctx, dump_root=None):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to be deployed, in the JSON format output by nnvm graph.
        The graph can only contain one operator (tvm_op) that
        points to the name of a PackedFunc in libmod.

    libmod : tvm.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, can be local or remote.

    dump_root : str
        The folder in which the outputs should be kept.
        If None, a temporary folder /tmp/tvmdbg<rand_string> is created and used for the dumping.

    Returns
    -------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))
    try:
        fcreate = get_global_func("tvm.graph_runtime_debug.create")
    except ValueError:
        raise ValueError(
            "Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
            "config.cmake and rebuild TVM to enable debug mode"
        )

    ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
    if num_rpc_ctx == len(ctx):
        libmod = rpc_base._ModuleHandle(libmod)
        try:
            fcreate = ctx[0]._rpc_sess.get_function(
                "tvm.graph_runtime_debug.remote_create"
            )
        except ValueError:
            raise ValueError(
                "Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
                "config.cmake and rebuild TVM to enable debug mode"
            )
    func_obj = fcreate(graph_json_str, libmod, *device_type_id)
    return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
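A minimal usage sketch for this debug create; it assumes `graph`, `lib` and `params` come from an earlier nnvm.compiler.build call (as in Examples #1 and #7), so treat it as an illustration rather than a documented recipe:

import tvm

# Sketch only: `graph`, `lib` and `params` are assumed to exist from a
# prior nnvm.compiler.build; dump_root is an arbitrary writable path.
m = create(graph, lib, tvm.cpu(0), dump_root='/tmp/tvmdbg_demo')
m.set_input(**params)
m.run()  # per-node outputs are dumped under dump_root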
Example #3
def create(graph_json_str, libmod, ctx, dump_root=None):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to be deployed, in the JSON format output by nnvm graph.
        The graph can only contain one operator (tvm_op) that
        points to the name of a PackedFunc in libmod.

    libmod : tvm.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, can be local or remote.

    dump_root : str
        The folder in which the outputs should be kept.
        If None, a temporary folder /tmp/tvmdbg<rand_string> is created and used for the dumping.

    Returns
    -------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))
    try:
        fcreate = get_global_func("tvm.graph_runtime_debug.create")
    except ValueError:
        raise ValueError("Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in " \
                         "config.cmake and rebuild TVM to enable debug mode")

    ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
    if num_rpc_ctx == len(ctx):
        libmod = rpc_base._ModuleHandle(libmod)
        try:
            fcreate = ctx[0]._rpc_sess.get_function("tvm.graph_runtime_debug.remote_create")
        except ValueError:
            raise ValueError("Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in " \
                             "config.cmake and rebuild TVM to enable debug mode")
    func_obj = fcreate(graph_json_str, libmod, *device_type_id)
    return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
Example #4
def create(graph_json_str, libmod, ctx, dbg_ux=FRONTEND_NONE, dump_root=None):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to be deployed, in the JSON format output by nnvm graph.
        The graph can only contain one operator (tvm_op) that
        points to the name of a PackedFunc in libmod.

    libmod : tvm.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, can be local or remote.

    dbg_ux : str
        Selects which UX the user needs, e.g. curses, tensorboard, or None.
        None will just do the dumping.

    dump_root : str
        The folder in which the outputs should be kept.
        If None, a temporary folder /tmp/tvmdbg<rand_string> is created and used for the dumping.

    Returns
    -------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))
    device_type = ctx.device_type
    device_id = ctx.device_id
    try:
        fcreate = get_global_func("tvm.graph_runtime_debug.create")
    except ValueError:
        raise ValueError("Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in " \
                         "config.cmake and rebuild TVM to enable debug mode")
    func_obj = fcreate(graph_json_str, libmod, device_type, device_id)
    return GraphModuleDebug(func_obj, ctx, graph_json_str, dbg_ux, dump_root)
Example #5
def create(graph_json_str, libmod, ctx, frontend=FRONTEND_CURSES):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to be deployed, in the JSON format output by nnvm graph.
        The graph can only contain one operator (tvm_op) that
        points to the name of a PackedFunc in libmod.

    libmod : tvm.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, can be local or remote.

    frontend : str
        Selects which UI the user needs; by default it is the curses UI.

    Returns
    -------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))
    device_type = ctx.device_type
    device_id = ctx.device_id
    try:
        fcreate = get_global_func("tvm.graph_runtime_debug.create")
    except ValueError:
        raise ValueError("Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in " \
                         "config.cmake and rebuild TVM to enable debug mode")
    func_obj = fcreate(graph_json_str, libmod, device_type, device_id)
    return GraphModuleDebug(func_obj, ctx, graph_json_str, frontend)
Example #6
def test2():
    f = get_global_func("test2")
    print(f)
    x = relay.Var('x')
    y = relay.exp(x)
    f(y)
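The counterpart of get_global_func is registration. A hedged sketch of how a "test2" packed function could be registered from Python so the lookup above resolves (in the original test this function is presumably registered on the C++ side):

import tvm

# Hypothetical Python-side registration; the body just echoes the
# relay expression it receives.
@tvm.register_func("test2")
def _test2(expr):
    print("received relay expression:", expr)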
Example #7
def nnvmS_run(opt_level: int = 2,
              nthreads: Optional[int] = None,
              iname: str = MODEL_INPUT,
              oname: str = MODEL_OUTPUT,
              init_method='std',
              nwarmup: int = 0,
              nloops: int = 1,
              **kwargs) -> Result:
    """ Run staged NNVM model """
    r = Result()
    print("Warning: unused args:", kwargs) if kwargs != {} else None
    try:
        g, gd = fropen()
        # We still need from_tensorflow to get the parameters
        _, params = from_tensorflow(gd)
        mo, savepoints = staged_model()
        nnvm_graph = nnvm.graph.create(savepoints[oname])
        print('synthesized')

        i = g.get_tensor_by_name(iname + ':0')
        o = g.get_tensor_by_name(oname + ':0')
        i_shape_dict = {iname + ':0': i.shape.as_list()}
        i_dtype_dict = {iname + ':0': i.dtype.as_numpy_dtype()}

        with nnvm.compiler.build_config(opt_level=opt_level):
            graph, lib, params = nnvm.compiler.build(graph=nnvm_graph,
                                                     target='llvm',
                                                     shape=i_shape_dict,
                                                     dtype=i_dtype_dict,
                                                     params=params)
            # print(graph.ir())

        if nthreads is None:
            nthreads = 20

        get_global_func('runtime.config_threadpool')(1, nthreads)
        m = graph_runtime.create(graph, lib, ctx=tvm.cpu(0))
        print('compiled')

        perfs: List[float] = []
        for it in range(nwarmup + nloops):
            i_data = common_init(init_method,
                                 shape=i.shape.as_list(),
                                 dtype=i.dtype.as_numpy_dtype())
            m.set_input(iname, tvm.nd.array(i_data))
            m.set_input(**params)

            b = perf_counter()
            m.run()
            e = perf_counter()
            o_data = m.get_output(
                0, tvm.nd.empty(o.shape.as_list(), o.dtype.name)).asnumpy()
            print('tvms', e - b)

            if it >= nwarmup:
                perfs.append(e - b)

        r.set_perfs(perfs)
        r.last_data = o_data
    except KeyboardInterrupt:
        raise
    except Exception as e:
        warn('exception: ' + str(e))
        r.err = e
    return r
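For comparison with Example #1, a hypothetical driver that contrasts the staged and plain runs; the `perfs` attribute is an assumption inferred from set_perfs above, not a documented part of Result:

import numpy as np

# Assumes Result stores the recorded timings in a `perfs` attribute,
# as suggested by set_perfs in the examples above.
r_plain = nnvm_run(nwarmup=10, nloops=100)
r_staged = nnvmS_run(nwarmup=10, nloops=100)
print('plain  mean:', np.mean(r_plain.perfs))
print('staged mean:', np.mean(r_staged.perfs))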