def create(graph_json_str, libmod, ctx, dump_root=None):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to be deployed in json format output by nnvm graph.
        The graph can only contain one operator (tvm_op) that points to
        the name of a PackedFunc in the libmod.

    libmod : tvm.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, can be local or remote.

    dump_root : str
        The folder in which the debug outputs are kept. If None, a temporary
        folder /tmp/tvmdbg<rand_string> is created and used for dumping.

    Returns
    -------
    graph_module : GraphModuleDebug
        Debug Runtime graph module that can be used to execute the graph.
    """
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))
    try:
        fcreate = get_global_func("tvm.graph_runtime_debug.create")
    except ValueError:
        raise ValueError(
            "Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
            "config.cmake and rebuild TVM to enable debug mode"
        )

    ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
    if num_rpc_ctx == len(ctx):
        # All contexts are remote: create the debug runtime through the RPC session.
        libmod = rpc_base._ModuleHandle(libmod)
        try:
            fcreate = ctx[0]._rpc_sess.get_function(
                "tvm.graph_runtime_debug.remote_create"
            )
        except ValueError:
            raise ValueError(
                "Please set '(USE_GRAPH_RUNTIME_DEBUG ON)' in "
                "config.cmake and rebuild TVM to enable debug mode"
            )
    func_obj = fcreate(graph_json_str, libmod, *device_type_id)
    return GraphModuleDebug(func_obj, ctx, graph_json_str, dump_root)
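

# Usage sketch (not part of the runtime itself): the hypothetical helper below
# shows how the `create` above (exposed as tvm.contrib.debugger.debug_runtime.create)
# is typically driven. It assumes the older tvm.relay.build API that returns a
# (graph_json, lib, params) triple; the helper name, the input name "data" and
# its shape are placeholders for whatever the model expects.
def _example_debug_run(mod, params, target="llvm"):
    import numpy as np
    import tvm
    from tvm import relay
    from tvm.contrib.debugger import debug_runtime

    # Compile the Relay module into a graph JSON string, an operator library
    # and the constant parameters.
    graph_json, lib, params = relay.build(mod, target=target, params=params)

    # Per-node outputs and timing are dumped under dump_root
    # (a temporary /tmp/tvmdbg<rand_string> folder when left as None).
    m = debug_runtime.create(graph_json, lib, tvm.cpu(0), dump_root="/tmp/tvmdbg")
    m.set_input(**params)
    m.set_input("data", np.zeros((1, 3, 224, 224), dtype="float32"))
    m.run()
    return m.get_output(0).asnumpy()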


def create(graph_json_str, libmod, ctx):
    """Create a runtime executor module given a graph and module.

    Parameters
    ----------
    graph_json_str : str
        The graph to be deployed in json format, as output by the graph compiler.
        The graph can contain operator (tvm_op) nodes that point to the name
        of a PackedFunc in the libmod.

    libmod : tvm.runtime.Module
        The module of the corresponding function.

    ctx : TVMContext
        The context to deploy the module, only supports CUDA GPU.

    Returns
    -------
    graph_module : GraphModuleCudaGraph
        CUDA graph runtime module that can be used to execute the graph.

    Note
    ----
    See also :py:class:`tvm.contrib.cuda_graph.cuda_graph_runtime.GraphModuleCudaGraph`
    for examples to directly construct a GraphModuleCudaGraph from an exported
    relay compiled library.
    """
    assert isinstance(graph_json_str, string_types)
    try:
        ctx, num_rpc_ctx, device_type_id = graph_runtime.get_device_ctx(libmod, ctx)
        if num_rpc_ctx == len(ctx):
            # All contexts are remote: create the runtime through the RPC session.
            fcreate = ctx[0]._rpc_sess.get_function("tvm.graph_runtime_cuda_graph.create")
        else:
            fcreate = tvm._ffi.get_global_func("tvm.graph_runtime_cuda_graph.create")
    except ValueError:
        raise ValueError(
            "To enable CUDA graph support (experimental), please set "
            "'(USE_GRAPH_RUNTIME_CUGRAPH ON)' in config.cmake and rebuild TVM"
        )

    return GraphModuleCudaGraph(fcreate(graph_json_str, libmod, *device_type_id))
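

# Usage sketch (not part of the runtime itself): the hypothetical helper below
# wraps an already compiled model in the CUDA graph runtime. `graph_json`
# (a JSON string) and `lib` (a tvm.runtime.Module) are assumed to come from a
# prior Relay compilation; the helper name, the input name "data" and its
# shape are placeholders, and only methods inherited from the plain graph
# runtime module (set_input/run/get_output) are used.
def _example_cuda_graph_run(graph_json, lib):
    import numpy as np
    import tvm
    from tvm.contrib.cuda_graph import cuda_graph_runtime

    ctx = tvm.gpu(0)  # the CUDA graph runtime only supports CUDA GPU contexts
    m = cuda_graph_runtime.create(graph_json, lib, ctx)
    m.set_input("data", np.zeros((1, 3, 224, 224), dtype="float32"))
    m.run()  # execute the model through the CUDA graph runtime
    return m.get_output(0).asnumpy()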