def compare_tf_tvm(gdef, input_, output_, runtime="vm", output_tensors=None):
    """Compare TF and TVM execution for the same input.

    Parameters
    ----------
    gdef: TF2 graph def extracted to be fed into from_tensorflow parser.
        (https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto)

    input_: a single numpy array object

    output_: the expected output from TF to match TVM output with

    runtime: choose TVM runtime; either "vm" for VirtualMachine or "graph" for GraphExecutor

    output_tensors : List of output tensor names (Optional)
        if not specified then the last node is assumed as graph output.
    """
    mod, params = from_tensorflow(gdef, outputs=output_tensors)

    # Dispatch table: each entry compiles the module and runs it on `input_`.
    runners = {
        "vm": lambda: run_vm(compile_vm(mod, params), input_),
        "graph": lambda: run_graph_executor(compile_graph_executor(mod, params), input_),
    }
    if runtime not in runners:
        raise RuntimeError("Runtime input not supported: %s" % runtime)
    tvm_out = runners[runtime]()

    tvm.testing.assert_allclose(output_, tvm_out, atol=1e-5)
def check_equal(graph, tf_out):
    """Convert `graph` through the TF frontend, run it on the debug
    interpreter with the extracted params, and assert the result(s)
    match `tf_out` element-wise."""
    mod, params = from_tensorflow(graph.as_graph_def(add_shapes=True))
    executor = relay.create_executor('debug', mod=mod)
    relay_out = executor.evaluate()(**params)

    # Single-tensor result: compare directly and stop.
    if isinstance(relay_out, relay.backend.interpreter.TensorValue):
        np.testing.assert_allclose(tf_out, relay_out.asnumpy())
        return

    # Multi-output result: normalize the TF side to a list and compare pairwise.
    expected = tf_out if isinstance(tf_out, list) else [tf_out]
    actual = [item.asnumpy() for item in relay_out]
    for want, got in zip(expected, actual):
        np.testing.assert_allclose(want, got)
def check_equal(graph, tf_out):
    """Convert `graph` through the TF frontend to a Relay expression,
    evaluate it on the debug interpreter with the extracted params, and
    assert the result(s) match `tf_out` element-wise."""
    expr, params = from_tensorflow(graph.as_graph_def(add_shapes=True))
    executor = relay.create_executor('debug')
    relay_out = executor.evaluate(expr)(**params)

    # Single-tensor result: compare directly and stop.
    if isinstance(relay_out, relay.backend.interpreter.TensorValue):
        np.testing.assert_allclose(tf_out, relay_out.asnumpy())
        return

    # Multi-output result: normalize the TF side to a list and compare pairwise.
    expected = tf_out if isinstance(tf_out, list) else [tf_out]
    actual = [item.asnumpy() for item in relay_out]
    for want, got in zip(expected, actual):
        np.testing.assert_allclose(want, got)
def check_equal(graph, tf_out, input_map=None):
    """Convert `graph` through the TF frontend, run it on the VM executor
    (optionally overriding inputs via `input_map`), and assert the
    result(s) match `tf_out` element-wise."""
    mod, params = from_tensorflow(graph.as_graph_def(add_shapes=True))
    if input_map is not None:
        # Caller-supplied values take precedence over extracted params.
        params.update(input_map)
    relay_out = relay.create_executor("vm", mod=mod).evaluate()(**params)

    # Single-array result: compare directly and stop.
    if isinstance(relay_out, nd.NDArray):
        np.testing.assert_allclose(tf_out, relay_out.numpy())
        return

    # Multi-output result: normalize the TF side and compare pairwise.
    expected = tf_out if isinstance(tf_out, (list, tuple)) else [tf_out]
    actual = [item.numpy() for item in relay_out]
    for want, got in zip(expected, actual):
        np.testing.assert_allclose(want, got)
def run_relay(graph, shape_dict=None, *input_vars):
    """Convert `graph` through the TF frontend and run it on the debug executor.

    Parameters
    ----------
    graph : tf.Graph (presumably; verify against callers)
        Graph whose def (with shapes) is fed to `from_tensorflow`.
    shape_dict : dict, optional
        Input-shape overrides forwarded to `from_tensorflow` as `shape=`.
    *input_vars
        Positional runtime inputs passed to the evaluated function.
        (Renamed from `vars`, which shadowed the builtin; positional-only,
        so callers are unaffected.)

    Returns
    -------
    The executor's output value(s).
    """
    # Extracted params are intentionally unused: inputs come from `input_vars`.
    mod, _params = from_tensorflow(graph.as_graph_def(add_shapes=True), shape=shape_dict)
    executor = relay.create_executor("debug", mod=mod)
    return executor.evaluate()(*input_vars)
def run_relay(graph):
    """Convert `graph` through the TF frontend and evaluate it on the
    debug executor, feeding the extracted params as keyword inputs."""
    mod, params = from_tensorflow(graph.as_graph_def(add_shapes=True))
    return relay.create_executor('debug', mod=mod).evaluate()(**params)