def run_inference_on_image(image):
    """Runs inference on an image.

    Parameters
    ----------
    image: String
        Image file name.

    Returns
    -------
    Nothing
    """
    if not tf.gfile.Exists(image):
        tf.logging.fatal('File does not exist %s', image)
    image_data = tf.gfile.FastGFile(image, 'rb').read()

    # Creates graph from saved GraphDef.
    create_graph()

    with tf.Session() as sess:
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        start = time.time()
        predictions = sess.run(softmax_tensor,
                               {'DecodeJpeg/contents:0': image_data})
        print("==================", time.time() - start)
        predictions = np.squeeze(predictions)

        # Creates node ID --> English string lookup.
        node_lookup = tf_testing.NodeLookup(label_lookup_path=map_proto_path,
                                            uid_lookup_path=label_path)

        # Print top 5 predictions from tensorflow.
        top_k = predictions.argsort()[-5:][::-1]
        print("===== TENSORFLOW RESULTS =======")
        for node_id in top_k:
            human_string = node_lookup.id_to_string(node_id)
            score = predictions[node_id]
            print('%s (score = %.5f)' % (human_string, score))
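
# Usage sketch for the helper above, assuming the frozen Inception GraphDef
# is loadable by create_graph() and that NodeLookup reads the two ImageNet
# label-map files via the module-level map_proto_path / label_path. The
# file names below are illustrative assumptions, not fixed by the listing.
import time

import numpy as np
import tensorflow as tf
import tvm.relay.testing.tf as tf_testing

map_proto_path = 'imagenet_2012_challenge_label_map_proto.pbtxt'  # assumed
label_path = 'imagenet_synset_to_human_label_map.txt'             # assumed

run_inference_on_image('elephant-299.jpg')  # any JPEG on disk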
def run_tvm(pics, number, repeat):
    """Runs a single inference and gives back the time.

    :param pics: The image(s) to run
    :param number: The number of times to run the inference
    :param repeat: The number of times to repeat the measurement
    :return: An array with the time and the result
    """
    # Combine the pictures into one input batch.
    arr = np.ndarray(shape=input_shape, dtype=dtype)
    p = 0
    for ip in pics:
        arr[p] = ip.astype(dtype)
        p = p + 1
    m.set_input("data", tvm.nd.array(arr))

    # Actually run and time the inference.
    prof_res = m.module.time_evaluator("run", ctx, number=number, repeat=repeat)()

    # Get output.
    res = []
    if platform == 'MXNet':
        for i in range(len(pics)):
            res.append(synset[np.argmax(m.get_output(0).asnumpy()[i])])
    if platform == 'PyTorch':
        # Get top-1 result for TVM.
        for i in range(len(pics)):
            top1_tvm = np.argmax(m.get_output(0).asnumpy()[i])
            tvm_class_key = class_id_to_key[top1_tvm]
            res.append(key_to_classname[tvm_class_key])
    if platform == 'TensorFlow':
        pre = np.squeeze(m.get_output(0, tvm.nd.empty((1, 1008), "float32")).asnumpy())
        node_lookup = tf_testing.NodeLookup(label_lookup_path=map_proto_path,
                                            uid_lookup_path=label_path)
        top_k = pre.argsort()[-5:][::-1]
        res = node_lookup.id_to_string(top_k[0])
    return [prof_res, res]
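
# Usage sketch for run_tvm(). It assumes the surrounding script has already
# built the graph module `m` and defined input_shape, dtype, platform, and
# the platform-specific lookup tables; the batch below is random data,
# purely for illustration.
batch = [np.random.uniform(size=input_shape[1:]) for _ in range(input_shape[0])]
prof_res, labels = run_tvm(batch, number=10, repeat=3)

# time_evaluator returns a ProfileResult: .mean is the mean wall time per
# run in seconds, .results holds the `repeat` individual measurements.
print('mean inference time: %.2f ms' % (prof_res.mean * 1000))
print('per-repeat times (s):', list(prof_res.results))
print('predicted labels:', labels)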
m.set_input('DecodeJpeg/contents', tvm.nd.array(x.astype(dtype)))
m.set_input(**params)
# execute
m.run()
# get outputs
tvm_output = m.get_output(0, tvm.nd.empty((1, 1008), 'float32'))

######################################################################
# Process the output
# ------------------
# Process the model output to human readable text for InceptionV1.
predictions = tvm_output.asnumpy()
predictions = np.squeeze(predictions)

# Creates node ID --> English string lookup.
node_lookup = tf_testing.NodeLookup(label_lookup_path=map_proto_path,
                                    uid_lookup_path=label_path)

# Print top 5 predictions from TVM output.
top_k = predictions.argsort()[-5:][::-1]
for node_id in top_k:
    human_string = node_lookup.id_to_string(node_id)
    score = predictions[node_id]
    print('%s (score = %.5f)' % (human_string, score))

######################################################################
# Inference on tensorflow
# -----------------------
# Run the corresponding model on tensorflow

def create_graph():
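    """Creates a graph from saved GraphDef file."""
    # The listing breaks off at this definition; the body below is a sketch
    # of the usual loader, assuming `model_path` points at the frozen
    # classify_image GraphDef (the name and the ProcessGraphDefParam call
    # are assumptions based on the TVM TensorFlow tutorial this mirrors).
    with tf.gfile.FastGFile(model_path, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def, name='')
        # Fix up any unsupported or versioned ops before use.
        graph_def = tf_testing.ProcessGraphDefParam(graph_def)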
m.set_input(**params)
# execute
m.run()
# get outputs
tvm_output = m.get_output(0, tvm.nd.empty((1, 1008), 'float32'))

######################################################################
# Process the output
# ------------------
# Process the model output to human readable text for InceptionV1.
predictions = tvm_output.asnumpy()
predictions = np.squeeze(predictions)

# Creates node ID --> English string lookup.
node_lookup = tf_testing.NodeLookup(
    label_lookup_path=os.path.join("./", map_proto),
    uid_lookup_path=os.path.join("./", lable_map))

# Print top 5 predictions from TVM output.
top_k = predictions.argsort()[-5:][::-1]
for node_id in top_k:
    human_string = node_lookup.id_to_string(node_id)
    score = predictions[node_id]
    print('%s (score = %.5f)' % (human_string, score))

######################################################################
# Inference on tensorflow
# -----------------------
# Run the corresponding model on tensorflow
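
######################################################################
# For context: how the graph module ``m`` used in the listings above is
# typically constructed on the older TVM API these snippets target (the
# separate ``m.set_input(**params)`` call implies graph_runtime rather
# than the newer graph_executor). A sketch only; ``graph_def``,
# ``shape_dict``, and ``layout`` are assumed to come from earlier setup.

import tvm
from tvm import relay
from tvm.contrib import graph_runtime

mod, params = relay.frontend.from_tensorflow(graph_def,
                                             layout=layout,
                                             shape=shape_dict)
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(mod, target='llvm', params=params)

ctx = tvm.cpu(0)
m = graph_runtime.create(graph, lib, ctx)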