def predict(image, sess, softmax_tensor):
    """Classify a single image file and return its top predictions.

    Adapted from run_inference_on_image() in classify_image.py from the
    official TensorFlow tutorial; used by classification workers.

    :param image: filename of the image to be classified (JPEG)
    :param sess: TensorFlow session with the Inception graph loaded
    :param softmax_tensor: tensor used for computing the predictions
    :return: (img_id, result) with img_id being the Instagram image ID
        (the filename without directory or extension) and result a dict
        mapping each of the FLAGS.num_top_predictions most probable
        labels to its prediction confidence as a float.
    """
    img_id = os.path.splitext(os.path.basename(image))[0]
    # Use a context manager so the file handle is closed promptly instead
    # of being leaked until garbage collection.
    with tf.gfile.FastGFile(image, 'rb') as f:
        image_data = f.read()
    # 'DecodeJpeg/contents:0' expects a string tensor containing the raw
    # JPEG encoding of the image.
    predictions = sess.run(softmax_tensor,
                           {'DecodeJpeg/contents:0': image_data})
    predictions = np.squeeze(predictions)
    # Creates node ID --> English string lookup.
    node_lookup = NodeLookup()
    top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
    result = {}
    for node_id in top_k:
        human_string = node_lookup.id_to_string(node_id)
        score = predictions[node_id]
        result[human_string] = float(score)
    return img_id, result
def run_inference_on_image(self, imageList, num_top_predictions=5):
    """Run inference on each image in a list and print top predictions.

    Args:
        imageList: iterable of image file names (JPEG).
        num_top_predictions: number of highest-scoring labels to print
            per image.

    Returns:
        Nothing.
    """
    # Build the graph, session and label lookup ONCE; the original
    # re-created them for every image in the list, which is wasteful.
    # Creates graph from saved GraphDef.
    self.__create_graph()
    with tf.Session() as sess:
        # Some useful tensors:
        # 'softmax:0': normalized prediction across the 1000 labels.
        # 'pool_3:0': next-to-last layer containing a 2048-float
        #             description of the image.
        # 'DecodeJpeg/contents:0': string tensor providing the raw JPEG
        #             encoding of the image.
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        # Creates node ID --> English string lookup.
        node_lookup = NodeLookup()
        for image in imageList:
            if not tf.gfile.Exists(image):
                # tf.logging.fatal only logs at FATAL level; it does not
                # abort, so skip the missing file explicitly instead of
                # crashing on the read below.
                tf.logging.fatal('File does not exist %s', image)
                continue
            # Context manager avoids leaking the file handle.
            with tf.gfile.FastGFile(image, 'rb') as f:
                image_data = f.read()
            # Runs the softmax tensor by feeding the image_data as input
            # to the graph.
            predictions = sess.run(softmax_tensor,
                                   {'DecodeJpeg/contents:0': image_data})
            predictions = np.squeeze(predictions)
            top_k = predictions.argsort()[-num_top_predictions:][::-1]
            for node_id in top_k:
                human_string = node_lookup.id_to_string(node_id)
                score = predictions[node_id]
                print('%s (score = %.5f)' % (human_string, score))
    return
# NOTE(review): on_partitions_assigned is a method of the rebalance-listener
# class instantiated below as CRL(); the class header is not visible in this
# chunk — confirm it reads `class CRL(ConsumerRebalanceListener):`.
def on_partitions_assigned(self, assigned):
    """Rewind the consumer to offset 0 whenever partitions are assigned."""
    print(assigned)
    consumer.seek(TopicPartition("Panda_Media", 0), 0)
    return


# Download the Inception model if it is not present locally.
maybe_download_and_extract()
consumer.subscribe(["Panda_Media"], listener=CRL())
# Creates graph from saved GraphDef.
create_graph()
# Creates node ID --> English string lookup.
node_lookup = NodeLookup()

# read messages
for msg in consumer:
    # deserialize from json
    twete = jsonpickle.decode(msg.value)
    # for each media object in tweet
    for media in twete.entities['media']:
        # base64 jpg string to bytes
        image_data = base64.b64decode(media['data'])
        # make sure image is jpeg; `not is_jpg(...)` instead of the
        # anti-idiom `== False`
        if not is_jpg(image_data):
            print("Invalid panda {0}".format(msg.offset))
            continue
if request.method == 'POST': file = request.files['file'] old_file_name = file.filename if file and allowed_files(old_file_name): filename = rename_filename(old_file_name) file_path = os.path.join(UPLOAD_FOLDER, filename) file.save(file_path) type_name = 'N/A' print('file saved to %s' % file_path) start_time = time.time() out_html = inference(file_path) duration = time.time() - start_time print('duration:[%.0fms]' % (duration*1000)) return result + out_html return result ##http://127.0.0.1:5001/ if __name__ == "__main__": print('listening on port %d' % FLAGS.port) init_graph(model_name=FLAGS.model_name) label_file = FLAGS.label_file if not FLAGS.label_file: label_file, _ = os.path.splitext(FLAGS.model_name) label_file = label_file + '.label' node_lookup = NodeLookup(label_file) app.node_lookup = node_lookup sess = tf.Session() app.sess = sess app.run(host='0.0.0.0', port=FLAGS.port, debug=FLAGS.debug, threaded=True)