def predict_image(sess, graph, input_layer, output_layer, labels, top_x, tensor, output_file):
    """
    Obtains predictions for the image (in tensor representation) from the graph
    and outputs them in the output file.

    :param sess: the tensorflow session to use
    :type sess: tf.Session
    :param graph: the tensorflow graph to use
    :type graph: tf.Graph
    :param input_layer: the name of the input layer in the graph to use
    :type input_layer: str
    :param output_layer: the name of the output layer in the graph to use
    :type output_layer: str
    :param labels: the list of labels to use
    :type labels: list
    :param top_x: the number of labels with the highest probabilities to return, <1 for all
    :type top_x: int
    :param tensor: the image as tensor
    :type tensor: tf.Tensor
    :param output_file: the file to store the predictions in
    :type output_file: str
    """
    probs = tensor_to_probs(graph, input_layer, output_layer, tensor, sess)
    top_probs = top_k_probs(probs, top_x)
    with open(output_file, "w") as rf:
        rf.write("label,probability\n")
        # top_probs contains the indices of the top labels, so index labels/probs directly
        for i in top_probs:
            rf.write(labels[i] + "," + str(probs[i]) + "\n")
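
# A minimal usage sketch for predict_image (file names are hypothetical; assumes the
# load_graph, load_labels and read_tensor_from_image_file helpers from this package):
#
#   graph = load_graph("retrained_graph.pb")
#   labels = load_labels("labels.txt")
#   with tf.compat.v1.Session(graph=graph) as sess:
#       tensor = read_tensor_from_image_file("example.jpg", 299, 299, 0, 255, sess)
#       predict_image(sess, graph, "Placeholder", "final_result", labels, 5,
#                     tensor, "predictions.csv")
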
def main(args=None):
    """
    The main method for parsing command-line arguments and labeling.

    :param args: the commandline arguments, uses sys.argv if not supplied
    :type args: list
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--image", help="image to be processed", required=True)
    parser.add_argument("--graph", help="graph/model to be executed", required=True)
    parser.add_argument("--labels", help="name of file containing labels", required=True)
    parser.add_argument("--input_height", type=int, help="input height", default=299)
    parser.add_argument("--input_width", type=int, help="input width", default=299)
    parser.add_argument("--input_mean", type=int, help="input mean", default=0)
    parser.add_argument("--input_std", type=int, help="input std", default=255)
    parser.add_argument("--input_layer", help="name of input layer", default="Placeholder")
    parser.add_argument("--output_layer", help="name of output layer", default="final_result")
    parser.add_argument("--top_x", type=int, help="output only the top K labels; use <1 for all", default=5)
    args = parser.parse_args(args=args)

    graph = load_graph(args.graph)
    labels = load_labels(args.labels)

    with tf.compat.v1.Session(graph=graph) as sess:
        tensor = read_tensor_from_image_file(args.image,
                                             input_height=args.input_height,
                                             input_width=args.input_width,
                                             input_mean=args.input_mean,
                                             input_std=args.input_std,
                                             sess=sess)
        results = tensor_to_probs(graph, args.input_layer, args.output_layer, tensor, sess)
        top_x = top_k_probs(results, args.top_x)
        if args.top_x > 0:
            print("Top " + str(args.top_x) + " labels")
        else:
            print("All labels")
        for i in top_x:
            print("- " + labels[i] + ":", results[i])
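
# A hypothetical command-line invocation of main() (the script/entry-point name is a
# placeholder and depends on how this module is installed):
#
#   python label_image.py --image example.jpg --graph retrained_graph.pb \
#       --labels labels.txt --top_x 3
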
def generate_stats(sess, graph, input_layer, output_layer, labels, image_dir, image_file_list,
                   height, width, mean, std, output_preds, output_stats, logging_verbosity):
    """
    Evaluates the built model on images from the specified directory, which can be
    limited to the files listed in the image file list.

    :param sess: the tensorflow session to use
    :type sess: tf.Session
    :param graph: the tensorflow graph to use
    :type graph: tf.Graph
    :param input_layer: the name of the input layer in the graph to use
    :type input_layer: str
    :param output_layer: the name of the output layer in the graph to use
    :type output_layer: str
    :param labels: the list of labels to use
    :type labels: list
    :param image_dir: the directory with the images (sub-directories correspond to labels)
    :type image_dir: str
    :param image_file_list: the image file list to use (the keys correspond to labels, and the
                            values contain the images w/o path); uses all images if None
    :type image_file_list: dict
    :param height: the expected height of the images
    :type height: int
    :param width: the expected width of the images
    :type width: int
    :param mean: the mean to use for the images
    :type mean: int
    :param std: the std deviation to use for the images
    :type std: int
    :param output_preds: the file to store the predictions in
    :type output_preds: str
    :param output_stats: the file to store the statistics in
    :type output_stats: str
    :param logging_verbosity: the logging level ('DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL')
    :type logging_verbosity: str
    """
    logging_verbosity = logging_level_verbosity(logging_verbosity)
    tf.compat.v1.logging.set_verbosity(logging_verbosity)

    tf.compat.v1.logging.info("Class labels: %s" % str(labels))

    if not tf.io.gfile.exists(image_dir):
        tf.compat.v1.logging.error("Image directory '" + image_dir + "' not found.")
        return None

    sub_dirs = locate_sub_dirs(image_dir)

    # compile lists of files per label
    if image_file_list:
        tf.compat.v1.logging.info("Using image list: %s" % image_file_list)
        image_list = load_image_list(image_file_list)
    else:
        image_list = dict()
        for label_name in sub_dirs:
            image_list[label_name] = locate_images(sub_dirs[label_name], strip_path=True)

    total = init_counts(labels)
    correct = init_counts(labels)
    incorrect = init_counts(labels)

    with open(output_preds, "w") as pf:
        pf.write("image,actual,predicted,error,probability\n")
        for label_name in sub_dirs:
            if label_name not in image_list:
                continue
            tf.compat.v1.logging.info(label_name)
            sub_dir = sub_dirs[label_name]
            file_list = image_list[label_name]
            count = 0
            for file_name in file_list:
                total[''] += 1
                total[label_name] += 1
                full_name = os.path.join(sub_dir, file_name)
                tensor = read_tensor_from_image_file(full_name, height, width, mean, std, sess)
                probs = tensor_to_probs(graph, input_layer, output_layer, tensor, sess)
                # only the top prediction is recorded and counted
                for i in top_k_probs(probs, 1):
                    pf.write("%s,%s,%s,%s,%f\n" % (full_name, label_name, labels[i],
                                                   label_name != labels[i], probs[i]))
                    if label_name != labels[i]:
                        incorrect[''] += 1
                        incorrect[label_name] += 1
                    else:
                        correct[''] += 1
                        correct[label_name] += 1
                # progress
                count += 1
                if count % 10 == 0:
                    tf.compat.v1.logging.info("%d / %d" % (count, len(file_list)))

    with open(output_stats, "w") as sf:
        sf.write("statistic,value\n")
        keys = sorted(total.keys())
        for key in keys:
            if key == '':
                prefix = "total - "
            else:
                prefix = key + " - "
            num_total = total[key]
            num_correct = correct[key]
            num_incorrect = incorrect[key]
            if num_total > 0:
                acc = num_correct / num_total
            else:
                acc = float("NaN")
            sf.write("%s%s,%d\n" % (prefix, "number of images", num_total))
            sf.write("%s%s,%d\n" % (prefix, "number of correct predictions", num_correct))
            sf.write("%s%s,%d\n" % (prefix, "number of incorrect predictions", num_incorrect))
            sf.write("%s%s,%f\n" % (prefix, "accuracy", acc))
def poll(sess, graph, input_layer, output_layer, labels, in_dir, out_dir, height, width,
         mean, std, top_x, delete):
    """
    Performs continuous predictions on files appearing in the "in_dir" and outputs
    the results in "out_dir".

    :param sess: the tensorflow session to use
    :type sess: tf.Session
    :param graph: the tensorflow graph to use
    :type graph: tf.Graph
    :param input_layer: the name of the input layer in the graph to use
    :type input_layer: str
    :param output_layer: the name of the output layer in the graph to use
    :type output_layer: str
    :param labels: the list of labels to use
    :type labels: list
    :param in_dir: the input directory to poll
    :type in_dir: str
    :param out_dir: the output directory for the results
    :type out_dir: str
    :param height: the expected height of the images
    :type height: int
    :param width: the expected width of the images
    :type width: int
    :param mean: the mean to use for the images
    :type mean: int
    :param std: the std deviation to use for the images
    :type std: int
    :param top_x: the number of labels with the highest probabilities to return, <1 for all
    :type top_x: int
    :param delete: whether to delete the input images (True) or move them to the output directory (False)
    :type delete: bool
    """
    print("Class labels: %s" % str(labels))

    while True:
        any_processed = False
        files = [(in_dir + os.sep + x) for x in os.listdir(in_dir)
                 if (x.lower().endswith(".png") or x.lower().endswith(".jpg"))]

        for f in files:
            any_processed = True
            start = datetime.now()
            print(start, "-", f)

            img_path = out_dir + os.sep + os.path.basename(f)
            roi_csv = out_dir + os.sep + os.path.splitext(os.path.basename(f))[0] + ".csv"
            roi_tmp = out_dir + os.sep + os.path.splitext(os.path.basename(f))[0] + ".tmp"

            tensor = None
            try:
                tensor = read_tensor_from_image_file(f, height, width, mean, std, sess)
            except Exception:
                print(traceback.format_exc())

            try:
                # delete any existing old files in output dir
                if os.path.exists(img_path):
                    try:
                        os.remove(img_path)
                    except:
                        print("Failed to remove existing image in output directory: ", img_path)
                if os.path.exists(roi_tmp):
                    try:
                        os.remove(roi_tmp)
                    except:
                        print("Failed to remove existing ROI file (tmp) in output directory: ", roi_tmp)
                if os.path.exists(roi_csv):
                    try:
                        os.remove(roi_csv)
                    except:
                        print("Failed to remove existing ROI file in output directory: ", roi_csv)
                # delete or move into output dir
                if delete:
                    os.remove(f)
                else:
                    os.rename(f, img_path)
            except:
                img_path = None

            if tensor is None:
                continue
            if img_path is None:
                continue

            try:
                probs = tensor_to_probs(graph, input_layer, output_layer, tensor, sess)
                top_probs = top_k_probs(probs, top_x)
                # write to a temporary file first, then rename, so consumers never see partial CSVs
                with open(roi_tmp, "w") as rf:
                    rf.write("label,probability\n")
                    for i in top_probs:
                        rf.write(labels[i] + "," + str(probs[i]) + "\n")
                os.rename(roi_tmp, roi_csv)
            except Exception:
                print(traceback.format_exc())

            timediff = datetime.now() - start
            print(" time:", timediff)

        # nothing processed at all, lets wait for files to appear
        if not any_processed:
            sleep(1)
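
# A minimal usage sketch for poll (directories are hypothetical; runs until interrupted,
# moving each processed image to the output directory and writing a CSV with its top 5 labels):
#
#   graph = load_graph("retrained_graph.pb")
#   labels = load_labels("labels.txt")
#   with tf.compat.v1.Session(graph=graph) as sess:
#       poll(sess, graph, "Placeholder", "final_result", labels,
#            "incoming/", "processed/", 299, 299, 0, 255, 5, delete=False)
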
def predict_grid(sess, graph, input_layer, output_layer, labels, top_x, tensor, height, width,
                 grid_size, threshold, ignored_labels, output_file):
    """
    Splits the image (in tensor representation) into a grid of sub-images, obtains predictions
    for each cell from the graph and outputs them in the output file.

    :param sess: the tensorflow session to use
    :type sess: tf.Session
    :param graph: the tensorflow graph to use
    :type graph: tf.Graph
    :param input_layer: the name of the input layer in the graph to use
    :type input_layer: str
    :param output_layer: the name of the output layer in the graph to use
    :type output_layer: str
    :param labels: the list of labels to use
    :type labels: list
    :param top_x: the number of labels with the highest probabilities to return, <1 for all
    :type top_x: int
    :param tensor: the image as tensor
    :type tensor: tf.Tensor
    :param height: the expected height of the images
    :type height: int
    :param width: the expected width of the images
    :type width: int
    :param grid_size: the number of columns/rows to divide the original image into; each
                      sub-image is passed through the model
    :type grid_size: int
    :param threshold: the threshold that the grid cell predictions have to meet before ending up in the output
    :type threshold: float
    :param ignored_labels: the list of ignored labels, default is None
    :type ignored_labels: set
    :param output_file: the file to store the predictions in
    :type output_file: str
    """
    # split the image into a grid_size x grid_size grid of equally sized cells
    crops = tf.reshape(tensor, (-1, grid_size, tensor.shape[1] // grid_size,
                                grid_size, tensor.shape[2] // grid_size, tensor.shape[3]))
    crops = tf.transpose(crops, [0, 1, 3, 2, 4, 5])

    header = "y,x"
    for i in range(top_x):
        header += ",label" + str(i + 1)

    lines = []
    lines.append(header)
    for y in range(grid_size):
        for x in range(grid_size):
            # resize each cell back to the model's expected input dimensions
            sub = crops[0][y][x]
            dims_expander = tf.expand_dims(sub, 0)
            resized = tf.compat.v1.image.resize_bilinear(dims_expander, [height, width])
            results = tensor_to_probs(graph, input_layer, output_layer, resized.eval(), sess)
            top = top_k_probs(results, top_x)
            cells = [str(y), str(x)]
            for i in range(top_x):
                if i < len(top):
                    cells.append(to_cell(labels[top[i]], results[top[i]], threshold, ignored_labels))
                else:
                    cells.append("")
            lines.append(",".join(cells))

    with open(output_file, "w") as rf:
        for line in lines:
            rf.write(line)
            rf.write("\n")
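
# A minimal usage sketch for predict_grid (file names and the "background" label are made up;
# the input tensor is read at 897x897 so it divides evenly into a 3x3 grid of 299x299 cells,
# and the top 2 labels per cell are written to the grid CSV):
#
#   graph = load_graph("retrained_graph.pb")
#   labels = load_labels("labels.txt")
#   with tf.compat.v1.Session(graph=graph) as sess:
#       tensor = read_tensor_from_image_file("example.jpg", 897, 897, 0, 255, sess)
#       predict_grid(sess, graph, "Placeholder", "final_result", labels, 2, tensor,
#                    299, 299, 3, 0.5, {"background"}, "grid.csv")
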
def main(args=None):
    """
    The main method for parsing command-line arguments and labeling.

    :param args: the commandline arguments, uses sys.argv if not supplied
    :type args: list
    """
    parser = argparse.ArgumentParser(
        description="Outputs predictions for a single image using a trained model.",
        prog="tfic-labelimage",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--image", help="image to be processed", required=True)
    parser.add_argument("--graph", help="graph/model to be executed", required=True)
    parser.add_argument(
        "--info",
        help="name of json file with model info (dimensions, layers); overrides "
             "input_height/input_width/labels/input_layer/output_layer options",
        default=None)
    parser.add_argument("--labels", help="name of file containing labels", required=False)
    parser.add_argument("--input_height", type=int, help="input height", default=299)
    parser.add_argument("--input_width", type=int, help="input width", default=299)
    parser.add_argument("--input_layer", help="name of input layer", default="Placeholder")
    parser.add_argument("--output_layer", help="name of output layer", default="final_result")
    parser.add_argument("--input_mean", type=int, help="input mean", default=0)
    parser.add_argument("--input_std", type=int, help="input std", default=255)
    parser.add_argument("--top_x", type=int, help="output only the top K labels; use <1 for all", default=5)
    args = parser.parse_args(args=args)

    # values from options
    labels = None
    input_height = args.input_height
    input_width = args.input_width
    input_layer = args.input_layer
    output_layer = args.output_layer

    # override from info file?
    if args.info is not None:
        input_height, input_width, input_layer, output_layer, labels = load_info_file(args.info)

    if (labels is None) and (args.labels is not None):
        labels = load_labels(args.labels)
    if labels is None:
        raise Exception("No labels determined, either supply --info or --labels!")

    graph = load_graph(args.graph)

    with tf.compat.v1.Session(graph=graph) as sess:
        tensor = read_tensor_from_image_file(args.image,
                                             input_height=input_height,
                                             input_width=input_width,
                                             input_mean=args.input_mean,
                                             input_std=args.input_std,
                                             sess=sess)
        results = tensor_to_probs(graph, input_layer, output_layer, tensor, sess)
        top_x = top_k_probs(results, args.top_x)
        if args.top_x > 0:
            print("Top " + str(args.top_x) + " labels")
        else:
            print("All labels")
        for i in top_x:
            print("- " + labels[i] + ":", results[i])
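
# A hypothetical invocation via the tfic-labelimage entry point declared above
# (file names are placeholders; --info supplies dimensions, layers and labels):
#
#   tfic-labelimage --image example.jpg --graph retrained_graph.pb \
#       --info model_info.json --top_x 3
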