# Example 1
           )

def parse_example(example):
    """Map a decoded example to an (image, ground-truth boxes) pair.

    Delegates to the sibling helpers: the resized image tensor and the
    ground-truth bounding boxes extracted from the same example.
    """
    image = get_resized_image(example)
    boxes = get_ground_truth_boxes(example)
    return image, boxes

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Visualizes the contents of TFRecord files in TensorBoard')
    parser.add_argument('wildcard', help='The wildcard to find TFRecord files to read')
    parser.add_argument('--out', type=str, default='logs', help='The output directory for summaries')
    parser.add_argument('--batch', type=int, default=100, help='The batch size of each image batch to display')
    # Fixed help-text typo: "numbers" -> "number".
    parser.add_argument('--max-imgs', type=int, default=3, help='The number of images to display in each batch')
    args = parser.parse_args()

    AUTO = tf.data.experimental.AUTOTUNE
    # Input pipeline: locate TFRecord files matching the wildcard, decode the
    # serialized example protos, then map each one to an
    # (image, ground-truth boxes) pair via the sibling parse helpers.
    filenames = tf.data.Dataset.list_files(args.wildcard)
    example_protos = tf.data.TFRecordDataset(filenames, num_parallel_reads=AUTO)
    examples = example_protos.map(parse_example_proto, num_parallel_calls=AUTO)

    data = examples.map(parse_example, num_parallel_calls=AUTO)
    # Pad every element to the largest image / box tensor in the batch so the
    # variable-sized elements can be stacked together.
    data = data.padded_batch(args.batch, padded_shapes=((None, None, None), (None, None)))
    writer = tf.summary.create_file_writer(args.out)
    # enumerate() replaces the original manual `i = 0; ...; i += 1` counter.
    for i, (image_batch, gt_batch) in enumerate(data):
        print(f'[+] Processing batch {i}')
        image_batch = draw_bounding_boxes(image_batch, convert_bounding_boxes_to_tf_format(gt_batch))
        with writer.as_default():
            draw_image_batch(f'Batch {i}', image_batch, max_images=args.max_imgs)

    writer.flush()
# Example 2
                        default=0,
                        help='ID of the image to visualize')
    parser.add_argument('--out',
                        type=str,
                        default='logs',
                        help='The output directory for summaries')

    args = parser.parse_args()
    # Load annotations from both sources (Kaggle CSVs and the SCUT folder).
    kaggle_annotations = load_kaggle_annotations(args.labels, args.boxes)
    scut_annotations = load_scut_annotations(args.scut_annotations_folder)

    # Merge both annotation sets; presumably each set is paired positionally
    # with its base path so image filenames resolve — confirm against
    # aggregate_annotations, which is defined elsewhere.
    annotations = aggregate_annotations(
        [kaggle_annotations, scut_annotations],
        [args.kaggle_basepath, args.scut_basepath])

    # Pick the single annotation selected via --id (an index into the
    # aggregated list).
    image_annotation = annotations[args.id]

    # Decode the JPEG, convert to float32 (convert_image_dtype rescales to
    # [0, 1]), resize to a fixed 480x640 (height, width), and add a leading
    # batch axis so it can be displayed as a one-image batch.
    image = tf.image.decode_jpeg(
        tf.io.read_file(image_annotation["image_filename"]))
    image = tf.image.convert_image_dtype(image, tf.float32)
    image = tf.image.resize(image, (480, 640))
    image_batch = tf.expand_dims(image, axis=0)

    # Write the image as a TensorBoard summary under args.out.
    writer = tf.summary.create_file_writer(args.out)
    with writer.as_default():
        draw_image_batch('Image', image_batch)

    writer.flush()
# Example 3
    model.load_weights(args.model_path)

    # Run inference: the model returns per-box labels (scores) and regression
    # deltas for the image batch — exact tensor shapes depend on the model
    # defined elsewhere; the deltas are reshaped to [-1, 4] below.
    labels, deltas = model.predict(image_batch)
    bboxes = get_bounding_boxes_from_labels(labels, config)
    bboxes = apply_deltas_to_bounding_boxes(bboxes,
                                            tf.reshape(deltas,
                                                       [-1, 4]), config)
    bboxes = convert_bounding_boxes_to_tf_format(bboxes)

    # tf.image.non_max_suppression works on un-batched [num_boxes, 4] boxes,
    # so drop the leading batch dimension first.
    bboxes = tf.squeeze(
        bboxes,
        [0])
    # Suppress overlapping detections above 0.3 IoU, scoring each box by the
    # flattened labels; max_output_size = tf.shape(bboxes)[0] means no limit
    # on the number of boxes kept.
    selected_indices = tf.image.non_max_suppression(bboxes,
                                                    tf.reshape(labels, [-1]),
                                                    tf.shape(bboxes)[0],
                                                    iou_threshold=0.3)
    selected_boxes = tf.gather(bboxes, selected_indices)

    # draw_bounding_boxes expects batched boxes, so restore the batch axis.
    selected_boxes = tf.expand_dims(
        selected_boxes, axis=0)
    output_image = draw_bounding_boxes(image_batch, selected_boxes)

    # Publish the annotated image as a TensorBoard summary under args.out.
    writer = tf.summary.create_file_writer(args.out)
    with writer.as_default():
        draw_image_batch('Predicted boxes', output_image)

    writer.flush()