def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_paths', 'output_tfrecord_path', 'inference_graph'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    with tf.Session() as sess:
        # input_tfrecord_paths = [
        #     v for v in FLAGS.input_tfrecord_paths.split(',') if v]
        # tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
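        # Single-image input in place of the TFRecord pipeline above: decode the
        # PNG into a [1, height, width, 3] uint8 batch, which is the shape
        # detection_inference.build_inference_graph expects.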
        image = tf.read_file('D:\\aaa.png')
        image = tf.image.decode_image(image, 3)
        image.set_shape([None, None, 3])
        image_tensor = tf.expand_dims(image, 0)

        # serialized_example_tensor, image_tensor = detection_inference.build_input(
        #     input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference...')
        sess.run(tf.local_variables_initializer())
        # Fetch all three outputs in a single run so the boxes, scores and
        # labels come from the same forward pass.
        (locations, scores, labels) = sess.run(
            [detected_boxes_tensor, detected_scores_tensor,
             detected_labels_tensor])
        print(locations)
        print(scores)
        print(labels)
        # tf.train.start_queue_runners()
        # with tf.python_io.TFRecordWriter(
        #     FLAGS.output_tfrecord_path) as tf_record_writer:
        #   try:
        #     for counter in itertools.count():
        #       tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
        #                              counter)
        #       tf_example = detection_inference.infer_detections_and_add_to_example(
        #           serialized_example_tensor, detected_boxes_tensor,
        #           detected_scores_tensor, detected_labels_tensor,
        #           FLAGS.discard_image_pixels)
        #       tf_record_writer.write(tf_example.SerializeToString())
        #   except tf.errors.OutOfRangeError:
        #     tf.logging.info('Finished processing records')
        image = cv2.imread('D:\\aaa.png')

        image = cv2.resize(image, (1680, 1050))
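        # Detected boxes are normalized [ymin, xmin, ymax, xmax]; scale them by
        # the image width/height to get pixel coordinates for cv2.rectangle.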
        for location in locations:
            left_top = (int(image.shape[1] * location[1]),
                        int(image.shape[0] * location[0]))
            right_bottom = (int(image.shape[1] * location[3]),
                            int(image.shape[0] * location[2]))
            print(left_top, right_bottom)
            cv2.rectangle(image, left_top, right_bottom, (55, 255, 155), 2)
        cv2.imshow('aaa', image)
        cv2.waitKey(0)
    cv2.destroyAllWindows()
def main(_):
    # Enable Verbose Logging
    tf.logging.set_verbosity(tf.logging.INFO)

    # Check if all required flags are present
    required_flags = ['image', 'output_path', 'inference_graph']
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    # Load category map
    '''
    A category index, which is a dictionary that maps integer ids to dicts
    containing categories, e.g.
    {1: {'id': 1, 'name': 'dog'}, 2: {'id': 2, 'name': 'cat'}, ...}
    '''

    category_index_from_labelmap = label_map_util.create_category_index_from_labelmap(
        FLAGS.path_protofile)
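    # Used below to map each detected label id to a human-readable class name.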

    with tf.Session() as sess:
        input_path = FLAGS.image
        tf.logging.info('Reading input from %s', input_path)

        # Obtain image tensor
        image_tensor = load_image(input_path)

        # Run graph
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        # Get detections
        (detected_boxes, detected_scores,
         detected_labels) = sess.run([detected_boxes_tensor,
                                      detected_scores_tensor,
                                      detected_labels_tensor])

        # Detected boxes of form: [ymins,xmins,ymax,xmax]

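        # Evaluate the input tensor to a NumPy array and drop the batch
        # dimension so vis_utils can draw on it directly.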
        input_image = sess.run(image_tensor)
        print(input_image)
        input_image = np.squeeze(input_image)

        # Draw bounding boxes
        print(detected_boxes, detected_scores)
        ii = np.where(detected_scores > FLAGS.confidence)[0]
        for i in ii:
            ymin = detected_boxes[i][0]
            xmin = detected_boxes[i][1]
            ymax = detected_boxes[i][2]
            xmax = detected_boxes[i][3]

            category = category_index_from_labelmap[detected_labels[i]]['name']

            vis_utils.draw_bounding_box_on_image_array(
                input_image, ymin=ymin, xmin=xmin, ymax=ymax, xmax=xmax,
                display_str_list=[category], color='MediumPurple')

        vis_utils.save_image_array_as_png(input_image, FLAGS.output_path)
Example #3
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_paths', 'output_tfrecord_path', 'inference_graph',
        'num_inter_threads', 'num_intra_threads'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    with tf.Session(config=tf.ConfigProto(
            inter_op_parallelism_threads=FLAGS.num_inter_threads,
            intra_op_parallelism_threads=FLAGS.num_intra_threads)) as sess:
        input_tfrecord_paths = [
            v for v in FLAGS.input_tfrecord_paths.split(',') if v
        ]
        tf.logging.info('Reading input from %d files',
                        len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_tfrecord_path))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()

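        # Track per-image latency in milliseconds; a moving average over the
        # last 200 images is logged and summary statistics are printed at the end.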
        latency = []
        with tf.python_io.TFRecordWriter(
                FLAGS.output_tfrecord_path) as tf_record_writer:
            try:
                for counter in itertools.count():
                    mean_latency = np.mean(latency[-200:]) if latency else 0
                    tf.logging.log_every_n(
                        tf.logging.INFO,
                        'Processed %d images... moving average latency %d ms',
                        200, counter + 1, mean_latency)
                    start = time.time()
                    tf_example = detection_inference.\
                        infer_detections_and_add_to_example(
                            serialized_example_tensor, detected_boxes_tensor,
                            detected_scores_tensor, detected_labels_tensor,
                            FLAGS.discard_image_pixels)
                    duration = time.time() - start
                    latency.append(duration * 1000)
                    tf_record_writer.write(tf_example.SerializeToString())
            except tf.errors.OutOfRangeError:
                tf.logging.info('Finished processing records')
        latency = np.array(latency)
        print("Latency: min = {:.1f}, max = {:.1f}, mean= {:.1f}, median "
              "= {:.1f}".format(latency.min(), latency.max(), latency.mean(),
                                np.median(latency)))
    def test_simple(self):
        create_mock_graph()
        create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        self.assertAllEqual(image_tensor.get_shape().as_list(),
                            [1, None, None, 3])

        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)

        self.assertProtoEquals(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "image/encoded"
            value { bytes_list { value:
              "\211PNG\r\n\032\n\000\000\000\rIHDR\000\000\000\001\000\000"
              "\000\001\010\002\000\000\000\220wS\336\000\000\000\022IDATx"
              "\234b\250f`\000\000\000\000\377\377\003\000\001u\000|gO\242"
              "\213\000\000\000\000IEND\256B`\202" } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Example #5
def getProposal(image, sourceID):
    tf.logging.set_verbosity(tf.logging.INFO)
    im = image
    width, height = im.size
    detection_score = []
    source_id = sourceID
    label_list = []
    with tf.gfile.GFile(source_id, 'rb') as fid:
        encoded_png = fid.read()
    encoded_png_io = io.BytesIO(encoded_png)

    feature_dict = {
        'image/source_id':
        dataset_util.bytes_feature(source_id.encode('utf8')),
        'image/encoded': dataset_util.bytes_feature(encoded_png),
        'image/object/class/label':
        dataset_util.int64_list_feature(label_list),
        'image/format': dataset_util.bytes_feature('png'.encode('utf8'))
    }
    proposalRecord = tf.train.Example(features=tf.train.Features(
        feature=feature_dict))

    with tf.Session() as sess:
        # Build tensors for this single example: the serialized proto and a
        # [1, height, width, 3] uint8 batch decoded from the PNG bytes, since
        # build_inference_graph needs an image tensor to attach the graph to.
        serialized_example_tensor = tf.constant(
            proposalRecord.SerializeToString())
        decoded_image = tf.image.decode_image(tf.constant(encoded_png), 3)
        decoded_image.set_shape([None, None, 3])
        image_tensor = tf.expand_dims(decoded_image, 0)

        inference_graph = INFERENCE_GRAPH
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor, detected_labels_tensor
         ) = detection_inference.build_inference_graph(image_tensor,
                                                       inference_graph)
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
        data = None
        try:
            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)
            source_id = str(tf_example.features.feature['image/source_id'].
                            bytes_list.value[0].decode())
            data = {
                "source_id":
                source_id,
                "format":
                str(tf_example.features.feature['image/format'].bytes_list.
                    value[0].decode()),
                "detection_score":
                str(tf_example.features.feature[
                    standard_fields.TfExampleFields.detection_score].
                    float_list.value),
                "label":
                str(tf_example.features.feature[
                    standard_fields.TfExampleFields.detection_class_label].
                    int64_list.value)
            }
        except tf.errors.OutOfRangeError:
            tf.logging.info('Finished processing records')
    return data
Example #6
  def test_simple(self):
    create_mock_graph()
    create_mock_tfrecord()

    serialized_example_tensor, image_tensor = detection_inference.build_input(
        [get_mock_tfrecord_path()])
    self.assertAllEqual(image_tensor.get_shape().as_list(), [1, None, None, 3])

    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, get_mock_graph_path())

    with self.test_session(use_gpu=False) as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.train.start_queue_runners()

      tf_example = detection_inference.infer_detections_and_add_to_example(
          serialized_example_tensor, detected_boxes_tensor,
          detected_scores_tensor, detected_labels_tensor, False)

    self.assertProtoEquals(r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "image/encoded"
            value { bytes_list { value:
              "\211PNG\r\n\032\n\000\000\000\rIHDR\000\000\000\001\000\000"
              "\000\001\010\002\000\000\000\220wS\336\000\000\000\022IDATx"
              "\234b\250f`\000\000\000\000\377\377\003\000\001u\000|gO\242"
              "\213\000\000\000\000IEND\256B`\202" } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Example #7
    def test_simple(self):
        create_mock_graph()
        encoded_image = create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        self.assertAllEqual(image_tensor.get_shape().as_list(),
                            [1, None, None, 3])

        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)
        expected_example = tf.train.Example()
        text_format.Merge(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }""",
            expected_example)
        expected_example.features.feature[
            standard_fields.TfExampleFields.image_encoded].CopyFrom(
                dataset_util.bytes_feature(encoded_image))
        self.assertProtoEquals(expected_example, tf_example)
Example #8
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_paths', 'output_tfrecord_path', 'inference_graph'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    with tf.Session() as sess:
        input_tfrecord_paths = [
            v for v in FLAGS.input_tfrecord_paths.split(',') if v
        ]
        tf.logging.info('Reading input from %d files',
                        len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_tfrecord_path))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()

        print("entering into loop ")
        starttime = datetime.datetime.now()
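        # Time each inference call and report the per-image latency as well as
        # the total wall-clock time at the end.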
        with tf.python_io.TFRecordWriter(
                FLAGS.output_tfrecord_path) as tf_record_writer:
            try:
                for counter in itertools.count():
                    tf.logging.log_every_n(tf.logging.INFO,
                                           'Processed %d images...', 10,
                                           counter)
                    t0 = datetime.datetime.now()
                    tf_example = detection_inference.infer_detections_and_add_to_example(
                        serialized_example_tensor, detected_boxes_tensor,
                        detected_scores_tensor, detected_labels_tensor,
                        FLAGS.discard_image_pixels)
                    t1 = datetime.datetime.now()
                    tf.logging.info('processed an image in %d ms',
                                    (t1 - t0).total_seconds() * 1000)
                    tf_record_writer.write(tf_example.SerializeToString())
            except tf.errors.OutOfRangeError:
                tf.logging.info('Finished processing records')
            endtime = datetime.datetime.now()
            print("running time is ")
            print((endtime - starttime).seconds)
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                      'inference_graph']
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

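    # Restrict TensorFlow to the requested GPU by masking device visibility.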
    if FLAGS.gpu_device:
        os.environ["CUDA_VISIBLE_DEVICES"] = str(FLAGS.gpu_device)

    output_folder = "/"
    output_folder = output_folder.join(FLAGS.output_tfrecord_path.split("/")[:-1])

    if not os.path.exists(output_folder):
        os.mkdir(output_folder)

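    # Let GPU memory grow on demand instead of pre-allocating the whole device.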
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True

    with tf.Session(config=config) as sess:
        input_tfrecord_paths = [
            v for v in FLAGS.input_tfrecord_paths.split(',') if v]
        tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths, num_additional_channels=FLAGS.num_additional_channels)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
            image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_tfrecord_path))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
        with tf.python_io.TFRecordWriter(
                FLAGS.output_tfrecord_path) as tf_record_writer:
            try:
                for counter in itertools.count():
                    tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                           counter)
                    tf_example = detection_inference.infer_detections_and_add_to_example(
                        serialized_example_tensor, detected_boxes_tensor,
                        detected_scores_tensor, detected_labels_tensor,
                        FLAGS.discard_image_pixels)

                    tf_record_writer.write(tf_example.SerializeToString())
            except tf.errors.OutOfRangeError:
                tf.logging.info('Finished processing records')
Example #10
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_pattern', 'output_images_dir', 'inference_graph',
        'label_map'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    # load the categories
    category_index = label_map_util.create_category_index_from_labelmap(
        FLAGS.label_map, use_display_name=True)

    # create the outputdir if it doesn't exist already
    if not os.path.exists(FLAGS.output_images_dir):
        os.mkdir(FLAGS.output_images_dir)

    with tf.Session() as sess:
        input_tfrecord_paths = glob.glob(FLAGS.input_tfrecord_pattern)
        tf.logging.info('Reading input from %d files',
                        len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_images_dir))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
        try:
            for counter in itertools.count():
                tf.logging.log_every_n(tf.logging.INFO,
                                       'Processed %d images...', 10, counter)
                tf_example = detection_inference.infer_detections_and_add_to_example(
                    serialized_example_tensor, detected_boxes_tensor,
                    detected_scores_tensor, detected_labels_tensor, False)
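                # Decode the image from the example, draw the detections on it,
                # and save it under the example's source_id.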
                image_np = get_image_array_from_example(tf_example)
                draw_bounding_boxes_from_example(image_np, tf_example,
                                                 category_index)
                im = Image.fromarray(image_np)
                pid = tf_example.features.feature[
                    standard_fields.TfExampleFields.source_id
                ].bytes_list.value[0].decode()
                im.save(os.path.join(FLAGS.output_images_dir, pid + '.jpg'))
        except tf.errors.OutOfRangeError:
            tf.logging.info('Finished processing records')
    def test_discard_image(self):
        create_mock_graph()
        create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, True)

        self.assertProtoEquals(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Example #12
  def test_discard_image(self):
    create_mock_graph()
    create_mock_tfrecord()

    serialized_example_tensor, image_tensor = detection_inference.build_input(
        [get_mock_tfrecord_path()])
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, get_mock_graph_path())

    with self.test_session(use_gpu=False) as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.train.start_queue_runners()

      tf_example = detection_inference.infer_detections_and_add_to_example(
          serialized_example_tensor, detected_boxes_tensor,
          detected_scores_tensor, detected_labels_tensor, True)

    self.assertProtoEquals(r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Example #13
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                    'inference_graph']
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  with tf.Session() as sess:
    input_tfrecord_paths = []
    # Collect only the 'testdev' shards from the input directory.
    for item in os.listdir(FLAGS.input_tfrecord_paths):
      if 'testdev' in item:
        input_tfrecord_paths.append(
            '{}/{}'.format(FLAGS.input_tfrecord_paths, item))

    print(input_tfrecord_paths)
    # for v in FLAGS.input_tfrecord_paths.split(',') if v
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)
    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()
    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records')
Example #14
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                    'inference_graph']
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  with tf.Session() as sess:
    input_tfrecord_paths = [
        v for v in FLAGS.input_tfrecord_paths.split(',') if v]
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)
    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()
    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records')