Code Example #1
    def test_simple(self):
        create_mock_graph()
        create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        self.assertAllEqual(image_tensor.get_shape().as_list(),
                            [1, None, None, 3])

        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

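            # Runs the detection tensors in the default session and merges the
            # resulting boxes, scores and labels into the input tf.Example.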
            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)

        self.assertProtoEquals(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "image/encoded"
            value { bytes_list { value:
              "\211PNG\r\n\032\n\000\000\000\rIHDR\000\000\000\001\000\000"
              "\000\001\010\002\000\000\000\220wS\336\000\000\000\022IDATx"
              "\234b\250f`\000\000\000\000\377\377\003\000\001u\000|gO\242"
              "\213\000\000\000\000IEND\256B`\202" } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Code Example #2
def getProposal(image, sourceID):
    tf.logging.set_verbosity(tf.logging.INFO)
    source_id = sourceID
    label_list = []

    # `image` (a PIL image) is kept for API compatibility; the raw bytes are
    # read directly from the file at `sourceID`, which is assumed to be the
    # path to a PNG image.
    with tf.gfile.GFile(source_id, 'rb') as fid:
        encoded_png = fid.read()

    # Wrap the image in a tf.train.Example so the detections can be merged
    # into it later on.
    feature_dict = {
        'image/source_id':
        dataset_util.bytes_feature(source_id.encode('utf8')),
        'image/encoded': dataset_util.bytes_feature(encoded_png),
        'image/object/class/label':
        dataset_util.int64_list_feature(label_list),
        'image/format': dataset_util.bytes_feature('png'.encode('utf8'))
    }
    proposal_record = tf.train.Example(features=tf.train.Features(
        feature=feature_dict))

    data = {}
    with tf.Session() as sess:
        # INFERENCE_GRAPH is assumed to be the path to a frozen inference graph.
        inference_graph = INFERENCE_GRAPH
        tf.logging.info('Reading graph and building model...')
        # Build the input tensors from the single in-memory Example instead of
        # a TFRecord queue.
        serialized_example_tensor = tf.constant(
            proposal_record.SerializeToString())
        image_tensor = tf.expand_dims(
            tf.image.decode_png(encoded_png, channels=3), axis=0)
        (detected_boxes_tensor, detected_scores_tensor, detected_labels_tensor
         ) = detection_inference.build_inference_graph(
             image_tensor, inference_graph)
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
        try:
            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)
            data = {
                "source_id":
                str(tf_example.features.feature['image/source_id'].
                    bytes_list.value[0].decode()),
                "format":
                str(tf_example.features.feature['image/format'].bytes_list.
                    value[0].decode()),
                "detection_score":
                str(tf_example.features.feature[
                    standard_fields.TfExampleFields.detection_score].
                    float_list.value),
                "label":
                str(tf_example.features.feature[
                    standard_fields.TfExampleFields.detection_class_label].
                    int64_list.value)
            }
        except tf.errors.OutOfRangeError:
            tf.logging.info('Finished processing records')
    return data
Code Example #3
  def test_simple(self):
    create_mock_graph()
    create_mock_tfrecord()

    serialized_example_tensor, image_tensor = detection_inference.build_input(
        [get_mock_tfrecord_path()])
    self.assertAllEqual(image_tensor.get_shape().as_list(), [1, None, None, 3])

    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, get_mock_graph_path())

    with self.test_session(use_gpu=False) as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.train.start_queue_runners()

      tf_example = detection_inference.infer_detections_and_add_to_example(
          serialized_example_tensor, detected_boxes_tensor,
          detected_scores_tensor, detected_labels_tensor, False)

    self.assertProtoEquals(r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "image/encoded"
            value { bytes_list { value:
              "\211PNG\r\n\032\n\000\000\000\rIHDR\000\000\000\001\000\000"
              "\000\001\010\002\000\000\000\220wS\336\000\000\000\022IDATx"
              "\234b\250f`\000\000\000\000\377\377\003\000\001u\000|gO\242"
              "\213\000\000\000\000IEND\256B`\202" } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Code Example #4
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  # Satellite.check_flags is assumed to validate that the required flags are set.
  sat = Satellite()
  sat.check_flags(required_flags=[
      'input_tfrecord_paths', 'output_tfrecord_path', 'inference_graph'])

  with tf.Session() as sess:
    input_tfrecord_paths = [
        v for v in FLAGS.input_tfrecord_paths.split(',') if v]
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))

    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)

    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()

    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records')
Code Example #5
    def test_simple(self):
        create_mock_graph()
        encoded_image = create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        self.assertAllEqual(image_tensor.get_shape().as_list(),
                            [1, None, None, 3])

        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, False)
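        # Build the expected proto from its text representation, then attach the
        # encoded image bytes returned by create_mock_tfrecord().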
        expected_example = tf.train.Example()
        text_format.Merge(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }""",
            expected_example)
        expected_example.features.feature[
            standard_fields.TfExampleFields.image_encoded].CopyFrom(
                dataset_util.bytes_feature(encoded_image))
        self.assertProtoEquals(expected_example, tf_example)
Code Example #6
File: infer_detections.py Project: LeeWilli/models
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_paths', 'output_tfrecord_path', 'inference_graph'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    with tf.Session() as sess:
        input_tfrecord_paths = [
            v for v in FLAGS.input_tfrecord_paths.split(',') if v
        ]
        tf.logging.info('Reading input from %d files',
                        len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_tfrecord_path))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()

        tf.logging.info('Entering inference loop...')
        starttime = datetime.datetime.now()
        with tf.python_io.TFRecordWriter(
                FLAGS.output_tfrecord_path) as tf_record_writer:
            try:
                for counter in itertools.count():
                    tf.logging.log_every_n(tf.logging.INFO,
                                           'Processed %d images...', 10,
                                           counter)
                    t0 = datetime.datetime.now()
                    tf_example = detection_inference.infer_detections_and_add_to_example(
                        serialized_example_tensor, detected_boxes_tensor,
                        detected_scores_tensor, detected_labels_tensor,
                        FLAGS.discard_image_pixels)
                    t1 = datetime.datetime.now()
                    tf.logging.info('Processed an image in %d ms',
                                    (t1 - t0).total_seconds() * 1000)
                    tf_record_writer.write(tf_example.SerializeToString())
            except tf.errors.OutOfRangeError:
                tf.logging.info('Finished processing records')
            endtime = datetime.datetime.now()
            tf.logging.info('Total running time: %d seconds',
                            (endtime - starttime).total_seconds())
Code Example #7
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                      'inference_graph']
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    if FLAGS.gpu_device:
        os.environ["CUDA_VISIBLE_DEVICES"] = str(FLAGS.gpu_device)

    # Create the output directory if it does not exist yet.
    output_folder = os.path.dirname(FLAGS.output_tfrecord_path)
    if output_folder and not os.path.exists(output_folder):
        os.makedirs(output_folder)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True

    with tf.Session(config=config) as sess:
        input_tfrecord_paths = [
            v for v in FLAGS.input_tfrecord_paths.split(',') if v]
        tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths, num_additional_channels=FLAGS.num_additional_channels)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
            image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_tfrecord_path))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
        with tf.python_io.TFRecordWriter(
                FLAGS.output_tfrecord_path) as tf_record_writer:
            try:
                for counter in itertools.count():
                    tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                           counter)
                    tf_example = detection_inference.infer_detections_and_add_to_example(
                        serialized_example_tensor, detected_boxes_tensor,
                        detected_scores_tensor, detected_labels_tensor,
                        FLAGS.discard_image_pixels)

                    tf_record_writer.write(tf_example.SerializeToString())
            except tf.errors.OutOfRangeError:
                tf.logging.info('Finished processing records')
Code Example #8
def main(_):
    tf.logging.set_verbosity(tf.logging.INFO)

    required_flags = [
        'input_tfrecord_pattern', 'output_images_dir', 'inference_graph',
        'label_map'
    ]
    for flag_name in required_flags:
        if not getattr(FLAGS, flag_name):
            raise ValueError('Flag --{} is required'.format(flag_name))

    # load the categories
    category_index = label_map_util.create_category_index_from_labelmap(
        FLAGS.label_map, use_display_name=True)

    # create the outputdir if it doesn't exist already
    if not os.path.exists(FLAGS.output_images_dir):
        os.mkdir(FLAGS.output_images_dir)

    with tf.Session() as sess:
        input_tfrecord_paths = glob.glob(FLAGS.input_tfrecord_pattern)
        tf.logging.info('Reading input from %d files',
                        len(input_tfrecord_paths))
        serialized_example_tensor, image_tensor = detection_inference.build_input(
            input_tfrecord_paths)
        tf.logging.info('Reading graph and building model...')
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, FLAGS.inference_graph)

        tf.logging.info('Running inference and writing output to {}'.format(
            FLAGS.output_images_dir))
        sess.run(tf.local_variables_initializer())
        tf.train.start_queue_runners()
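        # Process every record in the input; the reader queue raises
        # OutOfRangeError once all files have been consumed.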
        try:
            for counter in itertools.count():
                tf.logging.log_every_n(tf.logging.INFO,
                                       'Processed %d images...', 10, counter)
                tf_example = detection_inference.infer_detections_and_add_to_example(
                    serialized_example_tensor, detected_boxes_tensor,
                    detected_scores_tensor, detected_labels_tensor, False)
                image_np = get_image_array_from_example(tf_example)
                draw_bounding_boxes_from_example(image_np, tf_example,
                                                 category_index)
                im = Image.fromarray(image_np)
                pid = tf_example.features.feature[
                    standard_fields.TfExampleFields.
                    source_id].bytes_list.value[0].decode()
                im.save(os.path.join(FLAGS.output_images_dir, pid + '.jpg'))
        except tf.errors.OutOfRangeError:
            tf.logging.info('Finished processing records')
Code Example #9
    def test_discard_image(self):
        create_mock_graph()
        create_mock_tfrecord()

        serialized_example_tensor, image_tensor = detection_inference.build_input(
            [get_mock_tfrecord_path()])
        (detected_boxes_tensor, detected_scores_tensor,
         detected_labels_tensor) = detection_inference.build_inference_graph(
             image_tensor, get_mock_graph_path())

        with self.test_session(use_gpu=False) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.local_variables_initializer())
            tf.train.start_queue_runners()

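            # Passing True as the final argument (discard_image_pixels) strips
            # image/encoded from the output, so the expected proto below has no
            # encoded image feature.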
            tf_example = detection_inference.infer_detections_and_add_to_example(
                serialized_example_tensor, detected_boxes_tensor,
                detected_scores_tensor, detected_labels_tensor, True)

        self.assertProtoEquals(
            r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Code Example #10
  def test_discard_image(self):
    create_mock_graph()
    create_mock_tfrecord()

    serialized_example_tensor, image_tensor = detection_inference.build_input(
        [get_mock_tfrecord_path()])
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, get_mock_graph_path())

    with self.test_session(use_gpu=False) as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.train.start_queue_runners()

      tf_example = detection_inference.infer_detections_and_add_to_example(
          serialized_example_tensor, detected_boxes_tensor,
          detected_scores_tensor, detected_labels_tensor, True)

    self.assertProtoEquals(r"""
        features {
          feature {
            key: "image/detection/bbox/ymin"
            value { float_list { value: [0.0, 0.1] } } }
          feature {
            key: "image/detection/bbox/xmin"
            value { float_list { value: [0.8, 0.2] } } }
          feature {
            key: "image/detection/bbox/ymax"
            value { float_list { value: [0.7, 0.8] } } }
          feature {
            key: "image/detection/bbox/xmax"
            value { float_list { value: [1.0, 0.9] } } }
          feature {
            key: "image/detection/label"
            value { int64_list { value: [123, 246] } } }
          feature {
            key: "image/detection/score"
            value { float_list { value: [0.1, 0.2] } } }
          feature {
            key: "test_field"
            value { float_list { value: [1.0, 2.0, 3.0, 4.0] } } } }
    """, tf_example)
Code Example #11
File: infer_detections.py Project: yangyongjx/lpcvc
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                    'inference_graph']
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  with tf.Session() as sess:
    # Collect the 'testdev' shards from the input directory.
    input_tfrecord_paths = []
    for item in os.listdir(FLAGS.input_tfrecord_paths):
      if 'testdev' in item:
        input_tfrecord_paths.append(
            '{}/{}'.format(FLAGS.input_tfrecord_paths, item))
    tf.logging.info('Input files: %s', input_tfrecord_paths)
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)
    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()
    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records')
Code Example #12
File: infer_detections.py Project: ALISCIFP/models
def main(_):
  tf.logging.set_verbosity(tf.logging.INFO)

  required_flags = ['input_tfrecord_paths', 'output_tfrecord_path',
                    'inference_graph']
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  with tf.Session() as sess:
    input_tfrecord_paths = [
        v for v in FLAGS.input_tfrecord_paths.split(',') if v]
    tf.logging.info('Reading input from %d files', len(input_tfrecord_paths))
    serialized_example_tensor, image_tensor = detection_inference.build_input(
        input_tfrecord_paths)
    tf.logging.info('Reading graph and building model...')
    (detected_boxes_tensor, detected_scores_tensor,
     detected_labels_tensor) = detection_inference.build_inference_graph(
         image_tensor, FLAGS.inference_graph)

    tf.logging.info('Running inference and writing output to {}'.format(
        FLAGS.output_tfrecord_path))
    sess.run(tf.local_variables_initializer())
    tf.train.start_queue_runners()
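    # Write one output Example per input record; the loop below ends when the
    # input queue raises OutOfRangeError.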
    with tf.python_io.TFRecordWriter(
        FLAGS.output_tfrecord_path) as tf_record_writer:
      try:
        for counter in itertools.count():
          tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 10,
                                 counter)
          tf_example = detection_inference.infer_detections_and_add_to_example(
              serialized_example_tensor, detected_boxes_tensor,
              detected_scores_tensor, detected_labels_tensor,
              FLAGS.discard_image_pixels)
          tf_record_writer.write(tf_example.SerializeToString())
      except tf.errors.OutOfRangeError:
        tf.logging.info('Finished processing records')
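
All of the main() snippets above rely on a module-level FLAGS object that is defined outside the excerpts. A minimal sketch of the definitions they assume is shown below; the flag names are taken from the snippets themselves, while the help strings and defaults are illustrative assumptions.

import tensorflow as tf

# Flag definitions assumed by the main() examples above (names come from the
# snippets; help strings and defaults are illustrative, not authoritative).
flags = tf.app.flags
flags.DEFINE_string('input_tfrecord_paths', None,
                    'Comma-separated paths to the input TFRecord files.')
flags.DEFINE_string('output_tfrecord_path', None,
                    'Path to the output TFRecord file with added detections.')
flags.DEFINE_string('inference_graph', None,
                    'Path to the frozen object detection inference graph.')
flags.DEFINE_boolean('discard_image_pixels', False,
                     'Discard image/encoded from the output Examples.')
FLAGS = flags.FLAGS

if __name__ == '__main__':
  tf.app.run()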