Пример #1
0
def get_configs_from_pipeline_file():
    """Read the training configuration from object storage.

    Fetches the 'ssd_mobilenet_v1_coco.config' text proto from the
    'training' bucket in object storage and parses it into a
    TrainEvalPipelineConfig.

    Returns:
        model_config: model_pb2.DetectionModel
        train_config: train_pb2.TrainConfig
        input_config: input_reader_pb2.InputReader
    """
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    raw_config = object_storage.get_object(
        namespace, 'training', 'ssd_mobilenet_v1_coco.config').data.content
    # Object storage returns bytes; decode before parsing the text proto.
    text_format.Merge(raw_config.decode(), pipeline_config)

    return (pipeline_config.model,
            pipeline_config.train_config,
            pipeline_config.train_input_reader)
Пример #2
0
def create_pipeline(pipeline_path, model_path, label_path, train_tfrecord_path,
                    eval_tfrecord_path, out_pipeline_path, epochs):
    """Customize a pipeline config template and write it to disk.

    Reads the text proto at `pipeline_path`, points it at the given
    checkpoint, label map and TFRecord files, sets the step count, and
    writes the result to `out_pipeline_path`.

    Args:
        pipeline_path: Path to the template pipeline.config text proto.
        model_path: Fine-tune checkpoint prefix to train from.
        label_path: Label map path used for both train and eval readers.
        train_tfrecord_path: TFRecord file with training examples.
        eval_tfrecord_path: TFRecord file with evaluation examples.
        out_pipeline_path: Destination path for the rewritten config.
        epochs: Number of training steps (coerced to int).
    """
    print((pipeline_path, model_path, label_path, train_tfrecord_path,
           eval_tfrecord_path, out_pipeline_path, epochs))
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_path, "r") as f:
        proto_str = f.read()
        text_format.Merge(proto_str, pipeline_config)
    pipeline_config.train_config.fine_tune_checkpoint = model_path
    pipeline_config.train_config.num_steps = int(epochs)
    pipeline_config.train_input_reader.label_map_path = label_path
    # NOTE(review): indexing input_path[0] assumes the template already has
    # at least one input_path entry in each reader -- confirm for new
    # templates (an empty repeated field would raise IndexError).
    pipeline_config.train_input_reader.tf_record_input_reader.input_path[
        0] = train_tfrecord_path

    pipeline_config.eval_input_reader[0].label_map_path = label_path
    pipeline_config.eval_input_reader[0].tf_record_input_reader.input_path[
        0] = eval_tfrecord_path

    config_text = text_format.MessageToString(pipeline_config)
    # Fix: MessageToString() returns str; writing str to a file opened in
    # binary mode ("wb") raises TypeError on Python 3. Use text mode.
    with tf.gfile.Open(out_pipeline_path, "w") as f:
        f.write(config_text)
Пример #3
0
def init_config(args, args_config=None):
    """Build the pipeline config proto for a run and return its path.

    If `args.pipeline_config_proto` is set it is returned unchanged.
    Otherwise a TrainEvalPipelineConfig is assembled via the _apply_*
    helpers, optionally printed to stdout (exiting with status 0 when
    `args.print_config` is set), and written to `args.generated`.

    Args:
        args: Parsed command-line arguments.
        args_config: Optional dict of extra overrides applied last.

    Returns:
        Path of the config file to use.

    Raises:
        SystemExit: with code 0 after printing when `args.print_config`.
    """
    args_config = args_config or {}
    if args.pipeline_config_proto:
        return args.pipeline_config_proto
    msg = pipeline_pb2.TrainEvalPipelineConfig()
    # Order matters: later helpers and args_config override earlier ones.
    _apply_model_config(args, msg)
    _apply_train_config(args, msg)
    _apply_eval_config(args, msg)
    _apply_dataset_config(args, msg)
    _apply_extra_config(args, msg)
    _apply_arg_config(args, msg)
    _apply_dict(args_config, msg)
    msg_str = text_format.MessageToString(msg)
    if args.print_config:
        sys.stdout.write(msg_str)
        sys.stdout.write("\n")
        sys.stdout.flush()
        raise SystemExit(0)
    # Fix: msg_str is str, so the file must be opened in text mode;
    # mode "wb" raises TypeError on Python 3.
    with open(args.generated, "w") as out:
        out.write(msg_str)
    return args.generated
Пример #4
0
def main(_):
    """Export an inference graph from a hard-coded pets pipeline config."""
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    config_path = "/home/ok/Downloads/raccoon_dataset-master/training/faster_rcnn_inception_v2_pets.config"
    with tf.gfile.GFile(config_path, 'r') as config_file:
        text_format.Merge(config_file.read(), pipeline_config)
    text_format.Merge(FLAGS.config_override, pipeline_config)
    # Parse a comma-separated shape; '-1' entries become None (dynamic).
    shape = None
    if FLAGS.input_shape:
        shape = [None if token == '-1' else int(token)
                 for token in FLAGS.input_shape.split(',')]
    exporter.export_inference_graph(
        FLAGS.input_type,
        pipeline_config,
        FLAGS.trained_checkpoint_prefix,
        FLAGS.output_directory,
        input_shape=shape,
        write_inference_graph=FLAGS.write_inference_graph)
Пример #5
0
def main(_):
    """Export an inference graph from the FLAGS-specified checkpoint/config.

    Reads the pipeline config named by --pipeline_config_path, merges any
    --config_override on top, parses --input_shape (with '-1' meaning a
    dynamic dimension), and hands everything to the exporter.
    """
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(FLAGS.config_override, pipeline_config)
    # Fix: removed a leftover debug loop that printed every raw token of
    # FLAGS.input_shape before parsing.
    if FLAGS.input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in FLAGS.input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph(
        FLAGS.input_type,
        pipeline_config,
        FLAGS.trained_checkpoint_prefix,
        FLAGS.output_directory,
        input_shape=input_shape,
        write_inference_graph=FLAGS.write_inference_graph)
Пример #6
0
def get_configs_from_pipeline_file(pipeline_config_path):
    """Reads configuration from a pipeline_pb2.TrainEvalPipelineConfig.

    Args:
      pipeline_config_path: Path to pipeline_pb2.TrainEvalPipelineConfig text
        proto.

    Returns:
      Dictionary of configuration objects. Keys are `model`, `train_config`,
        `train_input_config`, `eval_config`, `eval_input_config`, and -- when
        the lstm_model extension is present -- `lstm_model`. Values are the
        corresponding config objects.
    """
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    # The file is parsed here only to inspect the lstm_model extension;
    # config_util re-reads the same path below to build the base dictionary.
    with tf.gfile.GFile(pipeline_config_path, "r") as f:
        proto_str = f.read()
        text_format.Merge(proto_str, pipeline_config)
    configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
    if pipeline_config.HasExtension(internal_pipeline_pb2.lstm_model):
        configs["lstm_model"] = pipeline_config.Extensions[
            internal_pipeline_pb2.lstm_model]
    return configs
Пример #7
0
def main(_):
    """Export a frozen inference graph from hard-coded project paths."""
    config_path = "D:/MajorProjects/ObjectDetectionCreator/data/faster_rcnn_inception_v2_coco.config"
    checkpoint_prefix = "D:/MajorProjects/ObjectDetectionCreator/data/records/model.ckpt-100000"

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(config_path, 'r') as config_file:
        text_format.Merge(config_file.read(), pipeline_config)
    # No override text; merging the empty string is a no-op kept for parity
    # with the FLAGS-driven exporters.
    text_format.Merge('', pipeline_config)
    exporter.export_inference_graph(
        "image_tensor",
        pipeline_config,
        checkpoint_prefix,
        "inference_graph",
        input_shape=None,
        write_inference_graph=False)
Пример #8
0
def create_pipeline_proto_from_configs(configs):
  """Creates a pipeline_pb2.TrainEvalPipelineConfig from configs dictionary.

  This function performs the inverse operation of
  create_configs_from_pipeline_proto().

  Args:
    configs: Dictionary of configs. See get_configs_from_pipeline_file().

  Returns:
    A fully populated pipeline_pb2.TrainEvalPipelineConfig.
  """
  proto = pipeline_pb2.TrainEvalPipelineConfig()
  # Copy each config object back into its slot on the pipeline proto.
  proto.model.CopyFrom(configs["model"])
  proto.train_config.CopyFrom(configs["train_config"])
  proto.train_input_reader.CopyFrom(configs["train_input_config"])
  proto.eval_config.CopyFrom(configs["eval_config"])
  proto.eval_input_reader.extend(configs["eval_input_configs"])
  # The graph rewriter (e.g. quantization) is optional.
  if "graph_rewriter_config" in configs:
    proto.graph_rewriter.CopyFrom(configs["graph_rewriter_config"])
  return proto
Пример #9
0
def get_configs_from_pipeline_file(pipeline_config_path, config_override=None):
    """Reads config from a file containing pipeline_pb2.TrainEvalPipelineConfig.

    Args:
      pipeline_config_path: Path to a TrainEvalPipelineConfig text proto.
      config_override: Optional TrainEvalPipelineConfig text proto merged on
        top of the file's contents.

    Returns:
      Dictionary of configuration objects keyed by `model`, `train_config`,
      `train_input_config`, `eval_config`, `eval_input_config`.
    """
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, "r") as config_file:
        text_format.Merge(config_file.read(), pipeline_config)
    # Overrides are merged after the file so they win on conflicts.
    if config_override:
        text_format.Merge(config_override, pipeline_config)
    return create_configs_from_pipeline_proto(pipeline_config)
    def testNewMaskType(self):
        """Tests that mask type can be overwritten in input readers."""
        old_type = input_reader_pb2.NUMERICAL_MASKS
        new_type = input_reader_pb2.PNG_MASKS
        config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

        # Write a config whose readers both use the original mask type.
        pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
        pipeline_config.train_input_reader.mask_type = old_type
        pipeline_config.eval_input_reader.mask_type = old_type
        _write_config(pipeline_config, config_path)

        configs = config_util.get_configs_from_pipeline_file(config_path)
        configs = config_util.merge_external_params_with_configs(
            configs, mask_type=new_type)
        # Both readers should have picked up the override.
        self.assertEqual(new_type, configs["train_input_config"].mask_type)
        self.assertEqual(new_type, configs["eval_input_config"].mask_type)
Пример #11
0
    def test_save_pipeline_config(self):
        """Tests that the pipeline config is properly saved to disk."""
        print('\n=========================================================================')
        print('test_save_pipeline_config')

        # Build a config touching every top-level section.
        pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
        pipeline_config.model.faster_rcnn.num_classes = 10
        pipeline_config.train_config.batch_size = 32
        pipeline_config.train_input_reader.label_map_path = "path/to/label_map"
        pipeline_config.eval_config.num_examples = 20
        pipeline_config.eval_input_reader.queue_capacity = 100

        # Save, reload, and reassemble; the round trip must be lossless.
        tmp_dir = self.get_temp_dir()
        config_util.save_pipeline_config(pipeline_config, tmp_dir)
        reloaded = config_util.get_configs_from_pipeline_file(
            os.path.join(tmp_dir, "pipeline.config"))
        round_tripped = config_util.create_pipeline_proto_from_configs(
            reloaded)
        self.assertEqual(pipeline_config, round_tripped)
  def testDontOverwriteEmptyLabelMapPath(self):
    """Tests that an empty label map path does not overwrite the original."""
    original_path = "path/to/original/label_map"
    config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.train_input_reader.label_map_path = original_path
    pipeline_config.eval_input_reader.add().label_map_path = original_path
    _write_config(pipeline_config, config_path)

    configs = config_util.get_configs_from_pipeline_file(config_path)
    # Overriding with "" must be ignored, not applied.
    configs = config_util.merge_external_params_with_configs(
        configs, kwargs_dict={"label_map_path": ""})
    self.assertEqual(original_path,
                     configs["train_input_config"].label_map_path)
    self.assertEqual(original_path,
                     configs["eval_input_configs"][0].label_map_path)
    def testOverwriteAllEvalSampling(self):
        """Tests that the sampling rate is overridden in every eval reader."""
        old_rate = 1
        new_rate = 10

        config_path = os.path.join(self.get_temp_dir(), "pipeline.config")
        pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
        # Two eval readers, both starting from the original sampling rate.
        for _ in range(2):
            reader = pipeline_config.eval_input_reader.add()
            reader.sample_1_of_n_examples = old_rate
        _write_config(pipeline_config, config_path)

        configs = config_util.get_configs_from_pipeline_file(config_path)
        configs = config_util.merge_external_params_with_configs(
            configs, kwargs_dict={"sample_1_of_n_eval_examples": new_rate})
        # Every eval reader must have been updated, not just the first.
        for eval_input_config in configs["eval_input_configs"]:
            self.assertEqual(new_rate,
                             eval_input_config.sample_1_of_n_examples)
  def testNewLabelMapPath(self):
    """Tests that label map path can be overwritten in input readers."""
    old_path = "path/to/original/label_map"
    new_path = "path//to/new/label_map"
    config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.train_input_reader.label_map_path = old_path
    pipeline_config.eval_input_reader.add().label_map_path = old_path
    _write_config(pipeline_config, config_path)

    configs = config_util.get_configs_from_pipeline_file(config_path)
    configs = config_util.merge_external_params_with_configs(
        configs, kwargs_dict={"label_map_path": new_path})
    # The override must land in the train reader and every eval reader.
    self.assertEqual(new_path, configs["train_input_config"].label_map_path)
    for eval_input_config in configs["eval_input_configs"]:
      self.assertEqual(new_path, eval_input_config.label_map_path)
def main(_):
    """Export an inference graph from local training artifacts."""
    config_path = './pipeline_config.config'
    checkpoint_prefix = './train/model.ckpt-1899'

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(config_path, 'r') as config_file:
        text_format.Merge(config_file.read(), pipeline_config)
    # No override text; merging '' is a no-op kept for parity with the
    # FLAGS-driven exporters.
    text_format.Merge('', pipeline_config)
    # input_shape is fixed to None here, so the parsing branch below never
    # triggers; it is retained from the FLAGS-driven template.
    input_shape = None
    if input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None for dim in input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph('image_tensor', pipeline_config,
                                    checkpoint_prefix, "./export",
                                    input_shape)
Пример #16
0
def get_configs_from_pipeline_file():
    """Reads training configuration from a pipeline_pb2.TrainEvalPipelineConfig.

    Reads the file named by the pipeline_config_path flag, replacing any
    literal '~' in its text with the current user's home directory before
    parsing.

    Returns:
        model_config: model_pb2.DetectionModel
        train_config: train_pb2.TrainConfig
        input_config: input_reader_pb2.InputReader
    """
    from os.path import expanduser
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as config_file:
        raw = config_file.read().replace('~', expanduser("~"))
        text_format.Merge(raw, pipeline_config)

    return (pipeline_config.model,
            pipeline_config.train_config,
            pipeline_config.train_input_reader)
Пример #17
0
    def test_create_configs_from_pipeline_proto(self):
        """Tests creating configs dictionary from pipeline proto."""
        pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
        pipeline_config.model.faster_rcnn.num_classes = 10
        pipeline_config.train_config.batch_size = 32
        pipeline_config.train_input_reader.label_map_path = "path/to/label_map"
        pipeline_config.eval_config.num_examples = 20
        pipeline_config.eval_input_reader.queue_capacity = 100

        configs = config_util.create_configs_from_pipeline_proto(
            pipeline_config)
        # Each dict entry must mirror the matching proto submessage.
        expected = [
            ("model", pipeline_config.model),
            ("train_config", pipeline_config.train_config),
            ("train_input_config", pipeline_config.train_input_reader),
            ("eval_config", pipeline_config.eval_config),
            ("eval_input_config", pipeline_config.eval_input_reader),
        ]
        for key, submessage in expected:
            self.assertProtoEquals(submessage, configs[key])
Пример #18
0
  def testNewClassificationLocalizationWeightRatio(self):
    """Tests that the loss weight ratio is updated appropriately."""
    ratio = 5.0
    hparams = tf.contrib.training.HParams(
        classification_localization_weight_ratio=ratio)
    config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.model.ssd.loss.localization_weight = 0.1
    pipeline_config.model.ssd.loss.classification_weight = 0.2
    _write_config(pipeline_config, config_path)

    configs = config_util.get_configs_from_pipeline_file(config_path)
    configs = config_util.merge_external_params_with_configs(configs, hparams)
    loss = configs["model"].ssd.loss
    # Localization weight is normalized to 1.0; classification weight
    # becomes the requested ratio.
    self.assertAlmostEqual(1.0, loss.localization_weight)
    self.assertAlmostEqual(ratio, loss.classification_weight)
    def test_raise_runtime_error_on_images_with_different_sizes(self):
        """Exports a masked detection graph fed by encoded image strings and
        checks that a batch mixing image sizes fails inside the graph.
        """
        tmp_dir = self.get_temp_dir()
        trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
        # A checkpoint must exist on disk before export can run.
        self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                              use_moving_averages=True)
        output_directory = os.path.join(tmp_dir, 'output')
        inference_graph_path = os.path.join(output_directory,
                                            'frozen_inference_graph.pb')
        # Swap the real model builder for a fake detection model so the
        # export path is exercised without a trained network.
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            mock_builder.return_value = FakeModel(add_detection_masks=True)
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            pipeline_config.eval_config.use_moving_averages = False
            exporter.export_inference_graph(
                input_type='encoded_image_string_tensor',
                pipeline_config=pipeline_config,
                trained_checkpoint_prefix=trained_checkpoint_prefix,
                output_directory=output_directory)

        inference_graph = self._load_inference_graph(inference_graph_path)
        # Two JPEG-encoded all-ones images of different spatial sizes
        # (4x4 vs 2x2) to provoke the shape mismatch.
        large_image = self._create_encoded_image_string(
            np.ones((4, 4, 3)).astype(np.uint8), 'jpg')
        small_image = self._create_encoded_image_string(
            np.ones((2, 2, 3)).astype(np.uint8), 'jpg')

        image_str_batch_np = np.hstack([large_image, small_image])
        with self.test_session(graph=inference_graph) as sess:
            image_str_tensor = inference_graph.get_tensor_by_name(
                'encoded_image_string_tensor:0')
            boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
            scores = inference_graph.get_tensor_by_name('detection_scores:0')
            classes = inference_graph.get_tensor_by_name('detection_classes:0')
            masks = inference_graph.get_tensor_by_name('detection_masks:0')
            num_detections = inference_graph.get_tensor_by_name(
                'num_detections:0')
            # Mixed sizes cannot be stacked, so the run must fail with the
            # TensorArray shape error.
            with self.assertRaisesRegexp(
                    tf.errors.InvalidArgumentError,
                    '^TensorArray has inconsistent shapes.'):
                sess.run([boxes, scores, classes, masks, num_detections],
                         feed_dict={image_str_tensor: image_str_batch_np})
  def test_export_and_run_inference_with_image_tensor(self):
    """Exports a masked detection graph fed by an image tensor and checks
    the outputs produced for a two-image batch.
    """
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    # A checkpoint must exist on disk before export can run.
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=True)
    output_directory = os.path.join(tmp_dir, 'output')
    inference_graph_path = os.path.join(output_directory,
                                        'frozen_inference_graph.pb')
    # Replace the real model builder with a fake model so export runs
    # without a trained network.
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='image_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)

    inference_graph = self._load_inference_graph(inference_graph_path)
    with self.test_session(graph=inference_graph) as sess:
      image_tensor = inference_graph.get_tensor_by_name('image_tensor:0')
      boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
      scores = inference_graph.get_tensor_by_name('detection_scores:0')
      classes = inference_graph.get_tensor_by_name('detection_classes:0')
      masks = inference_graph.get_tensor_by_name('detection_masks:0')
      num_detections = inference_graph.get_tensor_by_name('num_detections:0')
      # Feed a batch of two all-ones 4x4 RGB images and compare against the
      # constant predictions baked into FakeModel.
      (boxes_np, scores_np, classes_np, masks_np, num_detections_np) = sess.run(
          [boxes, scores, classes, masks, num_detections],
          feed_dict={image_tensor: np.ones((2, 4, 4, 3)).astype(np.uint8)})
      self.assertAllClose(boxes_np, [[[0.0, 0.0, 0.5, 0.5],
                                      [0.5, 0.5, 0.8, 0.8]],
                                     [[0.5, 0.5, 1.0, 1.0],
                                      [0.0, 0.0, 0.0, 0.0]]])
      self.assertAllClose(scores_np, [[0.7, 0.6],
                                      [0.9, 0.0]])
      self.assertAllClose(classes_np, [[1, 2],
                                       [2, 1]])
      self.assertAllClose(masks_np, np.arange(64).reshape([2, 2, 4, 4]))
      self.assertAllClose(num_detections_np, [2, 1])
  def testNewFocalLossParameters(self):
    """Tests that the focal loss parameters are updated appropriately."""
    original_alpha = 1.0
    original_gamma = 1.0
    new_alpha = 0.3
    new_gamma = 2.0
    # Fix: HParams lives in tf.contrib.training, not on the top-level tf
    # module (consistent with the sibling tests in this file).
    hparams = tf.contrib.training.HParams(
        focal_loss_alpha=new_alpha, focal_loss_gamma=new_gamma)
    pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    classification_loss = pipeline_config.model.ssd.loss.classification_loss
    classification_loss.weighted_sigmoid_focal.alpha = original_alpha
    classification_loss.weighted_sigmoid_focal.gamma = original_gamma
    _write_config(pipeline_config, pipeline_config_path)

    configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
    configs = config_util.merge_external_params_with_configs(configs, hparams)
    # Both focal-loss knobs should reflect the hparams overrides.
    classification_loss = configs["model"].ssd.loss.classification_loss
    self.assertAlmostEqual(new_alpha,
                           classification_loss.weighted_sigmoid_focal.alpha)
    self.assertAlmostEqual(new_gamma,
                           classification_loss.weighted_sigmoid_focal.gamma)
Пример #22
0
 def test_export_tflite_graph_with_postprocessing_op(self):
     """Exports a TFLite graph for an SSD pipeline and verifies that the
     custom TFLite_Detection_PostProcess op is present with the expected
     attributes (box-coder scales, class count, float output types).
     """
     # SSD pipeline with a fixed 10x10 input, 2 classes, and sigmoid score
     # conversion; the box-coder scales are asserted on below.
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = False
     pipeline_config.model.ssd.post_processing.score_converter = (
         post_processing_pb2.PostProcessing.SIGMOID)
     pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
     pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
     pipeline_config.model.ssd.num_classes = 2
     pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
     pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
     pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
     pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
     tflite_graph_file = self._export_graph_with_postprocessing_op(
         pipeline_config)
     self.assertTrue(os.path.exists(tflite_graph_file))
     # Reload the exported GraphDef and inspect its nodes.
     graph = tf.Graph()
     with graph.as_default():
         graph_def = tf.GraphDef()
         with tf.gfile.Open(tflite_graph_file, mode='rb') as f:
             graph_def.ParseFromString(f.read())
         all_op_names = [node.name for node in graph_def.node]
         self.assertIn('TFLite_Detection_PostProcess', all_op_names)
         self.assertNotIn('UnattachedTensor', all_op_names)
         for node in graph_def.node:
             if node.name == 'TFLite_Detection_PostProcess':
                 # The post-process op must be marked quantization-aware and
                 # carry the scales/classes from the pipeline config above.
                 self.assertTrue(node.attr['_output_quantized'].b)
                 self.assertTrue(
                     node.
                     attr['_support_output_type_float_in_quantized_op'].b)
                 self.assertEqual(node.attr['y_scale'].f, 10.0)
                 self.assertEqual(node.attr['x_scale'].f, 10.0)
                 self.assertEqual(node.attr['h_scale'].f, 5.0)
                 self.assertEqual(node.attr['w_scale'].f, 5.0)
                 self.assertEqual(node.attr['num_classes'].i, 2)
                 self.assertTrue(
                     all([
                         t == types_pb2.DT_FLOAT
                         for t in node.attr['_output_types'].list.type
                     ]))
 def _export_saved_model(self):
   """Builds a tf_example detection graph from a FakeModel checkpoint,
   freezes it, writes it out as a SavedModel, and returns that path.
   """
   tmp_dir = self.get_temp_dir()
   checkpoint_path = os.path.join(tmp_dir, 'model.ckpt')
   # A checkpoint must exist before the graph can be frozen against it.
   self._save_checkpoint_from_mock_model(checkpoint_path)
   output_directory = os.path.join(tmp_dir, 'output')
   saved_model_path = os.path.join(output_directory, 'saved_model')
   tf.io.gfile.makedirs(output_directory)
   # Swap the real model builder for a 5-class fake detection model.
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel(num_classes=5)
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = False
     detection_model = model_builder.build(pipeline_config.model,
                                           is_training=False)
     # Build the inference graph fed by serialized tf.Example strings.
     outputs, placeholder_tensor = exporter.build_detection_graph(
         input_type='tf_example',
         detection_model=detection_model,
         input_shape=None,
         output_collection_name='inference_op',
         graph_hook_fn=None)
     output_node_names = ','.join(outputs.keys())
     saver = tf.train.Saver()
     input_saver_def = saver.as_saver_def()
     # Freeze: fold checkpoint variables into constants in the GraphDef.
     frozen_graph_def = exporter.freeze_graph_with_def_protos(
         input_graph_def=tf.get_default_graph().as_graph_def(),
         input_saver_def=input_saver_def,
         input_checkpoint=checkpoint_path,
         output_node_names=output_node_names,
         restore_op_name='save/restore_all',
         filename_tensor_name='save/Const:0',
         output_graph='',
         clear_devices=True,
         initializer_nodes='')
     exporter.write_saved_model(
         saved_model_path=saved_model_path,
         frozen_graph_def=frozen_graph_def,
         inputs=placeholder_tensor,
         outputs=outputs)
     return saved_model_path
Пример #24
0
def get_configs_from_pipeline_file():
  """Reads evaluation configuration from a pipeline_pb2.TrainEvalPipelineConfig.

  Reads the file named by the pipeline_config_path flag. When the
  eval_training_data flag is set, the train_config is returned in place of
  the eval_config.

  Returns:
    model_config: a model_pb2.DetectionModel
    eval_config: a eval_pb2.EvalConfig
    input_config: a input_reader_pb2.InputReader
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as config_file:
    text_format.Merge(config_file.read(), pipeline_config)

  eval_config = (pipeline_config.train_config if FLAGS.eval_training_data
                 else pipeline_config.eval_config)
  return (pipeline_config.model, eval_config,
          pipeline_config.eval_input_reader)
Пример #25
0
def create_pipeline_proto_from_configs(configs):
    """Builds a pipeline_pb2.TrainEvalPipelineConfig from a configs dictionary.

    Roughly the inverse of get_configs_from_pipeline_file(): instead of a file
    path, it packs the individual config protos back into one
    `TrainEvalPipelineConfig` message.

    Args:
      configs: Dictionary of configs. See get_configs_from_pipeline_file().

    Returns:
      A fully populated pipeline_pb2.TrainEvalPipelineConfig.
    """
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    # Map each top-level proto field onto its dictionary key, then copy.
    field_key_pairs = (
        (pipeline_config.model, "model"),
        (pipeline_config.train_config, "train_config"),
        (pipeline_config.train_input_reader, "train_input_config"),
        (pipeline_config.eval_config, "eval_config"),
        (pipeline_config.eval_input_reader, "eval_input_config"),
        (pipeline_config.offline_eval_input_reader,
         "offline_eval_input_config"),
    )
    for field, key in field_key_pairs:
        field.CopyFrom(configs[key])
    return pipeline_config
 def test_export_graph_with_moving_averages(self):
   """Exports with EMA enabled, then checks the saved model and variables."""
   temp_dir = self.get_temp_dir()
   ckpt_prefix = os.path.join(temp_dir, 'model.ckpt')
   self._save_checkpoint_from_mock_model(ckpt_prefix,
                                         use_moving_averages=True)
   export_dir = os.path.join(temp_dir, 'output')
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel()
     config = pipeline_pb2.TrainEvalPipelineConfig()
     config.eval_config.use_moving_averages = True
     exporter.export_inference_graph(
         input_type='image_tensor',
         pipeline_config=config,
         trained_checkpoint_prefix=ckpt_prefix,
         output_directory=export_dir)
     saved_model_pb = os.path.join(export_dir, 'saved_model', 'saved_model.pb')
     self.assertTrue(os.path.exists(saved_model_pb))
   # The exported variables must include at least the weights and the step.
   expected_variables = {'conv2d/bias', 'conv2d/kernel', 'global_step'}
   actual_variables = {
       name for name, _ in tf.train.list_variables(export_dir)}
   self.assertTrue(expected_variables.issubset(actual_variables))
# --- Example 27 ---
 def test_export_yields_correct_directory_structure(
         self, input_type='image_tensor'):
     """Verifies export_inference_graph writes the expected file layout."""
     tmp_dir = self.get_temp_dir()
     self._save_checkpoint_from_mock_model(tmp_dir)
     with mock.patch.object(model_builder, 'build',
                            autospec=True) as mock_builder:
         mock_builder.return_value = FakeModel()
         exporter_lib_v2.INPUT_BUILDER_UTIL_MAP[
             'model_build'] = mock_builder
         output_directory = os.path.join(tmp_dir, 'output')
         pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
         exporter_lib_v2.export_inference_graph(
             input_type=input_type,
             pipeline_config=pipeline_config,
             trained_checkpoint_dir=tmp_dir,
             output_directory=output_directory)
         # Every artifact the exporter is expected to write, relative to
         # output_directory.
         expected_artifacts = [
             ('saved_model', 'saved_model.pb'),
             ('saved_model', 'variables', 'variables.index'),
             ('saved_model', 'variables', 'variables.data-00000-of-00001'),
             ('checkpoint', 'ckpt-0.index'),
             ('checkpoint', 'ckpt-0.data-00000-of-00001'),
             ('pipeline.config',),
         ]
         for relative_parts in expected_artifacts:
             self.assertTrue(
                 os.path.exists(
                     os.path.join(output_directory, *relative_parts)))
# --- Example 28 ---
  def testMergingKeywordArguments(self):
    """Keyword args (train_steps/eval_steps) should override the file values."""
    desired_num_train_steps = 10
    desired_num_eval_steps = 1
    pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

    # Seed the on-disk pipeline with different (original) step counts so the
    # merge has something to visibly override.
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.train_config.num_steps = 100
    pipeline_config.eval_config.num_examples = 5
    _write_config(pipeline_config, pipeline_config_path)

    configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
    configs = config_util.merge_external_params_with_configs(
        configs,
        train_steps=desired_num_train_steps,
        eval_steps=desired_num_eval_steps)
    self.assertEqual(desired_num_train_steps,
                     configs["train_config"].num_steps)
    self.assertEqual(desired_num_eval_steps,
                     configs["eval_config"].num_examples)
# --- Example 29 ---
    def testOverWriteRetainOriginalImages(self):
        """Tests that `retain_original_images_in_eval` overrides eval_config.

        The override dict key `retain_original_images_in_eval` must flip
        `eval_config.retain_original_images` when merged through
        `merge_external_params_with_configs`.  (The original docstring
        incorrectly described this as a `train_shuffle` test.)
        """
        original_retain_original_images = True
        desired_retain_original_images = False

        pipeline_config_path = os.path.join(self.get_temp_dir(),
                                            "pipeline.config")
        pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
        pipeline_config.eval_config.retain_original_images = (
            original_retain_original_images)
        _write_config(pipeline_config, pipeline_config_path)

        configs = config_util.get_configs_from_pipeline_file(
            pipeline_config_path)
        override_dict = {
            "retain_original_images_in_eval": desired_retain_original_images
        }
        configs = config_util.merge_external_params_with_configs(
            configs, kwargs_dict=override_dict)
        retain_original_images = configs["eval_config"].retain_original_images
        self.assertEqual(desired_retain_original_images,
                         retain_original_images)
# --- Example 30 ---
    def test_export_and_run_inference_with_tf_example(self):
        """Exports a mocked detection model, then runs inference on a tf.Example.

        End-to-end check: export_inference_graph freezes a graph from a
        checkpoint, the frozen graph is reloaded, fed a serialized tf.Example
        built from a 4x4 image, and the detection outputs are compared against
        fixed expected values (presumably the constants FakeModel emits --
        confirm against FakeModel's definition).
        """
        checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
        # Moving averages disabled both here and in eval_config below.
        self._save_checkpoint_from_mock_model(checkpoint_path,
                                              use_moving_averages=False)
        inference_graph_path = os.path.join(self.get_temp_dir(),
                                            'exported_graph.pb')
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            # Masks enabled so the exported graph exposes 'detection_masks:0'.
            mock_builder.return_value = FakeModel(add_detection_masks=True)
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            pipeline_config.eval_config.use_moving_averages = False
            exporter.export_inference_graph(
                input_type='tf_example',
                pipeline_config=pipeline_config,
                checkpoint_path=checkpoint_path,
                inference_graph_path=inference_graph_path)

        inference_graph = self._load_inference_graph(inference_graph_path)
        with self.test_session(graph=inference_graph) as sess:
            # Look up the exported input placeholder and output tensors by name.
            tf_example = inference_graph.get_tensor_by_name('tf_example:0')
            boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
            scores = inference_graph.get_tensor_by_name('detection_scores:0')
            classes = inference_graph.get_tensor_by_name('detection_classes:0')
            masks = inference_graph.get_tensor_by_name('detection_masks:0')
            num_detections = inference_graph.get_tensor_by_name(
                'num_detections:0')
            # Feed one serialized tf.Example built from an all-ones uint8 image.
            (boxes, scores, classes, masks, num_detections) = sess.run(
                [boxes, scores, classes, masks, num_detections],
                feed_dict={
                    tf_example:
                    self._create_tf_example(
                        np.ones((4, 4, 3)).astype(np.uint8))
                })
            self.assertAllClose(boxes,
                                [[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]])
            self.assertAllClose(scores, [[0.7, 0.6]])
            self.assertAllClose(classes, [[1, 2]])
            self.assertAllClose(masks, np.arange(32).reshape([2, 4, 4]))
            self.assertAllClose(num_detections, [2])