Example #1
    def test_export_and_run_inference_with_image_tensor(self):
        checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
        self._save_checkpoint_from_mock_model(checkpoint_path,
                                              use_moving_averages=False)
        inference_graph_path = os.path.join(self.get_temp_dir(),
                                            'exported_graph.pb')
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            mock_builder.return_value = FakeModel(num_classes=1)
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            pipeline_config.eval_config.use_moving_averages = False
            exporter.export_inference_graph(
                input_type='image_tensor',
                pipeline_config=pipeline_config,
                checkpoint_path=checkpoint_path,
                inference_graph_path=inference_graph_path)

        inference_graph = self._load_inference_graph(inference_graph_path)
        with self.test_session(graph=inference_graph) as sess:
            image_tensor = inference_graph.get_tensor_by_name('image_tensor:0')
            boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
            scores = inference_graph.get_tensor_by_name('detection_scores:0')
            classes = inference_graph.get_tensor_by_name('detection_classes:0')
            num_detections = inference_graph.get_tensor_by_name(
                'num_detections:0')
            (boxes, scores, classes, num_detections) = sess.run(
                [boxes, scores, classes, num_detections],
                feed_dict={
                    image_tensor: np.ones((1, 4, 4, 3)).astype(np.uint8)
                })
            self.assertAllClose(boxes,
                                [[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]])
            self.assertAllClose(scores, [[0.7, 0.6]])
            self.assertAllClose(classes, [[1, 2]])
            self.assertAllClose(num_detections, [2])
Example #2
def main(_):
    print("starting script . . .")

    if not checkIfNecessaryPathsAndFilesExist():
        return
    # end if

    print("calling TrainEvalPipelineConfig() . . .")
    trainEvalPipelineConfig = pipeline_pb2.TrainEvalPipelineConfig()

    print("checking and merging " + os.path.basename(PIPELINE_CONFIG_LOC) +
          " into trainEvalPipelineConfig . . .")
    with tf.gfile.GFile(PIPELINE_CONFIG_LOC, 'r') as f:
        text_format.Merge(f.read(), trainEvalPipelineConfig)
    # end with

    print("calculating input shape . . .")
    if INPUT_SHAPE:
        input_shape = [
            int(dim) if dim != '-1' else None for dim in INPUT_SHAPE.split(',')
        ]
    else:
        input_shape = None
    # end if

    print("calling export_inference_graph() . . .")
    exporter.export_inference_graph(INPUT_TYPE, trainEvalPipelineConfig,
                                    TRAINED_CHECKPOINT_PREFIX_LOC, OUTPUT_DIR,
                                    input_shape)

    print("done !!")
Example #3
 def test_export_model_with_quantization_nodes(self):
   tmp_dir = self.get_temp_dir()
   trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
   self._save_checkpoint_from_mock_model(
       trained_checkpoint_prefix,
       use_moving_averages=False,
       enable_quantization=True)
   output_directory = os.path.join(tmp_dir, 'output')
   inference_graph_path = os.path.join(output_directory,
                                       'inference_graph.pbtxt')
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel()
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     text_format.Merge(
         """graph_rewriter {
              quantization {
                delay: 50000
                activation_bits: 8
                weight_bits: 8
              }
            }""", pipeline_config)
     exporter.export_inference_graph(
         input_type='image_tensor',
         pipeline_config=pipeline_config,
         trained_checkpoint_prefix=trained_checkpoint_prefix,
         output_directory=output_directory,
         write_inference_graph=True)
   self._load_inference_graph(inference_graph_path, is_binary=False)
   has_quant_nodes = False
   for v in tf.global_variables():
     if v.op.name.endswith('act_quant/min'):
       has_quant_nodes = True
       break
   self.assertTrue(has_quant_nodes)
Example #4
def main(_):
    # Assumes module-level settings: output_directory, pipeline_config_path,
    # config_override, input_type, trained_checkpoint_prefix, use_side_inputs,
    # the side-input descriptors, and write_inference_graph.
    if not os.path.exists(output_directory):
        os.mkdir(output_directory)
    input_shape = None
    additional_output_tensor_names = None

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(config_override, pipeline_config)
    if input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None for dim in input_shape.split(',')
        ]
    else:
        input_shape = None
    if use_side_inputs:
        side_input_shapes, side_input_names, side_input_types = (
            exporter.parse_side_inputs(side_input_shapes, side_input_names,
                                       side_input_types))
    else:
        side_input_shapes = None
        side_input_names = None
        side_input_types = None
    if additional_output_tensor_names:
        additional_output_tensor_names = list(
            additional_output_tensor_names.split(','))
    else:
        additional_output_tensor_names = None
    exporter.export_inference_graph(input_type, pipeline_config, trained_checkpoint_prefix,
                                    output_directory, input_shape=input_shape,
                                    write_inference_graph=write_inference_graph,
                                    additional_output_tensor_names=additional_output_tensor_names,
                                    use_side_inputs=use_side_inputs,
                                    side_input_shapes=side_input_shapes,
                                    side_input_names=side_input_names,
                                    side_input_types=side_input_types)
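
A note on the '-1' convention used by the input-shape parsing above and in most of the examples below: each comma-separated dimension is converted to an int, with '-1' mapped to None so that dimension stays dynamic in the exported graph. A quick self-contained illustration:

# '1,-1,-1,3' -> [1, None, None, 3]: fixed batch of 1, dynamic height/width, 3 channels.
input_shape = [int(dim) if dim != '-1' else None for dim in '1,-1,-1,3'.split(',')]
assert input_shape == [1, None, None, 3]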
Example #5
def main(_):
    # Clear out previous output files before creating new ones
    shutil.rmtree(FLAGS.output_directory)
    os.mkdir(FLAGS.output_directory)
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(FLAGS.config_override, pipeline_config)
    if FLAGS.input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in FLAGS.input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph(
        FLAGS.input_type,
        pipeline_config,
        FLAGS.trained_checkpoint_prefix,
        FLAGS.output_directory,
        input_shape=input_shape,
        write_inference_graph=FLAGS.write_inference_graph)
    if FLAGS.if_retrain:
        shutil.rmtree(model_u)  # model_u: assumed module-level retraining directory
        os.mkdir(model_u)
Example #6
    def test_export_graph_with_fixed_size_image_tensor_input(self):
        input_shape = [1, 320, 320, 3]

        tmp_dir = self.get_temp_dir()
        trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
        self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                              use_moving_averages=False)
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            mock_builder.return_value = FakeModel()
            output_directory = os.path.join(tmp_dir, 'output')
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            pipeline_config.eval_config.use_moving_averages = False
            exporter.export_inference_graph(
                input_type='image_tensor',
                pipeline_config=pipeline_config,
                trained_checkpoint_prefix=trained_checkpoint_prefix,
                output_directory=output_directory,
                input_shape=input_shape)
            saved_model_path = os.path.join(output_directory, 'saved_model')
            self.assertTrue(
                os.path.exists(os.path.join(saved_model_path,
                                            'saved_model.pb')))

        with tf.Graph().as_default() as od_graph:
            with self.test_session(graph=od_graph) as sess:
                meta_graph = tf.saved_model.loader.load(
                    sess, [tf.saved_model.tag_constants.SERVING],
                    saved_model_path)
                signature = meta_graph.signature_def['serving_default']
                input_tensor_name = signature.inputs['inputs'].name
                image_tensor = od_graph.get_tensor_by_name(input_tensor_name)
                self.assertSequenceEqual(image_tensor.get_shape().as_list(),
                                         input_shape)
Example #7
    def test_export_graph_saves_pipeline_file(self):
        tmp_dir = self.get_temp_dir()
        trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
        self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                              use_moving_averages=True)
        output_directory = os.path.join(tmp_dir, 'output')
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            mock_builder.return_value = FakeModel()
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            exporter.export_inference_graph(
                input_type='image_tensor',
                pipeline_config=pipeline_config,
                trained_checkpoint_prefix=trained_checkpoint_prefix,
                output_directory=output_directory)
            expected_pipeline_path = os.path.join(output_directory,
                                                  'pipeline.config')
            self.assertTrue(os.path.exists(expected_pipeline_path))

            written_pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            with tf.gfile.GFile(expected_pipeline_path, 'r') as f:
                proto_str = f.read()
                text_format.Merge(proto_str, written_pipeline_config)
                self.assertProtoEquals(pipeline_config,
                                       written_pipeline_config)
Example #8
def export_graph(pipeline_path, model_path, export_dir):
    pipeline_config_path = pipeline_path
    trained_checkpoint_prefix = model_path
    output_directory = export_dir
    config_override = ''
    input_shape = None

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(config_override, pipeline_config)
    if input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None for dim in input_shape.split(',')
        ]
    else:
        input_shape = None

    exporter.export_inference_graph('image_tensor',
                                    pipeline_config,
                                    trained_checkpoint_prefix,
                                    output_directory,
                                    input_shape=input_shape,
                                    write_inference_graph=False)
    print("Successfully created inference graph!")
Example #9
def main(_):
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(FLAGS.config_override, pipeline_config)
    if FLAGS.input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in FLAGS.input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph(
        FLAGS.input_type,
        pipeline_config,
        FLAGS.trained_checkpoint_prefix,
        FLAGS.output_directory,
        input_shape=input_shape,
        write_inference_graph=FLAGS.write_inference_graph)

    # TF Object Detection saves the model without a version number, but
    # TensorFlow Model Server expects versioned saved models for serving,
    # so move the saved model into a version directory.
    tf.gfile.Rename(FLAGS.output_directory + '/saved_model',
                    FLAGS.output_directory + '/1')
    with tf.gfile.GFile(FLAGS.output_directory + '/1/variables/Dummy',
                        'w') as file:
        file.write("dummy file")
Example #10
def main(_):
    # Reinstate export path if needed

    if os.listdir(FLAGS.output_directory):
        shutil.rmtree(FLAGS.output_directory)
        os.mkdir(FLAGS.output_directory)

    # Find the latest checkpoint
    checkpoint_path = sorted(
        glob.glob(FLAGS.trained_checkpoint_path + "/*.ckpt*.data*"))[-1]
    checkpoint_path = re.match(r"\S*\.ckpt-([0-9]*)", checkpoint_path).group()

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
        text_format.Merge(FLAGS.config_override, pipeline_config)
    if FLAGS.input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in FLAGS.input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
                                    checkpoint_path,
                                    FLAGS.output_directory, input_shape)
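
The glob-and-regex lookup above can usually be replaced with tf.train.latest_checkpoint, which reads the checkpoint state file TensorFlow maintains in the training directory; a minimal sketch under that assumption:

# Assumes a standard `checkpoint` state file exists in the training directory.
checkpoint_path = tf.train.latest_checkpoint(FLAGS.trained_checkpoint_path)
if checkpoint_path is None:
    raise ValueError('no checkpoint found in ' + FLAGS.trained_checkpoint_path)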
Example #11
 def test_export_graph_with_moving_averages(self):
     tmp_dir = self.get_temp_dir()
     trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
     self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                           use_moving_averages=True)
     output_directory = os.path.join(tmp_dir, 'output')
     with mock.patch.object(model_builder, 'build',
                            autospec=True) as mock_builder:
         mock_builder.return_value = FakeModel()
         pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
         pipeline_config.eval_config.use_moving_averages = True
         exporter.export_inference_graph(
             input_type='image_tensor',
             pipeline_config=pipeline_config,
             trained_checkpoint_prefix=trained_checkpoint_prefix,
             output_directory=output_directory)
         self.assertTrue(
             os.path.exists(
                 os.path.join(output_directory, 'saved_model',
                              'saved_model.pb')))
     expected_variables = set(
         ['conv2d/bias', 'conv2d/kernel', 'global_step'])
     actual_variables = set([
         var_name
         for var_name, _ in tf.train.list_variables(output_directory)
     ])
     self.assertTrue(expected_variables.issubset(actual_variables))
Example #12
 def test_export_model_with_detection_only_nodes(self):
     tmp_dir = self.get_temp_dir()
     trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
     self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                           use_moving_averages=True)
     output_directory = os.path.join(tmp_dir, 'output')
     inference_graph_path = os.path.join(output_directory,
                                         'frozen_inference_graph.pb')
     with mock.patch.object(model_builder, 'build',
                            autospec=True) as mock_builder:
         mock_builder.return_value = FakeModel(add_detection_masks=False)
         pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
         exporter.export_inference_graph(
             input_type='image_tensor',
             pipeline_config=pipeline_config,
             trained_checkpoint_prefix=trained_checkpoint_prefix,
             output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     with self.test_session(graph=inference_graph):
         inference_graph.get_tensor_by_name('image_tensor:0')
         inference_graph.get_tensor_by_name('detection_boxes:0')
         inference_graph.get_tensor_by_name('detection_scores:0')
         inference_graph.get_tensor_by_name('detection_classes:0')
         inference_graph.get_tensor_by_name('num_detections:0')
         with self.assertRaises(KeyError):
             inference_graph.get_tensor_by_name('detection_masks:0')
Example #13
  def test_export_graph_with_fixed_size_image_tensor_input(self):
    input_shape = [1, 320, 320, 3]

    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(
        trained_checkpoint_prefix, use_moving_averages=False)
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel()
      output_directory = os.path.join(tmp_dir, 'output')
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='image_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory,
          input_shape=input_shape)
      saved_model_path = os.path.join(output_directory, 'saved_model')
      self.assertTrue(
          os.path.exists(os.path.join(saved_model_path, 'saved_model.pb')))

    with tf.Graph().as_default() as od_graph:
      with self.test_session(graph=od_graph) as sess:
        meta_graph = tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], saved_model_path)
        signature = meta_graph.signature_def['serving_default']
        input_tensor_name = signature.inputs['inputs'].name
        image_tensor = od_graph.get_tensor_by_name(input_tensor_name)
        self.assertSequenceEqual(image_tensor.get_shape().as_list(),
                                 input_shape)
Example #14
 def test_export_model_with_detection_only_nodes(self):
   tmp_dir = self.get_temp_dir()
   trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
   self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                         use_moving_averages=True)
   output_directory = os.path.join(tmp_dir, 'output')
   inference_graph_path = os.path.join(output_directory,
                                       'frozen_inference_graph.pb')
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel(add_detection_masks=False)
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     exporter.export_inference_graph(
         input_type='image_tensor',
         pipeline_config=pipeline_config,
         trained_checkpoint_prefix=trained_checkpoint_prefix,
         output_directory=output_directory)
   inference_graph = self._load_inference_graph(inference_graph_path)
   with self.test_session(graph=inference_graph):
     inference_graph.get_tensor_by_name('image_tensor:0')
     inference_graph.get_tensor_by_name('detection_boxes:0')
     inference_graph.get_tensor_by_name('detection_scores:0')
     inference_graph.get_tensor_by_name('detection_classes:0')
     inference_graph.get_tensor_by_name('num_detections:0')
     with self.assertRaises(KeyError):
       inference_graph.get_tensor_by_name('detection_masks:0')
Example #15
  def test_export_and_run_inference_with_tf_example(self):
    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
    self._save_checkpoint_from_mock_model(checkpoint_path,
                                          use_moving_averages=False)
    inference_graph_path = os.path.join(self.get_temp_dir(),
                                        'exported_graph.pb')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(num_classes=1)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='tf_example',
          pipeline_config=pipeline_config,
          checkpoint_path=checkpoint_path,
          inference_graph_path=inference_graph_path)

    inference_graph = self._load_inference_graph(inference_graph_path)
    with self.test_session(graph=inference_graph) as sess:
      tf_example = inference_graph.get_tensor_by_name('tf_example:0')
      boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
      scores = inference_graph.get_tensor_by_name('detection_scores:0')
      classes = inference_graph.get_tensor_by_name('detection_classes:0')
      num_detections = inference_graph.get_tensor_by_name('num_detections:0')
      (boxes, scores, classes, num_detections) = sess.run(
          [boxes, scores, classes, num_detections],
          feed_dict={tf_example: self._create_tf_example(
              np.ones((4, 4, 3)).astype(np.uint8))})
      self.assertAllClose(boxes, [[0.0, 0.0, 0.5, 0.5],
                                  [0.5, 0.5, 0.8, 0.8]])
      self.assertAllClose(scores, [[0.7, 0.6]])
      self.assertAllClose(classes, [[1, 2]])
      self.assertAllClose(num_detections, [2])
Example #16
def main(_):
    print("starting script . . .")

    if not checkIfNecessaryPathsAndFilesExist():
        return
    # end if

    print("calling TrainEvalPipelineConfig() . . .")
    trainEvalPipelineConfig = pipeline_pb2.TrainEvalPipelineConfig()

    print("checking and merging " + os.path.basename(PIPELINE_CONFIG_LOC) + " into trainEvalPipelineConfig . . .")
    with tf.gfile.GFile(PIPELINE_CONFIG_LOC, 'r') as f:
        text_format.Merge(f.read(), trainEvalPipelineConfig)
    # end with

    print("calculating input shape . . .")
    if INPUT_SHAPE:
        input_shape = [
            int(dim) if dim != '-1' else None for dim in INPUT_SHAPE.split(',')
        ]
    else:
        input_shape = None
    # end if

    print("calling export_inference_graph() . . .")
    exporter.export_inference_graph(INPUT_TYPE, trainEvalPipelineConfig,
                                    TRAINED_CHECKPOINT_PREFIX_LOC, OUTPUT_DIR,
                                    input_shape)

    print("done !!")
Example #17
  def test_export_saved_model_and_run_inference(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=False)
    output_directory = os.path.join(tmp_dir, 'output')
    saved_model_path = os.path.join(output_directory, 'saved_model')

    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(
          add_detection_keypoints=True, add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='tf_example',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)

    tf_example_np = np.hstack([self._create_tf_example(
        np.ones((4, 4, 3)).astype(np.uint8))] * 2)
    with tf.Graph().as_default() as od_graph:
      with self.test_session(graph=od_graph) as sess:
        meta_graph = tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], saved_model_path)

        signature = meta_graph.signature_def['serving_default']
        input_tensor_name = signature.inputs['inputs'].name
        tf_example = od_graph.get_tensor_by_name(input_tensor_name)

        boxes = od_graph.get_tensor_by_name(
            signature.outputs['detection_boxes'].name)
        scores = od_graph.get_tensor_by_name(
            signature.outputs['detection_scores'].name)
        classes = od_graph.get_tensor_by_name(
            signature.outputs['detection_classes'].name)
        keypoints = od_graph.get_tensor_by_name(
            signature.outputs['detection_keypoints'].name)
        masks = od_graph.get_tensor_by_name(
            signature.outputs['detection_masks'].name)
        num_detections = od_graph.get_tensor_by_name(
            signature.outputs['num_detections'].name)

        (boxes_np, scores_np, classes_np, keypoints_np, masks_np,
         num_detections_np) = sess.run(
             [boxes, scores, classes, keypoints, masks, num_detections],
             feed_dict={tf_example: tf_example_np})
        self.assertAllClose(boxes_np, [[[0.0, 0.0, 0.5, 0.5],
                                        [0.5, 0.5, 0.8, 0.8]],
                                       [[0.5, 0.5, 1.0, 1.0],
                                        [0.0, 0.0, 0.0, 0.0]]])
        self.assertAllClose(scores_np, [[0.7, 0.6],
                                        [0.9, 0.0]])
        self.assertAllClose(classes_np, [[1, 2],
                                         [2, 1]])
        self.assertAllClose(keypoints_np, np.arange(48).reshape([2, 2, 6, 2]))
        self.assertAllClose(masks_np, np.arange(64).reshape([2, 2, 4, 4]))
        self.assertAllClose(num_detections_np, [2, 1])
Example #18
def main(_):
    with open('system_dict.json') as json_file:
        args = json.load(json_file)

    if (os.path.isdir(args["output_directory"])):
        os.system("rm -r " + args["output_directory"])

    os.mkdir(args["output_directory"])

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(args["pipeline_config_path"], 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge(args["config_override"], pipeline_config)

    if args["input_shape"]:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in args["input_shape"].split(',')
        ]
    else:
        input_shape = None

    if args["input_shape_flops"]:
        input_shape_flops = [
            int(dim) if dim != '-1' else None
            for dim in args["input_shape_flops"].split(',')
        ]
    else:
        input_shape_flops = None

    if args["use_side_inputs"]:
        side_input_shapes, side_input_names, side_input_types = (
            exporter.parse_side_inputs(args["side_input_shapes"],
                                       args["side_input_names"],
                                       args["side_input_types"]))
    else:
        side_input_shapes = None
        side_input_names = None
        side_input_types = None

    if args["additional_output_tensor_names"]:
        additional_output_tensor_names = list(
            args["additional_output_tensor_names"].split(','))
    else:
        additional_output_tensor_names = None

    exporter.export_inference_graph(
        args["input_type"],
        pipeline_config,
        args["trained_checkpoint_prefix"],
        args["output_directory"],
        input_shape=input_shape,
        write_inference_graph=args["write_inference_graph"],
        additional_output_tensor_names=additional_output_tensor_names,
        use_side_inputs=args["use_side_inputs"],
        side_input_shapes=side_input_shapes,
        side_input_names=side_input_names,
        side_input_types=side_input_types)
Example #19
def export_model(pipeline_config, checkpoint_prefix):
    export_dir = os.path.join(FLAGS.output_dir, 'model')
    if os.path.exists(export_dir):  # there's no overwrite option for exporting
        shutil.rmtree(export_dir)

    exporter.export_inference_graph('image_tensor', pipeline_config,
                                    checkpoint_prefix, export_dir)
Example #20
def build_detection_graph(config,
                          checkpoint,
                          batch_size=1,
                          score_threshold=None,
                          force_nms_cpu=True,
                          replace_relu6=True,
                          remove_assert=True,
                          input_shape=None,
                          output_dir='.generated_model'):
    """Builds a frozen graph for a pre-trained object detection model"""

    if os.path.isdir(output_dir):
        subprocess.call(['rm', '-rf', output_dir])
    os.mkdir(output_dir)

    config_path = config
    checkpoint_path = checkpoint

    # parse config from file
    config = pipeline_pb2.TrainEvalPipelineConfig()
    with open(config_path, 'r') as f:
        text_format.Merge(f.read(), config, allow_unknown_extension=True)

    tf_config = tf.ConfigProto()
    tf_config.gpu_options.allow_growth = True

    # export inference graph to file (initial)
    with tf.Session(config=tf_config) as tf_sess:
        with tf.Graph().as_default() as tf_graph:
            exporter.export_inference_graph(
                'image_tensor',
                config,
                checkpoint_path,
                output_dir,
                input_shape=[batch_size, None, None, 3])

    # read frozen graph from file
    frozen_graph = tf.GraphDef()
    with open(os.path.join(output_dir, FROZEN_GRAPH_NAME), 'rb') as f:
        frozen_graph.ParseFromString(f.read())

    # apply graph modifications
    if force_nms_cpu:
        frozen_graph = f_force_nms_cpu(frozen_graph)
    if replace_relu6:
        frozen_graph = f_replace_relu6(frozen_graph)
    if remove_assert:
        frozen_graph = f_remove_assert(frozen_graph)

    # get input names
    # TODO: handle mask_rcnn
    input_names = [INPUT_NAME]
    output_names = [BOXES_NAME, CLASSES_NAME, SCORES_NAME, NUM_DETECTIONS_NAME]

    # remove temporary directory
    subprocess.call(['rm', '-rf', output_dir])

    return frozen_graph, input_names, output_names
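
A hypothetical call to build_detection_graph (the paths are placeholders, and FROZEN_GRAPH_NAME plus the *_NAME output constants are assumed to be defined at module level):

# Sketch only: config and checkpoint paths are placeholders.
frozen_graph, input_names, output_names = build_detection_graph(
    config='pipeline.config',
    checkpoint='model.ckpt-50000',
    batch_size=1,
    score_threshold=0.3)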
Example #21
def export_inference_graph(trained_checkpoint_prefix,
                           output_directory='exported_model',
                           pipeline_config_path='data/ssd.config',
                           input_type='image_tensor',
                           input_shape=None,
                           num_classes=None):
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    if num_classes is not None:
        pipeline_config.model.ssd.num_classes = num_classes
    exporter.export_inference_graph(
        input_type, pipeline_config, trained_checkpoint_prefix, output_directory, input_shape)
    copyfile('data/label_map.pbtxt',
             os.path.join(output_directory, 'label_map.pbtxt'))
Example #22
    def test_export_and_run_inference_with_encoded_image_string_tensor(self):
        tmp_dir = self.get_temp_dir()
        trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
        self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                              use_moving_averages=True)
        output_directory = os.path.join(tmp_dir, 'output')
        inference_graph_path = os.path.join(output_directory,
                                            'frozen_inference_graph.pb')
        with mock.patch.object(model_builder, 'build',
                               autospec=True) as mock_builder:
            mock_builder.return_value = FakeModel(add_detection_keypoints=True,
                                                  add_detection_masks=True)
            pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
            pipeline_config.eval_config.use_moving_averages = False
            exporter.export_inference_graph(
                input_type='encoded_image_string_tensor',
                pipeline_config=pipeline_config,
                trained_checkpoint_prefix=trained_checkpoint_prefix,
                output_directory=output_directory)

        inference_graph = self._load_inference_graph(inference_graph_path)
        jpg_image_str = self._create_encoded_image_string(
            np.ones((4, 4, 3)).astype(np.uint8), 'jpg')
        png_image_str = self._create_encoded_image_string(
            np.ones((4, 4, 3)).astype(np.uint8), 'png')
        with self.test_session(graph=inference_graph) as sess:
            image_str_tensor = inference_graph.get_tensor_by_name(
                'encoded_image_string_tensor:0')
            boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
            scores = inference_graph.get_tensor_by_name('detection_scores:0')
            classes = inference_graph.get_tensor_by_name('detection_classes:0')
            keypoints = inference_graph.get_tensor_by_name(
                'detection_keypoints:0')
            masks = inference_graph.get_tensor_by_name('detection_masks:0')
            num_detections = inference_graph.get_tensor_by_name(
                'num_detections:0')
            for image_str in [jpg_image_str, png_image_str]:
                image_str_batch_np = np.hstack([image_str] * 2)
                (boxes_np, scores_np, classes_np, keypoints_np, masks_np,
                 num_detections_np) = sess.run(
                     [boxes, scores, classes, keypoints, masks, num_detections],
                     feed_dict={image_str_tensor: image_str_batch_np})
                self.assertAllClose(
                    boxes_np, [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]],
                               [[0.5, 0.5, 1.0, 1.0], [0.0, 0.0, 0.0, 0.0]]])
                self.assertAllClose(scores_np, [[0.7, 0.6], [0.9, 0.0]])
                self.assertAllClose(classes_np, [[1, 2], [2, 1]])
                self.assertAllClose(keypoints_np,
                                    np.arange(48).reshape([2, 2, 6, 2]))
                self.assertAllClose(masks_np,
                                    np.arange(64).reshape([2, 2, 4, 4]))
                self.assertAllClose(num_detections_np, [2, 1])
Example #23
def main(_):
    assert FLAGS.pipeline_config_path, 'TrainEvalPipelineConfig missing.'
    assert FLAGS.inference_graph_path, 'Inference graph path missing.'
    assert FLAGS.input_type, 'Input type missing.'
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
                                    FLAGS.checkpoint_path,
                                    FLAGS.inference_graph_path)
Example #24
def main(_):

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    text_format.Merge('', pipeline_config)
    input_shape = None
    exporter.export_inference_graph('image_tensor', pipeline_config,
                                    trained_checkpoint_prefix,
                                    module_out_put_dir, input_shape)
Example #25
def main(_):
  assert FLAGS.pipeline_config_path, 'TrainEvalPipelineConfig missing.'
  assert FLAGS.inference_graph_path, 'Inference graph path missing.'
  assert FLAGS.input_type, 'Input type missing.'
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)
  exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
                                  FLAGS.checkpoint_path,
                                  FLAGS.inference_graph_path)
Example #26
def build_frozen_graph(frozen_graph_name,
                       config,
                       checkpoint,
                       batch_size=1,
                       score_threshold=None,
                       input_shape=None,
                       output_dir='.generated_model'):
    """Builds a frozen graph for a pre-trained object detection model"""

    config_path = config
    checkpoint_path = checkpoint

    # parse config from file
    config = pipeline_pb2.TrainEvalPipelineConfig()
    with open(config_path, 'r') as f:
        text_format.Merge(f.read(), config, allow_unknown_extension=True)

    # override some config parameters
    if config.model.HasField('ssd'):
        config.model.ssd.feature_extractor.override_base_feature_extractor_hyperparams = True
        if score_threshold is not None:
            config.model.ssd.post_processing.batch_non_max_suppression.score_threshold = score_threshold
        if input_shape is not None:
            config.model.ssd.image_resizer.fixed_shape_resizer.height = input_shape[0]
            config.model.ssd.image_resizer.fixed_shape_resizer.width = input_shape[1]
    elif config.model.HasField('faster_rcnn'):
        if score_threshold is not None:
            config.model.faster_rcnn.second_stage_post_processing.score_threshold = score_threshold
        if input_shape is not None:
            config.model.faster_rcnn.image_resizer.fixed_shape_resizer.height = input_shape[0]
            config.model.faster_rcnn.image_resizer.fixed_shape_resizer.width = input_shape[1]

    if os.path.isdir(output_dir):
        subprocess.call(['rm', '-rf', output_dir])

    tf_config = tf.ConfigProto()
    tf_config.gpu_options.allow_growth = True

    # export inference graph to file (initial)
    with tf.Session(config=tf_config) as tf_sess:
        with tf.Graph().as_default() as tf_graph:
            exporter.export_inference_graph(
                'image_tensor',
                config,
                checkpoint_path,
                output_dir,
                input_shape=[batch_size, None, None, 3])

    # remove temporary directory after saving frozen graph output
    os.rename(os.path.join(output_dir, FROZEN_GRAPH_NAME), frozen_graph_name)
    subprocess.call(['rm', '-rf', output_dir])
Example #27
def index():
    print('project id:', PROJECT_ID)
    envelope = request.get_json()
    if not envelope:
        msg = 'no Pub/Sub message received'
        print('error: {}'.format(msg))
        return 'Bad Request: {}'.format(msg), 400

    if not isinstance(envelope, dict) or 'message' not in envelope:
        msg = 'invalid Pub/Sub message format'
        print('error: {}'.format(msg))
        return 'Bad Request: {}'.format(msg), 400

    pubsub_message = envelope['message']

    if isinstance(pubsub_message, dict) and 'data' in pubsub_message:
        data = json.loads(
            base64.b64decode(pubsub_message['data']).decode('utf-8').strip())

    tf.keras.utils.get_file('/model', data['url'], untar=True)
    tar = tarfile.open('/model.tar.gz')
    tar.extractall('/model')

    model_dir = os.path.join('/model', os.listdir('/model')[0])
    print(model_dir)
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(os.path.join(model_dir, 'pipeline.config'), 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    exporter.export_inference_graph(data['input_type'],
                                    pipeline_config,
                                    os.path.join(model_dir, 'model.ckpt'),
                                    '/exported_model',
                                    input_shape=None,
                                    write_inference_graph=False)

    bucket = storage_client.get_bucket(BUCKET_NAME)
    blob = bucket.blob(os.path.join(data['name'],
                                    'saved_model/saved_model.pb'))

    blob.upload_from_filename('/exported_model/saved_model/saved_model.pb')

    # TODO: create AI platform model
    result = create_version(PROJECT_ID,
                            BUCKET_NAME,
                            'm1',
                            data['name'], [REGION],
                            logging=True)
    print('result', result)

    # Flush the stdout to avoid log buffering.
    sys.stdout.flush()

    return ('', 204)
Example #28
def main():
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(CONFIG_PATH, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)

    input_shape = None
    exporter.export_inference_graph('image_tensor',
                                    pipeline_config,
                                    TRAINED_CHECKPOINT,
                                    OUTPUT_DIR,
                                    input_shape=input_shape,
                                    write_inference_graph=False)
Example #29
def go(input_type='image_tensor',
       pipeline_config_path=config.PIPELINE_CONFIG_PATH,
       output_directory=config.OUTPUT_INFERENCE_GRAPH_PATH,
       trained_checkpoint_prefix=None):

  if trained_checkpoint_prefix is None:
    trained_checkpoint_prefix = most_trained_checkpoint(config.TRAIN_PATH)

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)

  exporter.export_inference_graph(
      input_type, pipeline_config, trained_checkpoint_prefix,
      output_directory)
Example #30
def main(_):
  assert FLAGS.pipeline_config_path, '`pipeline_config_path` is missing'
  assert FLAGS.trained_checkpoint_prefix, (
         '`trained_checkpoint_prefix` is missing')
  assert FLAGS.output_directory, '`output_directory` is missing'

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)
  exporter.export_inference_graph(
      FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_prefix,
      FLAGS.output_directory)
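
The FLAGS-based main() functions in these examples presuppose flag definitions earlier in the module. With TF1's tf.app.flags they would look roughly like this (defaults are illustrative, not taken from the snippets):

flags = tf.app.flags
flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node.')
flags.DEFINE_string('pipeline_config_path', None,
                    'Path to a pipeline_pb2.TrainEvalPipelineConfig text proto.')
flags.DEFINE_string('trained_checkpoint_prefix', None,
                    'Checkpoint prefix, e.g. path/to/model.ckpt-12345.')
flags.DEFINE_string('output_directory', None, 'Directory to write outputs to.')
FLAGS = flags.FLAGS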
Example #31
def export_model(ckpt, pipeline_config, export_dir):
    config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config, 'r') as f:
        text_format.Merge(f.read(), config)
    text_format.Merge('', config)
    input_shape = None
    input_type = 'image_tensor'
    exporter.export_inference_graph(input_type,
                                    config,
                                    ckpt,
                                    export_dir,
                                    input_shape=input_shape)
Example #32
def main(_):
  assert FLAGS.pipeline_config_path, '`pipeline_config_path` is missing'
  assert FLAGS.trained_checkpoint_prefix, (
         '`trained_checkpoint_prefix` is missing')
  assert FLAGS.output_directory, '`output_directory` is missing'

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)
  exporter.export_inference_graph(
      FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_prefix,
      FLAGS.output_directory)
Example #33
def export_graph(_, pipeline_config_path, trained_checkpoint_prefix,
                 output_directory):
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)

    input_shape = None
    exporter.export_inference_graph('image_tensor',
                                    pipeline_config,
                                    trained_checkpoint_prefix,
                                    output_directory,
                                    input_shape=input_shape,
                                    write_inference_graph=False)
Example #34
 def test_export_graph_with_tf_example_input(self):
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel(num_classes=1)
     inference_graph_path = os.path.join(self.get_temp_dir(),
                                         'exported_graph.pbtxt')
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = False
     exporter.export_inference_graph(
         input_type='tf_example',
         pipeline_config=pipeline_config,
         checkpoint_path=None,
         inference_graph_path=inference_graph_path)
Example #35
def main(_):
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
        text_format.Merge(f.read(), pipeline_config)
    if FLAGS.input_shape:
        input_shape = [
            int(dim) if dim != '-1' else None
            for dim in FLAGS.input_shape.split(',')
        ]
    else:
        input_shape = None
    exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
                                    FLAGS.trained_checkpoint_prefix,
                                    FLAGS.output_directory, input_shape)
Example #36
 def test_export_graph_with_encoded_image_string_input(self):
     with mock.patch.object(model_builder, 'build',
                            autospec=True) as mock_builder:
         mock_builder.return_value = FakeModel()
         inference_graph_path = os.path.join(self.get_temp_dir(),
                                             'exported_graph.pbtxt')
         pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
         pipeline_config.eval_config.use_moving_averages = False
         exporter.export_inference_graph(
             input_type='encoded_image_string_tensor',
             pipeline_config=pipeline_config,
             checkpoint_path=None,
             inference_graph_path=inference_graph_path)
Example #37
 def export(self):
     pipeline_config = TrainEvalPipelineConfig()
     Merge(self.config_path.read_text(), pipeline_config)
     last_ckpt = max(self.training_path.glob("model.ckpt-*.meta"),
                     key=_get_ckpt_number_from_file).with_suffix("")
     n_steps = last_ckpt.suffix.split("-")[-1]
     export_inference_graph(
         input_type="image_tensor",
         pipeline_config=pipeline_config,
         trained_checkpoint_prefix=str(last_ckpt),
         output_directory=str(PIPELINES_DIR / self.task /
                              f"{self.name}__{n_steps}_steps"),
     )
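
The helper _get_ckpt_number_from_file is not shown in the snippet; assuming checkpoint files named model.ckpt-<N>.meta and pathlib.Path arguments, a plausible implementation:

def _get_ckpt_number_from_file(ckpt_file):
    # Path('model.ckpt-12345.meta').stem == 'model.ckpt-12345' -> 12345
    return int(ckpt_file.stem.split('-')[-1])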
Example #38
  def test_export_and_run_inference_with_encoded_image_string_tensor(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=True)
    output_directory = os.path.join(tmp_dir, 'output')
    inference_graph_path = os.path.join(output_directory,
                                        'frozen_inference_graph.pb')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(
          add_detection_keypoints=True, add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='encoded_image_string_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)

    inference_graph = self._load_inference_graph(inference_graph_path)
    jpg_image_str = self._create_encoded_image_string(
        np.ones((4, 4, 3)).astype(np.uint8), 'jpg')
    png_image_str = self._create_encoded_image_string(
        np.ones((4, 4, 3)).astype(np.uint8), 'png')
    with self.test_session(graph=inference_graph) as sess:
      image_str_tensor = inference_graph.get_tensor_by_name(
          'encoded_image_string_tensor:0')
      boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
      scores = inference_graph.get_tensor_by_name('detection_scores:0')
      classes = inference_graph.get_tensor_by_name('detection_classes:0')
      keypoints = inference_graph.get_tensor_by_name('detection_keypoints:0')
      masks = inference_graph.get_tensor_by_name('detection_masks:0')
      num_detections = inference_graph.get_tensor_by_name('num_detections:0')
      for image_str in [jpg_image_str, png_image_str]:
        image_str_batch_np = np.hstack([image_str] * 2)
        (boxes_np, scores_np, classes_np, keypoints_np, masks_np,
         num_detections_np) = sess.run(
             [boxes, scores, classes, keypoints, masks, num_detections],
             feed_dict={image_str_tensor: image_str_batch_np})
        self.assertAllClose(boxes_np, [[[0.0, 0.0, 0.5, 0.5],
                                        [0.5, 0.5, 0.8, 0.8]],
                                       [[0.5, 0.5, 1.0, 1.0],
                                        [0.0, 0.0, 0.0, 0.0]]])
        self.assertAllClose(scores_np, [[0.7, 0.6],
                                        [0.9, 0.0]])
        self.assertAllClose(classes_np, [[1, 2],
                                         [2, 1]])
        self.assertAllClose(keypoints_np, np.arange(48).reshape([2, 2, 6, 2]))
        self.assertAllClose(masks_np, np.arange(64).reshape([2, 2, 4, 4]))
        self.assertAllClose(num_detections_np, [2, 1])
Example #39
def main(_):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)
  if FLAGS.input_shape:
    input_shape = [
        int(dim) if dim != '-1' else None
        for dim in FLAGS.input_shape.split(',')
    ]
  else:
    input_shape = None
  exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
                                  FLAGS.trained_checkpoint_prefix,
                                  FLAGS.output_directory, input_shape)
Example #40
 def test_export_graph_with_moving_averages(self):
   tmp_dir = self.get_temp_dir()
   trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
   self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                         use_moving_averages=True)
   output_directory = os.path.join(tmp_dir, 'output')
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel()
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = True
     exporter.export_inference_graph(
         input_type='image_tensor',
         pipeline_config=pipeline_config,
         trained_checkpoint_prefix=trained_checkpoint_prefix,
         output_directory=output_directory)
Example #41
 def test_export_frozen_graph_with_moving_averages(self):
   checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
   self._save_checkpoint_from_mock_model(checkpoint_path,
                                         use_moving_averages=True)
   inference_graph_path = os.path.join(self.get_temp_dir(),
                                       'exported_graph.pb')
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel(num_classes=1)
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = True
     exporter.export_inference_graph(
         input_type='image_tensor',
         pipeline_config=pipeline_config,
         checkpoint_path=checkpoint_path,
         inference_graph_path=inference_graph_path)
Example #42
  def test_export_checkpoint_and_run_inference(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=False)
    output_directory = os.path.join(tmp_dir, 'output')
    model_path = os.path.join(output_directory, 'model.ckpt')
    meta_graph_path = model_path + '.meta'

    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='tf_example',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)

    tf_example_np = np.hstack([self._create_tf_example(
        np.ones((4, 4, 3)).astype(np.uint8))] * 2)
    with tf.Graph().as_default() as od_graph:
      with self.test_session(graph=od_graph) as sess:
        new_saver = tf.train.import_meta_graph(meta_graph_path)
        new_saver.restore(sess, model_path)

        tf_example = od_graph.get_tensor_by_name('tf_example:0')
        boxes = od_graph.get_tensor_by_name('detection_boxes:0')
        scores = od_graph.get_tensor_by_name('detection_scores:0')
        classes = od_graph.get_tensor_by_name('detection_classes:0')
        masks = od_graph.get_tensor_by_name('detection_masks:0')
        num_detections = od_graph.get_tensor_by_name('num_detections:0')
        (boxes_np, scores_np, classes_np, masks_np,
         num_detections_np) = sess.run(
             [boxes, scores, classes, masks, num_detections],
             feed_dict={tf_example: tf_example_np})
        self.assertAllClose(boxes_np, [[[0.0, 0.0, 0.5, 0.5],
                                        [0.5, 0.5, 0.8, 0.8]],
                                       [[0.5, 0.5, 1.0, 1.0],
                                        [0.0, 0.0, 0.0, 0.0]]])
        self.assertAllClose(scores_np, [[0.7, 0.6],
                                        [0.9, 0.0]])
        self.assertAllClose(classes_np, [[1, 2],
                                         [2, 1]])
        self.assertAllClose(masks_np, np.arange(64).reshape([2, 2, 4, 4]))
        self.assertAllClose(num_detections_np, [2, 1])
Example #43
 def test_export_graph_with_tf_example_input(self):
   tmp_dir = self.get_temp_dir()
   trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
   self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                         use_moving_averages=False)
   with mock.patch.object(
       model_builder, 'build', autospec=True) as mock_builder:
     mock_builder.return_value = FakeModel()
     output_directory = os.path.join(tmp_dir, 'output')
     pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
     pipeline_config.eval_config.use_moving_averages = False
     exporter.export_inference_graph(
         input_type='tf_example',
         pipeline_config=pipeline_config,
         trained_checkpoint_prefix=trained_checkpoint_prefix,
         output_directory=output_directory)
     self.assertTrue(os.path.exists(os.path.join(
         output_directory, 'saved_model', 'saved_model.pb')))
Example #44
  def test_raise_runtime_error_on_images_with_different_sizes(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=True)
    output_directory = os.path.join(tmp_dir, 'output')
    inference_graph_path = os.path.join(output_directory,
                                        'frozen_inference_graph.pb')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(
          add_detection_keypoints=True, add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='encoded_image_string_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)

    inference_graph = self._load_inference_graph(inference_graph_path)
    large_image = self._create_encoded_image_string(
        np.ones((4, 4, 3)).astype(np.uint8), 'jpg')
    small_image = self._create_encoded_image_string(
        np.ones((2, 2, 3)).astype(np.uint8), 'jpg')

    image_str_batch_np = np.hstack([large_image, small_image])
    with self.test_session(graph=inference_graph) as sess:
      image_str_tensor = inference_graph.get_tensor_by_name(
          'encoded_image_string_tensor:0')
      boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
      scores = inference_graph.get_tensor_by_name('detection_scores:0')
      classes = inference_graph.get_tensor_by_name('detection_classes:0')
      keypoints = inference_graph.get_tensor_by_name('detection_keypoints:0')
      masks = inference_graph.get_tensor_by_name('detection_masks:0')
      num_detections = inference_graph.get_tensor_by_name('num_detections:0')
      with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
                                   'TensorArray.*shape'):
        sess.run(
            [boxes, scores, classes, keypoints, masks, num_detections],
            feed_dict={image_str_tensor: image_str_batch_np})
Example #45
  def test_export_and_run_inference_with_encoded_image_string_tensor(self):
    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
    self._save_checkpoint_from_mock_model(checkpoint_path,
                                          use_moving_averages=False)
    inference_graph_path = os.path.join(self.get_temp_dir(),
                                        'exported_graph.pb')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='encoded_image_string_tensor',
          pipeline_config=pipeline_config,
          checkpoint_path=checkpoint_path,
          inference_graph_path=inference_graph_path)

    inference_graph = self._load_inference_graph(inference_graph_path)
    jpg_image_str = self._create_encoded_image_string(
        np.ones((4, 4, 3)).astype(np.uint8), 'jpg')
    png_image_str = self._create_encoded_image_string(
        np.ones((4, 4, 3)).astype(np.uint8), 'png')
    with self.test_session(graph=inference_graph) as sess:
      image_str_tensor = inference_graph.get_tensor_by_name(
          'encoded_image_string_tensor:0')
      boxes = inference_graph.get_tensor_by_name('detection_boxes:0')
      scores = inference_graph.get_tensor_by_name('detection_scores:0')
      classes = inference_graph.get_tensor_by_name('detection_classes:0')
      masks = inference_graph.get_tensor_by_name('detection_masks:0')
      num_detections = inference_graph.get_tensor_by_name('num_detections:0')
      for image_str in [jpg_image_str, png_image_str]:
        (boxes_np, scores_np, classes_np, masks_np,
         num_detections_np) = sess.run(
             [boxes, scores, classes, masks, num_detections],
             feed_dict={image_str_tensor: image_str})
        self.assertAllClose(boxes_np, [[0.0, 0.0, 0.5, 0.5],
                                       [0.5, 0.5, 0.8, 0.8]])
        self.assertAllClose(scores_np, [[0.7, 0.6]])
        self.assertAllClose(classes_np, [[1, 2]])
        self.assertAllClose(masks_np, np.arange(32).reshape([2, 4, 4]))
        self.assertAllClose(num_detections_np, [2])
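
That one exported graph accepts both JPEG and PNG strings is consistent with TensorFlow's tf.image.decode_image, which dispatches on the encoded string's magic bytes; presumably the exporter uses it or an equivalent. A small self-contained check (the file path is a placeholder):

import tensorflow as tf

encoded_placeholder = tf.placeholder(tf.string)
# decode_image handles JPEG, PNG, GIF and BMP behind a single op.
decoded = tf.image.decode_image(encoded_placeholder, channels=3)

with tf.Session() as sess:
  with tf.gfile.GFile('image.jpg', 'rb') as f:  # placeholder path
    print(sess.run(decoded, feed_dict={encoded_placeholder: f.read()}).shape)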
Example #46
  def test_export_saved_model_and_run_inference(self):
    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
    self._save_checkpoint_from_mock_model(checkpoint_path,
                                          use_moving_averages=False)
    inference_graph_path = os.path.join(self.get_temp_dir(),
                                        'saved_model')

    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = False
      exporter.export_inference_graph(
          input_type='tf_example',
          pipeline_config=pipeline_config,
          checkpoint_path=checkpoint_path,
          inference_graph_path=inference_graph_path,
          export_as_saved_model=True)

    with tf.Graph().as_default() as od_graph:
      with self.test_session(graph=od_graph) as sess:
        tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], inference_graph_path)
        tf_example = od_graph.get_tensor_by_name('import/tf_example:0')
        boxes = od_graph.get_tensor_by_name('import/detection_boxes:0')
        scores = od_graph.get_tensor_by_name('import/detection_scores:0')
        classes = od_graph.get_tensor_by_name('import/detection_classes:0')
        masks = od_graph.get_tensor_by_name('import/detection_masks:0')
        num_detections = od_graph.get_tensor_by_name('import/num_detections:0')
        (boxes, scores, classes, masks, num_detections) = sess.run(
            [boxes, scores, classes, masks, num_detections],
            feed_dict={tf_example: self._create_tf_example(
                np.ones((4, 4, 3)).astype(np.uint8))})
        self.assertAllClose(boxes, [[0.0, 0.0, 0.5, 0.5],
                                    [0.5, 0.5, 0.8, 0.8]])
        self.assertAllClose(scores, [[0.7, 0.6]])
        self.assertAllClose(classes, [[1, 2]])
        self.assertAllClose(masks, np.arange(32).reshape([2, 4, 4]))
        self.assertAllClose(num_detections, [2])
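
Rather than hard-coding prefixed tensor names such as 'import/detection_boxes:0', the SavedModel's signature can be queried for them. A sketch, assuming the export registered the default 'serving_default' signature with an 'inputs' key (both are assumptions here); tf.saved_model.loader.load returns the loaded MetaGraphDef:

import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
  meta_graph_def = tf.saved_model.loader.load(
      sess, [tf.saved_model.tag_constants.SERVING], 'saved_model')  # placeholder path
  signature = meta_graph_def.signature_def[
      tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
  # The signature records the concrete tensor names chosen at export time.
  input_name = signature.inputs['inputs'].name        # assumed key
  output_names = {key: val.name for key, val in signature.outputs.items()}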
Example #47
  def test_export_graph_with_moving_averages(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=True)
    output_directory = os.path.join(tmp_dir, 'output')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel()
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      pipeline_config.eval_config.use_moving_averages = True
      exporter.export_inference_graph(
          input_type='image_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)
      self.assertTrue(os.path.exists(os.path.join(
          output_directory, 'saved_model', 'saved_model.pb')))
    expected_variables = set(['conv2d/bias', 'conv2d/kernel', 'global_step'])
    actual_variables = set(
        [var_name for var_name, _ in tf.train.list_variables(output_directory)])
    self.assertTrue(expected_variables.issubset(actual_variables))
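
The final assertion relies on tf.train.list_variables accepting either a checkpoint directory or a checkpoint prefix. An equivalent way to inspect what the exporter wrote is a checkpoint reader (the path is a placeholder):

import tensorflow as tf

# Accepts either a checkpoint prefix or a directory containing one.
reader = tf.train.load_checkpoint('output')  # placeholder path
# Maps variable names (e.g. 'conv2d/kernel') to their saved shapes.
for name, shape in sorted(reader.get_variable_to_shape_map().items()):
  print(name, shape)

Note that the test saves with use_moving_averages=True yet expects only the plain variable names, consistent with the exporter folding the averaged values back into the original variables before saving.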
Example #48
  def test_export_model_with_all_output_nodes(self):
    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
    self._save_checkpoint_from_mock_model(checkpoint_path,
                                          use_moving_averages=False)
    inference_graph_path = os.path.join(self.get_temp_dir(),
                                        'exported_graph.pb')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel(add_detection_masks=True)
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      exporter.export_inference_graph(
          input_type='image_tensor',
          pipeline_config=pipeline_config,
          checkpoint_path=checkpoint_path,
          inference_graph_path=inference_graph_path)
    inference_graph = self._load_inference_graph(inference_graph_path)
    with self.test_session(graph=inference_graph):
      inference_graph.get_tensor_by_name('image_tensor:0')
      inference_graph.get_tensor_by_name('detection_boxes:0')
      inference_graph.get_tensor_by_name('detection_scores:0')
      inference_graph.get_tensor_by_name('detection_classes:0')
      inference_graph.get_tensor_by_name('detection_masks:0')
      inference_graph.get_tensor_by_name('num_detections:0')
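
_load_inference_graph is another test helper not shown here. Loading a frozen graph conventionally means parsing the serialized GraphDef and importing it; a minimal stand-in under that assumption:

import tensorflow as tf

def load_inference_graph(inference_graph_path):
  graph = tf.Graph()
  with graph.as_default():
    graph_def = tf.GraphDef()
    with tf.gfile.GFile(inference_graph_path, 'rb') as f:
      graph_def.ParseFromString(f.read())
    # name='' keeps tensor names unprefixed ('detection_boxes:0'),
    # matching the lookups in the test above.
    tf.import_graph_def(graph_def, name='')
  return graph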
Example #49
  def test_export_graph_saves_pipeline_file(self):
    tmp_dir = self.get_temp_dir()
    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                          use_moving_averages=True)
    output_directory = os.path.join(tmp_dir, 'output')
    with mock.patch.object(
        model_builder, 'build', autospec=True) as mock_builder:
      mock_builder.return_value = FakeModel()
      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      exporter.export_inference_graph(
          input_type='image_tensor',
          pipeline_config=pipeline_config,
          trained_checkpoint_prefix=trained_checkpoint_prefix,
          output_directory=output_directory)
      expected_pipeline_path = os.path.join(
          output_directory, 'pipeline.config')
      self.assertTrue(os.path.exists(expected_pipeline_path))

      written_pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
      with tf.gfile.GFile(expected_pipeline_path, 'r') as f:
        proto_str = f.read()
        text_format.Merge(proto_str, written_pipeline_config)
        self.assertProtoEquals(pipeline_config, written_pipeline_config)
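
The read-back check above implies the exporter writes pipeline.config as a text proto. The write side amounts to the following sketch, using only the protobuf API (the path is a placeholder):

import tensorflow as tf
from google.protobuf import text_format
from object_detection.protos import pipeline_pb2

pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
pipeline_config.eval_config.use_moving_averages = False

# MessageToString emits the same text format that text_format.Merge parses.
with tf.gfile.GFile('pipeline.config', 'w') as f:  # placeholder path
  f.write(text_format.MessageToString(pipeline_config))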