Example #1
    def export_saved_model(self,
                           saved_model_dir,
                           batch_size=None,
                           pre_mode='infer',
                           post_mode='global'):
        """Saves the model to Tensorflow SavedModel.

    Args:
      saved_model_dir: Folder path for saved model.
      batch_size: Batch size to be saved in saved_model.
      pre_mode: Pre-processing Mode in ExportModel, must be {None, 'infer'}.
      post_mode: Post-processing Mode in ExportModel, must be {None, 'global',
        'per_class'}.
    """
        # Create EfficientDetModel with latest checkpoint.
        config = self.config
        tf.keras.backend.clear_session()
        model = efficientdet_keras.EfficientDetModel(config=config)
        model.build((batch_size, *config.image_size, 3))
        if config.model_dir:
            util_keras.restore_ckpt(model,
                                    config.model_dir,
                                    config['moving_average_decay'],
                                    skip_mismatch=False)
        else:
            # EfficientDetModel is randomly initialized without restoring a
            # checkpoint. This is mainly used in object_detector_test and
            # shouldn't be used when exporting a trained model.
            tf.compat.v1.logging.warn('Need to restore the checkpoint for '
                                      'EfficientDet.')
        # Gets tf.TensorSpec.
        if pre_mode is None:
            # Input is the preprocessed image that's already resized to a certain
            # input shape.
            input_spec = tf.TensorSpec(
                shape=[batch_size, *config.image_size, 3],
                dtype=tf.float32,
                name='images')
        else:
            # Input is the raw image, which can have any input shape.
            input_spec = tf.TensorSpec(shape=[batch_size, None, None, 3],
                                       dtype=tf.uint8,
                                       name='images')

        export_model = inference.ExportModel(model,
                                             pre_mode=pre_mode,
                                             post_mode=post_mode)
        tf.saved_model.save(
            export_model,
            saved_model_dir,
            signatures=export_model.__call__.get_concrete_function(input_spec))
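
A minimal usage sketch for export_saved_model(), assuming a hypothetical `detector` object that exposes the method above; the export path, batch size, and dummy image are placeholders, and the signature input name follows the 'images' TensorSpec used in the export:

import numpy as np
import tensorflow as tf

# Hypothetical call: export with raw-image pre-processing ('infer') and global NMS.
detector.export_saved_model('/tmp/efficientdet_savedmodel',
                            batch_size=1,
                            pre_mode='infer',
                            post_mode='global')

# Reload the SavedModel and run its default signature on a dummy uint8 image.
reloaded = tf.saved_model.load('/tmp/efficientdet_savedmodel')
serving_fn = reloaded.signatures['serving_default']
dummy_image = np.zeros((1, 512, 512, 3), dtype=np.uint8)  # any HxW works with pre_mode='infer'
outputs = serving_fn(images=tf.constant(dummy_image))  # dict of detection tensors
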
Example #2
 def build(self, params_override=None):
     """Build model and restore checkpoints."""
     params = copy.deepcopy(self.params)
     if params_override:
         params.update(params_override)
     config = hparams_config.get_efficientdet_config(self.model_name)
     config.override(params)
     if self.only_network:
         self.model = efficientdet_keras.EfficientDetNet(config=config)
     else:
         self.model = efficientdet_keras.EfficientDetModel(config=config)
     image_size = utils.parse_image_size(params['image_size'])
     self.model.build((self.batch_size, *image_size, 3))
     util_keras.restore_ckpt(self.model,
                             self.ckpt_path,
                             self.params['moving_average_decay'],
                             skip_mismatch=False)
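
A short usage sketch for build(); the `driver` object and its constructor are assumptions (only the build() method itself appears above), and the override key follows the hparams used in the snippet:

# Hypothetical usage: override the input image size before the checkpoint is
# restored and the Keras model is built.
driver.build(params_override={'image_size': '640x640'})
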
Example #3
def main(_):

  # pylint: disable=line-too-long
  # Prepare images and checkpoints: please run these commands in shell.
  # !mkdir tmp
  # !wget https://user-images.githubusercontent.com/11736571/77320690-099af300-6d37-11ea-9d86-24f14dc2d540.png -O tmp/img.png
  # !wget https://storage.googleapis.com/cloud-tpu-checkpoints/efficientdet/coco/efficientdet-d0.tar.gz -O tmp/efficientdet-d0.tar.gz
  # !tar zxf tmp/efficientdet-d0.tar.gz -C tmp
  imgs = [np.array(Image.open(FLAGS.image_path))]
  # Create model config.
  config = hparams_config.get_efficientdet_config(FLAGS.model_name)
  config.is_training_bn = False
  config.image_size = '1920x1280'
  config.nms_configs.score_thresh = 0.4
  config.nms_configs.max_output_size = 100
  config.override(FLAGS.hparams)

  # Use 'mixed_float16' if running on GPUs.
  policy = tf.keras.mixed_precision.Policy('float32')
  tf.keras.mixed_precision.set_global_policy(policy)
  tf.config.run_functions_eagerly(FLAGS.debug)

  # Create and run the model.
  model = efficientdet_keras.EfficientDetModel(config=config)
  model.build((None, None, None, 3))
  model.load_weights(tf.train.latest_checkpoint(FLAGS.model_dir))
  model.summary()

  class ExportModel(tf.Module):

    def __init__(self, model):
      super().__init__()
      self.model = model

    @tf.function
    def f(self, imgs):
      return self.model(imgs, training=False, post_mode='global')

  imgs = tf.convert_to_tensor(imgs, dtype=tf.uint8)
  export_model = ExportModel(model)
  if FLAGS.saved_model_dir:
    tf.saved_model.save(
        export_model,
        FLAGS.saved_model_dir,
        signatures=export_model.f.get_concrete_function(
            tf.TensorSpec(shape=(None, None, None, 3), dtype=tf.uint8)))
    export_model = tf.saved_model.load(FLAGS.saved_model_dir)

  boxes, scores, classes, valid_len = export_model.f(imgs)

  # Visualize results.
  for i, img in enumerate(imgs):
    length = valid_len[i]
    img = inference.visualize_image(
        img,
        boxes[i].numpy()[:length],
        classes[i].numpy().astype(int)[:length],
        scores[i].numpy()[:length],
        label_map=config.label_map,
        min_score_thresh=config.nms_configs.score_thresh,
        max_boxes_to_draw=config.nms_configs.max_output_size)
    output_image_path = os.path.join(FLAGS.output_dir, str(i) + '.jpg')
    Image.fromarray(img).save(output_image_path)
    print('writing annotated image to %s' % output_image_path)
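
The main() above references absl flags defined elsewhere in the script; a sketch of how they might be declared, using the flag names referenced above (defaults and help strings are assumptions):

from absl import app
from absl import flags

flags.DEFINE_string('image_path', None, 'Path to the input image.')
flags.DEFINE_string('output_dir', None, 'Directory for annotated output images.')
flags.DEFINE_string('model_name', 'efficientdet-d0', 'EfficientDet model name.')
flags.DEFINE_string('model_dir', None, 'Directory containing the checkpoint.')
flags.DEFINE_string('saved_model_dir', None, 'Optional SavedModel export directory.')
flags.DEFINE_string('hparams', '', 'Comma-separated k=v pairs or a yaml config file.')
flags.DEFINE_bool('debug', False, 'Run tf.functions eagerly for debugging.')
FLAGS = flags.FLAGS

if __name__ == '__main__':
  app.run(main)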