Example #1
0
    def test_postprocess_global(self):
        """Global-NMS postprocess should agree with the legacy inference path."""
        # Fixed seed so the sequence of tf.random.normal draws is reproducible.
        tf.random.set_seed(1111)
        cls_outputs = {
            1: tf.random.normal([2, 4, 4, 2]),
            2: tf.random.normal([2, 2, 2, 2]),
        }
        box_outputs = {
            1: tf.random.normal([2, 4, 4, 4]),
            2: tf.random.normal([2, 2, 2, 4]),
        }
        cls_list = [cls_outputs[level] for level in (1, 2)]
        box_list = [box_outputs[level] for level in (1, 2)]
        scales = [1.0, 2.0]

        self.params['max_detection_points'] = 10
        boxes, scores, classes, valid_len = postprocess.postprocess_global(
            self.params, cls_list, box_list, scales)
        self.assertAllClose(valid_len, [2, 2])

        # Legacy path: each detection row is
        # [image_id, ymin, xmin, ymax, xmax, score, class].
        self.params['disable_pyfun'] = True
        score_thresh = 0.5
        self.params['batch_size'] = len(scales)
        max_output_size = self.params['nms_configs']['max_output_size']
        legacy = inference.det_post_process(self.params, cls_outputs,
                                            box_outputs, scales, score_thresh,
                                            max_output_size)
        self.assertAllClose(boxes, legacy[:, :, 1:5])
        self.assertAllClose(scores, legacy[:, :, 5])
        self.assertAllClose(classes, legacy[:, :, 6])
Example #2
0
    def _postprocess(self, cls_outputs, box_outputs, scales, mode='global'):
        """Postprocess class and box predictions.

        A falsy `mode` is a passthrough; otherwise dispatch to the matching
        `postprocess.postprocess_*` routine. `tflite` forbids `scales`.
        """
        if not mode:
            return cls_outputs, box_outputs

        config = self.config.as_dict()
        if mode == 'tflite':
            if scales is not None:
                # pre_mode should be None for TFLite.
                raise ValueError(
                    'scales not supported for TFLite post-processing')
            return postprocess.postprocess_tflite(config, cls_outputs,
                                                  box_outputs)
        if mode in ('global', 'per_class', 'combined'):
            # Lazy lookup: resolve only the handler actually requested.
            handler = getattr(postprocess, 'postprocess_' + mode)
            return handler(config, cls_outputs, box_outputs, scales)
        raise ValueError('Unsupported postprocess mode {}'.format(mode))
Example #3
0
 def _postprocess(self, cls_outputs, box_outputs, scales, mode='global'):
   """Postprocess predictions; passthrough when `mode` is falsy."""
   if not mode:
     return cls_outputs, box_outputs
   config = self.config.as_dict()
   if mode in ('global', 'per_class'):
     # Resolve the handler by name: postprocess_global / postprocess_per_class.
     handler = getattr(postprocess, 'postprocess_' + mode)
     return handler(config, cls_outputs, box_outputs, scales)
   raise ValueError('Unsupported postprocess mode {}'.format(mode))
Example #4
0
  def _postprocess(self, cls_outputs, box_outputs, scales, mode='global'):
    """Postprocess class/box predictions; identity when `mode` is falsy."""
    if not mode:
      return cls_outputs, box_outputs

    # TODO(tanmingxing): remove this cast once FP16 works postprocessing.
    as_float32 = lambda tensors: [tf.cast(t, tf.float32) for t in tensors]
    cls_outputs = as_float32(cls_outputs)
    box_outputs = as_float32(box_outputs)

    config = self.config.as_dict()
    if mode in ('global', 'per_class'):
      # Resolve the matching postprocess routine lazily by name.
      handler = getattr(postprocess, 'postprocess_' + mode)
      return handler(config, cls_outputs, box_outputs, scales)
    raise ValueError('Unsupported postprocess mode {}'.format(mode))
def det_post_process(
    params: Dict[Any, Any],
    cls_outputs: Dict[int, tf.Tensor],
    box_outputs: Dict[int, tf.Tensor],
    scales: List[float],
):
    """Post preprocessing the box/class predictions.

    Args:
      params: a parameter dictionary that includes `min_level`, `max_level`,
        `batch_size`, and `num_classes`.
      cls_outputs: an OrderDict with keys representing levels and values
        representing logits in [batch_size, height, width, num_anchors].
      box_outputs: an OrderDict with keys representing levels and values
        representing box regression targets in [batch_size, height, width,
        num_anchors * 4].
      scales: a list of float values indicating image scale.

    Returns:
      detections_batch: a batch of detection results. Each detection is a tensor
        with each row as [image_id, ymin, xmin, ymax, xmax, score, class].
    """
    # Combined NMS supports dynamic batch size; global NMS is the default.
    nms_fn = (postprocess.postprocess_combined
              if params.get("combined_nms", None)
              else postprocess.postprocess_global)
    nms_boxes, nms_scores, nms_classes, _ = nms_fn(params, cls_outputs,
                                                   box_outputs, scales)

    batch_size = tf.shape(cls_outputs[params["min_level"]])[0]
    # Broadcast per-image ids across each image's detections.
    img_ids = tf.expand_dims(
        tf.cast(tf.range(0, batch_size), nms_scores.dtype), -1)
    columns = [img_ids * tf.ones_like(nms_scores)]
    columns.extend(nms_boxes[:, :, coord] for coord in range(4))
    columns.append(nms_scores)
    columns.append(nms_classes)
    return tf.stack(columns, axis=-1, name="detections")
Example #6
0
  def test_postprocess_global(self):
    """Global NMS should yield the expected per-image classes and scores."""
    # Fixed seed so the sequence of tf.random.normal draws is reproducible.
    tf.random.set_seed(1111)
    cls_outputs = {
        1: tf.random.normal([2, 4, 4, 2]),
        2: tf.random.normal([2, 2, 2, 2]),
    }
    box_outputs = {
        1: tf.random.normal([2, 4, 4, 4]),
        2: tf.random.normal([2, 2, 2, 4]),
    }
    scales = [1.0, 2.0]

    self.params['max_detection_points'] = 10
    _, scores, classes, valid_len = postprocess.postprocess_global(
        self.params, [cls_outputs[1], cls_outputs[2]],
        [box_outputs[1], box_outputs[2]], scales)
    self.assertAllClose(valid_len, [2, 2])
    self.assertAllClose(classes.numpy(), [[2., 1.], [1., 2.]])
    self.assertAllClose(scores.numpy(),
                        [[0.90157586, 0.88812476], [0.88454413, 0.8158828]])
Example #7
0
  def _postprocess(self, cls_outputs, box_outputs, scales, mode='global'):
    """Postprocess class and box predictions.

    Falsy `mode` is a passthrough. `tflite` mode forbids `scales`; other
    modes dispatch to the matching `postprocess.postprocess_*` routine.
    """
    if not mode:
      return cls_outputs, box_outputs

    # TODO(tanmingxing): remove this cast once FP16 works postprocessing.
    as_float32 = lambda tensors: [tf.cast(t, tf.float32) for t in tensors]
    cls_outputs = as_float32(cls_outputs)
    box_outputs = as_float32(box_outputs)

    config = self.config.as_dict()
    if mode == 'tflite':
      if scales is not None:
        # pre_mode should be None for TFLite.
        raise ValueError('scales not supported for TFLite post-processing')
      return postprocess.postprocess_tflite(config, cls_outputs, box_outputs)
    if mode in ('global', 'per_class'):
      # Resolve the matching postprocess routine lazily by name.
      handler = getattr(postprocess, 'postprocess_' + mode)
      return handler(config, cls_outputs, box_outputs, scales)
    raise ValueError('Unsupported postprocess mode {}'.format(mode))