Example #1
    def process(
        self,
        class_predictions_with_background: tf.Tensor,
        num_object_detections: tf.Tensor,
        detection_object_boxes: tf.Tensor,
        groundtruth_object_boxes: tf.Tensor,
        groundtruth_object_classes: tf.Tensor,
        groundtruth_object_weights: Optional[tf.Tensor] = None
    ) -> Dict[str, tf.Tensor]:
        # pylint: disable=arguments-differ,too-many-locals
        # the parent class's process method has a more generic signature
        detection_object_boxes = tf.stop_gradient(detection_object_boxes)
        num_classes_with_bg = (
            class_predictions_with_background.shape.as_list()[-1])
        groundtruth_classes_with_background = tf.one_hot(
            indices=groundtruth_object_classes,
            depth=num_classes_with_bg,
            dtype=tf.float32)
        (class_targets, class_targets_weights, _,
         _) = _batch_assign_targets_dynamic(
             self._detector_target_assigner,
             anchors=detection_object_boxes,
             groundtruth_object_boxes=groundtruth_object_boxes,
             groundtruth_classes_with_background=(
                 groundtruth_classes_with_background),
             num_classes=num_classes_with_bg - 1,
             groundtruth_object_weights=groundtruth_object_weights)

        max_num_proposals = tf.shape(detection_object_boxes)[1]
        normalizer = _get_normalizer(
            tf.shape(detection_object_boxes)[0], num_object_detections,
            max_num_proposals)
        proposals_batch_indicator = _get_proposals_batch_indicator(
            num_object_detections, max_num_proposals)
        second_stage_cls_losses = self._loss_fn(
            class_predictions_with_background,
            class_targets,
            weights=class_targets_weights,
            losses_mask=None)
        normalizer = broadcast_with_expand_to(normalizer,
                                              second_stage_cls_losses)
        proposals_batch_indicator = broadcast_with_expand_to(
            proposals_batch_indicator, second_stage_cls_losses)
        second_stage_cls_losses = second_stage_cls_losses / normalizer
        second_stage_cls_loss = tf.reduce_sum(second_stage_cls_losses *
                                              proposals_batch_indicator,
                                              name="loss_classification_match")
        return {"loss_classification_match": second_stage_cls_loss}
Example #2
    def _cutout_keypoints(object_keypoints, cutout_box):
        (cutout_box_ymin, cutout_box_xmin, cutout_box_ymax,
         cutout_box_xmax) = tf.split(cutout_box, 4, -1)
        object_keypoints_y = object_keypoints[..., 0]
        object_keypoints_x = object_keypoints[..., 1]

        valid_keypoints_mask_first = tf.reduce_all(tf.logical_and(
            tf.greater(object_keypoints, 0),
            tf.less_equal(object_keypoints, 1.0)),
                                                   -1,
                                                   keepdims=True)
        valid_keypoints_mask_cutout = tf.reduce_any(tf.stack([
            tf.greater_equal(object_keypoints_y, cutout_box_ymax),
            tf.less_equal(object_keypoints_y, cutout_box_ymin),
            tf.greater_equal(object_keypoints_x, cutout_box_xmax),
            tf.less_equal(object_keypoints_x, cutout_box_xmin),
        ], -1),
                                                    -1,
                                                    keepdims=True)
        valid_keypoints_mask = tf.logical_and(valid_keypoints_mask_first,
                                              valid_keypoints_mask_cutout)
        keypoints_cutout = tf.where(
            broadcast_with_expand_to(valid_keypoints_mask, object_keypoints),
            object_keypoints, tf.zeros_like(object_keypoints))
        return keypoints_cutout
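A small usage sketch with made-up values: keypoints are normalized (y, x) pairs, and any keypoint falling strictly inside the cutout rectangle is zeroed out:

import tensorflow as tf

# Hypothetical inputs: one object with two keypoints in normalized coords.
object_keypoints = tf.constant([[[0.30, 0.30],    # inside the cutout box
                                 [0.80, 0.80]]])  # outside the cutout box
cutout_box = tf.constant([0.25, 0.25, 0.50, 0.50])  # ymin, xmin, ymax, xmax

keypoints_cutout = _cutout_keypoints(object_keypoints, cutout_box)
# -> [[[0.0, 0.0], [0.8, 0.8]]]: only the keypoint inside the box is zeroed.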
Example #3
    def cutout(self,
               image: tf.Tensor,
               replace_value: Union[float, tf.Tensor] = 0.0) -> tf.Tensor:
        """
        Apply cutout to the given image

        Parameters
        ----------
        image
            input image
        replace_value
            the value to which the rectangle is set

        Returns
        -------
        image_with_cutoff
            the input image after applying cutout
        """
        image_size = tf.shape(image)[:-1]
        image_size_float = tf.cast(image_size, tf.float32)
        cut_lengths_absolute = tf.cast(
            tf.floor(self.random_variables["cut_lengths"] * image_size_float),
            tf.int32)
        cut_offset_absolute = tf.cast(
            tf.floor(self.random_variables["cut_offset"] * image_size_float),
            tf.int32)

        mask = self._create_cutout_mask(cut_offset_absolute,
                                        cut_lengths_absolute, image_size)
        mask = broadcast_with_expand_to(mask, image)
        mask_with_replaced_value = tf.cast(mask, image.dtype) * replace_value
        image_with_cutoff = tf.where(mask, mask_with_replaced_value, image)

        return image_with_cutoff
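`_create_cutout_mask` is not shown on this page. A plausible sketch, assuming it returns a boolean mask of shape `image_size` that is True inside the cut rectangle:

import tensorflow as tf

def _create_cutout_mask(cut_offset, cut_lengths, image_size):
    # Assumed helper: True inside the rectangle that starts at
    # `cut_offset` (y, x) and extends `cut_lengths` (y, x) pixels.
    rows = tf.range(image_size[0])[:, tf.newaxis]
    cols = tf.range(image_size[1])[tf.newaxis, :]
    in_rows = tf.logical_and(rows >= cut_offset[0],
                             rows < cut_offset[0] + cut_lengths[0])
    in_cols = tf.logical_and(cols >= cut_offset[1],
                             cols < cut_offset[1] + cut_lengths[1])
    return tf.logical_and(in_rows, in_cols)

`broadcast_with_expand_to(mask, image)` then extends this [height, width] mask across the channel axis so `tf.where` can blend per pixel.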
Example #4
def _mask_single_batch_input_to_classes(
        item: tf.Tensor, classes_mask: tf.Tensor,
        rearranged_indices: tf.Tensor) -> tf.Tensor:
    mask_broadcasted = broadcast_with_expand_to(classes_mask, item)
    item_masked = tf.where(mask_broadcasted, item, tf.zeros_like(item))
    item_masked_rearranged = tf.batch_gather(item_masked, rearranged_indices)
    return item_masked_rearranged
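A usage sketch under assumed shapes: `item` is [batch, num_objects, ...], `classes_mask` is a boolean keep-mask over objects, and `rearranged_indices` presumably moves the kept objects to the front. Note that `tf.batch_gather` is the TF1 API; `tf.gather(..., batch_dims=1)` is the TF2 equivalent:

import tensorflow as tf

# Hypothetical inputs: batch of one, three objects, keep objects 0 and 2.
item = tf.constant([[[1.0], [2.0], [3.0]]])        # [batch, num_objects, 1]
classes_mask = tf.constant([[True, False, True]])  # [batch, num_objects]
rearranged_indices = tf.constant([[0, 2, 1]])      # kept entries first

masked = _mask_single_batch_input_to_classes(item, classes_mask,
                                             rearranged_indices)
# -> [[[1.0], [3.0], [0.0]]]: masked-out entries are zeroed and moved back.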
Example #5
def _flip_keypoints_up_down(keypoints):
    keypoints_y, keypoints_x = tf.split(keypoints, 2, -1)
    keypoints_y_flipped = 1 - keypoints_y
    keypoints_flipped = tf.concat([keypoints_y_flipped, keypoints_x], -1)
    valid_keypoints = tf.reduce_any(tf.greater(keypoints, 0), -1)
    keypoints_flipped = tf.where(
        broadcast_with_expand_to(valid_keypoints, keypoints),
        keypoints_flipped, tf.zeros_like(keypoints_flipped))
    return keypoints_flipped
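A quick worked example with made-up values: a keypoint at normalized (y, x) = (0.2, 0.7) flips to (0.8, 0.7), while an all-zero padding keypoint stays zero thanks to the validity mask:

import tensorflow as tf

keypoints = tf.constant([[0.2, 0.7],   # real keypoint
                         [0.0, 0.0]])  # padding, must stay zero
keypoints_flipped = _flip_keypoints_up_down(keypoints)
# -> [[0.8, 0.7], [0.0, 0.0]]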
Example #6
def _flip_boxes_up_down(boxes):
    ymin, xmin, ymax, xmax = tf.split(boxes, 4, -1)
    height = ymax - ymin
    ymin = 1.0 - height - ymin
    ymax = ymin + height
    boxes_flipped = tf.concat([ymin, xmin, ymax, xmax], -1)
    valid_boxes = tf.greater(height, 0)
    boxes_flipped = tf.where(broadcast_with_expand_to(valid_boxes, boxes),
                             boxes_flipped, tf.zeros_like(boxes_flipped))
    return boxes_flipped
Example #7
def _flip_boxes_left_right(boxes):
    ymin, xmin, ymax, xmax = tf.split(boxes, 4, -1)
    width = xmax - xmin
    xmin = 1.0 - width - xmin
    xmax = xmin + width
    boxes_flipped = tf.concat([ymin, xmin, ymax, xmax], -1)
    valid_boxes = tf.greater(width, 0)
    boxes_flipped = tf.where(broadcast_with_expand_to(valid_boxes, boxes),
                             boxes_flipped, tf.zeros_like(boxes_flipped))
    return boxes_flipped
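Both box flips use the same identity: the flipped minimum is one minus the old maximum, written here as `1.0 - width - xmin` (respectively `1.0 - height - ymin`), and degenerate padding boxes are zeroed via the validity mask. A made-up check:

import tensorflow as tf

boxes = tf.constant([[0.1, 0.2, 0.4, 0.6],   # ymin, xmin, ymax, xmax
                     [0.0, 0.0, 0.0, 0.0]])  # padding, must stay zero
boxes_flipped = _flip_boxes_left_right(boxes)
# -> [[0.1, 0.4, 0.4, 0.8], [0.0, 0.0, 0.0, 0.0]]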
Example #8
    def _rotate_keypoints(self, object_keypoints: tf.Tensor) -> tf.Tensor:
        rotation_angle = self.random_variables["rotation_angle"]
        keypoints_rotated = _rotate_points(object_keypoints, rotation_angle)
        valid_keypoints_mask_first = tf.reduce_all(tf.logical_and(
            tf.greater(object_keypoints, 0),
            tf.less_equal(object_keypoints, 1.0)),
                                                   -1,
                                                   keepdims=True)
        valid_keypoints_mask_after_rotation = tf.reduce_all(tf.logical_and(
            tf.greater(keypoints_rotated, 0),
            tf.less_equal(keypoints_rotated, 1.0)),
                                                            -1,
                                                            keepdims=True)
        valid_keypoints_mask = tf.logical_and(
            valid_keypoints_mask_first, valid_keypoints_mask_after_rotation)
        keypoints_rotated = tf.where(
            broadcast_with_expand_to(valid_keypoints_mask, object_keypoints),
            keypoints_rotated, tf.zeros_like(object_keypoints))
        return keypoints_rotated
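`_rotate_points` is not defined on this page. A plausible sketch, assuming it rotates normalized (y, x) points by `rotation_angle` radians around the image center (0.5, 0.5); the rotation direction is a guess, since conventions differ:

import tensorflow as tf

def _rotate_points(points, angle):
    # Assumed helper: rotate (y, x) points around the image center.
    points_y, points_x = tf.split(points - 0.5, 2, -1)
    cos_a = tf.cos(angle)
    sin_a = tf.sin(angle)
    points_y_rotated = cos_a * points_y - sin_a * points_x
    points_x_rotated = sin_a * points_y + cos_a * points_x
    return tf.concat([points_y_rotated, points_x_rotated], -1) + 0.5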
Example #9
    def _crop_keypoints(self, object_keypoints: tf.Tensor) -> tf.Tensor:
        crop_offset = self.random_variables["crop_offset"]
        crop_scale = self.random_variables["crop_scale"]
        keypoints_with_offset = object_keypoints - crop_offset
        keypoints_with_offset_scaled = keypoints_with_offset / crop_scale
        valid_keypoints_mask_first = tf.reduce_all(tf.logical_and(
            tf.greater(object_keypoints, 0),
            tf.less_equal(object_keypoints, 1.0)),
                                                   -1,
                                                   keepdims=True)
        valid_keypoints_mask_after_crop = tf.reduce_all(tf.logical_and(
            tf.greater(keypoints_with_offset_scaled, 0),
            tf.less_equal(keypoints_with_offset_scaled, 1.0)),
                                                        -1,
                                                        keepdims=True)
        valid_keypoints_mask = tf.logical_and(valid_keypoints_mask_first,
                                              valid_keypoints_mask_after_crop)
        keypoints_cropped = tf.where(
            broadcast_with_expand_to(valid_keypoints_mask, object_keypoints),
            keypoints_with_offset_scaled, tf.zeros_like(object_keypoints))
        return keypoints_cropped
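The crop mapping itself is just (keypoint - offset) / scale: with a crop offset of (0.1, 0.1) and a scale of 0.5, a keypoint at (0.3, 0.4) lands at (0.4, 0.6) inside the crop, while one at (0.9, 0.9) maps to (1.6, 1.6), falls outside [0, 1], and is zeroed by the mask.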
Example #10
    def process(
        self,
        *,
        detection_object_boxes: tf.Tensor,
        num_object_detections: tf.Tensor,
        detection_object_keypoints_heatmaps: tf.Tensor,
        groundtruth_object_boxes: tf.Tensor,
        groundtruth_object_keypoints: tf.Tensor,
        groundtruth_object_weights: Optional[tf.Tensor] = None
    ) -> Dict[str, tf.Tensor]:
        # pylint: disable=arguments-differ,too-many-locals
        # the parent class's process method has a more generic signature
        detection_object_boxes = tf.stop_gradient(detection_object_boxes)
        heatmaps_shape = tf.shape(detection_object_keypoints_heatmaps)[2:4]
        groundtruth_object_keypoints = tf.maximum(groundtruth_object_keypoints,
                                                  0)
        (target_keypoints_encoded, keypoints_masks,
         target_weights) = self._match_groundtruth_keypoints(
             detection_object_boxes, groundtruth_object_boxes,
             groundtruth_object_keypoints, groundtruth_object_weights)
        target_keypoints_heatmaps = self._get_target_keypoints_heatmaps(
            target_keypoints_encoded, keypoints_masks, heatmaps_shape)
        max_num_detections = tf.shape(detection_object_boxes)[1]
        batch_indicator = _get_proposals_batch_indicator(
            num_object_detections, max_num_detections)
        normalizer = tf.maximum(
            tf.reduce_sum(batch_indicator * target_weights *
                          tf.reduce_prod(tf.cast(heatmaps_shape, tf.float32))),
            1.0)

        detection_object_keypoints_heatmaps_r = tf.reshape(
            detection_object_keypoints_heatmaps, [
                tf.shape(detection_object_keypoints_heatmaps)[0], -1,
                tf.shape(detection_object_keypoints_heatmaps)[-1]
            ])
        target_keypoints_heatmaps_r = tf.reshape(target_keypoints_heatmaps, [
            tf.shape(target_keypoints_heatmaps)[0], -1,
            tf.shape(target_keypoints_heatmaps)[-1]
        ])
        weights = tf.reshape(
            broadcast_with_expand_to(target_weights,
                                     target_keypoints_heatmaps[..., 0]),
            [tf.shape(target_weights)[0], -1])
        batch_indicator_r = tf.reshape(
            broadcast_with_expand_to(batch_indicator,
                                     target_keypoints_heatmaps[..., 0]),
            [tf.shape(target_weights)[0], -1])
        if "Classification" in self.heatmaps_loss_name:
            weights = weights[..., tf.newaxis]
            batch_indicator_r = batch_indicator_r[..., tf.newaxis]

        heatmaps_loss_samples = self._loss_fn(
            detection_object_keypoints_heatmaps_r,
            target_keypoints_heatmaps_r,
            weights=weights,
        ) / normalizer

        loss = tf.reduce_sum(heatmaps_loss_samples * batch_indicator_r,
                             name="loss_keypoints_heatmaps")

        return {"loss_keypoints_heatmaps": loss}