Example #1
def subsample_negative_labels(labels, rpn_batchsize=256):
    """
    subsample negative labels if we have too many
    :param labels: array of labels (1 is positive, 0 is negative, -1 is dont
    care)
    :return:
    """
    num_bg = rpn_batchsize - keras.backend.shape(
        tensorflow_backend.where(keras.backend.equal(labels, 1)))[0]

    bg_inds = tensorflow_backend.where(keras.backend.equal(labels, 0))

    num_bg_inds = keras.backend.shape(bg_inds)[0]

    size = num_bg_inds - num_bg

    def more_negative():
        indices = keras.backend.reshape(bg_inds, (-1, ))
        indices = tensorflow_backend.shuffle(indices)[:size]

        updates = tensorflow.ones((size, )) * -1

        inverse_labels = keras.backend.gather(labels, indices) * -1

        indices = keras.backend.reshape(indices, (-1, 1))

        return tensorflow_backend.scatter_add_tensor(labels, indices,
                                                     inverse_labels + updates)

    condition = keras.backend.less_equal(size, 0)

    return keras.backend.switch(condition, labels, lambda: more_negative())
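
The snippet above leans on project-internal helpers (tensorflow_backend.where, shuffle, scatter_add_tensor). As a point of comparison, here is a minimal standalone sketch of the same subsampling idea using only plain TensorFlow 2 ops; the name subsample_negative_labels_tf and the use of tf.tensor_scatter_nd_update are illustrative choices, not the project's API.

import tensorflow as tf


def subsample_negative_labels_tf(labels, rpn_batchsize=256):
    # Standalone sketch (not the project's API): randomly demote surplus
    # background labels (0) to "don't care" (-1).
    num_fg = tf.reduce_sum(tf.cast(tf.equal(labels, 1), tf.int32))
    num_bg_allowed = rpn_batchsize - num_fg

    bg_inds = tf.reshape(tf.where(tf.equal(labels, 0)), (-1,))
    surplus = tf.shape(bg_inds)[0] - num_bg_allowed

    def demote():
        drop = tf.reshape(tf.random.shuffle(bg_inds)[:surplus], (-1, 1))
        updates = tf.cast(tf.fill(tf.shape(drop)[:1], -1), labels.dtype)
        return tf.tensor_scatter_nd_update(labels, drop, updates)

    return tf.cond(surplus > 0, demote, lambda: labels)
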
Example #2
    def compute_regression_loss(output, target, labels):
        condition = keras.backend.not_equal(labels, -1)

        indices = tensorflow_backend.where(condition)

        output = tensorflow_backend.gather_nd(output, indices)
        target = tensorflow_backend.gather_nd(target, indices)
        labels = tensorflow_backend.gather_nd(labels, indices)

        condition = keras.backend.greater(labels, 0)

        x = keras.backend.ones_like(labels)
        y = keras.backend.zeros_like(labels)

        p_star_i = tensorflow_backend.where(condition, x, y)

        p_star_i = keras.backend.expand_dims(p_star_i, 0)

        output = keras.backend.expand_dims(output, 0)
        target = keras.backend.expand_dims(target, 0)

        a_y = tensorflow_backend.smooth_l1(output, target, anchored=True)

        a = p_star_i * a_y

        a = keras.backend.sum(a)

        # Normalize by the number of positive anchors; epsilon guards against
        # division by zero.
        b = keras.backend.sum(p_star_i + keras.backend.epsilon())

        loss = 1.0 * (a / b)

        return loss
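
A minimal standalone sketch of the same masked regression loss, written with plain TensorFlow. It assumes output and target carry a trailing axis of 4 box coordinates and labels holds one value per anchor; the unit-sigma smooth_l1 below is the common textbook form and may differ from the project's tensorflow_backend.smooth_l1.

import tensorflow as tf


def smooth_l1(x):
    # Elementwise smooth L1: 0.5 * x^2 where |x| < 1, |x| - 0.5 elsewhere.
    absolute = tf.abs(x)
    return tf.where(absolute < 1.0, 0.5 * tf.square(x), absolute - 0.5)


def rpn_regression_loss(output, target, labels):
    # Sketch only: drop "don't care" anchors, weight each anchor's smooth-L1
    # term by p* (1 for positives, 0 for negatives), normalize by the number
    # of positives.
    keep = tf.where(tf.not_equal(labels, -1))
    output = tf.gather_nd(output, keep)
    target = tf.gather_nd(target, keep)
    labels = tf.gather_nd(labels, keep)

    p_star = tf.cast(tf.greater(labels, 0), output.dtype)
    per_anchor = tf.reduce_sum(smooth_l1(output - target), axis=-1)

    return tf.reduce_sum(p_star * per_anchor) / (tf.reduce_sum(p_star) + 1e-7)
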
Example #3
def label(y_true,
          y_pred,
          inds_inside,
          negative_overlap=0.3,
          positive_overlap=0.7,
          clobber_positives=False):
    """
    Create bbox labels.
    label: 1 is positive, 0 is negative, -1 is do not care
    :param clobber_positives: if True, background labels are assigned last and
    may overwrite positive labels
    :param positive_overlap: IoU threshold at or above which anchors are
    labelled positive
    :param negative_overlap: IoU threshold below which anchors are labelled
    negative
    :param inds_inside: indices of anchors inside image
    :param y_pred: anchors
    :param y_true: ground truth objects
    :return: indices of gt boxes with the greatest overlap, balanced labels
    """
    ones = keras.backend.ones_like(inds_inside, dtype=keras.backend.floatx())
    labels = ones * -1
    zeros = keras.backend.zeros_like(inds_inside, dtype=keras.backend.floatx())

    argmax_overlaps_inds, max_overlaps, gt_argmax_overlaps_inds = overlapping(
        y_pred, y_true, inds_inside)

    # Assign background labels first so that positive labels can clobber them.
    if not clobber_positives:
        labels = tensorflow_backend.where(
            keras.backend.less(max_overlaps, negative_overlap), zeros, labels)

    # fg label: for each gt, anchor with highest overlap

    # TODO: generalize unique beyond 1D
    unique_indices, unique_indices_indices = tensorflow_backend.unique(
        gt_argmax_overlaps_inds, return_index=True)
    inverse_labels = keras.backend.gather(-1 * labels, unique_indices)
    unique_indices = keras.backend.expand_dims(unique_indices, 1)

    updates = keras.backend.ones_like(keras.backend.reshape(
        unique_indices, (-1, )),
                                      dtype=keras.backend.floatx())
    labels = tensorflow_backend.scatter_add_tensor(labels, unique_indices,
                                                   inverse_labels + updates)

    # Assign foreground labels based on IoU overlaps that are higher than
    # RPN_POSITIVE_OVERLAP.
    labels = tensorflow_backend.where(
        keras.backend.greater_equal(max_overlaps, positive_overlap), ones,
        labels)

    if clobber_positives:
        # assign bg labels last so that negative labels can clobber positives
        labels = tensorflow_backend.where(
            keras.backend.less(max_overlaps, negative_overlap), zeros, labels)

    return argmax_overlaps_inds, balance(labels)
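
The two IoU thresholds are easy to see on a toy example. The sketch below applies only the thresholding rules (background below 0.3, foreground at or above 0.7) with plain TensorFlow; it deliberately skips the per-ground-truth argmax step and the balance() call used above.

import tensorflow as tf

max_overlaps = tf.constant([0.05, 0.25, 0.45, 0.71, 0.90])

labels = -tf.ones_like(max_overlaps)                                  # don't care
labels = tf.where(max_overlaps < 0.3, tf.zeros_like(labels), labels)  # background
labels = tf.where(max_overlaps >= 0.7, tf.ones_like(labels), labels)  # foreground

print(labels.numpy())  # [ 0.  0. -1.  1.  1.]
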
Example #4
    def compute_loss(output, target):
        condition = keras.backend.not_equal(target, -1)

        indices = tensorflow_backend.where(condition)

        output = tensorflow_backend.gather_nd(output, indices)
        target = tensorflow_backend.gather_nd(target, indices)
        loss = keras.backend.binary_crossentropy(target, output)
        loss = keras.backend.mean(loss)

        return loss
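
For reference, the same masked binary cross-entropy can be written as a sketch with plain TensorFlow ops (this mirrors the snippet above rather than reproducing the project's implementation):

import tensorflow as tf


def rpn_classification_loss(output, target):
    # Keep only labelled anchors (target is 0 or 1; -1 means "don't care").
    keep = tf.where(tf.not_equal(target, -1))
    output = tf.gather_nd(output, keep)
    target = tf.gather_nd(target, keep)
    return tf.reduce_mean(tf.keras.backend.binary_crossentropy(target, output))
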
Example #5
def filter_boxes(proposals, minimum):
    """
    Filters proposed RoIs so that all have width and height at least as big as
    minimum
    """
    ws = proposals[:, 2] - proposals[:, 0] + 1
    hs = proposals[:, 3] - proposals[:, 1] + 1

    indices = tensorflow_backend.where((ws >= minimum) & (hs >= minimum))

    indices = keras.backend.flatten(indices)

    return keras.backend.cast(indices, "int32")
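
A quick toy check of the filtering rule with plain TensorFlow, assuming (x1, y1, x2, y2) proposals and minimum=5:

import tensorflow as tf

proposals = tf.constant([[0., 0., 10., 10.],   # 11 x 11 -> kept
                         [0., 0.,  2.,  9.],   #  3 x 10 -> dropped for minimum=5
                         [5., 5., 20., 30.]])  # 16 x 26 -> kept

ws = proposals[:, 2] - proposals[:, 0] + 1
hs = proposals[:, 3] - proposals[:, 1] + 1

indices = tf.reshape(tf.where((ws >= 5) & (hs >= 5)), (-1,))
print(indices.numpy())  # [0 2]
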
Example #6
    def compute_regression_loss(output, target, labels_target):
        """
        Return the regression loss of Faster R-CNN.
        :return: A loss function for R-CNNs.
        """
        inside_weights = 1.0
        outside_weights = 1.0
        sigma = 1.0
        sigma2 = keras.backend.square(sigma)

        # only consider positive classes
        output = output[:, :, 4:]
        target = target[:, :, 4:]

        labels_target = labels_target[:, :, 1:]

        # Mask out output values where the class is different from the target.
        a = tensorflow_backend.where(keras.backend.equal(labels_target, 1))
        a = keras.backend.cast(a, 'int32')

        rr = a[:, :2]
        cc = a[:, 2:]

        indices = [
            keras.backend.concatenate([rr, cc * 4 + 0], 1),
            keras.backend.concatenate([rr, cc * 4 + 1], 1),
            keras.backend.concatenate([rr, cc * 4 + 2], 1),
            keras.backend.concatenate([rr, cc * 4 + 3], 1)
        ]

        indices = keras.backend.concatenate(indices, 0)

        updates = keras.backend.ones_like(indices, dtype=keras.backend.floatx())
        labels = tensorflow_backend.scatter_add_tensor(
            keras.backend.zeros_like(output, dtype='float32'), indices,
            updates[:, 0])

        inside_mul = (inside_weights * keras.backend.abs(output - target) *
                      labels)
        smooth_l1_sign = keras.backend.cast(
            keras.backend.less(inside_mul, 1.0 / sigma2),
            keras.backend.floatx())

        smooth_l1_option1 = (inside_mul * inside_mul) * (0.5 * sigma2)
        smooth_l1_option2 = inside_mul - (0.5 / sigma2)

        smooth_l1_result = (smooth_l1_option1 * smooth_l1_sign)
        smooth_l1_result += (smooth_l1_option2 * (1.0 - smooth_l1_sign))

        loss = outside_weights * smooth_l1_result
        epsilon = 1e-4
        b = keras.backend.sum(epsilon + labels)
        loss = tensorflow.reduce_sum(loss) / b

        return loss
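
The least obvious step above is the scatter that builds a class-selective mask over the 4*K regression outputs. The toy sketch below reproduces just that step with plain TensorFlow and hand-made shapes (one image, two RoIs, three classes including background); it is an illustration, not the project's code.

import tensorflow as tf

num_classes = 3                                # including background
labels_target = tf.constant([[[0., 1., 0.],    # RoI 0 -> class 1
                              [0., 0., 1.]]])  # RoI 1 -> class 2

# Indices of the (image, RoI, positive-class) hits, skipping the background
# column exactly as the snippet above does.
hits = tf.cast(tf.where(tf.equal(labels_target[:, :, 1:], 1)), tf.int32)
rr, cc = hits[:, :2], hits[:, 2:]

indices = tf.concat([tf.concat([rr, cc * 4 + k], 1) for k in range(4)], 0)
updates = tf.ones(tf.shape(indices)[:1])       # one 1.0 per (RoI, slot) pair
mask = tf.scatter_nd(indices, updates, shape=(1, 2, 4 * (num_classes - 1)))

print(mask.numpy())  # RoI 0 marks slots 0-3 (class 1), RoI 1 marks slots 4-7
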
Example #7
    def get_fg_bg_rois(self, max_overlaps):

        # Select foreground RoIs as those with >= FG_THRESH overlap
        fg_inds = tensorflow_backend.where(max_overlaps >= self.fg_thresh)

        # Guard against the case when an image has fewer than fg_rois_per_image
        # foreground RoIs
        fg_rois_per_image = keras.backend.cast(self.fg_rois_per_image, 'int32')
        self.fg_rois_per_this_image = keras.backend.minimum(
            fg_rois_per_image,
            keras.backend.shape(fg_inds)[0])

        # Sample foreground regions without replacement
        fg_inds = self.sample_indices(fg_inds, self.fg_rois_per_this_image)

        # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
        bg_inds = tensorflow_backend.where((max_overlaps < self.bg_thresh_hi) &
                                           (max_overlaps >= self.bg_thresh_lo))

        # Compute number of background RoIs to take from this image (guarding
        # against there being fewer than desired)
        bg_rois_per_this_image = keras.backend.cast(
            self.rois_per_image, 'int32') - self.fg_rois_per_this_image
        bg_rois_per_this_image = keras.backend.minimum(
            bg_rois_per_this_image,
            keras.backend.shape(bg_inds)[0])

        # Sample background regions without replacement
        bg_inds = self.sample_indices(bg_inds, bg_rois_per_this_image)

        # The indices that we're selecting (both fg and bg)
        keep_inds = keras.backend.concatenate([fg_inds, bg_inds])

        return keep_inds
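
A toy numpy version of the same foreground/background split may make the bookkeeping clearer. The thresholds (fg >= 0.5, bg in [0.1, 0.5)) and the rois_per_image / fg_fraction values are the usual Fast R-CNN defaults, assumed here, and numpy.random.choice stands in for self.sample_indices.

import numpy as np

max_overlaps = np.array([0.9, 0.55, 0.45, 0.30, 0.05, 0.2])
fg_thresh, bg_thresh_lo, bg_thresh_hi = 0.5, 0.1, 0.5
rois_per_image, fg_fraction = 4, 0.25

fg_inds = np.where(max_overlaps >= fg_thresh)[0]
bg_inds = np.where((max_overlaps < bg_thresh_hi) &
                   (max_overlaps >= bg_thresh_lo))[0]

fg_count = min(int(rois_per_image * fg_fraction), len(fg_inds))
bg_count = min(rois_per_image - fg_count, len(bg_inds))

keep_inds = np.concatenate([np.random.choice(fg_inds, fg_count, replace=False),
                            np.random.choice(bg_inds, bg_count, replace=False)])
print(keep_inds)  # e.g. [0 2 3 5]: 1 foreground RoI and 3 background RoIs
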
Example #8
def get_bbox_regression_labels(bbox_target_data, labels, num_classes):
    """Bounding-box regression targets (bbox_target_data) are stored in a
    form N x (tx, ty, tw, th), labels N
    This function expands those targets into the 4-of-4*K representation used
    by the network (i.e. only one class has non-zero targets).
    Returns:
        bbox_target: N x 4K blob of regression targets
    """

    n = keras.backend.shape(bbox_target_data)[0]

    bbox_targets = tensorflow.zeros((n, 4 * num_classes),
                                    dtype=keras.backend.floatx())

    inds = keras.backend.reshape(tensorflow_backend.where(labels > 0), (-1, ))

    labels = keras.backend.gather(labels, inds)

    start = 4 * labels

    ii = keras.backend.expand_dims(inds)
    ii = keras.backend.tile(ii, [4, 1])

    aa = keras.backend.expand_dims(
        keras.backend.concatenate([start, start + 1, start + 2, start + 3], 0))
    aa = keras.backend.cast(aa, dtype='int64')

    indices = keras.backend.concatenate([ii, aa], 1)

    updates = keras.backend.gather(bbox_target_data, inds)
    updates = keras.backend.transpose(updates)
    updates = keras.backend.reshape(updates, (-1, ))

    bbox_targets = tensorflow_backend.scatter_add_tensor(
        bbox_targets, indices, updates)

    return bbox_targets
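
A toy check of the 4*K expansion with plain TensorFlow; tf.scatter_nd stands in for the project's scatter_add_tensor, which is equivalent here because the base tensor is all zeros. With two RoIs and three classes, the single positive RoI writes its four targets into the slots of its own class.

import tensorflow as tf

num_classes = 3
bbox_target_data = tf.constant([[0.1, 0.2, 0.3, 0.4],
                                [0.5, 0.6, 0.7, 0.8]])
labels = tf.constant([2, 0], dtype=tf.int64)    # RoI 0 -> class 2, RoI 1 -> bg

inds = tf.reshape(tf.where(labels > 0), (-1,))  # only RoI 0 has a positive class
start = 4 * tf.gather(labels, inds)

rows = tf.tile(tf.expand_dims(inds, 1), [4, 1])
cols = tf.expand_dims(tf.concat([start, start + 1, start + 2, start + 3], 0), 1)
indices = tf.cast(tf.concat([rows, cols], 1), tf.int32)

updates = tf.reshape(tf.transpose(tf.gather(bbox_target_data, inds)), (-1,))
bbox_targets = tf.scatter_nd(indices, updates, shape=(2, 4 * num_classes))

print(bbox_targets.numpy()[0, 8:12])  # [0.1 0.2 0.3 0.4] in the class-2 slots
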
Example #9
def inside_image(boxes, im_info, allowed_border=0):
    """
    Calc indices of boxes which are located completely inside of the image
    whose size is specified by img_info ((height, width, scale)-shaped array).
    :param boxes: (None, 4) tensor containing boxes in original image
    (x1, y1, x2, y2)
    :param im_info: (height, width, scale)
    :param allowed_border: allow boxes to be outside the image by
    allowed_border pixels
    :return: (None, 4) indices of boxes completely in original image, (None,
    4) tensor of boxes completely inside image
    """

    indices = tensorflow_backend.where(
        (boxes[:, 0] >= -allowed_border) & (boxes[:, 1] >= -allowed_border)
        & (boxes[:, 2] < allowed_border + im_info[1]) &  # width
        (boxes[:, 3] < allowed_border + im_info[0])  # height
    )

    indices = keras.backend.cast(indices, "int32")

    gathered = keras.backend.gather(boxes, indices)

    return indices[:, 0], keras.backend.reshape(gathered, [-1, 4])
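
A quick toy check with plain TensorFlow and allowed_border=0: of the two boxes below, only the first lies completely inside a 20 x 30 (height x width) image.

import tensorflow as tf

boxes = tf.constant([[ 1.,  1., 25., 15.],
                     [10.,  5., 40., 15.]])
im_info = (20., 30., 1.)  # (height, width, scale)

keep = tf.where((boxes[:, 0] >= 0) & (boxes[:, 1] >= 0) &
                (boxes[:, 2] < im_info[1]) &   # x2 < width
                (boxes[:, 3] < im_info[0]))    # y2 < height

print(tf.reshape(keep, (-1,)).numpy())  # [0]
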
Example #10
def inside_and_outside_weights(anchors, subsample, positive_weight,
                               proposed_inside_weights):
    """
    Creates the inside_weights and outside_weights bounding-box weights.
    Args:
        anchors: Generated anchors.
        subsample:  Labels obtained after subsampling.
        positive_weight: Weight given to positive labels; if negative,
            positive and negative labels are weighted uniformly.
        proposed_inside_weights: Per-coordinate inside bounding-box weights
            assigned to positive anchors.
    Returns:
        inside_weights:  Inside bounding-box weights.
        outside_weights: Outside bounding-box weights.
    """
    number_of_anchors = keras.backend.int_shape(anchors)[0]

    proposed_inside_weights = keras.backend.constant([proposed_inside_weights])
    proposed_inside_weights = keras.backend.tile(proposed_inside_weights,
                                                 (number_of_anchors, 1))

    positive_condition = keras.backend.equal(subsample, 1)
    negative_condition = keras.backend.equal(subsample, 0)

    if positive_weight < 0:
        # Assign equal weights to both positive and negative labels.
        examples = keras.backend.cast(negative_condition,
                                      keras.backend.floatx())
        examples = keras.backend.sum(examples)

        positive_weights = keras.backend.ones_like(anchors) / examples
        negative_weights = keras.backend.ones_like(anchors) / examples
    else:
        # Assign weights that favor either the positive or the negative
        # labels.
        assert (positive_weight > 0) & (positive_weight < 1)

        positive_examples = keras.backend.cast(positive_condition,
                                               keras.backend.floatx())
        positive_examples = keras.backend.sum(positive_examples)

        negative_examples = keras.backend.cast(negative_condition,
                                               keras.backend.floatx())
        negative_examples = keras.backend.sum(negative_examples)

        positive_weights = keras.backend.ones_like(anchors) * (
            0 + positive_weight) / positive_examples
        negative_weights = keras.backend.ones_like(anchors) * (
            1 - positive_weight) / negative_examples

    inside_weights = keras.backend.zeros_like(anchors)
    inside_weights = tensorflow_backend.where(positive_condition,
                                              proposed_inside_weights,
                                              inside_weights)

    outside_weights = keras.backend.zeros_like(anchors)
    outside_weights = tensorflow_backend.where(positive_condition,
                                               positive_weights,
                                               outside_weights)
    outside_weights = tensorflow_backend.where(negative_condition,
                                               negative_weights,
                                               outside_weights)

    return inside_weights, outside_weights
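
A toy numpy illustration of the positive_weight < 0 branch: every labelled anchor receives the same outside weight (1 / number of negative labels, mirroring the count used in the snippet above), and only positive anchors receive non-zero inside weights.

import numpy as np

subsample = np.array([1., 0., -1., 0.])           # one label per anchor
proposed_inside_weights = np.array([1., 1., 1., 1.])

num_examples = np.sum(subsample == 0)             # negatives only, as above

outside_weights = np.zeros((4, 4))
outside_weights[subsample == 1] = 1.0 / num_examples
outside_weights[subsample == 0] = 1.0 / num_examples

inside_weights = np.zeros((4, 4))
inside_weights[subsample == 1] = proposed_inside_weights

print(inside_weights[0], outside_weights[1])  # [1. 1. 1. 1.] [0.5 0.5 0.5 0.5]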