Example #1
0
    def iou_loss(self, index, label_dict, pred_dict):
        """IoU-regression loss for head `index`.

        Regresses the predicted IoU score toward the 3D IoU between each
        proposal and its assigned ground-truth box, normalized to [-1, 1]
        and restricted to the ground-truth class channel.  Masked to
        positive points and averaged over their count.  Returns the scalar
        loss (not added to the LOSSES collection here).
        """
        # Positive-point mask, collapsed over the class dimension.
        positive_mask = tf.reduce_max(
            label_dict[maps_dict.GT_PMASK][index], axis=-1)

        # One-hot class targets; gt labels are 1-based, hence the -1 shift.
        class_onehot = tf.cast(
            tf.one_hot(label_dict[maps_dict.GT_CLS][index] - 1,
                       depth=len(self.cls_list),
                       on_value=1,
                       off_value=0,
                       axis=-1),
            tf.float32)  # [bs, pts_num, cls_num]

        assigned_boxes = label_dict[maps_dict.GT_BOXES_ANCHORS_3D][index]
        proposals = pred_dict[maps_dict.KEY_ANCHORS_3D][index]

        # bs, proposal_num, cls_num -- BEV IoU is computed but unused here.
        _, iou_3d = calc_iou_match_warper(proposals, assigned_boxes)
        # Map IoU from [0, 1] to [-1, 1], then keep only the gt-class slot.
        iou_target = (iou_3d * 2. - 1.) * class_onehot

        predicted_iou = pred_dict[maps_dict.PRED_IOU_3D_VALUE][index]

        # Guard against division by zero when no point is positive.
        normalizer = tf.maximum(1., tf.reduce_sum(positive_mask))

        loss = model_util.huber_loss(predicted_iou - iou_target, delta=1.)
        loss = tf.reduce_mean(loss, axis=-1) * positive_mask
        loss = tf.identity(tf.reduce_sum(loss) / normalizer,
                           'iou_loss%d' % index)
        tf.summary.scalar('iou_loss%d' % index, loss)
        # tf.add_to_collection(tf.GraphKeys.LOSSES, iou_loss)
        return loss
Example #2
0
    def offset_loss_res(self, index, label_dict, pred_dict):
        """Residual-style box-offset regression loss for head `index`.

        Applies a Huber loss between predicted and ground-truth offsets,
        masks it to positive points, normalizes by the positive count, and
        registers the scalar in the graph's LOSSES collection.
        """
        pmask = label_dict[maps_dict.GT_PMASK][index]
        gt_offset = label_dict[maps_dict.GT_OFFSET][index]

        pred_offset = pred_dict[maps_dict.PRED_OFFSET][index]

        # Guard against division by zero when no point is positive.
        norm_param = tf.maximum(1., tf.reduce_sum(pmask))

        offset_loss = model_util.huber_loss((pred_offset - gt_offset),
                                            delta=1.)
        offset_loss = tf.reduce_sum(offset_loss, axis=-1) * pmask
        offset_loss = tf.identity(
            tf.reduce_sum(offset_loss) / norm_param, 'offset_loss%d' % index)
        tf.summary.scalar('offset_loss%d' % index, offset_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, offset_loss)
Example #3
0
    def corner_loss(self, index, label_dict, pred_dict):
        """Corner regression loss for head `index`.

        Huber loss between the eight predicted and ground-truth box
        corners, summed over the corner and coordinate axes, masked to
        positive points, normalized by the positive count, and registered
        in the graph's LOSSES collection.
        """
        pmask = label_dict[maps_dict.GT_PMASK][index]
        gt_corners = label_dict[maps_dict.CORNER_LOSS_GT_BOXES_CORNERS][index]

        pred_corners = pred_dict[
            maps_dict.CORNER_LOSS_PRED_BOXES_CORNERS][index]

        # Guard against division by zero when no point is positive.
        norm_param = tf.maximum(1., tf.reduce_sum(pmask))

        corner_loss = model_util.huber_loss((pred_corners - gt_corners),
                                            delta=1.)
        # Sum over the corner and xyz axes before masking per point.
        corner_loss = tf.reduce_sum(corner_loss, axis=[-2, -1]) * pmask
        corner_loss = tf.identity(
            tf.reduce_sum(corner_loss) / norm_param, 'corner_loss%d' % index)
        tf.summary.scalar('corner_loss%d' % index, corner_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, corner_loss)
Example #4
0
    def vote_loss(self, index, pred_dict, placeholders):
        """Huber loss on the voted center offsets of head `index`.

        Vote targets and the valid-vote mask are computed on the host via
        `vote_targets_np`; the loss is masked to valid votes, normalized
        by their count, and added to the LOSSES collection.
        """
        pred_offset = pred_dict[maps_dict.PRED_VOTE_OFFSET][index]
        vote_base = pred_dict[maps_dict.PRED_VOTE_BASE][index]
        bs, pts_num, _ = pred_offset.get_shape().as_list()
        gt_boxes_3d = placeholders[maps_dict.PL_LABEL_BOXES_3D]

        # py_func drops static shape information, so restore it explicitly.
        mask, target = tf.py_func(vote_targets_np,
                                  [vote_base, gt_boxes_3d],
                                  [tf.float32, tf.float32])
        mask = tf.reshape(mask, [bs, pts_num])
        target = tf.reshape(target, [bs, pts_num, 3])

        per_point = tf.reduce_sum(
            model_util.huber_loss(target - pred_offset, delta=1.), axis=-1)
        loss = tf.reduce_sum(per_point * mask) / tf.maximum(
            1., tf.reduce_sum(mask))
        loss = tf.identity(loss, 'vote_loss%d' % index)
        tf.summary.scalar('vote_loss%d' % index, loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, loss)
Example #5
0
    def velo_attr_loss(self, index, label_dict, pred_dict):
        """Attribute classification + velocity regression loss for head `index`.

        Attribute branch: sigmoid cross-entropy over 8 one-hot attribute
        classes, restricted to positive points with a valid (>= 0)
        attribute label; added to the LOSSES collection.
        Velocity branch: Huber loss on the ground-truth velocity,
        restricted to positive points whose velocity is not NaN; returned
        to the caller instead of being collected.
        """
        pmask = label_dict[maps_dict.GT_PMASK][index]
        # bs, pts_num, cls_num
        gt_attribute = label_dict[maps_dict.GT_ATTRIBUTE][index]
        # bs,pts_num,cls_num,2
        gt_velocity = label_dict[maps_dict.GT_VELOCITY][index]

        pred_attribute = pred_dict[maps_dict.PRED_ATTRIBUTE][index]
        pred_velocity = pred_dict[maps_dict.PRED_VELOCITY][index]

        # Negative attribute labels mean "no attribute": mask them out.
        attr_mask = tf.cast(tf.greater_equal(gt_attribute, 0), tf.float32)
        attr_mask = attr_mask * pmask
        gt_attribute_onehot = tf.cast(
            tf.one_hot(gt_attribute, depth=8, on_value=1, off_value=0,
                       axis=-1), tf.float32)  # [bs, pts_num, cls_num, 8]
        attr_loss = tf.nn.sigmoid_cross_entropy_with_logits(
            labels=gt_attribute_onehot, logits=pred_attribute)
        attr_loss = attr_loss * tf.expand_dims(attr_mask, axis=-1)
        # Normalize by valid-point count and the 8 attribute channels.
        attr_loss = tf.reduce_sum(attr_loss) / (
            tf.maximum(1., tf.reduce_sum(attr_mask)) * 8.)
        attr_loss = tf.identity(attr_loss, 'attribute_loss_%d' % index)
        tf.summary.scalar('attribute_loss_%d' % index, attr_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, attr_loss)

        # A NaN ground-truth velocity marks "unknown"; the sum over the
        # last axis is NaN iff any component is NaN, masking such points.
        velo_mask = tf.cast(
            tf.logical_not(tf.is_nan(tf.reduce_sum(gt_velocity, axis=-1))),
            tf.float32)
        velo_mask = velo_mask * pmask
        # Replace NaNs with zeros so masked entries cannot poison the graph.
        zero_velocity = tf.zeros_like(gt_velocity)
        gt_velocity = tf.where(tf.is_nan(gt_velocity), zero_velocity,
                               gt_velocity)
        velo_loss = model_util.huber_loss(pred_velocity - gt_velocity,
                                          delta=1.)
        velo_loss = tf.reduce_sum(velo_loss, axis=-1) * velo_mask
        velo_loss = tf.identity(
            tf.reduce_sum(velo_loss) /
            tf.maximum(1., tf.reduce_sum(velo_mask)),
            'velocity_loss_%d' % index)
        tf.summary.scalar('velocity_loss_%d' % index, velo_loss)
        # tf.add_to_collection(tf.GraphKeys.LOSSES, velo_loss)
        return velo_loss
Example #6
0
    def offset_loss_bin(self, index, label_dict, pred_dict):
        """Bin-based offset regression loss for head `index`.

        x and z offsets are regressed with a classification (bin) plus
        residual scheme via `bin_res_loss`; the remaining offsets
        (y residual and size residuals) use a plain Huber loss.  The
        x/z losses are collected inside `bin_res_loss`; the remaining
        Huber loss is returned to the caller.
        """
        pmask = label_dict[maps_dict.GT_PMASK][index]

        # bs, points_num, cls_num, 8
        # xbin/xres/zbin/zres/yres/size_res
        gt_offset = label_dict[maps_dict.GT_OFFSET][index]
        xbin, xres, zbin, zres = tf.unstack(gt_offset[:, :, :, :4], axis=-1)
        gt_other_offset = gt_offset[:, :, :, 4:]

        # Predicted layout: [xbin | xres | zbin | zres | other], each of the
        # first four spanning reg_bin_cls_num channels.
        pred_offset = pred_dict[maps_dict.PRED_OFFSET][index]
        pred_xbin = tf.slice(pred_offset, [0, 0, 0, self.reg_bin_cls_num * 0],
                             [-1, -1, -1, self.reg_bin_cls_num])
        pred_xres = tf.slice(pred_offset, [0, 0, 0, self.reg_bin_cls_num * 1],
                             [-1, -1, -1, self.reg_bin_cls_num])
        pred_zbin = tf.slice(pred_offset, [0, 0, 0, self.reg_bin_cls_num * 2],
                             [-1, -1, -1, self.reg_bin_cls_num])
        pred_zres = tf.slice(pred_offset, [0, 0, 0, self.reg_bin_cls_num * 3],
                             [-1, -1, -1, self.reg_bin_cls_num])
        pred_other_offset = tf.slice(pred_offset,
                                     [0, 0, 0, self.reg_bin_cls_num * 4],
                                     [-1, -1, -1, -1])

        # Guard against division by zero when no point is positive.
        norm_param = tf.maximum(1., tf.reduce_sum(pmask))

        self.bin_res_loss(pmask, norm_param, xbin, xres, pred_xbin, pred_xres,
                          self.reg_bin_cls_num, 'x_loss%d' % index)
        self.bin_res_loss(pmask, norm_param, zbin, zres, pred_zbin, pred_zres,
                          self.reg_bin_cls_num, 'z_loss%d' % index)

        other_offset_loss = model_util.huber_loss(
            (pred_other_offset - gt_other_offset), delta=1.)
        other_offset_loss = tf.reduce_sum(other_offset_loss, axis=-1) * pmask
        other_offset_loss = tf.identity(
            tf.reduce_sum(other_offset_loss) / norm_param,
            'other_offset_loss%d' % index)
        tf.summary.scalar('other_offset_loss%d' % index, other_offset_loss)
        # tf.add_to_collection(tf.GraphKeys.LOSSES, other_offset_loss)
        return other_offset_loss
Example #7
0
    def bin_res_loss(self, pmask, norm_param, gt_bin, gt_res, pred_bin,
                     pred_res, bin_class_num, scope):
        """Classification (bin) + regression (residual) loss pair.

        The bin branch is a masked softmax cross-entropy over
        `bin_class_num` classes; the residual branch applies a Huber loss
        to the residual predicted for the ground-truth bin only.  Both
        scalars are normalized by `norm_param`, summarized under
        'bin_<scope>' / 'res_<scope>', and added to the LOSSES collection.
        """
        bin_labels = tf.cast(gt_bin, tf.int32)

        # --- bin classification ---
        ce = tf.nn.sparse_softmax_cross_entropy_with_logits(
            logits=pred_bin, labels=bin_labels)
        bin_loss = tf.identity(
            tf.reduce_sum(ce * pmask) / norm_param, 'bin_%s' % scope)
        tf.summary.scalar('bin_%s' % scope, bin_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, bin_loss)

        # --- residual regression: train only the gt bin's residual ---
        bin_onehot = tf.cast(
            tf.one_hot(bin_labels,
                       depth=bin_class_num,
                       on_value=1,
                       off_value=0,
                       axis=-1), tf.float32)
        selected_res = tf.reduce_sum(pred_res * bin_onehot, axis=-1)
        res_err = model_util.huber_loss((selected_res - gt_res) * pmask,
                                        delta=1.)
        res_loss = tf.identity(
            tf.reduce_sum(res_err) / norm_param, 'res_%s' % scope)
        tf.summary.scalar('res_%s' % scope, res_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, res_loss)