Example #1
# NOTE (assumption): these snippets come from mmdet-style codebases and omit
# their imports. They typically rely on `torch`, `weight_reduce_loss` from
# mmdet's loss utilities, and `_sigmoid_focal_loss`, the raw CUDA focal-loss
# autograd Function (applied positionally, since Function.apply rejects
# keyword arguments) that returns the unreduced per-element loss.
def CCsigmoid_focal_loss(pred,
                         target,
                         weight=None,
                         gamma=2.0,
                         alpha=0.25,
                         reduction='mean',
                         avg_factor=None):
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    # Compute focal losses against both label sets, then blend: positions that
    # are positive in only one target keep that target's loss; positions that
    # are positive in both keep each target's loss at its own class column.
    loss1 = _sigmoid_focal_loss(pred, target[0], gamma, alpha)
    loss2 = _sigmoid_focal_loss(pred, target[1], gamma, alpha)
    weight1 = weight[0].view(-1, 1)
    weight2 = weight[1].view(-1, 1)
    loss1 = loss1 * weight1
    loss2 = loss2 * weight2
    loss = 0.5 * loss1 + 0.5 * loss2
    # Labels in [0, num_classes) are foreground; num_classes is background.
    background_label = loss1.shape[1]
    pos_ind_only1 = ((target[0] < background_label) &
                     (target[1] == background_label)).nonzero().reshape(-1)
    pos_ind_only2 = ((target[1] < background_label) &
                     (target[0] == background_label)).nonzero().reshape(-1)
    pos_ind_both = ((target[1] < background_label) &
                    (target[0] < background_label)).nonzero().reshape(-1)
    cls_ind_both1 = target[0][pos_ind_both]
    cls_ind_both2 = target[1][pos_ind_both]
    loss[pos_ind_only1] = loss1[pos_ind_only1]
    loss[pos_ind_only2] = loss2[pos_ind_only2]
    loss[pos_ind_both, cls_ind_both1] = loss1[pos_ind_both, cls_ind_both1]
    loss[pos_ind_both, cls_ind_both2] = loss2[pos_ind_both, cls_ind_both2]

    # weight=None here: the per-sample weights were already multiplied in above.
    loss = weight_reduce_loss(loss, None, reduction, avg_factor)
    return loss
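
All of these examples delegate weighting and reduction to weight_reduce_loss. For reference, a minimal sketch of that helper and its companion reduce_loss, paraphrased from mmdet's loss utilities:

import torch.nn.functional as F

def reduce_loss(loss, reduction):
    # Reduce an elementwise loss tensor: 'none' | 'mean' | 'sum'.
    reduction_enum = F._Reduction.get_enum(reduction)
    if reduction_enum == 0:    # 'none'
        return loss
    elif reduction_enum == 1:  # 'mean'
        return loss.mean()
    elif reduction_enum == 2:  # 'sum'
        return loss.sum()

def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):
    # Apply an elementwise weight, then reduce. avg_factor replaces the
    # denominator of the mean (e.g. the number of positive anchors).
    if weight is not None:
        loss = loss * weight
    if avg_factor is None:
        loss = reduce_loss(loss, reduction)
    elif reduction == 'mean':
        loss = loss.sum() / avg_factor
    elif reduction != 'none':
        raise ValueError('avg_factor can not be used with reduction="sum"')
    return loss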
Example #2
def sigmoid_focal_loss(pred,
                       target,
                       weight=None,
                       gamma=2.0,
                       alpha=0.25,
                       reduction='mean',
                       avg_factor=None,
                       use_py_version=False):
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    if use_py_version:
        loss = py_sigmoid_focal_loss(pred,
                                     target,
                                     weight=weight,
                                     gamma=gamma,
                                     alpha=alpha,
                                     reduction=reduction,
                                     avg_factor=avg_factor)
    else:
        loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
        # TODO: find a proper way to handle the shape of weight
        if weight is not None:
            weight = weight.view(-1, 1)
        loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
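
The use_py_version branch calls py_sigmoid_focal_loss, a pure-PyTorch fallback that is not shown above. A sketch modelled on mmdet's implementation (here target is a binary/one-hot tensor of the same shape as pred):

import torch.nn.functional as F

def py_sigmoid_focal_loss(pred, target, weight=None, gamma=2.0, alpha=0.25,
                          reduction='mean', avg_factor=None):
    pred_sigmoid = pred.sigmoid()
    target = target.type_as(pred)
    # pt is the probability assigned to the wrong outcome; it drives the
    # (1 - p_t)**gamma modulating factor from the Focal Loss paper.
    pt = (1 - pred_sigmoid) * target + pred_sigmoid * (1 - target)
    focal_weight = (alpha * target + (1 - alpha) *
                    (1 - target)) * pt.pow(gamma)
    loss = F.binary_cross_entropy_with_logits(
        pred, target, reduction='none') * focal_weight
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss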
Example #3
def sigmoid_focal_loss(pred,
                       target,
                       weight=None,
                       gamma=2.0,
                       alpha=0.25,
                       reduction='mean',
                       avg_factor=None):
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
    # TODO: find a proper way to handle the shape of weight
    if weight is not None:
        if weight.shape != loss.shape:
            if weight.size(0) == loss.size(0):
                # For most cases, weight is of shape (num_priors, ),
                #  which means it does not have the second axis num_class
                weight = weight.view(-1, 1)
            else:
                # Sometimes, weight per anchor per class is also needed. e.g.
                #  in FSAF. But it may be flattened of shape
                #  (num_priors x num_class, ), while loss is still of shape
                #  (num_priors, num_class).
                assert weight.numel() == loss.numel()
                weight = weight.view(loss.size(0), -1)
        assert weight.ndim == loss.ndim
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
Example #4
def sigmoid_focal_loss(pred,
                       target,
                       weight=None,
                       gamma=2.0,
                       alpha=0.25,
                       reduction='mean',
                       avg_factor=None):
    """A warpper of cuda version
    `Focal Loss <https://arxiv.org/abs/1708.02002>`_

    Args:
        pred (torch.Tensor): The prediction with shape (N, C), C is the number
            of classes.
        target (torch.Tensor): The learning label of the prediction.
        weight (torch.Tensor, optional): Sample-wise loss weight.
        gamma (float, optional): The gamma for calculating the modulating
            factor. Defaults to 2.0.
        alpha (float, optional): A balanced form for Focal Loss.
            Defaults to 0.25.
        reduction (str, optional): The method used to reduce the loss into
            a scalar. Defaults to 'mean'. Options are "none", "mean" and "sum".
        avg_factor (int, optional): Average factor that is used to average
            the loss. Defaults to None.
    """
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
    if weight is not None:
        if weight.shape != loss.shape:
            if weight.size(0) == loss.size(0):
                # For most cases, weight is of shape (num_priors, ),
                #  which means it does not have the second axis num_class
                weight = weight.view(-1, 1)
            else:
                # Sometimes, weight per anchor per class is also needed. e.g.
                #  in FSAF. But it may be flattened of shape
                #  (num_priors x num_class, ), while loss is still of shape
                #  (num_priors, num_class).
                assert weight.numel() == loss.numel()
                weight = weight.view(loss.size(0), -1)
        assert weight.ndim == loss.ndim
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
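
A usage sketch for the wrapper above (the shapes follow the docstring; the class count of 80 and the CUDA placement are assumptions, since the fused op requires a GPU build):

import torch

pred = torch.randn(8, 80, device='cuda')             # (N, C) logits
target = torch.randint(0, 80, (8,), device='cuda')   # per-sample class index
per_sample_weight = torch.ones(8, device='cuda')

loss = sigmoid_focal_loss(pred, target, weight=per_sample_weight,
                          gamma=2.0, alpha=0.25, reduction='mean')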
Example #5
def weighted_focal_loss(pred,
                        target,
                        weight=None,
                        gamma=2.0,
                        alpha=0.25,
                        reduction='mean',
                        avg_factor=None):
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
    # NOTE: the `weight` argument is ignored; a hard-coded per-class weight
    # vector for a 19-class dataset is used instead. Three classes (labelled
    # large-vehicle, small-vehicle and ship in the original comments) are
    # down-weighted; all other classes keep weight 1.
    weight = loss.new_ones(19)
    weight[11] = 0.5
    weight[17] = 0.1
    weight[10] = 0.25
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
Example #6
def sigmoid_focal_loss(pred,
                       target,
                       weight=None,
                       gamma=2.0,
                       alpha=0.25,
                       reduction='mean',
                       avg_factor=None,
                       cate_loss_weight=None):
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
    # Optional per-class weighting of the loss (defaults to all ones);
    # new_tensor keeps the weights on the same device/dtype as the loss,
    # instead of the original hard-coded .cuda() call that broke on CPU.
    if cate_loss_weight is None:
        cate_loss_weight = [1.0] * loss.shape[1]
    cate_loss_weight = loss.new_tensor(cate_loss_weight).unsqueeze(0)
    loss = loss * cate_loss_weight

    # TODO: find a proper way to handle the shape of weight
    if weight is not None:
        weight = weight.view(-1, 1)
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
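
For illustration, the variant above might be called with explicit per-class weights like this (the 3-class setup and the choice to down-weight class 2 are made up for the example):

import torch

pred = torch.randn(16, 3, device='cuda')
target = torch.randint(0, 3, (16,), device='cuda')
loss = sigmoid_focal_loss(pred, target, cate_loss_weight=[1.0, 1.0, 0.5])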
Example #7
def sigmoid_focal_loss(pred,
                       target,
                       weight=None,
                       gamma=2.0,
                       alpha=0.25,
                       reduction='mean',
                       avg_factor=None):
    '''
    pred: [num_points, C]
    target: [num_points], values in {-1, 0, 1, ..., C}; -1 means ignored,
        0 means background.

    p = sigmoid(pred)
    loss = - alpha * (label == its class) * (1 - p)**gamma * log(p)
           - (1 - alpha) * (label != its class or label == background)
             * p**gamma * log(1 - p)
    Reference: https://github.com/pytorch/pytorch/blob/master/modules/detectron/sigmoid_focal_loss_op.cc
    '''
    # Function.apply does not accept keyword arguments, so the decorator
    # "weighted_loss" is not applicable
    loss = _sigmoid_focal_loss(pred, target, gamma, alpha)
    # TODO: find a proper way to handle the shape of weight
    if weight is not None:
        weight = weight.view(-1, 1)
    loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
    return loss
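
The label convention in the docstring (classes 1..C, 0 for background, -1 for ignored points) follows the linked Detectron op. A pure-PyTorch rendering of that per-element formula, as a sketch rather than the fused kernel:

import torch
import torch.nn.functional as F

def focal_loss_elementwise(pred, target, gamma=2.0, alpha=0.25):
    num_classes = pred.size(1)
    # One-hot over classes 1..C; background (0) and ignored (-1) rows stay
    # all-zero, so they only get the negative-side term before masking.
    classes = torch.arange(1, num_classes + 1, device=pred.device)
    t = (target.view(-1, 1) == classes.view(1, -1)).float()
    p = pred.sigmoid()
    pt = (1 - p) * t + p * (1 - t)
    focal_weight = (alpha * t + (1 - alpha) * (1 - t)) * pt.pow(gamma)
    loss = F.binary_cross_entropy_with_logits(
        pred, t, reduction='none') * focal_weight
    loss = loss * (target != -1).float().view(-1, 1)  # zero out ignored points
    return loss  # [num_points, C], unreduced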