Пример #1
0
    def forward(
        self,
        pos_scores: FloatTensorType,
        neg_scores: FloatTensorType,
        weight: Optional[FloatTensorType],
    ) -> FloatTensorType:
        """Compute a margin ranking loss between positive and negative scores.

        Each positive score is compared against its row of negative scores;
        pairs violating the margin contribute to the loss. If ``weight`` is
        given, every positive's contribution is scaled by its weight.

        Args:
            pos_scores: 1-D tensor of scores for positive samples.
            neg_scores: 2-D tensor, one row of negative scores per positive.
            weight: optional 1-D per-positive weight, or None for uniform.

        Returns:
            A scalar loss tensor.
        """
        num_pos = match_shape(pos_scores, -1)
        num_neg = match_shape(neg_scores, num_pos, -1)

        # FIXME Workaround for https://github.com/pytorch/pytorch/issues/15223:
        # with no pairs to rank, return a zero scalar that still tracks grads.
        if num_pos == 0 or num_neg == 0:
            return torch.zeros((), device=pos_scores.device, requires_grad=True)

        # Target of -1 means "first argument should score LOWER than second",
        # i.e. negatives below positives; it broadcasts over all pairs.
        target = pos_scores.new_full((1, 1), -1, dtype=torch.float)

        if weight is None:
            # Unweighted case: let the framework reduce — more memory efficient.
            return F.margin_ranking_loss(
                neg_scores,
                pos_scores.unsqueeze(1),
                target=target,
                margin=self.margin,
                reduction="sum",
            )

        match_shape(weight, num_pos)
        per_pair_loss = F.margin_ranking_loss(
            neg_scores,
            pos_scores.unsqueeze(1),
            target=target,
            margin=self.margin,
            reduction="none",
        )
        # Scale each positive's row of pair losses by its weight, then reduce.
        return (per_pair_loss * weight.unsqueeze(-1)).sum()
Пример #2
0
    def forward(self, pos_scores: FloatTensorType,
                neg_scores: FloatTensorType) -> FloatTensorType:
        """Compute the summed margin ranking loss over all positive/negative pairs.

        Args:
            pos_scores: 1-D tensor of scores for positive samples.
            neg_scores: 2-D tensor, one row of negative scores per positive.

        Returns:
            A scalar loss tensor.
        """
        n_pos = match_shape(pos_scores, -1)
        n_neg = match_shape(neg_scores, n_pos, -1)

        # FIXME Workaround for https://github.com/pytorch/pytorch/issues/15223:
        # with no pairs to rank, return a zero scalar that still tracks grads.
        if n_pos == 0 or n_neg == 0:
            return torch.zeros((), device=pos_scores.device, requires_grad=True)

        # Target of -1 means negatives should score lower than positives;
        # the (1, 1) tensor broadcasts over every pair.
        return F.margin_ranking_loss(
            neg_scores,
            pos_scores.unsqueeze(1),
            target=pos_scores.new_full((1, 1), -1, dtype=torch.float),
            margin=self.margin,
            reduction="sum",
        )