Example #1
    def test_reduction(self):
        self.args.label_smoothing = 0.1
        crit = LabelSmoothedCrossEntropyCriterion.build_criterion(self.args, self.task)
        # the reduced loss should equal the sum of the per-element losses
        loss, _, logging_output = crit(self.model, self.sample, reduce=True)
        unreduced_loss, _, _ = crit(self.model, self.sample, reduce=False)
        self.assertAlmostEqual(loss, unreduced_loss.sum())

    def test_zero_eps(self):
        # with label_smoothing == 0 the smoothed criterion degenerates to plain NLL
        self.args.label_smoothing = 0.0
        nll_crit = CrossEntropyCriterion.build_criterion(self.args, self.task)
        smooth_crit = LabelSmoothedCrossEntropyCriterion.build_criterion(self.args, self.task)
        nll_loss, nll_sample_size, nll_logging_output = nll_crit(self.model, self.sample)
        smooth_loss, smooth_sample_size, smooth_logging_output = smooth_crit(self.model, self.sample)
        self.assertAlmostEqual(nll_loss, smooth_loss)

    def test_nll_loss(self):
        # the 'nll_loss' reported by the smoothed criterion should match the plain NLL loss
        self.args.label_smoothing = 0.1
        nll_crit = CrossEntropyCriterion.build_criterion(self.args, self.task)
        smooth_crit = LabelSmoothedCrossEntropyCriterion.build_criterion(self.args, self.task)
        nll_loss, nll_sample_size, nll_logging_output = nll_crit(self.model, self.sample)
        smooth_loss, smooth_sample_size, smooth_logging_output = smooth_crit(self.model, self.sample)
        self.assertLess(abs(nll_loss - nll_logging_output['loss']), 1e-6)
        self.assertLess(abs(nll_loss - smooth_logging_output['nll_loss']), 1e-6)
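For reference, here is a minimal standalone sketch of the quantity these tests exercise, written with plain PyTorch. The helper name label_smoothed_nll_loss and the epsilon / V normalization are assumptions for illustration; the actual fairseq criterion may normalize differently, but the epsilon == 0 degenerate case checked by test_zero_eps holds either way.

import torch

def label_smoothed_nll_loss(lprobs, target, epsilon, ignore_index=None):
    # lprobs: (N, V) log-probabilities, target: (N,) gold indices
    nll_loss = -lprobs.gather(dim=-1, index=target.unsqueeze(-1)).squeeze(-1)
    smooth_loss = -lprobs.sum(dim=-1)      # uniform part, summed over the vocabulary
    if ignore_index is not None:
        pad_mask = target.eq(ignore_index)
        nll_loss = nll_loss.masked_fill(pad_mask, 0.0)
        smooth_loss = smooth_loss.masked_fill(pad_mask, 0.0)
    eps_i = epsilon / lprobs.size(-1)      # per-class smoothing mass (assumed normalization)
    loss = (1.0 - epsilon) * nll_loss + eps_i * smooth_loss
    return loss.sum(), nll_loss.sum()

# with epsilon == 0.0 the smoothed loss reduces to the plain NLL loss,
# which is exactly the equality test_zero_eps asserts
lprobs = torch.log_softmax(torch.randn(4, 10), dim=-1)
target = torch.randint(0, 10, (4,))
loss, nll = label_smoothed_nll_loss(lprobs, target, epsilon=0.0)
assert torch.allclose(loss, nll)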
Example #4
    def test_padding(self):
        self.args.label_smoothing = 0.1
        crit = LabelSmoothedCrossEntropyCriterion.build_criterion(self.args, self.task)
        loss, _, logging_output = crit(self.model, self.sample)

        def get_one_no_padding(idx):
            # create a new sample with just a single batch item so that there's
            # no padding
            sample1 = next(test_utils.dummy_dataloader([self.data[idx]]))
            args1 = copy.copy(self.args)
            args1.probs = args1.probs[idx, :, :].unsqueeze(0)
            model1 = self.task.build_model(args1)
            loss1, _, _ = crit(model1, sample1)
            return loss1

        loss1 = get_one_no_padding(0)
        loss2 = get_one_no_padding(1)
        # padding positions must not contribute: the batched loss should equal
        # the sum of the two single-item (unpadded) losses
        self.assertAlmostEqual(loss, loss1 + loss2)
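The invariant that test_padding checks can be reproduced without the fairseq criterion at all. The sketch below uses plain torch.nn.functional.cross_entropy with ignore_index as a stand-in (the shapes, pad index, and sequence lengths are made up for the example): the loss over a padded batch equals the sum of the losses over the individual, unpadded items.

import torch
import torch.nn.functional as F

torch.manual_seed(0)
vocab, pad = 7, 0
logits = torch.randn(2, 3, vocab)          # batch of 2 sequences, length 3
target = torch.tensor([[1, 2, pad],        # first sequence is padded at its last position
                       [3, 4, 5]])

# loss over the padded batch, with the pad position ignored
batch_loss = F.cross_entropy(
    logits.view(-1, vocab), target.view(-1),
    ignore_index=pad, reduction="sum")

# the same quantity rebuilt item by item, with the padding stripped off
per_item = sum(
    F.cross_entropy(logits[i, :n], target[i, :n], reduction="sum")
    for i, n in enumerate([2, 3]))

assert torch.allclose(batch_loss, per_item)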