Example 1
    def test_lcwa_label_smoothing(self):
        """Test if output is correct for the LCWA training loop use case."""
        # Create dummy dense labels
        labels = torch.zeros(self.batch_size, self.num_entities)
        for i in range(self.batch_size):
            labels[i, self.random.randint(self.num_entities)] = 1.0
        # Check if labels form a probability distribution
        np.testing.assert_allclose(torch.sum(labels, dim=1).numpy(), 1.0)

        # Apply label smoothing
        smooth_labels = apply_label_smoothing(labels=labels,
                                              epsilon=self.epsilon,
                                              num_classes=self.num_entities)
        # Check if smooth labels form probability distribution
        np.testing.assert_allclose(torch.sum(smooth_labels, dim=1).numpy(),
                                   1.0,
                                   rtol=self.relative_tolerance)
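
The assertion above only checks that each smoothed row still sums to one; the expected values in the second example below (1 - epsilon for positive labels, epsilon / (num_classes - 1) for negative labels) imply a smoothing rule along the following lines. This is a minimal sketch consistent with those assertions, not the library's actual implementation; the name and signature simply mirror the apply_label_smoothing call used in the tests.

import torch


def apply_label_smoothing(labels: torch.FloatTensor, epsilon: float, num_classes: int) -> torch.FloatTensor:
    """Smooth hard 0/1 labels (sketch only, inferred from the test assertions).

    Positives become 1 - epsilon; negatives become epsilon / (num_classes - 1).
    """
    new_true = 1.0 - epsilon
    new_false = epsilon / (num_classes - 1)
    # The mass removed from the positive class is spread uniformly over the remaining classes,
    # so a one-hot row still sums to new_true + (num_classes - 1) * new_false = 1.
    return new_true * labels + new_false * (1.0 - labels)
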
Example 2
    def test_slcwa_label_smoothing(self):
        """Test if output is correct for the sLCWA training loop use case."""
        # Create dummy sLCWA labels
        ones = torch.ones(self.batch_size, 1)
        zeros = torch.zeros(self.batch_size, 1)
        labels = torch.cat([ones, zeros], dim=0)

        # Apply label smoothing
        smooth_labels = apply_label_smoothing(labels=labels,
                                              epsilon=self.epsilon,
                                              num_classes=self.num_entities)
        exp_true = 1.0 - self.epsilon
        np.testing.assert_allclose(smooth_labels[:self.batch_size],
                                   exp_true,
                                   rtol=self.relative_tolerance)
        exp_false = self.epsilon / (self.num_entities - 1.)
        np.testing.assert_allclose(smooth_labels[self.batch_size:],
                                   exp_false,
                                   rtol=self.relative_tolerance)
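
Both excerpts rely on fixture attributes (self.batch_size, self.num_entities, self.epsilon, self.relative_tolerance, self.random) defined outside the snippets shown. A plausible setUp providing them might look like the sketch below; the class name and all concrete values are assumptions chosen for illustration, not taken from the original test suite.

import unittest

import numpy as np
import torch


class LabelSmoothingTest(unittest.TestCase):
    """Hypothetical fixture for the two test methods above."""

    def setUp(self):
        # Assumed values; the real test suite may use different ones.
        self.batch_size = 16
        self.num_entities = 100
        self.epsilon = 0.1
        self.relative_tolerance = 1e-6
        # numpy RandomState, used as `self.random.randint(...)` in the first test
        self.random = np.random.RandomState(seed=42)
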