def test_backward(self):
    x = torch.randn(3, 1025, requires_grad=True)
    x.retain_grad()

    min_value = torch.tensor(0.3)
    max_value = torch.tensor(0.7)
    min_value.requires_grad = True
    max_value.requires_grad = True
    min_value.retain_grad()
    max_value.retain_grad()
    clip_x = QF.clip(x, min_value, max_value)
    clip_x.retain_grad()

    labels = torch.randint(6, (3, )).type(torch.LongTensor)
    criterion = torch.nn.CrossEntropyLoss()
    loss = criterion(clip_x, labels)
    loss.backward()

    # Gradient w.r.t. min_value is the sum of upstream gradients over the
    # elements clipped at the lower bound, and likewise for max_value.
    np.testing.assert_array_almost_equal(
        clip_x.grad[x < min_value].sum().cpu().numpy(),
        min_value.grad.cpu().numpy(),
        decimal=6)
    np.testing.assert_array_almost_equal(
        clip_x.grad[x > max_value].sum().cpu().numpy(),
        max_value.grad.cpu().numpy(),
        decimal=6)
    # Clipped elements receive no gradient w.r.t. x ...
    assert x.grad.cpu()[x < min_value].sum() == 0
    assert x.grad.cpu()[x > max_value].sum() == 0
    # ... while in-range elements pass the gradient through unchanged.
    assert torch.equal(clip_x.grad[(x > min_value) & (x < max_value)],
                       x.grad[(x > min_value) & (x < max_value)])
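The assertions above pin down the expected gradient flow: in-range elements pass gradients straight through to x, while out-of-range elements route their gradients to the corresponding bound. As a rough illustration only (not the actual QF.clip implementation), a custom autograd function with that behavior could look like the sketch below; the name _LearnableClip is hypothetical.

import torch


class _LearnableClip(torch.autograd.Function):
    # Sketch only: clip with scalar learnable bounds, mirroring the
    # gradient behavior asserted in test_backward above.

    @staticmethod
    def forward(ctx, x, min_value, max_value):
        ctx.save_for_backward(x, min_value, max_value)
        return torch.clamp(x, min_value.item(), max_value.item())

    @staticmethod
    def backward(ctx, grad_output):
        x, min_value, max_value = ctx.saved_tensors
        below, above = x < min_value, x > max_value
        # Inside the range the gradient passes through unchanged;
        # outside it is routed to the corresponding bound instead of x.
        grad_x = grad_output.masked_fill(below | above, 0.0)
        grad_min = grad_output[below].sum().reshape(min_value.shape)
        grad_max = grad_output[above].sum().reshape(max_value.shape)
        return grad_x, grad_min, grad_max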
def test_broadcast(self):
    """Test broadcast behavior with random per-channel min/max values."""
    x_np = np.random.rand(1023, 4, 5, 6).astype(np.float32) - 0.5
    x_torch = torch.Tensor(x_np)
    min_value = np.random.rand(1, 4, 1, 1).astype(np.float32) * 0.1 - 0.2
    max_value = np.random.rand(1, 4, 1, 1).astype(np.float32) * 10 + 0.5
    clip_x_np = np.clip(x_np, min_value, max_value)
    clip_x_torch = QF.clip(x_torch, torch.tensor(min_value),
                           torch.tensor(max_value))
    np.testing.assert_array_equal(clip_x_torch.cpu().numpy(), clip_x_np)
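Since no gradients are involved here, the same broadcast clipping can be cross-checked against torch.clamp, which accepts tensor bounds in PyTorch 1.9 and later. The extra assertion below is only a sketch, reusing x_torch, min_value, max_value, and clip_x_np from the test above; it is not part of the original test.

# Sketch: torch.clamp broadcasts the (1, 4, 1, 1) bounds against the
# (1023, 4, 5, 6) input the same way np.clip does (PyTorch >= 1.9).
clamp_x_torch = torch.clamp(x_torch, torch.tensor(min_value),
                            torch.tensor(max_value))
np.testing.assert_array_equal(clamp_x_torch.cpu().numpy(), clip_x_np)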
def test_raise(self):
    x = torch.randn(3, 7, requires_grad=True)

    # Uninitialized, per-element (non-scalar) bounds: backward through
    # QF.clip is expected to reject learnable bounds that are not scalars.
    min_value = torch.Tensor(3, 7)
    max_value = torch.Tensor(3, 7)
    min_value.requires_grad = True
    max_value.requires_grad = True
    clip_x = QF.clip(x, min_value, max_value)

    labels = torch.randint(6, (3, )).type(torch.LongTensor)
    criterion = torch.nn.CrossEntropyLoss()
    loss = criterion(clip_x, labels)
    with pytest.raises(ValueError, match="can only be scalar"):
        loss.backward()
def test_simple_run(self):
    x_np = np.random.rand(1023).astype(np.float32)
    x_torch = torch.Tensor(x_np)
    clip_x_np = np.clip(x_np, 0.3, 0.7)
    clip_x_torch = QF.clip(x_torch, torch.tensor(0.3), torch.tensor(0.7))
    np.testing.assert_array_equal(clip_x_torch.cpu().numpy(), clip_x_np)
Example #5
def forward(self, inputs):
    # Clip inputs to the [self.clip_value_min, self.clip_value_max] range.
    outputs = QF.clip(inputs, self.clip_value_min, self.clip_value_max)
    return outputs
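The surrounding class is not shown in this example. A minimal sketch of a module this forward could belong to, assuming the clip bounds are stored as learnable scalar parameters and that QF refers to the same clip functional used in the tests above (the class name ClipModule is hypothetical):

import torch
from torch import nn


class ClipModule(nn.Module):
    # Sketch only: wraps QF.clip with learnable scalar bounds.
    # QF is assumed to be the same functional module used in the examples above.
    def __init__(self, clip_value_min=-1.0, clip_value_max=1.0):
        super().__init__()
        self.clip_value_min = nn.Parameter(torch.tensor(float(clip_value_min)))
        self.clip_value_max = nn.Parameter(torch.tensor(float(clip_value_max)))

    def forward(self, inputs):
        outputs = QF.clip(inputs, self.clip_value_min, self.clip_value_max)
        return outputs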