Example #1
    def test_simple_run(self):
        # Forward pass with fixed (non-learnable) bounds should match np.clip exactly.
        x_np = np.random.rand(1023).astype(np.float32)
        x_torch = torch.Tensor(x_np)
        clip_op = clip.Clip(torch.tensor(0.3), torch.tensor(0.7))
        clip_x_np = np.clip(x_np, 0.3, 0.7)
        clip_x_torch = clip_op(x_torch)
        np.testing.assert_array_equal(clip_x_torch.cpu().numpy(), clip_x_np)
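This first test only exercises the forward pass: with fixed tensor bounds, the output of clip.Clip is expected to match np.clip(x_np, 0.3, 0.7) element for element (assert_array_equal, not allclose).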
Example #2
    def test_backward(self):
        x = torch.randn(3, 7, requires_grad=True)
        x.retain_grad()

        min_value = 0.3
        max_value = 0.7
        clip_op = clip.Clip(min_value, max_value, learn_min=True, learn_max=True)
        clip_x = clip_op(x)
        clip_x.retain_grad()

        # Keep the labels on the same (CPU) device as clip_x; a .cuda() target with a
        # CPU input would make CrossEntropyLoss raise a device-mismatch error.
        labels = torch.randint(6, (3,), dtype=torch.long)
        criterion = torch.nn.CrossEntropyLoss()
        loss = criterion(clip_x, labels)

        loss.backward()

        # Clipping blocks the gradient outside [min_value, max_value] ...
        assert x.grad[x < min_value].sum() == 0
        assert x.grad[x > max_value].sum() == 0
        # ... and passes it through unchanged inside the range.
        inside = (x > min_value) & (x < max_value)
        assert torch.equal(clip_x.grad[inside], x.grad[inside])
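The assertions rely on the usual autograd behaviour of clamping: the gradient with respect to x passes through unchanged where x lies strictly inside [min_value, max_value] and is zero where x was clipped, so x.grad vanishes outside the range and coincides with clip_x.grad inside it.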
Example #3
    def test_raise(self):
        # Learnable bounds are expected to be Python scalars; tensor bounds should raise.
        with pytest.raises(ValueError, match="must be scalar"):
            clip.Clip(torch.tensor(0.3), torch.tensor(0.7), learn_min=True)
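The clip module itself is project-specific and is not shown on this page. Purely as an illustration of the behaviour the three tests assume (clamping to [min, max], optionally learnable scalar bounds, and a ValueError when a learnable bound is passed as a tensor), a minimal sketch could look like the code below; the class name mirrors the tests, but the constructor check and parameter handling are assumptions, not the actual implementation.

import torch
import torch.nn as nn


class Clip(nn.Module):
    """Hypothetical sketch of the Clip op exercised by the tests above."""

    def __init__(self, min_value, max_value, learn_min=False, learn_max=False):
        super().__init__()
        if (learn_min or learn_max) and (
            torch.is_tensor(min_value) or torch.is_tensor(max_value)
        ):
            # test_raise expects this constraint on learnable bounds.
            raise ValueError("learnable bounds must be scalar")
        # Learnable bounds become parameters; fixed bounds are stored as given.
        self.min_value = nn.Parameter(torch.tensor(float(min_value))) if learn_min else min_value
        self.max_value = nn.Parameter(torch.tensor(float(max_value))) if learn_max else max_value

    def forward(self, x):
        def as_tensor(v):
            return v if torch.is_tensor(v) else torch.tensor(v, dtype=x.dtype, device=x.device)

        lo, hi = as_tensor(self.min_value), as_tensor(self.max_value)
        # Gradient w.r.t. x is 1 inside [lo, hi] and 0 outside, matching test_backward.
        return torch.minimum(torch.maximum(x, lo), hi)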