Example #1
import torch
from piq import FSIMLoss  # assumed source of FSIMLoss (PyTorch Image Quality)


def test_fsim_loss_computes_grad(x: torch.Tensor, y: torch.Tensor, device: str) -> None:
    x.requires_grad_()
    loss_value = FSIMLoss()(x.to(device), y.to(device))
    loss_value.backward()
    # The leaf tensor x must receive a gradient after backpropagation.
    assert x.grad is not None, 'Expected non None gradient of leaf variable'
Example #2
import torch
from piq import FSIMLoss  # assumed source of FSIMLoss (PyTorch Image Quality)


def test_fsim_loss_computes_grad(prediction: torch.Tensor,
                                 target: torch.Tensor, device: str) -> None:
    prediction.requires_grad_()
    # Move inputs to the requested device before computing the loss.
    loss_value = FSIMLoss()(prediction.to(device), target.to(device))
    loss_value.backward()
    assert prediction.grad is not None, 'Expected non None gradient of leaf variable'
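
Both examples expect the input tensors and the device string to be supplied by the test framework. Below is a minimal, self-contained sketch of how such a gradient test could be wired up with pytest parametrization; the tensor shapes, the 'cpu'-only device list, and the piq import are assumptions for illustration, not part of the original examples.

import pytest
import torch
from piq import FSIMLoss  # assumed source of FSIMLoss


@pytest.mark.parametrize("device", ["cpu"])
def test_fsim_loss_computes_grad_standalone(device: str) -> None:
    # Random image-like batches (N, C, H, W); the shapes are illustrative only.
    x = torch.rand(2, 3, 96, 96)
    y = torch.rand(2, 3, 96, 96)
    x.requires_grad_()
    loss_value = FSIMLoss()(x.to(device), y.to(device))
    loss_value.backward()
    # Gradient must flow back to the leaf tensor x through the .to(device) op.
    assert x.grad is not None, 'Expected non None gradient of leaf variable'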