def test_backward_indirectly(
    fix_seed,
    test_input,
    test_weight,
    test_bias,
    test_mode,
    expected_weight_grad,
    expected_input_grad,
):
    """Backprop through binarized_conv2d and compare the resulting
    input and weight gradients against the expected tensors.

    NOTE(review): calls .backward() directly on the conv output —
    assumes the result is a scalar or has a custom backward; confirm.
    """
    output = binarized_conv2d(
        test_input, test_weight, test_bias, 1, 0, 1, 1, test_mode
    )
    output.backward()

    # Shared tolerance settings for both gradient comparisons.
    tolerances = dict(rtol=1e-04, atol=1e-04, equal_nan=True)
    assert torch.allclose(test_input.grad, expected_input_grad, **tolerances)
    assert torch.allclose(test_weight.grad, expected_weight_grad, **tolerances)
def test_forward(fix_seed, test_input, test_weight, test_bias, test_mode, expected):
    """Forward pass through binarized_conv2d must match the expected tensor
    within the given absolute/relative tolerance."""
    result = binarized_conv2d(
        test_input, test_weight, test_bias, 1, 0, 1, 1, test_mode
    )
    assert torch.allclose(result, expected, rtol=1e-04, atol=1e-04, equal_nan=True)
def forward(self, input: torch.Tensor) -> torch.Tensor:
    """Clip the layer parameters, then run the binarized 2-D convolution.

    Delegates to ``binarized_conv2d`` with this layer's weight, bias, and
    convolution hyperparameters (stride, padding, dilation, groups, mode).
    """
    self.clipping()
    conv_args = (
        input,
        self.weight,
        self.bias,
        self.stride,
        self.padding,
        self.dilation,
        self.groups,
        self.mode,
    )
    return binarized_conv2d(*conv_args)
def test_forward(fix_seed, test_input, test_weight, test_bias, test_mode, expected):
    """Forward-pass check with debug logging of the answer, mode, and
    expected tensor before the tolerance comparison."""
    answer = binarized_conv2d(test_input, test_weight, test_bias, 1, 0, 1, 1, test_mode)

    # Log intermediate values to ease debugging on failure.
    logger.debug(f"answer: {answer}")
    logger.debug(f"test mode: {test_mode}")
    logger.debug(f"expected: {expected}")

    assert torch.allclose(answer, expected, rtol=1e-04, atol=1e-04, equal_nan=True)
def test_supported_mode(fix_seed, test_input, test_weight, test_bias, test_mode):
    """An unsupported binarization mode must raise RuntimeError."""
    conv_args = (test_input, test_weight, test_bias, 1, 0, 1, 1, test_mode)
    with pytest.raises(RuntimeError):
        binarized_conv2d(*conv_args)