Example #1
    def test_frelu(self):
        """Check that FReLU preserves the input shape while changing its values."""
        sample = torch.rand(2, 8, 19, 19)
        frelu = activation.FReLU(8)

        # Run the layer without tracking gradients
        with torch.no_grad():
            result = frelu(sample)

        # Shape must be preserved, but the output has to differ from the input
        self.assertEqual(result.size(), sample.size())
        self.assertFalse(torch.equal(result, sample))
Example #2
def test_frelu():
    """Validate FReLU's forward pass and its multi-line repr."""
    frelu = activation.FReLU(8).eval()

    # Exercise the shared activation-function checks without gradient tracking
    with torch.no_grad():
        _test_activation_function(frelu.forward, (4, 8, 32, 32))

    # The module repr is expected to span exactly four lines
    assert len(repr(frelu).split('\n')) == 4