def test_other_rand(self):
    """Smoke-test the *_like samplers and the jt.randint variants."""
    base = jt.array([1.0, 2.0, 3.0])
    uniform = jt.rand_like(base)
    normal = jt.randn_like(base)
    # The *_like variants must mirror the reference tensor's shape.
    assert uniform.shape == normal.shape
    assert uniform.shape == base.shape
    print(uniform, normal)
    # 2000 draws over a span of 10 integers: every value in [low, high)
    # appears with overwhelming probability, so min/max are exact.
    assert jt.randint(10, 20, (2000, )).min() == 10
    assert jt.randint(10, 20, (2000, )).max() == 19
    # Single-argument form samples from [0, high).
    assert jt.randint(10, shape=(2000, )).max() == 9
    assert jt.randint_like(base, 10).shape == base.shape
def check(xshape, wshape, stride=(1,1,1), padding=(0,0,0), dilation=(1,1,1), group=1):
    """Run conv3d forward and backward twice on CUDA and verify the two
    runs produce matching outputs and gradients (determinism check).

    NOTE(review): the direct cudnn call this was originally compared
    against is commented out below; currently both runs go through the
    same jt.nn.conv3d path.
    """
    with jt.flag_scope(use_cuda=1):
        inp = jt.random(xshape)
        kernel = jt.random(wshape)
        # out = jt.cudnn.ops.cudnn_conv3d(inp, kernel, *stride, *padding, *dilation, group)
        out = jt.nn.conv3d(inp, kernel, None, stride, padding, dilation, group)
        # Random mask turns the output into a scalar-like loss surrogate
        # so grads exercise every output element with distinct weights.
        mask = jt.rand_like(out)
        grad_inp, grad_kernel = jt.grad(mask * out, [inp, kernel])
        jt.sync_all()
        # Second, independent evaluation of the same graph.
        out2 = jt.nn.conv3d(inp, kernel, None, stride, padding, dilation, group)
        grad_inp2, grad_kernel2 = jt.grad(mask * out2, [inp, kernel])
        np.testing.assert_allclose(out.data, out2.data)
        np.testing.assert_allclose(grad_inp.data, grad_inp2.data, rtol=1e-5, atol=1e-3)
        np.testing.assert_allclose(grad_kernel.data, grad_kernel2.data, rtol=1e-5, atol=1e-3)
def check(xshape, wshape, stride=(1,1,1), padding=(0,0,0), dilation=(1,1,1), group=1):
    """Run conv_transpose3d forward and backward twice on CUDA and verify
    the two runs produce matching outputs and gradients.

    NOTE(review): the direct cudnn backward-x call this was originally
    compared against is commented out below; currently both runs use the
    same jt.nn.conv_transpose3d path.
    """
    with jt.flag_scope(use_cuda=1):
        inp = jt.random(xshape)
        kernel = jt.random(wshape)
        jt.sync_all()
        ref_out = jt.nn.conv_transpose3d(inp, kernel, None, stride, padding, 0, group, dilation)
        jt.sync_all()
    with jt.flag_scope(use_cuda=1):
        # out = jt.cudnn.ops.cudnn_conv3d_backward_x(kernel, inp, *ref_out.shape[2:], *stride, *padding, *dilation, group)
        out = jt.nn.conv_transpose3d(inp, kernel, None, stride, padding, 0, group, dilation)
        # Random mask weights every output element so the gradient check
        # is sensitive to any per-element mismatch.
        mask = jt.rand_like(out)
        grad_inp, grad_kernel = jt.grad(mask * out, [inp, kernel])
        jt.sync_all()
        grad_inp2, grad_kernel2 = jt.grad(mask * ref_out, [inp, kernel])
        jt.sync_all()
        np.testing.assert_allclose(out.numpy(), ref_out.numpy(), rtol=1e-6, atol=1e-4)
        np.testing.assert_allclose(grad_inp.numpy(), grad_inp2.numpy(), rtol=1e-6, atol=1e-4)
        np.testing.assert_allclose(grad_kernel.numpy(), grad_kernel2.numpy(), rtol=1e-5, atol=1e-3)
def bernoulli(input):
    """Element-wise Bernoulli sample: each entry is 1 with probability
    equal to the corresponding value of ``input`` (assumed in [0, 1]),
    and 0 otherwise. Result is cast back to ``input``'s dtype.
    """
    # Uniform noise in [0, 1); P(input > noise) == input.
    noise = jt.rand_like(input)
    sample = input > noise
    return sample.cast(input.dtype)