import numpy as np
import oneflow as flow

# Maps dtype names such as "float32" to the corresponding flow dtype.
# The import path may differ between OneFlow versions.
from oneflow.test_utils.test_util import type_name_to_flow_type


def _test_normal(test_case, mean, std, shape, device, dtype):
    dtype = type_name_to_flow_type[dtype]
    y1 = flow.normal(mean, std, *shape, dtype=dtype, device=flow.device(device))
    y2 = flow.normal(mean, std, *shape, dtype=dtype, device=flow.device(device))
    # Two independent draws should almost surely differ.
    test_case.assertFalse(np.array_equal(y1.numpy(), y2.numpy()))
    test_case.assertEqual(shape, y1.shape)
    test_case.assertEqual(dtype, y1.dtype)


def _test_with_generator(test_case, mean, std, shape, device, dtype):
    dtype = type_name_to_flow_type[dtype]
    gen = flow.Generator()
    gen.manual_seed(0)
    y1 = flow.normal(
        mean, std, *shape, generator=gen, dtype=dtype, device=flow.device(device)
    )
    gen.manual_seed(0)
    y2 = flow.normal(
        mean, std, *shape, generator=gen, dtype=dtype, device=flow.device(device)
    )
    # Re-seeding the generator must reproduce the same samples.
    test_case.assertTrue(np.array_equal(y1.numpy(), y2.numpy()))


def _normal(self, mean=0, std=1):
    if self.is_global:
        # Global tensors: sample locally, broadcast the result to this tensor's
        # placement/sbp, then copy the values in place.
        src_tensor = flow.normal(mean, std, self.shape)
        src_tensor = src_tensor.to_global(
            placement=self.placement,
            sbp=tuple(flow.sbp.broadcast for _ in range(len(self.sbp))),
        )
        self.copy_(src_tensor)
        return self
    else:
        return flow.normal(
            mean,
            std,
            self.size(),
            out=self,
            dtype=self.dtype,
            device=self.device,
            requires_grad=self.requires_grad,
        )


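# Usage sketch (illustrative, not part of the original code): assuming _normal is
# registered as Tensor.normal_, in-place Gaussian initialization of an existing
# tensor looks like the helper below.
def _example_inplace_normal():
    x = flow.empty(2, 3)
    x.normal_(mean=0.0, std=1.0)  # fills x with N(0, 1) samples and returns x
    return x

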
def _test_backward(test_case, mean, std, shape, device, dtype):
    dtype = type_name_to_flow_type[dtype]
    x = flow.normal(
        mean, std, *shape, dtype=dtype, device=flow.device(device), requires_grad=True
    )
    y = x.sum()
    y.backward()
    # The gradient of sum(x) with respect to x is all ones.
    test_case.assertTrue(np.array_equal(np.ones(shape), x.grad.numpy()))


def _test_consistent_normal(
    test_case, placement, sbp, mean, std, shape, dtype, requires_grad
):
    dtype = type_name_to_flow_type[dtype]
    x = flow.normal(
        mean,
        std,
        *shape,
        placement=placement,
        sbp=sbp,
        dtype=dtype,
        requires_grad=requires_grad,
    )
    test_case.assertEqual(x.shape, shape)
    test_case.assertEqual(x.dtype, dtype)
    test_case.assertEqual(x.sbp, sbp)
    test_case.assertEqual(x.placement, placement)
    test_case.assertEqual(x.requires_grad, requires_grad)


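# Sketch of a typical driver for the helpers above (an assumption that mirrors
# OneFlow's usual GenArgList test pattern; the real test class may differ):
#
#   import unittest
#   from collections import OrderedDict
#   from oneflow.test_utils.test_util import GenArgList
#
#   @flow.unittest.skip_unless_1n1d()
#   class TestNormal(flow.unittest.TestCase):
#       def test_normal(test_case):
#           arg_dict = OrderedDict()
#           arg_dict["test_fun"] = [_test_normal, _test_with_generator, _test_backward]
#           arg_dict["mean"] = [0, 1]
#           arg_dict["std"] = [1, 2]
#           arg_dict["shape"] = [(2, 3), (2, 3, 4)]
#           arg_dict["device"] = ["cpu", "cuda"]
#           arg_dict["dtype"] = ["float32", "float64"]
#           for arg in GenArgList(arg_dict):
#               arg[0](test_case, *arg[1:])
#
#   if __name__ == "__main__":
#       unittest.main()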