Example #1
    def setUp(self):
        # Pairwise sampling function with a diagonal covariance; 3 is presumably
        # the embedding size and the list the per-class counts for the sampler.
        self.func = PairwiseSampling(3, [10, 5, 2, 5, 2],
                                     CovarianceType.diagonal)
        self.mean = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
        # Diagonal covariance: one entry per dimension, so the shape matches the mean.
        self.cov = numpy.random.uniform(0.1, 10, (2, 3)).astype(numpy.float32)
        self.t = numpy.array([0, 2]).astype(numpy.int32)
        self.func.zero_grads()
        # Upstream gradient for the scalar output.
        self.gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)
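For context, the constructor and call pattern these tests exercise can be sketched standalone as below. This is a hypothetical, minimal sketch: the import path for PairwiseSampling and CovarianceType is assumed (they come from the project under test), and the meaning of the constructor arguments is inferred from the array shapes used in the tests.

# Hypothetical standalone usage mirroring Example #1 (diagonal covariance).
import numpy
import chainer

from pairwise_sampling import PairwiseSampling, CovarianceType  # assumed import path

# 3 presumably matches the embedding width of mean/cov below; the list is
# presumably the per-class counts used to build the negative sampler.
func = PairwiseSampling(3, [10, 5, 2, 5, 2], CovarianceType.diagonal)

mean = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)   # batch of 2 means
cov = numpy.random.uniform(0.1, 10, (2, 3)).astype(numpy.float32)  # diagonal covariances
t = numpy.array([0, 2], dtype=numpy.int32)                         # target class ids

m = chainer.Variable(mean)
c = chainer.Variable(cov)
func.zero_grads()                                # clear parameter gradients, as in setUp
loss = func(m, c, chainer.Variable(t))           # scalar loss Variable (shape ())
loss.grad = numpy.ones((), dtype=numpy.float32)
loss.backward()                                  # fills m.grad, c.grad, func.gM, func.gC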
Example #2
class TestPairwiseSampling2(unittest.TestCase):
    '''
    spherical covariance
    '''

    def setUp(self):
        self.func = PairwiseSampling(3, [10, 5, 2, 5, 2],
                                     CovarianceType.spherical)
        self.mean = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
        # Spherical covariance: a single entry per example, hence shape (2, 1).
        self.cov = numpy.random.uniform(0.1, 10, (2, 1)).astype(numpy.float32)
        self.t = numpy.array([0, 2]).astype(numpy.int32)
        self.func.zero_grads()
        self.gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)

    def check_backward(self, mean_data, cov_data, t_data, y_grad):
        m = chainer.Variable(mean_data)
        c = chainer.Variable(cov_data)
        t = chainer.Variable(t_data)
        y = self.func(m, c, t)
        y.grad = y_grad
        y.backward()

        func = y.creator
        def f():
            return func.forward((m.data, c.data, t.data))
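        # numerical_grad perturbs each input array in place and calls f() to
        # re-run the forward pass, yielding finite-difference gradients for the
        # means, covariances, and the parameter arrays func.M and func.C (the
        # gradient with respect to the integer targets is discarded).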
        gm, gc, _, gM, gC = gradient_check.numerical_grad(f,
                (m.data, c.data, t.data, func.M, func.C), (y.grad,))

        gradient_check.assert_allclose(cuda.to_cpu(gm), cuda.to_cpu(m.grad),
                                       atol=5.e-4)
        gradient_check.assert_allclose(cuda.to_cpu(gc), cuda.to_cpu(c.grad),
                                       atol=5.e-4)
        gradient_check.assert_allclose(cuda.to_cpu(gM), cuda.to_cpu(func.gM),
                                       atol=5.e-4)
        gradient_check.assert_allclose(cuda.to_cpu(gC), cuda.to_cpu(func.gC),
                                       atol=5.e-4)

    @attr.gpu
    @condition.retry(3)
    def test_forward_gpu(self):
        m = chainer.Variable(self.mean)
        c = chainer.Variable(self.cov)
        t = chainer.Variable(self.t)
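        # _make_samples presumably fixes the negative samples up front so that
        # the CPU and GPU forward passes below operate on the same samples.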
        self.func._make_samples(self.t)
        y = self.func(m, c, t)

        self.assertEqual(y.data.dtype, numpy.float32)
        self.assertEqual(y.data.shape, ())

        self.func.to_gpu()
        y_g = self.func(chainer.Variable(cuda.to_gpu(self.mean)),
                        chainer.Variable(cuda.to_gpu(self.cov)),
                        chainer.Variable(cuda.to_gpu(self.t)))

        self.assertEqual(y_g.data.dtype, numpy.float32)
        self.assertEqual(y_g.data.shape, ())

        gradient_check.assert_allclose(y.data, y_g.data, atol=1.e-4)

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.mean, self.cov, self.t, self.gy)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.func.to_gpu()
        self.check_backward(cuda.to_gpu(self.mean),
                            cuda.to_gpu(self.cov),
                            cuda.to_gpu(self.t),
                            cuda.to_gpu(self.gy))

    @attr.gpu
    def test_to_cpu(self):
        self.func.to_gpu()
        self.assertTrue(self.func.sampler.use_gpu)
        self.func.to_cpu()
        self.assertFalse(self.func.sampler.use_gpu)
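The gradient check in check_backward relies on chainer's gradient_check.numerical_grad, which perturbs each input array in place and re-runs a zero-argument forward function to build finite-difference gradients weighted by the upstream gradient. Below is a minimal, self-contained sketch of that pattern on a toy y = x ** 2 function (not PairwiseSampling itself).

import numpy
from chainer import gradient_check

x = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
gy = numpy.ones((3,), dtype=numpy.float32)   # upstream gradient, like y.grad above

def f():
    # numerical_grad mutates x in place, so each call sees the perturbed values.
    return (x ** 2,)

gx, = gradient_check.numerical_grad(f, (x,), (gy,))

# The analytic gradient of sum(gy * x ** 2) with respect to x is 2 * x.
gradient_check.assert_allclose(gx, 2 * x, atol=1e-3)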