Example #1
    def test_draw_samples_with_broadcast(self, dtype, mean, mean_isSamples,
                                         var, var_isSamples, rv_shape,
                                         num_samples):
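        # MultivariateNormal.draw_samples with broadcastable mean/covariance and a mocked
        # generator; asserts the dtype, that the sample-array flag tracks whether mean or
        # covariance carries samples, and (when sampled) the sample count.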

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_isSamples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        var_mx = mx.nd.array(var, dtype=dtype)
        if not var_isSamples:
            var_mx = add_sample_dimension(mx.nd, var_mx)
        var = var_mx.asnumpy()

        isSamples_any = any([mean_isSamples, var_isSamples])
        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))
        rv_samples_np = mean + np.matmul(np.linalg.cholesky(var),
                                         np.expand_dims(rand, axis=-1)).sum(-1)

        normal = MultivariateNormal.define_variable(shape=rv_shape,
                                                    dtype=dtype,
                                                    rand_gen=rand_gen).factor
        variables = {normal.mean.uuid: mean_mx, normal.covariance.uuid: var_mx}
        draw_samples_rt = normal.draw_samples(F=mx.nd, variables=variables)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, draw_samples_rt) == isSamples_any
        if isSamples_any:
            assert get_num_samples(
                mx.nd, draw_samples_rt) == num_samples, (get_num_samples(
                    mx.nd, draw_samples_rt), num_samples)
Example #2
    def test_draw_samples(self, dtype, low, low_is_samples, high,
                          high_is_samples, rv_shape, num_samples):
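        # Uniform.draw_samples with a mocked generator; asserts dtype, sample flag and
        # count, and closeness to the NumPy reference within dtype-dependent tolerances.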
        n_dim = 1 + len(rv_shape)
        low_np = numpy_array_reshape(low, low_is_samples, n_dim)
        high_np = numpy_array_reshape(high, high_is_samples, n_dim)

        rv_samples_np = np.random.uniform(low=low_np, high=high_np, size=(num_samples,) + rv_shape)

        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rv_samples_np.flatten(), dtype=dtype))

        var = Uniform.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        low_mx = mx.nd.array(low, dtype=dtype)
        if not low_is_samples:
            low_mx = add_sample_dimension(mx.nd, low_mx)
        high_mx = mx.nd.array(high, dtype=dtype)
        if not high_is_samples:
            high_mx = add_sample_dimension(mx.nd, high_mx)
        variables = {var.low.uuid: low_mx, var.high.uuid: high_mx}

        rv_samples_rt = var.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(rv_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples

        if np.issubdtype(dtype, np.float64):
            rtol, atol = 1e-7, 1e-10
        else:
            rtol, atol = 1e-4, 1e-5
        assert np.allclose(rv_samples_np, rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
Example #3
    def test_draw_samples(self):
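        # Two-level model (v2 ~ Normal(v, 1), v3 ~ Normal(v2, 0.1)) with mocked noise;
        # asserts Model.draw_samples reproduces the hand-computed samples for v3.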
        np.random.seed(0)
        samples_1_np = np.random.randn(5)
        samples_1 = mx.nd.array(samples_1_np)
        samples_2_np = np.random.randn(50)
        samples_2 = mx.nd.array(samples_2_np)
        m = Model()
        v = Variable(shape=(1,))
        m.v2 = Normal.define_variable(mean=v, variance=mx.nd.array([1]), rand_gen=MockMXNetRandomGenerator(samples_1))
        m.v3 = Normal.define_variable(mean=m.v2, variance=mx.nd.array([0.1]), shape=(10,), rand_gen=MockMXNetRandomGenerator(samples_2))
        np.random.seed(0)
        v_np = np.random.rand(1)
        v_mx = mx.nd.array(v_np)

        v_rt = add_sample_dimension(mx.nd, v_mx)
        variance = m.v2.factor.variance
        variance2 = m.v3.factor.variance
        variance_rt = add_sample_dimension(mx.nd, variance.constant)
        variance2_rt = add_sample_dimension(mx.nd, variance2.constant)
        samples = m.draw_samples(F=mx.nd, num_samples=5, targets=[m.v3.uuid],
                                 variables={v.uuid: v_rt, variance.uuid: variance_rt,
                                            variance2.uuid: variance2_rt})[0]

        samples_np = v_np + samples_1_np[:, None] + np.sqrt(0.1) * samples_2_np.reshape(5, 10)

        assert array_has_samples(mx.nd, samples) and get_num_samples(mx.nd, samples) == 5
        assert np.allclose(samples.asnumpy(), samples_np)
Example #4
    def test_draw_samples(self, dtype, log_prob, log_prob_isSamples, rv_shape,
                          num_samples, one_hot_encoding, normalization):
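        # Categorical.draw_samples with mocked integer draws; asserts the returned
        # samples match the mock values, one-hot encoded when requested.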
        n_dim = 1 + len(rv_shape)
        log_prob_np = numpy_array_reshape(log_prob, log_prob_isSamples, n_dim)
        rv_full_shape = (num_samples, ) + rv_shape
        log_prob_np = np.broadcast_to(log_prob_np, rv_full_shape[:-1] + (3, ))

        rand_np = np.random.randint(0, 3, size=rv_full_shape[:-1])
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand_np.flatten(), dtype=dtype))

        if one_hot_encoding:
            rand_np = np.identity(3)[rand_np].reshape(*rv_full_shape)
        else:
            rand_np = np.expand_dims(rand_np, axis=-1)
        rv_samples_np = rand_np

        cat = Categorical.define_variable(0,
                                          num_classes=3,
                                          one_hot_encoding=one_hot_encoding,
                                          normalization=normalization,
                                          shape=rv_shape,
                                          rand_gen=rand_gen,
                                          dtype=dtype).factor
        log_prob_mx = mx.nd.array(log_prob, dtype=dtype)
        if not log_prob_isSamples:
            log_prob_mx = add_sample_dimension(mx.nd, log_prob_mx)
        variables = {cat.log_prob.uuid: log_prob_mx}
        rv_samples_rt = cat.draw_samples(F=mx.nd,
                                         variables=variables,
                                         num_samples=num_samples)

        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
        assert np.allclose(rv_samples_np, rv_samples_rt.asnumpy())
Example #5
    def test_draw_samples_with_broadcast(self, dtype_dof, dtype, degrees_of_freedom, scale, scale_is_samples, rv_shape,
                                         num_samples):
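        # Wishart.draw_samples (broadcast case); asserts the dtype, that the sample-array
        # flag tracks scale_is_samples, and sanity-checks the sample range against
        # repeated scipy wishart.rvs draws.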

        degrees_of_freedom_mx = mx.nd.array([degrees_of_freedom], dtype=dtype_dof)
        scale_mx = mx.nd.array(scale, dtype=dtype)
        if not scale_is_samples:
            scale_mx = add_sample_dimension(mx.nd, scale_mx)

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))
        reps = 1000
        mins = np.zeros(reps)
        maxs = np.zeros(reps)
        for i in range(reps):
            rvs = wishart.rvs(df=degrees_of_freedom, scale=scale, size=num_samples)
            mins[i] = rvs.min()
            maxs[i] = rvs.max()
        # rv_samples_np = wishart.rvs(df=degrees_of_freedom, scale=scale, size=num_samples)

        var = Wishart.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {var.degrees_of_freedom.uuid: degrees_of_freedom_mx, var.scale.uuid: scale_mx}
        draw_samples_rt = var.draw_samples(F=mx.nd, variables=variables)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, draw_samples_rt) == scale_is_samples
        if scale_is_samples:
            assert get_num_samples(mx.nd, draw_samples_rt) == num_samples, (get_num_samples(mx.nd, draw_samples_rt),
                                                                            num_samples)
        assert mins.min() < draw_samples_rt.asnumpy().min()
        assert maxs.max() > draw_samples_rt.asnumpy().max()
Example #6
    def test_draw_samples(self, dtype, mean, mean_isSamples, var,
                          var_isSamples, rv_shape, num_samples):
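        # Normal.draw_samples with a mocked generator; the NumPy reference is
        # mean + rand * sqrt(var), compared within dtype-dependent tolerances.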
        n_dim = 1 + len(rv_shape)
        mean_np = numpy_array_reshape(mean, mean_isSamples, n_dim)
        var_np = numpy_array_reshape(var, var_isSamples, n_dim)

        rand = np.random.randn(num_samples, *rv_shape)
        rv_samples_np = mean_np + rand * np.sqrt(var_np)

        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        normal = Normal.define_variable(shape=rv_shape, dtype=dtype,
                                        rand_gen=rand_gen).factor
        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_isSamples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        var_mx = mx.nd.array(var, dtype=dtype)
        if not var_isSamples:
            var_mx = add_sample_dimension(mx.nd, var_mx)
        variables = {normal.mean.uuid: mean_mx, normal.variance.uuid: var_mx}
        rv_samples_rt = normal.draw_samples(
            F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(rv_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples

        if np.issubdtype(dtype, np.float64):
            rtol, atol = 1e-7, 1e-10
        else:
            rtol, atol = 1e-4, 1e-5
        assert np.allclose(rv_samples_np, rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
Example #7
    def test_draw_samples_with_broadcast_no_numpy_verification(
            self, dtype, mean, mean_isSamples, var, var_isSamples, rv_shape,
            num_samples):
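        # Broadcast case without a NumPy reference: only asserts the dtype and that
        # the returned array carries a sample dimension.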

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_isSamples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        var_mx = mx.nd.array(var, dtype=dtype)
        if not var_isSamples:
            var_mx = add_sample_dimension(mx.nd, var_mx)
        var = var_mx.asnumpy()

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        normal = MultivariateNormal.define_variable(shape=rv_shape,
                                                    dtype=dtype,
                                                    rand_gen=rand_gen).factor
        variables = {normal.mean.uuid: mean_mx, normal.covariance.uuid: var_mx}
        draw_samples_rt = normal.draw_samples(F=mx.nd,
                                              variables=variables,
                                              num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, draw_samples_rt)
Example #8
    def test_log_pdf_no_broadcast(self, dtype, mean, mean_isSamples, var,
                                  var_isSamples, rv, rv_isSamples,
                                  num_samples):
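        # MultivariateNormal.log_pdf (no broadcasting) compared element-wise against
        # scipy.stats.multivariate_normal.logpdf.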

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_isSamples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        mean = mean_mx.asnumpy()

        var_mx = mx.nd.array(var, dtype=dtype)
        if not var_isSamples:
            var_mx = add_sample_dimension(mx.nd, var_mx)
        var = var_mx.asnumpy()

        rv_mx = mx.nd.array(rv, dtype=dtype)
        if not rv_isSamples:
            rv_mx = add_sample_dimension(mx.nd, rv_mx)
        rv = rv_mx.asnumpy()

        from scipy.stats import multivariate_normal
        isSamples_any = any([mean_isSamples, var_isSamples, rv_isSamples])
        rv_shape = rv.shape[1:]

        n_dim = 1 + len(
            rv.shape) if isSamples_any and not rv_isSamples else len(rv.shape)
        mean_np = numpy_array_reshape(mean, isSamples_any, n_dim)
        var_np = numpy_array_reshape(var, isSamples_any, n_dim)
        rv_np = numpy_array_reshape(rv, isSamples_any, n_dim)

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        r = []
        for s in range(len(rv_np)):
            a = []
            for i in range(len(rv_np[s])):
                a.append(
                    multivariate_normal.logpdf(rv_np[s][i], mean_np[s][i],
                                               var_np[s][i]))
            r.append(a)
        log_pdf_np = np.array(r)

        normal = MultivariateNormal.define_variable(shape=rv_shape,
                                                    dtype=dtype,
                                                    rand_gen=rand_gen).factor
        variables = {
            normal.mean.uuid: mean_mx,
            normal.covariance.uuid: var_mx,
            normal.random_variable.uuid: rv_mx
        }
        log_pdf_rt = normal.log_pdf(F=mx.nd, variables=variables)

        assert np.issubdtype(log_pdf_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, log_pdf_rt) == isSamples_any
        if isSamples_any:
            assert get_num_samples(
                mx.nd, log_pdf_rt) == num_samples, (get_num_samples(
                    mx.nd, log_pdf_rt), num_samples)
        assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy())
Example #9
    def test_draw_samples_no_broadcast(self, dtype_dof, dtype,
                                       degrees_of_freedom, scale,
                                       scale_is_samples, rv_shape,
                                       num_samples):
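        # Wishart.draw_samples without broadcasting; asserts dtype, the sample-array
        # flag and that the sample count equals num_samples.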
        degrees_of_freedom_mx = mx.nd.array([degrees_of_freedom],
                                            dtype=dtype_dof)
        scale_mx = mx.nd.array(scale, dtype=dtype)
        if not scale_is_samples:
            scale_mx = add_sample_dimension(mx.nd, scale_mx)

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        var = Wishart.define_variable(shape=rv_shape,
                                      dtype=dtype,
                                      rand_gen=rand_gen).factor
        variables = {
            var.degrees_of_freedom.uuid: degrees_of_freedom_mx,
            var.scale.uuid: scale_mx
        }
        draw_samples_rt = var.draw_samples(F=mx.nd,
                                           variables=variables,
                                           num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, draw_samples_rt)
        assert get_num_samples(
            mx.nd, draw_samples_rt) == num_samples, (get_num_samples(
                mx.nd, draw_samples_rt), num_samples)
Example #10
    def test_draw_samples(self, dtype, mean, mean_is_samples, precision,
                          precision_is_samples, rv_shape, num_samples):
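        # NormalMeanPrecision.draw_samples; the NumPy reference is
        # mean + rand * precision**-0.5, compared within dtype-dependent tolerances.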
        n_dim = 1 + len(rv_shape)
        mean_np = numpy_array_reshape(mean, mean_is_samples, n_dim)
        precision_np = numpy_array_reshape(precision, precision_is_samples, n_dim)

        rand = np.random.randn(num_samples, *rv_shape)
        rv_samples_np = mean_np + rand * np.power(precision_np, -0.5)

        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        var = NormalMeanPrecision.define_variable(shape=rv_shape, dtype=dtype,
                                                  rand_gen=rand_gen).factor
        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_is_samples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        precision_mx = mx.nd.array(precision, dtype=dtype)
        if not precision_is_samples:
            precision_mx = add_sample_dimension(mx.nd, precision_mx)
        variables = {var.mean.uuid: mean_mx, var.precision.uuid: precision_mx}
        rv_samples_rt = var.draw_samples(
            F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(rv_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples

        if np.issubdtype(dtype, np.float64):
            rtol, atol = 1e-7, 1e-10
        else:
            rtol, atol = 1e-4, 1e-5
        assert np.allclose(rv_samples_np, rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
Example #11
    def test_draw_samples_no_broadcast(self, dtype, mean, mean_is_samples, precision,
                                       precision_is_samples, rv_shape, num_samples):
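        # MultivariateNormalMeanPrecision.draw_samples without broadcasting; asserts
        # dtype, the sample-array flag and the sample count.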

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_is_samples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        precision_mx = mx.nd.array(precision, dtype=dtype)
        if not precision_is_samples:
            precision_mx = add_sample_dimension(mx.nd, precision_mx)
        # precision = precision_mx.asnumpy()

        # n_dim = 1 + len(rv.shape) if is_samples_any else len(rv.shape)
        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))
        # rand_exp = np.expand_dims(rand, axis=-1)
        # lmat = np.linalg.cholesky(precision)
        # temp1 = np.matmul(lmat, rand_exp).sum(-1)
        # rv_samples_np = mean + temp1

        normal = MultivariateNormalMeanPrecision.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor

        variables = {normal.mean.uuid: mean_mx, normal.precision.uuid: precision_mx}
        draw_samples_rt = normal.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, draw_samples_rt)
        assert get_num_samples(mx.nd, draw_samples_rt) == num_samples, \
            (get_num_samples(mx.nd, draw_samples_rt), num_samples)
Example #12
    def test_draw_samples_w_mean(self):
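        # Samples a GP with a Gluon mean function twice, via ForwardSamplingAlgorithm
        # and via draw_samples on the factor with the same mocked generator, and
        # asserts the two sample sets agree.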
        D, X, Y, noise_var, lengthscale, variance = self.gen_data()
        dtype = 'float64'

        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(np.random.rand(20 * D), dtype=dtype))

        m, net = self.gen_mxfusion_model_w_mean(dtype, D, noise_var,
                                                lengthscale, variance,
                                                rand_gen)

        observed = [m.X]
        infr = Inference(ForwardSamplingAlgorithm(m,
                                                  observed,
                                                  num_samples=2,
                                                  target_variables=[m.Y]),
                         dtype=dtype)

        samples = infr.run(X=mx.nd.array(X, dtype=dtype),
                           Y=mx.nd.array(Y, dtype=dtype))[0].asnumpy()

        kern = RBF(3, True, name='rbf', dtype=dtype) + White(3, dtype=dtype)
        X_var = Variable(shape=(10, 3))
        mean_func = MXFusionGluonFunction(net,
                                          num_outputs=1,
                                          broadcastable=True)
        mean_var = mean_func(X_var)
        gp = GaussianProcess.define_variable(X=X_var,
                                             kernel=kern,
                                             mean=mean_var,
                                             shape=(10, D),
                                             dtype=dtype,
                                             rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid:
            mx.nd.expand_dims(mx.nd.array(X, dtype=dtype), axis=0),
            gp.add_rbf_lengthscale.uuid:
            mx.nd.expand_dims(mx.nd.array(lengthscale, dtype=dtype), axis=0),
            gp.add_rbf_variance.uuid:
            mx.nd.expand_dims(mx.nd.array(variance, dtype=dtype), axis=0),
            gp.add_white_variance.uuid:
            mx.nd.expand_dims(mx.nd.array(noise_var, dtype=dtype), axis=0),
            mean_var.uuid:
            mx.nd.expand_dims(net(mx.nd.array(X, dtype=dtype)), axis=0)
        }
        samples_2 = gp.draw_samples(F=mx.nd,
                                    variables=variables,
                                    num_samples=2).asnumpy()

        assert np.allclose(samples, samples_2), (samples, samples_2)
Example #13
    def test_draw_samples(self, dtype, X, X_isSamples, rbf_lengthscale,
                          rbf_lengthscale_isSamples, rbf_variance,
                          rbf_variance_isSamples, rv_shape, num_samples):
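        # GaussianProcess.draw_samples compared against a GPy-based reference:
        # L @ rand with L the Cholesky factor of the RBF kernel matrix.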
        X_mx = prepare_mxnet_array(X, X_isSamples, dtype)
        rbf_lengthscale_mx = prepare_mxnet_array(rbf_lengthscale,
                                                 rbf_lengthscale_isSamples,
                                                 dtype)
        rbf_variance_mx = prepare_mxnet_array(rbf_variance,
                                              rbf_variance_isSamples, dtype)

        rand = np.random.randn(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        rbf = RBF(2, True, 1., 1., 'rbf', None, dtype)
        X_var = Variable(shape=(5, 2))
        gp = GaussianProcess.define_variable(X=X_var,
                                             kernel=rbf,
                                             shape=rv_shape,
                                             dtype=dtype,
                                             rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid: X_mx,
            gp.rbf_lengthscale.uuid: rbf_lengthscale_mx,
            gp.rbf_variance.uuid: rbf_variance_mx
        }
        samples_rt = gp.draw_samples(F=mx.nd,
                                     variables=variables,
                                     num_samples=num_samples).asnumpy()

        samples_np = []
        for i in range(num_samples):
            X_i = X[i] if X_isSamples else X
            lengthscale_i = rbf_lengthscale[
                i] if rbf_lengthscale_isSamples else rbf_lengthscale
            variance_i = rbf_variance[
                i] if rbf_variance_isSamples else rbf_variance
            rand_i = rand[i]
            rbf_np = GPy.kern.RBF(input_dim=2, ARD=True)
            rbf_np.lengthscale = lengthscale_i
            rbf_np.variance = variance_i
            K_np = rbf_np.K(X_i)
            L_np = np.linalg.cholesky(K_np)
            sample_np = L_np.dot(rand_i)
            samples_np.append(sample_np)
        samples_np = np.array(samples_np)

        assert np.issubdtype(samples_rt.dtype, dtype)
        assert get_num_samples(mx.nd, samples_rt) == num_samples
        assert np.allclose(samples_np, samples_rt)
Example #14
    def test_draw_samples_no_broadcast(self, dtype, a, a_is_samples, rv_shape, num_samples):
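        # Dirichlet.draw_samples with mocked gamma draws; asserts the dtype and that
        # the samples match the normalised mock values.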
        a_mx = mx.nd.array(a, dtype=dtype)
        if not a_is_samples:
            a_mx = add_sample_dimension(mx.nd, a_mx)

        rand = np.random.gamma(shape=a, scale=np.ones(a.shape), size=(num_samples,)+rv_shape)
        draw_samples_np = rand / np.sum(rand)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        dirichlet = Dirichlet.define_variable(alpha=Variable(), shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {dirichlet.alpha.uuid: a_mx}
        draw_samples_rt = dirichlet.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert np.allclose(draw_samples_np, draw_samples_rt.asnumpy())
Example #15
    def test_log_pdf_with_broadcast(self, dtype, mean, mean_is_samples, precision, precision_is_samples,
                                    rv, rv_is_samples, num_samples):
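        # MultivariateNormalMeanPrecision.log_pdf (broadcast case) compared against
        # multivariate_normal.logpdf with the precision matrix inverted.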

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_is_samples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        mean = mean_mx.asnumpy()

        precision_mx = mx.nd.array(precision, dtype=dtype)
        if not precision_is_samples:
            precision_mx = add_sample_dimension(mx.nd, precision_mx)
        precision = precision_mx.asnumpy()

        rv_mx = mx.nd.array(rv, dtype=dtype)
        if not rv_is_samples:
            rv_mx = add_sample_dimension(mx.nd, rv_mx)
        rv = rv_mx.asnumpy()

        is_samples_any = any([mean_is_samples, precision_is_samples, rv_is_samples])
        rv_shape = rv.shape[1:]

        n_dim = 1 + len(rv.shape) if is_samples_any and not rv_is_samples else len(rv.shape)
        mean_np = np.broadcast_to(mean, (5, 3, 2))
        precision_np = np.broadcast_to(precision, (5, 3, 2, 2))
        rv_np = numpy_array_reshape(rv, is_samples_any, n_dim)

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        r = []
        for s in range(len(rv_np)):
            a = []
            for i in range(len(rv_np[s])):
                a.append(multivariate_normal.logpdf(rv_np[s][i], mean_np[s][i], np.linalg.inv(precision_np[s][i])))
            r.append(a)
        log_pdf_np = np.array(r)

        normal = MultivariateNormalMeanPrecision.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {
            normal.mean.uuid: mean_mx, normal.precision.uuid: precision_mx, normal.random_variable.uuid: rv_mx}
        log_pdf_rt = normal.log_pdf(F=mx.nd, variables=variables)

        assert np.issubdtype(log_pdf_rt.dtype, dtype)
        assert array_has_samples(mx.nd, log_pdf_rt) == is_samples_any
        if is_samples_any:
            assert get_num_samples(mx.nd, log_pdf_rt) == num_samples, (get_num_samples(mx.nd, log_pdf_rt), num_samples)
        assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy())
Example #16
    def test_draw_samples(self, dtype, prob_true, prob_true_is_samples,
                          rv_shape, num_samples):
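        # Bernoulli.draw_samples against mocked draws, then repeated with the default
        # generator to check the non-mock sampling path as well.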
        rv_full_shape = (num_samples, ) + rv_shape

        rand_np = np.random.normal(size=rv_full_shape) > 0
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand_np.flatten(), dtype=dtype))

        rv_samples_np = rand_np

        var = Bernoulli.define_variable(0,
                                        shape=rv_shape,
                                        rand_gen=rand_gen,
                                        dtype=dtype).factor
        prob_true_mx = mx.nd.array(prob_true, dtype=dtype)
        if not prob_true_is_samples:
            prob_true_mx = add_sample_dimension(mx.nd, prob_true_mx)
        variables = {var.prob_true.uuid: prob_true_mx}
        rv_samples_rt = var.draw_samples(F=mx.nd,
                                         variables=variables,
                                         num_samples=num_samples)

        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
        assert np.array_equal(rv_samples_np,
                              rv_samples_rt.asnumpy().astype(bool))

        # Also make sure the non-mock sampler works
        rand_gen = None
        var = Bernoulli.define_variable(0,
                                        shape=rv_shape,
                                        rand_gen=rand_gen,
                                        dtype=dtype).factor
        prob_true_mx = mx.nd.array(prob_true, dtype=dtype)
        if not prob_true_is_samples:
            prob_true_mx = add_sample_dimension(mx.nd, prob_true_mx)
        variables = {var.prob_true.uuid: prob_true_mx}
        rv_samples_rt = var.draw_samples(F=mx.nd,
                                         variables=variables,
                                         num_samples=num_samples)

        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
        assert rv_samples_rt.dtype == dtype
Example #17
    def test_draw_samples_with_broadcast_no_numpy_verification(self, dtype, mean, mean_is_samples, precision,
                                                               precision_is_samples, rv_shape, num_samples):
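        # Broadcast case without a NumPy reference: only asserts the dtype and the
        # sample-array flag of the returned samples.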

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_is_samples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        precision_mx = mx.nd.array(precision, dtype=dtype)
        if not precision_is_samples:
            precision_mx = add_sample_dimension(mx.nd, precision_mx)
        # precision = precision_mx.asnumpy()

        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        normal = MultivariateNormalMeanPrecision.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {normal.mean.uuid: mean_mx, normal.precision.uuid: precision_mx}
        draw_samples_rt = normal.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, draw_samples_rt)
Example #18
    def test_log_pdf_with_broadcast(self, dtype, a, a_is_samples, rv, rv_is_samples, num_samples):
        # Add a sample dimension if the variable does not carry samples
        a_mx = mx.nd.array(a, dtype=dtype)
        if not a_is_samples:
            a_mx = add_sample_dimension(mx.nd, a_mx)
        a = a_mx.asnumpy()

        rv_mx = mx.nd.array(rv, dtype=dtype)
        if not rv_is_samples:
            rv_mx = add_sample_dimension(mx.nd, rv_mx)
        rv = rv_mx.asnumpy()

        is_samples_any = a_is_samples or rv_is_samples
        rv_shape = rv.shape[1:]

        n_dim = 1 + len(rv.shape) if is_samples_any and not rv_is_samples else len(rv.shape)
        a_np = np.broadcast_to(a, (num_samples, 3, 2))
        rv_np = numpy_array_reshape(rv, is_samples_any, n_dim)

        # Initialize rand_gen
        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))

        # Calculate correct Dirichlet logpdf
        r = []
        for s in range(len(rv_np)):
            a = []
            for i in range(len(rv_np[s])):
                a.append(scipy_dirichlet.logpdf(rv_np[s][i]/sum(rv_np[s][i]), a_np[s][i]))
            r.append(a)
        log_pdf_np = np.array(r)

        dirichlet = Dirichlet.define_variable(alpha=Variable(), shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {dirichlet.alpha.uuid: a_mx, dirichlet.random_variable.uuid: rv_mx}
        log_pdf_rt = dirichlet.log_pdf(F=mx.nd, variables=variables)

        assert np.issubdtype(log_pdf_rt.dtype, dtype)
        assert array_has_samples(mx.nd, log_pdf_rt) == is_samples_any
        if is_samples_any:
            assert get_num_samples(mx.nd, log_pdf_rt) == num_samples, (get_num_samples(mx.nd, log_pdf_rt), num_samples)
        assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy())
Example #19
    def test_draw_samples(self, dtype, location, location_is_samples, scale,
                          scale_is_samples, rv_shape, num_samples):
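        # Laplace.draw_samples; the NumPy reference is location + rand * scale with
        # Laplace-distributed rand, compared within dtype-dependent tolerances.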
        n_dim = 1 + len(rv_shape)
        location_np = numpy_array_reshape(location, location_is_samples, n_dim)
        scale_np = numpy_array_reshape(scale, scale_is_samples, n_dim)

        rand = np.random.laplace(size=(num_samples, ) + rv_shape)
        rv_samples_np = location_np + rand * scale_np

        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        var = Laplace.define_variable(shape=rv_shape,
                                      dtype=dtype,
                                      rand_gen=rand_gen).factor
        location_mx = mx.nd.array(location, dtype=dtype)
        if not location_is_samples:
            location_mx = add_sample_dimension(mx.nd, location_mx)
        scale_mx = mx.nd.array(scale, dtype=dtype)
        if not scale_is_samples:
            scale_mx = add_sample_dimension(mx.nd, scale_mx)
        variables = {var.location.uuid: location_mx, var.scale.uuid: scale_mx}

        rv_samples_rt = var.draw_samples(F=mx.nd,
                                         variables=variables,
                                         num_samples=num_samples)

        assert np.issubdtype(rv_samples_rt.dtype, dtype)
        assert array_has_samples(mx.nd, rv_samples_rt)
        assert get_num_samples(mx.nd, rv_samples_rt) == num_samples

        if np.issubdtype(dtype, np.float64):
            rtol, atol = 1e-7, 1e-10
        else:
            rtol, atol = 1e-4, 1e-5
        assert np.allclose(rv_samples_np,
                           rv_samples_rt.asnumpy(),
                           rtol=rtol,
                           atol=atol)
Example #20
    def test_draw_samples_no_broadcast(self, dtype, mean, mean_isSamples, var,
                                       var_isSamples, rv_shape, num_samples):
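        # MultivariateNormal.draw_samples without broadcasting; asserts the dtype, that
        # the sample-array flag tracks whether any input carries samples, and (when it
        # does) the sample count.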

        mean_mx = mx.nd.array(mean, dtype=dtype)
        if not mean_isSamples:
            mean_mx = add_sample_dimension(mx.nd, mean_mx)
        var_mx = mx.nd.array(var, dtype=dtype)
        if not var_isSamples:
            var_mx = add_sample_dimension(mx.nd, var_mx)
        var = var_mx.asnumpy()
        # var = (var[:,:,:,None]*var[:,None,:,:]).sum(-2)+np.eye(2)
        # var_mx = mx.nd.array(var, dtype=dtype)

        isSamples_any = any([mean_isSamples, var_isSamples])
        # n_dim = 1 + len(rv.shape) if isSamples_any else len(rv.shape)
        # mean_np = numpy_array_reshape(mean, mean_isSamples, n_dim)
        # var_np = numpy_array_reshape(var, var_isSamples, n_dim)
        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))
        rand_exp = np.expand_dims(rand, axis=-1)
        lmat = np.linalg.cholesky(var)
        temp1 = np.matmul(lmat, rand_exp).sum(-1)
        rv_samples_np = mean + temp1

        normal = MultivariateNormal.define_variable(shape=rv_shape,
                                                    dtype=dtype,
                                                    rand_gen=rand_gen).factor

        variables = {normal.mean.uuid: mean_mx, normal.covariance.uuid: var_mx}
        draw_samples_rt = normal.draw_samples(F=mx.nd, variables=variables)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, draw_samples_rt) == isSamples_any
        if isSamples_any:
            assert get_num_samples(
                mx.nd, draw_samples_rt) == num_samples, (get_num_samples(
                    mx.nd, draw_samples_rt), num_samples)
Example #21
    def test_draw_samples_no_broadcast(self, dtype_dof, dtype, degrees_of_freedom, scale,
                                       scale_is_samples, rv_shape, num_samples):
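        # Wishart.draw_samples without broadcasting; asserts dtype, the sample flag
        # and that the sample count equals num_samples.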

        degrees_of_freedom_mx = mx.nd.array([degrees_of_freedom], dtype=dtype_dof)
        scale_mx = mx.nd.array(scale, dtype=dtype)
        if not scale_is_samples:
            scale_mx = add_sample_dimension(mx.nd, scale_mx)

        # n_dim = 1 + len(rv.shape) if isSamples_any else len(rv.shape)
        rand = np.random.rand(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))
        # rand_exp = np.expand_dims(rand, axis=-1)
        # lmat = np.linalg.cholesky(var)
        # temp1 = np.matmul(lmat, rand_exp).sum(-1)
        # rv_samples_np = mean + temp1

        var = Wishart.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
        variables = {var.degrees_of_freedom.uuid: degrees_of_freedom_mx, var.scale.uuid: scale_mx}
        draw_samples_rt = var.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)

        assert np.issubdtype(draw_samples_rt.dtype, dtype)
        assert is_sampled_array(mx.nd, draw_samples_rt)
        assert get_num_samples(mx.nd, draw_samples_rt) == num_samples, (get_num_samples(mx.nd, draw_samples_rt),
                                                                        num_samples)
Example #22
    def test_draw_samples(self):
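        # Samples a GP regression model through ForwardSamplingAlgorithm and the
        # equivalent GaussianProcess factor directly, reusing the mocked generator,
        # and asserts the two sample sets agree.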
        np.random.seed(0)
        X = np.random.rand(10, 3)
        Y = np.random.rand(10, 1)
        noise_var = np.random.rand(1)
        lengthscale = np.random.rand(3)
        variance = np.random.rand(1)
        dtype = 'float64'

        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(np.random.rand(20), dtype=dtype))

        m = Model()
        m.N = Variable()
        m.X = Variable(shape=(m.N, 3))
        m.noise_var = Variable(transformation=PositiveTransformation(),
                               initial_value=mx.nd.array(noise_var,
                                                         dtype=dtype))
        kernel = RBF(input_dim=3,
                     ARD=True,
                     variance=mx.nd.array(variance, dtype=dtype),
                     lengthscale=mx.nd.array(lengthscale, dtype=dtype),
                     dtype=dtype)
        m.Y = GPRegression.define_variable(X=m.X,
                                           kernel=kernel,
                                           noise_var=m.noise_var,
                                           shape=(m.N, 1),
                                           dtype=dtype,
                                           rand_gen=rand_gen)

        observed = [m.X]
        infr = Inference(ForwardSamplingAlgorithm(m,
                                                  observed,
                                                  num_samples=2,
                                                  target_variables=[m.Y]),
                         dtype=dtype)

        samples = infr.run(X=mx.nd.array(X, dtype=dtype),
                           Y=mx.nd.array(Y, dtype=dtype))[0].asnumpy()

        kern = RBF(3, True, name='rbf', dtype=dtype) + White(3, dtype=dtype)
        X_var = Variable(shape=(10, 3))
        gp = GaussianProcess.define_variable(X=X_var,
                                             kernel=kern,
                                             shape=(10, 1),
                                             dtype=dtype,
                                             rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid:
            mx.nd.expand_dims(mx.nd.array(X, dtype=dtype), axis=0),
            gp.add_rbf_lengthscale.uuid:
            mx.nd.expand_dims(mx.nd.array(lengthscale, dtype=dtype), axis=0),
            gp.add_rbf_variance.uuid:
            mx.nd.expand_dims(mx.nd.array(variance, dtype=dtype), axis=0),
            gp.add_white_variance.uuid:
            mx.nd.expand_dims(mx.nd.array(noise_var, dtype=dtype), axis=0)
        }
        samples_2 = gp.draw_samples(F=mx.nd,
                                    variables=variables,
                                    num_samples=2).asnumpy()

        assert np.allclose(samples, samples_2), (samples, samples_2)
Example #23
    def test_draw_samples(self, dtype, X, X_isSamples, X_cond,
                          X_cond_isSamples, Y_cond, Y_cond_isSamples,
                          rbf_lengthscale, rbf_lengthscale_isSamples,
                          rbf_variance, rbf_variance_isSamples, rv_shape,
                          num_samples):
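        # ConditionalGaussianProcess.draw_samples compared against a GPy/LAPACK
        # reference: posterior mean and covariance from the conditioning set,
        # then mu + L_cov @ rand.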
        from scipy.linalg.lapack import dtrtrs
        X_mx = prepare_mxnet_array(X, X_isSamples, dtype)
        X_cond_mx = prepare_mxnet_array(X_cond, X_cond_isSamples, dtype)
        Y_cond_mx = prepare_mxnet_array(Y_cond, Y_cond_isSamples, dtype)
        rbf_lengthscale_mx = prepare_mxnet_array(rbf_lengthscale,
                                                 rbf_lengthscale_isSamples,
                                                 dtype)
        rbf_variance_mx = prepare_mxnet_array(rbf_variance,
                                              rbf_variance_isSamples, dtype)

        rand = np.random.randn(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        rbf = RBF(2, True, 1., 1., 'rbf', None, dtype)
        X_var = Variable(shape=(5, 2))
        X_cond_var = Variable(shape=(8, 2))
        Y_cond_var = Variable(shape=(8, 1))
        gp = ConditionalGaussianProcess.define_variable(
            X=X_var,
            X_cond=X_cond_var,
            Y_cond=Y_cond_var,
            kernel=rbf,
            shape=rv_shape,
            dtype=dtype,
            rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid: X_mx,
            gp.X_cond.uuid: X_cond_mx,
            gp.Y_cond.uuid: Y_cond_mx,
            gp.rbf_lengthscale.uuid: rbf_lengthscale_mx,
            gp.rbf_variance.uuid: rbf_variance_mx
        }
        samples_rt = gp.draw_samples(F=mx.nd,
                                     variables=variables,
                                     num_samples=num_samples).asnumpy()

        samples_np = []
        for i in range(num_samples):
            X_i = X[i] if X_isSamples else X
            X_cond_i = X_cond[i] if X_cond_isSamples else X_cond
            Y_cond_i = Y_cond[i] if Y_cond_isSamples else Y_cond
            lengthscale_i = rbf_lengthscale[
                i] if rbf_lengthscale_isSamples else rbf_lengthscale
            variance_i = rbf_variance[
                i] if rbf_variance_isSamples else rbf_variance
            rand_i = rand[i]
            rbf_np = GPy.kern.RBF(input_dim=2, ARD=True)
            rbf_np.lengthscale = lengthscale_i
            rbf_np.variance = variance_i
            K_np = rbf_np.K(X_i)
            Kc_np = rbf_np.K(X_cond_i, X_i)
            Kcc_np = rbf_np.K(X_cond_i)

            L = np.linalg.cholesky(Kcc_np)
            LInvY = dtrtrs(L, Y_cond_i, lower=1, trans=0)[0]
            LinvKxt = dtrtrs(L, Kc_np, lower=1, trans=0)[0]

            mu = LinvKxt.T.dot(LInvY)
            cov = K_np - LinvKxt.T.dot(LinvKxt)
            L_cov_np = np.linalg.cholesky(cov)
            sample_np = mu + L_cov_np.dot(rand_i)
            samples_np.append(sample_np)
        samples_np = np.array(samples_np)
        assert np.issubdtype(samples_rt.dtype, dtype)
        assert get_num_samples(mx.nd, samples_rt) == num_samples
        assert np.allclose(samples_np, samples_rt)
Example #24
    def test_draw_samples_w_mean(self, dtype, X, X_isSamples, X_cond,
                                 X_cond_isSamples, Y_cond, Y_cond_isSamples,
                                 rbf_lengthscale, rbf_lengthscale_isSamples,
                                 rbf_variance, rbf_variance_isSamples,
                                 rv_shape, num_samples):
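        # ConditionalGaussianProcess with a Gluon mean function; the reference
        # subtracts the mean at X_cond from Y_cond and adds the mean at X to the
        # drawn samples before comparison.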
        net = nn.HybridSequential(prefix='nn_')
        with net.name_scope():
            net.add(
                nn.Dense(rv_shape[-1],
                         flatten=False,
                         activation="tanh",
                         in_units=X.shape[-1],
                         dtype=dtype))
        net.initialize(mx.init.Xavier(magnitude=3))

        from scipy.linalg.lapack import dtrtrs
        X_mx = prepare_mxnet_array(X, X_isSamples, dtype)
        X_cond_mx = prepare_mxnet_array(X_cond, X_cond_isSamples, dtype)
        Y_cond_mx = prepare_mxnet_array(Y_cond, Y_cond_isSamples, dtype)
        rbf_lengthscale_mx = prepare_mxnet_array(rbf_lengthscale,
                                                 rbf_lengthscale_isSamples,
                                                 dtype)
        rbf_variance_mx = prepare_mxnet_array(rbf_variance,
                                              rbf_variance_isSamples, dtype)
        mean_mx = net(X_mx)
        mean_np = mean_mx.asnumpy()
        mean_cond_mx = net(X_cond_mx)
        mean_cond_np = mean_cond_mx.asnumpy()

        rand = np.random.randn(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        rbf = RBF(2, True, 1., 1., 'rbf', None, dtype)
        X_var = Variable(shape=(5, 2))
        X_cond_var = Variable(shape=(8, 2))
        Y_cond_var = Variable(shape=(8, 1))
        mean_func = MXFusionGluonFunction(net,
                                          num_outputs=1,
                                          broadcastable=True)
        mean_var = mean_func(X_var)
        mean_cond_var = mean_func(X_cond_var)
        gp = ConditionalGaussianProcess.define_variable(
            X=X_var,
            X_cond=X_cond_var,
            Y_cond=Y_cond_var,
            mean=mean_var,
            mean_cond=mean_cond_var,
            kernel=rbf,
            shape=rv_shape,
            dtype=dtype,
            rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid: X_mx,
            gp.X_cond.uuid: X_cond_mx,
            gp.Y_cond.uuid: Y_cond_mx,
            gp.rbf_lengthscale.uuid: rbf_lengthscale_mx,
            gp.rbf_variance.uuid: rbf_variance_mx,
            gp.mean.uuid: mean_mx,
            gp.mean_cond.uuid: mean_cond_mx
        }
        samples_rt = gp.draw_samples(F=mx.nd,
                                     variables=variables,
                                     num_samples=num_samples).asnumpy()

        samples_np = []
        for i in range(num_samples):
            X_i = X[i] if X_isSamples else X
            X_cond_i = X_cond[i] if X_cond_isSamples else X_cond
            Y_cond_i = Y_cond[i] if Y_cond_isSamples else Y_cond
            Y_cond_i = Y_cond_i - mean_cond_np[
                i] if X_cond_isSamples else Y_cond_i - mean_cond_np[0]
            lengthscale_i = rbf_lengthscale[
                i] if rbf_lengthscale_isSamples else rbf_lengthscale
            variance_i = rbf_variance[
                i] if rbf_variance_isSamples else rbf_variance
            rand_i = rand[i]
            rbf_np = GPy.kern.RBF(input_dim=2, ARD=True)
            rbf_np.lengthscale = lengthscale_i
            rbf_np.variance = variance_i
            K_np = rbf_np.K(X_i)
            Kc_np = rbf_np.K(X_cond_i, X_i)
            Kcc_np = rbf_np.K(X_cond_i)

            L = np.linalg.cholesky(Kcc_np)
            LInvY = dtrtrs(L, Y_cond_i, lower=1, trans=0)[0]
            LinvKxt = dtrtrs(L, Kc_np, lower=1, trans=0)[0]

            mu = LinvKxt.T.dot(LInvY)
            cov = K_np - LinvKxt.T.dot(LinvKxt)
            L_cov_np = np.linalg.cholesky(cov)
            sample_np = mu + L_cov_np.dot(rand_i)
            samples_np.append(sample_np)
        samples_np = np.array(samples_np) + mean_np
        assert np.issubdtype(samples_rt.dtype, dtype)
        assert get_num_samples(mx.nd, samples_rt) == num_samples
        assert np.allclose(samples_np, samples_rt)
Example #25
    def test_draw_samples_w_mean(self, dtype, X, X_isSamples, rbf_lengthscale,
                                 rbf_lengthscale_isSamples, rbf_variance,
                                 rbf_variance_isSamples, rv_shape,
                                 num_samples):
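        # GaussianProcess with a Gluon mean function; the reference is L @ rand plus
        # the mean network output at X.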

        net = nn.HybridSequential(prefix='nn_')
        with net.name_scope():
            net.add(
                nn.Dense(rv_shape[-1],
                         flatten=False,
                         activation="tanh",
                         in_units=X.shape[-1],
                         dtype=dtype))
        net.initialize(mx.init.Xavier(magnitude=3))

        X_mx = prepare_mxnet_array(X, X_isSamples, dtype)
        rbf_lengthscale_mx = prepare_mxnet_array(rbf_lengthscale,
                                                 rbf_lengthscale_isSamples,
                                                 dtype)
        rbf_variance_mx = prepare_mxnet_array(rbf_variance,
                                              rbf_variance_isSamples, dtype)
        mean_mx = net(X_mx)
        mean_np = mean_mx.asnumpy()

        rand = np.random.randn(num_samples, *rv_shape)
        rand_gen = MockMXNetRandomGenerator(
            mx.nd.array(rand.flatten(), dtype=dtype))

        rbf = RBF(2, True, 1., 1., 'rbf', None, dtype)
        X_var = Variable(shape=(5, 2))
        mean_func = MXFusionGluonFunction(net,
                                          num_outputs=1,
                                          broadcastable=True)
        mean_var = mean_func(X_var)
        gp = GaussianProcess.define_variable(X=X_var,
                                             kernel=rbf,
                                             shape=rv_shape,
                                             mean=mean_var,
                                             dtype=dtype,
                                             rand_gen=rand_gen).factor

        variables = {
            gp.X.uuid: X_mx,
            gp.rbf_lengthscale.uuid: rbf_lengthscale_mx,
            gp.rbf_variance.uuid: rbf_variance_mx,
            gp.mean.uuid: mean_mx
        }
        samples_rt = gp.draw_samples(F=mx.nd,
                                     variables=variables,
                                     num_samples=num_samples).asnumpy()

        samples_np = []
        for i in range(num_samples):
            X_i = X[i] if X_isSamples else X
            lengthscale_i = rbf_lengthscale[
                i] if rbf_lengthscale_isSamples else rbf_lengthscale
            variance_i = rbf_variance[
                i] if rbf_variance_isSamples else rbf_variance
            rand_i = rand[i]
            rbf_np = GPy.kern.RBF(input_dim=2, ARD=True)
            rbf_np.lengthscale = lengthscale_i
            rbf_np.variance = variance_i
            K_np = rbf_np.K(X_i)
            L_np = np.linalg.cholesky(K_np)
            sample_np = L_np.dot(rand_i)
            samples_np.append(sample_np)
        samples_np = np.array(samples_np) + mean_np

        assert np.issubdtype(samples_rt.dtype, dtype)
        assert get_num_samples(mx.nd, samples_rt) == num_samples
        assert np.allclose(samples_np, samples_rt)