Code Example #1
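Variational inference for a Bayesian neural network with the Rao-Blackwellized score-function estimator, trained on minibatches. The snippet is a test method, so it relies on helper methods (`make_net`, `make_bnn_model`) and imports (`np`, `mx`, `get_default_dtype`, `ScoreFunctionRBInference`) provided by the surrounding test module.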
    def test_score_function_rb_minibatch(self):
        dtype = get_default_dtype()
        x = np.random.rand(1000, 1)
        y = np.random.rand(1000, 1)
        x_nd, y_nd = mx.nd.array(x, dtype=dtype), mx.nd.array(y, dtype=dtype)

        # Build the neural network and run one forward pass so that its
        # parameter shapes are inferred before wrapping it in a model.
        self.net = self.make_net()
        self.net(x_nd)

        # Wrap the network into a Bayesian neural network model.
        m = self.make_bnn_model(self.net)

        from mxfusion.inference.meanfield import create_Gaussian_meanfield
        from mxfusion.inference.grad_based_inference import GradBasedInference
        from mxfusion.inference import MinibatchInferenceLoop
        observed = [m.y, m.x]
        # Gaussian mean-field variational posterior over the latent variables.
        q = create_Gaussian_meanfield(model=m, observed=observed)
        # Rao-Blackwellized score-function gradient estimator with 3 posterior samples.
        alg = ScoreFunctionRBInference(num_samples=3,
                                       model=m,
                                       observed=observed,
                                       posterior=q)
        # Minibatches of 100 out of the 1000 data points; rv_scaling rescales the
        # likelihood contribution of y by 10 so the objective matches the full data set.
        infr = GradBasedInference(inference_algorithm=alg,
                                  grad_loop=MinibatchInferenceLoop(
                                      batch_size=100, rv_scaling={m.y: 10}))

        # Initialize with the minibatch shapes, then run a short optimization.
        infr.initialize(y=(100, 1), x=(100, 1))
        infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
Code Example #2
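A helper that builds a probabilistic PCA (PPCA) model and its variational posterior, runs one gradient step with the inference algorithm class passed as `inf_type` (for example `ScoreFunctionInference`), and returns the inference object together with the posterior mean variable, so that different gradient estimators can be compared on identical, seeded data.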
    def get_ppca_grad(self, x_train, inf_type, num_samples=100):
        import random
        dtype = get_default_dtype()
        # Fix all random seeds so that results are reproducible across calls.
        random.seed(0)
        np.random.seed(0)
        mx.random.seed(0)
        # Build the PPCA model and its variational posterior.
        m = self.make_ppca_model()
        q = self.make_ppca_post(m)
        observed = [m.x]
        # inf_type is the inference algorithm class under test,
        # e.g. ScoreFunctionInference or ScoreFunctionRBInference.
        alg = inf_type(num_samples=num_samples,
                       model=m,
                       posterior=q,
                       observed=observed)

        from mxfusion.inference.grad_based_inference import GradBasedInference
        from mxfusion.inference import BatchInferenceLoop

        infr = GradBasedInference(inference_algorithm=alg,
                                  grad_loop=BatchInferenceLoop())
        infr.initialize(x=mx.nd.array(x_train, dtype=dtype))
        # Run a single full-batch gradient step and return the inference object
        # together with the posterior mean variable.
        infr.run(max_iter=1,
                 learning_rate=1e-2,
                 x=mx.nd.array(x_train, dtype=dtype),
                 verbose=False)
        return infr, q.post_mean
Code Example #3
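The same Bayesian neural network setup as Code Example #1, but using the plain score-function (likelihood-ratio) estimator `ScoreFunctionInference` with a single full-batch gradient loop and the default dtype.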
    def test_score_function_batch(self):
        x = np.random.rand(1000, 1)
        y = np.random.rand(1000, 1)
        x_nd, y_nd = mx.nd.array(x), mx.nd.array(y)

        self.net = self.make_net()
        self.net(x_nd)

        m = self.make_bnn_model(self.net)

        from mxfusion.inference.meanfield import create_Gaussian_meanfield
        from mxfusion.inference.grad_based_inference import GradBasedInference
        from mxfusion.inference import BatchInferenceLoop
        observed = [m.y, m.x]
        q = create_Gaussian_meanfield(model=m, observed=observed)
        # Plain score-function gradient estimator with 3 posterior samples.
        alg = ScoreFunctionInference(num_samples=3,
                                     model=m,
                                     observed=observed,
                                     posterior=q)
        # Full-batch loop: every gradient step uses the whole data set.
        infr = GradBasedInference(inference_algorithm=alg,
                                  grad_loop=BatchInferenceLoop())
        infr.initialize(y=y_nd, x=x_nd)
        infr.run(max_iter=1, learning_rate=1e-2, y=y_nd, x=x_nd)
Code Example #4
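An end-to-end test of stochastic variational sparse GP regression (`SVGPRegression`): the model is built with an ARD RBF kernel and inducing inputs, fitted with `StochasticVariationalInference`, sampled from with `ForwardSamplingAlgorithm`, and finally used for prediction both through the default module prediction algorithm and through a custom `SVGPRegressionSamplingPrediction` algorithm.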
    def test_with_samples(self):
        from mxfusion.common import config
        # Run the whole test in double precision.
        config.DEFAULT_DTYPE = 'float64'
        dtype = 'float64'

        D, X, Y, Z, noise_var, lengthscale, variance, qU_mean, \
            qU_cov_W, qU_cov_diag, qU_chol = self.gen_data()

        m = Model()
        m.N = Variable()
        m.X = Normal.define_variable(mean=0, variance=1, shape=(m.N, 3))
        m.Z = Variable(shape=(3, 3), initial_value=mx.nd.array(Z, dtype=dtype))
        m.noise_var = Variable(transformation=PositiveTransformation(),
                               initial_value=mx.nd.array(noise_var,
                                                         dtype=dtype))
        kernel = RBF(input_dim=3,
                     ARD=True,
                     variance=mx.nd.array(variance, dtype=dtype),
                     lengthscale=mx.nd.array(lengthscale, dtype=dtype),
                     dtype=dtype)
        m.Y = SVGPRegression.define_variable(X=m.X,
                                             kernel=kernel,
                                             noise_var=m.noise_var,
                                             inducing_inputs=m.Z,
                                             shape=(m.N, D),
                                             dtype=dtype)
        gp = m.Y.factor
        # Add a small jitter for numerical stability of the covariance factorization.
        gp.svgp_log_pdf.jitter = 1e-8

        q = create_Gaussian_meanfield(model=m, observed=[m.Y])

        # Fit the model with stochastic variational inference, 10 samples per step.
        infr = GradBasedInference(
            inference_algorithm=StochasticVariationalInference(
                model=m, posterior=q, num_samples=10, observed=[m.Y]))
        infr.initialize(Y=Y.shape)
        # Set the variational parameters of the inducing-point posterior q(U).
        infr.params[gp._extra_graphs[0].qU_mean] = mx.nd.array(qU_mean,
                                                               dtype=dtype)
        infr.params[gp._extra_graphs[0].qU_cov_W] = mx.nd.array(qU_cov_W,
                                                                dtype=dtype)
        infr.params[gp._extra_graphs[0].qU_cov_diag] = mx.nd.array(qU_cov_diag,
                                                                   dtype=dtype)
        infr.run(Y=mx.nd.array(Y, dtype=dtype),
                 max_iter=2,
                 learning_rate=0.1,
                 verbose=True)

        # Draw 5 forward samples from the generative model given the inputs X.
        infr2 = Inference(
            ForwardSamplingAlgorithm(model=m, observed=[m.X], num_samples=5))
        infr2.run(X=mx.nd.array(X, dtype=dtype))

        # Predict at 13 new inputs using the default module prediction algorithm.
        infr_pred = TransferInference(ModulePredictionAlgorithm(
            model=m, observed=[m.X], target_variables=[m.Y]),
                                      infr_params=infr.params)
        xt = np.random.rand(13, 3)
        res = infr_pred.run(X=mx.nd.array(xt, dtype=dtype))[0]

        # Attach a sampling-based prediction algorithm to the SVGP module, then
        # predict again with a full (non-diagonal) predictive covariance.
        gp.attach_prediction_algorithms(
            targets=gp.output_names,
            conditionals=gp.input_names,
            algorithm=SVGPRegressionSamplingPrediction(gp._module_graph,
                                                       gp._extra_graphs[0],
                                                       [gp._module_graph.X]),
            alg_name='svgp_predict')
        gp.svgp_predict.diagonal_variance = False
        gp.svgp_predict.jitter = 1e-6

        infr_pred2 = TransferInference(ModulePredictionAlgorithm(
            model=m, observed=[m.X], target_variables=[m.Y]),
                                       infr_params=infr.params)
        xt = np.random.rand(13, 3)
        res = infr_pred2.run(X=mx.nd.array(xt, dtype=dtype))[0]