Example #1
    def setUp(self):
        import numpy as np
        import GPy
        from GPy import kern

        np.random.seed(11111)
        self.X = np.linspace(-1, 1, 20)[:, None]
        k = GPy.kern.Matern32(1, lengthscale=1, variance=1)
        self.sim_model = 'Mat+Lin'
        # Linear mean function with all weights set to 0.01.
        self.mf = GPy.mappings.Linear(1, 1)
        self.mf[:] = .01
        self.mu = self.mf.f(self.X)
        # Draw training targets from a zero-mean GP with the Matern-3/2 covariance.
        self.Y = np.random.multivariate_normal(np.zeros(self.X.shape[0]),
                                               k.K(self.X))[:, None]
        self.mf.randomize()
        # Candidate kernels for model selection, each paired with a short name.
        self.test_models = [
            [
                'Mat+Lin',
                kern.Matern32(self.X.shape[1]) +
                kern.Linear(self.X.shape[1], variances=.01) +
                kern.Bias(self.X.shape[1])
            ],
            [
                'Lin',
                kern.Linear(self.X.shape[1], variances=.01) +
                kern.Bias(self.X.shape[1])
            ],
        ]
        self.verbose = True
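For context, fixtures like the setUp above are typically consumed by a test method that fits one GP regression per candidate kernel. A minimal sketch (hypothetical: the method name is invented here; GPy.models.GPRegression, optimize and checkgrad are standard GPy calls):

    def test_kernel_gradients(self):
        import GPy
        # One GP regression per named candidate kernel; verify gradients numerically.
        for name, kernel in self.test_models:
            m = GPy.models.GPRegression(self.X, self.Y, kernel=kernel.copy())
            m.optimize(messages=self.verbose, max_iters=100)
            assert m.checkgrad(), "gradient check failed for " + name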
Example #2
def standard_models(X):
    """
    Return kernels for model selection
    """
    from GPy import kern
    return [
        ['Mat+Lin', kern.Matern32(X.shape[1]) + kern.Linear(X.shape[1], variances=.01) + kern.Bias(X.shape[1])],
        ['Exp+Lin', kern.Exponential(X.shape[1]) + kern.Linear(X.shape[1], variances=.01) + kern.Bias(X.shape[1])],
        ['RBF+Lin', kern.RBF(X.shape[1]) + kern.Linear(X.shape[1], variances=.01) + kern.Bias(X.shape[1])],
        ['Lin', kern.Linear(X.shape[1], variances=.01) + kern.Bias(X.shape[1])],
    ]
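A hedged usage sketch: rank the candidates returned by standard_models by optimized log marginal likelihood. select_model is a hypothetical helper, not part of the original; GPy.models.GPRegression, m.optimize and m.log_likelihood are standard GPy calls:

    import GPy

    def select_model(X, Y):
        # Fit one GP regression per candidate kernel and keep the best fit.
        scores = []
        for name, kernel in standard_models(X):
            m = GPy.models.GPRegression(X, Y, kernel=kernel)
            m.optimize(messages=False)
            scores.append((m.log_likelihood(), name, m))
        return max(scores, key=lambda s: s[0])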
Example #3
def mrd_simulation(optimize=True,
                   verbose=True,
                   plot=True,
                   plot_sim=True,
                   **kw):
    from GPy import kern
    from GPy.models import MRD

    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    # _simulate_sincos is a private helper from the same module
    # (GPy.examples.dimensionality_reduction); it returns simulated views.
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, plot_sim)

    # Ylist = [Ylist[0]]
    k = kern.Linear(Q, ARD=True)
    m = MRD(Ylist,
            input_dim=Q,
            num_inducing=num_inducing,
            kernel=k,
            initx="PCA_concat",
            initz='permute',
            **kw)

    # Regex parameter indexing: set the noise variance of every view at once.
    m['.*noise'] = [Y.var() / 40. for Y in Ylist]

    if optimize:
        print("Optimizing Model:")
        m.optimize(messages=verbose, max_iters=8e3)
    if plot:
        m.X.plot("MRD Latent Space 1D")
        m.plot_scales("MRD Scales")
    return m
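A quick way to exercise the function above without the (slow) optimization and plotting, e.g. in a smoke test (model construction only):

    m = mrd_simulation(optimize=False, plot=False, plot_sim=False)
    print(m)                    # parameter table of the MRD model
    print(m.log_likelihood())   # variational lower bound before training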
Example #4
def ssgplvm_simulation(optimize=True,
                       verbose=1,
                       plot=True,
                       plot_sim=False,
                       max_iters=2e4,
                       useGPU=False):
    import numpy as _np
    from GPy import kern
    from GPy.models import SSGPLVM

    D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 45, 3, 9
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)
    Y = Ylist[0]
    k = kern.Linear(Q,
                    ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
    # k = kern.RBF(Q, ARD=True, lengthscale=10.)
    m = SSGPLVM(Y,
                Q,
                init="rand",
                num_inducing=num_inducing,
                kernel=k,
                group_spike=True)
    m.X.variance[:] = _np.random.uniform(0, .01, m.X.shape)
    m.likelihood.variance = .01

    if optimize:
        print("Optimizing model:")
        m.optimize('bfgs', messages=verbose, max_iters=max_iters, gtol=.05)
    if plot:
        m.X.plot("SSGPLVM Latent Space 1D")
        m.kern.plot_ARD('SSGPLVM Simulation ARD Parameters')
    return m
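The same smoke-test pattern applies here; checkgrad is a cheap sanity check on the spike-and-slab variational bound:

    m = ssgplvm_simulation(optimize=False, plot=False)
    assert m.checkgrad()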
Example #5
def mrd_simulation_missing_data(optimize=True,
                                verbose=True,
                                plot=True,
                                plot_sim=True,
                                **kw):
    import numpy as _np
    from GPy import kern
    from GPy.models import MRD

    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)

    k = kern.Linear(Q, ARD=True) + kern.White(Q, variance=1e-4)
    inanlist = []

    # Knock out ~60% of the entries of each view at random.
    for Y in Ylist:
        inan = _np.random.binomial(1, .6, size=Y.shape).astype(bool)
        inanlist.append(inan)
        Y[inan] = _np.nan

    m = MRD(Ylist,
            input_dim=Q,
            num_inducing=num_inducing,
            kernel=k,
            inference_method=None,
            initx="random",
            initz='permute',
            **kw)

    if optimize:
        print("Optimizing Model:")
        m.optimize('bfgs', messages=verbose, max_iters=8e3, gtol=.1)
    if plot:
        m.X.plot("MRD Latent Space 1D")
        m.plot_scales()
    return m
Example #6
def gplvm_simulation(
    optimize=True,
    verbose=1,
    plot=True,
    plot_sim=False,
    max_iters=2e4,
):
    from GPy import kern
    from GPy.models import GPLVM

    D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 45, 3, 9
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)
    Y = Ylist[0]
    k = kern.Linear(Q,
                    ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
    # k = kern.RBF(Q, ARD=True, lengthscale=10.)
    m = GPLVM(Y, Q, init="PCA", kernel=k)
    m.likelihood.variance = .1

    if optimize:
        print("Optimizing model:")
        m.optimize('bfgs', messages=verbose, max_iters=max_iters, gtol=.05)
    if plot:
        m.X.plot("BGPLVM Latent Space 1D")
        m.kern.plot_ARD()
    return m
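The same simulated data also drives GPy's Bayesian variant of the model. A minimal sketch of the swap, reusing Y, Q and num_inducing as defined inside gplvm_simulation above (GPy.models.BayesianGPLVM is GPy's standard variational GPLVM; the kwargs shown are the usual ones):

    from GPy import kern
    from GPy.models import BayesianGPLVM

    # Same linear ARD kernel as above; the variational model additionally
    # learns a Gaussian posterior over the latent X.
    kb = kern.Linear(Q, ARD=True)
    mb = BayesianGPLVM(Y, Q, kernel=kb, num_inducing=num_inducing)
    mb.optimize('bfgs', messages=True, max_iters=1000)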
Example #7
    def test_missing_data(self):
        import numpy as np
        from GPy import kern
        from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
        from GPy.examples.dimensionality_reduction import _simulate_matern

        D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 400, 3, 4
        _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, False)
        Y = Ylist[0]

        inan = np.random.binomial(1, .9, size=Y.shape).astype(bool)  # ~90% missing data
        Ymissing = Y.copy()
        Ymissing[inan] = np.nan

        k = kern.Linear(Q, ARD=True) + kern.White(Q, np.exp(-2)) # + kern.bias(Q)
        m = BayesianGPLVMMiniBatch(Ymissing, Q, init="random", num_inducing=num_inducing,
                          kernel=k, missing_data=True)
        assert(m.checkgrad())
        mul, varl = m.predict(m.X)

        k = kern.RBF(Q, ARD=True) + kern.White(Q, np.exp(-2)) # + kern.bias(Q)
        m2 = BayesianGPLVMMiniBatch(Ymissing, Q, init="random", num_inducing=num_inducing,
                          kernel=k, missing_data=True)
        assert(m2.checkgrad())
        m2.kern.rbf.lengthscale[:] = 1e6
        m2.X[:] = m.X.param_array
        m2.likelihood[:] = m.likelihood[:]
        m2.kern.white[:] = m.kern.white[:]
        mu, var = m.predict(m.X)  # re-predict with the first model; results must be unchanged
        np.testing.assert_allclose(mul, mu)
        np.testing.assert_allclose(varl, var)

        q50 = m.predict_quantiles(m.X, (50,))
        np.testing.assert_allclose(mul, q50[0])
Example #8
    def test_missing_data(self):
        import numpy as np
        from GPy import kern
        from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch
        from GPy.examples.dimensionality_reduction import _simulate_matern

        D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 400, 3, 4
        _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, False)
        Y = Ylist[0]

        inan = np.random.binomial(1, .9, size=Y.shape).astype(
            bool)  # ~90% missing data
        Ymissing = Y.copy()
        Ymissing[inan] = np.nan

        k = kern.Linear(Q, ARD=True) + kern.White(Q,
                                                  np.exp(-2))  # + kern.bias(Q)
        m = BayesianGPLVMMiniBatch(Ymissing,
                                   Q,
                                   init="random",
                                   num_inducing=num_inducing,
                                   kernel=k,
                                   missing_data=True)
        assert (m.checkgrad())

        k = kern.RBF(Q, ARD=True) + kern.White(Q, np.exp(-2))  # + kern.bias(Q)
        m = BayesianGPLVMMiniBatch(Ymissing,
                                   Q,
                                   init="random",
                                   num_inducing=num_inducing,
                                   kernel=k,
                                   missing_data=True)
        assert (m.checkgrad())
Example #9
    def test_ssgplvm(self):
        import numpy as np
        from GPy import kern
        from GPy.models import SSGPLVM
        from GPy.examples.dimensionality_reduction import _simulate_matern

        np.random.seed(10)
        D1, D2, D3, N, num_inducing, Q = 13, 5, 8, 45, 3, 9
        _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, False)
        Y = Ylist[0]
        k = kern.Linear(Q, ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)
        # k = kern.RBF(Q, ARD=True, lengthscale=10.)
        m = SSGPLVM(Y, Q, init="rand", num_inducing=num_inducing, kernel=k, group_spike=True)
        m.randomize()
        self.assertTrue(m.checkgrad())
Example #10
def bgplvm_simulation_missing_data_stochastics(
    optimize=True,
    verbose=1,
    plot=True,
    plot_sim=False,
    max_iters=2e4,
    percent_missing=0.1,
    d=13,
    batchsize=2,
):
    import numpy as _np
    from GPy import kern
    from GPy.models.bayesian_gplvm_minibatch import BayesianGPLVMMiniBatch

    D1, D2, D3, N, num_inducing, Q = d, 5, 8, 400, 3, 4
    _, _, Ylist = _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim)
    Y = Ylist[0]
    k = kern.Linear(Q,
                    ARD=True)  # + kern.white(Q, _np.exp(-2)) # + kern.bias(Q)

    # Randomly mask the requested fraction of entries (default: 10%).
    inan = _np.random.binomial(1, percent_missing,
                               size=Y.shape).astype(bool)
    Ymissing = Y.copy()
    Ymissing[inan] = _np.nan

    m = BayesianGPLVMMiniBatch(
        Ymissing,
        Q,
        init="random",
        num_inducing=num_inducing,
        kernel=k,
        missing_data=True,
        stochastic=True,
        batchsize=batchsize,
    )

    m.Yreal = Y

    if optimize:
        print("Optimizing model:")
        m.optimize("bfgs", messages=verbose, max_iters=max_iters, gtol=0.05)
    if plot:
        m.X.plot("BGPLVM Latent Space 1D")
        m.kern.plot_ARD()
    return m
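An illustrative call of the stochastic variant above with heavier missingness and a larger minibatch (the parameter values are arbitrary, chosen only for a quick run):

    m = bgplvm_simulation_missing_data_stochastics(
        optimize=True, plot=False, max_iters=100,
        percent_missing=0.5, batchsize=10)
    print(m.log_likelihood())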
Example #11
    def optimize(self,
                 views,
                 latent_dims=7,
                 messages=True,
                 max_iters=8e3,
                 save_model=False):
        import pickle
        from GPy import kern
        from GPy.models import MRD

        if self.kernel:
            if self.kernel == 'rbf':
                print("Chosen kernel: RBF")
                print("Chosen lengthscale: " + str(self.lengthscale))
                k = kern.RBF(latent_dims,
                             ARD=True,
                             lengthscale=self.lengthscale) + kern.White(
                                 latent_dims,
                                 variance=1e-4) + kern.Bias(latent_dims)
            elif self.kernel == 'linear':
                print("Chosen kernel: Linear")
                k = kern.Linear(latent_dims, ARD=True) + kern.White(
                    latent_dims, variance=1e-4) + kern.Bias(latent_dims)
            else:
                raise ValueError("Unknown kernel: " + str(self.kernel))
        else:
            print("No kernel chosen - using RBF with lengthscale 10...")
            k = kern.RBF(latent_dims, ARD=True, lengthscale=10) + kern.White(
                latent_dims, variance=1e-4) + kern.Bias(latent_dims)

        print("Number of inducing inputs: " + str(self.num_inducing))
        m = MRD(views,
                input_dim=latent_dims,
                num_inducing=self.num_inducing,
                kernel=k,
                normalizer=False)
        print("Optimizing Model...")
        # Honour the method's own arguments instead of hard-coded values.
        m.optimize(messages=messages, max_iters=max_iters)

        if save_model:
            # save_model doubles as the output filename when truthy.
            pickle.dump(m, open(save_model, "wb"), protocol=2)

        self.model = m
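A hypothetical driver for the method above; the enclosing class is not shown in the snippet, so the class name, constructor and data arrays here are assumptions. It only illustrates the expected attributes (kernel, lengthscale, num_inducing) and the call signature:

    wrapper = MRDWrapper()              # hypothetical class containing optimize()
    wrapper.kernel = 'linear'
    wrapper.num_inducing = 25
    views = [Y1, Y2]                    # one observation matrix per view, same row count
    wrapper.optimize(views, latent_dims=7, save_model="mrd_model.pkl")
    print(wrapper.model.log_likelihood())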