Example #1
def bgplvm_simulation(optimize='scg',
                      plot=True,
                      max_f_eval=2e4):
    # Assumes the surrounding module provides `np` (numpy),
    # `BayesianGPLVM` (from GPy.models) and the `_simulate_sincos`
    # helper defined elsewhere in the same file.
#     from GPy.core.transformations import logexp_clipped
    D1, D2, D3, N, num_inducing, Q = 15, 8, 8, 100, 3, 5
    slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot)

    from GPy.models import mrd
    from GPy import kern
    reload(mrd)  # Python 2 builtin; on Python 3 use importlib.reload
    reload(kern)

    Y = Ylist[0]

    # ARD linear kernel plus small bias and white-noise terms
    k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2)) # + kern.bias(Q)
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k, _debug=True)
    # m.constrain('variance|noise', logexp_clipped())
    m['noise'] = Y.var() / 100.
    m['linear_variance'] = .01

    if optimize:
        print("Optimizing model:")
        m.optimize(optimize, max_iters=max_f_eval,
                   max_f_eval=max_f_eval,
                   messages=True, gtol=.05)
    if plot:
        m.plot_X_1d("BGPLVM Latent Space 1D")
        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
    return m
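
A minimal driver for the example above (a sketch; it assumes the module-level imports noted in the snippet are in place):

# Hypothetical driver: build the model without optimizing or plotting,
# then print old GPy's parameter summary.
m = bgplvm_simulation(optimize=None, plot=False)
print(m)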
Example #2
def test_bias_kern(self):
    # Assumes numpy as np, GPy, and BayesianGPLVM are imported at
    # module level; this method lives on a unittest.TestCase subclass.
    N, num_inducing, input_dim, D = 10, 3, 2, 4
    X = np.random.rand(N, input_dim)
    # Sample data from an RBF GP, then fit with a bias + white kernel
    # and check the model gradients numerically.
    k = GPy.kern.rbf(input_dim) + GPy.kern.white(input_dim, 0.00001)
    K = k.K(X)
    Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
    Y -= Y.mean(axis=0)
    k = GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001)
    m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
    m.randomize()
    self.assertTrue(m.checkgrad())
Example #3
def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False):
    # Assumes numpy as np, GPy, and BayesianGPLVM are imported at module level.
    from GPy.util.datasets import swiss_roll_generated
    from GPy.core.transformations import logexp_clipped

    data = swiss_roll_generated(N=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

    # Initialize the latent space with Isomap if sklearn is available,
    # padding extra latent dimensions with noise; otherwise fall back
    # to a random initialization.
    try:
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = np.hstack((X, np.random.randn(N, Q - 2)))
    except ImportError:
        X = np.random.randn(N, Q)

    if plot:
        from mpl_toolkits import mplot3d  # registers the 3d projection
        import pylab
        fig = pylab.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("Initialization")

    # Variational variances: centred on `var` with clipped jitter,
    # offset to stay strictly positive.
    var = .5
    S = (var * np.ones_like(X) + np.clip(np.random.randn(N, Q) * var ** 2,
                                         - (1 - var),
                                         (1 - var))) + .001
    Z = np.random.permutation(X)[:num_inducing]

    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2))

    m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
    m.data_colors = c
    m.data_t = t

    m['rbf_lengthscale'] = 1.  # X.var(0).max() / X.var(0)
    m['noise_variance'] = Y.var() / 100.
    m['bias_variance'] = 0.05

    if optimize:
        m.optimize('scg', messages=True)
    return m
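
A sketch of driving the function above on a smaller problem (assumes the same module-level imports):

# Hypothetical driver: small swiss roll, initialization only,
# no optimization or plotting.
m = swiss_roll(optimize=False, N=200, num_inducing=10)
print(m)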
Example #4
    def __init__(self,
                 likelihood_or_Y_list,
                 input_dim,
                 num_inducing=10,
                 names=None,
                 kernels=None,
                 initx='PCA',
                 initz='permute',
                 _debug=False,
                 **kw):
        # Assumes module-level imports of numpy, kern, BayesianGPLVM,
        # SparseGP and Model; one BayesianGPLVM is built per output,
        # all sharing the latent X and inducing inputs Z.
        # Per-output display names: keep the given list, or number the
        # outputs from 1 when none are supplied.
        if names is None:
            self.names = [
                "{}".format(i + 1) for i in range(len(likelihood_or_Y_list))
            ]
        else:
            self.names = names

        # sort out the kernels
        if kernels is None:
            kernels = [None] * len(likelihood_or_Y_list)
        elif isinstance(kernels, kern):
            kernels = [
                kernels.copy() for i in range(len(likelihood_or_Y_list))
            ]
        else:
            assert len(kernels) == len(
                likelihood_or_Y_list), "need one kernel per output"
            assert all([isinstance(k, kern)
                        for k in kernels]), "invalid kernel object detected!"
        assert 'kernel' not in kw, "pass kernels through `kernels` argument"

        self.input_dim = input_dim
        self.num_inducing = num_inducing
        self._debug = _debug

        self._init = True
        X = self._init_X(initx, likelihood_or_Y_list)
        Z = self._init_Z(initz, X)
        self.bgplvms = [
            BayesianGPLVM(l,
                          input_dim=input_dim,
                          kernel=k,
                          X=X,
                          Z=Z,
                          num_inducing=self.num_inducing,
                          **kw) for l, k in zip(likelihood_or_Y_list, kernels)
        ]
        del self._init

        self.gref = self.bgplvms[0]
        # Cumulative parameter offsets per sub-model; each model's copy
        # of the shared inducing inputs Z is excluded so Z is counted once.
        nparams = numpy.array(
            [0] +
            [SparseGP._get_params(g).size - g.Z.size for g in self.bgplvms])
        self.nparams = nparams.cumsum()

        self.num_data = self.gref.num_data
        self.NQ = self.num_data * self.input_dim
        self.MQ = self.num_inducing * self.input_dim

        Model.__init__(self)
        self.ensure_default_constraints()
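
A usage sketch for this constructor. The class name is not shown in the snippet; MRD is assumed here, inferred from the `from GPy.models import mrd` imports in the other examples.

# Hypothetical usage: two views of 40 shared latent points, input_dim=3.
# Assumes numpy is imported as np and the class above is named MRD.
Y1, Y2 = np.random.randn(40, 5), np.random.randn(40, 8)
m = MRD([Y1, Y2], input_dim=3, num_inducing=10)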
Example #5
def bgplvm_simulation_matlab_compare():
    # Assumes numpy as np and BayesianGPLVM are imported at module level.
    from GPy.util.datasets import simulation_BGPLVM
    sim_data = simulation_BGPLVM()
    Y = sim_data['Y']
    S = sim_data['S']
    mu = sim_data['mu']
    num_inducing = 3
    _, Q = mu.shape

    from GPy.models import mrd
    from GPy import kern
    reload(mrd)  # Python 2 builtin; on Python 3 use importlib.reload
    reload(kern)
    k = kern.linear(Q, ARD=True) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k,
#                        X=mu,
#                        X_variance=S,
                       _debug=False)
    m.auto_scale_factor = True
    m['noise'] = Y.var() / 100.
    m['linear_variance'] = .01
    return m
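
The snippet builds the model but does not optimize it; a sketch of the follow-up step, mirroring the SCG call used in the other examples:

# Hypothetical follow-up: optimize the model before any comparison
# against the run that simulation_BGPLVM was generated for.
m = bgplvm_simulation_matlab_compare()
m.optimize('scg', messages=True)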
Example #6
def test_linear_bias_kern(self):
    # Assumes numpy as np, GPy, and BayesianGPLVM are imported at
    # module level; this method lives on a unittest.TestCase subclass.
    N, num_inducing, input_dim, D = 30, 5, 4, 30
    X = np.random.rand(N, input_dim)
    # Sample data from a linear + bias GP, then fit with the same
    # kernel family and check the model gradients numerically.
    k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001)
    K = k.K(X)
    Y = np.random.multivariate_normal(np.zeros(N), K, input_dim).T
    Y -= Y.mean(axis=0)
    k = GPy.kern.linear(input_dim) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001)
    m = BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)
    m.ensure_default_constraints()
    m.randomize()
    self.assertTrue(m.checkgrad())
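
Both gradient-check tests (Examples #2 and #6) presumably sit on a unittest.TestCase subclass; a minimal, hypothetical way to run them from that module:

# Hypothetical runner for the test methods above, assuming they are
# collected on a TestCase subclass defined in this module.
import unittest
if __name__ == '__main__':
    unittest.main()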