def bgplvm_simulation(optimize=True, verbose=1, plot=True, plot_sim=False,
                      max_iters=2e4):
    # Bayesian GP-LVM demo on simulated sin/cos data, using an ARD linear kernel.
    from GPy import kern
    from GPy.models import BayesianGPLVM

    D1, D2, D3, N, num_inducing, Q = 49, 30, 10, 12, 3, 10
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
    Y = Ylist[0]

    k = kern.linear(Q, ARD=True)
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k)
    m.X_variance = m.X_variance * .7   # shrink the initial latent variances
    m['noise'] = Y.var() / 100.        # start with a small noise variance

    if optimize:
        print("Optimizing model:")
        m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05)
    if plot:
        m.plot_X_1d("BGPLVM Latent Space 1D")
        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
    return m
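# A minimal usage sketch (not part of the original demos): build the model with
# optimize=False for a quick smoke test, then run a short manual optimization.
# It relies only on calls that already appear in bgplvm_simulation above, plus
# log_likelihood(), which is assumed from the standard GPy model interface.
def _bgplvm_simulation_usage_sketch():
    m = bgplvm_simulation(optimize=False, plot=False)
    print("initial log likelihood: {}".format(m.log_likelihood()))
    m.optimize('scg', messages=1, max_iters=200, gtol=.05)  # short manual run
    m.plot_X_1d("BGPLVM Latent Space 1D (short run)")
    return m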
def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
    # MRD demo on simulated sin/cos data: one Gaussian likelihood per view,
    # shared latent space with an ARD linear kernel.
    from GPy import kern
    from GPy.models import MRD
    from GPy.likelihoods import Gaussian

    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)

    likelihood_list = [Gaussian(x, normalize=True) for x in Ylist]
    k = kern.linear(Q, ARD=True)  # + kern.bias(Q, _np.exp(-2)) + kern.white(Q, _np.exp(-2))
    m = MRD(likelihood_list, input_dim=Q, num_inducing=num_inducing,
            kernels=k, initx="", initz='permute', **kw)
    m.ensure_default_constraints()

    for i, bgplvm in enumerate(m.bgplvms):
        m['{}_noise'.format(i)] = 1  # bgplvm.likelihood.Y.var() / 500.
        bgplvm.X_variance = bgplvm.X_variance  # * .1

    if optimize:
        print("Optimizing Model:")
        m.optimize(messages=verbose, max_iters=8e3, gtol=.1)
    if plot:
        m.plot_X_1d("MRD Latent Space 1D")
        m.plot_scales("MRD Scales")
    return m
def mrd_simulation(optimize=True, plot=True, plot_sim=True, **kw):
    # Larger MRD demo on simulated sin/cos data, with a linear + bias + white kernel.
    import numpy as np
    from GPy.models import mrd
    from GPy import kern
    # reload(mrd); reload(kern)  # interactive-development reload hook (Python 2 builtin)

    D1, D2, D3, N, num_inducing, Q = 150, 200, 400, 500, 3, 7
    slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)

    k = (kern.linear(Q, [.05] * Q, ARD=True)
         + kern.bias(Q, np.exp(-2))
         + kern.white(Q, np.exp(-2)))
    m = mrd.MRD(Ylist, input_dim=Q, num_inducing=num_inducing, kernels=k,
                initx="", initz='permute', **kw)

    for i, Y in enumerate(Ylist):
        m['{}_noise'.format(i + 1)] = Y.var() / 100.

    # DEBUG
    # np.seterr("raise")

    if optimize:
        print("Optimizing Model:")
        m.optimize(messages=1, max_iters=8e3, max_f_eval=8e3, gtol=.1)
    if plot:
        m.plot_X_1d("MRD Latent Space 1D")
        m.plot_scales("MRD Scales")
    return m
def bgplvm_simulation(optimize='scg', plot=True, max_f_eval=2e4):
    # Bayesian GP-LVM demo with a linear + bias + white kernel and debug flags.
    # from GPy.core.transformations import logexp_clipped
    import numpy as np
    from GPy.models import mrd, BayesianGPLVM
    from GPy import kern
    # reload(mrd); reload(kern)  # interactive-development reload hook (Python 2 builtin)

    D1, D2, D3, N, num_inducing, Q = 15, 8, 8, 100, 3, 5
    slist, Slist, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot)
    Y = Ylist[0]

    k = (kern.linear(Q, ARD=True)
         + kern.bias(Q, np.exp(-2))
         + kern.white(Q, np.exp(-2)))  # + kern.bias(Q)
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing,
                      kernel=k, _debug=True)
    # m.constrain('variance|noise', logexp_clipped())
    m['noise'] = Y.var() / 100.
    m['linear_variance'] = .01

    if optimize:
        print("Optimizing model:")
        m.optimize(optimize, max_iters=max_f_eval, max_f_eval=max_f_eval,
                   messages=True, gtol=.05)
    if plot:
        m.plot_X_1d("BGPLVM Latent Space 1D")
        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
    return m
def bgplvm_simulation_matlab_compare():
    # Build (but do not optimize) a Bayesian GP-LVM on the simulation_BGPLVM
    # dataset, for comparison against the MATLAB implementation.
    import numpy as np
    from GPy.util.datasets import simulation_BGPLVM
    from GPy.models import mrd, BayesianGPLVM
    from GPy import kern
    # reload(mrd); reload(kern)  # interactive-development reload hook (Python 2 builtin)

    sim_data = simulation_BGPLVM()
    Y = sim_data['Y']
    S = sim_data['S']
    mu = sim_data['mu']
    num_inducing, [_, Q] = 3, mu.shape

    k = (kern.linear(Q, ARD=True)
         + kern.bias(Q, np.exp(-2))
         + kern.white(Q, np.exp(-2)))
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k,
                      # X=mu,
                      # X_variance=S,
                      _debug=False)
    m.auto_scale_factor = True
    m['noise'] = Y.var() / 100.
    m['linear_variance'] = .01
    return m
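# Hedged usage sketch (not in the original): bgplvm_simulation_matlab_compare
# only constructs the model, so a caller has to optimize and plot it. The calls
# below reuse only the optimize/plot API already used by the other demos in
# this file; the iteration budget is illustrative, not taken from the original.
def _bgplvm_matlab_compare_usage_sketch(max_iters=2e4):
    m = bgplvm_simulation_matlab_compare()
    m.optimize('scg', messages=1, max_iters=max_iters, gtol=.05)
    m.plot_X_1d("BGPLVM MATLAB Comparison Latent Space 1D")
    m.kern.plot_ARD("BGPLVM MATLAB Comparison ARD Parameters")
    return m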
def bgplvm_simulation(optimize=True, verbose=1, plot=True, plot_sim=False,
                      max_iters=2e4):
    # Bayesian GP-LVM demo on simulated sin/cos data, with a linear + bias + white kernel.
    import numpy as _np
    from GPy import kern
    from GPy.models import BayesianGPLVM

    D1, D2, D3, N, num_inducing, Q = 15, 5, 8, 30, 3, 10
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)
    Y = Ylist[0]

    k = (kern.linear(Q, ARD=True)
         + kern.bias(Q, _np.exp(-2))
         + kern.white(Q, _np.exp(-2)))  # + kern.bias(Q)
    m = BayesianGPLVM(Y, Q, init="PCA", num_inducing=num_inducing, kernel=k)
    m['noise'] = Y.var() / 100.        # start with a small noise variance

    if optimize:
        print("Optimizing model:")
        m.optimize('scg', messages=verbose, max_iters=max_iters, gtol=.05)
    if plot:
        m.plot_X_1d("BGPLVM Latent Space 1D")
        m.kern.plot_ARD('BGPLVM Simulation ARD Parameters')
    return m
def mrd_simulation(optimize=True, verbose=True, plot=True, plot_sim=True, **kw):
    # MRD demo on simulated sin/cos data: one Gaussian likelihood per view,
    # linear + bias + white kernel on a shared latent space.
    import numpy as _np
    from GPy import kern
    from GPy.models import MRD
    from GPy.likelihoods import Gaussian

    D1, D2, D3, N, num_inducing, Q = 60, 20, 36, 60, 6, 5
    _, _, Ylist = _simulate_sincos(D1, D2, D3, N, num_inducing, Q, plot_sim)

    likelihood_list = [Gaussian(x, normalize=True) for x in Ylist]
    k = (kern.linear(Q, ARD=True)
         + kern.bias(Q, _np.exp(-2))
         + kern.white(Q, _np.exp(-2)))
    m = MRD(likelihood_list, input_dim=Q, num_inducing=num_inducing,
            kernels=k, initx="", initz='permute', **kw)
    m.ensure_default_constraints()

    for i, bgplvm in enumerate(m.bgplvms):
        m['{}_noise'.format(i)] = bgplvm.likelihood.Y.var() / 500.

    if optimize:
        print("Optimizing Model:")
        m.optimize(messages=verbose, max_iters=8e3, gtol=.1)
    if plot:
        m.plot_X_1d("MRD Latent Space 1D")
        m.plot_scales("MRD Scales")
    return m
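# Hypothetical driver (not in the original file): runs bgplvm_simulation and
# mrd_simulation as defined last above and keeps the figures open. The pylab
# import and show() call are assumptions about how these demos are viewed
# interactively; everything else calls only functions defined in this module.
if __name__ == '__main__':
    import pylab
    bgplvm_simulation()
    mrd_simulation()
    pylab.show()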