# Set test data: an evenly spaced grid spanning the training inputs.
XT = np.linspace(np.min(Xall), np.max(Xall), num=200)

# GP prior: an independent pair of Matern-3/2 kernels, one per latent
# function (signal mean and heteroscedastic noise scale).
var_f1 = 3.  # GP variance (first latent)
len_f1 = 1.  # GP lengthscale (first latent)
var_f2 = 3.  # GP variance (second latent)
len_f2 = 1.  # GP lengthscale (second latent)
prior1 = priors.Matern32(variance=var_f1, lengthscale=len_f1)
prior2 = priors.Matern32(variance=var_f2, lengthscale=len_f2)
prior = priors.Independent([prior1, prior2])

lik = likelihoods.HeteroscedasticNoise()

# Approximate inference scheme; commented alternatives kept for experiments.
# inf_method = approx_inf.ExpectationPropagation(power=0.9, intmethod='UT', damping=0.1)
inf_method = approx_inf.ExpectationPropagation(power=0.1, intmethod='GH', damping=0.5)
# inf_method = approx_inf.VariationalInference(intmethod='GH', damping=0.5)
# inf_method = approx_inf.VariationalInference(intmethod='UT', damping=0.5)
# inf_method = approx_inf.ExtendedEP(power=0, damping=0.5)
# inf_method = approx_inf.ExtendedKalmanSmoother(damping=0.5)
# inf_method = approx_inf.GaussHermiteKalmanSmoother(damping=0.5)
# inf_method = approx_inf.StatisticallyLinearisedEP(intmethod='UT', damping=0.5)
# inf_method = approx_inf.UnscentedKalmanSmoother(damping=0.5)

model = SDEGP(prior=prior, likelihood=lik, t=Xall, y=Yall, t_test=XT, approx_inf=inf_method)

opt_init, opt_update, get_params = optimizers.adam(step_size=5e-2)
# parameters should be a 2-element list [param_prior, param_likelihood]
opt_state = opt_init([model.prior.hyp, model.likelihood.hyp])
# Synthetic binary-classification data: a damped sinusoid plus Gaussian
# noise, thresholded through sign() into {0, 1} labels.
f = lambda x_: 6 * np.sin(pi * x_ / 10.0) / (pi * x_ / 10.0 + 1)
# NOTE: np.math.sqrt was a deprecated alias of the stdlib math module and is
# removed in NumPy >= 1.25 — use np.sqrt instead.
y_ = f(x) + np.sqrt(0.05) * np.random.randn(x.shape[0])
y = np.sign(y_)
y[y == -1] = 0  # map {-1, +1} sign labels to {0, 1} for the Bernoulli likelihood
x_test = np.linspace(np.min(x) - 5.0, np.max(x) + 5.0, num=500)
y_test = np.sign(f(x_test) + np.sqrt(0.05) * np.random.randn(x_test.shape[0]))
y_test[y_test == -1] = 0

# GP prior hyperparameters.
var_f = 1.   # GP variance
len_f = 5.0  # GP lengthscale
prior = priors.Matern52(variance=var_f, lengthscale=len_f)
lik = likelihoods.Bernoulli(link='logit')

# Approximate inference scheme; commented alternatives kept for experiments.
inf_method = approx_inf.ExpectationPropagation(power=0.9, intmethod='UT')
# inf_method = approx_inf.VariationalInference(intmethod='GH')
# inf_method = approx_inf.VariationalInference(intmethod='UT')
# inf_method = approx_inf.ExtendedEP(power=0)
# inf_method = approx_inf.ExtendedKalmanSmoother()
# inf_method = approx_inf.GaussHermiteKalmanSmoother()
# inf_method = approx_inf.StatisticallyLinearisedEP(intmethod='UT')
# inf_method = approx_inf.UnscentedKalmanSmoother()

model = SDEGP(prior=prior, likelihood=lik, t=x, y=y, approx_inf=inf_method)

opt_init, opt_update, get_params = optimizers.adam(step_size=2e-1)
# parameters should be a 2-element list [param_prior, param_likelihood]
opt_state = opt_init([model.prior.hyp, model.likelihood.hyp])
# X0test, X1test = np.linspace(-3., 3., num=100), np.linspace(-3., 3., num=100)
# plot_2d_classification(None, 0)

np.random.seed(99)

N = X.shape[0]  # number of training points

# GP prior: separable spatio-temporal Matern-5/2 kernel.
var_f = 0.3      # GP variance
len_time = 0.3   # temporal lengthscale
len_space = 0.3  # spatial lengthscale
prior = priors.SpatioTemporalMatern52(variance=var_f,
                                      lengthscale_time=len_time,
                                      lengthscale_space=len_space)

lik = likelihoods.Probit()

# Approximate inference scheme; commented alternatives kept for experiments.
inf_method = approx_inf.ExpectationPropagation(power=0.5)
# inf_method = approx_inf.StatisticallyLinearisedEP()
# inf_method = approx_inf.ExtendedKalmanSmoother()
# inf_method = approx_inf.VariationalInference()

model = SDEGP(prior=prior, likelihood=lik, t=X, y=Y, r=R,
              t_test=Xtest, r_test=Rtest, approx_inf=inf_method)

opt_init, opt_update, get_params = optimizers.adam(step_size=2e-1)
# parameters should be a 2-element list [param_prior, param_likelihood]