import numpy as np
from numpy import pi
import jax.numpy as jnp
from jax.experimental import optimizers
# the imports below assume the kalman-jax repository layout
import priors
import likelihoods
import approximate_inference as approx_inf
from approximate_inference import EP
from sde_gp import SDEGP

np.random.seed(99)
N = 1000  # number of training points
x = 100 * np.random.rand(N)
f = lambda x_: 6 * np.sin(pi * x_ / 10.0) / (pi * x_ / 10.0 + 1)
y_ = f(x) + np.sqrt(0.05) * np.random.randn(x.shape[0])  # np.math.sqrt is removed in NumPy 2.0
y = np.sign(y_)
y[y == -1] = 0  # map labels from {-1, 1} to {0, 1} for the Bernoulli likelihood
x_test = np.linspace(np.min(x)-5.0, np.max(x)+5.0, num=500)
y_test = np.sign(f(x_test) + np.sqrt(0.05) * np.random.randn(x_test.shape[0]))

y_test[y_test == -1] = 0

var_f = 1.  # GP variance
len_f = 5.0  # GP lengthscale

prior = priors.Matern52(variance=var_f, lengthscale=len_f)

lik = likelihoods.Bernoulli(link='logit')
inf_method = approx_inf.ExpectationPropagation(power=0.9, intmethod='UT')
# inf_method = approx_inf.VariationalInference(intmethod='GH')
# inf_method = approx_inf.VariationalInference(intmethod='UT')
# inf_method = approx_inf.ExtendedEP(power=0)
# inf_method = approx_inf.ExtendedKalmanSmoother()
# inf_method = approx_inf.GaussHermiteKalmanSmoother()
# inf_method = approx_inf.StatisticallyLinearisedEP(intmethod='UT')
# inf_method = approx_inf.UnscentedKalmanSmoother()

model = SDEGP(prior=prior, likelihood=lik, t=x, y=y, approx_inf=inf_method)

opt_init, opt_update, get_params = optimizers.adam(step_size=2e-1)
# parameters should be a 2-element list [param_prior, param_likelihood]
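# A hedged sketch of the training loop that typically follows (not part of the
# original snippet). model.run() returning (neg. log marginal likelihood,
# gradients) is an assumption based on the kalman-jax demos.
opt_state = opt_init([model.prior.hyp, model.likelihood.hyp])

def gradient_step(i, state, mod):
    params = get_params(state)
    mod.prior.hyp = params[0]
    mod.likelihood.hyp = params[1]
    neg_log_marg_lik, gradients = mod.run()  # filter/smoother pass with gradients
    return opt_update(i, gradients, state)

for j in range(20):
    opt_state = gradient_step(j, opt_state, model)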
Example #2
# `method` and `fold` are assumed to be set by an outer experiment loop
print('method number', method)
print('batch number', fold)

# Get training and test indices
ind_test = ind_split[fold]  # np.sort(ind_shuffled[:N//10])
ind_train = np.concatenate(ind_split[np.arange(10) != fold])
x_train = x  # full set; use x[ind_train] for a 90/10 train/test split
x_test = x   # full set; use x[ind_test]
y_train = y  # full set; use y[ind_train]
y_test = y   # full set; use y[ind_test]
N_batch = 5000
M = 5000  # number of points in the grid z
# z = np.linspace(701050, 737050, M)
z = np.linspace(x[0], x[-1], M)

prior_1 = priors.Matern52(variance=2., lengthscale=5.5e4)
prior_2 = priors.QuasiPeriodicMatern32(variance=1., lengthscale_periodic=2., period=365., lengthscale_matern=1.5e4)
prior_3 = priors.QuasiPeriodicMatern32(variance=1., lengthscale_periodic=2., period=7., lengthscale_matern=30*365.)

prior = priors.Sum([prior_1, prior_2, prior_3])
lik = likelihoods.Poisson()

if method == 0:
    inf_method = approx_inf.EKS(damping=.5)
elif method == 1:
    inf_method = approx_inf.UKS(damping=.5)
elif method == 2:
    inf_method = approx_inf.GHKS(damping=.5)
elif method == 3:
    inf_method = approx_inf.EP(power=1, intmethod='GH', damping=.5)
elif method == 4:
    # the original snippet is truncated here; a plausible final branch,
    # mirroring the pattern above (assumed, not from the source)
    inf_method = approx_inf.EP(power=1, intmethod='UT', damping=.5)
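# A minimal usage sketch (not in the original snippet): build the model as in
# Example #1. Passing z as the test grid is an assumption.
model = SDEGP(prior=prior, likelihood=lik, t=x_train, y=y_train,
              t_test=z, approx_inf=inf_method)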
Example #3

N = 1000  # number of training points
x = np.sort(
    np.random.permutation(
        np.linspace(-25.0, 150.0, num=N) +
        0.5 * np.random.randn(N)))  # unevenly spaced
x_test = np.linspace(np.min(x) - 15.0, np.max(x) + 15.0, num=100)
dummy_y = x  # placeholder targets (data will be sampled from the prior below)

var_f = 1.0  # GP variance
len_f = 20.0  # GP lengthscale
var_y = 0.1  # observation noise variance

theta_prior = jnp.array([var_f, len_f])
theta_lik = jnp.array([])

prior_ = priors.Matern52(theta_prior)
lik_ = likelihoods.SumOfGaussians(theta_lik)
approx_inf_ = EP(power=1.)
# approx_inf_ = PL()
# approx_inf_ = CL(power=0.5)
# approx_inf_ = IKS()
# approx_inf_ = EKEP()

sde_gp_model = SDEGP(prior=prior_,
                     likelihood=lik_,
                     t=x,
                     y=dummy_y,
                     t_test=x_test,
                     approx_inf=approx_inf_)

print('generating some data by sampling from the prior ...')
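# A hedged sketch of the sampling step the print statement announces (the
# original snippet ends here); prior_sample is assumed from the kalman-jax
# SDEGP interface
y_sample = sde_gp_model.prior_sample(1, t=x)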