Example #1
import numpy as np


def estimate_scales(tslength):
    # estimate the random walk scale for time series of length `tslength`.
    # `time`, `data_sets`, `w_scale`, `gppoly`, `gpbrown`, `gpexp`,
    # `likelihood`, `restricted_likelihood`, `fmin_pos`, `ml_file` and
    # `reml_file` are defined elsewhere in the script
    def ml_objective(theta, d, cov_rw, cov_w, p):
        # negative log likelihood of `d` as a function of the random walk scale
        cov = theta**2 * cov_rw + w_scale**2 * cov_w
        mu = np.zeros(d.shape[0])
        return -likelihood(d, mu, cov, p)

    def reml_objective(theta, d, cov_rw, cov_w, p):
        # negative restricted log likelihood of `d`
        cov = theta**2 * cov_rw + w_scale**2 * cov_w
        mu = np.zeros(d.shape[0])
        return -restricted_likelihood(d, mu, cov, p)

    # time indices to use
    idx = time < tslength
    ml_solns = []
    reml_solns = []

    P = gppoly(1).basis(time[idx, None])
    COV_RW = gpbrown(1.0).covariance(time[idx, None], time[idx, None])
    COV_W = gpexp((0.0, 1.0, 1e-10)).covariance(time[idx, None],
                                                time[idx, None])
    for data in data_sets:
        # maximum likelihood estimate of the random walk scale
        ans = fmin_pos(ml_objective, [1.0],
                       args=(data[idx], COV_RW, COV_W, P),
                       disp=False)
        ml_solns += [ans[0]]
        # restricted maximum likelihood estimate
        ans = fmin_pos(reml_objective, [1.0],
                       args=(data[idx], COV_RW, COV_W, P),
                       disp=False)
        reml_solns += [ans[0]]

    # compute statistics on the ML and REML solutions and write them to file
    mean = np.mean(ml_solns)
    percs = np.percentile(ml_solns, np.arange(0, 105, 5))
    entry = '%s %s %s\n' % (tslength, mean, ' '.join(percs.astype(str)))
    ml_file.write(entry)
    ml_file.flush()

    mean = np.mean(reml_solns)
    percs = np.percentile(reml_solns, np.arange(0, 105, 5))
    entry = '%s %s %s\n' % (tslength, mean, ' '.join(percs.astype(str)))
    reml_file.write(entry)
    reml_file.flush()
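
The helpers used above (fmin_pos, likelihood, restricted_likelihood) and the arrays time, data_sets and w_scale come from the surrounding script. For reference, here is a minimal self-contained sketch of the same maximum-likelihood scale estimation, with synthetic data and scipy standing in for those helpers; all names and values below are illustrative only.

import numpy as np
from scipy.optimize import minimize
from scipy.stats import multivariate_normal

# synthetic time series: a random walk (Brownian motion) plus white noise
np.random.seed(0)
time = np.arange(0.1, 10.0, 0.1)
true_rw_scale = 1.5
w_scale = 0.5
data = (np.cumsum(np.random.normal(0.0, true_rw_scale * np.sqrt(0.1),
                                   time.shape[0])) +
        np.random.normal(0.0, w_scale, time.shape[0]))

# the covariance of a unit-scale Brownian motion is min(t_i, t_j); the
# white noise covariance is the identity
COV_RW = np.minimum(time[:, None], time[None, :])
COV_W = np.eye(time.shape[0])


def ml_objective(log_theta):
    # negative Gaussian log likelihood of the data as a function of the
    # random walk scale; optimizing log(theta) keeps the scale positive,
    # standing in for the positivity-constrained fmin_pos
    theta = np.exp(log_theta[0])
    cov = theta**2 * COV_RW + w_scale**2 * COV_W
    return -multivariate_normal.logpdf(data, np.zeros(data.shape[0]), cov)


soln = minimize(ml_objective, [0.0], method='Nelder-Mead')
print('estimated random walk scale: %.3f' % np.exp(soln.x[0]))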
Example #2
import numpy as np
from rbf.gauss import gpse, gpexp, gppoly, gpbfc
from pygeons.filter.gpr import gpr, gpfogm, gpseasonal
import logging
logging.basicConfig(level=logging.DEBUG)


def seasonals(x):
    # sine and cosine seasonal basis functions (period of one time unit)
    # evaluated at the times in `x`
    return np.array([np.sin(2 * np.pi * x[:, 0]),
                     np.cos(2 * np.pi * x[:, 0])]).T


# observation times
dt = 1.0
times = np.arange(0.0, 500.0, dt)[:, None]
# GaussianProcess describing the underlying signal
signal_gp = gppoly(1)
signal_gp += gpse((0.0, 2.0**2, 50.0))
# GaussianProcess describing continuous noise
noise_gp = gpfogm(0.5, 10.0)
noise_gp += gpseasonal(True, True)
# standard deviation for discrete noise
noise_sigma = 0.1 * np.ones(times.shape[0])
# underlying signal we want to recover
true = signal_gp.sample(times, c=[1.0, 0.5])
# true signal plus noise
obs = (true + noise_gp.sample(times, c=[1.0, 1.0, 1.0, 1.0]) +
       np.random.normal(0.0, noise_sigma))
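
# optional: a minimal sketch of a sanity check, plotting the synthetic
# observations against the true signal before filtering (assumes
# matplotlib is available; not part of the library API)
import matplotlib.pyplot as plt
fig, ax = plt.subplots(figsize=(6, 4))
ax.plot(times[:, 0], obs, 'k.', markersize=2, label='observations')
ax.plot(times[:, 0], true, 'b-', label='true signal')
ax.set_xlabel('time')
ax.legend()
plt.show()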

pred, sigma = gpr(times,
                  obs,
                  noise_sigma, (2.0, 50.0),
Example #3
from rbf.basis import spwen32
from rbf.gauss import gpiso, gppoly

# `xobs`, `uobs`, `sobs` (observation points, values and uncertainties)
# and `xitp` (interpolation points) are defined elsewhere in the script.
# use a compactly supported basis function so that the covariance
# matrices are sparse (true for this demo)
basis = spwen32

# define hyperparameters for the prior. Tune these parameters to get a
# satisfactory interpolant. These can also be chosen with maximum
# likelihood methods.
prior_mean = 0.0
prior_sigma = 1.0
prior_lengthscale = 0.8  # this controls the sparsity

# create the prior Gaussian process
params = (prior_mean, prior_sigma, prior_lengthscale)
prior_gp = gpiso(basis, params)
# add a first order polynomial to the prior to make it suitable for
# data with linear trends
prior_gp += gppoly(1)

# condition the prior on the observations, creating a new Gaussian
# process for the posterior.
posterior_gp = prior_gp.condition(xobs, uobs, sigma=sobs)

# differentiate the posterior with respect to x
derivative_gp = posterior_gp.differentiate((1, 0))

# evaluate the posterior and posterior derivative at the interpolation
# points. calling the GaussianProcess instances will return their mean
# and standard deviation at the provided points.
post_mean, post_std = posterior_gp(xitp)
diff_mean, diff_std = derivative_gp(xitp)

## Plotting
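# a minimal sketch of how the results could be displayed, assuming `xitp`
# is an (N, 2) array of interpolation points and that matplotlib is
# available; the plotting details are illustrative only
import matplotlib.pyplot as plt

fig, axs = plt.subplots(2, 2, figsize=(10, 8))
fields = [(post_mean, 'posterior mean'), (post_std, 'posterior std. dev.'),
          (diff_mean, 'x derivative mean'), (diff_std, 'x derivative std. dev.')]
for ax, (vals, title) in zip(axs.ravel(), fields):
    sc = ax.scatter(xitp[:, 0], xitp[:, 1], s=10, c=vals)
    ax.set_title(title)
    fig.colorbar(sc, ax=ax)

fig.tight_layout()
plt.show()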
Example #4
import numpy as np
import matplotlib.pyplot as plt
import logging
from rbf.gauss import gpse, gppoly
logging.basicConfig(level=logging.DEBUG)
np.random.seed(1)

y = np.linspace(-7.5, 7.5, 50)  # observation points
x = np.linspace(-7.5, 7.5, 1000)  # interpolation points
truth = np.exp(-0.3 * np.abs(x)) * np.sin(x)  # true signal at interp. points
# form synthetic data
obs_sigma = 0.1 * np.ones(50)  # noise standard deviation
noise = np.random.normal(0.0, obs_sigma)
noise[20], noise[25] = 2.0, 1.0  # add anomalously large noise
obs_mu = np.exp(-0.3 * np.abs(y)) * np.sin(y) + noise
# form prior Gaussian process
prior = gpse((0.0, 1.0, 1.0)) + gppoly(1)
# find outliers which will be removed
toss = prior.outliers(y[:, None], obs_mu, obs_sigma)
# condition with non-outliers
post = prior.condition(y[~toss, None], obs_mu[~toss], obs_sigma[~toss])
post_mu, post_sigma = post(x[:, None])
# plot the results
fig, ax = plt.subplots(figsize=(6, 4))
ax.errorbar(y, obs_mu, obs_sigma, fmt='k.', capsize=0.0, label='observations')
ax.plot(x, post_mu, 'b-', label='posterior mean')
ax.fill_between(x,
                post_mu - post_sigma,
                post_mu + post_sigma,
                color='b',
                alpha=0.2,
                edgecolor='none',