# Grid search over GP hyperparameters: for each (log-amplitude, log-length-scale)
# pair, measure how strongly the transit at the true epoch stands out from the
# distribution of log-likelihoods computed at wrong epochs.
# NOTE(review): `data`, `truth`, and `GaussianProcess` are defined elsewhere
# in this file/project.
t = data.time
f = data.flux
d = 0.2  # half-duration of the boxcar transit model (same units as t)

# Log-spaced grids: amplitude = 10**a, length scale = 10**l.
x = np.linspace(-6, 0, 10)
y = np.linspace(-1.0, 10, 12)
s2n = np.zeros((len(x), len(y)))
# NOTE(review): `ll` and `delta_ll` are allocated but never filled in this
# chunk — presumably written elsewhere or left over; confirm before removing.
ll = np.zeros((len(x), len(y)))
delta_ll = np.zeros((len(x), len(y)))

for ix, a in enumerate(x):
    for iy, l in enumerate(y):
        gp = GaussianProcess([10 ** a, 10 ** l, 1e6])
        gp.compute(data.time, data.ferr)
        # Log-likelihood of the no-transit (flat) model.
        # NOTE(review): `null` is unused below in this chunk; confirm.
        null = gp.lnlikelihood(f - 1.0)

        # Evaluate a boxcar transit model centered at every observed time,
        # skipping epochs that overlap the true transit at truth[2].
        results = []
        for t0 in t:
            if np.abs(t0 - truth[2]) < 2 * d:
                continue
            model = np.ones_like(f)
            model[(t < t0 + d) * (t > t0 - d)] *= 1.0 - 0.0001
            results.append((t0, gp.lnlikelihood(f - model)))
        results = np.array(results)

        # Log-likelihood of the same model placed at the true epoch.
        model = np.ones_like(f)
        model[(t < truth[2] + d) * (t > truth[2] - d)] *= 1.0 - 0.0001
        ll_true = gp.lnlikelihood(f - model)

        # Signal-to-noise: distance of the true-epoch likelihood above the
        # wrong-epoch distribution, in units of its standard deviation.
        mu, std = np.mean(results[:, 1]), np.std(results[:, 1])
        s2n[ix, iy] = (ll_true - mu) / std
# NOTE(review): this chunk begins mid-loop — the `for` header that builds each
# dataset `ds` (and the transit `model` applied to it) lies before this chunk.
# The two statements below close out that dataset-generation loop body.
ds.flux *= model
datasets.append(ds)

# Quick-look plot of all simulated light curves.
for ds in datasets:
    pl.plot(ds.time, ds.flux, ".")
pl.savefig("data.png")

# Compute the Gaussian processes on the dataset.
gp = GaussianProcess([1e-3, 3.0, 10.])
# NOTE(review): `dataset` (singular) is not defined in this chunk and looks
# like a typo for one of `datasets`; confirm against the full file.
gp.compute(dataset.time, dataset.ferr)

# Loop over true epochs, collecting the likelihood gain (over the flat
# no-transit model) of the correctly-placed model vs. misplaced ones.
d = 0.2  # exclusion half-window around the true epoch (same units as time)
correct = []
incorrect = []
for t0, depth, duration, data in zip(t0s, depths, durations, datasets):
    # Log-likelihood of the no-transit (flat) model for this dataset.
    null = gp.lnlikelihood(data.flux - 1.0)

    # Compute the correct model, placed at the true epoch t0.
    # NOTE(review): `depth` is unpacked but unused — the hard-coded 0.0001
    # transit depth below probably should be `depth`; confirm.
    model = np.ones_like(data.time)
    model[(data.time < t0 + duration) * (data.time > t0 - duration)] *= 1.0 - 0.0001
    correct.append(gp.lnlikelihood(data.flux - model) - null)

    # Test models placed at every other observed time, excluding epochs
    # that overlap the true transit.
    for t in data.time:
        if np.abs(t0 - t) < 2 * d:
            continue
        model = np.ones_like(data.time)
        model[(data.time < t + duration) * (data.time > t - duration)] *= 1 - 0.0001
        incorrect.append(gp.lnlikelihood(data.flux - model) - null)
# Periodic-kernel example: simulate noisy data with a cosine signal on top of
# a GP prior sample, then condition a GP with an added periodic kernel on it.
# NOTE(review): `gp`, `kernel`, `ExpSquaredKernel`, `CosineKernel`,
# `GaussianProcess`, and the plotting module `pl` are defined elsewhere.

# Generate some fake data.
period = 0.956
x = 10 * np.sort(np.random.rand(75))
yerr = 0.1 + 0.1 * np.random.rand(len(x))
y = gp.sample_prior(x)
y += 0.8 * np.cos(2 * np.pi * x / period)
y += yerr * np.random.randn(len(yerr))

# Set up a periodic kernel and add it to the existing one.
pk = ExpSquaredKernel(np.sqrt(0.8), 1000.0) * CosineKernel(period)
kernel2 = kernel + pk
gp2 = GaussianProcess(kernel2)

# Condition on this data.
gp2.compute(x, yerr)

# Compute the log-likelihood.
print("Log likelihood = {0}".format(gp2.lnlikelihood(y)))

# Compute the conditional predictive distribution by sampling and summarize
# it with a pointwise mean and standard deviation.
t = np.linspace(0, 10, 200)
f = gp2.sample_conditional(y, t, size=500)
mu = np.mean(f, axis=0)
std = np.std(f, axis=0)

# Plot the data with the predictive mean and +/- 1 sigma band.
pl.errorbar(x, y, yerr=yerr, fmt=".k")
pl.plot(t, mu, "k", lw=2, alpha=0.5)
pl.plot(t, mu + std, "k", alpha=0.5)
pl.plot(t, mu - std, "k", alpha=0.5)
pl.savefig("periodic.png")