def main():
    # Load
    G = torch.from_numpy(np.load(os.path.join(
        data_folder, "F_niklas.npy"))).float().detach()
    grid = Grid.load(os.path.join(data_folder, "grid.pickle"))
    volcano_coords = torch.from_numpy(grid.cells).float().detach()

    # Optimal hyperparameters.
    data_std = 0.1
    sigma0 = 2.968352
    lambda0 = 207.8275

    ground_truth = torch.from_numpy(
        np.load(os.path.join(results_folder, "ground_truth.npy")))
    synth_data = torch.from_numpy(
        np.load(os.path.join(results_folder, "synth_data.npy")))

    # Build trends: constant + cylindrical.
    x0 = volcano_coords[:, 0].mean()  # Volcano center.
    y0 = volcano_coords[:, 1].mean()
    z0 = volcano_coords[:, 2].mean()
    coeff_F = torch.hstack([
        torch.ones(volcano_coords.shape[0], 1),
        cylindrical(volcano_coords, x0, y0).reshape(-1, 1)
    ]).float()

    # Now fit the GP model.
    updatable_gp = UniversalUpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        volcano_coords,
                                        coeff_F,
                                        coeff_cov="uniform",
                                        coeff_mean="uniform",
                                        n_chunks=200)
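    # Note: "uniform" presumably requests an (improper) flat prior on the
    # trend coefficients, i.e. classical universal kriging; the option's
    # implementation is not shown in this excerpt, so this is an assumption.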

    # Compute cross-validation matrix.
    K_tilde = updatable_gp.compute_cv_matrix(G, synth_data, data_std=0.2)
    np.save(os.path.join(results_folder, "K_tilde.npy"),
            K_tilde.cpu().numpy())

    # Compute posterior mean.
    updatable_gp.update_uniform(G, synth_data, data_std)
    np.save(os.path.join(results_folder, "post_mean_universal.npy"),
            updatable_gp.post_mean.cpu().numpy())

    np.save(os.path.join(results_folder, "coeff_post_mean.npy"),
            updatable_gp.coeff_post_mean.cpu().numpy())

    variance = updatable_gp.covariance.extract_variance()
    np.save(os.path.join(results_folder, "variance_universal.npy"),
            variance.cpu().numpy())

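# The `cylindrical` trend basis used above is not defined in this excerpt.
# A minimal sketch of what such a basis could compute (an assumption, not the
# actual volcapy helper): the horizontal distance of each cell to the vertical
# axis through the volcano centre (x0, y0).
def cylindrical_sketch(coords, x0, y0):
    # coords: (n_cells, 3) tensor of cell coordinates.
    return torch.sqrt((coords[:, 0] - x0)**2 + (coords[:, 1] - y0)**2)
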
    """
def main():
    # Load
    G = torch.from_numpy(np.load(os.path.join(
        data_folder, "F_niklas.npy"))).float().detach()
    grid = Grid.load(os.path.join(data_folder, "grid.pickle"))
    volcano_coords = torch.from_numpy(grid.cells).float().detach()
    data_coords = np.load(os.path.join(data_folder, "niklas_data_coords.npy"))

    data_std = 0.1
    ground_truth = torch.from_numpy(
        np.load(os.path.join(results_folder, "ground_truth.npy")))
    synth_data = torch.from_numpy(
        np.load(os.path.join(results_folder, "synth_data.npy")))

    # Build trends: constant + cylindrical.
    x0 = volcano_coords[:, 0].mean()  # Volcano center.
    y0 = volcano_coords[:, 1].mean()
    z0 = volcano_coords[:, 2].mean()
    coeff_F = torch.hstack([
        torch.ones(volcano_coords.shape[0], 1),
        cylindrical(volcano_coords, x0, y0).reshape(-1, 1)
    ]).float()

    # Define GP model with arbitrary parameters (we will train them anyway).
    lambda0, sigma0 = 10, 2
    updatable_gp = UniversalUpdatableGP(kernel,
                                        lambda0,
                                        sigma0,
                                        volcano_coords,
                                        coeff_F,
                                        coeff_cov="uniform",
                                        coeff_mean="uniform",
                                        n_chunks=200)

    # Train hyperparameters via cross-validation.
    lambda0s = np.linspace(0.2, 1000, 50)
    sigma0s = np.linspace(0.1, 100, 50)

    # Compute folds via kMeans.
    k = 10  # number of clusters.
    folds = kMeans_folds(k, data_coords)
    updatable_gp.train_cv_criterion(
        lambda0s,
        sigma0s,
        G,
        synth_data,
        data_std,
        criterion="k fold",
        folds=folds,
        out_path=os.path.join(results_folder,
                              "./{}_fold_kMeans_residuals.pck".format(k)))

# Example 3

def main():
    # Load
    G = torch.from_numpy(np.load(os.path.join(
        data_folder, "F_niklas.npy"))).float().detach()
    grid = Grid.load(os.path.join(data_folder, "grid.pickle"))
    volcano_coords = torch.from_numpy(grid.cells).float().detach()

    # Define GP model.
    data_std = 0.1
    sigma0 = 1.0
    m0 = 2139.1
    lambda0 = 200.0

    # Build trends: constant + cylindrical.
    x0 = volcano_coords[:, 0].mean()  # Volcano center.
    y0 = volcano_coords[:, 1].mean()
    z0 = volcano_coords[:, 2].mean()

    coeff_mean = torch.tensor([m0, 0.01]).reshape(-1, 1).float()
    coeff_cov = torch.tensor([[200.0, 0], [0, 0.05]]).float()
    coeff_F = torch.hstack([
        torch.ones(volcano_coords.shape[0], 1),
        cylindrical(volcano_coords, x0, y0).reshape(-1, 1)
    ]).float()

    # Model with trend.
    updatable_gp = UniversalUpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        volcano_coords,
                                        coeff_F,
                                        coeff_cov,
                                        coeff_mean,
                                        n_chunks=200)

    ground_truth = torch.from_numpy(
        np.load(os.path.join(results_folder, "ground_truth.npy")))
    synth_data = torch.from_numpy(
        np.load(os.path.join(results_folder, "synth_data.npy")))
    lambda0s = np.linspace(1.0, 3000, 30)
    kappa_s = np.linspace(1e-5, 1, 30)

    updatable_gp.train(lambda0s,
                       kappa_s,
                       G,
                       synth_data,
                       out_path=os.path.join(results_folder,
                                             "./train_res_universal.pck"))
n_cells_1d = 50
forward_cutoff = 400 # Only make 200 observations (Fourier and pointwise).
my_problem = ToyFourier2d.build_problem(n_cells_1d, forward_cutoff)

m0 = 0.0
sigma0 = 0.35
lambda0 = 0.23

# Build a simple trend along the x-axis.
coeff_mean = torch.tensor([[1.0]])
coeff_cov = torch.tensor([[1.0]])
coeff_F = torch.from_numpy(my_problem.grid.cells[:, 0]).reshape(-1, 1)

updatable_gp_pt = UniversalUpdatableGP(kernel, lambda0, sigma0,
            torch.tensor(my_problem.grid.cells).float(),
            coeff_F, coeff_cov, coeff_mean,
            n_chunks=200)

# ground_truth = updatable_gp_pt.sample_prior()
# np.save("ground_truth.npy", ground_truth)
ground_truth = np.load("ground_truth.npy")
my_problem.grid.plot_values(ground_truth, cmap='jet')

# Build prior realisations.
reals = []
for i in range(200):
    print(i)
    sample = updatable_gp_pt.sample_prior()
    reals.append(sample)

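# The prior realisations are not used further in this excerpt. One possible
# use (an assumption): stack them and inspect the empirical prior mean and
# standard deviation per cell, e.g. to sanity-check sigma0. Assumes each
# sample is a torch tensor.
reals_tensor = torch.stack([r.reshape(-1) for r in reals])
empirical_mean = reals_tensor.mean(dim=0)
empirical_std = reals_tensor.std(dim=0)
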
# Pointwise observations.

# Example 5

constant_updatable_gp.update(G_tot, d_tot, data_std=0.01)
post_mean_2 = constant_updatable_gp.mean_vec.cpu().numpy()
plot_basic(my_problem.grid,
           ground_truth.numpy(),
           additional_vals=[post_mean, post_mean_2],
           points=[cell_coords[pts_inds], d_pts],
           outfile=os.path.join(output_folder,
                                "posterior_points_and_fourier.png"))

# Now with universal kriging.
from volcapy.update.universal_kriging import UniversalUpdatableGP
coeff_F = trend.float()
updatable_gp = UniversalUpdatableGP(kernel,
                                    lambda0,
                                    sigma0,
                                    cell_coords,
                                    coeff_F,
                                    coeff_cov="uniform",
                                    coeff_mean="uniform",
                                    n_chunks=200)

# With point data.
updatable_gp.update_uniform(G_pts, d_pts, data_std=0.01)
post_mean_univ = updatable_gp.post_mean.cpu().numpy()
plot_basic(my_problem.grid,
           ground_truth.numpy(),
           additional_vals=[post_mean, post_mean_univ],
           points=[cell_coords[pts_inds], d_pts],
           outfile=os.path.join(output_folder,
                                "posterior_points_universal.png"))

# And now with Fourier.
def main():
    # Load
    G = torch.from_numpy(np.load(os.path.join(
        data_folder, "F_niklas.npy"))).float().detach()
    grid = Grid.load(os.path.join(data_folder, "grid.pickle"))
    volcano_coords = torch.from_numpy(grid.cells).float().detach()
    data_coords = torch.from_numpy(
        np.load(os.path.join(data_folder, "niklas_data_coords.npy"))).float()
    data_values = torch.from_numpy(
        np.load(os.path.join(data_folder, "niklas_data_obs.npy"))).float()
    n_data = G.shape[0]

    # Define GP model.
    data_std = 0.1
    sigma0 = 284.66
    m0 = 2139.1
    lambda0 = 651.58

    # Build trends: constant + planar + cylindrical.
    x0 = volcano_coords[:, 0].mean()  # Volcano center.
    y0 = volcano_coords[:, 1].mean()
    z0 = volcano_coords[:, 2].mean()

    coeff_mean = torch.tensor([m0, 0.0, 0.0]).reshape(-1, 1)
    coeff_cov = torch.tensor([[200.0, 0, 0], [0, 0.05, 0], [0, 0, 0.05]])
    coeff_F = torch.hstack([
        torch.ones(volcano_coords.shape[0], 1),
        planar(volcano_coords,
               x0,
               y0,
               z0,
               phi=torch.tensor([45]),
               theta=torch.tensor([45])).reshape(-1, 1),
        cylindrical(volcano_coords, x0, y0).reshape(-1, 1)
    ])

    # Model with trend.
    updatable_gp = UniversalUpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        volcano_coords,
                                        coeff_F,
                                        coeff_cov,
                                        coeff_mean,
                                        n_chunks=200)

    # Sample artificial volcano and
    # invert data at Niklas points.
    ground_truth, true_trend_coeffs = updatable_gp.sample_prior()
    noise = MultivariateNormal(loc=torch.zeros(n_data),
                               covariance_matrix=data_std**2 *
                               torch.eye(n_data)).rsample()
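    # Note: with covariance data_std**2 * I this draw is equivalent to
    # data_std * torch.randn(n_data), which avoids building the dense
    # n_data x n_data covariance matrix.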
    synth_data = G @ ground_truth + noise
    updatable_gp.update(G, synth_data, data_std)
    np.save("post_mean_universal.npy",
            updatable_gp.mean_vec.detach().cpu().numpy())
    np.save("ground_truth.npy", ground_truth.cpu().numpy())

    # Model that thinks the trend is a constant.
    # Let's be fair and allow it to know the true mean.
    m0_true = true_trend_coeffs[0]
    constant_updatable_gp = UpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        m0_true,
                                        volcano_coords,
                                        n_chunks=200)
    constant_updatable_gp.update(G, synth_data, data_std)
    np.save("post_mean_constant.npy",
            constant_updatable_gp.mean_vec.detach().cpu().numpy())
    np.save("true_trend_coeffs.npy", true_trend_coeffs.detach().cpu().numpy())
    np.save("trend_matrix.npy", coeff_F.detach().cpu().numpy())

# Example 7

def main():
    # Load
    G = torch.from_numpy(np.load(os.path.join(
        data_folder, "F_niklas.npy"))).float().detach()
    grid = Grid.load(os.path.join(data_folder, "grid.pickle"))
    volcano_coords = torch.from_numpy(grid.cells).float().detach()
    data_coords = torch.from_numpy(
        np.load(os.path.join(data_folder, "niklas_data_coords.npy"))).float()
    data_values = torch.from_numpy(
        np.load(os.path.join(data_folder, "niklas_data_obs.npy"))).float()
    n_data = G.shape[0]

    # Define GP model.
    data_std = 0.1
    # sigma0 = 284.66
    sigma0 = 1.0
    m0 = 2139.1
    # lambda0 = 651.58
    lambda0 = 200.0

    # Build trends: constant + cylindrical.
    x0 = volcano_coords[:, 0].mean()  # Volcano center.
    y0 = volcano_coords[:, 1].mean()
    z0 = volcano_coords[:, 2].mean()

    coeff_mean = torch.tensor([m0, 0.01]).reshape(-1, 1).float()
    coeff_cov = torch.tensor([[200.0, 0], [0, 0.05]]).float()
    coeff_F = torch.hstack([
        torch.ones(volcano_coords.shape[0], 1),
        cylindrical(volcano_coords, x0, y0).reshape(-1, 1)
    ]).float()

    # Model with trend.
    updatable_gp = UniversalUpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        volcano_coords,
                                        coeff_F,
                                        coeff_cov,
                                        coeff_mean,
                                        n_chunks=200)

    # Sample artificial log-normal volcano.
    gp_sampler = UpdatableGP(kernel,
                             lambda0,
                             torch.tensor([sigma0]),
                             0,
                             volcano_coords,
                             n_chunks=200)
    """
    # Add trend to generate ground truth.
    # Commented out since we re-use an already nice looking one.
    ground_truth_no_trend = torch.exp(gp_sampler.sample_prior())
    true_trend = coeff_F @ coeff_mean
    ground_truth = ground_truth_no_trend + true_trend
    np.save(os.path.join(results_folder, "ground_truth.npy"), ground_truth.cpu().numpy())
    """
    ground_truth = torch.from_numpy(
        np.load(os.path.join(results_folder, "ground_truth.npy")))

    # Add noise and generate data.
    """
    noise = MultivariateNormal(loc=torch.zeros(n_data), covariance_matrix=data_std**2 * torch.eye(n_data)).rsample().reshape(-1, 1)
    synth_data = G @ ground_truth + noise
    np.save(os.path.join(results_folder, "synth_data.npy"), synth_data.cpu().numpy())
    """
    synth_data = torch.from_numpy(
        np.load(os.path.join(results_folder, "synth_data.npy")))

    # Now train GP model on it.
    constant_updatable_gp = UpdatableGP(kernel,
                                        lambda0,
                                        torch.tensor([sigma0]),
                                        m0,
                                        volcano_coords,
                                        n_chunks=200)

    # Compute log-likelihood.
    updatable_gp.concentrated_NLL(10.0, G, synth_data, kappa_2=0.01)
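    # Sketch of a possible use of the call above (an assumption about usage,
    # based only on the signature shown here, and assuming it returns a scalar
    # negative log-likelihood): scan a small grid of lambda0 / kappa_2 values
    # and keep the minimiser.
    best = None
    for lambda0_cand in np.linspace(10.0, 1000.0, 20):
        for kappa_2_cand in np.linspace(0.001, 0.1, 10):
            nll = updatable_gp.concentrated_NLL(
                    lambda0_cand, G, synth_data, kappa_2=kappa_2_cand)
            if best is None or nll < best[0]:
                best = (nll, lambda0_cand, kappa_2_cand)
    print("Best (NLL, lambda0, kappa_2):", best)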
    """