Example #1
def stuart_example_22():
    """
    Example 2.2 on page 461 in Stuart 2010
    """
    q = 2
    # take first q digits of pi
    g = np.array([int(x) for x in str(np.pi) if x != '.'])[:q]

    u = np.array([0.5])

    beta = 0

    def G(u):
        return g * (u + beta * np.array([u[0]**3]))

    prior_covariance = np.identity(1)
    prior = GaussianDistribution(mean=np.zeros_like(u),
                                 covariance=prior_covariance)

    gamma = 0.5
    noise = GaussianDistribution(mean=np.zeros(q),
                                 covariance=np.identity(q) * gamma**2)

    # missing normalisation
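    # The unnormalised density is
    #     pi(u | y) ∝ exp(-|y - G(u)|^2 / (2 * gamma^2) - u^T C^{-1} u / 2),
    # i.e. Gaussian likelihood times Gaussian prior, which is exactly what the two
    # terms below compute.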
    def posterior(u, y):
        likelihood_term = -0.5 / gamma**2 * np.linalg.norm(y - G(u))**2
        prior_term = -0.5 * np.dot(u, prior.apply_precision(u))
        return np.exp(likelihood_term + prior_term)

    model = SyntheticModel(observation_operator=G,
                           noise=noise,
                           posterior=posterior)

    rng = np.random.default_rng(1)

    data = model.observe(u, rng)
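    # (model.observe presumably draws y = G(u) + eta with eta ~ N(0, gamma^2 I),
    #  matching the noise distribution passed to SyntheticModel above; this is an
    #  assumption about SyntheticModel's interface.)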
    print(f"y = {data}")

    sampler = build_evolution_pCN_sampler(observation_operator=G,
                                          u=u,
                                          data=data,
                                          noise=noise,
                                          prior=prior,
                                          posterior=posterior,
                                          rng=rng)

    u_0 = np.zeros_like(u)
    n_samples = 5000
    samples = sampler.run(u_0=u_0, n_samples=n_samples)

    plt.hist(samples, bins=20, density=True)

    store_figure(f"stuart_example_22q={q}_N={n_samples}")
Example #2
def create_pCNSampler(density):
    mean = np.array([0])
    covariance = np.array([1], ndmin=2)
    prior = GaussianDistribution(mean, covariance)

    potential = AnalyticPotential(posterior=density, prior=prior)

    # Note: beta here is not the delta used by the other proposers; translating
    # between the two would be straightforward (see the sketch after this function).
    proposer = pCNProposer(beta=0.25, prior=prior)
    accepter = pCNAccepter(potential=potential)
    return MCMCSampler(proposer, accepter, np.random.default_rng(1))
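
# A minimal sketch of that translation, assuming the pCN proposal
# v = sqrt(1 - beta^2) * u + beta * xi arises from the usual Crank-Nicolson
# discretisation with step size delta; the correspondence below is the standard
# one and is not implemented anywhere in this project:
def delta_to_beta(delta):
    """Map a random-walk step size delta to the corresponding pCN beta."""
    return np.sqrt(8 * delta) / (2 + delta)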
Example #3
def create_StandardRWSampler(density):
    mean = np.array([0])
    covariance = np.array([1], ndmin=2)
    sqrt_covariance = np.array([1], ndmin=2)
    prior = GaussianDistribution(mean, covariance)
    potential = AnalyticPotential(posterior=density, prior=prior)

    proposer = StandardRWProposer(delta=0.25,
                                  dims=1,
                                  sqrt_covariance=sqrt_covariance)
    accepter = StandardRWAccepter(potential=potential, prior=prior)
    return MCMCSampler(proposer, accepter, np.random.default_rng(1))
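
# Usage sketch for the two factories above, assuming the density they receive is an
# unnormalised density of the one-dimensional state alone, and assuming the
# run(u_0, n_samples, burn_in, sample_interval) signature seen in the other examples:
def toy_density(u):
    x = np.atleast_1d(u)[0]
    # bimodal mixture of two unit-variance Gaussians at +/- 1
    return 0.5 * np.exp(-0.5 * (x - 1.0)**2) + 0.5 * np.exp(-0.5 * (x + 1.0)**2)

pcn_samples = create_pCNSampler(toy_density).run(
    u_0=np.array([0.0]), n_samples=5000, burn_in=0, sample_interval=1)
rw_samples = create_StandardRWSampler(toy_density).run(
    u_0=np.array([0.0]), n_samples=5000, burn_in=0, sample_interval=1)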
Example #4
def main():
    sampler = create_mcmc_sampler()

    samples_full = load_or_compute(
        Settings.filename(), sampler.run,
        (Settings.Sampling.u_0, Settings.Sampling.N, 0, 1))

    samples_full = samples_full.T
    # Add perturbations to the prior means
    for i in range(len(samples_full[0, :])):
        samples_full[:, i] += Settings.Prior.mean

    # Apply burn-in and thinning (sample_interval) after the fact
    samples = samples_full[:, Settings.Sampling.burn_in:]
    samples = samples[:, ::Settings.Sampling.sample_interval]

    # plot densities
    fig, plts = plt.subplots(1, 3, figsize=(20, 10))

    priors = [
        GaussianDistribution(mu, Settings.Prior.std_dev)
        for mu in Settings.Prior.mean
    ]

    intervals = [(-2, 2)] * 3

    plot_info = zip(priors, intervals, Settings.Simulation.IC.ground_truth,
                    Settings.Simulation.IC.names, plts)

    for i, (prior, interval, true_val, name, ax) in enumerate(plot_info):
        ax.hist(samples[i, :], density=True)
        x_range = np.linspace(*interval, num=300)
        ax.plot(x_range, [prior(x) for x in x_range])
        ax.axvline(true_val, c='r')
        ax.set_title(f"Prior and posterior for {name}")
        ax.set(xlabel=name, ylabel="Probability")

    fig.suptitle("Posteriors and priors")
    store_figure(Settings.filename() + "_densities")

    # autocorrelation
    ac = autocorrelation(samples_full, int(Settings.Sampling.N / 10), 10)
    for i in range(3):
        plt.plot(ac[i, :], label=Settings.Simulation.IC.names[i])
    plt.title("Autocorrelation")
    plt.xlabel("Lag")
    plt.legend()
    store_figure(Settings.filename() + "_ac")

    show_chain_evolution(samples_full)
    show_setup()
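
# A minimal sketch of what the autocorrelation helper might compute, assuming the
# signature autocorrelation(samples, max_lag, n_windows=1): the normalised empirical
# autocorrelation of each row up to max_lag, averaged over n_windows disjoint
# windows (mirroring the manual window-averaging in the Lorenz example further
# down); the project's actual helper may differ:
def autocorrelation_sketch(samples, max_lag, n_windows=1):
    n_vars, n = samples.shape
    window = n // n_windows
    max_lag = min(max_lag, window)
    ac = np.zeros((n_vars, max_lag))
    for w in range(n_windows):
        chunk = samples[:, w * window:(w + 1) * window]
        for var in range(n_vars):
            x = chunk[var] - np.mean(chunk[var])
            acov = np.correlate(x, x, mode='full')[window - 1:]
            ac[var, :] += acov[:max_lag] / acov[0]
    return ac / n_windows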
Example #5
    def __init__(self, mean, variance, l):
        self.left = GaussianDistribution(mean=-mean, covariance=variance)
        self.right = GaussianDistribution(mean=mean, covariance=variance)
        self.l = l
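
    # A hedged sketch of a density such a two-component class might expose, assuming
    # (purely for illustration) that l is the mixing weight of the left component and
    # that GaussianDistribution instances are callable as densities, as in the
    # plotting code of the other examples:
    def pdf(self, x):
        return self.l * self.left(x) + (1 - self.l) * self.right(x)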
Example #6
def main():
    rng = np.random.default_rng(1)
    data_dir = "/home/david/fs20/thesis/code/report/data/"

    # Parameters of simulation
    K, J = 6, 4
    sim_length = 20

    # True Theta
    theta = np.array([10, 10, 1, 10])  # F, h, c, b
    r = 0.5  # noise level

    # Characteristics of system
    T = 500
    try:
        Y = np.load(data_dir + f"Y_{K=}_{J=}_{T=}.npy")
        print("Loaded existing simulation results")
    except FileNotFoundError:
        print("Running simulation to generate fake measurements")
        Y = run_lorenz96(K, J, theta, T)
        np.save(data_dir + f"Y_{K=}_{J=}_{T=}", Y)

    print(f"{Y.shape=}")

    moment_function_values = moment_function(Y, K, J)
    moment_function_means = np.mean(moment_function_values, axis=1)
    moment_function_variances = np.var(moment_function_values, axis=1)
    print(f"{moment_function_variances.shape=}")

    noise = GaussianDistribution(mean=np.zeros_like(moment_function_variances),
                                 covariance=r**2 *
                                 np.diag(moment_function_variances))

    prior_means = np.array([12, 8, 9])  # F, h, b
    prior_covariance = np.diag([10, 1, 10])

    # From theory, the prior is always assumed to be centered, so I run the MCMC
    # over perturbations from the given prior means.
    prior = GaussianDistribution(np.zeros_like(prior_means), prior_covariance)
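    # The chain therefore samples perturbations w; the theta components are recovered
    # as prior_means + w after sampling (see the shift applied to `samples` below).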

    observation_operator = LorenzObservationOperator(K, J, sim_length,
                                                     theta[2], prior_means,
                                                     Y[:, -1])

    # don't need huge array anymore
    del Y

    potential = EvolutionPotential(observation_operator, moment_function_means,
                                   noise)

    proposer = pCNProposer(beta=0.5, prior=prior)
    accepter = CountedAccepter(pCNAccepter(potential=potential))

    sampler = MCMCSampler(proposer, accepter, rng)

    u_0 = np.array([-1.9, 1.9, 0.9])  # start close to true theta
    n_samples = 2000
    try:
        samples = np.load(data_dir +
                          f"S_{K=}_{J=}_T={sim_length}_{r=}_{n_samples=}.npy")
        print("Loaded existing sampling results")
    except FileNotFoundError:
        print("Generating samples")
        samples = sampler.run(u_0=u_0,
                              n_samples=n_samples,
                              burn_in=100,
                              sample_interval=1)
        np.save(data_dir + f"S_{K=}_{J=}_T={sim_length}_{r=}_{n_samples=}",
                samples)

    print(f"{samples.shape=}")

    # Transpose to conform with the output shape of solve_ivp. It might be
    # worthwhile to change this in the sampler, but that would break older scripts.
    samples = samples.T

    # Add perturbations to the prior means
    for i in range(len(samples[0, :])):
        samples[:, i] += prior_means

    # Plot densities
    priors = [
        GaussianDistribution(mu, np.sqrt(sigma_sq))
        for mu, sigma_sq in zip(prior_means, np.diag(prior_covariance))
    ]
    intervals = [(-5, 25)] * 3
    names = ["F", "h", "b"]

    fig, plts = plt.subplots(1, 3, figsize=(20, 10))

    plot_info = zip(priors, intervals, [theta[0], theta[1], theta[3]], names,
                    plts)

    for i, (prior, interval, true_val, name, ax) in enumerate(plot_info):
        ax.hist(samples[i, :], density=True)
        x_range = np.linspace(*interval)
        ax.plot(x_range, [prior(x) for x in x_range])
        ax.axvline(true_val, c='r')
        ax.set_title(f"Prior and posterior for {name}")
        ax.set(xlabel=name, ylabel="Probability")

    fig.suptitle("Posteriors and priors")
    store_figure(f"combined_{K=}_{J=}_T={sim_length}_{r=}")

    # Average the autocorrelation
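    # (estimated over 100 lags on ten disjoint 100-sample windows, skipping the
    #  first 100 samples, then averaged)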
    ac = np.zeros((3, 100))
    n = 10
    for i in range(1, 1 + n):
        for var in range(3):
            ac[var, :] += MCMCSampler.autocorr(samples[var,
                                                       i * 100:(i + 1) * 100])
    ac /= n
    plt.plot(ac[0, :], label="F")
    plt.plot(ac[1, :], label="h")
    plt.plot(ac[2, :], label="b")
    plt.title("Autocorrelation")
    plt.xlabel("Lag")
    plt.legend()
    store_figure(f"lorenz_ac_avg_{K=}_{J=}_T={sim_length}_{r=}")
Example #7
    def get_distribution():
        return GaussianDistribution(Settings.Prior.mean,
                                    Settings.Prior.covariance)
Example #8
    def get_distribution():
        return GaussianDistribution(Settings.Noise.mean,
                                    Settings.Noise.covariance)
Example #9
def main():
    stepsize = PWLinear(0.1, 0.001, Settings.Sampling.burn_in)
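    # PWLinear(0.1, 0.001, burn_in) presumably ramps the step size from 0.1 down to
    # 0.001 over the burn-in phase; see the hedged sketch after this function.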
    steps = [stepsize]
    burn_ins = [Settings.Sampling.burn_in]
    sample_intervals = [Settings.Sampling.sample_interval]

    for step, burn_in, sample_interval in zip(steps, burn_ins,
                                              sample_intervals):
        Settings.Sampling.step = step
        Settings.Sampling.burn_in = burn_in
        Settings.Sampling.sample_interval = sample_interval

        sampler = create_mcmc_sampler()

        samples_full = load_or_compute(
            Settings.filename(), sampler.run,
            (Settings.Sampling.u_0, Settings.Sampling.N, 0, 1))

        samples_full = samples_full.T
        # Add perturbations to the prior means
        for i in range(len(samples_full[0, :])):
            samples_full[:, i] += Settings.Prior.mean

        # samples = clean_samples(samples_full)
        samples = samples_full[:, Settings.Sampling.burn_in:]
        samples = samples[:, ::Settings.Sampling.sample_interval]

        # plot densities
        fig, plts = plt.subplots(1, 3, figsize=(20, 10))

        priors = [
            GaussianDistribution(mu, Settings.Prior.std_dev)
            for mu in Settings.Prior.mean
        ]

        intervals = [(-2, 2)] * 3

        plot_info = zip(priors, intervals, Settings.Simulation.IC.ground_truth,
                        Settings.Simulation.IC.names, plts)

        for i, (prior, interval, true_val, name, ax) in enumerate(plot_info):
            ax.hist(samples[i, :], density=True)
            x_range = np.linspace(*interval, num=300)
            ax.plot(x_range, [prior(x) for x in x_range])
            ax.axvline(true_val, c='r')
            ax.set_title(f"Prior and posterior for {name}")
            ax.set(xlabel=name, ylabel="Probability")

        fig.suptitle("Posteriors and priors")
        store_figure(Settings.filename() + "_densities")

        # autocorrelation
        # samples_burned = samples_full[:, len_burn_in(samples_full):]
        ac = autocorrelation(samples_full[:, Settings.Sampling.burn_in:], 100)
        for i in range(3):
            plt.plot(ac[i, :], label=Settings.Simulation.IC.names[i])

        plt.axhline(0, color='k', alpha=0.5)

        plt.title("Autocorrelation")
        plt.xlabel("Lag")
        plt.legend()
        store_figure(Settings.filename() + "_ac")

        show_chain_evolution_and_step(samples_full)
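
# A minimal sketch of what PWLinear might compute, assuming (as suggested by its
# constructor arguments above) that it interpolates the step size linearly from the
# first value down to the second over n_steps iterations and stays constant
# afterwards; this is an assumption, not the project's actual implementation:
class PWLinearSketch:
    def __init__(self, start, end, n_steps):
        self.start, self.end, self.n_steps = start, end, n_steps

    def __call__(self, i):
        """Step size to use at iteration i."""
        if i >= self.n_steps:
            return self.end
        return self.start + (self.end - self.start) * i / self.n_steps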