# NOTE(review): the next two statements are the tail of a function whose
# `def` line (presumably `traces()`, called from __main__ below) was lost
# when this chunk was extracted — confirm against the full file. At module
# level they would run at import time, which is almost certainly not intended.
plt.savefig("sig_traces.pdf")
plt.clf()


def logprobs():
    """Plot the first 500 log-probability values and save to logprobs.pdf."""
    plt.plot(lprobs[:500])
    plt.savefig("logprobs.pdf")
    plt.clf()


def kdes():
    """Save Gaussian-KDE plots of the post-burn-in mu and sigma draws.

    Reads the module-level ``draws`` array produced by ``metropolis``;
    the first 50 samples are discarded as burn-in.
    """
    # KDE of the mu samples (column 0), evaluated on a grid padded by 1.
    mu_kernel = gaussian_kde(draws[50:, 0])
    x_min = min(draws[50:, 0]) - 1
    x_max = max(draws[50:, 0]) + 1
    x = np.arange(x_min, x_max, step=0.1)
    plt.plot(x, mu_kernel(x))
    plt.savefig("mu_kernel.pdf")
    plt.clf()

    # KDE of the sigma samples (column 1) on a fixed [20, 200) grid.
    sig_kernel = gaussian_kde(draws[50:, 1])
    x_min = 20
    x_max = 200
    x = np.arange(x_min, x_max, step=0.1)
    plt.plot(x, sig_kernel(x))
    plt.savefig("sig_kernel.pdf")
    plt.clf()


if __name__ == "__main__":
    # Run the sampler, then emit all diagnostic figures.
    draws, lprobs, r = metropolis(np.array([40, 10], dtype=float), 20., 10000)
    traces()
    logprobs()
    kdes()
def logprobs():
    """Plot the first 500 log-probability values and save to logprobs.pdf."""
    plt.plot(lprobs[:500])
    plt.savefig("logprobs.pdf")
    plt.clf()


def kdes():
    """Save Gaussian-KDE plots of the post-burn-in mu and sigma draws.

    Reads the module-level ``draws`` array produced by ``metropolis``;
    the first 50 samples are discarded as burn-in.
    """
    # KDE of the mu samples (column 0), evaluated on a grid padded by 1.
    mu_kernel = gaussian_kde(draws[50:, 0])
    x_min = min(draws[50:, 0]) - 1
    x_max = max(draws[50:, 0]) + 1
    x = np.arange(x_min, x_max, step=0.1)
    plt.plot(x, mu_kernel(x))
    plt.savefig("mu_kernel.pdf")
    plt.clf()

    # KDE of the sigma samples (column 1) on a fixed [20, 200) grid.
    sig_kernel = gaussian_kde(draws[50:, 1])
    x_min = 20
    x_max = 200
    x = np.arange(x_min, x_max, step=0.1)
    plt.plot(x, sig_kernel(x))
    plt.savefig("sig_kernel.pdf")
    plt.clf()


if __name__ == "__main__":
    # Run the sampler, then emit all diagnostic figures.
    draws, lprobs, r = metropolis(np.array([40, 10], dtype=float), 20., 10000)
    traces()
    logprobs()
    kdes()
# Drop any x == 0 samples (presumably to avoid a singularity in calcPred —
# TODO confirm against calcPred's definition).
i = np.where(x == 0)[0]
if i.size > 0:
    x = np.delete(x, i)

# Creation of noise free synthetic data
pred = calcPred(mtarget, x)  # Unnormalized noise free data

# Noisy data
data = pred + sigdata * np.random.randn(pred.size)  # Noisy data
data_dict = {'x': x, 'data': data, 'sigma': sigdata}

## Run Metropolis
start_time = time.time()
M, LLK, accepted = metropolis(n_samples, calcLLK, verify, data_dict, m_ini,
                              prior_bounds, prop_cov)
run_time = time.time() - start_time

# Mean/STD (posterior statistics after discarding n_burn burn-in samples)
M_mean = M[n_burn:, :].mean(axis=0)
M_std = M[n_burn:, :].std(axis=0)

## Output display & figures
# Print information
print("--- %s seconds ---" % (run_time))
print("Acceptance rate : %f" % (float(accepted) / n_samples))
print("Posterior mean : %f , %f , %f" % (M_mean[0], M_mean[1], M_mean[2]))
print("2-sigma error : %f , %f , %f" % (2 * M_std[0], 2 * M_std[1], 2 * M_std[2]))
# Drop any x == 0 samples (presumably to avoid a singularity in calcPred —
# TODO confirm against calcPred's definition).
i = np.where(x == 0)[0]
if i.size > 0:
    x = np.delete(x, i)

# Creation of noise free synthetic data
pred = calcPred(mtarget, x)  # Unnormalized noise free data

# Noisy data
data = pred + sigdata * np.random.randn(pred.size)  # Noisy data
data_dict = {"x": x, "data": data, "sigma": sigdata}

## Run Metropolis
start_time = time.time()
M, LLK, accepted = metropolis(n_samples, calcLLK, verify, data_dict, m_ini,
                              prior_bounds, prop_cov)
run_time = time.time() - start_time

# Mean/STD (posterior statistics after discarding n_burn burn-in samples)
M_mean = M[n_burn:, :].mean(axis=0)
M_std = M[n_burn:, :].std(axis=0)

## Output display & figures
# Print information
print("--- %s seconds ---" % (run_time))
print("Acceptance rate : %f" % (float(accepted) / n_samples))
print("Posterior mean : %f , %f , %f" % (M_mean[0], M_mean[1], M_mean[2]))
print("2-sigma error : %f , %f , %f" % (2 * M_std[0], 2 * M_std[1], 2 * M_std[2]))
def find_g_from_A(A, N, L, g_min, g_max):
    """Estimate the conductance vector g that best explains observations A.

    Starts from a flat vector of g_min with one entry per edge of an
    N x L grid (N*(L-1) + L*(N-1) edges) and refines it with the
    ``metropolis`` sampler, minimising ``norm(A, x)``.

    Parameters:
        A: observed data passed through to ``norm``.
        N, L: grid dimensions fixing the number of conductances.
        g_min, g_max: bounds on each conductance value.

    Returns:
        The conductance vector produced by ``metropolis``.
    """
    # PEP 8 (E731): named nested def instead of a lambda bound to a name.
    def objective(x):
        return norm(A, x)

    # One conductance per horizontal and vertical grid edge.
    g = [g_min] * (N * (L - 1) + L * (N - 1))
    g = metropolis(objective, g, g_min, g_max, shake_simple)
    return g
def test_main():
    """Smoke-test: run ``metropolis`` on ``pythagoras`` and print the result.

    Samples a 3-vector starting from [1, 1, 1] within bounds [-1, 1].
    """
    xl = -1.0
    xh = 1.0
    x = [1.0, 1.0, 1.0]
    # NOTE(review): T (temperature?) is assigned but never used here —
    # confirm whether metropolis was meant to receive it.
    T = 1.0
    # Python 2 `print expr` replaced with the call form, valid in both 2 and 3.
    print(metropolis(pythagoras, x, xl, xh, shake_simple))