def construct_data_dict(hyperp, options, filepaths):
    """Load train/test data and assemble it into a single dictionary.

    Parameters
    ----------
    hyperp, options, filepaths : project configuration objects (opaque here);
        `options` must carry obs_type, parameter_dimensions, num_obs_points,
        num_time_steps and add_noise.

    Returns
    -------
    dict with keys: obs_dimensions, obs_indices, parameter_train,
    state_obs_train, parameter_test, state_obs_test,
    noise_regularization_matrix.

    Raises
    ------
    ValueError
        If ``options.obs_type`` is neither 'full' nor 'obs'.
    """
    #=== Load Observation Indices ===#
    # Fix: the original used two independent `if`s, so an unrecognized
    # obs_type left obs_dimensions/obs_indices unbound and crashed with a
    # NameError further down. Fail fast with a clear message instead.
    if options.obs_type == 'full':
        obs_dimensions = options.parameter_dimensions
        obs_indices = []
    elif options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points * options.num_time_steps
        print('Loading Boundary Indices')
        df_obs_indices = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = df_obs_indices.to_numpy()
    else:
        raise ValueError(
            "Unrecognized options.obs_type: %r (expected 'full' or 'obs')"
            % (options.obs_type,))

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       obs_indices,
                       options.parameter_dimensions, obs_dimensions,
                       options.parameter_dimensions)
    data.load_data_train()
    data.load_data_test()
    if options.add_noise == True:
        data.add_noise_qoi_train()
        data.add_noise_qoi_test()
        noise_regularization_matrix =\
                data.construct_noise_regularization_matrix_train()
        # Leading singleton axis so the matrix broadcasts over samples.
        noise_regularization_matrix = np.expand_dims(
            noise_regularization_matrix, axis=0)
    else:
        # No noise added: unit weights of matching width.
        noise_regularization_matrix = np.ones((1, obs_dimensions),
                                              dtype=np.float32)

    #=== Construct Dictionary ===#
    data_dict = {
        "obs_dimensions": obs_dimensions,
        "obs_indices": obs_indices,
        "parameter_train": data.poi_train,
        "state_obs_train": data.qoi_train,
        "parameter_test": data.poi_test,
        "state_obs_test": data.qoi_test,
        "noise_regularization_matrix": noise_regularization_matrix,
    }

    return data_dict
def predict_and_plot(hyperp, options, filepaths):
    """Assemble training-history movies for a fixed test sample.

    The per-epoch prediction/plotting loop is disabled (kept below as
    comments); only the movie assembly from previously saved figures runs.
    """
    #=== Mesh Properties ===#
    options.mesh_point_1 = [-1, -1]
    options.mesh_point_2 = [1, 1]
    # options.nx = 15
    # options.ny = 15
    # options.nx = 30
    # options.ny = 30
    options.nx = 50
    options.ny = 50
    options.num_obs_points = 10
    options.order_fe_space = 1
    options.order_meta_space = 1
    options.num_nodes = (options.nx + 1) * (options.ny + 1)

    #=== Construct Mesh ===#
    fe_space, meta_space, nodes, dof_fe, dof_meta = construct_mesh(options)

    #=== Load Observation Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.parameter_dimensions
    if options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        obs_indices_df = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = obs_indices_df.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       options.parameter_dimensions, obs_dimensions)
    data.load_data_test()
    if options.add_noise == 1:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    # Epochs at which figures were saved during training.
    epoch_list = np.arange(0, 320, 5)
    # for epoch in epoch_list:
    #     #=== Load Trained Neural Network ===#
    #     nn = VAE(hyperp, options,
    #              input_dimensions, latent_dimensions,
    #              None, None,
    #              positivity_constraint_log_exp)
    #     nn.load_weights(filepaths.directory_trained_nn + '_%d'%(epoch) + '/' +
    #                     filepaths.nn_name)
    #     #=== Selecting Samples ===#
    #     sample_number = 128
    #     parameter_test_sample = np.expand_dims(parameter_test[sample_number,:], 0)
    #     state_obs_test_sample = np.expand_dims(state_obs_test[sample_number,:], 0)
    #     #=== Predictions ===#
    #     posterior_mean_pred, posterior_cov_pred = nn.encoder(state_obs_test_sample)
    #     posterior_pred_draw = nn.reparameterize(posterior_mean_pred, posterior_cov_pred)
    #     posterior_mean_pred = posterior_mean_pred.numpy().flatten()
    #     posterior_cov_pred = posterior_cov_pred.numpy().flatten()
    #     posterior_pred_draw = posterior_pred_draw.numpy().flatten()
    #     if options.model_aware == 1:
    #         state_obs_pred_draw = nn.decoder(np.expand_dims(posterior_pred_draw, 0))
    #         state_obs_pred_draw = state_obs_pred_draw.numpy().flatten()
    #     #=== Plotting Prediction ===#
    #     print('================================')
    #     print(' Plotting Predictions ')
    #     print('================================')
    #     #=== Plot FEM Functions ===#
    #     cross_section_y = 0.0
    #     filename_extension = '_%d_%d.png'%(sample_number,epoch)
    #     plot_fem_function_fenics_2d(meta_space, posterior_mean_pred,
    #                                 cross_section_y,
    #                                 '',
    #                                 filepaths.figure_posterior_mean + filename_extension,
    #                                 (5,5), (0,6),
    #                                 True)
    #     #=== Plot Cross-Section with Error Bounds ===#
    #     plot_cross_section(meta_space,
    #                        parameter_test_sample, posterior_mean_pred, posterior_cov_pred,
    #                        (-1,1), cross_section_y,
    #                        '',
    #                        filepaths.figure_parameter_cross_section + filename_extension,
    #                        (1.5,5.5))
    #     print('Predictions plotted')

    #=== Make Movie ===#
    sample_number = 128
    make_movie(filepaths.figure_posterior_mean + '_%d' % (sample_number),
               filepaths.directory_movie, 'posterior_mean',
               2, 0, len(epoch_list))
    make_movie(filepaths.figure_parameter_cross_section + '_%d' % (sample_number),
               filepaths.directory_movie, 'parameter_cross_section',
               2, 0, len(epoch_list))
def predict_and_plot(hyperp, options, filepaths):
    """Run the trained VAE on one test sample and plot FEM functions."""
    #=== Load Observation Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.parameter_dimensions
    if options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        obs_indices_df = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = obs_indices_df.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       options.parameter_dimensions, obs_dimensions)
    data.load_data_test()
    if options.add_noise == 1:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Load Trained Neural Network ===#
    nn = VAE(hyperp, options,
             input_dimensions, latent_dimensions,
             None, None,
             positivity_constraint_log_exp)
    nn.load_weights(filepaths.trained_nn)

    #=== Selecting Samples ===#
    sample_number = 105
    parameter_test_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_test_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Predictions ===#
    # NOTE(review): the decoder is fed the TRUE parameter sample, not the
    # encoder's prediction — possibly intentional (state from true
    # parameter), but worth confirming against the other variants.
    parameter_pred_sample, _ = nn.encoder(state_obs_test_sample)
    state_obs_pred_sample = nn.decoder(parameter_test_sample)
    parameter_pred_sample = parameter_pred_sample.numpy().flatten()
    state_obs_pred_sample = state_obs_pred_sample.numpy().flatten()

    #=== Plotting Prediction ===#
    print('================================')
    print(' Plotting Predictions ')
    print('================================')

    #=== Load Mesh ===#
    nodes, elements, _, _, _, _, _, _ = load_mesh(filepaths.project)

    #=== Plot FEM Functions ===#
    plot_fem_function(filepaths.figure_parameter_test, 'True Parameter',
                      7.0, nodes, elements, parameter_test_sample)
    plot_fem_function(filepaths.figure_parameter_pred, 'Parameter Prediction',
                      7.0, nodes, elements, parameter_pred_sample)
    if options.obs_type == 'full':
        plot_fem_function(filepaths.figure_state_test, 'True State',
                          2.6, nodes, elements, state_obs_test_sample)
        plot_fem_function(filepaths.figure_state_pred, 'State Prediction',
                          2.6, nodes, elements, state_obs_pred_sample)

    print('Predictions plotted')
def predict_and_plot(hyperp, options, filepaths):
    """Save a chosen test sample to CSV and assemble prediction movies.

    The per-draw prediction/plotting loop is disabled (kept below as
    comments); the movies are assembled from figures saved by earlier runs.
    """
    #=== Mesh Properties ===#
    options.mesh_point_1 = [-1, -1]
    options.mesh_point_2 = [1, 1]
    # options.nx = 15
    # options.ny = 15
    options.nx = 50
    options.ny = 50
    options.num_obs_points = 10
    options.order_fe_space = 1
    options.order_meta_space = 1
    options.num_nodes = (options.nx + 1) * (options.ny + 1)

    #=== Construct Mesh ===#
    fe_space, meta_space, nodes, dof_fe, dof_meta = construct_mesh(options)

    #=== Load Observation Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.parameter_dimensions
    if options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        obs_indices_df = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = obs_indices_df.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       obs_indices,
                       options.parameter_dimensions, obs_dimensions,
                       options.parameter_dimensions)
    data.load_data_test()
    if options.add_noise == True:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Load Trained Neural Network ===#
    nn = VAE(hyperp, options,
             input_dimensions, latent_dimensions,
             None, None,
             positivity_constraint_log_exp)
    nn.load_weights(filepaths.trained_nn)

    #=== Selecting Samples ===#
    # sample_number = 1
    sample_number = 128
    parameter_test_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_test_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Saving Specific Sample ===#
    poi_specific_df = pd.DataFrame(
        {'poi_specific': parameter_test_sample.flatten()})
    poi_specific_df.to_csv(filepaths.poi_specific + '.csv', index=False)
    qoi_specific_df = pd.DataFrame(
        {'qoi_specific': state_obs_test_sample.flatten()})
    qoi_specific_df.to_csv(filepaths.qoi_specific + '.csv', index=False)

    ##=== Predictions ===#
    num_draws = 20
    # for draw in range(0,num_draws):
    #     start_time_nn = time.time()
    #     posterior_mean_pred, posterior_cov_pred = nn.encoder(state_obs_test_sample)
    #     elapsed_time_nn = time.time() - start_time_nn
    #     print('Time taken for neural network inference: %.4f' %(elapsed_time_nn))
    #     posterior_pred_draw = nn.reparameterize(posterior_mean_pred, posterior_cov_pred)
    #     posterior_mean_pred = posterior_mean_pred.numpy().flatten()
    #     posterior_cov_pred = posterior_cov_pred.numpy().flatten()
    #     posterior_pred_draw = posterior_pred_draw.numpy().flatten()
    #     if options.model_aware == 1:
    #         state_obs_pred_draw = nn.decoder(np.expand_dims(posterior_pred_draw, 0))
    #         state_obs_pred_draw = state_obs_pred_draw.numpy().flatten()
    #     #=== Plotting Prediction ===#
    #     print('================================')
    #     print(' Plotting Predictions ')
    #     print('================================')
    #     #=== Plot FEM Functions ===#
    #     # cross_section_y = 0.5
    #     cross_section_y = 0.0
    #     plot_parameter_min = 0
    #     plot_parameter_max = 6
    #     plot_variance_min = 0
    #     plot_variance_max = 1.3
    #     filename_extension = '_%d.png'%(sample_number)
    #     filename_extension_draw = '_%d_%d.png'%(sample_number,draw)
    #     plot_fem_function_fenics_2d(meta_space, parameter_test_sample,
    #                                 cross_section_y, '',
    #                                 filepaths.figure_parameter_test + filename_extension,
    #                                 (5,5), (plot_parameter_min,plot_parameter_max),
    #                                 True)
    #     plot_fem_function_fenics_2d(meta_space, posterior_mean_pred,
    #                                 cross_section_y, '',
    #                                 filepaths.figure_posterior_mean + filename_extension,
    #                                 (5,5), (plot_parameter_min,plot_parameter_max),
    #                                 False)
    #     plot_fem_function_fenics_2d(meta_space, posterior_pred_draw,
    #                                 cross_section_y, '',
    #                                 filepaths.figure_parameter_pred + filename_extension_draw,
    #                                 (5,5), (plot_parameter_min,plot_parameter_max),
    #                                 True)
    #     if options.obs_type == 'full':
    #         plot_fem_function_fenics_2d(meta_space, state_obs_test_sample,
    #                                     cross_section_y, 'True State',
    #                                     filepaths.figure_state_test + filename_extension,
    #                                     (5,5))
    #         plot_fem_function_fenics_2d(meta_space, state_obs_pred_draw,
    #                                     cross_section_y, 'State Prediction',
    #                                     filepaths.figure_state_pred + filename_extension,
    #                                     (5,5))
    #     #=== Plot Cross-Section with Error Bounds ===#
    #     plot_cross_section(meta_space,
    #                        parameter_test_sample, posterior_mean_pred, posterior_cov_pred,
    #                        (-1,1), cross_section_y, '',
    #                        filepaths.figure_parameter_cross_section + filename_extension,
    #                        (plot_parameter_min,plot_parameter_max))
    #     plot_cross_section(meta_space,
    #                        parameter_test_sample, posterior_pred_draw, posterior_cov_pred,
    #                        (-1,1), cross_section_y, '',
    #                        filepaths.figure_parameter_cross_section + filename_extension_draw,
    #                        (plot_parameter_min,plot_parameter_max))
    #     #=== Plot Variation ===#
    #     plot_fem_function_fenics_2d(meta_space, np.exp(posterior_cov_pred),
    #                                 cross_section_y, '',
    #                                 filepaths.figure_posterior_covariance + filename_extension,
    #                                 (5,5), (plot_variance_min,plot_variance_max),
    #                                 False)
    #     print('Predictions plotted')

    #=== Make Movie ===#
    sample_number = 128
    make_movie(filepaths.figure_parameter_pred + '_%d' % (sample_number),
               filepaths.directory_movie, 'parameter_pred',
               2, 0, num_draws)
    make_movie(filepaths.figure_parameter_cross_section + '_%d' % (sample_number),
               filepaths.directory_movie, 'parameter_cross_section',
               2, 0, num_draws)
    combine_movies(filepaths.directory_movie + '/parameter_pred',
                   filepaths.directory_movie + '/parameter_cross_section',
                   filepaths.directory_movie,
                   'parameter_pred_and_parameter_cross_section')
def predict_and_plot(hyperp, options, filepaths):
    """Draw posterior samples, plot marginal histograms, and compare the
    predicted posterior covariance/mean against the analytic (linear-Gaussian)
    posterior built from the forward operator and the prior.
    """
    #=== Load Observation Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.mesh_dimensions
        obs_indices = []
    if options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        obs_indices_df = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = obs_indices_df.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       obs_indices,
                       options.parameter_dimensions, obs_dimensions,
                       options.mesh_dimensions)
    data.load_data_test()
    if options.add_noise == 1:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Load Trained Neural Network ===#
    nn = VAE(hyperp, options,
             input_dimensions, latent_dimensions,
             None, None,
             tf.identity)
    nn.load_weights(filepaths.trained_nn)

    #=== Construct Forward Model ===#
    forward_operator = load_forward_operator_tf(options, filepaths)
    forward_model =\
            SolveForward1D(options, filepaths, forward_operator, obs_indices)
    if options.discrete_polynomial == True:
        forward_model_solve = forward_model.discrete_polynomial
    if options.discrete_exponential == True:
        forward_model_solve = forward_model.discrete_exponential

    #=== Selecting Samples ===#
    sample_number = 4
    parameter_test_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_test_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Predictions ===#
    post_mean_pred, log_post_std_pred, post_cov_chol_pred = nn.encoder(
        state_obs_test_sample)
    n_samples = 1000
    posterior_pred_draws = np.zeros((n_samples, post_mean_pred.shape[1]),
                                    dtype=np.float32)
    state_obs_pred_draws = np.zeros((n_samples, state_obs_test_sample.shape[1]),
                                    dtype=np.float32)
    for draw in range(0, n_samples):
        posterior_pred_draws[draw, :] = nn.reparameterize(post_mean_pred,
                                                          post_cov_chol_pred)
    if options.model_aware == True:
        state_obs_pred_draws = nn.decoder(posterior_pred_draws)
    else:
        state_obs_pred_draws = forward_model_solve(posterior_pred_draws)

    #=== Plotting Prediction ===#
    print('================================')
    print(' Plotting Predictions ')
    print('================================')
    n_bins = 100
    for n in range(0, post_mean_pred.shape[1]):
        #=== Posterior Histogram ===#
        plt.hist(posterior_pred_draws[:, n], density=True,
                 range=[3, 5], bins=n_bins)
        #=== True Parameter Value ===#
        plt.axvline(parameter_test_sample[0, n],
                    color='r', linestyle='dashed', linewidth=3,
                    label="True Parameter Value")
        #=== Predicted Posterior Mean ===#
        plt.axvline(post_mean_pred[0, n],
                    color='b', linestyle='dashed', linewidth=1,
                    label="Predicted Posterior Mean")
        #=== Probability Density Function ===#
        mn, mx = plt.xlim()
        plt.xlim(mn, mx)
        kde_xs = np.linspace(mn, mx, 301)
        kde = st.gaussian_kde(posterior_pred_draws[:, n])
        #=== Title and Labels ===#
        plt.plot(kde_xs, kde.pdf(kde_xs))
        plt.legend(loc="upper left")
        plt.ylabel('Probability')
        plt.xlabel('Parameter Value')
        plt.title("Marginal Posterior Parameter_%d" % (n))
        #=== Save and Close Figure ===#
        plt.savefig(filepaths.figure_parameter_pred + '_%d' % (n))
        plt.close()
    print('Predictions plotted')

    ###########################################################################
    #                             Compare Covariance                          #
    ###########################################################################
    #=== Construct Likelihood Matrix ===#
    if options.add_noise == True:
        noise_regularization_matrix =\
                data.construct_noise_regularization_matrix_test()
        noise_regularization_matrix = np.expand_dims(
            noise_regularization_matrix, axis=0)
    else:
        noise_regularization_matrix = np.ones((1, obs_dimensions),
                                              dtype=np.float32)
    measurement_matrix = data.construct_measurement_matrix()
    # (BF)^T diag(w) (BF) where B is the measurement matrix, F the forward
    # operator and w the noise regularization weights.
    likelihood_matrix = tf.linalg.matmul(
        tf.transpose(tf.linalg.matmul(measurement_matrix, forward_operator)),
        tf.linalg.matmul(
            tf.linalg.diag(tf.squeeze(noise_regularization_matrix)),
            tf.linalg.matmul(measurement_matrix, forward_operator)))

    #=== Construct Inverse of Prior Matrix ===#
    prior = PriorHandler(hyperp, options, filepaths,
                         options.parameter_dimensions)
    prior_mean = prior.load_prior_mean()
    prior_cov_inv = prior.load_prior_covariance_inverse()

    #=== Construct True Posterior ===#
    post_cov_true = np.linalg.inv(likelihood_matrix + prior_cov_inv)
    # Predicted covariance L L^T from the encoder's Cholesky factor.
    chol = np.reshape(post_cov_chol_pred,
                      (options.parameter_dimensions,
                       options.parameter_dimensions))
    post_cov_pred = np.matmul(chol, np.transpose(chol))

    #=== Construct True Mean ===#
    post_mean_true = np.matmul(post_cov_true,
        np.matmul(
            np.transpose(np.matmul(measurement_matrix, forward_operator)),
            np.matmul(np.diag(noise_regularization_matrix.flatten()),
                      np.transpose(state_obs_test_sample))) +\
        np.matmul(prior_cov_inv, np.expand_dims(prior_mean, axis=1)))

    #=== Relative Error of Matrices ===#
    relative_error = np.linalg.norm(post_cov_true - post_cov_pred, ord='fro')\
            /np.linalg.norm(post_cov_true, ord='fro')
    print('relative error = %.4f' % (relative_error))
def predict_and_plot(hyperp, options, filepaths):
    """Draw one posterior sample for a test case and plot FEM functions."""
    #=== Mesh Properties ===#
    options.hole_single_circle = False
    options.hole_two_rectangles = True
    options.discretization_domain = 17
    options.domain_length = 1
    options.domain_width = 1
    options.rect_1_point_1 = [0.25, 0.15]
    options.rect_1_point_2 = [0.5, 0.4]
    options.rect_2_point_1 = [0.6, 0.6]
    options.rect_2_point_2 = [0.75, 0.85]

    #=== Construct Mesh ===#
    Vh, nodes, dof = construct_mesh(options)

    #=== Load Observation Indices ===#
    obs_dimensions = options.num_obs_points * options.num_time_steps
    print('Loading Boundary Indices')
    obs_indices_df = pd.read_csv(filepaths.project.obs_indices + '.csv')
    obs_indices = obs_indices_df.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       options.parameter_dimensions, obs_dimensions)
    # data.load_data_specific()
    # if options.add_noise == 1:
    #     data.add_noise_qoi_specific()
    # parameter_test = data.poi_specific
    # state_obs_test = data.qoi_specific
    data.load_data_test()
    if options.add_noise == True:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Load Trained Neural Network ===#
    nn = VAE(hyperp, options,
             input_dimensions, latent_dimensions,
             None, None,
             positivity_constraint_log_exp)
    nn.load_weights(filepaths.trained_nn)

    #=== Selecting Samples ===#
    sample_number = 15
    parameter_test_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_test_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Predictions ===#
    posterior_mean_pred, posterior_cov_pred = nn.encoder(state_obs_test_sample)
    posterior_pred_draw = nn.reparameterize(posterior_mean_pred,
                                            posterior_cov_pred)
    posterior_mean_pred = posterior_mean_pred.numpy().flatten()
    posterior_cov_pred = posterior_cov_pred.numpy().flatten()
    posterior_pred_draw = posterior_pred_draw.numpy().flatten()
    if options.model_aware == 1:
        state_obs_pred_draw = nn.decoder(
            np.expand_dims(posterior_pred_draw, 0))
        state_obs_pred_draw = state_obs_pred_draw.numpy().flatten()

    #=== Plotting Prediction ===#
    print('================================')
    print(' Plotting Predictions ')
    print('================================')

    #=== Plot FEM Functions ===#
    # NOTE(review): if obs_type == 'full' while model_aware != 1,
    # state_obs_pred_draw below is unbound (NameError) — confirm the two
    # flags are always set consistently by the caller.
    cross_section_y = 0.8
    filename_extension = '_%d.png' % (sample_number)
    plot_fem_function_fenics_2d(
        Vh, parameter_test_sample, cross_section_y, '',
        filepaths.figure_parameter_test + filename_extension,
        (5, 5), (0, 5), False)
    plot_fem_function_fenics_2d(
        Vh, posterior_mean_pred, cross_section_y, '',
        filepaths.figure_posterior_mean + filename_extension,
        (5, 5), (0, 5), True)
    plot_fem_function_fenics_2d(
        Vh, posterior_pred_draw, cross_section_y, '',
        filepaths.figure_parameter_pred + filename_extension,
        (5, 5), (0, 5), True)
    if options.obs_type == 'full':
        plot_fem_function_fenics_2d(
            Vh, state_obs_test_sample, cross_section_y, 'True State',
            filepaths.figure_state_test + filename_extension, (5, 5))
        plot_fem_function_fenics_2d(
            Vh, state_obs_pred_draw, cross_section_y, 'State Prediction',
            filepaths.figure_state_pred + filename_extension, (5, 5))

    #=== Plot Cross-Section with Error Bounds ===#
    plot_cross_section(
        Vh, parameter_test_sample, posterior_mean_pred, posterior_cov_pred,
        (0, 1), cross_section_y, '',
        filepaths.figure_parameter_cross_section + filename_extension, (0, 5))

    print('Predictions plotted')
def predict_and_plot(hyperp, options, filepaths):
    """Draw posterior samples with the trained VAE and plot their marginal
    histograms (with KDE overlay) per parameter component.

    Parameters
    ----------
    hyperp, options, filepaths : project configuration objects (opaque here).

    Raises
    ------
    RuntimeError
        If the model-constrained path is taken (``options.model_aware``
        falsy) but no forward model was constructed.
    """
    #=== Load Observation Indices ===#
    if options.obs_type == 'full':
        obs_dimensions = options.mesh_dimensions
        obs_indices = []
    if options.obs_type == 'obs':
        obs_dimensions = options.num_obs_points
        print('Loading Boundary Indices')
        df_obs_indices = pd.read_csv(filepaths.project.obs_indices + '.csv')
        obs_indices = df_obs_indices.to_numpy()

    #=== Data and Latent Dimensions of Autoencoder ===#
    input_dimensions = obs_dimensions
    latent_dimensions = options.parameter_dimensions

    #=== Prepare Data ===#
    data = DataHandler(hyperp, options, filepaths,
                       obs_indices,
                       options.parameter_dimensions, obs_dimensions,
                       options.mesh_dimensions)
    data.load_data_test()
    if options.add_noise == 1:
        data.add_noise_qoi_test()
    parameter_test = data.poi_test
    state_obs_test = data.qoi_test

    #=== Load Trained Neural Network ===#
    nn = VAE(hyperp, options,
             input_dimensions, latent_dimensions,
             None, None,
             tf.identity)
    nn.load_weights(filepaths.trained_nn)

    #=== Construct Forward Model ===#
    # Fix: forward_model_solve was only bound when model_augmented was True,
    # yet it is called below whenever model_aware is falsy — that produced a
    # NameError. Initialize it and fail with a clear message instead.
    forward_model_solve = None
    if options.model_augmented == True:
        forward_operator = load_forward_operator_tf(options, filepaths)
        forward_model =\
                SolveForward1D(options, filepaths, forward_operator, obs_indices)
        if options.discrete_polynomial == True:
            forward_model_solve = forward_model.discrete_polynomial
        if options.discrete_exponential == True:
            forward_model_solve = forward_model.discrete_exponential

    #=== Selecting Samples ===#
    sample_number = 105
    parameter_test_sample = np.expand_dims(parameter_test[sample_number, :], 0)
    state_obs_test_sample = np.expand_dims(state_obs_test[sample_number, :], 0)

    #=== Predictions ===#
    post_mean_pred, log_post_std_pred, post_cov_chol_pred = nn.encoder(
        state_obs_test_sample)
    n_samples = 1000
    posterior_pred_draws = np.zeros((n_samples, post_mean_pred.shape[1]),
                                    dtype=np.float32)
    state_obs_pred_draws = np.zeros((n_samples, state_obs_test_sample.shape[1]),
                                    dtype=np.float32)
    for n in range(0, n_samples):
        posterior_pred_draws[n, :] = nn.reparameterize(post_mean_pred,
                                                       post_cov_chol_pred)
    if options.model_aware == True:
        state_obs_pred_draws = nn.decoder(posterior_pred_draws)
    else:
        if forward_model_solve is None:
            raise RuntimeError(
                'model_aware is off but no forward model was constructed: '
                'enable model_augmented with discrete_polynomial or '
                'discrete_exponential')
        state_obs_pred_draws = forward_model_solve(posterior_pred_draws)

    #=== Plotting Prediction ===#
    print('================================')
    print(' Plotting Predictions ')
    print('================================')
    n_bins = 100
    for n in range(0, post_mean_pred.shape[1]):
        #=== Posterior Histogram ===#
        plt.hist(posterior_pred_draws[:, n], density=True,
                 range=[-1, 10], bins=n_bins)
        #=== True Parameter Value ===#
        plt.axvline(parameter_test_sample[0, n],
                    color='r', linestyle='dashed', linewidth=3,
                    label="True Parameter Value")
        #=== Predicted Posterior Mean ===#
        plt.axvline(post_mean_pred[0, n],
                    color='b', linestyle='dashed', linewidth=1,
                    label="Predicted Posterior Mean")
        #=== Probability Density Function ===#
        mn, mx = plt.xlim()
        plt.xlim(mn, mx)
        kde_xs = np.linspace(mn, mx, 301)
        kde = st.gaussian_kde(posterior_pred_draws[:, n])
        #=== Title and Labels ===#
        plt.plot(kde_xs, kde.pdf(kde_xs))
        plt.legend(loc="upper left")
        plt.ylabel('Probability')
        plt.xlabel('Parameter Value')
        plt.title("Marginal Posterior Parameter_%d" % (n))
        #=== Save and Close Figure ===#
        plt.savefig(filepaths.figure_parameter_pred + '_%d' % (n))
        plt.close()
    print('Predictions plotted')