Example No. 1
    def np_diagnostics(self):
        # returns a [num_chains, num_features] array of per-chain diagnostics
        # together with the list of feature (column) names
        feature_names = ["num_restarts", "num_divergent", "hit_max_tree_depth",
                         "ave_num_transitions", "total_num_transitions", "bfmi", "lp_ess",
                         "lp_rhat", "difficulty", "num_chains_removed", "min_ess",
                         "median_ess", "percent_rhat"]

        self.remove_failed_chains()
        out = self.get_diagnostics(permuted=False)
        num_restarts = self.metadata.num_restarts
        num_chains_removed = self.metadata.num_chains_removed
        if self.tune_dict["dynamic"]:
            processed_diag = process_diagnostics(out, name_list=["hit_max_tree_depth"])
            hit_max_tree_depth = numpy.squeeze(processed_diag.sum(axis=1))
        else:
            hit_max_tree_depth = 0
        processed_diag = process_diagnostics(out, name_list=["divergent"])
        num_divergent = numpy.squeeze(processed_diag.sum(axis=1))

        processed_diag = process_diagnostics(out, name_list=["num_transitions"])
        total_num_transitions = numpy.sum(processed_diag)
        ave_num_transitions = numpy.squeeze(processed_diag.mean(axis=1))
        energy_summary = energy_diagnostics(diagnostics_obj=out)
        mixed_mcmc_tensor = self.get_samples(permuted=True)
        mcmc_cov = numpy.cov(mixed_mcmc_tensor, rowvar=False)
        mcmc_sd_vec = numpy.sqrt(numpy.diagonal(mcmc_cov))
        difficulty = max(mcmc_sd_vec) / min(mcmc_sd_vec)
        mcmc_samples = self.get_samples(permuted=False)
        out_dict = diagnostics_stan(mcmc_samples)
        min_ess = min(out_dict["ess"])
        median_ess = numpy.median(out_dict["ess"])
        percent_rhat = sum(out_dict["rhat"]<1.05)/len(out_dict["rhat"])
        num_id = self.num_chains
        output = numpy.zeros((num_id, len(feature_names)))

        output[:, 0] = num_restarts
        output[:, 1] = num_divergent
        output[:, 2] = hit_max_tree_depth
        output[:, 3] = ave_num_transitions
        output[:, 4] = total_num_transitions
        output[:, 5] = energy_summary["bfmi_list"]
        output[:, 6] = energy_summary["ess"]
        output[:, 7] = energy_summary["rhat"]
        output[:, 8] = difficulty
        output[:, 9] = num_chains_removed
        output[:,10] = min_ess
        output[:,11] = median_ess
        output[:,12] = percent_rhat
        return output, feature_names
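
The method returns the per-chain matrix together with its column names. A minimal usage sketch for inspecting it, assuming sampler is an already-run instance of the class above (the loop and labelling are illustrative, not part of the library):

diag_matrix, names = sampler.np_diagnostics()
for chain_id in range(diag_matrix.shape[0]):
    labelled = ", ".join("{}={:.3g}".format(n, v)
                         for n, v in zip(names, diag_matrix[chain_id]))
    print("chain {}: {}".format(chain_id, labelled))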
Example No. 2
def get_diagnostics(sampler):
    # Return a numpy array of shape [num_chains, 9] of per-chain diagnostics,
    # together with the list of column names.

    feature_names = [
        "num_restarts", "num_divergent", "num_hit_max_tree_depth",
        "ave_num_transitions", "bfmi", "lp_ess", "lp_rhat", "difficulty"
    ]
    feature_names.append("num_chains_removed")

    sampler.remove_failed_chains()
    out = sampler.get_diagnostics(permuted=False)
    num_restarts = sampler.metadata.num_restarts
    num_chains_removed = sampler.metadata.num_chains_removed
    processed_diag = process_diagnostics(out, name_list=["divergent"])
    num_divergent = processed_diag.sum(axis=1)
    processed_diag = process_diagnostics(out, name_list=["hit_max_tree_depth"])
    hit_max_tree_depth = processed_diag.sum(axis=1)
    processed_diag = process_diagnostics(out, name_list=["num_transitions"])
    ave_num_transitions = processed_diag.mean(axis=1)
    energy_summary = energy_diagnostics(diagnostics_obj=out)
    mixed_mcmc_tensor = sampler.get_samples(permuted=True)
    mcmc_cov = numpy.cov(mixed_mcmc_tensor, rowvar=False)
    mcmc_sd_vec = numpy.sqrt(numpy.diagonal(mcmc_cov))
    difficulty = max(mcmc_sd_vec) / min(mcmc_sd_vec)
    num_id = sampler.num_chains
    output = numpy.zeros((num_id, len(feature_names)))

    output[:, 0] = num_restarts
    output[:, 1] = num_divergent
    output[:, 2] = hit_max_tree_depth
    output[:, 3] = ave_num_transitions
    output[:, 4] = energy_summary["bfmi_list"]
    output[:, 5] = energy_summary["ess"]
    output[:, 6] = energy_summary["rhat"]
    output[:, 7] = difficulty
    output[:, 8] = num_chains_removed
    return output, feature_names
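
Because each call returns one row per chain plus a shared list of column names, diagnostics from several independent runs can be stacked into a single table. A minimal sketch, assuming samplers is a hypothetical list of already-run sampler objects and that numpy is imported as in the surrounding examples:

rows = []
for s in samplers:
    per_chain, names = get_diagnostics(s)
    rows.append(per_chain)
all_diagnostics = numpy.vstack(rows)  # shape: [total number of chains, len(names)]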
Example No. 3
def get_ess_and_esjds(ran_sampler):
    # get max, min, and median ESS and normalized ESJD for the sampler on the unconstrained space
    ran_sampler.remove_failed_chains()
    sampler_diag_check = {
        "num_chains_removed": ran_sampler.metadata.num_chains_removed,
        "num_restarts": ran_sampler.metadata.num_restarts
    }

    if ran_sampler.metadata.num_chains_removed == 0:
        diag = ran_sampler.get_diagnostics()
        num_transitions = process_diagnostics(diag,
                                              name_list=["num_transitions"])
        total_num_transitions = numpy.sum(num_transitions)
        samples_combined = ran_sampler.get_samples(permuted=True)
        esjd = ESJD(samples_combined)
        esjd_normalized = esjd / math.sqrt(total_num_transitions)
        ess = ess_stan(ran_sampler.get_samples(permuted=False))
        min_ess = min(ess)
        max_ess = max(ess)
        median_ess = numpy.median(ess)
        median_ess_normalized = median_ess / math.sqrt(total_num_transitions)
        min_ess_normalized = min_ess / math.sqrt(total_num_transitions)
        max_ess_normalized = max_ess / math.sqrt(total_num_transitions)

        out = {
            "median_ess": median_ess,
            "max_ess": max_ess,
            "min_ess": min_ess,
            "esjd": esjd,
            "esjd_normalized": esjd_normalized,
            "median_ess_normalized": median_ess_normalized,
            "min_ess_normalized": min_ess_normalized,
            "max_ess_normalized": max_ess_normalized
        }

    else:
        out = {
            "median_ess": 0,
            "max_ess": 0,
            "min_ess": 0,
            "esjd": 0,
            "esjd_normalized": 0,
            "median_ess_normalized": 0,
            "min_ess_normalized": 0,
            "max_ess_normalized": 0
        }

    out.update({"sampler_diag_check": sampler_diag_check})
    return out
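
ESJD above stands for the expected squared jump distance of the combined chain. A minimal reference sketch of that quantity, illustrating the idea only and not the ESJD implementation imported above, assuming samples is a [num_draws, num_params] numpy array of mixed draws:

import numpy

def esjd_sketch(samples):
    # mean squared Euclidean distance between consecutive draws
    jumps = numpy.diff(samples, axis=0)
    return numpy.mean(numpy.sum(jumps ** 2, axis=1))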
Example No. 4
print(diagnostics_stan(samples[:,:,hidden_in_sigma2_indices]))

print("posterior mean sigma2 {}".format(posterior_mean_hidden_in_sigma2))
print("posterior median sigma2 {}".format(posterior_median_hidden_in_sigma2))
#print(mcmc_samples_beta["indices_dict"])

full_mcmc_tensor = get_params_mcmc_tensor(sampler=sampler1)

print(get_short_diagnostics(full_mcmc_tensor))


out = sampler1.get_diagnostics(permuted=False)

print("divergent")
processed_diag = process_diagnostics(out,name_list=["divergent"])

print(processed_diag.sum(axis=1))

#print(processed_diag.shape)

#processed_energy = process_diagnostics(out,name_list=["prop_H"])

print(energy_diagnostics(diagnostics_obj=out))

mcmc_samples_mixed = sampler1.get_samples(permuted=True)
#target_dataset = get_data_dict("8x8mnist")

v_generator = wrap_V_class_with_input_data(class_constructor=V_fc_model_1,
                                           input_data=input_data,
                                           prior_dict=prior_dict,
                                           model_dict=model_dict)
precision_type = "torch.DoubleTensor"
te2, predicted2 = test_error(input_data,
                             v_obj=v_generator(precision_type=precision_type),
                             mcmc_samples=mcmc_samples_mixed,
                             type="classification",
                             memory_efficient=False)
Example No. 5
print("overall diagnostics")
full_mcmc_tensor = get_params_mcmc_tensor(sampler=sampler1)

print(get_short_diagnostics(full_mcmc_tensor))

#print(mcmc_samples_beta["indices_dict"])

out = sampler1.get_diagnostics(permuted=False)

print("num divergences after warmup")
processed_diag = process_diagnostics(out,name_list=["divergent"])

print(processed_diag.sum(axis=1))

print("num hit max tree depth after warmup")
processed_diag = process_diagnostics(out,name_list=["hit_max_tree_depth"])

print(processed_diag.sum(axis=1))

print("average number of leapfrog steps after warmup")
processed_diag = process_diagnostics(out,name_list=["num_transitions"])
print(processed_diag.mean(axis=1))
#processed_energy = process_diagnostics(out,name_list=["prop_H"])

print("energy diagnostics")
print(energy_diagnostics(diagnostics_obj=out))
Example No. 6
tune_settings_dict = tuning_settings([], [], [], other_arguments)
tune_dict = tuneinput_class(input_dict).singleton_tune_dict()
sampler1 = mcmc_sampler(tune_dict=tune_dict,
                        mcmc_settings_dict=mcmc_meta,
                        tune_settings_dict=tune_settings_dict)
store_name = 'pima_indian_logit_sampler.pkl'
sampled = False  # set to True to reload a previously saved sampler instead of re-sampling
if sampled:
    sampler1 = pickle.load(open(store_name, 'rb'))
else:
    sampler1.start_sampling()
    with open(store_name, 'wb') as f:
        pickle.dump(sampler1, f)

print("overall diagnostics")
full_mcmc_tensor = sampler1.get_samples(permuted=False)

print(get_short_diagnostics(full_mcmc_tensor))

out = sampler1.get_diagnostics(permuted=False)

print("average acceptance rate after warmup")
processed_diag = process_diagnostics(out, name_list=["accept_rate"])

average_accept_rate = numpy.mean(processed_diag, axis=1)

print(average_accept_rate)

print("energy diagnostics")
print(energy_diagnostics(diagnostics_obj=out))
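
The average acceptance rate is mostly useful as a quick sanity check on each chain. A minimal sketch of such a check, assuming a NUTS/HMC-style sampler whose adaptation targets an acceptance rate around 0.8; the 0.6 threshold is an illustrative choice, not part of the library:

low_chains = numpy.where(average_accept_rate < 0.6)[0]
if len(low_chains) > 0:
    print("chains with unusually low average acceptance rate: {}".format(low_chains.tolist()))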
Example No. 7
sampler1 = mcmc_sampler(tune_dict=tune_dict,
                        mcmc_settings_dict=mcmc_meta,
                        tune_settings_dict=tune_settings_dict)
store_name = 'one_pl_sampler.pkl'
sampled = False
if sampled:
    sampler1 = pickle.load(open(store_name, 'rb'))
else:
    sampler1.start_sampling()
    with open(store_name, 'wb') as f:
        pickle.dump(sampler1, f)
print("overall diagnostics")
full_mcmc_tensor = sampler1.get_samples(permuted=False)

print(get_short_diagnostics(full_mcmc_tensor))

out = sampler1.get_diagnostics(permuted=False)
print("num hit max tree depth")
processed_diag = process_diagnostics(out, name_list=["hit_max_tree_depth"])

print(processed_diag.sum(axis=1))

print("average acceptance rate after warmup")
processed_diag = process_diagnostics(out, name_list=["accept_rate"])

average_accept_rate = numpy.mean(processed_diag, axis=1)

print(average_accept_rate)

print("energy diagnostics")
print(energy_diagnostics(diagnostics_obj=out))
Example No. 8
import dill as pickle
import numpy
from post_processing.get_diagnostics import energy_diagnostics,process_diagnostics
sampler1 = pickle.load(open('temp_save_sampler1.pkl', 'rb'))

mcmc_samples_beta = sampler1.get_samples_alt(prior_obj_name="beta",permuted=False)
samples = mcmc_samples_beta["samples"]
posterior_mean = numpy.mean(samples.reshape(-1,101),axis=0)
out = sampler1.get_diagnostics(permuted=False,include_warmup=True)


processed_diag = process_diagnostics(out, name_list=["num_transitions"])
# per-iteration number of leapfrog transitions for chain 0, first 1000 iterations (warmup included)
print(processed_diag[0, 0:1000, 0])