# Beispiel #1 (example separator from the original export)
# plot ess/L standard deviation as well
# HMC needs jitter added to the step size (0.9*ep, 1.1*ep)

num_repeats = 2
num_grid_divides = 3

# Lower/upper bounds of the tuning grid for the integrator step size
# (epsilon), the total integration time, and the number of leapfrog steps.
ep_bounds = [1e-3, 0.2]
evolve_t_bounds = [0.15, 50.]
evolve_L_bounds = [5, 1000]
# TODO: add a constraint so that L = round(evolve_t / ep) < 1024


def _evenly_spaced(bounds):
    # num_grid_divides points spread linearly across [bounds[0], bounds[1]].
    lo, hi = bounds
    return list(numpy.linspace(lo, hi, num_grid_divides))


ep_list = _evenly_spaced(ep_bounds)
evolve_t_list = _evenly_spaced(evolve_t_bounds)
evolve_L_list = _evenly_spaced(evolve_L_bounds)
# Target-distribution setup for a 10-dimensional multivariate normal.
# NOTE(review): wishart_for_cov presumably returns a Wishart-distributed
# matrix used as the MVN's precision/covariance input -- confirm against
# experiments.experiment_util.
input_data = {"input": wishart_for_cov(dim=10)}
V_mvn1 = wrap_V_class_with_input_data(class_constructor=V_mvn,
                                      input_data=input_data)
# Built from the very same input_data dict, so both wrappers describe the
# identical target distribution.
V_mvn2 = wrap_V_class_with_input_data(class_constructor=V_mvn,
                                      input_data=input_data)
# Summary statistic extracted from each experiment's results.
target_fun = fun_extract_median_ess
v_fun_list = [V_mvn1, V_mvn2]

# grid computations
# experiment_setting = experiment_setting_dict(chain_length=300,num_chains_per_sampler=4,warm_up=150,
#                                              tune_l=0,allow_restart=True,max_num_restarts=5,num_cpu_per_sampler=4)
#
# Tuning-grid specification fed to the experiment driver.
# NOTE(review): this dict literal is truncated in this chunk -- the closing
# brace and any remaining keys lie outside the visible source.
input_dict = {
    "v_fun": v_fun_list[0:1],  # only the first of the two MVN targets
    "epsilon": ep_list,
    "second_order": [False],
# Beispiel #2 (example separator from the original export)
from distributions.mvn import V_mvn
from input_data.convert_data_to_dict import get_data_dict
from abstract.util import wrap_V_class_with_input_data
from abstract.mcmc_sampler import mcmc_sampler, mcmc_sampler_settings_dict
from adapt_util.tune_param_classes.tune_param_setting_util import *
from experiments.experiment_obj import tuneinput_class
from experiments.correctdist_experiments.prototype import check_mean_var_stan
from experiments.correctdist_experiments.result_from_long_chain.logistic.util import result_from_long_chain
import numpy

from experiments.experiment_util import wishart_for_cov
import os

# Problem dimension for the multivariate-normal target.
dim = 100
# seed=dim presumably seeds the Wishart draw so the target matrix is
# reproducible -- confirm against experiments.experiment_util.
input_data = {"input": wishart_for_cov(dim=dim, seed=dim)}
v_generator = wrap_V_class_with_input_data(class_constructor=V_mvn,
                                           input_data=input_data)

# Sampler run configuration: 4 chains on 4 CPUs, 1000 samples per chain,
# double precision, kept in memory, no automatic restarts.
# NOTE(review): warmup_per_chain (1100) exceeds samples_per_chain (1000);
# whether warm-up draws count toward samples_per_chain depends on
# mcmc_sampler_settings_dict -- confirm this is intentional.
mcmc_meta = mcmc_sampler_settings_dict(mcmc_id=0,
                                       samples_per_chain=1000,
                                       num_chains=4,
                                       num_cpu=4,
                                       thin=1,
                                       tune_l_per_chain=1000,
                                       warmup_per_chain=1100,
                                       is_float=False,
                                       isstore_to_disk=False,
                                       allow_restart=False)

# Tuning-grid specification for the experiment driver.
# NOTE(review): this dict literal is truncated in this chunk -- the closing
# brace and remaining keys lie outside the visible source.
input_dict = {
    "v_fun": [v_generator],
# Beispiel #3 (example separator from the original export)
from input_data.convert_data_to_dict import get_data_dict
from post_processing.get_diagnostics import energy_diagnostics, process_diagnostics, get_params_mcmc_tensor, get_short_diagnostics
from experiments.experiment_util import wishart_for_cov
# Sampler run configuration: 4 chains on 4 CPUs, 2000 samples per chain,
# double precision, kept in memory, no automatic restarts.
# NOTE(review): mcmc_sampler_settings_dict is not among this fragment's
# visible imports -- presumably imported earlier in the full file.
mcmc_meta = mcmc_sampler_settings_dict(mcmc_id=0,
                                       samples_per_chain=2000,
                                       num_chains=4,
                                       num_cpu=4,
                                       thin=1,
                                       tune_l_per_chain=1000,
                                       warmup_per_chain=1100,
                                       is_float=False,
                                       isstore_to_disk=False,
                                       allow_restart=False)

# Fix the global NumPy RNG state before drawing the target matrix so the
# experiment is reproducible (assumes wishart_for_cov uses numpy's global
# RNG when no seed argument is given -- TODO confirm).
numpy.random.seed(0)
Sigma_inv = wishart_for_cov(dim=50)  # used as the MVN precision matrix below
# Covariance implied by the precision matrix; currently only referenced by
# the commented-out diagnostics that follow.
Sigma = numpy.linalg.inv(Sigma_inv)
# sd_vec = numpy.sqrt(numpy.diagonal(Sigma))
# print(max(sd_vec))
# print(min(sd_vec))
# exit()
input_data = {"input": Sigma_inv}

V_generator = wrap_V_class_with_input_data(class_constructor=V_mvn,
                                           input_data=input_data)

# Sampler tuning specification.
# NOTE(review): this dict literal is truncated in this chunk -- the closing
# brace and remaining keys lie outside the visible source.
input_dict = {
    "v_fun": [V_generator],
    "epsilon": ["dual"],  # presumably dual-averaging step-size adaptation
    "second_order": [False],
    "cov": ["adapt"],  # presumably an adapted covariance/mass matrix
# Beispiel #4 (example separator from the original export)
import numpy, os

from distributions.mvn import V_mvn
from abstract.util import wrap_V_class_with_input_data
from input_data.convert_data_to_dict import get_data_dict
from experiments.float_vs_double.convergence.match_mean_cov_convergence.util import match_convergence_test
from distributions.logistic_regressions.logistic_regression import V_logistic_regression
from abstract.util import wrap_V_class_with_input_data
from experiments.experiment_util import wishart_for_cov

# Per-target reference moments and potential-function factories; entries at
# the same index belong to the same test problem.
correct_mean_list = []
correct_cov_list = []
v_fun_list = []
# --- Target 1: 100-dimensional multivariate normal ---------------------------
# Seeded Wishart draw used as the MVN precision matrix (reproducible).
input_data = {"input": wishart_for_cov(dim=100, seed=100)}
V_mvn1 = wrap_V_class_with_input_data(class_constructor=V_mvn,
                                      input_data=input_data)
# Exact moments of this target: zero mean, covariance = inverse precision.
correct_mean_list.append(numpy.zeros(100))
correct_cov_list.append(numpy.linalg.inv(input_data["input"]))
v_fun_list.append(V_mvn1)
# --- Targets 2 and 3: logistic regressions -----------------------------------
# NOTE(review): unlike the MVN target above, nothing is appended to
# correct_mean_list / correct_cov_list here -- presumably the reference
# moments come from long-chain results elsewhere; confirm the lists are not
# indexed in lockstep with v_fun_list downstream.
input_data_pima_indian = get_data_dict("pima_indian")
V_pima_indian = wrap_V_class_with_input_data(
    class_constructor=V_logistic_regression, input_data=input_data_pima_indian)
v_fun_list.append(V_pima_indian)

input_data_australian = get_data_dict("australian")
V_australian = wrap_V_class_with_input_data(
    class_constructor=V_logistic_regression, input_data=input_data_australian)
v_fun_list.append(V_australian)