Example #1
0
def prior_test():
    """Manually set up an inverse_prior object and print the parameter
    values (theta) corresponding to the arbitrary hypercube point set here.

    p should be the same length as num_weights.
    """
    num_weights = 2
    # Hypercube coordinates in [0, 1], one per weight.
    p = np.array([0.1, 0.9])
    prior_types = [7]
    prior_hyperparams = [[-2., 2.]]
    dependence_lengths = [2]
    param_prior_types = [0]
    prior = ip.inverse_prior(prior_types, prior_hyperparams,
                             dependence_lengths, param_prior_types,
                             num_weights)
    theta = prior(p)
    # Fixed: the original used Python 2 `print` statements, which are a
    # syntax error under Python 3.
    print("hypercube = ")
    print(p)
    print("theta = ")
    print(theta)
Example #2
0
def main(run_string):
    """Build an np-backed NN model, set up a (possibly stochastic) prior,
    and optionally run prior/forward tests and PolyChord.

    Which steps run is controlled by substring tokens in run_string:
    "nn_prior_test", "forward_test_linear", "polychord1".
    """
    ###### load training data
    data = 'bh_50'
    data_suffix = '_tr_1.csv'
    data_dir = '../../data/uci/'
    data_prefix = data_dir + data
    x_tr, y_tr = input_tools.get_x_y_tr_data(data_prefix, data_suffix)
    # NOTE(review): the uci data loaded above is immediately discarded and
    # replaced with the linear toy data below — confirm this override is
    # intentional (it looks like a leftover debugging toggle).
    x_tr = np.genfromtxt('../../data/linear_input_data.txt', delimiter=',')
    y_tr = x_tr  # autoencoder-style target: predict the inputs themselves
    batch_size = x_tr.shape[0]
    ###### get weight information
    weights_dir = '../../data/'  # for forward test
    # (removed unused local `a1_size = 0`)
    num_inputs = tools.get_num_inputs(x_tr)
    num_outputs = tools.get_num_outputs(y_tr)
    layer_sizes = [1, num_inputs] * 2
    # Per-layer trainability masks for multiplicative (m) and bias (b) weights.
    m_trainable_arr = [True, True] * 2 + [False]
    b_trainable_arr = [True, True] * 2 + [False]
    num_weights = tools.calc_num_weights3(num_inputs, layer_sizes, num_outputs,
                                          m_trainable_arr, b_trainable_arr)
    ###### check shapes of training data
    x_tr, y_tr = tools.reshape_x_y_twod(x_tr, y_tr)
    ###### setup prior
    hyper_type = "deterministic"  # "stochastic" or "deterministic"
    var_type = "deterministic"  # "stochastic" or "deterministic"
    weight_shapes = tools.get_weight_shapes3(num_inputs, layer_sizes,
                                             num_outputs, m_trainable_arr,
                                             b_trainable_arr)
    dependence_lengths = tools.get_degen_dependence_lengths(weight_shapes,
                                                            independent=True)
    if hyper_type == "deterministic" and var_type == "deterministic":
        # Fixed prior over all weights; no stochastic hyper/variance params.
        prior_types = [4]
        prior_hyperparams = [[0., 1.]]
        param_prior_types = [0]
        prior = inverse_priors.inverse_prior(prior_types, prior_hyperparams,
                                             dependence_lengths,
                                             param_prior_types, num_weights)
        n_stoc = 0
        n_stoc_var = 0
    elif hyper_type == "stochastic" and var_type == "deterministic":
        # Stochastic hyperprior over weight-prior scale, deterministic variance.
        granularity = 'single'
        hyper_dependence_lengths = tools.get_hyper_dependence_lengths(
            weight_shapes, granularity)
        hyperprior_types = [9]
        prior_types = [4]
        hyperprior_params = [[1. / 2., 1. / (2. * 100)]]
        prior_hyperparams = [0.]
        param_hyperprior_types = [0]
        param_prior_types = [0]
        n_stoc = len(hyper_dependence_lengths)
        prior = isp.inverse_stoc_hyper_prior(
            hyperprior_types, prior_types, hyperprior_params,
            prior_hyperparams, hyper_dependence_lengths, dependence_lengths,
            param_hyperprior_types, param_prior_types, n_stoc, num_weights)
        n_stoc_var = 0
    elif hyper_type == "stochastic" and var_type == "stochastic":
        # Stochastic hyperprior AND stochastic likelihood variance.
        granularity = 'single'
        hyper_dependence_lengths = tools.get_hyper_dependence_lengths(
            weight_shapes, granularity)
        var_dependence_lengths = [1]
        n_stoc_var = len(var_dependence_lengths)
        hyperprior_types = [9]
        var_prior_types = [10]
        prior_types = [4]
        hyperprior_params = [[1. / 2., 1. / (2. * 100)]]
        var_prior_params = [[1. / 2., 1. / (2. * 100)]]
        prior_hyperparams = [0.]
        param_hyperprior_types = [0]
        var_param_prior_types = [0]
        param_prior_types = [0]
        n_stoc = len(hyper_dependence_lengths)
        prior = isvp.inverse_stoc_var_hyper_prior(
            hyperprior_types, var_prior_types, prior_types, hyperprior_params,
            var_prior_params, prior_hyperparams, hyper_dependence_lengths,
            var_dependence_lengths, dependence_lengths, param_hyperprior_types,
            var_param_prior_types, param_prior_types, n_stoc, n_stoc_var,
            num_weights)
    ###### test prior output from nn setup
    if "nn_prior_test" in run_string:
        # Total sampled dimensionality = stochastic hypers + variances + weights.
        prior_tests.nn_prior_test(prior, n_stoc + n_stoc_var + num_weights)
    # set up np model
    np_nn = npms.mlp_ResNet_2
    npm = np_model(np_nn, x_tr, y_tr, batch_size, layer_sizes, m_trainable_arr,
                   b_trainable_arr, n_stoc_var)
    ll_type = 'gauss'  # 'gauss', 'av_gauss', 'categorical_crossentropy', 'av_categorical_crossentropy'
    npm.setup_LL(ll_type)
    ###### test llhood output
    if "forward_test_linear" in run_string:
        forward_tests.forward_test_linear([npm], num_weights + n_stoc_var,
                                          weights_dir)
    ###### setup polychord
    nDerived = 0
    settings = PyPolyChord.settings.PolyChordSettings(
        n_stoc + n_stoc_var + num_weights, nDerived)
    settings.base_dir = './np_chains/'
    settings.file_root = data + "_slp_sh_sv_sm"
    settings.nlive = 1000
    ###### run polychord
    if "polychord1" in run_string:
        PyPolyChord.run_polychord(npm, n_stoc, n_stoc_var, num_weights,
                                  nDerived, settings, prior,
                                  polychord_tools.dumper)
Example #3
0
def main(run_string):
    """Build keras, tensorflow and numpy versions of a single-hidden-layer
    model on the simple_tanh data, set up the prior, and optionally run
    forward tests and PolyChord.

    Which steps run is controlled by substring tokens in run_string:
    "k_forward_test_linear", "tf_forward_test_linear",
    "np_forward_test_linear", "nn_prior_test", "k_polychord1",
    "tf_polychord1", "np_polychord1".
    """
    ###### load training data
    data = 'simple_tanh'
    data_dir = '../../data/'
    data_prefix = data_dir + data
    x_tr, y_tr = input_tools.get_x_y_tr_data(data_prefix)
    batch_size = x_tr.shape[0]
    ###### get weight information
    a1_size = 2
    layer_sizes = [a1_size]
    # Trainability masks for multiplicative (m) and bias (b) weights per layer.
    m_trainable_arr = [True, False]
    b_trainable_arr = [False, False]
    num_inputs = tools.get_num_inputs(x_tr)
    num_outputs = tools.get_num_outputs(y_tr)
    num_weights = tools.calc_num_weights3(num_inputs, layer_sizes, num_outputs,
                                          m_trainable_arr, b_trainable_arr)
    ###### check shapes of training data
    x_tr, y_tr = tools.reshape_x_y_twod(x_tr, y_tr)
    ###### setup keras model
    model = kms.slp_model(num_inputs, num_outputs, layer_sizes)
    km = kf.keras_model(model, x_tr, y_tr, batch_size)
    loss = 'mse'  # 'squared_error', 'av_squared_error', 'categorical_crossentropy', 'av_categorical_crossentropy'
    km.setup_LL(loss)
    # setup tf graph
    tf_graph = tfgs.slp_graph
    tfm = tff.tf_model(tf_graph, x_tr, y_tr, batch_size, layer_sizes,
                       m_trainable_arr, b_trainable_arr)
    fit_metric = 'chisq'  # 'chisq', 'av_chisq', 'categorical_crossentropy', 'av_categorical_crossentropy'
    tfm.setup_LL(fit_metric)
    # set up np model
    np_nn = npms.slp_nn
    npm = npf.np_model(np_nn, x_tr, y_tr, batch_size, layer_sizes,
                       m_trainable_arr, b_trainable_arr)
    ll_type = 'gauss'  # 'gauss', 'av_gauss', 'categorical_crossentropy', 'av_categorical_crossentropy'
    npm.setup_LL(ll_type)
    ###### test llhood output
    if "k_forward_test_linear" in run_string:
        forward_tests.forward_test_linear([km], num_weights, data_dir)
    if "tf_forward_test_linear" in run_string:
        forward_tests.forward_test_linear([tfm], num_weights, data_dir)
    if "np_forward_test_linear" in run_string:
        forward_tests.forward_test_linear([npm], num_weights, data_dir)
    ###### setup prior
    # BUG FIX: hyper_type, weight_shapes and dependence_lengths were used
    # below but never defined in this function, raising NameError at runtime.
    # Define them here, mirroring the prior setup used elsewhere in this file.
    hyper_type = "deterministic"  # "stochastic" or "deterministic"
    weight_shapes = tools.get_weight_shapes3(num_inputs, layer_sizes,
                                             num_outputs, m_trainable_arr,
                                             b_trainable_arr)
    dependence_lengths = tools.get_degen_dependence_lengths(weight_shapes,
                                                            independent=True)
    if hyper_type == "deterministic":
        prior_types = [4]
        prior_hyperparams = [[0., 1.]]
        param_prior_types = [0]
        prior = inverse_priors.inverse_prior(prior_types, prior_hyperparams,
                                             dependence_lengths,
                                             param_prior_types, num_weights)
        n_stoc = 0
    elif hyper_type == "stochastic":
        granularity = 'single'
        hyper_dependence_lengths = tools.get_hyper_dependence_lengths(
            weight_shapes, granularity)
        hyperprior_types = [9]
        prior_types = [4]
        hyperprior_params = [[0.1 / 2., 0.1 / (2. * 100)]]
        prior_hyperparams = [0.]
        param_hyperprior_types = [0]
        param_prior_types = [0]
        n_stoc = len(hyper_dependence_lengths)
        prior = isp.inverse_stoc_hyper_prior(
            hyperprior_types, prior_types, hyperprior_params,
            prior_hyperparams, hyper_dependence_lengths, dependence_lengths,
            param_hyperprior_types, param_prior_types, n_stoc, num_weights)
    ###### test prior output from nn setup
    if "nn_prior_test" in run_string:
        prior_tests.nn_prior_test(prior, n_stoc + num_weights)
    ###### setup polychord
    nDerived = 0
    settings = PyPolyChord.settings.PolyChordSettings(n_stoc + num_weights,
                                                      nDerived)
    settings.file_root = data + "slp_1"
    settings.nlive = 1000
    ###### run polychord
    if "k_polychord1" in run_string:
        settings.base_dir = './keras_chains/'
        PyPolyChord.run_polychord(km, num_weights, nDerived, settings, prior,
                                  polychord_tools.dumper)
    if "tf_polychord1" in run_string:
        settings.base_dir = './tf_chains/'
        PyPolyChord.run_polychord(tfm, num_weights, nDerived, settings, prior,
                                  polychord_tools.dumper)
    if "np_polychord1" in run_string:
        settings.base_dir = './np_chains/'
        PyPolyChord.run_polychord(npm, num_weights, nDerived, settings, prior,
                                  polychord_tools.dumper)