Example #1
    def test_normal_prior(self):
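        """Sanity check for MAP hyperparameter fitting: generate synthetic two-IS GP data,
        then run hyper_opt under a vague prior, a prior centered on the true hyperparameters,
        and a deliberately misplaced prior, and print the estimates for comparison."""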
        space_dim = 2
        num_IS = 2
        true_hyper, data = get_random_gp_data(space_dim, num_IS, 500)
        hyperparam_search_domain = pythonTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in numpy.repeat([[0.01, 2.]], len(true_hyper), axis=0)])
        hyper_bounds = [(0.01, 100.) for i in range(len(true_hyper))]
        multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(1)
        cov = MixedSquareExponential(hyperparameters=multistart_pts[0,:], total_dim=space_dim+1, num_is=num_IS)
        test_prior = NormalPrior(5.*numpy.ones(len(true_hyper)), 25. * numpy.eye(len(true_hyper)))
        hyper_test, f, output = hyper_opt(cov, data=data, init_hyper=multistart_pts[0, :], hyper_bounds=hyper_bounds, approx_grad=False, hyper_prior=test_prior)

        good_prior = NormalPrior(true_hyper, 0.1 * numpy.eye(len(true_hyper)))
        hyper_good_prior, _, _ = hyper_opt(cov, data=data, init_hyper=multistart_pts[0, :], hyper_bounds=hyper_bounds, approx_grad=False, hyper_prior=good_prior)
        bad_prior = NormalPrior(numpy.ones(len(true_hyper)), 0.1 * numpy.eye(len(true_hyper)))
        hyper_bad_prior, _, _ = hyper_opt(cov, data=data, init_hyper=multistart_pts[0, :], hyper_bounds=hyper_bounds, approx_grad=False, hyper_prior=bad_prior)
        print "true hyper: {0}\n hyper test: {1}\n good prior: {2}\n bad prior:\n should close to one {3}".format(true_hyper, hyper_test, hyper_good_prior, hyper_bad_prior)
        print "dim {0}, num_is {1}".format(space_dim, num_IS)
Example #2
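    # one (sample points, observed values, noise variances) tuple per data set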
    listPrevData.append((init_pts[index_dataset], init_vals[index_dataset], noise_vars))
    index_dataset += 1
##############

data_list, bias_sq_list = createHistoricalDataForMisoEI(obj_func_min.getDim(), listPrevData, directory=pathToPickles, bias_filename=bias_filename)
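# data_list holds one historical-data object per information source; bias_sq_list the
# corresponding squared-bias estimates (read from pathToPickles / bias_filename)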
###############################################

###############################################
### Begin hyper opt
hyper_result = []
for data in data_list:
    # Setup prior for MAP
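    # prior mean: empirical variance of the observed values for the signal-variance
    # hyperparameter, followed by a unit length scale for each input dimension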
    prior_mean = np.concatenate(([np.var(data.points_sampled_value)], [1.]*obj_func_min.getDim()))
    prior_sig = np.eye(obj_func_min.getDim()+1) * 100.
    prior_sig[0,0] = np.power(prior_mean[0]/5., 2.)
    prior = NormalPrior(prior_mean, prior_sig)
    hyper_bounds = [(0.1, prior_mean[i]+2.*np.sqrt(prior_sig[i,i])) for i in range(obj_func_min.getDim()+1)]
    print "hyper bound {0}".format(hyper_bounds)
    hyperparam_search_domain = pythonTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
    multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(num_hyper_multistart)
    best_f = np.inf
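    # kernel passed to hyper_opt; constructed with the prior mean, though each
    # restart actually starts from its own multistart point via init_hyper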
    cov = SquareExponential(prior_mean)
    for i in range(num_hyper_multistart):
        hyper, f, output = hyper_opt(cov, data=data, init_hyper=multistart_pts[i, :],
                                     hyper_bounds=hyper_bounds, approx_grad=False, hyper_prior=prior)
        # print output
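        # keep the hyperparameters from the restart with the lowest objective value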
        if f < best_f:
            best_hyper = hyper
            best_f = f
    print('best_hyper=' + str(best_hyper))
    print('best_f= ' + str(best_f))
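    # collect this data set's MAP estimate (hyper_result is initialized before the loop)
    hyper_result.append(best_hyper)

# Hypothetical next step, not part of the original snippet: a minimal sketch of plugging
# the MAP hyperparameters into a GP model, assuming MOE's python_version
# GaussianProcess(covariance_function, historical_data) constructor:
#   from moe.optimal_learning.python.python_version.gaussian_process import GaussianProcess
#   gp_model = GaussianProcess(SquareExponential(hyper_result[0]), data_list[0])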
Example #3
prior_mean = numpy.concatenate(
    (prior_mean_IS_0, prior_mean_IS_1, prior_mean_IS_2))
prior_sig = numpy.eye(len(prior_mean)) * 25.
# prior_sig[0, 0] = 1e6 # Jialei's original value: 5e5
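# per IS, tighten the prior variance and the search bounds for the signal-variance entry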
for indexIS in range(obj_func_min.getNumIS()):
    pos_signal_variance = (
        obj_func_min.getDim() + 1
    ) * indexIS  # compute the position that corresponds to signal variance
    # we assume that for each IS the ordering is signal_var, beta_1, beta_2, ..., beta_dim
    prior_sig[pos_signal_variance, pos_signal_variance] = math.pow(
        (prior_mean[pos_signal_variance] / 5.0) + 1e-6,
        2)  # Jialei's suggestions
    hyper_bounds[pos_signal_variance] = (1., max(prior_mean[pos_signal_variance] * 2, 100))
prior = NormalPrior(prior_mean, prior_sig)
# hyper_bounds[0] = (1., prior_mean[0] * 2) ### can cause errors
""" Matthias' code ends
"""
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
from moe.optimal_learning.python.python_version.log_likelihood import GaussianProcessLogMarginalLikelihood

print "prior mean\n{0}\nprior sig diag\n{1}".format(prior_mean,
                                                    numpy.diag(prior_sig))
print "num_is {0}".format(obj_func_max.getNumIS() - 1 + separateIS0)
hyperparam_search_domain = pythonTensorProductDomain(
    [ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
print "hyper bounds\n{0}".format(hyper_bounds)
cov = MixedSquareExponential(hyperparameters=prior_mean,
                             total_dim=obj_func_max.getDim() + 1,