# Ejemplo n.º 1  (scraped example marker — commented out so the file parses)
# 0
# Load per-IS historical data (and squared-bias estimates) for MISO EI.
data_list, bias_sq_list = createHistoricalDataForMisoEI(obj_func_min.getDim(), listPrevData, directory=pathToPickles, bias_filename=bias_filename)
###############################################

###############################################
### Begin hyper opt
# Fit GP hyperparameters by multistart MAP estimation, one fit per dataset,
# and persist the concatenated best hyperparameters to the results table.
hyper_result = []
for data in data_list:
    # Setup prior for MAP: mean = [signal variance, 1.0 per length scale].
    prior_mean = np.concatenate(([np.var(data.points_sampled_value)], [1.] * obj_func_min.getDim()))
    prior_sig = np.eye(obj_func_min.getDim() + 1) * 100.
    # Tighter prior on the signal-variance component (std = mean/5).
    prior_sig[0, 0] = np.power(prior_mean[0] / 5., 2.)
    prior = NormalPrior(prior_mean, prior_sig)
    # Box-constrain each hyperparameter to [0.1, prior mean + 2 prior std].
    hyper_bounds = [(0.1, prior_mean[i] + 2. * np.sqrt(prior_sig[i, i])) for i in range(obj_func_min.getDim() + 1)]
    # NOTE: print() with a single argument behaves identically on Python 2 and 3;
    # the original Python-2-only `print x` statements were a syntax error on 3.
    print("hyper bound {0}".format(hyper_bounds))
    hyperparam_search_domain = pythonTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
    multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(num_hyper_multistart)
    best_f = np.inf
    cov = SquareExponential(prior_mean)
    # Multistart local optimization; keep the lowest-objective result.
    for i in range(num_hyper_multistart):
        hyper, f, output = hyper_opt(cov, data=data, init_hyper=multistart_pts[i, :],
                                     hyper_bounds=hyper_bounds, approx_grad=False, hyper_prior=prior)
        if f < best_f:
            best_hyper = hyper
            best_f = f
    print('best_hyper=' + str(best_hyper))
    print('best_f= ' + str(best_f))
    print("prior mean is: {0}".format(prior_mean))
    # Append this dataset's best hyperparameters to the flat result vector.
    hyper_result = np.concatenate((hyper_result, best_hyper))
sql_util.write_array_to_table("mei_hyper_{0}".format(obj_func_min.getFuncName()), hyper_result)
# One row of fitted hyperparameters per information source (IS):
# obj_func_min.getDim() + 1 values per row (signal variance + length scales,
# matching the SquareExponential parameterization used earlier in the file).
hyper_param = numpy.zeros((obj_func_min.getNumIS(), obj_func_min.getDim() + 1))
### Gen points for hyperparam estimation
for i in range(obj_func_min.getNumIS()):
    # Fresh dataset per IS: uniform random design, evaluated on IS i+1
    # (IS indices are 1-based in evaluate()/noise_and_cost_func()).
    data = HistoricalData(obj_func_min.getDim())
    pts = search_domain.generate_uniform_random_points_in_domain(
        num_pts_to_gen)
    vals = [obj_func_min.evaluate(i + 1, pt) for pt in pts]
    # [0] is presumably the noise variance — the name suggests the function
    # returns (noise, cost); verify against its definition.
    sample_vars = [
        obj_func_min.noise_and_cost_func(i + 1, pt)[0] for pt in pts
    ]
    data.append_historical_data(pts, vals, sample_vars)
    # hyperparam opt
    # NOTE(review): `hyper_bounds` and `cov` are not defined in this fragment;
    # they are carried over from earlier in the file — confirm they are in scope.
    hyperparam_search_domain = pythonTensorProductDomain(
        [ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
    multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(
        num_hyper_multistart)
    best_f = numpy.inf
    # Multistart local optimization; keep the lowest-objective hyperparameters.
    for k in range(num_hyper_multistart):
        hyper, f, output = hyper_opt(cov,
                                     data=data,
                                     init_hyper=multistart_pts[k, :],
                                     hyper_bounds=hyper_bounds,
                                     approx_grad=False)
        if f < best_f:
            # Copy: `hyper` may be mutated by subsequent optimizer runs.
            best_hyper = numpy.copy(hyper)
            best_f = f
    hyper_param[i, :] = best_hyper
# Persist all per-IS hyperparameters as one flattened row in the results table.
sql_util.write_array_to_table('multifidelity_ei_hyperparam_' + func_name,
                              hyper_param.flatten())
#TODO uncomment for real runs
# NOTE(review): the indented lines below are the tail of a per-IS sampling loop
# whose `for` header is not visible in this chunk — as written they sit at top
# level and are an IndentationError. They also reference `pts`, `data` and
# `obj_func_max`, so they belong to a different example than the code above
# (which uses `obj_func_min`). Restore the enclosing loop before running.
    vals = [obj_func_max.evaluate(i + 1, pt) for pt in pts]
    # Prepend the 1-based IS index as an extra leading column of each point.
    IS_pts = numpy.hstack(((i + 1) * numpy.ones(num_pts_to_gen).reshape(
        (-1, 1)), pts))
    sample_vars = [
        obj_func_max.noise_and_cost_func(i + 1, pt)[0] for pt in pts
    ]
    data.append_historical_data(IS_pts, vals, sample_vars)

# hyperparam opt
print "start hyperparam optimization..."
hyperparam_search_domain = pythonTensorProductDomain(
    [ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(
    num_hyper_multistart)
best_f = numpy.inf
cov = MixedSquareExponential(hyperparameters=multistart_pts[0, :],
                             total_dim=obj_func_max.getDim() + 1,
                             num_is=obj_func_max.getNumIS())
for i in range(num_hyper_multistart):
    hyper, f, output = hyper_opt(cov,
                                 data=data,
                                 init_hyper=multistart_pts[i, :],
                                 hyper_bounds=hyper_bounds,
                                 approx_grad=False)
    if f < best_f:
        best_hyper = hyper
        best_f = f
sql_util.write_array_to_table('multifidelity_kg_hyperparam_' + func_name,
                              best_hyper)
#TODO uncomment for real runs
# Ejemplo n.º 4  (scraped example marker — commented out so the file parses)
# 0
# EGO hyperparameter estimation: sample each information source uniformly,
# fit a SquareExponential GP by multistart likelihood optimization, and
# persist the best hyperparameters to the results table.
num_pts_to_gen = 250
intervals = [ClosedInterval(bound[0], bound[1]) for bound in obj_func_min._search_domain]
search_domain = pythonTensorProductDomain(intervals)
cov = SquareExponential(numpy.ones(obj_func_min._dim + 1))

### Gen points for hyperparam estimation
data = HistoricalData(obj_func_min._dim)
for i in range(obj_func_min._num_IS):
    # IS indices are 1-based in evaluate()/noise_and_cost_func().
    pts = search_domain.generate_uniform_random_points_in_domain(num_pts_to_gen)
    vals = [obj_func_min.evaluate(i + 1, pt) for pt in pts]
    sample_vars = [noise_and_cost_func(i + 1, pt)[0] for pt in pts]
    data.append_historical_data(pts, vals, sample_vars)
# hyperparam opt
hyperparam_search_domain = pythonTensorProductDomain(
    [ClosedInterval(bound[0], bound[1]) for bound in hyper_bounds])
multistart_pts = hyperparam_search_domain.generate_uniform_random_points_in_domain(num_hyper_multistart)
best_f = numpy.inf
# Multistart local optimization; retain the lowest-objective result.
for k in range(num_hyper_multistart):
    hyper, f, output = hyper_opt(cov, data=data, init_hyper=multistart_pts[k, :],
                                 hyper_bounds=hyper_bounds, approx_grad=False)
    if f < best_f:
        best_f = f
        best_hyper = hyper
sql_util.write_array_to_table('ego_hyperparam_' + func_name, best_hyper)