def main(): """ Main function. """ # pylint: disable=too-many-locals # pylint: disable=maybe-no-member np.random.seed(0) reporter = BasicReporter() Z_tr, X_tr, Y_tr, ZX_tr, Z_te, X_te, Y_te, ZX_te = get_data() sgp_options = load_options(gp_instances.all_simple_gp_args, 'GP', reporter=reporter) mfgp_options = load_options(mf_gp.all_mf_gp_args, 'MFGP', reporter=reporter) mfgp_options.mean_func_type = 'median' # Fit the GPs. sgp_fitter = gp_instances.SimpleGPFitter(ZX_tr, Y_tr, sgp_options, reporter=reporter) sgp, opt_s = sgp_fitter.fit_gp() mfgp_fitter = mf_gp.MFGPFitter(Z_tr, X_tr, Y_tr, mfgp_options, reporter=reporter) mfgp, opt_mf = mfgp_fitter.fit_gp() opt_s = (np.array(opt_s).round(4)) opt_mf = (np.array(opt_mf).round(4)) s_bounds = sgp_fitter.hp_bounds.round(3) mf_bounds = mfgp_fitter.hp_bounds.round(3) # Print out some fitting statistics _print_str_results(reporter, 'Opt-pts', str(opt_s), str(opt_mf)) _print_str_results(reporter, 'Opt-bounds', str(s_bounds), str(mf_bounds)) # The marginal likelihoods sgp_lml = sgp.compute_log_marginal_likelihood() mfgp_lml = mfgp.compute_log_marginal_likelihood() _print_float_results(reporter, 'Log_Marg_Like', sgp_lml, mfgp_lml) # Train errors s_pred, _ = sgp.eval(ZX_tr) mf_pred, _ = mfgp.eval_at_fidel(Z_tr, X_tr) sgp_tr_err = compute_average_sq_prediction_error(Y_tr, s_pred) mfgp_tr_err = compute_average_sq_prediction_error(Y_tr, mf_pred) _print_float_results(reporter, 'Train Error', sgp_tr_err, mfgp_tr_err) # Test errors s_pred, _ = sgp.eval(ZX_te) mf_pred, _ = mfgp.eval_at_fidel(Z_te, X_te) sgp_te_err = compute_average_sq_prediction_error(Y_te, s_pred) mfgp_te_err = compute_average_sq_prediction_error(Y_te, mf_pred) _print_float_results(reporter, 'Test Error', sgp_te_err, mfgp_te_err)
def _get_ga_optimiser_args(cls, obj, num_evals, mutation_op, init_pool,
                           init_pool_vals=None, expects_inputs_to_be_iterable=True):
  """ Returns arguments for the optimiser. """
  if expects_inputs_to_be_iterable:
    def _obj_wrap(_obj, _x):
      """ A wrapper for the optimiser for GA. """
      ret = _obj([_x])
      return ret[0]
    def _get_obj_wrap(_obj):
      """ Returns an optimiser for GA. """
      return lambda x: _obj_wrap(_obj, x)
    obj = _get_obj_wrap(obj)
  if init_pool_vals is None:
    init_pool_vals = [obj(nn) for nn in init_pool]
  reporter = get_reporter('silent')
  options = load_options(ga_opt_args, reporter=reporter)
  options.pre_eval_points = init_pool
  options.pre_eval_vals = init_pool_vals
  options.pre_eval_true_vals = init_pool_vals
  options.num_mutations_per_epoch = int(np.clip(3 * np.sqrt(num_evals), 5, 100))
  # print 'GA opt args: ', num_evals, options.num_mutations_per_epoch
  return obj, options, reporter, mutation_op
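# A minimal usage sketch for _get_ga_optimiser_args. The toy objective and
# mutation operator are hypothetical; the cls argument is unused in the body,
# so None suffices when calling the helper directly.
def _demo_get_ga_optimiser_args():
  """ Wraps a batch objective so the GA optimiser can evaluate single points. """
  batch_obj = lambda xs: [sum(x) for x in xs]  # evaluates a list of points
  toy_mutation_op = lambda pool: pool          # identity mutation, for illustration
  init_pool = [[0.0], [1.0], [2.0]]
  obj, options, reporter, mut_op = _get_ga_optimiser_args(
      None, batch_obj, num_evals=50, mutation_op=toy_mutation_op,
      init_pool=init_pool)
  assert obj([0.5]) == 0.5  # the wrapped objective now takes a single point
  return options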
def test_initial_sampling(self):
  """ Test for initialisation sampling. """
  self.report('Testing sample initialisation.')
  prob = _get_gpb_problem()
  acquisitions = ['mf_gp_ucb', 'gp_ucb', 'gp_ei']
  options = option_handler.load_options(mf_gp_bandit.all_mf_gp_bandit_args,
                                        reporter=prob.reporter)
  for acq in acquisitions:
    options.acq = acq
    options.gpb_init_capital = prob.mfof.opt_fidel_cost * 23.2
    mfgpb = mf_gp_bandit.MFGPBandit(prob.mfof, options, prob.reporter)
    mfgpb.optimise_initialise()
    hf_idxs = are_opt_fidel_queries(mfgpb.history.query_fidels, prob.mfof.opt_fidel)
    hf_vals = mfgpb.history.query_vals[hf_idxs]
    num_hf_queries = len(hf_vals)
    self.report(('Initialised %s with %d queries (%d at opt_fidel). Init capital = ' +
                 '%0.4f (%0.4f used) ') % (acq, len(mfgpb.history.query_vals),
                                           num_hf_queries, options.gpb_init_capital,
                                           mfgpb.spent_capital), 'test_result')
    assert mfgpb.spent_capital <= 1.1 * options.gpb_init_capital
    assert mfgpb.history.curr_opt_vals[-1] == mfgpb.gpb_opt_val
    assert is_non_decreasing_sequence(mfgpb.history.curr_opt_vals)
    assert num_hf_queries == 0 or hf_vals.max() == mfgpb.gpb_opt_val
    assert mfgpb.num_opt_fidel_queries == num_hf_queries
    assert mfgpb.history.query_at_opt_fidel.sum() == num_hf_queries
def optimize_chemist(func_caller, worker_manager, budget, mode=None,
                     init_pool=None, acq='hei', options=None, reporter='default'):
  """ Chemist optimisation from a function caller. """
  if options is None:
    reporter = get_reporter(reporter)
    options = load_options(all_chemist_args, reporter=reporter)
    # TODO: what is this option?
  if acq is not None:
    options.acq = acq
  if mode is not None:
    options.mode = mode
  # Initial queries
  if not hasattr(options, 'pre_eval_points') or options.pre_eval_points is None:
    if init_pool is None:
      init_pool = get_initial_pool()
    options.get_initial_points = lambda n: init_pool[:n]
  return Chemist(func_caller, worker_manager, options=options,
                 reporter=reporter).optimise(budget)
def __init__(self, func_caller, worker_manager, mutation_op, crossover_op=None,
             options=None, reporter=None):
  """ Constructor.
      mutation_op: A function which takes in a list of objects and modifies them.
      crossover_op: A function which takes in two objects and performs a cross-over
                    operation. Cross-over is not implemented yet, but the argument is
                    included here in case we want to add it in the future.
      For other arguments, see BlackboxOptimiser.
  """
  if options is None:
    reporter = get_reporter(reporter)
    options = load_options(ga_opt_args, reporter=reporter)
  super(GAOptimiser, self).__init__(func_caller, worker_manager, model=None,
                                    options=options, reporter=reporter)
  self.mutation_op = mutation_op
  self.crossover_op = crossover_op
  self.to_eval_points = []
def mf_random_optimiser_from_func_caller(func_caller, worker_manager, max_capital,
                                         mode, options=None, reporter='default',
                                         *args, **kwargs):
  """ Creates an MFEuclideanRandomOptimiser object and optimises the function. """
  reporter = get_reporter(reporter)
  if isinstance(func_caller.domain, domains.EuclideanDomain) and \
     isinstance(func_caller.fidel_space, domains.EuclideanDomain):
    optimiser_constructor = MFEuclideanRandomOptimiser
    dflt_list_of_options = mf_euclidean_random_optimiser_args
  else:
    raise ValueError(('MF Random optimiser not implemented for (domain, fidel_space) ' +
                      'of types (%s, %s).') % (type(func_caller.domain),
                                               type(func_caller.fidel_space)))
  # Load options.
  if options is None:
    options = load_options(dflt_list_of_options)
  options.mode = mode
  # Create the optimiser, optimise and return.
  optimiser = optimiser_constructor(func_caller, worker_manager, options=options,
                                    reporter=reporter, *args, **kwargs)
  return optimiser.optimise(max_capital)
def nasbot(func_caller, worker_manager, budget, tp_comp=None, mode=None,
           init_pool=None, acq='hei', options=None, reporter='default'):
  """ NASBOT optimisation from a function caller. """
  nn_type = func_caller.domain.nn_type
  if options is None:
    reporter = get_reporter(reporter)
    options = load_options(all_nasbot_args, reporter=reporter)
  if acq is not None:
    options.acq = acq
  if mode is not None:
    options.mode = mode
  if tp_comp is None:
    tp_comp = get_default_otmann_distance(nn_type, 1.0)
  # Initial queries
  if not hasattr(options, 'pre_eval_points') or options.pre_eval_points is None:
    if init_pool is None:
      init_pool = get_initial_pool(nn_type)
    options.get_initial_points = lambda n: init_pool[:n]
  return NASBOT(func_caller, worker_manager, tp_comp, options=options,
                reporter=reporter).optimise(budget)
def main(): """ Maximizes a function given a config file containing the hyperparameters and the corresponding domain bounds. """ # Loading Options euc_gpb_args = get_all_gp_bandit_args_from_gp_args(euclidean_gp_args) options = load_options(euc_gpb_args + dragonfly_args, cmd_line=True) if options.config is None: raise ValueError('Config file is required.') # Parsing config file expt_dir = os.path.dirname( os.path.abspath(os.path.realpath(options.config))) if not os.path.exists(expt_dir): raise ValueError("Experiment directory does not exist.") param_spec = config_parser(options.config) exp_info = param_spec['exp_info'] obj = imp.load_source(exp_info['name'], os.path.join(expt_dir, exp_info['name'] + '.py')) options.capital_type = 'return_value' if options.max_capital == 0.0: if options.budget == 0.0: raise ValueError('Specify the budget in budget or max_capital.') options.max_capital = options.budget domain = create_domain(param_spec['domain']) opt_val, opt_pt, history = maximise_function( obj.main, domain=domain, options=options, max_capital=options.max_capital) print('Optimum Value in %d evals: %0.4f' % (options.max_capital, opt_val)) print('Optimum Point: %s' % (opt_pt))
def optimise_with_method_on_func_caller(method, func_caller, worker_manager,
                                        max_capital, options=None, reporter='default'):
  """ A wrapper that can be used to call all optimisation methods. """
  if options is None:
    from gp_bandit import all_gp_bandit_args
    reporter = get_reporter(reporter)
    options = load_options(all_gp_bandit_args, reporter=reporter)
  # The mode is specified in the first three letters of the method argument.
  options.mode = method[0:3]
  if method in ['synRAND', 'asyRAND']:
    return random_optimiser_from_func_caller(func_caller, worker_manager, max_capital,
                                             options=options, reporter=reporter)
  elif method in ['asyTS', 'synTS', 'asyHTS', 'asyUCB', 'asyBUCB', 'synBUCB',
                  'synUCBPE', 'asyEI']:
    from gp_bandit import gpb_from_func_caller
    options.acq = method[3:]
    return gpb_from_func_caller(func_caller, worker_manager, max_capital,
                                options=options, reporter=reporter)
  else:
    raise ValueError('Unknown method %s!' % (method))
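# Usage sketch: method strings are '<mode><acquisition>', e.g. 'asyEI' runs an
# asynchronous GP bandit with EI. The function caller and worker manager below
# mirror those constructed in maximise_function further down and are assumed to
# be in scope.
def _demo_optimise_with_method():
  """ Runs 'asyEI' on a 1-D concave quadratic for a small budget. """
  domain = domains.EuclideanDomain([[0.0, 1.0]])
  func_caller = EuclideanFunctionCaller(lambda x: -(x[0] - 0.5) ** 2, domain,
                                        vectorised=False)
  worker_manager = SyntheticWorkerManager(num_workers=1,
                                          time_distro='caller_eval_cost')
  return optimise_with_method_on_func_caller('asyEI', func_caller, worker_manager,
                                             max_capital=20)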
def __init__(self, constraint_checker=None, options=None, reporter=None):
  """ Constructor. """
  self.reporter = get_reporter(reporter)
  self.constraint_checker = constraint_checker
  if options is None:
    options = load_options(nn_modifier_args)
  self.options = options
def main(): """ Main function. """ options = load_options(all_mf_gp_bandit_args) prob, options = get_problem_parameters(options) # Set other variables all_methods = NONFINITE_METHODS + FINITE_METHODS method_options = {key: options for key in all_methods} noisy_str = 'noiseless' if not NOISY else 'noisy%0.3f' % ( prob.mfof.noise_var) save_file_prefix = '%s-%s-p%d-d%d' % (prob.experiment_name, noisy_str, prob.mfof.fidel_dim, prob.mfof.domain_dim) reporter = get_reporter('default') experimenter = MFOptExperimenter(prob.experiment_name, prob.mfof, prob.max_capital, all_methods, prob.num_experiments, SAVE_RESULTS_DIR, save_file_prefix=save_file_prefix, method_options=method_options, method_reporter=reporter, reporter=reporter) experimenter.run_experiments()
def __init__(self, options, reporter='default'):
  """ Constructor. """
  super(GPFitter, self).__init__()
  self.reporter = get_reporter(reporter)
  if isinstance(options, list):
    options = load_options(options, 'GP', reporter=self.reporter)
  self.options = options
  self._set_up()
def _get_euc_gpb_arguments(cls, num_dims):
  """ Returns options for GP Bandits on a Euclidean space. """
  reporter = reporters.get_reporter('silent')
  gpb_args = gp_bandit.get_all_gp_bandit_args_from_gp_args(all_simple_gp_args)
  options = load_options(gpb_args, reporter=reporter)
  options.get_initial_points = lambda n: np.random.random((n, num_dims))
  options.num_init_evals = 20
  return options
def gpb_from_func_caller(func_caller, worker_manager, max_capital, is_mf, mode=None,
                         acq=None, mf_strategy=None, domain_add_max_group_size=-1,
                         options=None, reporter='default'):
  """ GP Bandit optimisation from a utils.function_caller.FunctionCaller instance.
      domain_add_max_group_size indicates whether we should use an additive model.
      If it is negative, we use a non-additive model. If it is positive, we use an
      additive model with the given maximum group size. If it is zero, we use the
      default in options.
  """
  # pylint: disable=too-many-branches
  reporter = get_reporter(reporter)
  if is_mf:
    if isinstance(func_caller.fidel_space, domains.EuclideanDomain) and \
       isinstance(func_caller.domain, domains.EuclideanDomain):
      optimiser_constructor = EuclideanGPBandit
      dflt_list_of_options = get_all_mf_gp_bandit_args_from_gp_args(
          euclidean_mf_gp_args)
    else:
      raise ValueError(('GP bandit optimisation not implemented for (fidel_space, ' +
                        'domain) of types (%s, %s).') % (type(func_caller.fidel_space),
                                                         type(func_caller.domain)))
  else:
    if isinstance(func_caller.domain, domains.EuclideanDomain):
      optimiser_constructor = EuclideanGPBandit
      dflt_list_of_options = get_all_gp_bandit_args_from_gp_args(euclidean_gp_args)
    else:
      raise ValueError('GP bandit optimisation not implemented for domain of type %s.'
                       % (type(func_caller.domain)))
  # Load options.
  if options is None:
    options = load_options(dflt_list_of_options, reporter=reporter)
  if acq is not None:
    options.acq = acq
  if mode is not None:
    options.mode = mode
  if mf_strategy is not None:
    options.mf_strategy = mf_strategy
  # Additive model.
  if domain_add_max_group_size >= 0:
    if is_mf:
      options.domain_use_additive_gp = True
      if domain_add_max_group_size > 0:
        options.domain_add_max_group_size = domain_add_max_group_size
    else:
      options.use_additive_gp = True
      if domain_add_max_group_size > 0:
        options.add_max_group_size = domain_add_max_group_size
  # Create the optimiser and return.
  optimiser = optimiser_constructor(func_caller, worker_manager, is_mf=is_mf,
                                    options=options, reporter=reporter)
  return optimiser.optimise(max_capital)
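# Usage sketch for gpb_from_func_caller with the additive-model switch (the
# caller classes are assumed in scope, as in maximise_function below; a group
# size of 2 requests an additive GP over coordinate groups of at most two).
def _demo_additive_gpb():
  """ Runs a GP bandit with an additive GP on a 4-D concave quadratic. """
  domain = domains.EuclideanDomain([[-1.0, 1.0]] * 4)
  func_caller = EuclideanFunctionCaller(lambda x: -sum(xi ** 2 for xi in x),
                                        domain, vectorised=False)
  worker_manager = SyntheticWorkerManager(num_workers=1,
                                          time_distro='caller_eval_cost')
  return gpb_from_func_caller(func_caller, worker_manager, max_capital=20,
                              is_mf=False, domain_add_max_group_size=2)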
def maximise_function(func, max_capital, domain=None, domain_bounds=None,
                      config=None, options=None, hp_tune_criterion='post_sampling',
                      hp_tune_method='slice', init_capital=None,
                      init_capital_frac=None, num_init_evals=20):
  """ Maximises a function over the given domain (or domain bounds) and returns the
      optimal value, the optimal point and the history. """
  reporter = get_reporter('default')
  if options is None:
    euc_gpb_args = get_all_gp_bandit_args_from_gp_args(euclidean_gp_args)
    options = load_options(euc_gpb_args)
  options.gpb_hp_tune_criterion = hp_tune_criterion
  options.gpb_post_hp_tune_method = hp_tune_method
  options.init_capital = init_capital
  options.init_capital_frac = init_capital_frac
  options.num_init_evals = num_init_evals
  # Check the domain.
  if domain is None:
    if config is not None:
      param_spec = config_parser(config)
      domain = create_domain(param_spec['domain'])
    elif domain_bounds is not None:
      domain = domains.EuclideanDomain(domain_bounds)
    else:
      raise ValueError('Either domain, a path to a config file, or domain_bounds '
                       'must be given.')
  # Create the worker manager and function caller.
  worker_manager = SyntheticWorkerManager(num_workers=1,
                                          time_distro='caller_eval_cost')
  if isinstance(domain, domains.EuclideanDomain):
    func_caller = EuclideanFunctionCaller(func, domain, vectorised=False)
  else:
    func_caller = FunctionCaller(func, domain)
  # Create the GPBandit object and run the optimiser.
  gpb = EuclideanGPBandit(func_caller, worker_manager, reporter=reporter,
                          options=options)
  opt_val, opt_pt, history = gpb.optimise(max_capital)
  opt_pt = func_caller.get_raw_domain_coords(opt_pt)
  history.curr_opt_points = [func_caller.get_raw_domain_coords(pt)
                             for pt in history.curr_opt_points]
  history.query_points = [func_caller.get_raw_domain_coords(pt)
                          for pt in history.query_points]
  return opt_val, opt_pt, history
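# Example call on a toy 2-D objective (any bounded Euclidean function works;
# max_capital controls the optimisation budget).
def _demo_maximise_function():
  """ Maximises a concave quadratic; the optimum is near (0.3, 0.7). """
  obj = lambda x: -((x[0] - 0.3) ** 2 + (x[1] - 0.7) ** 2)
  opt_val, opt_pt, _ = maximise_function(obj, max_capital=30,
                                         domain_bounds=[[0.0, 1.0], [0.0, 1.0]])
  return opt_val, opt_pt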
def __init__(self, X, Y, options=None, reporter=None, *args, **kwargs):
  """ Constructor. """
  self.X = X
  self.Y = Y
  self.reporter = get_reporter(reporter)
  self.num_data = len(X)
  if options is None:
    options = load_options(mol_gp_args, 'GPFitter', reporter=reporter)
  super(MolGPFitter, self).__init__(options, *args, **kwargs)
def __init__(self, X, Y, options=None, reporter=None):
  """ Constructor. """
  self.dim = len(X[0])
  reporter = get_reporter(reporter)
  if options is None:
    options = load_options(euclidean_gp_args, 'EuclideanGP', reporter=reporter)
  super(EuclideanGPFitter, self).__init__(X, Y, options, reporter)
def prep_optimiser_args(obj_func, optimiser_args):
  """ Returns the options and reporter. """
  reporter = get_reporter('default')
  options = load_options(optimiser_args, reporter=reporter)
  options.pre_eval_points = get_initial_pool()
  options.pre_eval_vals = [obj_func(mol) for mol in options.pre_eval_points]
  options.pre_eval_true_vals = options.pre_eval_vals
  options_clone = deepcopy(options)
  return options, options_clone, reporter
def _get_optimiser_args(self, optimiser_args=chemist.all_chemist_args):
  """ Returns the options and reporter. """
  reporter = get_reporter('default')
  options = load_options(optimiser_args, reporter=reporter)
  options.pre_eval_points = self.ga_init_pool
  options.pre_eval_vals = self.ga_init_vals
  options.pre_eval_true_vals = self.ga_init_vals
  options_clone = deepcopy(options)
  return options, options_clone, reporter
def fit_molgp_with_dataset(dataset, kernel_type):
  """ Fits a MolGP to this dataset. """
  options = load_options(mol_gp.mol_gp_args, '')
  options.kernel_type = kernel_type
  gp_fitter = mol_gp.MolGPFitter(dataset[0], dataset[1], options=options,
                                 reporter=None)
  _, fitted_gp, _ = gp_fitter.fit_gp()
  return fitted_gp
def fit_se_gp_with_dataset(dataset):
  """ A wrapper to fit a GP with an SE kernel using the dataset. """
  options = load_options(euclidean_gp_args)
  options.kernel_type = 'se'
  ret_fit_gp = EuclideanGPFitter(dataset[0], dataset[1], options=options,
                                 reporter=None).fit_gp()
  assert ret_fit_gp[0] == 'fitted_gp'
  return ret_fit_gp[1]
def __init__(self, ZZ, XX, YY, options=None, reporter=None):
  """ Constructor. options should either be a Namespace, a list or None. """
  reporter = get_reporter(reporter)
  if options is None:
    options = load_options(euclidean_mf_gp_args, 'MF-GP', reporter)
  self.fidel_dim = len(ZZ[0])
  self.domain_dim = len(XX[0])
  self.input_dim = self.fidel_dim + self.domain_dim
  super(EuclideanMFGPFitter, self).__init__(ZZ, XX, YY, options, reporter)
def __init__(self, mf_opt_func, options=None, reporter=None):
  """ Constructor. """
  self.reporter = get_reporter(reporter)
  if options is None:
    options = load_options(all_mf_gp_bandit_args, reporter=reporter)
  self.options = options
  # Set up mfgp and mfof attributes.
  self.mfof = mf_opt_func  # mfof refers to an MFOptFunction object.
  self.mfgp = None
  # Other set up.
  self._set_up()
def fit_matern_gp_with_dataset(dataset, nu=-1.0):
  """ A wrapper to fit a GP with a Matern kernel using the dataset. """
  options = load_options(euclidean_gp_args)
  options.kernel_type = 'matern'
  options.matern_nu = nu
  ret_fit_gp = EuclideanGPFitter(dataset[0], dataset[1], options=options,
                                 reporter=None).fit_gp()
  assert ret_fit_gp[0] == 'fitted_gp'
  return ret_fit_gp[1]
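# Usage sketch for the fitter wrappers above on a small synthetic dataset (the
# (X, Y) tuple layout matches how dataset[0] and dataset[1] are indexed).
def _demo_fit_gps():
  """ Fits SE and Matern GPs to 20 noiseless 1-D observations. """
  X = list(np.random.random((20, 1)))
  Y = [float(np.sin(5 * x[0])) for x in X]
  se_gp = fit_se_gp_with_dataset((X, Y))
  matern_gp = fit_matern_gp_with_dataset((X, Y), nu=2.5)
  return se_gp, matern_gp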
def __init__(self, ZZ, XX, YY, options=None, reporter=None):
  """ Constructor. """
  reporter = get_reporter(reporter)
  if isinstance(options, list):
    # Note: use the local reporter here; self.reporter is not set until the
    # superclass constructor runs.
    options = load_options(options, 'MFGP', reporter=reporter)
  self.ZZ = ZZ
  self.XX = XX
  self.YY = YY
  self.num_tr_data = len(self.YY)
  ZX = get_ZX_from_ZZ_XX(ZZ, XX)
  super(MFGPFitter, self).__init__(ZX, YY, options, reporter)
def __init__(self, func_caller, worker_manager, options=None, reporter=None):
  """ Constructor. """
  self.reporter = get_reporter(reporter)
  if options is None:
    options = load_options(all_gp_bandit_args, reporter=reporter)
  super(GPBandit, self).__init__(func_caller, worker_manager, options, self.reporter)
def get_options_and_reporter(method, init_points, init_vals):
  """ Returns the options and reporter. """
  reporter = get_reporter('default')
  if method in ['GA', 'randGA']:
    options = load_options(ga_optimiser.ga_opt_args, reporter=reporter)
  else:
    raise ValueError('Unknown method %s.' % (method))
  options.pre_eval_points = init_points
  options.pre_eval_vals = init_vals
  options.pre_eval_true_vals = init_vals
  return options, reporter
def __init__(self, func_caller, worker_manager, options=None, reporter=None):
  """ Constructor. """
  self.reporter = get_reporter(reporter)
  if options is None:
    options = load_options(blackbox_opt_args, reporter=reporter)
  super(RandomOptimiser, self).__init__(func_caller, worker_manager, options,
                                        self.reporter)
def __init__(self, func_caller, worker_manager, options=None, reporter=None):
  """ Constructor. """
  if options is None:
    reporter = get_reporter(reporter)
    options = load_options(all_chemist_args, reporter=reporter)
  super(Chemist, self).__init__(func_caller, worker_manager, options=options,
                                reporter=reporter)
def fit_nngp_with_dataset(dataset, kernel_type, dist_type):
  """ Fits an NNGP to this dataset. """
  options = load_options(nn_gp.nn_gp_args, '')
  options.kernel_type = kernel_type
  options.dist_type = dist_type
  gp_fitter = nn_gp.NNGPFitter(dataset[0], dataset[1], dataset[-1], options=options,
                               reporter=None)
  _, fitted_gp, _ = gp_fitter.fit_gp()
  return fitted_gp