def test_ei_serial():
    max_evals = 50
    gp = GPRegressor(dim=ackley.dim)
    slhd = SymmetricLatinHypercube(dim=ackley.dim, num_pts=2 * (ackley.dim + 1))

    # Create a strategy and a controller
    controller = SerialController(ackley.eval)
    controller.strategy = EIStrategy(
        max_evals=max_evals, opt_prob=ackley, exp_design=slhd,
        surrogate=gp, asynchronous=True)

    controller.run()
    check_strategy(controller)
def test_ei_async():
    max_evals = 50
    gp = GPRegressor(dim=ackley.dim)
    slhd = SymmetricLatinHypercube(dim=ackley.dim, num_pts=2 * (ackley.dim + 1))

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = EIStrategy(
        max_evals=max_evals, opt_prob=ackley, exp_design=slhd,
        surrogate=gp, asynchronous=True, batch_size=None)

    for _ in range(num_threads):
        worker = BasicWorkerThread(controller, ackley.eval)
        controller.launch_worker(worker)

    controller.run()
    check_strategy(controller)
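# The two tests above rely on module-level fixtures (``ackley``, ``num_threads``,
# ``check_strategy``) defined elsewhere in the test suite. A minimal sketch of
# what they might look like, assuming pySOT's standard module layout
# (illustrative only, not the actual fixtures):
import numpy as np
from poap.controller import BasicWorkerThread, SerialController, ThreadController
from pySOT.experimental_design import SymmetricLatinHypercube
from pySOT.optimization_problems import Ackley
from pySOT.strategy import EIStrategy
from pySOT.surrogate import GPRegressor

num_threads = 4
ackley = Ackley(dim=10)


def check_strategy(controller):
    """Sanity-check a finished run: a best point exists and its value is finite."""
    result = controller.best_point()
    assert result is not None
    assert np.isfinite(result.value)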
def example_expected_improvement():
    if not os.path.exists("./logfiles"):
        os.makedirs("logfiles")
    if os.path.exists("./logfiles/example_simple.log"):
        os.remove("./logfiles/example_simple.log")
    logging.basicConfig(filename="./logfiles/example_simple.log",
                        level=logging.INFO)

    num_threads = 4
    max_evals = 100

    hart6 = Hartman6()
    gp = GPRegressor(dim=hart6.dim)
    slhd = SymmetricLatinHypercube(dim=hart6.dim, num_pts=2 * (hart6.dim + 1))

    # Create a strategy and a controller
    controller = ThreadController()
    controller.strategy = EIStrategy(
        max_evals=max_evals, opt_prob=hart6, exp_design=slhd,
        surrogate=gp, asynchronous=True)

    print("Number of threads: {}".format(num_threads))
    print("Maximum number of evaluations: {}".format(max_evals))
    print("Strategy: {}".format(controller.strategy.__class__.__name__))
    print("Experimental design: {}".format(slhd.__class__.__name__))
    print("Surrogate: {}".format(gp.__class__.__name__))

    # Launch the threads and give them access to the objective function
    for _ in range(num_threads):
        worker = BasicWorkerThread(controller, hart6.eval)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()

    print('Best value found: {0}'.format(result.value))
    print('Best solution found: {0}\n'.format(
        np.array_str(result.params[0], max_line_width=np.inf,
                     precision=5, suppress_small=True)))
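# The expected-improvement example above additionally needs ``os``, ``logging``,
# ``numpy`` and ``Hartman6`` at module level (a sketch assuming pySOT's standard
# module layout), and can then be run directly:
import logging
import os

import numpy as np
from pySOT.optimization_problems import Hartman6

if __name__ == "__main__":
    example_expected_improvement()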
def pysot_cube(objective, n_trials, n_dim, with_count=False, method=None,
               design=None):
    """Minimize an objective over the unit cube [0, 1]**n_dim with pySOT.

    :param objective: callable mapping a point in the unit cube to a float
    :param n_trials: maximum number of function evaluations
    :param n_dim: problem dimension
    :param with_count: if True, also return the number of function evaluations
    :param method: strategy name ('srbf', 'ei', 'dycors', 'lcb' or 'random')
    :param design: experimental design ('latin', 'symmetric' or 'factorial')
    :return: (best value, best point) or, with ``with_count``,
        (best value, best point, feval count)
    """
    logging.getLogger('pySOT').setLevel(logging.ERROR)

    num_threads = 1
    asynchronous = True
    max_evals = n_trials
    gp = GenericProblem(dim=n_dim, objective=objective)

    if design == 'latin':
        exp_design = LatinHypercube(dim=n_dim, num_pts=2 * (n_dim + 1))
    elif design == 'symmetric':
        exp_design = SymmetricLatinHypercube(dim=n_dim, num_pts=2 * (n_dim + 1))
    elif design == 'factorial':
        exp_design = TwoFactorial(dim=n_dim)
    else:
        raise ValueError('design should be latin, symmetric or factorial')

    # Create a strategy and a controller
    # (SRBFStrategy, EIStrategy, DYCORSStrategy, RandomStrategy, LCBStrategy)
    controller = ThreadController()
    if method.lower() == 'srbf':
        surrogate = RBFInterpolant(dim=n_dim, lb=np.array([0.0] * n_dim),
                                   ub=np.array([1.0] * n_dim),
                                   kernel=CubicKernel(), tail=LinearTail(n_dim))
        controller.strategy = SRBFStrategy(
            max_evals=max_evals, opt_prob=gp, exp_design=exp_design,
            surrogate=surrogate, asynchronous=asynchronous)
    elif method.lower() == 'ei':
        surrogate = GPRegressor(dim=n_dim, lb=np.array([0.0] * n_dim),
                                ub=np.array([1.0] * n_dim))
        controller.strategy = EIStrategy(
            max_evals=max_evals, opt_prob=gp, exp_design=exp_design,
            surrogate=surrogate, asynchronous=asynchronous)
    elif method.lower() == 'dycors':
        surrogate = RBFInterpolant(dim=n_dim, lb=np.array([0.0] * n_dim),
                                   ub=np.array([1.0] * n_dim),
                                   kernel=CubicKernel(), tail=LinearTail(n_dim))
        controller.strategy = DYCORSStrategy(
            max_evals=max_evals, opt_prob=gp, exp_design=exp_design,
            surrogate=surrogate, asynchronous=asynchronous)
    elif method.lower() == 'lcb':
        surrogate = GPRegressor(dim=n_dim, lb=np.array([0.0] * n_dim),
                                ub=np.array([1.0] * n_dim))
        controller.strategy = LCBStrategy(
            max_evals=max_evals, opt_prob=gp, exp_design=exp_design,
            surrogate=surrogate, asynchronous=asynchronous)
    elif method.lower() == 'random':
        controller.strategy = RandomStrategy(max_evals=max_evals, opt_prob=gp)
    else:
        raise ValueError("Didn't recognize method passed to pysot")

    # Launch the threads and give them access to the objective function
    for _ in range(num_threads):
        worker = BasicWorkerThread(controller, gp.eval)
        controller.launch_worker(worker)

    # Run the optimization strategy
    result = controller.run()
    best_x = result.params[0].tolist()
    return (result.value, best_x, gp.feval_count) if with_count \
        else (result.value, best_x)
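# A hedged usage sketch of ``pysot_cube``: the objective is assumed to accept a
# point in the unit cube [0, 1]**n_dim as an array-like of floats. ``sphere``
# below is a made-up toy objective, not part of the library.
def sphere(u):
    return float(sum((x - 0.5) ** 2 for x in u))


best_val, best_x, n_evals = pysot_cube(
    sphere, n_trials=40, n_dim=3, with_count=True,
    method="dycors", design="latin")
print(best_val, best_x, n_evals)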
def setup_backend(
    self,
    params,
    strategy="SRBF",
    surrogate="RBF",
    design=None,
):
    self.opt_problem = BBoptOptimizationProblem(params)

    # Experimental design
    design_kwargs = dict(dim=self.opt_problem.dim)
    if design is None:
        self.exp_design = EmptyExperimentalDesign(**design_kwargs)
    elif design == "latin_hypercube":
        self.exp_design = LatinHypercube(
            num_pts=2 * (self.opt_problem.dim + 1), **design_kwargs)
    elif design == "symmetric_latin_hypercube":
        self.exp_design = SymmetricLatinHypercube(
            num_pts=2 * (self.opt_problem.dim + 1), **design_kwargs)
    elif design == "two_factorial":
        self.exp_design = TwoFactorial(**design_kwargs)
    elif callable(design):
        self.exp_design = design(**design_kwargs)
    else:
        raise TypeError("unknown experimental design {!r}".format(design))

    # Surrogate model
    surrogate_kwargs = dict(
        dim=self.opt_problem.dim,
        lb=self.opt_problem.lb,
        ub=self.opt_problem.ub,
    )
    if surrogate == "RBF":
        self.surrogate = RBFInterpolant(
            kernel=LinearKernel() if design is None else CubicKernel(),
            tail=ConstantTail(self.opt_problem.dim) if design is None
            else LinearTail(self.opt_problem.dim),
            **surrogate_kwargs)
    elif surrogate == "GP":
        self.surrogate = GPRegressor(**surrogate_kwargs)
    elif callable(surrogate):
        self.surrogate = surrogate(**surrogate_kwargs)
    else:
        raise TypeError("unknown surrogate {!r}".format(surrogate))

    # Optimization strategy
    strategy_kwargs = dict(
        max_evals=sys.maxsize,
        opt_prob=self.opt_problem,
        exp_design=self.exp_design,
        surrogate=self.surrogate,
        asynchronous=True,
        batch_size=1,
    )
    if strategy == "SRBF":
        self.strategy = SRBFStrategy(**strategy_kwargs)
    elif strategy == "EI":
        self.strategy = EIStrategy(**strategy_kwargs)
    elif strategy == "DYCORS":
        self.strategy = DYCORSStrategy(**strategy_kwargs)
    elif strategy == "LCB":
        self.strategy = LCBStrategy(**strategy_kwargs)
    elif callable(strategy):
        self.strategy = strategy(**strategy_kwargs)
    else:
        raise TypeError("unknown strategy {!r}".format(strategy))
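# For illustration, the callable branches above let a custom pySOT class be
# passed straight through instead of a string name. A hedged usage sketch:
# ``PySOTBackend`` and ``params`` are hypothetical placeholders for the
# enclosing backend class and its parameter definitions, not names from the
# code above.
from pySOT.strategy import DYCORSStrategy

backend = PySOTBackend()
backend.setup_backend(
    params,
    strategy=DYCORSStrategy,  # any callable accepting the strategy kwargs
    surrogate="GP",
    design="symmetric_latin_hypercube",
)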