def create_result(Xi, yi, space=None, rng=None, specs=None, models=None,
                  model_mu=None, model_std=None, gurobi_mipgap=None):
    """
    Initialize an `OptimizeResult` object.

    Parameters
    ----------
    Xi : list of lists, shape (n_iters, n_features)
        Location of the minimum at every iteration.

    yi : array-like, shape (n_iters,)
        Minimum value obtained at every iteration.

    space : Space instance, optional
        Search space.

    rng : RandomState instance, optional
        State of the random state.

    specs : dict, optional
        Call specifications.

    models : list, optional
        List of fit surrogate models.

    model_mu : array-like, optional
        Surrogate model mean predictions; stored on the result unchanged.

    model_std : array-like, optional
        Surrogate model standard deviations; stored on the result unchanged.

    gurobi_mipgap : array-like, optional
        Gurobi MIP gaps per iteration; stored on the result unchanged.

    Returns
    -------
    res : `OptimizeResult`, scipy object
        OptimizeResult instance with the required information.
    """
    res = OptimizeResult()
    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = yi
    res.x_iters = Xi
    res.models = models
    res.model_mu = model_mu
    res.model_std = model_std
    res.gurobi_mipgap = gurobi_mipgap
    res.space = space
    res.random_state = rng
    res.specs = specs
    return res
def create_result(Xi, yi, space=None, rng=None, specs=None, models=None):
    """
    Initialize an `OptimizeResult` object.

    Parameters
    ----------
    * `Xi` [list of lists, shape=(n_iters, n_features)]:
        Location of the minimum at every iteration.

    * `yi` [array-like, shape=(n_iters,)]:
        Minimum value obtained at every iteration.

    * `space` [Space instance, optional]:
        Search space.

    * `rng` [RandomState instance, optional]:
        State of the random state.

    * `specs` [dict, optional]:
        Call specifications.

    * `models` [list, optional]:
        List of fit surrogate models.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        OptimizeResult instance with the required information.
    """
    res = OptimizeResult()
    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = yi
    res.x_iters = Xi
    res.models = models
    res.space = space
    res.random_state = rng
    res.specs = specs
    return res
def create_result(xi, yi, space=None, rs=None, specs=None, models=None):
    """Initialize an `OptimizeResult` from evaluated points `xi` and values `yi`."""
    res = OptimizeResult()
    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])
    best = np.argmin(yi)
    res.x = xi[best]
    res.fun = yi[best]
    res.func_vals = yi
    res.x_iters = xi
    res.models = models
    res.space = space
    res.random_state = rs
    res.specs = specs
    return res
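# A minimal usage sketch for the `create_result` helper above (not part of
# the original module). It assumes `numpy` and `scipy.optimize.OptimizeResult`
# are already imported at module level, as the helper itself requires.
xi = [[0.1], [0.5], [0.9]]   # three evaluated points
yi = [3.0, 1.0, 2.0]         # their objective values

res = create_result(xi, yi)
print(res.x, res.fun)        # -> [0.5] 1.0, the argmin of `yi`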
def dummy_minimize(func, dimensions, n_calls=100,
                   x0=None, y0=None, random_state=None):
    """Random search by uniform sampling within the given bounds.

    Parameters
    ----------
    * `func` [callable]:
        Function to minimize. Should take an array of parameters and
        return the function value.

    * `dimensions` [list, shape=(n_dims,)]:
        List of search space dimensions.
        Each search dimension can be defined either as

        - a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
          dimensions),
        - a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
          dimensions),
        - as a list of categories (for `Categorical` dimensions), or
        - an instance of a `Dimension` object (`Real`, `Integer` or
          `Categorical`).

    * `n_calls` [int, default=100]:
        Number of calls to `func` to find the minimum.

    * `x0` [list, list of lists or `None`]:
        Initial input points.

        - If it is a list of lists, use it as a list of input points.
        - If it is a list, use it as a single initial input point.
        - If it is `None`, no initial input points are used.

    * `y0` [list, scalar or `None`]:
        Evaluation of initial input points.

        - If it is a list, then it corresponds to evaluations of the function
          at each element of `x0` : the i-th element of `y0` corresponds
          to the function evaluated at the i-th element of `x0`.
        - If it is a scalar, then it corresponds to the evaluation of the
          function at `x0`.
        - If it is `None` and `x0` is provided, then the function is
          evaluated at each element of `x0`.

    * `random_state` [int, RandomState instance, or None (default)]:
        Set random state to something other than None for reproducible
        results.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        The optimization result returned as a OptimizeResult object.
        Important attributes are:

        - `x` [list]: location of the minimum.
        - `fun` [float]: function value at the minimum.
        - `x_iters` [list of lists]: location of function evaluation for
          each iteration.
        - `func_vals` [array]: function value for each iteration.
        - `space` [Space]: the optimisation space.
        - `specs` [dict]: the call specifications.
        - `rng` [RandomState instance]: State of the random state
          at the end of minimization.

        For more details related to the OptimizeResult object, refer
        http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.OptimizeResult.html
    """
    # Save call args
    specs = {"args": copy.copy(inspect.currentframe().f_locals),
             "function": inspect.currentframe().f_code.co_name}

    # Check params
    rng = check_random_state(random_state)
    space = Space(dimensions)

    # Validate the type of `x0` before indexing into it.
    if x0 is None:
        x0 = []
    if not isinstance(x0, list):
        raise ValueError("`x0` should be a list, got %s" % type(x0))
    if x0 and not isinstance(x0[0], list):
        x0 = [x0]

    if len(x0) > 0 and y0 is not None:
        if isinstance(y0, Iterable):
            y0 = list(y0)
        elif isinstance(y0, numbers.Number):
            y0 = [y0]
        else:
            raise ValueError("`y0` should be an iterable or a scalar, got %s"
                             % type(y0))
        if len(x0) != len(y0):
            raise ValueError("`x0` and `y0` should have the same length")
        if not all(map(np.isscalar, y0)):
            raise ValueError("`y0` elements should be scalars")

    elif len(x0) > 0 and y0 is None:
        y0 = []
        n_calls -= len(x0)

    elif len(x0) == 0 and y0 is not None:
        raise ValueError("`x0` cannot be `None` when `y0` is provided")

    else:  # len(x0) == 0 and y0 is None
        y0 = []

    X = x0
    y = y0

    # Random search
    X = X + space.rvs(n_samples=n_calls, random_state=rng)
    first = True

    for i in range(len(y0), len(X)):
        y_i = func(X[i])

        if first:
            first = False
            if not np.isscalar(y_i):
                raise ValueError("`func` should return a scalar")

        y.append(y_i)

    y = np.array(y)

    # Pack results
    res = OptimizeResult()
    best = np.argmin(y)
    res.x = X[best]
    res.fun = y[best]
    res.func_vals = y
    res.x_iters = X
    res.models = []  # Create attribute even though it is empty
    res.space = space
    res.random_state = rng
    res.specs = specs

    return res
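# A minimal usage sketch for `dummy_minimize` above (illustrative, not part
# of the original module). The toy objective and its bounds are assumptions;
# any scalar-returning callable over the listed dimensions works.
def toy_objective(params):
    # Quadratic bowl with its minimum at (0.3, -0.1).
    x, y = params
    return (x - 0.3) ** 2 + (y + 0.1) ** 2

res = dummy_minimize(toy_objective,
                     dimensions=[(-2.0, 2.0), (-2.0, 2.0)],
                     n_calls=30, random_state=0)
print(res.x, res.fun)        # best sampled point and its value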
def create_result(Xi, yi, n_evaluations=None, space=None, rng=None,
                  specs=None, models=None, maximize=False):
    """
    Initialize an `OptimizeResult` object.

    Parameters
    ----------
    * `Xi` [list of lists, shape=(n_iters, n_features)]:
        Location of the minimum at every iteration.

    * `yi` [array-like, shape=(n_iters,)]:
        Minimum value obtained at every iteration.

    * `n_evaluations` [int, optional]:
        If given, only the `n_evaluations` best evaluations are stored in
        `func_vals` and `x_iters`; the full history is kept in
        `all_func_vals` and `all_x_iters`.

    * `space` [Space instance, optional]:
        Search space.

    * `rng` [RandomState instance, optional]:
        State of the random state.

    * `specs` [dict, optional]:
        Call specifications.

    * `models` [list, optional]:
        List of fit surrogate models.

    * `maximize` [bool, default=False]:
        If `True`, report the maximum of `yi` as the best value instead of
        the minimum.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        OptimizeResult instance with the required information.
    """
    res = OptimizeResult()

    try:
        # Hyperband returns evaluations as lists of lists.
        # We want to store the results as a single array.
        yi = list(itertools.chain.from_iterable(yi))
        Xi = list(itertools.chain.from_iterable(Xi))
    except TypeError:
        # All algorithms other than Hyperband already return a single list.
        pass

    yi = np.asarray(yi)
    if np.ndim(yi) == 2:
        res.log_time = np.ravel(yi[:, 1])
        yi = np.ravel(yi[:, 0])

    if maximize:
        best = np.argmax(yi)
    else:
        best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]

    if n_evaluations:
        unique, sort_indices = np.unique(yi, return_index=True)
        if len(unique) < n_evaluations:
            # Fewer unique values than requested: fall back to a full
            # argsort so that duplicated values are kept.
            func_sort_idx = np.argsort(yi)
            func_vals = sorted(yi)
            res.func_vals = np.asarray(func_vals[:n_evaluations])
            x_iter_sort = [Xi[idx] for idx in func_sort_idx]
            res.x_iters = np.asarray(x_iter_sort[:n_evaluations])
        else:
            func_vals = sorted(unique)
            res.func_vals = np.asarray(func_vals[:n_evaluations])
            x_iter_sort = [Xi[idx] for idx in sort_indices]
            res.x_iters = np.asarray(x_iter_sort[:n_evaluations])
        res.all_func_vals = np.asarray(yi)
        res.all_x_iters = np.asarray(Xi)
    else:
        res.func_vals = np.asarray(yi)
        res.x_iters = np.asarray(Xi)

    res.models = models
    res.space = space
    res.random_state = rng
    res.specs = specs
    return res
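# Sketch of the `n_evaluations` behaviour of the variant above (illustrative,
# not part of the original module): only the best evaluations are kept on
# `func_vals`/`x_iters`, while the full history moves to the `all_*` attributes.
xi = [[0.1], [0.5], [0.9]]
yi = [3.0, 1.0, 2.0]

res = create_result(xi, yi, n_evaluations=2)
print(res.func_vals)         # -> [1. 2.], the two best values, ascending
print(res.x_iters)           # -> [[0.5] [0.9]], the corresponding points
print(res.all_func_vals)     # -> [3. 1. 2.], full history preserved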
def gp_minimize(func, dimensions, base_estimator=None, alpha=1e-10,
                acq="EI", xi=0.01, kappa=1.96, search="auto", n_calls=100,
                n_points=500, n_random_starts=10, n_restarts_optimizer=5,
                x0=None, y0=None, random_state=None):
    """Bayesian optimization using Gaussian Processes.

    If every function evaluation is expensive, for instance when the
    parameters are the hyperparameters of a neural network and the function
    evaluation is the mean cross-validation score across ten folds,
    optimizing the hyperparameters by standard optimization routines would
    take forever!

    The idea is to approximate the function using a Gaussian process. In
    other words the function values are assumed to follow a multivariate
    gaussian. The covariance of the function values is given by a GP kernel
    between the parameters. The next parameter to evaluate can then be
    chosen by optimizing an acquisition function over the Gaussian prior,
    which is much quicker to evaluate than the function itself.

    The total number of evaluations, `n_calls`, is allocated as follows.
    If `x0` is provided but not `y0`, then the elements of `x0` are first
    evaluated, followed by `n_random_starts` evaluations. Finally,
    `n_calls - len(x0) - n_random_starts` evaluations are made guided by
    the surrogate model. If `x0` and `y0` are both provided then
    `n_random_starts` evaluations are first made then
    `n_calls - n_random_starts` subsequent evaluations are made guided by
    the surrogate model.

    Parameters
    ----------
    * `func` [callable]:
        Function to minimize. Should take an array of parameters and
        return the function value.

    * `dimensions` [list, shape=(n_dims,)]:
        List of search space dimensions.
        Each search dimension can be defined either as

        - a `(lower_bound, upper_bound)` tuple (for `Real` or `Integer`
          dimensions),
        - a `(lower_bound, upper_bound, "prior")` tuple (for `Real`
          dimensions),
        - as a list of categories (for `Categorical` dimensions), or
        - an instance of a `Dimension` object (`Real`, `Integer` or
          `Categorical`).

    * `base_estimator` [a Gaussian process estimator]:
        The Gaussian process estimator to use for optimization.

    * `alpha` [float, default=1e-10]:
        Value added to the diagonal of the kernel matrix during fitting.
        Larger values correspond to an increased noise level in the
        observations and reduce potential numerical issues during fitting.

    * `acq` [string, default=`"EI"`]:
        Function to minimize over the gaussian prior. Can be either

        - `"LCB"` for lower confidence bound,
        - `"EI"` for expected improvement,
        - `"PI"` for probability of improvement.

    * `xi` [float, default=0.01]:
        Controls how much improvement one wants over the previous best
        values. Used when the acquisition is either `"EI"` or `"PI"`.

    * `kappa` [float, default=1.96]:
        Controls how much of the variance in the predicted values should be
        taken into account. If set to be very high, then we are favouring
        exploration over exploitation and vice versa.
        Used when the acquisition is `"LCB"`.

    * `search` [string, `"auto"`, `"sampling"` or `"lbfgs"`, default=`"auto"`]:
        Searching for the next possible candidate to update the Gaussian
        prior with.

        If search is set to `"auto"`, then it is set to `"lbfgs"` if all
        the search dimensions are `Real` (continuous). It defaults to
        `"sampling"` for all other cases.

        If search is set to `"sampling"`, `n_points` are sampled randomly
        and the Gaussian Process prior is updated with the point that gives
        the best acquisition value over the Gaussian prior.

        If search is set to `"lbfgs"`, then a point is sampled randomly, and
        lbfgs is run for 20 iterations optimizing the acquisition function
        over the Gaussian prior.

    * `n_calls` [int, default=100]:
        Number of calls to `func`.

    * `n_points` [int, default=500]:
        Number of points to sample to determine the next "best" point.
        Useless if search is set to `"lbfgs"`.

    * `n_random_starts` [int, default=10]:
        Number of evaluations of `func` with random initialization points
        before approximating the `func` with `base_estimator`.

    * `n_restarts_optimizer` [int, default=5]:
        The number of restarts of the optimizer when `search` is `"lbfgs"`.

    * `x0` [list, list of lists or `None`]:
        Initial input points.

        - If it is a list of lists, use it as a list of input points.
        - If it is a list, use it as a single initial input point.
        - If it is `None`, no initial input points are used.

    * `y0` [list, scalar or `None`]:
        Evaluation of initial input points.

        - If it is a list, then it corresponds to evaluations of the function
          at each element of `x0` : the i-th element of `y0` corresponds
          to the function evaluated at the i-th element of `x0`.
        - If it is a scalar, then it corresponds to the evaluation of the
          function at `x0`.
        - If it is `None` and `x0` is provided, then the function is
          evaluated at each element of `x0`.

    * `random_state` [int, RandomState instance, or None (default)]:
        Set random state to something other than None for reproducible
        results.

    Returns
    -------
    * `res` [`OptimizeResult`, scipy object]:
        The optimization result returned as a OptimizeResult object.
        Important attributes are:

        - `x` [list]: location of the minimum.
        - `fun` [float]: function value at the minimum.
        - `models`: surrogate models used for each iteration.
        - `x_iters` [list of lists]: location of function evaluation for
          each iteration.
        - `func_vals` [array]: function value for each iteration.
        - `space` [Space]: the optimization space.
        - `specs` [dict]: the call specifications.
        - `rng` [RandomState instance]: State of the random state
          at the end of minimization.

        For more details related to the OptimizeResult object, refer
        http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.OptimizeResult.html
    """
    # Save call args
    specs = {"args": copy.copy(inspect.currentframe().f_locals),
             "function": inspect.currentframe().f_code.co_name}

    # Check params
    rng = check_random_state(random_state)
    space = Space(dimensions)

    # Default GP
    if base_estimator is None:
        base_estimator = GaussianProcessRegressor(
            kernel=(ConstantKernel(1.0, (0.01, 1000.0)) *
                    Matern(length_scale=np.ones(space.transformed_n_dims),
                           length_scale_bounds=[(0.01, 100)] *
                           space.transformed_n_dims,
                           nu=2.5)),
            normalize_y=True, alpha=alpha, random_state=random_state)

    # Initialize with provided points (x0 and y0) and/or random points.
    # Validate the type of `x0` before indexing into it.
    if x0 is None:
        x0 = []
    if not isinstance(x0, list):
        raise ValueError("`x0` should be a list, but got %s" % type(x0))
    if x0 and not isinstance(x0[0], list):
        x0 = [x0]

    # `func` is evaluated at the elements of `x0` only when `y0` is not
    # provided, so only then do those evaluations count against `n_calls`.
    n_init_func_calls = len(x0) if y0 is None else 0
    n_total_init_calls = n_random_starts + n_init_func_calls

    if n_total_init_calls <= 0:
        # if x0 is not provided and n_random_starts is 0 then
        # it will ask for n_random_starts to be > 0.
        raise ValueError(
            "Expected `n_random_starts` > 0, got %d" % n_random_starts)

    if n_calls < n_total_init_calls:
        raise ValueError(
            "Expected `n_calls` >= %d, got %d" % (n_total_init_calls,
                                                  n_calls))

    if y0 is None and x0:
        y0 = [func(x) for x in x0]
    elif x0:
        if isinstance(y0, Iterable):
            y0 = list(y0)
        elif isinstance(y0, numbers.Number):
            y0 = [y0]
        else:
            raise ValueError(
                "`y0` should be an iterable or a scalar, got %s" % type(y0))
        if len(x0) != len(y0):
            raise ValueError("`x0` and `y0` should have the same length")
        if not all(map(np.isscalar, y0)):
            raise ValueError("`y0` elements should be scalars")
    else:
        y0 = []

    Xi = x0 + space.rvs(n_samples=n_random_starts, random_state=rng)
    yi = y0 + [func(x) for x in Xi[len(x0):]]
    if np.ndim(yi) != 1:
        raise ValueError("`func` should return a scalar")

    if search == "auto":
        if space.is_real:
            search = "lbfgs"
        else:
            search = "sampling"
    elif search not in ["lbfgs", "sampling"]:
        raise ValueError(
            "Expected search to be 'lbfgs', 'sampling' or 'auto', "
            "got %s" % search)

    # Bayesian optimization loop
    models = []
    n_model_iter = n_calls - n_total_init_calls
    for i in range(n_model_iter):
        gp = clone(base_estimator)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            gp.fit(space.transform(Xi), yi)

        models.append(gp)

        if search == "sampling":
            X = space.transform(space.rvs(n_samples=n_points,
                                          random_state=rng))
            values = _gaussian_acquisition(
                X=X, model=gp, y_opt=np.min(yi), method=acq,
                xi=xi, kappa=kappa)
            next_x = X[np.argmin(values)]

        elif search == "lbfgs":
            best = np.inf
            for j in range(n_restarts_optimizer):
                # Use a fresh name for the restart point so that the
                # user-provided `x0` is not clobbered.
                start = space.transform(space.rvs(n_samples=1,
                                                  random_state=rng))[0]

                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    x, a, _ = fmin_l_bfgs_b(
                        _acquisition, start,
                        args=(gp, np.min(yi), acq, xi, kappa),
                        bounds=space.transformed_bounds,
                        approx_grad=True, maxiter=20)

                if a < best:
                    next_x, best = x, a

        next_x = space.inverse_transform(next_x.reshape((1, -1)))[0]
        next_y = func(next_x)
        Xi.append(next_x)
        yi.append(next_y)

    # Pack results
    res = OptimizeResult()
    best = np.argmin(yi)
    res.x = Xi[best]
    res.fun = yi[best]
    res.func_vals = np.array(yi)
    res.x_iters = Xi
    res.models = models
    res.space = space
    res.random_state = rng
    res.specs = specs

    return res
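# A minimal usage sketch for `gp_minimize` above (illustrative, not part of
# the original module). The one-dimensional objective is an assumption; any
# scalar-returning callable works.
def objective(params):
    x, = params
    return (x - 0.2) ** 2

res = gp_minimize(objective, dimensions=[(-1.0, 1.0)],
                  n_calls=20, n_random_starts=5, random_state=1)
print(res.x, res.fun)        # best point found and its function value
print(len(res.models))       # one fitted GP per model-guided iteration (15)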