def minimize(fun, bounds=None, x0=None, max_evaluations=100000,
             use_local_search=True, rg=None, runid=0):
    """Minimization of a scalar function using a C++ dual annealing
    implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (n,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
    x0 : ndarray, shape (n,), optional
        Initial guess.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    use_local_search : bool, optional
        Whether the C++ optimizer performs local search steps.
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used to identify the run for debugging / logging.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    n = guess.size
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * n
        upper = [0] * n
    array_type = ct.c_double * n
    c_callback = call_back_type(_c_func(fun))
    seed = int(rg.uniform(0, 2**32 - 1))
    try:
        res = optimizeDA_C(runid, c_callback, n, seed, array_type(*guess),
                           array_type(*lower), array_type(*upper),
                           max_evaluations, use_local_search)
        # res is a C array of n solution values followed by
        # [fun, nfev, nit, status]; np.fromiter already yields an ndarray.
        x = np.fromiter(res, dtype=np.float64, count=n)
        val = res[n]
        evals = int(res[n + 1])
        iterations = int(res[n + 2])
        stop = int(res[n + 3])
        freemem(res)  # buffer was allocated on the C++ side
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, input_sigma=0.3, popsize=31,
             max_evaluations=100000, max_iterations=100000, accuracy=1.0,
             stop_fittness=None, is_terminate=None, rg=None, runid=0):
    """Minimization of a scalar function of one or more variables using a
    C++ CMA-ES implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (n,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
        None is used to specify no bound.
    x0 : ndarray, shape (n,), optional
        Initial guess.
    input_sigma : ndarray, shape (n,) or scalar, optional
        Initial step size for each dimension.
    popsize : int, optional
        CMA-ES population size.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    max_iterations : int, optional
        Forced termination after ``max_iterations`` iterations.
    accuracy : float, optional
        values > 1.0 reduce the accuracy.
    stop_fittness : float, optional
        Limit for fitness value. If reached minimize terminates.
    is_terminate : callable, optional
        Callback to be used if the caller of minimize wants to decide
        when to terminate.
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used by the is_terminate callback to identify the CMA-ES run.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.

    Raises
    ------
    Exception
        If called on a platform other than Linux.
    """
    if not sys.platform.startswith('linux'):
        raise Exception("CMAES C++ variant currently only supported on Linux")
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    n = guess.size
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * n
        upper = [0] * n
    mu = int(popsize / 2)  # number of parents / elite size
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * n
    if stop_fittness is None:
        stop_fittness = np.nan  # NaN disables the fitness-limit check in C++
    if is_terminate is None:
        is_terminate = _is_terminate_false
        use_terminate = False
    else:
        use_terminate = True
    array_type = ct.c_double * n
    c_callback = call_back_type(_c_func(fun))
    c_is_terminate = is_terminate_type(is_terminate)
    try:
        res = optimizeACMA_C(runid, c_callback, n, array_type(*guess),
                             array_type(*lower), array_type(*upper),
                             array_type(*input_sigma), max_iterations,
                             max_evaluations, stop_fittness, mu, popsize,
                             accuracy, use_terminate, c_is_terminate)
        # res holds n solution values followed by [fun, nfev, nit, status]
        x = np.fromiter(res, dtype=np.float64, count=n)
        val = res[n]
        evals = int(res[n + 1])
        iterations = int(res[n + 2])
        stop = int(res[n + 3])
        freemem(res)  # buffer was allocated on the C++ side
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, input_sigma=0.3, popsize=None,
             max_evaluations=100000, stop_fitness=None, keep=200, f=0.5,
             cr=0.9, rg=None, runid=0):
    """Minimization of a scalar function of one or more variables using a
    C++ Differential Evolution implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (dim,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
        None is used to specify no bound.
    x0 : ndarray, shape (dim,), optional
        Initial guess.
    input_sigma : ndarray, shape (dim,), scalar or callable, optional
        Initial step size for each dimension.
    popsize : int, optional
        Population size, defaults to 31.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    stop_fitness : float, optional
        Limit for fitness value. If reached minimize terminates.
    keep : float, optional
        Changes the reinitialization probability of individuals based on
        their age. Higher value means lower probability of reinitialization.
    f : float, optional
        The mutation constant (differential weight F), range [0, 2].
    cr : float, optional
        The recombination (crossover) constant, range [0, 1].
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used to identify the run for debugging / logging.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    dim = guess.size
    if popsize is None:
        popsize = 31
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * dim
        upper = [0] * dim
    if callable(input_sigma):
        input_sigma = input_sigma()
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * dim
    if stop_fitness is None:
        stop_fitness = math.inf  # unreachable limit disables the check
    array_type = ct.c_double * dim
    c_callback = call_back_type(callback(fun))
    seed = int(rg.uniform(0, 2**32 - 1))
    # result buffer filled by the C++ side: dim solution values followed
    # by [fun, nfev, nit, status]
    res = np.empty(dim + 4)
    res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
    try:
        optimizeLDE_C(runid, c_callback, dim, array_type(*guess),
                      array_type(*input_sigma), seed, array_type(*lower),
                      array_type(*upper), max_evaluations, keep,
                      stop_fitness, popsize, f, cr, res_p)
        x = res[:dim]
        val = res[dim]
        evals = int(res[dim + 1])
        iterations = int(res[dim + 2])
        stop = int(res[dim + 3])
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, popsize=0, max_evaluations=100000,
             stop_fitness=None, M=1, rg=None, runid=0):
    """Minimization of a scalar function of one or more variables using a
    C++ BiteOpt implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (dim,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
        None is used to specify no bound.
    x0 : ndarray, shape (dim,), optional
        Initial guess.
    popsize : int, optional
        Population size; 0 lets the C++ implementation choose.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    stop_fitness : float, optional
        Limit for fitness value. If reached minimize terminates.
    M : int, optional
        Depth to use, 1 for plain CBiteOpt algorithm, >1 for CBiteOptDeep.
        Expected range is [1; 36].
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used to identify the run for debugging / logging.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    dim = guess.size
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * dim
        upper = [0] * dim
    if stop_fitness is None:
        stop_fitness = -math.inf  # unreachable limit disables the check
    array_type = ct.c_double * dim
    c_callback = mo_call_back_type(callback(fun, dim))
    # result buffer filled by the C++ side: dim solution values followed
    # by [fun, nfev, nit, status]
    res = np.empty(dim + 4)
    res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
    try:
        optimizeBite_C(runid, c_callback, dim,
                       int(rg.uniform(0, 2**32 - 1)), array_type(*guess),
                       array_type(*lower), array_type(*upper),
                       max_evaluations, stop_fitness, popsize, M, res_p)
        x = res[:dim]
        val = res[dim]
        evals = int(res[dim + 1])
        iterations = int(res[dim + 2])
        stop = int(res[dim + 3])
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, input_sigma=0.3, popsize=31,
             max_evaluations=100000, accuracy=1.0, stop_fitness=None,
             rg=None, runid=0, workers=1, normalize=True, update_gap=None):
    """Minimization of a scalar function of one or more variables using a
    C++ CMA-ES implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (dim,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
        None is used to specify no bound.
    x0 : ndarray, shape (dim,), optional
        Initial guess.
    input_sigma : ndarray, shape (dim,), scalar or callable, optional
        Initial step size for each dimension.
    popsize : int, optional
        CMA-ES population size.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    accuracy : float, optional
        values > 1.0 reduce the accuracy.
    stop_fitness : float, optional
        Limit for fitness value. If reached minimize terminates.
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used by the is_terminate callback to identify the CMA-ES run.
    workers : int or None, optional
        If workers is not None, function evaluation is performed in
        parallel for the whole population. Useful for costly objective
        functions but is deactivated for parallel retry.
    normalize : bool, optional
        If True the pheno -> geno transformation maps arguments to the
        interval [-1, 1].
    update_gap : int, optional
        Number of iterations without distribution update.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    dim = guess.size
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * dim
        upper = [0] * dim
    if workers is None:
        workers = 0  # 0 signals "no parallel evaluation" to the C++ side
    mu = int(popsize / 2)  # number of parents / elite size
    if callable(input_sigma):
        input_sigma = input_sigma()
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * dim
    if stop_fitness is None:
        stop_fitness = math.inf  # unreachable limit disables the check
    array_type = ct.c_double * dim
    c_callback = mo_call_back_type(callback(fun, dim))
    # result buffer filled by the C++ side: dim solution values followed
    # by [fun, nfev, nit, status]
    res = np.empty(dim + 4)
    res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
    try:
        optimizeACMA_C(runid, c_callback, dim, array_type(*guess),
                       array_type(*lower), array_type(*upper),
                       array_type(*input_sigma), max_evaluations,
                       stop_fitness, mu, popsize, accuracy,
                       int(rg.uniform(0, 2**32 - 1)), normalize,
                       -1 if update_gap is None else update_gap,
                       workers, res_p)
        x = res[:dim]
        val = res[dim]
        evals = int(res[dim + 1])
        iterations = int(res[dim + 2])
        stop = int(res[dim + 3])
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, input_sigma=0.3, popsize=None,
             max_evaluations=100000, stop_fitness=None, pbest=0.7,
             f0=0.0, cr0=0.0, rg=None, runid=0, workers=None):
    """Minimization of a scalar function of one or more variables using a
    C++ LCL Differential Evolution implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (n,).
    bounds : sequence or `Bounds`
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
    x0 : ndarray, shape (n,), optional
        Initial guess.
    input_sigma : ndarray, shape (n,) or scalar, optional
        Initial step size for each dimension.
    popsize : int, optional
        Population size, defaults to ``int(n * 8.5 + 150)``.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    stop_fitness : float, optional
        Limit for fitness value. If reached minimize terminates.
    pbest : float, optional
        use low value 0 < pbest <= 1 to narrow search.
    f0 : float, optional
        The initial mutation constant (differential weight F), range [0, 2].
    cr0 : float, optional
        The initial recombination (crossover) constant, range [0, 1].
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used to identify the run for debugging / logging.
    workers : int or None, optional
        If workers is not None, function evaluation is performed in
        parallel for the whole population. Useful for costly objective
        functions but is deactivated for parallel retry.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    n = guess.size
    if popsize is None:
        popsize = int(n * 8.5 + 150)
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * n
        upper = [0] * n
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * n
    if stop_fitness is None:
        stop_fitness = math.inf  # unreachable limit disables the check
    parfun = None if workers is None else parallel(fun, workers)
    array_type = ct.c_double * n
    c_callback_par = call_back_par(callback_par(fun, parfun))
    seed = int(rg.uniform(0, 2**32 - 1))
    try:
        res = optimizeLCLDE_C(runid, c_callback_par, n, array_type(*guess),
                              array_type(*input_sigma), seed,
                              array_type(*lower), array_type(*upper),
                              max_evaluations, pbest, stop_fitness,
                              popsize, f0, cr0)
        # res holds n solution values followed by [fun, nfev, nit, status]
        x = np.fromiter(res, dtype=np.float64, count=n)
        val = res[n]
        evals = int(res[n + 1])
        iterations = int(res[n + 2])
        stop = int(res[n + 3])
        freemem(res)  # buffer was allocated on the C++ side
        if parfun is not None:
            parfun.stop()  # stop all parallel evaluation processes
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # BUGFIX: the original called fun.stop() here — `fun` is the
        # objective function; the parallel evaluator `parfun` is what
        # must be shut down, as on the success path above.
        if parfun is not None:
            parfun.stop()  # stop all parallel evaluation processes
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)
def minimize(fun, bounds=None, x0=None, input_sigma=0.166, popsize=0,
             max_evaluations=100000, stop_fittness=None, rg=None, runid=0):
    """Minimization of a scalar function of one or more variables using a
    C++ SCMA implementation called via ctypes.

    Parameters
    ----------
    fun : callable
        The objective function to be minimized. ``fun(x) -> float``
        where ``x`` is an 1-D array with shape (n,).
    bounds : sequence or `Bounds`, optional
        Bounds on variables, either a `scipy.Bounds` instance or a
        sequence of ``(min, max)`` pairs for each element in `x`.
        None is used to specify no bound.
    x0 : ndarray, shape (n,), optional
        Initial guess.
    input_sigma : ndarray, shape (n,) or scalar, optional
        Initial step size for each dimension.
    popsize : int, optional
        Population size; 0 lets the C++ implementation choose.
    max_evaluations : int, optional
        Forced termination after ``max_evaluations`` function evaluations.
    stop_fittness : float, optional
        Limit for fitness value. If reached minimize terminates.
    rg : numpy.random.Generator, optional
        Random generator for creating random guesses. A fresh generator
        is created per call when omitted.
    runid : int, optional
        id used to identify the run for debugging / logging.

    Returns
    -------
    res : OptimizeResult
        ``x`` solution array, ``fun`` best value, ``nfev`` evaluations,
        ``nit`` iterations, ``status`` stopping criterion, ``success`` flag.
    """
    # NOTE(review): a Generator default argument would be created once at
    # definition time and shared by every call; use a None sentinel instead.
    if rg is None:
        rg = Generator(MT19937())
    lower, upper, guess = _check_bounds(bounds, x0, rg)
    n = guess.size
    if lower is None:
        # unbounded: the C layer interprets equal lower/upper as "no bounds"
        lower = [0] * n
        upper = [0] * n
    if np.ndim(input_sigma) == 0:
        input_sigma = [input_sigma] * n
    if stop_fittness is None:
        stop_fittness = -math.inf  # unreachable limit disables the check
    array_type = ct.c_double * n
    c_callback = call_back_type(callback(fun))
    try:
        res = optimizeCsma_C(runid, c_callback, n,
                             int(rg.uniform(0, 2**32 - 1)),
                             array_type(*guess), array_type(*lower),
                             array_type(*upper), array_type(*input_sigma),
                             max_evaluations, stop_fittness, popsize)
        # res holds n solution values followed by [fun, nfev, nit, status]
        x = np.fromiter(res, dtype=np.float64, count=n)
        val = res[n]
        evals = int(res[n + 1])
        iterations = int(res[n + 2])
        stop = int(res[n + 3])
        freemem(res)  # buffer was allocated on the C++ side
        return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations,
                              status=stop, success=True)
    except Exception:
        # best-effort contract: never raise, signal failure via the result
        return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0,
                              status=-1, success=False)