def test01_availability(self):
        """Quick API check on available optimizers"""

        # f_easy_simple, ref_results, and the raises helper come from the
        # surrounding test module
        import numpy as np
        import skquant.opt as skqopt

        bounds = np.array([[-1, 1], [-1, 1]], dtype=float)
        budget = 40
        x0 = np.array([0.5, 0.5])

        # interface with incorrect input
        assert raises(RuntimeError,
                      skqopt.minimize,
                      f_easy_simple,
                      x0,
                      bounds,
                      budget,
                      method='does not exist')

        # ImFil
        result, history = \
             skqopt.minimize(f_easy_simple, x0, bounds, budget, method='imfil')
        assert type(result.optpar) == np.ndarray
        assert np.round(
            sum(result.optpar) - sum(ref_results[f_easy_simple]['imfil']),
            8) == 0.0

        # SnobFit
        from SQSnobFit import optset
        options = optset(maxmp=len(x0) + 6)
        result, history = \
             skqopt.minimize(f_easy_simple, [], bounds, budget, method='snobfit', options=options)
        assert type(result.optpar) == np.ndarray
        assert np.round(
            sum(result.optpar) - sum(ref_results[f_easy_simple]['snobfit']),
            8) == 0.0

        # Py-BOBYQA
        result, history = \
             skqopt.minimize(f_easy_simple, x0, bounds, budget, method='bobyqa')
        assert type(result.optpar) == np.ndarray
        assert np.round(sum(result.optpar), 5) == 0

        # ORBIT
        if skqopt._check_orbit_prerequisites():
            randstate = 1
            np.random.seed(randstate)
            result, history = \
                 skqopt.minimize(f_easy_simple, x0, bounds, budget, method='orbit')
            assert type(result.optpar) == np.ndarray
            assert np.round(
                sum(result.optpar) - sum((0.00076624, 0.00060909)), 7) == 0
Example No. 2
 def optimize(
     self,
     num_vars,
     objective_function,
     gradient_function=None,
     variable_bounds=None,
     initial_point=None,
 ):
     """Runs the optimization."""
     super().optimize(num_vars, objective_function, gradient_function,
                      variable_bounds, initial_point)
     snobfit_settings = {
         "maxmp": self._maxmp,
         "maxfail": self._maxfail,
         "verbose": self._verbose,
     }
     options = optset(optin=snobfit_settings)
      # work around errors when the initial point lies outside the acceptable
      # bounds by wrapping it back into range via modulo
     for idx, theta in enumerate(initial_point):
         if abs(theta) > variable_bounds[idx][0]:
             initial_point[
                 idx] = initial_point[idx] % variable_bounds[idx][0]
         elif abs(theta) > variable_bounds[idx][1]:
             initial_point[
                 idx] = initial_point[idx] % variable_bounds[idx][1]
     res, history = skq.minimize(
         objective_function,
         np.array(initial_point, dtype=float),
         bounds=variable_bounds,
         budget=self._maxiter,
         method="snobfit",
         options=options,
     )
     return res.optpar, res.optval, len(history)
Example No. 3
    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Optional[Callable[[POINT], POINT]] = None,
        bounds: Optional[List[Tuple[float, float]]] = None,
    ) -> OptimizerResult:
        snobfit_settings = {
            "maxmp": self._maxmp,
            "maxfail": self._maxfail,
            "verbose": self._verbose,
        }
        options = optset(optin=snobfit_settings)
        # work around errors when the initial point lies outside the
        # acceptable bounds by wrapping it back into range via modulo
        x0 = np.asarray(x0)
        for idx, theta in enumerate(x0):
            if abs(theta) > bounds[idx][0]:
                x0[idx] = x0[idx] % bounds[idx][0]
            elif abs(theta) > bounds[idx][1]:
                x0[idx] = x0[idx] % bounds[idx][1]

        res, history = skq.minimize(
            fun,
            x0,
            bounds=bounds,
            budget=self._maxiter,
            method="snobfit",
            options=options,
        )

        optimizer_result = OptimizerResult()
        optimizer_result.x = res.optpar
        optimizer_result.fun = res.optval
        optimizer_result.nfev = len(history)
        return optimizer_result
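
The snippet above reads like a method on a Qiskit-style SNOBFIT optimizer class. As a minimal usage sketch, assuming that class exists with constructor arguments matching the attributes referenced above (maxiter, maxfail, maxmp, verbose) and that OptimizerResult exposes x, fun, and nfev:

import numpy as np

def noisy_objective(x):
    # cheap stand-in for an expensive, noisy objective
    return float(np.sum(x**2) + 1e-3 * np.random.randn())

optimizer = SNOBFIT(maxiter=100, maxfail=5, maxmp=8, verbose=False)
result = optimizer.minimize(noisy_objective,
                            x0=np.array([0.5, -0.5]),
                            bounds=[(-1.0, 1.0), (-1.0, 1.0)])
print(result.x, result.fun, result.nfev)

Note that the modulo wrap above can move an in-bounds starting point when a lower bound is negative (in Python, 0.5 % -1.0 == -0.5), so the point handed to SnobFit may differ from the x0 that was passed in.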
Example No. 4
 def optimize(self, num_vars, objective_function, gradient_function=None,
              variable_bounds=None, initial_point=None):
     """ Runs the optimization. """
     super().optimize(num_vars, objective_function, gradient_function,
                      variable_bounds, initial_point)
     res, history = skq.minimize(objective_function, np.array(initial_point),
                                 bounds=np.array(variable_bounds), budget=self._maxiter,
                                 method="bobyqa")
     return res.optpar, res.optval, len(history)
Example No. 5
 def optimize(self, num_vars, objective_function, gradient_function=None, variable_bounds=None,
              initial_point=None):
     """ Runs the optimization. """
     super().optimize(num_vars, objective_function, gradient_function, variable_bounds,
                      initial_point)
     res, history = skq.minimize(func=objective_function, x0=initial_point,
                                 bounds=variable_bounds, budget=self._maxiter,
                                 method="imfil")
     return res.optpar, res.optval, len(history)
Example No. 6
    def test_issue4(self):
        """error in imfil with multivariate function"""

        from skquant.opt import minimize

        def g(a):
            return a[0]**2 - a[0] + a[1]**3 - 4 * a[1]

        bounds = np.array([[0, 2], [-2, 2]], dtype=float)  # np.float was removed in NumPy 1.24
        init = np.array([1., 0.])
        res, hist = minimize(g, init, bounds, method='imfil')
Example No. 7
    def test_issue3(self):
        """error with snobfit for univariate function"""

        from skquant.opt import minimize

        def f(a):
            return a[0]**2 - a[0]

        bounds = np.array([[0, 2]], dtype=float)
        init = np.array([1.])
        res, hist = minimize(f, init, bounds, method='snobfit')
Example No. 8
    def test_issue2(self):
        """Errors with imfil for univariate functions"""

        from skquant.opt import minimize

        def f(a):
            return a**2 - a

        bounds = np.array([[0, 2]], dtype=float)
        init = np.array([1.])
        res, hist = minimize(f, init, bounds, method='imfil')
Example No. 9
    def optimize(self,
                 num_vars,
                 objective_function,
                 gradient_function=None,
                 variable_bounds=None,
                 initial_point=None):
        super().optimize(num_vars, objective_function, gradient_function,
                         variable_bounds, initial_point)

        res, history = \
             skqopt.minimize(objective_function, initial_point, variable_bounds,
                             self.maxfun, method='imfil', options=self._options)

        return res.optpar, res.optval, len(history)
Example No. 10
    def test_issue10(self):
        """SNOBFIT error for initialization with nreq=1"""

        from skquant.opt import minimize

        def f(a):
            return a[0]**2 - a[0]

        bounds = np.array([[0, 2]], dtype=float)
        init = np.array([1.])
        res, hist = minimize(f,
                             init,
                             bounds,
                             method='snobfit',
                             options={'maxmp': 1})
Example No. 11
def imfil(fun, x0, *args, **options):
    """
    Implicit Filtering

    Algorithm designed for problems with local minima caused by high-frequency,
    low-amplitude noise on top of an underlying large-scale structure. This
    uses the SQImFil Python rewrite.

    Reference:
      C.T. Kelley, "Implicit Filtering", 2011, ISBN: 978-1-61197-189-7

    Original MATLAB code available at ctk.math.ncsu.edu/imfil.html
    """

    budget, bounds, options = _split_options(options)

    result, history = skqopt.minimize(fun, x0, bounds=bounds, budget=budget,
                                      method='imfil', options=options)

    return _res2scipy(result, history)
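
The (fun, x0, *args, **options) signature matches SciPy's protocol for custom minimizers, which suggests these wrappers are meant to be passed as method= to scipy.optimize.minimize. A minimal sketch under that assumption, also assuming _split_options picks 'budget' out of the options dict and _res2scipy produces a SciPy OptimizeResult:

import numpy as np
from scipy import optimize

def sphere(x):
    return float(np.sum(x * x))

res = optimize.minimize(sphere, np.array([0.5, 0.5]),
                        method=imfil,  # the wrapper defined above
                        bounds=[(-1.0, 1.0), (-1.0, 1.0)],
                        options={'budget': 100})
print(res.x, res.fun)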
Example No. 12
    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Optional[Callable[[POINT], POINT]] = None,
        bounds: Optional[List[Tuple[float, float]]] = None,
    ) -> OptimizerResult:
        res, history = skq.minimize(
            func=fun,
            x0=x0,
            bounds=bounds,
            budget=self._maxiter,
            method="imfil",
        )

        optimizer_result = OptimizerResult()
        optimizer_result.x = res.optpar
        optimizer_result.fun = res.optval
        optimizer_result.nfev = len(history)
        return optimizer_result
Example No. 13
def snobfit(fun, x0, *args, **options):
    """
    Stable Noisy Optimization by Branch and FIT

    SnobFit is developed specifically for optimization problems with noisy and
    expensive-to-compute objective functions. This implementation uses the
    SQSnobFit Python rewrite.

    Reference:
      W. Huyer and A. Neumaier, “Snobfit - Stable Noisy Optimization by Branch
      and Fit”, ACM Trans. Math. Software 35 (2008), Article 9.

    Original MATLAB code available at www.mat.univie.ac.at/~neum/software/snobfit
    """

    budget, bounds, options = _split_options(options)

    result, history = skqopt.minimize(fun, x0, bounds=bounds, budget=budget,
                                      method='snobfit', options=options)

    return _res2scipy(result, history)
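
Since SnobFit targets noisy objectives, a direct-call sketch in the spirit of test_issue10 above; the plain-dict options form is taken from that test, while forwarding other optset keys this way is an assumption:

import numpy as np
import skquant.opt as skqopt

def noisy(x):
    return float(x[0]**2 - x[0] + 1e-3 * np.random.randn())

bounds = np.array([[0, 2]], dtype=float)
result, history = skqopt.minimize(noisy, np.array([1.0]), bounds, budget=60,
                                  method='snobfit', options={'maxmp': 4})
print(result.optpar, result.optval)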
Example No. 14
def pybobyqa(fun, x0, *args, **options):
    """
    Bound Optimization BY Quadratic Approximation

    Trust-region method that builds a quadratic approximation in each iteration
    based on a set of automatically chosen and adjusted interpolation points.

    Reference:
      Coralia Cartis et al., “Improving the Flexibility and Robustness of
      Model-Based Derivative-Free Optimization Solvers”, technical report,
      University of Oxford (2018).

    Code available at github.com/numericalalgorithmsgroup/pybobyqa/
    """

    budget, bounds, options = _split_options(options)

    result, history = skqopt.minimize(fun, x0, bounds=bounds, budget=budget,
                                      method='bobyqa', options=options)

    return _res2scipy(result, history)
Example No. 15
    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Optional[Callable[[POINT], POINT]] = None,
        bounds: Optional[List[Tuple[float, float]]] = None,
    ) -> OptimizerResult:
        from skquant import opt as skq

        res, history = skq.minimize(
            func=fun,
            x0=np.asarray(x0),
            bounds=np.array(bounds),
            budget=self._maxiter,
            method="bobyqa",
        )

        optimizer_result = OptimizerResult()
        optimizer_result.x = res.optpar
        optimizer_result.fun = res.optval
        optimizer_result.nfev = len(history)
        return optimizer_result
Example No. 16
def nomad(fun, x0, *args, **options):
    """
    Nonlinear Optimization by Mesh Adaptive Direct Search

    NOMAD is designed for time-consuming blackbox simulations with a small
    number of variables, including simulations that may fail. It samples the
    parameter space using a mesh that is adaptively adjusted based on the
    progress of the search.

    Reference:
      C. Audet, S. Le Digabel, C. Tribes and V. Rochon Montplaisir. "The NOMAD
      project." Software available at https://www.gerad.ca/nomad .

      S. Le Digabel. "NOMAD: Nonlinear Optimization with the MADS algorithm."
      ACM Trans. on Mathematical Software, 37(4):44:1–44:15, 2011.

    Original C++ code available at www.gerad.ca/nomad
    """

    budget, bounds, options = _split_options(options)

    result, history = skqopt.minimize(fun, x0, bounds=bounds, budget=budget,
                                      method='nomad', options=options)

    return _res2scipy(result, history)
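
NOMAD is the one method not exercised by the availability test at the top, so here is a direct-call sketch mirroring the other methods; it assumes the NOMAD backend for scikit-quant is installed, since skqopt.minimize cannot dispatch to it otherwise:

import numpy as np
import skquant.opt as skqopt

def objective(x):
    return float(x[0]**2 + x[1]**2)

bounds = np.array([[-1, 1], [-1, 1]], dtype=float)
result, history = skqopt.minimize(objective, np.array([0.5, 0.5]),
                                  bounds, budget=40, method='nomad')
print(result.optpar, result.optval)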