Example no. 1
    def _call(self):
        # args = np.load("/Users/blankjul/workspace/pymoo/pymoo/algorithms/data_constr.npy", allow_pickle=True)
        #
        # pymoo = [self.D[e] for e in self.args]
        # for k in range(len(self.args)):
        #     orig = tuple(args)[k]
        #     _pymoo = pymoo[k]
        #     if isinstance(orig, np.ndarray):
        #         print(orig.dtype, _pymoo.dtype)
        #         print(orig.shape, _pymoo.shape)
        #     print(type(orig), type(_pymoo))
        #     print(orig)
        #     print(_pymoo)
        #
        #     print()
        #
        # print("-------------------------------------------------")

        slsqp(*[self.D[e] for e in self.args])
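
The `_call` above forwards a cached argument list to SciPy's low-level `slsqp` routine: `self.D` appears to map argument names to prepared values and `self.args` fixes their positional order, while the commented-out block is leftover debugging that compared those values against a pickled reference call. A minimal, self-contained sketch of that unpacking pattern (the names `call_by_names`, `D` and `names` are illustrative, not taken from pymoo):

# Minimal sketch of the call pattern above, assuming the call arguments are
# stored in a dict keyed by name and the positional order is given separately.
def call_by_names(f, D, names):
    # unpack the stored arguments positionally, in the given order
    return f(*[D[name] for name in names])

D = {"a": 1, "b": 2, "c": 3}
print(call_by_names(lambda a, b, c: a + b + c, D, ("a", "b", "c")))  # prints 6
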
Example no. 2
def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
                    constraints=(),
                    maxiter=100, ftol=1.0E-6, iprint=1, disp=False,
                    eps=_epsilon, callback=None, finite_diff_rel_step=None,
                    **unknown_options):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least Squares Programming (SLSQP).

    Options
    -------
    ftol : float
        Precision goal for the value of f in the stopping criterion.
    eps : float
        Step size used for numerical approximation of the Jacobian.
    disp : bool
        Set to True to print convergence messages. If False,
        `verbosity` is ignored and set to 0.
    maxiter : int
        Maximum number of iterations.
    finite_diff_rel_step : None or array_like, optional
        If `jac in ['2-point', '3-point', 'cs']` the relative step size to
        use for numerical approximation of `jac`. The absolute step
        size is computed as ``h = rel_step * sign(x0) * max(1, abs(x0))``,
        possibly adjusted to fit into the bounds. For ``method='3-point'``
        the sign of `h` is ignored. If None (default) then step is selected
        automatically.
    """
    _check_unknown_options(unknown_options)
    iter = maxiter - 1
    acc = ftol
    epsilon = eps

    if not disp:
        iprint = 0

    # Constraints are triaged per type into a dictionary of tuples
    if isinstance(constraints, dict):
        constraints = (constraints, )

    cons = {'eq': (), 'ineq': ()}
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError as e:
            raise KeyError('Constraint %d has no type defined.' % ic) from e
        except TypeError as e:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.') from e
        except AttributeError as e:
            raise TypeError("Constraint's type must be a string.") from e
        else:
            if ctype not in ['eq', 'ineq']:
                raise ValueError("Unknown constraint type '%s'." % con['type'])

        # check function
        if 'fun' not in con:
            raise ValueError('Constraint %d has no function defined.' % ic)

        # check Jacobian
        cjac = con.get('jac')
        if cjac is None:
            # approximate Jacobian function. The factory function is needed
            # to keep a reference to `fun`, see gh-4240.
            def cjac_factory(fun):
                def cjac(x, *args):
                    if jac in ['2-point', '3-point', 'cs']:
                        return approx_derivative(fun, x, method=jac, args=args,
                                                 rel_step=finite_diff_rel_step)
                    else:
                        return approx_derivative(fun, x, method='2-point',
                                                 abs_step=epsilon, args=args)

                return cjac
            cjac = cjac_factory(con['fun'])

        # update constraints' dictionary
        cons[ctype] += ({'fun': con['fun'],
                         'jac': cjac,
                         'args': con.get('args', ())}, )

    exit_modes = {-1: "Gradient evaluation required (g & a)",
                   0: "Optimization terminated successfully",
                   1: "Function evaluation required (f & c)",
                   2: "More equality constraints than independent variables",
                   3: "More than 3*n iterations in LSQ subproblem",
                   4: "Inequality constraints incompatible",
                   5: "Singular matrix E in LSQ subproblem",
                   6: "Singular matrix C in LSQ subproblem",
                   7: "Rank-deficient equality constraint subproblem HFTI",
                   8: "Positive directional derivative for linesearch",
                   9: "Iteration limit reached"}

    # Transform x0 into an array.
    x = asfarray(x0).flatten()

    # SLSQP is sent 'old-style' bounds; 'new-style' bounds are required by
    # ScalarFunction
    if bounds is None or len(bounds) == 0:
        new_bounds = (-np.inf, np.inf)
    else:
        new_bounds = old_bound_to_new(bounds)

    # clip the initial guess to bounds, otherwise ScalarFunction doesn't work
    x = np.clip(x, new_bounds[0], new_bounds[1])

    # Set the parameters that SLSQP will need
    # meq, mieq: number of equality and inequality constraints
    meq = sum(map(len, [atleast_1d(c['fun'](x, *c['args']))
              for c in cons['eq']]))
    mieq = sum(map(len, [atleast_1d(c['fun'](x, *c['args']))
               for c in cons['ineq']]))
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1, m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n + 1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 + ((n+1)*n)//2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if bounds is None or len(bounds) == 0:
        xl = np.empty(n, dtype=float)
        xu = np.empty(n, dtype=float)
        xl.fill(np.nan)
        xu.fill(np.nan)
    else:
        bnds = array(bounds, float)
        if bnds.shape[0] != n:
            raise IndexError('SLSQP Error: the length of bounds is not '
                             'compatible with that of x0.')

        with np.errstate(invalid='ignore'):
            bnderr = bnds[:, 0] > bnds[:, 1]

        if bnderr.any():
            raise ValueError('SLSQP Error: lb > ub in bounds %s.' %
                             ', '.join(str(b) for b in bnderr))
        xl, xu = bnds[:, 0], bnds[:, 1]

        # Mark infinite bounds with nans; the Fortran code understands this
        infbnd = ~isfinite(bnds)
        xl[infbnd[:, 0]] = np.nan
        xu[infbnd[:, 1]] = np.nan

    # ScalarFunction provides function and gradient evaluation
    sf = _prepare_scalar_function(func, x, jac=jac, args=args, epsilon=eps,
                                  finite_diff_rel_step=finite_diff_rel_step,
                                  bounds=new_bounds)

    # Initialize the iteration counter and the mode value
    mode = array(0, int)
    acc = array(acc, float)
    majiter = array(iter, int)
    majiter_prev = 0

    # Initialize internal SLSQP state variables
    alpha = array(0, float)
    f0 = array(0, float)
    gs = array(0, float)
    h1 = array(0, float)
    h2 = array(0, float)
    h3 = array(0, float)
    h4 = array(0, float)
    t = array(0, float)
    t0 = array(0, float)
    tol = array(0, float)
    iexact = array(0, int)
    incons = array(0, int)
    ireset = array(0, int)
    itermx = array(0, int)
    line = array(0, int)
    n1 = array(0, int)
    n2 = array(0, int)
    n3 = array(0, int)

    # Print the header if iprint >= 2
    if iprint >= 2:
        print("%5s %5s %16s %16s" % ("NIT", "FC", "OBJFUN", "GNORM"))

    # mode is zero on entry, so call objective, constraints and gradients
    # there should be no func evaluations here because it's cached from
    # ScalarFunction
    fx = sf.fun(x)
    try:
        fx = float(np.asarray(fx))
    except (TypeError, ValueError) as e:
        raise ValueError("Objective function must return a scalar") from e
    g = append(sf.grad(x), 0.0)
    c = _eval_constraint(x, cons)
    a = _eval_con_normals(x, cons, la, n, m, meq, mieq)

    while 1:
        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw,
              alpha, f0, gs, h1, h2, h3, h4, t, t0, tol,
              iexact, incons, ireset, itermx, line,
              n1, n2, n3)

        if mode == 1:  # objective and constraint evaluation required
            fx = sf.fun(x)
            c = _eval_constraint(x, cons)

        if mode == -1:  # gradient evaluation required
            g = append(sf.grad(x), 0.0)
            a = _eval_con_normals(x, cons, la, n, m, meq, mieq)

        if majiter > majiter_prev:
            # call callback if major iteration has incremented
            if callback is not None:
                callback(np.copy(x))

            # Print the status of the current iterate if iprint > 2
            if iprint >= 2:
                print("%5i %5i % 16.6E % 16.6E" % (majiter, sf.nfev,
                                                   fx, linalg.norm(g)))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete. Print status if requested
    if iprint >= 1:
        print(exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')')
        print("            Current function value:", fx)
        print("            Iterations:", majiter)
        print("            Function evaluations:", sf.nfev)
        print("            Gradient evaluations:", sf.ngev)

    return OptimizeResult(x=x, fun=fx, jac=g[:-1], nit=int(majiter),
                          nfev=sf.nfev, njev=sf.ngev, status=int(mode),
                          message=exit_modes[int(mode)], success=(mode == 0))
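
For reference, this version of the driver is reached through `scipy.optimize.minimize` with `method='SLSQP'`; a short usage sketch that exercises the finite-difference Jacobian and `finite_diff_rel_step` options documented above (a usage example, assuming a reasonably recent SciPy):

# Usage sketch: exercising this driver through the public API.
import numpy as np
from scipy.optimize import minimize

def rosen(x):
    return 100.0 * (x[1] - x[0] ** 2) ** 2 + (1.0 - x[0]) ** 2

res = minimize(rosen, np.array([-1.2, 1.0]), method='SLSQP',
               jac='3-point',                      # finite-difference gradient
               bounds=[(-2.0, 2.0), (-2.0, 2.0)],
               constraints={'type': 'ineq', 'fun': lambda x: 1.5 - x[0] - x[1]},
               options={'ftol': 1e-9, 'maxiter': 200,
                        'finite_diff_rel_step': 1e-6})
print(res.status, res.message, res.x)
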
Example no. 3
def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
                    constraints=(),
                    maxiter=100, ftol=1.0E-6, iprint=1, disp=False,
                    eps=_epsilon,
                    **unknown_options):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least SQuares Programming (SLSQP).

    Options for the SLSQP algorithm are:
        ftol : float
            Precision goal for the value of f in the stopping criterion.
        eps : float
            Step size used for numerical approximation of the jacobian.
        disp : bool
            Set to True to print convergence messages. If False,
            `verbosity` is ignored and set to 0.
        maxiter : int
            Maximum number of iterations.

    This function is called by the `minimize` function with
    `method=SLSQP`. It is not supposed to be called directly.
    """
    _check_unknown_options(unknown_options)
    fprime = jac
    iter = maxiter
    acc = ftol
    epsilon = eps

    if not disp:
        iprint = 0

    # Constraints are triaged per type into a dictionary of tuples
    if isinstance(constraints, dict):
        constraints = (constraints, )

    cons = {'eq': (), 'ineq': ()}
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError:
            raise KeyError('Constraint %d has no type defined.' % ic)
        except TypeError:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.')
        except AttributeError:
            raise TypeError("Constraint's type must be a string.")
        else:
            if ctype not in ['eq', 'ineq']:
                raise ValueError("Unknown constraint type '%s'." % con['type'])

        # check function
        if 'fun' not in con:
            raise ValueError('Constraint %d has no function defined.' % ic)

        # check jacobian
        cjac = con.get('jac')
        if cjac is None:
            # approximate jacobian function
            def cjac(x, *args):
                return approx_jacobian(x, con['fun'], epsilon, *args)

        # update constraints' dictionary
        cons[ctype] += ({'fun': con['fun'],
                         'jac': cjac,
                         'args': con.get('args', ())}, )

    exit_modes = {-1: "Gradient evaluation required (g & a)",
                    0: "Optimization terminated successfully.",
                    1: "Function evaluation required (f & c)",
                    2: "More equality constraints than independent variables",
                    3: "More than 3*n iterations in LSQ subproblem",
                    4: "Inequality constraints incompatible",
                    5: "Singular matrix E in LSQ subproblem",
                    6: "Singular matrix C in LSQ subproblem",
                    7: "Rank-deficient equality constraint subproblem HFTI",
                    8: "Positive directional derivative for linesearch",
                    9: "Iteration limit exceeded"}

    # Wrap func
    feval, func = wrap_function(func, args)

    # Wrap fprime, if provided, or approx_jacobian if not
    if fprime:
        geval, fprime = wrap_function(fprime, args)
    else:
        geval, fprime = wrap_function(approx_jacobian, (func, epsilon))

    # Transform x0 into an array.
    x = asfarray(x0).flatten()

    # Set the parameters that SLSQP will need
    # meq, mieq: number of equality and inequality constraints
    meq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['eq']]))
    mieq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['ineq']]))
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1, m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n + 1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 + ((n+1)*n)//2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if bounds is None or len(bounds) == 0:
        xl, xu = array([-1.0E12]*n), array([1.0E12]*n)
    else:
        bnds = array(bounds, float)
        if bnds.shape[0] != n:
            raise IndexError('SLSQP Error: the length of bounds is not '
                             'compatible with that of x0.')

        bnderr = where(bnds[:, 0] > bnds[:, 1])[0]
        if bnderr.any():
            raise ValueError('SLSQP Error: lb > ub in bounds %s.' %
                             ', '.join(str(b) for b in bnderr))
        xl, xu = bnds[:, 0], bnds[:, 1]

        # filter -inf, inf and NaN values
        infbnd = ~isfinite(bnds)
        xl[infbnd[:, 0]] = -1.0E12
        xu[infbnd[:, 1]] = 1.0E12

    # Initialize the iteration counter and the mode value
    mode = array(0,int)
    acc = array(acc,float)
    majiter = array(iter,int)
    majiter_prev = 0

    # Print the header if iprint >= 2
    if iprint >= 2:
        print("%5s %5s %16s %16s" % ("NIT","FC","OBJFUN","GNORM"))

    while 1:

        if mode == 0 or mode == 1:  # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            # Compute the constraints
            if cons['eq']:
                c_eq = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['eq']])
            else:
                c_eq = zeros(0)
            if cons['ineq']:
                c_ieq = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['ineq']])
            else:
                c_ieq = zeros(0)

            # Now combine c_eq and c_ieq into a single matrix
            c = concatenate((c_eq, c_ieq))

        if mode == 0 or mode == -1:  # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x),0.0)

            # Compute the normals of the constraints
            if cons['eq']:
                a_eq = vstack([con['jac'](x, *con['args'])
                               for con in cons['eq']])
            else:  # no equality constraint
                a_eq = zeros((meq, n))

            if cons['ineq']:
                a_ieq = vstack([con['jac'](x, *con['args'])
                                for con in cons['ineq']])
            else:  # no inequality constraint
                a_ieq = zeros((mieq, n))

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0:  # no constraints
                a = zeros((la, n))
            else:
                a = vstack((a_eq, a_ieq))
            a = concatenate((a,zeros([la,1])),1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw)

        # Print the status of the current iterate if iprint > 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print("%5i %5i % 16.6E % 16.6E" % (majiter,feval[0],
                                               fx,linalg.norm(g)))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print(exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')')
        print("            Current function value:", fx)
        print("            Iterations:", majiter)
        print("            Function evaluations:", feval[0])
        print("            Gradient evaluations:", geval[0])

    return Result(x=x, fun=fx, jac=g, nit=int(majiter), nfev=feval[0],
                  njev=geval[0], status=int(mode),
                  message=exit_modes[int(mode)], success=(mode == 0))
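
Unlike Example no. 2, the `cjac` defined inside the constraint loop here closes over the loop variable `con` directly, so with several approximated constraints every `cjac` ends up referencing the last one (the late-binding issue the `cjac_factory` and the gh-4240 comment in Example no. 2 address). A standalone illustration of that pitfall, using toy functions rather than SciPy code:

# Late-binding closures: every f sees the final value of the loop variable.
funcs_buggy = []
for k in range(3):
    def f():
        return k                    # binds the *name* k, resolved at call time
    funcs_buggy.append(f)
print([f() for f in funcs_buggy])   # [2, 2, 2] - all see the last k

# A factory captures the current value, as cjac_factory does in Example no. 2.
funcs_fixed = []
for k in range(3):
    def factory(k_captured):
        def f():
            return k_captured       # each closure keeps its own value
        return f
    funcs_fixed.append(factory(k))
print([f() for f in funcs_fixed])   # [0, 1, 2]
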
Example no. 4
def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
                    constraints=(),
                    maxiter=100, ftol=1.0E-6, iprint=1, disp=False,
                    eps=_epsilon,
                    **unknown_options):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least SQuares Programming (SLSQP).

    Options for the SLSQP algorithm are:
        ftol : float
            Precision goal for the value of f in the stopping criterion.
        eps : float
            Step size used for numerical approximation of the jacobian.
        disp : bool
            Set to True to print convergence messages. If False,
            `verbosity` is ignored and set to 0.
        maxiter : int
            Maximum number of iterations.

    This function is called by the `minimize` function with
    `method=SLSQP`. It is not supposed to be called directly.
    """
    _check_unknown_options(unknown_options)
    fprime = jac
    iter = maxiter
    acc = ftol
    epsilon = eps

    if not disp:
        iprint = 0

    # Constraints are triaged per type into a dictionary of tuples
    if isinstance(constraints, dict):
        constraints = (constraints, )

    cons = {'eq': (), 'ineq': ()}
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError:
            raise KeyError('Constraint %d has no type defined.' % ic)
        except TypeError:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.')
        except AttributeError:
            raise TypeError("Constraint's type must be a string.")
        else:
            if ctype not in ['eq', 'ineq']:
                raise ValueError("Unknown constraint type '%s'." % con['type'])

        # check function
        if 'fun' not in con:
            raise ValueError('Constraint %d has no function defined.' % ic)

        # check jacobian
        cjac = con.get('jac')
        if cjac is None:
            # approximate jacobian function
            def cjac(x, *args):
                return approx_jacobian(x, con['fun'], epsilon, *args)

        # update constraints' dictionary
        cons[ctype] += ({'fun' : con['fun'],
                         'jac' : cjac,
                         'args': con.get('args', ())}, )


    exit_modes = { -1 : "Gradient evaluation required (g & a)",
                    0 : "Optimization terminated successfully.",
                    1 : "Function evaluation required (f & c)",
                    2 : "More equality constraints than independent variables",
                    3 : "More than 3*n iterations in LSQ subproblem",
                    4 : "Inequality constraints incompatible",
                    5 : "Singular matrix E in LSQ subproblem",
                    6 : "Singular matrix C in LSQ subproblem",
                    7 : "Rank-deficient equality constraint subproblem HFTI",
                    8 : "Positive directional derivative for linesearch",
                    9 : "Iteration limit exceeded" }


    # Wrap func
    feval, func = wrap_function(func, args)

    # Wrap fprime, if provided, or approx_jacobian if not
    if fprime:
        geval, fprime = wrap_function(fprime, args)
    else:
        geval, fprime = wrap_function(approx_jacobian, (func, epsilon))

    # Transform x0 into an array.
    x = asfarray(x0).flatten()


    # Set the parameters that SLSQP will need
    # meq, mieq: number of equality and inequality constraints
    meq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['eq']]))
    mieq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['ineq']]))
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1,m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n+1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 +(n+1)*n/2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if bounds is None or len(bounds) == 0:
        xl, xu = array([-1.0E12]*n), array([1.0E12]*n)
    else:
        bnds = array(bounds, float)
        if bnds.shape[0] != n:
            raise IndexError('SLSQP Error: the length of bounds is not '
                             'compatible with that of x0.')

        bnderr = where(bnds[:, 0] > bnds[:, 1])[0]
        if bnderr.any():
            raise ValueError('SLSQP Error: lb > ub in bounds %s.' %
                             ', '.join(str(b) for b in bnderr))
        xl, xu = bnds[:, 0], bnds[:, 1]

        # filter -inf and inf values
        infbnd = isinf(bnds)
        xl[infbnd[:, 0]] = -1.0E12
        xu[infbnd[:, 1]] = 1.0E12

    # Initialize the iteration counter and the mode value
    mode = array(0,int)
    acc = array(acc,float)
    majiter = array(iter,int)
    majiter_prev = 0

    # Print the header if iprint >= 2
    if iprint >= 2:
        print "%5s %5s %16s %16s" % ("NIT","FC","OBJFUN","GNORM")

    while 1:

        if mode == 0 or mode == 1: # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            # Compute the constraints
            if cons['eq']:
                c_eq  = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['eq']])
            else:
                c_eq = zeros(0)
            if cons['ineq']:
                c_ieq = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['ineq']])
            else:
                c_ieq = zeros(0)

            # Now combine c_eq and c_ieq into a single matrix
            c = concatenate((c_eq, c_ieq))

        if mode == 0 or mode == -1: # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x),0.0)

            # Compute the normals of the constraints
            if cons['eq']:
                a_eq = vstack([con['jac'](x, *con['args'])
                               for con in cons['eq']])
            else: # no equality constraint
                a_eq = zeros((meq, n))

            if cons['ineq']:
                a_ieq = vstack([con['jac'](x, *con['args'])
                                for con in cons['ineq']])
            else: # no inequality constraint
                a_ieq = zeros((mieq, n))

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0: # no constraints
                a = zeros((la, n))
            else:
                a = vstack((a_eq, a_ieq))
            a = concatenate((a,zeros([la,1])),1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw)

        # Print the status of the current iterate if iprint > 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print "%5i %5i % 16.6E % 16.6E" % (majiter,feval[0],
                                               fx,linalg.norm(g))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')'
        print "            Current function value:", fx
        print "            Iterations:", majiter
        print "            Function evaluations:", feval[0]
        print "            Gradient evaluations:", geval[0]

    return Result(x=x, fun=fx, jac=g, nit=int(majiter), nfev=feval[0],
                  njev=geval[0], status=int(mode),
                  message=exit_modes[int(mode)], success=(mode == 0))
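
This version filters bounds with `isinf` (Example no. 3 uses `~isfinite`); the practical difference is that NaN entries slip past `isinf` but are caught by `isfinite`. A quick NumPy check of that distinction:

# isinf lets NaN through, ~isfinite catches both inf and NaN.
import numpy as np

bnds = np.array([[0.0, np.inf],
                 [-np.inf, 1.0],
                 [np.nan, 2.0]])
print(np.isinf(bnds))       # NaN row -> [False, False]
print(~np.isfinite(bnds))   # NaN row -> [ True, False]
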
Example no. 5
def _minimize_slsqp(func,
                    x0,
                    args=(),
                    jac=None,
                    bounds=None,
                    constraints=(),
                    maxiter=100,
                    ftol=1.0E-6,
                    iprint=1,
                    disp=False,
                    eps=_epsilon,
                    callback=None,
                    **unknown_options):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least SQuares Programming (SLSQP).

    Options
    -------
    ftol : float
        Precision goal for the value of f in the stopping criterion.
    eps : float
        Step size used for numerical approximation of the Jacobian.
    disp : bool
        Set to True to print convergence messages. If False,
        `verbosity` is ignored and set to 0.
    maxiter : int
        Maximum number of iterations.

    """
    _check_unknown_options(unknown_options)
    fprime = jac
    iter = maxiter
    acc = ftol
    epsilon = eps

    if not disp:
        iprint = 0

    # Constraints are triaged per type into a dictionary of tuples
    if isinstance(constraints, dict):
        constraints = (constraints, )

    cons = {'eq': (), 'ineq': ()}
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError:
            raise KeyError('Constraint %d has no type defined.' % ic)
        except TypeError:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.')
        except AttributeError:
            raise TypeError("Constraint's type must be a string.")
        else:
            if ctype not in ['eq', 'ineq']:
                raise ValueError("Unknown constraint type '%s'." % con['type'])

        # check function
        if 'fun' not in con:
            raise ValueError('Constraint %d has no function defined.' % ic)

        # check Jacobian
        cjac = con.get('jac')
        if cjac is None:
            # approximate Jacobian function.  The factory function is needed
            # to keep a reference to `fun`, see gh-4240.
            def cjac_factory(fun):
                def cjac(x, *args):
                    return approx_jacobian(x, fun, epsilon, *args)

                return cjac

            cjac = cjac_factory(con['fun'])

        # update constraints' dictionary
        cons[ctype] += ({
            'fun': con['fun'],
            'jac': cjac,
            'args': con.get('args', ())
        }, )

    exit_modes = {
        -1: "Gradient evaluation required (g & a)",
        0: "Optimization terminated successfully.",
        1: "Function evaluation required (f & c)",
        2: "More equality constraints than independent variables",
        3: "More than 3*n iterations in LSQ subproblem",
        4: "Inequality constraints incompatible",
        5: "Singular matrix E in LSQ subproblem",
        6: "Singular matrix C in LSQ subproblem",
        7: "Rank-deficient equality constraint subproblem HFTI",
        8: "Positive directional derivative for linesearch",
        9: "Iteration limit exceeded"
    }

    # Wrap func
    feval, func = wrap_function(func, args)

    # Wrap fprime, if provided, or approx_jacobian if not
    if fprime:
        geval, fprime = wrap_function(fprime, args)
    else:
        geval, fprime = wrap_function(approx_jacobian, (func, epsilon))

    # Transform x0 into an array.
    x = asfarray(x0).flatten()

    # Set the parameters that SLSQP will need
    # meq, mieq: number of equality and inequality constraints
    meq = sum(
        map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['eq']]))
    mieq = sum(
        map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['ineq']]))
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1, m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n + 1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 + ((n+1)*n)//2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if bounds is None or len(bounds) == 0:
        xl = np.empty(n, dtype=float)
        xu = np.empty(n, dtype=float)
        xl.fill(np.nan)
        xu.fill(np.nan)
    else:
        bnds = array(bounds, float)
        if bnds.shape[0] != n:
            raise IndexError('SLSQP Error: the length of bounds is not '
                             'compatible with that of x0.')

        with np.errstate(invalid='ignore'):
            bnderr = bnds[:, 0] > bnds[:, 1]

        if bnderr.any():
            raise ValueError('SLSQP Error: lb > ub in bounds %s.' %
                             ', '.join(str(b) for b in bnderr))
        xl, xu = bnds[:, 0], bnds[:, 1]

        # Mark infinite bounds with nans; the Fortran code understands this
        infbnd = ~isfinite(bnds)
        xl[infbnd[:, 0]] = np.nan
        xu[infbnd[:, 1]] = np.nan

    # Clip initial guess to bounds (SLSQP may fail with bounds-infeasible
    # initial point)
    have_bound = np.isfinite(xl)
    x[have_bound] = np.clip(x[have_bound], xl[have_bound], np.inf)
    have_bound = np.isfinite(xu)
    x[have_bound] = np.clip(x[have_bound], -np.inf, xu[have_bound])

    # Initialize the iteration counter and the mode value
    mode = array(0, int)
    acc = array(acc, float)
    majiter = array(iter, int)
    majiter_prev = 0

    # Initialize internal SLSQP state variables
    alpha = array(0, float)
    f0 = array(0, float)
    gs = array(0, float)
    h1 = array(0, float)
    h2 = array(0, float)
    h3 = array(0, float)
    h4 = array(0, float)
    t = array(0, float)
    t0 = array(0, float)
    tol = array(0, float)
    iexact = array(0, int)
    incons = array(0, int)
    ireset = array(0, int)
    itermx = array(0, int)
    line = array(0, int)
    n1 = array(0, int)
    n2 = array(0, int)
    n3 = array(0, int)

    # Print the header if iprint >= 2
    if iprint >= 2:
        print("%5s %5s %16s %16s" % ("NIT", "FC", "OBJFUN", "GNORM"))

    while 1:

        if mode == 0 or mode == 1:  # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            try:
                fx = float(np.asarray(fx))
            except (TypeError, ValueError):
                raise ValueError("Objective function must return a scalar")
            # Compute the constraints
            if cons['eq']:
                c_eq = concatenate([
                    atleast_1d(con['fun'](x, *con['args']))
                    for con in cons['eq']
                ])
            else:
                c_eq = zeros(0)
            if cons['ineq']:
                c_ieq = concatenate([
                    atleast_1d(con['fun'](x, *con['args']))
                    for con in cons['ineq']
                ])
            else:
                c_ieq = zeros(0)

            # Now combine c_eq and c_ieq into a single matrix
            c = concatenate((c_eq, c_ieq))

        if mode == 0 or mode == -1:  # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x), 0.0)

            # Compute the normals of the constraints
            if cons['eq']:
                a_eq = vstack(
                    [con['jac'](x, *con['args']) for con in cons['eq']])
            else:  # no equality constraint
                a_eq = zeros((meq, n))

            if cons['ineq']:
                a_ieq = vstack(
                    [con['jac'](x, *con['args']) for con in cons['ineq']])
            else:  # no inequality constraint
                a_ieq = zeros((mieq, n))

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0:  # no constraints
                a = zeros((la, n))
            else:
                a = vstack((a_eq, a_ieq))
            a = concatenate((a, zeros([la, 1])), 1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw, alpha,
              f0, gs, h1, h2, h3, h4, t, t0, tol, iexact, incons, ireset,
              itermx, line, n1, n2, n3)

        # call callback if major iteration has incremented
        if callback is not None and majiter > majiter_prev:
            callback(np.copy(x))

        # Print the status of the current iterate if iprint > 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print("%5i %5i % 16.6E % 16.6E" %
                  (majiter, feval[0], fx, linalg.norm(g)))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Get the KKT multipliers from SLSQP result
    # TODO: This call is required to get the correct kkt values. I'm not sure why. This is out of place.
    # Comment this out to get incorrect multipliers.
    slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw, alpha, f0,
          gs, h1, h2, h3, h4, t, t0, tol, iexact, incons, ireset, itermx, line,
          n1, n2, n3)

    w_ind = 0
    ind_mapper = []
    kkt = []
    for constraint in cons['eq'] + cons['ineq']:
        cv = atleast_1d(constraint['fun'](x))
        dim = len(cv)
        kkt += [w[w_ind:(w_ind + dim)]]
        w_ind += dim
        ind_mapper += [
            ii for ii, item in enumerate(constraints)
            if item['fun'] == constraint['fun']
        ]

    kkt_sorted = [kkt[i] for i in ind_mapper]

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print(exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')')
        print("            Current function value:", fx)
        print("            Iterations:", majiter)
        print("            Function evaluations:", feval[0])
        print("            Gradient evaluations:", geval[0])

    return OptimizeResult(x=x,
                          fun=fx,
                          jac=g[:-1],
                          nit=int(majiter),
                          nfev=feval[0],
                          njev=geval[0],
                          status=int(mode),
                          message=exit_modes[int(mode)],
                          success=(mode == 0),
                          kkt=kkt_sorted)
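
This variant additionally pulls KKT multipliers out of the SLSQP workspace `w` and returns them in an extra `kkt` field. Assuming this patched driver is wired in behind `minimize`, reading that field could look like the sketch below; the `kkt` attribute is specific to this modified version and is not part of stock SciPy:

# Hedged sketch: reading the extra `kkt` field returned by the patched driver.
import numpy as np
from scipy.optimize import minimize

cons = ({'type': 'ineq', 'fun': lambda x: x[0] + x[1] - 1.0},
        {'type': 'eq',   'fun': lambda x: x[0] - 2.0 * x[1]})
res = minimize(lambda x: x[0] ** 2 + x[1] ** 2, np.array([1.0, 1.0]),
               method='SLSQP', constraints=cons)
# With the patched driver, one array of multipliers per constraint, extracted
# from the SLSQP workspace `w`; stock OptimizeResult has no such attribute.
for multipliers in getattr(res, 'kkt', []):
    print(multipliers)
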
Example no. 6
def fmin_slsqp(func, x0, eqcons=[], f_eqcons=None, ieqcons=[], f_ieqcons=None,
               bounds=[], fprime=None, fprime_eqcons=None,
               fprime_ieqcons=None, args=(), iter=100, acc=1.0E-6,
               iprint=1, full_output=0, epsilon=_epsilon):
    """
    Minimize a function using Sequential Least SQuares Programming

    Python interface function for the SLSQP Optimization subroutine
    originally implemented by Dieter Kraft.

    *Parameters*:
        func : callable f(x,*args)
            Objective function.
        x0 : ndarray of float
            Initial guess for the independent variable(s).
        eqcons : list
            A list of functions of length n such that
            eqcons[j](x0,*args) == 0.0 in a successfully optimized
            problem.
        f_eqcons : callable f(x,*args)
            Returns an array in which each element must equal 0.0 in a
            successfully optimized problem.  If f_eqcons is specified,
            eqcons is ignored.
        ieqcons : list
            A list of functions of length n such that
            ieqcons[j](x0,*args) >= 0.0 in a successfully optimized
            problem.
        f_ieqcons : callable f(x0,*args)
            Returns an array in which each element must be greater or
            equal to 0.0 in a successfully optimized problem.  If
            f_ieqcons is specified, ieqcons is ignored.
        bounds : list
            A list of tuples specifying the lower and upper bound
            for each independent variable [(xl0, xu0),(xl1, xu1),...]
        fprime : callable f(x,*args)
            A function that evaluates the partial derivatives of func.
        fprime_eqcons : callable f(x,*args)
            A function of the form f(x, *args) that returns the m by n
            array of equality constraint normals.  If not provided,
            the normals will be approximated. The array returned by
            fprime_eqcons should be sized as ( len(eqcons), len(x0) ).
        fprime_ieqcons : callable f(x,*args)
            A function of the form f(x, *args) that returns the m by n
            array of inequality constraint normals.  If not provided,
            the normals will be approximated. The array returned by
            fprime_ieqcons should be sized as ( len(ieqcons), len(x0) ).
        args : sequence
            Additional arguments passed to func and fprime.
        iter : int
            The maximum number of iterations.
        acc : float
            Requested accuracy.
        iprint : int
            The verbosity of fmin_slsqp:
              iprint <= 0 : Silent operation
              iprint == 1 : Print summary upon completion (default)
              iprint >= 2 : Print status of each iterate and summary
        full_output : bool
            If False, return only the minimizer of func (default).
            Otherwise, output final objective function and summary
            information.
        epsilon : float
            The step size for finite-difference derivative estimates.

    *Returns*: ( x, { fx, its, imode, smode })
        x : ndarray of float
            The final minimizer of func.
        fx : ndarray of float
            The final value of the objective function.
        its : int
            The number of iterations.
        imode : int
            The exit mode from the optimizer (see below).
        smode : string
            Message describing the exit mode from the optimizer.

    *Notes*

        Exit modes are defined as follows:
            -1 : Gradient evaluation required (g & a)
             0 : Optimization terminated successfully.
             1 : Function evaluation required (f & c)
             2 : More equality constraints than independent variables
             3 : More than 3*n iterations in LSQ subproblem
             4 : Inequality constraints incompatible
             5 : Singular matrix E in LSQ subproblem
             6 : Singular matrix C in LSQ subproblem
             7 : Rank-deficient equality constraint subproblem HFTI
             8 : Positive directional derivative for linesearch
             9 : Iteration limit exceeded

    """

    exit_modes = { -1 : "Gradient evaluation required (g & a)",
                    0 : "Optimization terminated successfully.",
                    1 : "Function evaluation required (f & c)",
                    2 : "More equality constraints than independent variables",
                    3 : "More than 3*n iterations in LSQ subproblem",
                    4 : "Inequality constraints incompatible",
                    5 : "Singular matrix E in LSQ subproblem",
                    6 : "Singular matrix C in LSQ subproblem",
                    7 : "Rank-deficient equality constraint subproblem HFTI",
                    8 : "Positive directional derivative for linesearch",
                    9 : "Iteration limit exceeded" }

    # Now do a lot of function wrapping

    # Wrap func
    feval, func = wrap_function(func, args)
    # Wrap fprime, if provided, or approx_fprime if not
    if fprime:
        geval, fprime = wrap_function(fprime,args)
    else:
        geval, fprime = wrap_function(approx_fprime,(func,epsilon))

    if f_eqcons:
        # Equality constraints provided via f_eqcons
        ceval, f_eqcons = wrap_function(f_eqcons,args)
        if fprime_eqcons:
            # Wrap fprime_eqcons
            geval, fprime_eqcons = wrap_function(fprime_eqcons,args)
        else:
            # Wrap approx_jacobian
            geval, fprime_eqcons = wrap_function(approx_jacobian,
                                                 (f_eqcons,epsilon))
    else:
        # Equality constraints provided via eqcons[]
        eqcons_prime = []
        for i in range(len(eqcons)):
            eqcons_prime.append(None)
            if eqcons[i]:
                # Wrap eqcons and eqcons_prime
                ceval, eqcons[i] = wrap_function(eqcons[i],args)
                geval, eqcons_prime[i] = wrap_function(approx_fprime,
                                                       (eqcons[i],epsilon))

    if f_ieqcons:
        # Inequality constraints provided via f_ieqcons
        ceval, f_ieqcons = wrap_function(f_ieqcons,args)
        if fprime_ieqcons:
            # Wrap fprime_ieqcons
            geval, fprime_ieqcons = wrap_function(fprime_ieqcons,args)
        else:
            # Wrap approx_jacobian
            geval, fprime_ieqcons = wrap_function(approx_jacobian,
                                                  (f_ieqcons,epsilon))
    else:
        # Inequality constraints provided via ieqcons[]
        ieqcons_prime = []
        for i in range(len(ieqcons)):
            ieqcons_prime.append(None)
            if ieqcons[i]:
                # Wrap ieqcons and ieqcons_prime
                ceval, ieqcons[i] = wrap_function(ieqcons[i],args)
                geval, ieqcons_prime[i] = wrap_function(approx_fprime,
                                                        (ieqcons[i],epsilon))


    # Transform x0 into an array.
    x = asfarray(x0).flatten()

    # Set the parameters that SLSQP will need
    # meq = The number of equality constraints
    if f_eqcons:
        meq = len(f_eqcons(x))
    else:
        meq = len(eqcons)
    if f_ieqcons:
        mieq = len(f_ieqcons(x))
    else:
        mieq = len(ieqcons)
    # m = The total number of constraints
    m = meq + mieq #+ len(bounds)
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1,m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n+1
    mineq = m + len(bounds) - meq + n1 + n1
#    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 +(n+1)*n/2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if len(bounds) == 0:
        bounds = [(-1.0E12, 1.0E12) for i in range(n)]
    elif len(bounds) != n:
        raise IndexError, \
        'SLSQP Error:  If bounds is specified, len(bounds) == len(x0)'
    else:
        for i in range(len(bounds)):
            if bounds[i][0] > bounds[i][1]:
                raise ValueError, \
                'SLSQP Error: lb > ub in bounds[' + str(i) + ']  ' + str(bounds[i])

    xl = array( [ b[0] for b in bounds ] )
    xu = array( [ b[1] for b in bounds ] )



    # Initialize the iteration counter and the mode value
    mode = array(0,int)
    acc = array(acc,float)
    majiter = array(iter,int)
    majiter_prev = 0

    # Print the header if iprint >= 2
    if iprint >= 2:
        print "%5s %5s %16s %16s" % ("NIT","FC","OBJFUN","GNORM")

    while 1:

        if mode == 0 or mode == 1: # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            # Compute the constraints
            if f_eqcons:
                c_eq = f_eqcons(x)
            else:
                c_eq = array([ eqcons[i](x) for i in range(meq) ])
            if f_ieqcons:
                c_ieq = f_ieqcons(x)
            else:
                c_ieq = array([ ieqcons[i](x) for i in range(len(ieqcons)) ])

            # Now combine c_eq and c_ieq into a single matrix
            if m == 0:
                # no constraints
                c = zeros([la])
            else:
                # constraints exist
                if meq > 0 and mieq == 0:
                    # only equality constraints
                    c = c_eq
                if meq == 0 and mieq > 0:
                    # only inequality constraints
                    c = c_ieq
                if meq > 0 and mieq > 0:
                    # both equality and inequality constraints exist
                    c = append(c_eq, c_ieq)

        if mode == 0 or mode == -1: # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x),0.0)

            # Compute the normals of the constraints
            if fprime_eqcons:
                a_eq = fprime_eqcons(x)
            else:
                a_eq = zeros([meq,n])
                for i in range(meq):
                    a_eq[i] = eqcons_prime[i](x)

            if fprime_ieqcons:
                a_ieq = fprime_ieqcons(x)
            else:
                a_ieq = zeros([mieq,n])
                for i in range(mieq):
                    a_ieq[i] = ieqcons_prime[i](x)

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0:
                # no constraints
                a = zeros([la,n])
            elif meq > 0 and mieq == 0:
                # only equality constraints
                a = a_eq
            elif meq == 0 and mieq > 0:
                # only inequality constraints
                a = a_ieq
            elif meq > 0 and mieq > 0:
                # both equality and inequality constraints exist
                a = vstack((a_eq,a_ieq))
            a = concatenate((a,zeros([la,1])),1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw)

        # Print the status of the current iterate if iprint > 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print "%5i %5i % 16.6E % 16.6E" % (majiter,feval[0],
                                               fx,linalg.norm(g))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')'
        print "            Current function value:", fx
        print "            Iterations:", majiter
        print "            Function evaluations:", feval[0]
        print "            Gradient evaluations:", geval[0]

    if not full_output:
        return x
    else:
        return [list(x),
                float(fx),
                int(majiter),
                int(mode),
                exit_modes[int(mode)] ]
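
This is the older functional interface; `fmin_slsqp` is still exposed from `scipy.optimize`, so a minimal usage sketch on a toy problem (illustrative, not taken from the code above) is:

# Usage sketch for the functional interface: minimize x0^2 + x1^2 subject to
# the inequality constraint x0 + x1 >= 1 and simple box bounds.
import numpy as np
from scipy.optimize import fmin_slsqp

def objective(x):
    return x[0] ** 2 + x[1] ** 2

x, fx, its, imode, smode = fmin_slsqp(
    objective, np.array([2.0, 0.0]),
    ieqcons=[lambda x: x[0] + x[1] - 1.0],   # must be >= 0 at the solution
    bounds=[(0.0, 10.0), (0.0, 10.0)],
    iprint=0, full_output=1)
print(x, fx, imode, smode)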