Example #1
def _minimize_slsqp(func, x0, args=(), jac=None, bounds=None,
                    constraints=(),
                    maxiter=100, ftol=1.0E-6, iprint=1, disp=False,
                    eps=_epsilon,
                    **unknown_options):
    """
    Minimize a scalar function of one or more variables using Sequential
    Least SQuares Programming (SLSQP).

    Options for the SLSQP algorithm are:
        ftol : float
            Precision goal for the value of f in the stopping criterion.
        eps : float
            Step size used for numerical approximation of the Jacobian.
        disp : bool
            Set to True to print convergence messages. If False,
            `iprint` is ignored and set to 0.
        maxiter : int
            Maximum number of iterations.

    This function is called by the `minimize` function with
    `method=SLSQP`. It is not supposed to be called directly.
    """
    _check_unknown_options(unknown_options)
    fprime = jac
    iter = maxiter
    acc = ftol
    epsilon = eps

    if not disp:
        iprint = 0

    # Constraints are triaged per type into a dictionary of tuples
    if isinstance(constraints, dict):
        constraints = (constraints, )

    cons = {'eq': (), 'ineq': ()}
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError:
            raise KeyError('Constraint %d has no type defined.' % ic)
        except TypeError:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.')
        except AttributeError:
            raise TypeError("Constraint's type must be a string.")
        else:
            if ctype not in ['eq', 'ineq']:
                raise ValueError("Unknown constraint type '%s'." % con['type'])

        # check function
        if 'fun' not in con:
            raise ValueError('Constraint %d has no function defined.' % ic)

        # check Jacobian
        cjac = con.get('jac')
        if cjac is None:
            # approximate Jacobian; the factory binds this constraint's 'fun'
            # so the closure does not capture the loop variable `con`
            def cjac_factory(fun):
                def cjac(x, *args):
                    return approx_jacobian(x, fun, epsilon, *args)
                return cjac
            cjac = cjac_factory(con['fun'])

        # update constraints' dictionary
        cons[ctype] += ({'fun' : con['fun'],
                         'jac' : cjac,
                         'args': con.get('args', ())}, )


    exit_modes = { -1 : "Gradient evaluation required (g & a)",
                    0 : "Optimization terminated successfully.",
                    1 : "Function evaluation required (f & c)",
                    2 : "More equality constraints than independent variables",
                    3 : "More than 3*n iterations in LSQ subproblem",
                    4 : "Inequality constraints incompatible",
                    5 : "Singular matrix E in LSQ subproblem",
                    6 : "Singular matrix C in LSQ subproblem",
                    7 : "Rank-deficient equality constraint subproblem HFTI",
                    8 : "Positive directional derivative for linesearch",
                    9 : "Iteration limit exceeded" }


    # Wrap func
    feval, func = wrap_function(func, args)

    # Wrap fprime, if provided, or approx_jacobian if not
    if fprime:
        geval, fprime = wrap_function(fprime, args)
    else:
        geval, fprime = wrap_function(approx_jacobian, (func, epsilon))

    # Transform x0 into an array.
    x = asfarray(x0).flatten()


    # Set the parameters that SLSQP will need
    # meq, mieq: number of equality and inequality constraints
    meq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['eq']]))
    mieq = sum(map(len, [atleast_1d(c['fun'](x, *c['args'])) for c in cons['ineq']]))
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1,m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n+1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 +(n+1)*n/2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if bounds is None or len(bounds) == 0:
        xl, xu = array([-1.0E12]*n), array([1.0E12]*n)
    else:
        bnds = array(bounds, float)
        if bnds.shape[0] != n:
            raise IndexError('SLSQP Error: the length of bounds is not '
                             'compatible with that of x0.')

        bnderr = where(bnds[:, 0] > bnds[:, 1])[0]
        if bnderr.size > 0:
            raise ValueError('SLSQP Error: lb > ub in bounds %s.' %
                             ', '.join(str(b) for b in bnderr))
        xl, xu = bnds[:, 0], bnds[:, 1]

        # filter -inf and inf values
        infbnd = isinf(bnds)
        xl[infbnd[:, 0]] = -1.0E12
        xu[infbnd[:, 1]] = 1.0E12

    # Initialize the iteration counter and the mode value
    mode = array(0,int)
    acc = array(acc,float)
    majiter = array(iter,int)
    majiter_prev = 0

    # Print the header if iprint >= 2
    if iprint >= 2:
        print "%5s %5s %16s %16s" % ("NIT","FC","OBJFUN","GNORM")

    while 1:

        if mode == 0 or mode == 1: # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            # Compute the constraints
            if cons['eq']:
                c_eq  = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['eq']])
            else:
                c_eq = zeros(0)
            if cons['ineq']:
                c_ieq = concatenate([atleast_1d(con['fun'](x, *con['args']))
                                     for con in cons['ineq']])
            else:
                c_ieq = zeros(0)

            # Now combine c_eq and c_ieq into a single matrix
            c = concatenate((c_eq, c_ieq))

        if mode == 0 or mode == -1: # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x),0.0)

            # Compute the normals of the constraints
            if cons['eq']:
                a_eq = vstack([con['jac'](x, *con['args'])
                               for con in cons['eq']])
            else: # no equality constraint
                a_eq = zeros((meq, n))

            if cons['ineq']:
                a_ieq = vstack([con['jac'](x, *con['args'])
                                for con in cons['ineq']])
            else: # no inequality constraint
                a_ieq = zeros((mieq, n))

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0: # no constraints
                a = zeros((la, n))
            else:
                a = vstack((a_eq, a_ieq))
            a = concatenate((a,zeros([la,1])),1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw)

        # Print the status of the current iterate if iprint >= 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print "%5i %5i % 16.6E % 16.6E" % (majiter,feval[0],
                                               fx,linalg.norm(g))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')'
        print "            Current function value:", fx
        print "            Iterations:", majiter
        print "            Function evaluations:", feval[0]
        print "            Gradient evaluations:", geval[0]

    return Result(x=x, fun=fx, jac=g, nit=int(majiter), nfev=feval[0],
                  njev=geval[0], status=int(mode),
                  message=exit_modes[int(mode)], success=(mode == 0))
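
A minimal usage sketch (not part of the original listing) showing how the options and constraint dictionaries documented above are typically passed through `scipy.optimize.minimize`; the toy objective, bounds, and constraints are assumptions for illustration only:

# Hedged sketch: minimize x0*x1 subject to x0 + x1 >= 1 and x0 == x1,
# using the SLSQP options (ftol, maxiter, disp) documented above.
from scipy.optimize import minimize

cons = ({'type': 'ineq', 'fun': lambda x: x[0] + x[1] - 1.0},
        {'type': 'eq',   'fun': lambda x: x[0] - x[1]})

res = minimize(lambda x: x[0] * x[1], x0=[2.0, 0.0], method='SLSQP',
               bounds=[(0, None), (0, None)], constraints=cons,
               options={'ftol': 1e-9, 'maxiter': 200, 'disp': True})
print(res.x)      # expect approximately [0.5, 0.5]
print(res.fun)    # expect approximately 0.25
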
Example #2
def fmin_slsqp( func, x0 , eqcons=[], f_eqcons=None, ieqcons=[], f_ieqcons=None,
                bounds = [], fprime = None, fprime_eqcons=None,
                fprime_ieqcons=None, args = (), iter = 100, acc = 1.0E-6,
                iprint = 1, full_output = 0, epsilon = _epsilon ):
    """
    Minimize a function using Sequential Least SQuares Programming

    Python interface function for the SLSQP Optimization subroutine
    originally implemented by Dieter Kraft.

    Parameters
    ----------
    func : callable f(x,*args)
        Objective function.
    x0 : ndarray of float
        Initial guess for the independent variable(s).
    eqcons : list
        A list of functions of length n such that
        eqcons[j](x0,*args) == 0.0 in a successfully optimized
        problem.
    f_eqcons : callable f(x,*args)
        Returns an array in which each element must equal 0.0 in a
        successfully optimized problem.  If f_eqcons is specified,
        eqcons is ignored.
    ieqcons : list
        A list of functions of length n such that
        ieqcons[j](x0,*args) >= 0.0 in a successfully optimized
        problem.
    f_ieqcons : callable f(x0,*args)
        Returns an array in which each element must be greater or
        equal to 0.0 in a successfully optimized problem.  If
        f_ieqcons is specified, ieqcons is ignored.
    bounds : list
        A list of tuples specifying the lower and upper bound
        for each independent variable [(xl0, xu0),(xl1, xu1),...]
    fprime : callable `f(x,*args)`
        A function that evaluates the partial derivatives of func.
    fprime_eqcons : callable `f(x,*args)`
        A function of the form `f(x, *args)` that returns the m by n
        array of equality constraint normals.  If not provided,
        the normals will be approximated. The array returned by
        fprime_eqcons should be sized as ( len(eqcons), len(x0) ).
    fprime_ieqcons : callable `f(x,*args)`
        A function of the form `f(x, *args)` that returns the m by n
        array of inequality constraint normals.  If not provided,
        the normals will be approximated. The array returned by
        fprime_ieqcons should be sized as ( len(ieqcons), len(x0) ).
    args : sequence
        Additional arguments passed to func and fprime.
    iter : int
        The maximum number of iterations.
    acc : float
        Requested accuracy.
    iprint : int
        The verbosity of fmin_slsqp :

        * iprint <= 0 : Silent operation
        * iprint == 1 : Print summary upon completion (default)
        * iprint >= 2 : Print status of each iterate and summary
    full_output : bool
        If False, return only the minimizer of func (default).
        Otherwise, output final objective function and summary
        information.
    epsilon : float
        The step size for finite-difference derivative estimates.

    Returns
    -------
    x : ndarray of float
        The final minimizer of func.
    fx : ndarray of float, if full_output is true
        The final value of the objective function.
    its : int, if full_output is true
        The number of iterations.
    imode : int, if full_output is true
        The exit mode from the optimizer (see below).
    smode : string, if full_output is true
        Message describing the exit mode from the optimizer.

    Notes
    -----
    Exit modes are defined as follows ::

        -1 : Gradient evaluation required (g & a)
         0 : Optimization terminated successfully.
         1 : Function evaluation required (f & c)
         2 : More equality constraints than independent variables
         3 : More than 3*n iterations in LSQ subproblem
         4 : Inequality constraints incompatible
         5 : Singular matrix E in LSQ subproblem
         6 : Singular matrix C in LSQ subproblem
         7 : Rank-deficient equality constraint subproblem HFTI
         8 : Positive directional derivative for linesearch
         9 : Iteration limit exceeded

    Examples
    --------
    Examples are given :ref:`in the tutorial <tutorial-sqlsp>`.

    """

    exit_modes = { -1 : "Gradient evaluation required (g & a)",
                    0 : "Optimization terminated successfully.",
                    1 : "Function evaluation required (f & c)",
                    2 : "More equality constraints than independent variables",
                    3 : "More than 3*n iterations in LSQ subproblem",
                    4 : "Inequality constraints incompatible",
                    5 : "Singular matrix E in LSQ subproblem",
                    6 : "Singular matrix C in LSQ subproblem",
                    7 : "Rank-deficient equality constraint subproblem HFTI",
                    8 : "Positive directional derivative for linesearch",
                    9 : "Iteration limit exceeded" }

    # Now do a lot of function wrapping

    # Wrap func
    feval, func = wrap_function(func, args)
    # Wrap fprime, if provided, or approx_fprime if not
    if fprime:
        geval, fprime = wrap_function(fprime,args)
    else:
        geval, fprime = wrap_function(approx_fprime,(func,epsilon))

    if f_eqcons:
        # Equality constraints provided via f_eqcons
        ceval, f_eqcons = wrap_function(f_eqcons,args)
        if fprime_eqcons:
            # Wrap fprime_eqcons
            geval, fprime_eqcons = wrap_function(fprime_eqcons,args)
        else:
            # Wrap approx_jacobian
            geval, fprime_eqcons = wrap_function(approx_jacobian,
                                                 (f_eqcons,epsilon))
    else:
        # Equality constraints provided via eqcons[]
        eqcons_prime = []
        for i in range(len(eqcons)):
            eqcons_prime.append(None)
            if eqcons[i]:
                # Wrap eqcons and eqcons_prime
                ceval, eqcons[i] = wrap_function(eqcons[i],args)
                geval, eqcons_prime[i] = wrap_function(approx_fprime,
                                                       (eqcons[i],epsilon))

    if f_ieqcons:
        # Inequality constraints provided via f_ieqcons
        ceval, f_ieqcons = wrap_function(f_ieqcons,args)
        if fprime_ieqcons:
            # Wrap fprime_ieqcons
            geval, fprime_ieqcons = wrap_function(fprime_ieqcons,args)
        else:
            # Wrap approx_jacobian
            geval, fprime_ieqcons = wrap_function(approx_jacobian,
                                                  (f_ieqcons,epsilon))
    else:
        # Inequality constraints provided via ieqcons[]
        ieqcons_prime = []
        for i in range(len(ieqcons)):
            ieqcons_prime.append(None)
            if ieqcons[i]:
                # Wrap ieqcons and ieqcons_prime
                ceval, ieqcons[i] = wrap_function(ieqcons[i],args)
                geval, ieqcons_prime[i] = wrap_function(approx_fprime,
                                                        (ieqcons[i],epsilon))


    # Transform x0 into an array.
    x = asfarray(x0).flatten()

    # Set the parameters that SLSQP will need
    # meq = The number of equality constraints
    if f_eqcons:
        meq = len(f_eqcons(x))
    else:
        meq = len(eqcons)
    if f_ieqcons:
        mieq = len(f_ieqcons(x))
    else:
        mieq = len(ieqcons)
    # m = The total number of constraints
    m = meq + mieq
    # la = The number of constraints, or 1 if there are no constraints
    la = array([1,m]).max()
    # n = The number of independent variables
    n = len(x)

    # Define the workspaces for SLSQP
    n1 = n+1
    mineq = m - meq + n1 + n1
    len_w = (3*n1+m)*(n1+1)+(n1-meq+1)*(mineq+2) + 2*mineq+(n1+mineq)*(n1-meq) \
            + 2*meq + n1 +(n+1)*n/2 + 2*m + 3*n + 3*n1 + 1
    len_jw = mineq
    w = zeros(len_w)
    jw = zeros(len_jw)

    # Decompose bounds into xl and xu
    if len(bounds) == 0:
        bounds = [(-1.0E12, 1.0E12) for i in range(n)]
    elif len(bounds) != n:
        raise IndexError, \
        'SLSQP Error:  If bounds is specified, len(bounds) == len(x0)'
    else:
        for i in range(len(bounds)):
            if bounds[i][0] > bounds[i][1]:
                raise ValueError, \
                'SLSQP Error: lb > ub in bounds[' + str(i) + ']  ' + str(bounds[i])

    xl = array( [ b[0] for b in bounds ] )
    xu = array( [ b[1] for b in bounds ] )



    # Initialize the iteration counter and the mode value
    mode = array(0,int)
    acc = array(acc,float)
    majiter = array(iter,int)
    majiter_prev = 0

    # Print the header if iprint >= 2
    if iprint >= 2:
        print "%5s %5s %16s %16s" % ("NIT","FC","OBJFUN","GNORM")

    while 1:

        if mode == 0 or mode == 1: # objective and constraint evaluation required

            # Compute objective function
            fx = func(x)
            # Compute the constraints
            if f_eqcons:
                c_eq = f_eqcons(x)
            else:
                c_eq = array([ eqcons[i](x) for i in range(meq) ])
            if f_ieqcons:
                c_ieq = f_ieqcons(x)
            else:
                c_ieq = array([ ieqcons[i](x) for i in range(len(ieqcons)) ])

            # Now combine c_eq and c_ieq into a single matrix
            if m == 0:
                # no constraints
                c = zeros([la])
            else:
                # constraints exist
                if meq > 0 and mieq == 0:
                    # only equality constraints
                    c = c_eq
                if meq == 0 and mieq > 0:
                    # only inequality constraints
                    c = c_ieq
                if meq > 0 and mieq > 0:
                    # both equality and inequality constraints exist
                    c = append(c_eq, c_ieq)

        if mode == 0 or mode == -1: # gradient evaluation required

            # Compute the derivatives of the objective function
            # For some reason SLSQP wants g dimensioned to n+1
            g = append(fprime(x),0.0)

            # Compute the normals of the constraints
            if fprime_eqcons:
                a_eq = fprime_eqcons(x)
            else:
                a_eq = zeros([meq,n])
                for i in range(meq):
                    a_eq[i] = eqcons_prime[i](x)

            if fprime_ieqcons:
                a_ieq = fprime_ieqcons(x)
            else:
                a_ieq = zeros([mieq,n])
                for i in range(mieq):
                    a_ieq[i] = ieqcons_prime[i](x)

            # Now combine a_eq and a_ieq into a single a matrix
            if m == 0:
                # no constraints
                a = zeros([la,n])
            elif meq > 0 and mieq == 0:
                # only equality constraints
                a = a_eq
            elif meq == 0 and mieq > 0:
                # only inequality constraints
                a = a_ieq
            elif meq > 0 and mieq > 0:
                # both equality and inequality constraints exist
                a = vstack((a_eq,a_ieq))
            a = concatenate((a,zeros([la,1])),1)

        # Call SLSQP
        slsqp(m, meq, x, xl, xu, fx, c, g, a, acc, majiter, mode, w, jw)

        # Print the status of the current iterate if iprint >= 2 and the
        # major iteration has incremented
        if iprint >= 2 and majiter > majiter_prev:
            print "%5i %5i % 16.6E % 16.6E" % (majiter,feval[0],
                                               fx,linalg.norm(g))

        # If exit mode is not -1 or 1, slsqp has completed
        if abs(mode) != 1:
            break

        majiter_prev = int(majiter)

    # Optimization loop complete.  Print status if requested
    if iprint >= 1:
        print exit_modes[int(mode)] + "    (Exit mode " + str(mode) + ')'
        print "            Current function value:", fx
        print "            Iterations:", majiter
        print "            Function evaluations:", feval[0]
        print "            Gradient evaluations:", geval[0]

    if not full_output:
        return x
    else:
        return [list(x),
                float(fx),
                int(majiter),
                int(mode),
                exit_modes[int(mode)] ]
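
A short usage sketch (not part of the original listing) of the fmin_slsqp interface documented above, on an assumed toy problem; with full_output=True the five values returned at the end of the function are unpacked directly:

# Hedged sketch: the same kind of toy problem, expressed through the legacy
# eqcons/ieqcons lists of callables.
from scipy.optimize import fmin_slsqp

x, fx, its, imode, smode = fmin_slsqp(
    func=lambda x: x[0] * x[1],
    x0=[2.0, 0.0],
    eqcons=[lambda x: x[0] - x[1]],          # must equal 0.0 at the solution
    ieqcons=[lambda x: x[0] + x[1] - 1.0],   # must be >= 0.0 at the solution
    bounds=[(0.0, 10.0), (0.0, 10.0)],
    iprint=0, full_output=True)
print(smode)   # "Optimization terminated successfully." when imode == 0
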
Example #3
def Main(ArgList):
    description = '''This program finds optimized orbital parameters for the yaeh program so that the computed band structure approaches a reference (standard) band structure.
    Several fitting schemes based on band RMSD are available.
    In general, each band included in the fit has weight 1, and unused bands have weight 0.001 to avoid fluctuations.
    1. Use the lowest N bands, and set the HOMO and LUMO weights to 10 if the conduction band is included.
    2. Use N bands starting from the lowest i-th band. The HOMO and LUMO weights stay at 1.
    3. Besides the band RMSD, include the gap with weight 1.

    Notes on parallelization: writing a job scheduling system is not easy, so we use a very simple scheme: N compounds are computed simultaneously, and whenever one finishes another one starts. Also, N_i processes are used for the i-th compound.
    The number of processes in use therefore varies and may exceed the total number of processes assigned, which can reduce efficiency. To avoid this, choose the number of processes for each compound carefully.
    The best approach is to request enough processes in PBS and run everything.
    '''
    global nProcess

    parser = ArgumentParser(description=description)
    parser.add_argument("-p",dest="ProcessCount",type=int,default=0,help="The number of tasks can be executed in parallel. Note p=0 will disable MPI for programs and do not use Pool, but p=1 use Pool")
    parser.add_argument("-m",dest='method_min',type=str,default='simplex',help="The minimization algorithm, possible values: simplex, cg, powell, bfgs, basinhopping")
    parser.add_argument("--tol",dest='tol_min',type=float,default=1e-5,help="The tolerance for minimization, only used in basin-hopping minimization steps now")
    parser.add_argument("--iter",dest="n_iter",type=int,default=100,help="Number of basin-hopping iterations, default 100")
    parser.add_argument("filename",nargs=1,help="The xml input file")

    options = parser.parse_args()

    tStart = time.time()

    nProcess = options.ProcessCount
    if ( nProcess < 0 ):
        nProcess = 0
    if (nProcess != 0):
        print("Parallel tasks: %i" % nProcess)
        if (nProcess == 1):
            print("Warning: only 1 process is used in parallel calculation, note the implentation is different from serial one!")

#Prepare calculation
    xpMulti = XMLSerilizer(globals(),"fit_input.xsd")
    mIn = xpMulti.Deserilize(options.filename[0])
    mIn.apply_global()

#This is customized by programs
#Set parameters I/O
    if (mIn.CalcProgram == "tbgwpw"):
        params_base = TBGWPPLibrary(filename=mIn.ParaFileInput)
        mIn.load(params_base)
        x0 = params_base.get_para()
    elif (mIn.CalcProgram == "yaeh" or mIn.CalcProgram == "yaehg"):
        params_base = yaeh_input(mIn.ParaFileInput,b_write_fix=True)
        mIn.load(params_base)
        x0 = params_base.get_para()
        print(x0)

    MinFunc = wrap_function(f_CalcMulti,tuple([mIn]))

#Check parameters
    nProcessUse = sum([x.nProcess for x in mIn.listCompoundConfig])
    if (nProcess > nProcessUse):
        print("Warning: %i processes are specified when maximally possible parallel process is %i, you can lower to it without performance loss" % (nProcess, nProcessUse)) 

    dic_func_min = {"cg": scipy.optimize.fmin_cg,
            "bfgs": scipy.optimize.fmin_bfgs,
            "simplex" : scipy.optimize.fmin,
            "powell" : scipy.optimize.fmin_powell}

    if ( not mIn.bCompareOnly):
#Global minimization
#We always use powell as the local minimization here
        if (options.method_min == "basinhopping"):
            def take_step(x):
                '''
                Randomly perturb the parameter guess with uniform random steps.
                For 0 < x < 1 (treated as an exponential part), scale x by a
                random factor in [0.6, 1.4].
                For abs(x) < 100, add a step of up to +/-40% of max(abs(x), 5.0).
                Abnormal values (abs(x) >= 100) are reset to 1.0.
                '''
                x2 = []
                for v0 in x:
                    if (abs(v0) < 1 and v0 > 0): #Guess as exponential part
                        v2 = (numpy.random.random() * 0.8 + 0.6) * v0
                    elif (abs(v0) < 100):
#Use at least 5.0 as the scale for the random step
                        vd1 = max(abs(v0), 5.0)
                        vd = (numpy.random.random() * 0.8 - 0.4) * vd1
                        v2 = v0 + vd
                    else:
                        v2 = 1.0
                    x2.append(v2)
                print("Jump")
                print(x2)
                return x2


            print("Method: Basin-hopping with tolerance=%f" % options.tol_min)
            res = scipy.optimize.basinhopping(MinFunc, x0=x0, niter=options.n_iter, T=0.5, 
                    minimizer_kwargs = {"method":"Powell","tol":options.tol_min, "callback":CallBackFunc,
                        "options":{'xtol':options.tol_min,"ftol":options.tol_min}},
                    take_step = take_step,
                    callback = CallBackFuncGM)
            res = res.x
        else:
            #Local minimization only
            func_min = dic_func_min[options.method_min]
            if (options.method_min == "cg" or options.method_min == "bfgs"): #1st derivatives (finite difference)
                res = func_min(MinFunc,x0=x0,fprime=None,epsilon=0.001,gtol=1e-02,maxiter=1000,callback=CallBackFunc)
            else:#Value only
                res = func_min(MinFunc,x0=x0,xtol=1e-3,ftol=1e-05,maxiter=10000,callback=CallBackFunc)

#Store result
        mIn.params_database.set_para(res)
        mIn.params_database.write(mIn.ParaFileOutput)

#Show result
    f_PrintCalcMulti(mIn)

    print("Time used: %.2f s"% (time.time()-tStart) )