Code example #1
File: _minimize.py  Project: afarahi/QFT
def minimize_scalar(fun, bracket=None, bounds=None, args=(),
                    method='brent', options=None):
    """
    Minimization of scalar function of one variable.

    .. versionadded:: 0.11.0

    Parameters
    ----------
    fun : callable
        Objective function.
        Scalar function, must return a scalar.
    bracket : sequence, optional
        For methods 'brent' and 'golden', `bracket` defines the bracketing
        interval. It can either have three items ``(a, b, c)`` such that
        ``a < b < c`` and ``fun(b) < fun(a), fun(c)``, or two items `a` and
        `c`, which are assumed to be a starting interval for a downhill
        bracket search (see `bracket`); note that this does not guarantee
        that the obtained solution will satisfy ``a <= x <= c``.
    bounds : sequence, optional
        For method 'bounded', `bounds` is mandatory and must have two items
        corresponding to the optimization bounds.
    args : tuple, optional
        Extra arguments passed to the objective function.
    method : str, optional
        Type of solver.  Should be one of

            - 'Brent'
            - 'Bounded'
            - 'Golden'

    options : dict, optional
        A dictionary of solver options.
            xtol : float
                Relative error in solution `xopt` acceptable for
                convergence.
            ftol : float
                Relative error in ``fun(xopt)`` acceptable for convergence.
            maxiter : int
                Maximum number of iterations to perform.
            disp : bool
                Set to True to print convergence messages.

    Returns
    -------
    res : Result
        The optimization result represented as a ``Result`` object.
        Important attributes are: ``x`` the solution array, ``success`` a
        Boolean flag indicating if the optimizer exited successfully and
        ``message`` which describes the cause of the termination. See
        `Result` for a description of other attributes.

    See also
    --------
    minimize: Interface to minimization algorithms for scalar multivariate
        functions.

    Notes
    -----
    This section describes the available solvers that can be selected by the
    'method' parameter. The default method is *Brent*.

    Method *Brent* uses Brent's algorithm to find a local minimum.
    The algorithm uses inverse parabolic interpolation when possible to
    speed up convergence of the golden section method.

    Method *Golden* uses the golden section search technique. It uses an
    analog of the bisection method to decrease the bracketed interval. It
    is usually preferable to use the *Brent* method.

    Method *Bounded* can perform bounded minimization. It uses the Brent
    method to find a local minimum in the interval x1 < xopt < x2.

    Examples
    --------
    Consider the problem of minimizing the following function.

    >>> def f(x):
    ...     return (x - 2) * x * (x + 2)**2

    Using the *Brent* method, we find the local minimum as:

    >>> from scipy.optimize import minimize_scalar
    >>> res = minimize_scalar(f)
    >>> res.x
    1.28077640403

    Using the *Bounded* method, we find a local minimum with specified
    bounds as:

    >>> res = minimize_scalar(f, bounds=(-3, -1), method='bounded')
    >>> res.x
    -2.0000002026

    """
    meth = method.lower()
    if options is None:
        options = {}

    if meth == 'brent':
        return _minimize_scalar_brent(fun, bracket, args, options)
    elif meth == 'bounded':
        if bounds is None:
            raise ValueError('The `bounds` parameter is mandatory for '
                             'method `bounded`.')
        return _minimize_scalar_bounded(fun, bounds, args, options)
    elif meth == 'golden':
        return _minimize_scalar_golden(fun, bracket, args, options)
    else:
        raise ValueError('Unknown solver %s' % method)
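
The docstring above documents the `bracket` and `args` parameters, but the
built-in examples never exercise them. The sketch below shows both in use;
it assumes the function behaves as documented, and `shifted_parabola` is a
hypothetical test function introduced here, with its minimum at x = c.

from scipy.optimize import minimize_scalar

def shifted_parabola(x, c):
    # Hypothetical helper: a parabola whose minimum sits at x = c.
    return (x - c) ** 2

# Pass the shift c = 1.5 through `args`, and give a two-item bracket
# (a, c) as a starting interval for the downhill bracket search.
res = minimize_scalar(shifted_parabola, bracket=(0.0, 1.0), args=(1.5,))
print(res.x)  # expected to be close to 1.5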
Code example #2
def minimize_scalar(fun, bracket=None, bounds=None, args=(),
                    method='brent', tol=None, options=None):
    """
    Minimization of scalar function of one variable.

    .. versionadded:: 0.11.0

    Parameters
    ----------
    fun : callable
        Objective function.
        Scalar function, must return a scalar.
    bracket : sequence, optional
        For methods 'brent' and 'golden', `bracket` defines the bracketing
        interval. It can either have three items ``(a, b, c)`` such that
        ``a < b < c`` and ``fun(b) < fun(a), fun(c)``, or two items `a` and
        `c`, which are assumed to be a starting interval for a downhill
        bracket search (see `bracket`); note that this does not guarantee
        that the obtained solution will satisfy ``a <= x <= c``.
    bounds : sequence, optional
        For method 'bounded', `bounds` is mandatory and must have two items
        corresponding to the optimization bounds.
    args : tuple, optional
        Extra arguments passed to the objective function.
    method : str, optional
        Type of solver.  Should be one of

            - 'Brent'
            - 'Bounded'
            - 'Golden'
    tol : float, optional
        Tolerance for termination. For detailed control, use solver-specific
        options.
    options : dict, optional
        A dictionary of solver options.
            xtol : float
                Relative error in solution `xopt` acceptable for
                convergence.
            maxiter : int
                Maximum number of iterations to perform.
            disp : bool
                Set to True to print convergence messages.

    Returns
    -------
    res : Result
        The optimization result represented as a ``Result`` object.
        Important attributes are: ``x`` the solution array, ``success`` a
        Boolean flag indicating if the optimizer exited successfully and
        ``message`` which describes the cause of the termination. See
        `Result` for a description of other attributes.

    See also
    --------
    minimize: Interface to minimization algorithms for scalar multivariate
        functions.

    Notes
    -----
    This section describes the available solvers that can be selected by the
    'method' parameter. The default method is *Brent*.

    Method *Brent* uses Brent's algorithm to find a local minimum.
    The algorithm uses inverse parabolic interpolation when possible to
    speed up convergence of the golden section method.

    Method *Golden* uses the golden section search technique. It uses an
    analog of the bisection method to decrease the bracketed interval. It
    is usually preferable to use the *Brent* method.

    Method *Bounded* can perform bounded minimization. It uses the Brent
    method to find a local minimum in the interval x1 < xopt < x2.

    Examples
    --------
    Consider the problem of minimizing the following function.

    >>> def f(x):
    ...     return (x - 2) * x * (x + 2)**2

    Using the *Brent* method, we find the local minimum as:

    >>> from scipy.optimize import minimize_scalar
    >>> res = minimize_scalar(f)
    >>> res.x
    1.28077640403

    Using the *Bounded* method, we find a local minimum with specified
    bounds as:

    >>> res = minimize_scalar(f, bounds=(-3, -1), method='bounded')
    >>> res.x
    -2.0000002026

    """
    meth = method.lower()
    if options is None:
        options = {}

    if tol is not None:
        options = dict(options)
        options.setdefault('xtol', tol)

    if meth == 'brent':
        return _minimize_scalar_brent(fun, bracket, args, **options)
    elif meth == 'bounded':
        if bounds is None:
            raise ValueError('The `bounds` parameter is mandatory for '
                             'method `bounded`.')
        return _minimize_scalar_bounded(fun, bounds, args, **options)
    elif meth == 'golden':
        return _minimize_scalar_golden(fun, bracket, args, **options)
    else:
        raise ValueError('Unknown solver %s' % method)
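
The main difference from code example #1 is the top-level `tol` argument,
which the dispatch code folds into the solver options as `xtol` when `xtol`
is not already present. A minimal sketch of that equivalence, assuming the
behavior of the dispatch code above:

from scipy.optimize import minimize_scalar

def f(x):
    return (x - 2) * x * (x + 2) ** 2

# `tol` is copied into the options dict via setdefault('xtol', tol),
# so these two calls request the same tolerance from the solver.
res_a = minimize_scalar(f, method='brent', tol=1e-10)
res_b = minimize_scalar(f, method='brent', options={'xtol': 1e-10})
print(res_a.x, res_b.x)  # both near 1.2807764

Note that `options` is shallow-copied with `dict(options)` before the
`setdefault` call, so the caller's dictionary is never mutated.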