Example #1
0
    def __init__(self, f, h=0.5, emax=1e-8, imax=1000):
        '''
        Initializes the optimizer.

        To create an optimizer of this type, instantiate the class with the
        parameters given below:

        :Parameters:
          f
            A multivariable function to be optimized. The function should have
            only one parameter, a multidimensional line-vector, and return the
            function value, a scalar.
          h
            Convergence step of the search. Defaults to 0.5
          emax
            Maximum allowed error. The algorithm stops as soon as the error is
            below this level. The error is absolute. Defaults to 1e-8
          imax
            Maximum number of iterations, the algorithm stops as soon this
            number of iterations are executed, no matter what the error is at
            the moment. Defaults to 1000
        '''
        Optimizer.__init__(self)
        self.__f = f
        # NOTE(review): the search step starts unset here; presumably it is
        # computed lazily on the first call to the stepping method -- confirm.
        self.__dx = None
        self.__h = h
        self.__emax = float(emax)
        self.__imax = int(imax)
Example #2
0
    def __init__(self, f, emax=1e-5, imax=1000):
        '''
        Initializes the optimizer.

        To create an optimizer of this type, instantiate the class with the
        parameters given below:

        :Parameters:
          f
            A one variable only function to be optimized. The function should
            have only one parameter and return the function value.
          emax
            Maximum allowed error. The algorithm stops as soon as the error is
            below this level. The error is absolute. Defaults to 1e-5
          imax
            Maximum number of iterations, the algorithm stops as soon this
            number of iterations are executed, no matter what the error is at
            the moment. Defaults to 1000
        '''
        Optimizer.__init__(self)
        self.__f = f
        # Golden ratio conjugate, (sqrt(5)-1)/2 ~= 0.618 -- the interval
        # reduction factor used by the golden-section search.
        self.__k = (sqrt(5) - 1.) / 2.
        self.__emax = float(emax)
        self.__imax = int(imax)
    def __init__(self, f, df=None, B=None, h=0.1, emax=1e-5, imax=1000):
        '''
        Initializes the optimizer.

        Instantiate the class with the parameters described below to build an
        optimizer of this kind:

        :Parameters:
          f
            The multivariable cost function to minimize. It must accept a
            single argument -- a multidimensional line-vector -- and return a
            scalar function value.
          df
            Function computing the gradient vector of ``f``. If ``None`` (the
            default), a gradient estimate is derived from ``f`` itself.
          B
            Initial estimate of the inverse hessian. Unlike in the Newton
            method, this matrix holds plain numbers, not functions -- it is an
            estimate at a single point and *should* be consistent with the
            first estimate (i.e. the inverse of the hessian evaluated there),
            otherwise the algorithm may diverge. Defaults to ``None``; when
            omitted, its estimation is deferred to the first call of the
            ``step`` method, where it is handled automatically.
          h
            Convergence step. The method keeps this step fixed (no adaptive
            step sizing) to avoid Stiefel cages.
          emax
            Maximum allowed absolute error; iteration stops once the error
            falls below this level.
          imax
            Maximum number of iterations; iteration stops after this many
            steps regardless of the current error.
        '''
        Optimizer.__init__(self)
        self.__f = f
        # Fall back to a numerically estimated gradient when none is supplied.
        self.__df = df if df is not None else gradient(f)
        self.__B = B
        self.__h = h
        self.__emax = float(emax)
        self.__imax = int(imax)
Example #4
0
    def __init__(self, f, df=None, hf=None, h=0.1, emax=1e-5, imax=1000):
        '''
        Initializes the optimizer.

        Instantiate the class with the parameters described below to build an
        optimizer of this kind:

        :Parameters:
          f
            The multivariable cost function to minimize. It must accept a
            single argument -- a multidimensional line-vector -- and return a
            scalar function value.
          df
            Function computing the gradient vector of ``f``. If ``None`` (the
            default), a gradient estimate is derived from ``f`` itself.
          hf
            Function computing the hessian matrix of ``f``. If ``None`` (the
            default), a hessian estimate is derived from ``f`` itself.
          h
            Convergence step. The method keeps this step fixed (no adaptive
            step sizing) to avoid Stiefel cages.
          emax
            Maximum allowed absolute error; iteration stops once the error
            falls below this level.
          imax
            Maximum number of iterations; iteration stops after this many
            steps regardless of the current error.
        '''
        Optimizer.__init__(self)
        self.__f = f
        # Fall back to numerical estimates for any derivative not supplied.
        self.__df = df if df is not None else gradient(f)
        self.__hf = hf if hf is not None else hessian(f)
        self.__h = h
        self.__emax = float(emax)
        self.__imax = int(imax)