Example #1
    def __init__(self, f, df=None, hf=None, h=0.1, emax=1e-5, imax=1000):
        '''
        Initializes the optimizer.

        To create an optimizer of this type, instantiate the class with the
        parameters given below:

        :Parameters:
          f
            A multivariable function to be optimized. The function should take
            only one parameter, a one-dimensional vector of the independent
            variables, and return the function value, a scalar.
          df
            A function to calculate the gradient vector of the cost function
            ``f``. Defaults to ``None``; if no gradient is supplied, it is
            estimated from the cost function by finite differences.
          hf
            A function to calculate the hessian matrix of the cost function
            ``f``. Defaults to ``None``; if no hessian is supplied, it is
            estimated from the cost function by finite differences.
          h
            Convergence step. This method does not take into consideration the
            possibility of varying the convergence step, to avoid Stiefel cages.
          emax
            Maximum allowed error. The algorithm stops as soon as the error is
            below this level. The error is absolute.
          imax
            Maximum number of iterations. The algorithm stops as soon as this
            number of iterations is executed, no matter what the error is at
            that moment.
        '''
        Optimizer.__init__(self)
        self.__f = f
        if df is None:
            self.__df = gradient(f)
        else:
            self.__df = df
        if hf is None:
            self.__hf = hessian(f)
        else:
            self.__hf = hf
        self.__h = h
        self.__emax = float(emax)
        self.__imax = int(imax)
        self.__B = None    # inverse hessian estimate, created on the first step
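The ``gradient`` and ``hessian`` factories that the constructor falls back on
are not shown in this listing. Below is a minimal sketch of what such helpers
might look like, assuming central finite differences with a fixed step ``eps``
(the names appear in the source, but these signatures and bodies are
assumptions):

from numpy import asarray, zeros

def gradient(f, eps=1e-6):
    '''Builds a function estimating the gradient of ``f`` by central
    finite differences.'''
    def df(x):
        x = asarray(x, dtype=float).ravel()
        n = x.size
        g = zeros((n,))
        for i in range(n):
            step = zeros((n,))
            step[i] = eps
            g[i] = (f(x + step) - f(x - step)) / (2.0 * eps)
        return g
    return df

def hessian(f, eps=1e-4):
    '''Builds a function estimating the hessian matrix of ``f`` by central
    finite differences.'''
    def hf(x):
        x = asarray(x, dtype=float).ravel()
        n = x.size
        H = zeros((n, n))
        for i in range(n):
            for j in range(n):
                si = zeros((n,))
                sj = zeros((n,))
                si[i] = eps
                sj[j] = eps
                H[i, j] = (f(x + si + sj) - f(x + si - sj)
                           - f(x - si + sj) + f(x - si - sj)) / (4.0 * eps * eps)
        return H
    return hf

Each factory returns a closure, so the result can be stored and called like a
hand-written derivative, which is how ``self.__df`` and ``self.__hf`` are used
in the step method of Example #2.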
Example #2
    def step(self, x):
        '''
        One step of the search.

        In this method, the result of the step depends on quantities computed
        in previous steps (namely, the running estimate of the inverse
        hessian), so it is not recommended that the same optimizer instance is
        reused for different searches on the same cost function.

        :Parameters:
          x
            The value from where the new estimate should be calculated. This can
            of course be the result of a previous iteration of the algorithm.

        :Returns:
          This method returns a tuple ``(x, e)``, where ``x`` is the updated
          estimate of the minimum, and ``e`` is the estimated error.
        '''
        # Initializes the inverse hessian estimate on the first step
        if self.__B is None:
            B = inv(self.__hf(x))
        else:
            B = self.__B

        # Updates x
        n = x.size
        x = x.reshape((n, 1))          # x as a column vector
        dfx = self.__df(x).reshape((n, 1))
        dx = - self.__h * dot(B, dfx)
        xn = x + dx

        # Updates the inverse hessian estimate with the DFP formula
        y = self.__df(xn).reshape((n, 1)) - dfx
        Bty = dot(B.T, y)
        dB = dot(dx, dx.T) / dot(y.T, dx) \
             - dot(Bty, Bty.T) / dot(y.T, Bty)
        self.__B = B + dB

        return xn, sum(abs(xn - x))
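The update applied above is the Davidon-Fletcher-Powell (DFP) formula for the
inverse hessian estimate. Below is a minimal usage sketch, assuming the two
methods belong to a quasi-Newton optimizer class (named ``DFPOptimizer`` here
purely for illustration; the real class name is not shown in this listing),
that the finite-difference helpers sketched after Example #1 are in scope, and
that the stored ``emax`` and ``imax`` are consumed by a driver loop elsewhere
in the class, so ``step`` is driven by hand:

from numpy import array

def f(x):
    # Quadratic bowl with its minimum at (1, 2).
    return (x[0] - 1.0) ** 2 + (x[1] - 2.0) ** 2

opt = DFPOptimizer(f, h=0.5, emax=1e-8, imax=500)   # hypothetical class name
x = array([ 0.0, 0.0 ])
for i in range(500):
    x, e = opt.step(x)
    if e < 1e-8:
        break
print(x.ravel())    # should approach [ 1.  2. ]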