Example No. 1
from numpy import linalg as LA  # Grad, Hess, f_o_c, s_o_c, genera_alpha assumed defined elsewhere

def BusquedaLineal_amplio(f, x0, metodo="maximo descenso"):
    """Line search that iterates until the first- and second-order conditions both hold."""
    xk = x0
    if metodo == "Newton":
        while not (f_o_c(f, xk) and s_o_c(f, xk)):
            grad = Grad(f, xk)
            hess = Hess(f, xk)
            pk = LA.solve(hess, -grad)       # Newton direction: solve H pk = -grad
            alpha = genera_alpha(f, xk, pk)  # step length from the current iterate
            xk = xk + alpha * pk
    else:
        while not (f_o_c(f, xk) and s_o_c(f, xk)):
            grad = Grad(f, xk)
            pk = -grad                       # steepest-descent ("maximo descenso") direction
            alpha = genera_alpha(f, xk, pk)
            xk = xk + alpha * pk
    return xk
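
The helpers Grad, Hess, f_o_c, s_o_c, and genera_alpha are not shown in these examples. Below is a minimal sketch of plausible implementations, assuming central finite differences for the gradient and a backtracking (Armijo) line search; the tolerances and constants are illustrative, not the original repository's code, and s_o_c would check the Hessian's eigenvalues analogously.

import numpy as np

def Grad(f, x, h=1e-6):
    # Central-difference approximation of the gradient (illustrative helper).
    x = np.asarray(x, dtype=float)
    g = np.zeros_like(x)
    for i in range(x.size):
        e = np.zeros_like(x)
        e[i] = h
        g[i] = (f(x + e) - f(x - e)) / (2 * h)
    return g

def f_o_c(f, x, tol=1e-6):
    # First-order condition: the gradient is (approximately) zero.
    return np.linalg.norm(Grad(f, x)) < tol

def genera_alpha(f, x, pk, alpha=1.0, rho=0.5, c1=1e-4):
    # Backtracking line search enforcing the Armijo sufficient-decrease condition.
    g = Grad(f, x)
    while f(x + alpha * pk) > f(x) + c1 * alpha * g.dot(pk):
        alpha *= rho
    return alpha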
Example No. 2
from numpy import linalg as LA  # Grad, Hess, is_pos_def, modificacion_hessiana, genera_alpha assumed defined elsewhere

def NewMod(f, xk, h, maxIt=100):
    """Modified Newton's method: the Hessian is perturbed until positive definite."""
    for k in range(maxIt):
        # Recompute the Hessian at the current iterate on every iteration.
        Bk = Hess(f, xk, h)
        # Modify Bk until it is positive definite, so pk is a descent direction.
        while not is_pos_def(Bk):
            Bk = modificacion_hessiana(Bk)
        pk = LA.solve(Bk, -Grad(f, xk))  # solve Bk pk = -grad rather than forming the inverse
        a = genera_alpha(1, f, xk, pk, h)  # step length
        xk = xk + a * pk
    return xk
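
is_pos_def and modificacion_hessiana are likewise external. A common realization of Hessian modification tests positive definiteness via a Cholesky factorization and shifts the diagonal otherwise; the sketch below is an assumed implementation, not the repository's helper, and the shift size beta is illustrative.

import numpy as np

def is_pos_def(B):
    # A symmetric matrix is positive definite iff its Cholesky factorization exists.
    try:
        np.linalg.cholesky(B)
        return True
    except np.linalg.LinAlgError:
        return False

def modificacion_hessiana(B, beta=1e-3):
    # Add a small multiple of the identity; repeated calls keep increasing the
    # diagonal shift until the caller's positive-definiteness test passes.
    return B + beta * np.eye(B.shape[0])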
Example No. 3
import numpy as np  # Grad, Hess, is_min, modificacion_hessiana, genera_alpha assumed defined elsewhere

def busqueda_lineal(f, x0, method='Maximo Descenso'):
    """Line search with Newton or steepest-descent directions, run until is_min holds."""
    xk = x0
    while not is_min(f, xk):
        grad = Grad(f, xk)
        hess = Hess(f, xk)
        if method == 'Newton':
            # Modify the (possibly indefinite) Hessian so pk is a descent direction.
            hess = modificacion_hessiana(hess)
            pk = np.linalg.solve(hess, -grad)
        elif method == 'Maximo Descenso':
            pk = -grad
        alpha = genera_alpha(f, xk, pk)
        xk = xk + alpha * pk
    return xk
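
As a usage sketch, either method can be exercised on a simple convex quadratic; the test function and starting point below are illustrative, and the helpers (including is_min) are assumed to be in scope.

import numpy as np

# Convex quadratic with minimizer at (1, -2); purely illustrative.
f = lambda x: (x[0] - 1) ** 2 + (x[1] + 2) ** 2

x_star = busqueda_lineal(f, np.array([5.0, 5.0]), method='Maximo Descenso')
print(x_star)  # expected to approach [1, -2]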
Example No. 4
import numpy as np
from numpy import linalg as LA  # Grad, genera_alpha, DFP_Hk assumed defined elsewhere

def BFGS(f, x0, tol, H0, maxiter=10000):
    """Quasi-Newton method: Hk approximates the inverse Hessian and is updated each step."""
    k = 0
    Gk = Grad(f, x0)
    Hk = H0
    xk = np.array(x0)
    xk1 = np.array(x0)
    sk = np.array(100)  # dummy initial step so the first norm test passes
    # Stop when the gradient or the step is small, or the iteration budget is spent.
    while LA.norm(Gk) > tol and LA.norm(sk) > tol and k <= maxiter:
        pk = -Hk.dot(Gk)                  # quasi-Newton search direction
        alphak = genera_alpha(f, xk, pk)  # step length
        xk1 = xk + alphak * pk
        sk = xk1 - xk                     # step taken
        Gk1 = Grad(f, xk1)
        yk = Gk1 - Gk                     # change in gradient
        Hk = DFP_Hk(yk, sk, Hk)           # inverse-Hessian update via the DFP_Hk helper
        k += 1
        xk = xk1
        Gk = Gk1
    return xk1, k
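
The update helper DFP_Hk is not shown, and despite the function's name it appears to be a DFP-style helper; BFGS proper uses a different inverse-Hessian update. For reference, here is a sketch of the standard BFGS inverse update, H+ = (I - rho s y^T) H (I - rho y s^T) + rho s s^T with rho = 1/(y^T s). The name BFGS_Hk is hypothetical, not the repository's helper.

import numpy as np

def BFGS_Hk(yk, sk, Hk):
    # Standard BFGS update of the inverse-Hessian approximation (hypothetical
    # helper). Requires the curvature condition yk.dot(sk) > 0, which a
    # Wolfe-type line search is meant to ensure.
    rho = 1.0 / yk.dot(sk)
    V = np.eye(sk.size) - rho * np.outer(sk, yk)
    return V.dot(Hk).dot(V.T) + rho * np.outer(sk, sk)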