import numpy as np
from numpy.linalg import inv


def bfgs_method(x0):
    """Minimize f from x0 with the BFGS quasi-Newton method."""
    gfk = gradient(x0)
    I = np.eye(len(x0))  # float identity: the initial inverse-Hessian approximation
    Hk = I
    xk = x0
    xs = []
    alphas = []
    steps = 0

    while f(xk) > 1e-19:

        xs.append(xk)
        steps += 1
        pk = -np.dot(Hk, gfk)                # quasi-Newton search direction
        alpha_k = backtrack_line_search(xk)  # step length from the current iterate
        alphas.append(alpha_k)
        xk1 = xk + alpha_k * pk
        sk = xk1 - xk
        xk = xk1

        gfk1 = gradient(xk)
        yk = gfk1 - gfk
        gfk = gfk1

        # BFGS update of the inverse Hessian approximation:
        #   H_{k+1} = (I - rho*s*y^T) H_k (I - rho*y*s^T) + rho*s*s^T,
        # with rho = 1 / (y^T s)
        rho = 1.0 / np.dot(yk, sk)
        term1 = I - rho * np.outer(sk, yk)
        term2 = I - rho * np.outer(yk, sk)
        Hk = np.dot(term1, np.dot(Hk, term2)) + rho * np.outer(sk, sk)

    return xs, alphas, steps


def backtrack_line_search(x0):
    """Backtracking (Armijo) line search along the steepest-descent direction.

    C is the sufficient-decrease constant and Ru the shrink factor, both
    module-level globals.
    """
    alpha = 1.0
    g = gradient(x0)
    fx = f(x0)

    # Shrink alpha until the Armijo condition f(x - a*g) <= f(x) - a*C*g.g holds
    while f(x0 - alpha * g) > fx - alpha * C * np.dot(g, g):
        alpha *= Ru

    return alpha


def newton(x):
    """Minimize f with damped Newton steps: x <- x - alpha * H(x)^{-1} grad(x)."""
    alpha = backtrack_line_search(x)
    step = 0
    alphas = []
    xk_s = []

    while f(x) > 1e-19:
        print(f(x))  # progress trace
        xk_s.append(x)
        alphas.append(alpha)

        # Newton step, damped by the backtracked step length
        x = x - alpha * inv(hessiyan(x)).dot(gradient(x))
        alpha = backtrack_line_search(x)
        step += 1

        if f(x) == 0:  # landed exactly on the minimum
            break

    return xk_s, alphas, step


def trust_region_dogleg(xk):
    """Trust-region method using the dogleg step (dogleg_method)."""
    trust_radius = 1.0
    iteration = 0
    xk_s = []
    Ru_s = []

    while True:
        xk_s.append(xk)
        gk = gradient(xk)
        Bk = hessiyan(xk)
        Hk = np.linalg.inv(Bk)

        # Dogleg step restricted to the current trust region
        pk = dogleg_method(Hk, gk, Bk, trust_radius)

        # Accept or reject the step and resize the trust region;
        # Ru is the actual-to-predicted reduction ratio
        xk, trust_radius, Ru = trust_region_find_delta(xk, pk, gk, Bk,
                                                       trust_radius)
        Ru_s.append(Ru)

        if f(xk) < 1e-19:
            break

        iteration += 1

    showResult(xk_s, Ru_s, iteration)
    return xk
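

# dogleg_method, trust_region_find_delta, and showResult are called above but
# are not defined in this file. The sketches below assume the conventional
# textbook formulation (Nocedal & Wright, ch. 4); the project's own helpers
# may differ, and the thresholds in trust_region_find_delta are assumptions.
def dogleg_method(Hk, gk, Bk, trust_radius):
    """Dogleg step: blend the steepest-descent and Newton points."""
    pB = -Hk.dot(gk)  # full Newton step
    if np.linalg.norm(pB) <= trust_radius:
        return pB     # Newton point already inside the region

    # Unconstrained minimizer of the model along the steepest-descent direction
    pU = -(np.dot(gk, gk) / np.dot(gk, Bk.dot(gk))) * gk
    if np.linalg.norm(pU) >= trust_radius:
        return trust_radius * pU / np.linalg.norm(pU)  # clip to the boundary

    # Walk from pU toward pB, stopping on the boundary:
    # solve ||pU + tau*(pB - pU)||^2 = trust_radius^2 for tau in [0, 1]
    d = pB - pU
    a = np.dot(d, d)
    b = 2.0 * np.dot(pU, d)
    c = np.dot(pU, pU) - trust_radius ** 2
    tau = (-b + np.sqrt(b ** 2 - 4.0 * a * c)) / (2.0 * a)
    return pU + tau * d


def trust_region_find_delta(xk, pk, gk, Bk, trust_radius, max_radius=100.0):
    """Ratio test: accept/reject the step and grow or shrink the region."""
    actual = f(xk) - f(xk + pk)
    predicted = -(np.dot(gk, pk) + 0.5 * np.dot(pk, Bk.dot(pk)))
    Ru = actual / predicted if predicted != 0.0 else 0.0

    if Ru < 0.25:
        trust_radius *= 0.25  # poor model agreement: shrink
    elif Ru > 0.75 and np.isclose(np.linalg.norm(pk), trust_radius):
        trust_radius = min(2.0 * trust_radius, max_radius)  # boundary step: grow

    if Ru > 0.15:  # accept only steps that achieve enough of the prediction
        xk = xk + pk
    return xk, trust_radius, Ru


def showResult(xk_s, Ru_s, iteration):
    """Assumed reporting helper: print the iteration count and final iterate."""
    print('iterations:', iteration, 'final x:', xk_s[-1])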


def trust_region_cauchy(xk):
    """Trust-region method using the Cauchy-point step (find_cauchy_point)."""
    trust_radius = 1.0
    iteration = 0
    xk_s = []
    Ru_s = []

    while True:
        xk_s.append(xk)
        gk = gradient(xk)
        Bk = hessiyan(xk)

        # Cauchy point: model minimizer along -gk within the region
        pk = find_cauchy_point(gk, Bk, trust_radius)

        print(f(xk))  # progress trace

        xk, trust_radius, Ru = trust_region_find_delta(xk, pk, gk, Bk,
                                                       trust_radius)
        Ru_s.append(Ru)

        # Looser stopping tolerance than the other methods: the Cauchy step
        # is essentially steepest descent and converges slowly
        if f(xk) < 0.0012310:
            break

        iteration += 1

    showResult(xk_s, Ru_s, iteration)
    return xk
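

# find_cauchy_point is also not defined in this file. A minimal sketch of the
# standard Cauchy point (the minimizer of the quadratic model along -gk,
# clipped to the trust-region boundary) follows, again assuming the textbook
# formulation rather than the project's own implementation.
def find_cauchy_point(gk, Bk, trust_radius):
    gnorm = np.linalg.norm(gk)
    ps = -trust_radius * gk / gnorm  # boundary point along steepest descent
    gBg = np.dot(gk, Bk.dot(gk))
    if gBg <= 0.0:
        tau = 1.0  # model is non-convex along -gk: go all the way to the boundary
    else:
        tau = min(gnorm ** 3 / (trust_radius * gBg), 1.0)
    return tau * ps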


def inexact_newton(w, i):
    '''
    Execute the inexact Newton method using the conjugate gradient.
    Returns the number of steps necessary to achieve the required precision
    (epsilon and alpha are module-level constants).
    '''
    k = 0
    while func.f(w) >= epsilon:
        # This is the heart of the inexact Newton method. We follow the formula
        #   w_{k+1} = w_k - alpha_k * hessian(w_k)^{-1} * gradient(w_k),
        # but the conjugate gradient method gives us the Newton direction
        # "- hessian(w_k)^{-1} * gradient(w_k)" by solving the linear system
        # hessian(w_k) * p = -gradient(w_k) approximately, in i iterations.
        w = w + alpha * cg.conjugate_gradient(w, func.hessian(w), -func.gradient(w), i)
        k = k + 1
    return k
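

# The cg module used above is not shown. Below is a minimal sketch of a linear
# conjugate-gradient solver with the same signature as the call site; the
# handling of the first parameter and the fixed iteration budget i are
# assumptions about that module's interface.
def conjugate_gradient(w, A, b, max_iter):
    # w matches the call site but is unused: the solve of A p = b starts from
    # the zero vector, as is conventional for truncated Newton-CG.
    x = np.zeros_like(b)
    r = b - A.dot(x)  # residual
    p = r.copy()      # first search direction
    for _ in range(max_iter):
        rr = np.dot(r, r)
        if rr == 0.0:
            break     # exact solution reached
        Ap = A.dot(p)
        a = rr / np.dot(p, Ap)  # optimal step length along p
        x = x + a * p
        r = r - a * Ap
        p = r + (np.dot(r, r) / rr) * p  # A-conjugate direction update
    return x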


def steepestDescent(x):
    """Minimize f by steepest descent with backtracking line search."""
    alpha = backtrack_line_search(x)
    xk_s = []
    alphas = []
    step = 0

    while f(x) > 1e-19:
        step += 1
        xk_s.append(x)
        alphas.append(alpha)

        # Move along the negative gradient with the backtracked step length
        x = x - alpha * gradient(x)
        alpha = backtrack_line_search(x)

    return xk_s, alphas, step
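

# None of the routines above define the objective f, its gradient, the Hessian
# (hessiyan), or the line-search constants C and Ru they read as globals. A
# minimal self-contained setup follows, assuming a simple convex quadratic as
# the test problem; the original project's objective may differ.
C = 1e-4   # Armijo sufficient-decrease constant
Ru = 0.5   # backtracking shrink factor, must lie in (0, 1)

A_test = np.array([[3.0, 1.0],
                   [1.0, 2.0]])  # symmetric positive definite, so f is convex


def f(x):
    return 0.5 * x.dot(A_test).dot(x)  # minimum value 0 at x = 0


def gradient(x):
    return A_test.dot(x)


def hessiyan(x):
    return A_test


if __name__ == '__main__':
    x0 = np.array([5.0, -3.0])
    xs, alphas, steps = bfgs_method(x0)
    print('BFGS: %d steps, final x = %s' % (steps, xs[-1]))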