def ravine_method(func, grad, x0, eps=0.02, debug=False):
    """Ravine ("gully") method: alternate one fastest-gradient-descent
    step with a 1-D golden-section line search along the ravine direction.

    func, grad -- objective and its gradient (callables on coordinate arrays)
    x0         -- starting point (any sequence of coordinates)
    eps        -- tolerance; iteration stops after the displacement stays
                  within eps for three consecutive steps
    debug      -- when True, print call counters of the wrapped callables

    Returns the found minimizer as a tuple.
    """
    x0 = np.array(x0)

    func = FuncCounter(func, without_memoization=True)
    grad = FuncCounter(grad, without_memoization=True)
    FGD = FuncCounter(fastest_gradient_descent, without_memoization=True)
    count = 0
    while count < 3:
        # Single gradient-descent step from the current point.
        y0 = np.array(FGD(func, grad, x0, eps, max_iter=1))

        # Line search along the direction y0 -> y0 + 10*eps.
        # (The original also computed an unused `_x = x0 + 10 * eps`;
        # removed as dead code.)
        _y = y0 + 10 * eps
        alpha = gold_sech_method(lambda a: func(y0 + a * (_y - y0)), -10, 10, eps * 10)
        x1 = y0 + alpha * (_y - y0)
        # Require three consecutive small displacements before stopping.
        if np.linalg.norm(x1 - x0) <= eps:
            count += 1
        else:
            count = 0
        x0 = x1.copy()
    if debug:
        print("ravine_method func counter", func.counter)
        print("ravine_method grad counter", grad.counter)
        print("ravine_method FGD counter", FGD.counter)
    return tuple(x0)
def tangent_lines_method(func, a, b, eps=0.01, n=16, debug=False):
    """Tangent-lines method for 1-D minimization.

    The objective is parsed symbolically from ``func.__doc__`` (a sympy
    expression in ``x``) and differentiated with sympy.  The interval
    [a, b] shrinks by moving an endpoint to the abscissa where the
    tangents drawn at the endpoints intersect, until the derivative at
    the intersection is within eps (and the interval is small enough).

    n     -- number of significant digits for sympy ``evalf``
    debug -- when True, print evaluation counters

    Returns the approximate minimizer.
    """
    x = sympy.symbols('x')
    f = parse_expr(func.__doc__)
    diff = f.diff(x)
    func = FuncCounter(lambda x0: f.evalf(subs={x: x0}, n=n))
    diff_f = FuncCounter(lambda x0: diff.evalf(subs={x: x0}, n=n))

    def intersection(ai, bi, f_a, f_b, diff_f_a, diff_f_b):
        # Abscissa where the tangents at ai and bi cross; previously this
        # formula was duplicated verbatim in both loop branches.
        return (f_a - f_b - ai * diff_f_a + bi * diff_f_b) / (diff_f_b - diff_f_a)

    ai = a
    bi = b
    diff_f_a = diff_f(ai)
    diff_f_b = diff_f(bi)
    f_a = func(ai)
    f_b = func(bi)
    ci = intersection(ai, bi, f_a, f_b, diff_f_a, diff_f_b)
    diff_f_c = diff_f(ci)
    while abs(diff_f_c) > eps or bi - ai > eps * 2:
        if diff_f_c < 0:
            # Negative slope at ci: the minimum lies to the right.
            ai = ci
            diff_f_a = diff_f_c
            f_a = func(ai)
        else:
            # Non-negative slope at ci: the minimum lies to the left.
            bi = ci
            diff_f_b = diff_f_c
            f_b = func(bi)
        ci = intersection(ai, bi, f_a, f_b, diff_f_a, diff_f_b)
        diff_f_c = diff_f(ci)
    if debug:
        print("tangent_lines_method func counter", func.counter)
        print("tangent_lines_method diff counter", diff_f.counter)
    return ci
# Example #3
def step_partition_descent(func,
                           grad,
                           x0,
                           alpha=1,
                           lr=0.5,
                           d=0.5,
                           eps=0.01,
                           debug=False):
    """Gradient descent with step partition (Armijo-style backtracking).

    Starting from step size ``alpha``, the step is scaled by ``lr`` until
    the trial point satisfies the sufficient-decrease condition with
    parameter ``d``; iteration stops once the gradient norm is at most
    ``eps``.  Returns the found minimizer as a tuple.
    """
    point = np.array(x0)
    step = alpha

    func = FuncCounter(func, without_memoization=True)
    grad = FuncCounter(grad, without_memoization=True)
    g = np.array(grad(point))

    trial = point - step * g
    while True:
        base = func(point)
        # Shrink the step until sufficient decrease is achieved.
        while func(trial) - base > -1 * d * step * np.linalg.norm(g)**2:
            step *= lr
            trial = point - step * g
        g = np.array(grad(trial))
        if np.linalg.norm(g) <= eps:
            break
        # Accept the trial point and reset the step for the next sweep.
        point = trial.copy()
        step = alpha
        trial = point - step * g
    if debug:
        print("step_partition_descent func counter", func.counter)
        print("step_partition_descent grad counter", grad.counter)
    return tuple(trial)
def newton_method(grad, inv_hesse_mat, x0, eps=0.02, debug=False):
    """Classic Newton's method.

    Repeats x <- x - H^{-1}(x) * grad(x) until the gradient norm drops
    to ``eps``.  ``inv_hesse_mat`` must return the inverse Hessian at a
    point.  Returns the found minimizer as a tuple.
    """
    point = np.array(x0)
    grad = FuncCounter(grad, without_memoization=True)
    inv_hesse_mat = FuncCounter(inv_hesse_mat, without_memoization=True)
    g = np.array(grad(point))
    while np.linalg.norm(g) > eps:
        # Full (unit-step) Newton update.
        point = point - np.dot(np.array(inv_hesse_mat(point)), g)
        g = np.array(grad(point))
    if debug:
        print("newton_method grad counter", grad.counter)
        print("newton_method inv_hesse_mat counter", inv_hesse_mat.counter)
    return tuple(point)
def chord_method(func, x0, eps=0.01, n=16, debug=False):
    """Chord (secant) method applied to the derivative of ``func``.

    The objective is parsed symbolically from ``func.__doc__``; the root
    of its first derivative (a stationary point) is found by secant
    iteration starting near ``x0``.
    """
    x = sympy.symbols('x')
    f = parse_expr(func.__doc__)
    diff = f.diff(x)
    # NOTE: the wrapped ``func`` below is never invoked by the iteration
    # (only the derivative is evaluated); its counter is kept for the
    # debug report's format parity with the other methods.
    func = FuncCounter(lambda _x: f.evalf(subs={x: _x}, n=n))
    diff_f = FuncCounter(lambda _x: diff.evalf(subs={x: _x}, n=n))
    prev = x0
    prev_d = diff_f(prev)
    cur = x0 + 2.1 * eps  # second starting point, offset past the tolerance
    cur_d = diff_f(cur)
    nxt = cur - cur_d * (cur - prev) / (cur_d - prev_d)
    while abs(cur_d) > eps:
        prev, cur = cur, nxt
        prev_d = cur_d
        cur_d = diff_f(cur)
        nxt = cur - cur_d * (cur - prev) / (cur_d - prev_d)
    if debug:
        print("chord_method func counter", func.counter)
        print("chord_method diff counter", diff_f.counter)
    return cur
def modified_newton_method(func, grad, inv_hesse_mat, x0, eps=0.02, debug=False):
    """Newton's method with a golden-section line search along the
    Newton direction instead of a fixed unit step.

    Returns the found minimizer as a tuple; stops when the gradient
    norm is at most ``eps``.
    """
    point = np.array(x0)
    func = FuncCounter(func, without_memoization=True)
    grad = FuncCounter(grad, without_memoization=True)
    minimize = FuncCounter(gold_sech_method, without_memoization=True)
    inv_hesse_mat = FuncCounter(inv_hesse_mat, without_memoization=True)

    g = np.array(grad(point))
    while np.linalg.norm(g) > eps:
        # Newton direction d = H^{-1} g, scaled by a line-searched alpha.
        direction = np.dot(np.array(inv_hesse_mat(point)), g)
        alpha = minimize(lambda a: func(point - a * direction), -10, 10, eps * 10)
        point = point - alpha * direction
        g = np.array(grad(point))
    if debug:
        print("modified_newton_method func counter", func.counter)
        print("modified_newton_method grad counter", grad.counter)
        print("modified_newton_method minimizations counter", minimize.counter)
        print("modified_newton_method inv_hesse_mat counter", inv_hesse_mat.counter)

    return tuple(point)
def newton_raphson_method(func, x0, eps=0.01, n=16, debug=False):
    """Newton-Raphson minimization.

    The objective is parsed symbolically from ``func.__doc__``; iterates
    x <- x - f'(x) / f''(x) until successive iterates differ by at most
    ``eps``.

    n     -- number of significant digits for sympy ``evalf``
    debug -- when True, print evaluation counters
    """
    x = sympy.symbols('x')
    f = parse_expr(func.__doc__)
    diff = f.diff(x)
    func = FuncCounter(lambda _x: f.evalf(subs={x: _x}, n=n))
    diff_f = FuncCounter(lambda _x: diff.evalf(subs={x: _x}, n=n))
    diff2 = diff.diff(x)
    diff2_f = FuncCounter(lambda _x: diff2.evalf(subs={x: _x}, n=n))
    diff_f_xk = diff_f(x0)
    diff2_f_xk = diff2_f(x0)
    xk = x0 - diff_f_xk / diff2_f_xk
    while abs(xk - x0) > eps:
        x0 = xk
        diff_f_xk = diff_f(x0)
        diff2_f_xk = diff2_f(x0)
        xk = x0 - diff_f_xk / diff2_f_xk
    if debug:
        print("newton_raphson_method func counter", func.counter)
        print("newton_raphson_method diff counter", diff_f.counter)
        # BUG FIX: previously printed diff_f.counter under this label.
        print("newton_raphson_method diff2 counter", diff2_f.counter)

    return xk
def naive_method(func, a, b, eps=0.01, debug=False):
    """Brute-force minimization: sample ``func`` on a uniform grid of
    step ``eps`` over [a, b] and return the best grid point."""
    f = FuncCounter(func)
    steps = ceil((b - a) / eps)
    # (value, point) pairs; tuple comparison breaks value ties on the
    # smaller point, matching min() over pairs.
    samples = [(f(a + eps * i), a + eps * i) for i in range(steps + 1)]
    best = min(samples)[1]
    if debug:
        print("naive_method func counter", f.counter)
    return best
def fletcher_reeves_method(func, grad, x0, eps=0.02, debug=False):
    """Fletcher-Reeves conjugate gradient method.

    Each iteration line-searches (golden section) along the current
    conjugate direction; the direction is restarted to steepest descent
    (beta = 0) every ``dim`` iterations.  Stops when the gradient norm
    drops to ``eps``.  Returns the found minimizer as a tuple.
    """
    x0 = np.array(x0)
    func = FuncCounter(func, without_memoization=True)
    grad = FuncCounter(grad, without_memoization=True)
    minimize = FuncCounter(gold_sech_method, without_memoization=True)
    dim = len(x0)

    gr0 = np.array(grad(x0))
    d = gr0
    alpha = minimize(lambda a: func(x0 - a * d), -10, 10, eps * 10)
    x1 = x0 - alpha * d

    gr1 = np.array(grad(x1))
    # norm(-g) == norm(g) exactly, so the previous `-1 *` inside the
    # norm calls was redundant; track plain gradient norms.
    nr0 = np.linalg.norm(gr0)
    nr1 = np.linalg.norm(gr1)
    k = 0
    while nr1 > eps:
        if (k + 1) % dim == 0:
            beta = 0  # periodic restart: fall back to steepest descent
        else:
            beta = nr1**2 / nr0**2  # Fletcher-Reeves coefficient
        d = -1 * gr1 + beta * d
        k += 1
        x0 = x1.copy()

        alpha = minimize(lambda a: func(x0 - a * d), -10, 10, eps * 10)
        x1 = x0 - alpha * d
        nr0 = nr1
        gr1 = np.array(grad(x1))
        nr1 = np.linalg.norm(gr1)

    if debug:
        print("fletcher_reeves_method func counter", func.counter)
        print("fletcher_reeves_method grad counter", grad.counter)
        print("fletcher_reeves_method minimizations counter", minimize.counter)
    return tuple(x1)
# Example #10
def per_coordinate_descent(func, x0, eps=0.01, debug=False):
    """Coordinate descent: minimize ``func`` along one coordinate at a
    time with a golden-section search in a small window around the
    current value.

    Stops after the full-sweep displacement stays within ``eps`` for
    three consecutive sweeps.  Returns the found minimizer as a tuple.
    """
    # dtype=float: with an all-integer x0, np.array(x0) would create an
    # int array and `b[i] = minimize(...)` would silently truncate the
    # fractional minimizer back to an integer.
    a = np.array(x0, dtype=float)
    b = np.array(x0, dtype=float)
    dim = len(b)
    count = 0
    minimize = FuncCounter(gold_sech_method, without_memoization=True)
    while True:
        for i in range(dim):
            # 1-D slice of func along coordinate i through the point a.
            phi = deco_maker(i)(func, a)
            b[i] = minimize(phi, a[i] - 5 * eps, a[i] + 5 * eps, eps)
        if np.linalg.norm(b - a) <= eps:
            count += 1
        else:
            count = 0
        if count == 3:
            break
        a = b.copy()
    if debug:
        # Label fixed to match the function's actual name (was
        # "coordinate_descent"), consistent with the sibling methods.
        print("per_coordinate_descent minimizations counter", minimize.counter)
    return tuple(b)
def dichotomy_method(func, a, b, eps=0.01, delta_mult=0.5, debug=False):
    """Dichotomy minimization of a unimodal function on [a, b].

    Two probes are placed ``delta`` apart around the midpoint; each
    iteration discards the half that cannot contain the minimum, until
    the interval length is at most 2*eps.

    delta_mult -- probe offset as a fraction of eps (keep it < 2 so the
                  interval keeps shrinking)
    debug      -- when True, print the function-evaluation counter
    """
    f = FuncCounter(func)
    delta = eps * delta_mult

    ai = a
    bi = b

    while (bi - ai) > eps * 2:
        ci = (ai + bi - delta) / 2
        di = (ai + bi + delta) / 2
        # Keep the half containing the smaller probe value.  (The former
        # no-op self-assignments `ai = ai` / `bi = bi` are removed.)
        if f(ci) <= f(di):
            bi = di
        else:
            ai = ci

    if debug:
        print("dichotomy_method func counter", f.counter)
    return (bi + ai) / 2
def gold_sech_method(func, a, b, eps=0.01, debug=False):
    """Golden-section minimization of a unimodal function on [a, b].

    The interior probes sit at the golden-ratio fractions of the
    interval; each iteration discards one end and reuses one probe.
    (FuncCounter presumably memoizes by default, so the reused probe is
    not re-evaluated -- TODO confirm against FuncCounter.)  Stops when
    the interval length is at most 2*eps.
    """
    f = FuncCounter(func)
    # Loop-invariant golden-section fractions (~0.382 and ~0.618),
    # hoisted so sqrt is not recomputed every iteration.
    lo = (3 - sqrt(5)) / 2
    hi = (sqrt(5) - 1) / 2
    ai = a
    bi = b
    ci = ai + (bi - ai) * lo
    di = ai + (bi - ai) * hi

    while (bi - ai) > eps * 2:
        if f(ci) <= f(di):
            # Minimum is in [ai, di]; old ci becomes the right probe.
            bi = di
            di = ci
            ci = ai + (bi - ai) * lo
        else:
            # Minimum is in [ci, bi]; old di becomes the left probe.
            ai = ci
            ci = di
            di = ai + (bi - ai) * hi

    if debug:
        print("gold_sech_method func counter", f.counter)

    return (bi + ai) / 2