def demo_conjugate_gradient_search():
    def F(x):
        u = x[0]
        v = x[1]
        return 10 * u**2 - 16 * u * v + 8 * v**2 + 8 * u - 16 * v + 16

    def J(x):
        u = x[0]
        v = x[1]
        return [20 * u - 16 * v + 8, -16 * u + 16 * v - 16]

    def H(x):
        # Hessian of the quadratic F is constant (not passed to the
        # conjugate_gradient_search call below; kept for reference).
        return [[20, -16], [-16, 16]]

    # F can be written as 0.5 * x^T A x - b^T x + 16, so its minimizer
    # satisfies A x = b (kept for reference; not used below).
    A = np.array([[20, -16], [-16, 16]])
    b = np.array([[-8], [16]])

    #x0 = [[2.5], [2.5]]
    x0 = [2.5, 2.5]
    x_min = conjugate_gradient_search(F, J, x0)
    print('Minimum at (%.2f, %.2f) with F = %.2f' %
          (x_min[0], x_min[1], F([x_min[0], x_min[1]])))

    plot3d_with_mins(F, [0, 5], [0, 5], mins=[x_min])
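

# Illustrative sketch, not the conjugate_gradient_search called above: the
# quadratic in demo_conjugate_gradient_search can be written as
# F(x) = 0.5 * x^T A x - b^T x + 16 with the A and b built there, so its
# minimizer solves A x = b.  A plain linear conjugate gradient solver for
# that system might look roughly like this (assumes numpy is imported as np
# at module level, as the np.array calls above suggest).
def _linear_cg_sketch(A, b, x0, tolerance=1e-10, max_iterations=100):
    x = np.array(x0, dtype=float)
    r = b - A @ x                         # residual, equal to -grad F(x)
    d = r.copy()                          # first search direction
    for _ in range(max_iterations):
        if np.linalg.norm(r) < tolerance:
            break
        Ad = A @ d
        alpha = (r @ r) / (d @ Ad)        # exact minimizer along d
        x = x + alpha * d
        r_new = r - alpha * Ad
        beta = (r_new @ r_new) / (r @ r)  # Fletcher-Reeves style update
        d = r_new + beta * d
        r = r_new
    return x
# Example: _linear_cg_sketch(np.array([[20., -16.], [-16., 16.]]),
#                            np.array([-8., 16.]), [2.5, 2.5])
# converges to (2, 3), where the quadratic F attains its minimum value of 0.

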
def demo_mv_newton():
    def F(x, y):
        return x**4 + y**4 + 2 * x**2 * y**2 + 6 * x * y - 4 * x - 4 * y + 1

    # Jacobian
    def J(x, y):
        return [
            4 * x**3 + 4 * x * y**2 + 6 * y - 4,
            4 * y**3 + 4 * x**2 * y + 6 * x - 4
        ]

    # Hessian
    def H(x, y):
        return [[12 * x**2 + 4 * y**2, 8 * x * y + 6],
                [8 * x * y + 6, 12 * y**2 + 4 * x**2]]

    print('Plotting both minima...')
    starting_guesses = [[-1, 1], [1, -1]]
    mins = multivariate_newtons_multi_guess(J, H, starting_guesses)
    plot3d_with_mins(F, mins=mins)

    print('Plotting only one minimum...')
    starting_guess = [-1, 1]
    x_min = multivariate_newtons(J, H, starting_guess)
    plot3d_with_mins(F, mins=[x_min])
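

# Illustrative sketch, an assumption about what multivariate_newtons does
# rather than its actual implementation: Newton's method for minimization
# repeats x_{k+1} = x_k - H(x_k)^{-1} J(x_k), i.e. it solves H s = -J for the
# step s.  It assumes J and H take the coordinates as separate arguments,
# matching the J(x, y) and H(x, y) above, and that numpy is imported as np.
def _newton_sketch(J, H, x0, tolerance=1e-12, max_iterations=50):
    x = np.array(x0, dtype=float)
    for _ in range(max_iterations):
        g = np.array(J(*x), dtype=float)
        if np.linalg.norm(g) < tolerance:
            break                         # gradient is (numerically) zero
        step = np.linalg.solve(np.array(H(*x), dtype=float), -g)
        x = x + step
    return x
# Example (with the J and H defined inside demo_mv_newton): starting from
# [-1, 1] this should land near the minimum at roughly (-0.47, 1.13); the
# saddle point at (0.5, 0.5) is why a good starting guess matters.

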
def main():
    # Disabled alternative demos:
    # x, y = opt.multivariate_newtons(J, H, [-1.9, 2], 0.5e-15)
    # print('(%.20f, %.20f)' % (x, y))
    # opt.plot3d_with_mins(F, [-2, 2], [-1, 3], [[-1.9, 2], [x, y]])
    #
    # x, y = opt.weakest_line(F, J, [-1.9, 2], tolerance=0.5e-15)
    # print('(%.20f, %.20f)' % (x, y))
    # opt.plot3d_with_mins(F, [-2, 2], [-1, 3], [[-1.9, 2], [x, y]])
    #
    # x, y = opt.steepest_descent_gss(F, J, [-1.9, 2], tolerance=0.5e-15)
    # print('(%.20f, %.20f)' % (x, y))
    # opt.plot3d_with_mins(F, [-2, 2], [-1, 3], [[-1.9, 2], [x, y]])
    # v = opt.nelder_mead(F, [-1.9, 2], 3, xtol=0.5e-10, ftol=0.5e-10)
    # print(v)
    # x = v[0]
    # y = v[1]
    # print('(%.20f, %.20f)' % (x, y))
    # opt.plot3d_with_mins(F, [-2, 2], [-1, 3], [[-1.9, 2], [x, y]])

    v = opt.conjugate_gradient_search(F, J, [-1.9, 2])
    x = v[0]
    y = v[1]
    print('(%.20f, %.20f)' % (x, y))
    opt.plot3d_with_mins(F, [-2, 2], [-1, 3], [[-1.9, 2], [x, y]])


def demo_weakest_line():
    def F(x, y):
        return x**4 + y**4 + 2 * x**2 * y**2 + 6 * x * y - 4 * x - 4 * y + 1

    # Jacobian
    def J(x, y):
        return [
            4 * x**3 + 4 * x * y**2 + 6 * y - 4,
            4 * y**3 + 4 * x**2 * y + 6 * x - 4
        ]

    x0 = [-1, 1]
    x_min = weakest_line(F, J, x0)
    plot3d_with_mins(F, mins=[x_min])
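

# Illustrative sketch, an assumption about weakest_line rather than its
# actual implementation: steepest descent where the step length comes from a
# "weak" backtracking line search that simply halves the trial step until the
# function value decreases.  Assumes F and J take the coordinates as separate
# arguments, as above, and that numpy is imported as np.
def _weak_line_search_sketch(F, J, x0, tolerance=1e-8, max_iterations=500):
    x = np.array(x0, dtype=float)
    for _ in range(max_iterations):
        g = np.array(J(*x), dtype=float)
        if np.linalg.norm(g) < tolerance:
            break
        s = 1.0                                   # trial step length
        while F(*(x - s * g)) >= F(*x) and s > 1e-14:
            s /= 2                                # backtrack until F drops
        x = x - s * g
    return x

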
def demo_weakest_line_gss():
    def F(x, y):
        return x**4 + y**4 + 2 * x**2 * y**2 + 6 * x * y - 4 * x - 4 * y + 1

    # Jacobian
    def J(x, y):
        return [
            4 * x**3 + 4 * x * y**2 + 6 * y - 4,
            4 * y**3 + 4 * x**2 * y + 6 * x - 4
        ]

    def H(x, y):
        return [[12 * x**2 + 4 * y**2, 8 * x * y + 6],
                [8 * x * y + 6, 12 * y**2 + 4 * x**2]]

    x0 = [-1, 1]
    mins_found = []
    mins_found.append(steepest_descent_gss(F, J, x0))
    #mins_found.append(weakest_line(F, J, x0))
    #mins_found.append(multivariate_newtons(J, H, x0))
    plot3d_with_mins(F, mins=mins_found)
    print(mins_found)
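

# Illustrative sketch, an assumption about steepest_descent_gss rather than
# its actual implementation: steepest descent where the step length along the
# negative gradient is chosen by a golden section search on [0, 1] (which
# presumes F is unimodal along that ray).  Assumes F and J take coordinates
# as separate arguments, as above, and that numpy is imported as np.
def _steepest_descent_gss_sketch(F, J, x0, tolerance=1e-8, max_iterations=500):
    golden = (np.sqrt(5) - 1) / 2                 # ~0.618

    def gss(phi, a, b, tol=1e-10):
        # Golden section search for a minimizer of phi on [a, b].
        c = b - golden * (b - a)
        d = a + golden * (b - a)
        while b - a > tol:
            if phi(c) < phi(d):
                b = d                             # minimizer lies in [a, d]
            else:
                a = c                             # minimizer lies in [c, b]
            c = b - golden * (b - a)
            d = a + golden * (b - a)
        return (a + b) / 2

    x = np.array(x0, dtype=float)
    for _ in range(max_iterations):
        g = np.array(J(*x), dtype=float)
        if np.linalg.norm(g) < tolerance:
            break
        step = gss(lambda s: F(*(x - s * g)), 0.0, 1.0)
        x = x - step * g
    return x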