# Example #1
def steepest_descent_conjugate_gradient_compare():
    """Compare steepest descent and conjugate gradient on one random SPD system.

    Builds an n x n diagonally dominant symmetric matrix A, sets b = A @ ones
    so the exact solution is the all-ones vector, solves with both methods, and
    prints each method's 2-norm absolute error and iteration count.
    """
    n = 100
    random_matrix = gen_diagdom_sym_matrix(n)
    # b = A @ [1]*n, so the exact solution is a vector of 1s.
    b = convert_vec_mat(matrix_mult(random_matrix, [[1]] * n))
    solution1, iter_count1 = matrix_solve_steepest_descent(random_matrix, b, 0.0001, 10000, True)
    solution2, iter_count2 = matrix_solve_conjugate_gradient(random_matrix, b, 0.0001, 10000, True)

    expected = [1] * n
    print(f"Steepest Descent {n}x{n}: Absolute Error={abs_error_2norm(expected, solution1)}  Iteration Count={iter_count1}")
    print(f"Conjugate Gradient {n}x{n}: Absolute Error={abs_error_2norm(expected, solution2)}  Iteration Count={iter_count2}")
# Example #2
def hilbert_matrix_conjugate_gradient_test():
    """Solve H x = [1]*n with conjugate gradient for Hilbert matrices of size 4, 8, 16, 32.

    For each size n, builds the n x n Hilbert matrix (H[j][i] = 1/(i+j+1),
    0-based), solves H x = ones, then prints the 2-norm absolute error of
    H @ x against the right-hand side, followed by the computed solution.
    Hilbert matrices are notoriously ill-conditioned, so error grows with n.
    """
    selection = [4, 8, 16, 32]
    hilbert_matrix = []
    solution = []

    # Build each Hilbert matrix and solve it with conjugate gradient.
    for n in selection:
        hilbert_matrix.append([[1 / (1 + i + j) for i in range(n)] for j in range(n)])
        solution.append(matrix_solve_conjugate_gradient(hilbert_matrix[-1], [1] * n, 0.000000001, 10000))

    for n, matrix, sol in zip(selection, hilbert_matrix, solution):
        # Residual check: multiply the matrix by the computed solution and
        # compare against the all-ones right-hand side.
        product = convert_vec_mat(matrix_mult(matrix, convert_vec_mat(sol)))
        print(f"Absolute error for {n}x{n}: {abs_error_2norm([1] * n, product)}")
        print(sol)
def steepest_descent_conjugate_gradient_gauss_seidel_compare():
    """Benchmark steepest descent, conjugate gradient, and Gauss-Seidel.

    Builds one random n x n diagonally dominant symmetric system with known
    all-ones solution, then runs each solver with the same tolerance and
    iteration cap, printing 2-norm absolute error, iteration count, and
    wall-clock time per method.
    """
    n = 100
    random_matrix = gen_diagdom_sym_matrix(n)
    # b = A @ [1]*n, so the exact solution is a vector of 1s.
    b = convert_vec_mat(matrix_mult(random_matrix, [[1]] * n))
    expected = [1] * n
    tolerance = 0.00000000001
    max_iterations = 10000

    solvers = [
        ("Steepest Descent", matrix_solve_steepest_descent),
        ("Conjugate Gradient", matrix_solve_conjugate_gradient),
        ("Gauss Seidel", matrix_solve_gauss_seidel),
    ]
    for label, solver in solvers:
        # Time only the solve itself, not the printing/error computation.
        start_time = time.time()
        solution, iter_count = solver(random_matrix, b, tolerance, max_iterations, True)
        elapsed = time.time() - start_time
        print(label + " " + str(n) + "x" + str(n) + ": Absolute Error=" + str(abs_error_2norm(expected, solution)) + "  Iteration Count=" + str(iter_count) + " time=" + str(elapsed))
# Example #4
def matrix_jacobian_conjugate_gradient_compare():
    """Benchmark Jacobi iteration vs. conjugate gradient across matrix sizes.

    For each size in {10, 50, 100, 200, 500}, builds a random square
    diagonally dominant matrix, solves A x = [1]*n with both methods using
    the same tolerance and iteration cap, and prints wall-clock time and
    iteration count per method.
    """
    # Label strings carry the original column-aligning padding verbatim.
    solvers = (
        (" Jacobian:           Time: ", matrix_solve_jacobian),
        (" Conjugate Gradient: Time: ", matrix_solve_conjugate_gradient),
    )
    for size in [10, 50, 100, 200, 500]:
        matrix = gen_sqr_diagdom_matrix(size)
        vector = [1] * size
        for label, solver in solvers:
            # Time only the solve itself.
            starttime = time.time()
            solution, count = solver(matrix,
                                     vector,
                                     0.0001,
                                     10000,
                                     getIterCount=True)
            print(
                str(size) + "x" + str(size) + label +
                str(time.time() - starttime) + ", Iteration Count: " + str(count))