def test_elasticity(self):
    """Adaptive SA should be within 30% of candidate-seeded SA on elasticity."""
    A, B = linear_elasticity((35, 35), format='bsr')
    smoother = ('gauss_seidel', {'sweep': 'symmetric', 'iterations': 2})

    # Adaptive solver discovers candidates itself; the reference solver is
    # seeded with the known rigid-body modes B.
    asa, work = adaptive_sa_solver(A, num_candidates=3, improvement_iters=5,
                                   prepostsmoother=smoother)
    del work
    sa = smoothed_aggregation_solver(A, B=B)

    b = sp.rand(A.shape[0])
    res_asa = []
    res_sa = []
    x_asa = asa.solve(b, maxiter=20, tol=1e-10, residuals=res_asa)
    x_sa = sa.solve(b, maxiter=20, tol=1e-10, residuals=res_sa)
    del x_asa, x_sa

    # Geometric-mean convergence factor per cycle.
    conv_asa = (res_asa[-1] / res_asa[0])**(1.0 / len(res_asa))
    conv_sa = (res_sa[-1] / res_sa[0])**(1.0 / len(res_sa))
    assert conv_asa < 1.3 * conv_sa
def test_improve_candidates(self):
    """Test improve_candidates for Poisson and elasticity problems.

    rho_scale is the factor by which each successive improve_candidates
    option must beat the convergence factor of the previous option.
    """
    improve_candidates_list = [None,
                               [('block_gauss_seidel',
                                 {'iterations': 4, 'sweep': 'symmetric'})]]
    # make tests repeatable
    numpy.random.seed(0)

    cases = []
    A_elas, B_elas = linear_elasticity((60, 60), format='bsr')
    # (matrix, candidates, rho_scale)
    cases.append((poisson((61, 61), format='csr'), ones((61 * 61, 1)), 0.9))
    cases.append((A_elas, B_elas, 0.9))

    for (A, B, rho_scale) in cases:
        last_rho = -1.0
        x0 = rand(A.shape[0], 1)
        b = rand(A.shape[0], 1)
        for improve_candidates in improve_candidates_list:
            ml = smoothed_aggregation_solver(
                A, B, max_coarse=10, improve_candidates=improve_candidates)
            residuals = []
            # Solution itself is not needed; only the residual history is
            # checked (the unused x_sol binding was removed).
            ml.solve(b, x0=x0, maxiter=20, tol=1e-10, residuals=residuals)
            rho = (residuals[-1] / residuals[0])**(1.0 / len(residuals))
            if last_rho == -1.0:
                last_rho = rho
            else:
                # Each successive improve_candidates option should be an
                # improvement on the previous one.
                assert rho < rho_scale * last_rho
                last_rho = rho
def test_improve_candidates(self):
    """Test improve_candidates for the Poisson problem and elasticity.

    rho_scale is the amount that each successive improve_candidates
    option should improve convergence over the previous
    improve_candidates option.
    """
    improve_candidates_list = [None,
                               [("block_gauss_seidel",
                                 {"iterations": 4, "sweep": "symmetric"})]]
    # make tests repeatable
    numpy.random.seed(0)
    cases = []
    A_elas, B_elas = linear_elasticity((60, 60), format="bsr")
    # (Matrix, Candidates, rho_scale)
    cases.append((poisson((75, 75), format="csr"), ones((75 * 75, 1)), 0.9))
    cases.append((A_elas, B_elas, 0.9))
    for (A, B, rho_scale) in cases:
        last_rho = -1.0
        x0 = rand(A.shape[0], 1)
        b = rand(A.shape[0], 1)
        for improve_candidates in improve_candidates_list:
            ml = rootnode_solver(A, B, max_coarse=10,
                                 improve_candidates=improve_candidates)
            residuals = []
            x_sol = ml.solve(b, x0=x0, maxiter=20, tol=1e-10,
                             residuals=residuals)
            del x_sol
            # Geometric-mean residual-reduction factor per iteration.
            rho = (residuals[-1] / residuals[0]) ** (1.0 / len(residuals))
            if last_rho == -1.0:
                last_rho = rho
            else:
                # Each successive improve_candidates option should be an
                # improvement on the previous one.
                assert rho < rho_scale * last_rho
                last_rho = rho
def setUp(self):
    """Build (A, B, conv_bound, symmetry, smooth) regression cases."""
    A1 = poisson((5000,), format='csr')                  # 1D Poisson
    A2 = poisson((60, 60), format='csr')                 # 2D Poisson
    A3, B3 = linear_elasticity((50, 50), format='bsr')   # 2D elasticity
    self.cases = [
        (A1, None, 0.4, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A1, None, 0.4, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A1, None, 0.5, 'symmetric', ('energy', {'krylov': 'gmres'})),
        (A2, None, 0.42, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A2, None, 0.42, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A2, None, 0.42, 'symmetric',
         ('energy', {'krylov': 'cgnr', 'weighting': 'diagonal'})),
        (A3, B3, 0.32, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A3, B3, 0.22, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A3, B3, 0.42, 'symmetric',
         ('energy', {'krylov': 'cgnr', 'weighting': 'diagonal'})),
        (A3, B3, 0.42, 'symmetric', ('energy', {'krylov': 'gmres'})),
    ]
def setUp(self):
    """Build (A, B, conv_bound, symmetry, smooth) regression cases."""
    A1 = poisson((5000,), format='csr')                  # 1D Poisson
    A2 = poisson((60, 60), format='csr')                 # 2D Poisson
    A3, B3 = linear_elasticity((50, 50), format='bsr')   # 2D elasticity
    self.cases = [
        (A1, None, 0.4, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A1, None, 0.4, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A1, None, 0.5, 'symmetric', ('energy', {'krylov': 'gmres'})),
        (A2, None, 0.42, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A2, None, 0.42, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A2, None, 0.42, 'symmetric', ('energy', {'krylov': 'cgnr'})),
        (A3, B3, 0.32, 'symmetric', ('jacobi', {'omega': 4.0 / 3.0})),
        (A3, B3, 0.22, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A3, B3, 0.42, 'symmetric', ('energy', {'krylov': 'cgnr'})),
        (A3, B3, 0.42, 'symmetric', ('energy', {'krylov': 'gmres'})),
    ]
def setUp(self):
    """Build energy-smoother test cases: (A, B, conv_bound, symmetry, smooth)."""
    cases = []

    # 1D Poisson
    A = poisson((5000, ), format='csr')
    for bound, krylov in [(0.4, 'cg'), (0.4, 'gmres')]:
        cases.append((A, None, bound, 'symmetric',
                      ('energy', {'krylov': krylov})))

    # 2D Poisson
    A = poisson((75, 75), format='csr')
    for bound, krylov in [(0.26, 'cg'), (0.30, 'cgnr')]:
        cases.append((A, None, bound, 'symmetric',
                      ('energy', {'krylov': krylov})))

    # 2D linear elasticity with rigid-body-mode candidates
    A, B = linear_elasticity((50, 50), format='bsr')
    for bound, krylov in [(0.3, 'cg'), (0.3, 'cgnr'), (0.3, 'gmres')]:
        cases.append((A, B, bound, 'symmetric',
                      ('energy', {'krylov': krylov})))

    self.cases = cases
def setUp(self):
    """Register (A, B, conv_bound, symmetry, smoother) cases."""
    A1 = poisson((5000,), format="csr")                  # 1D Poisson
    A2 = poisson((75, 75), format="csr")                 # 2D Poisson
    A3, B3 = linear_elasticity((50, 50), format="bsr")   # 2D elasticity
    specs = [
        (A1, None, 0.4, "cg"),
        (A1, None, 0.4, "gmres"),
        (A2, None, 0.26, "cg"),
        (A2, None, 0.30, "cgnr"),
        (A3, B3, 0.3, "cg"),
        (A3, B3, 0.3, "cgnr"),
        (A3, B3, 0.3, "gmres"),
    ]
    self.cases = [(A, B, bound, "symmetric", ("energy", {"krylov": kr}))
                  for (A, B, bound, kr) in specs]
def test_improve_candidates(self):
    """Test improve_candidates for the Poisson problem and elasticity.

    rho_scale is the amount that each successive improve_candidates
    option should improve convergence over the previous
    improve_candidates option.
    """
    improve_candidates_list = [
        None,
        [('block_gauss_seidel', {
            'iterations': 4,
            'sweep': 'symmetric'
        })]
    ]
    # make tests repeatable
    np.random.seed(0)
    cases = []
    A_elas, B_elas = linear_elasticity((60, 60), format='bsr')
    # (Matrix, Candidates, rho_scale)
    cases.append((poisson((61, 61), format='csr'), np.ones(
        (61 * 61, 1)), 0.9))
    cases.append((A_elas, B_elas, 0.9))
    for (A, B, rho_scale) in cases:
        last_rho = -1.0
        x0 = sp.rand(A.shape[0], 1)
        b = sp.rand(A.shape[0], 1)
        for ic in improve_candidates_list:
            ml = smoothed_aggregation_solver(A, B, max_coarse=10,
                                             improve_candidates=ic)
            residuals = []
            x_sol = ml.solve(b, x0=x0, maxiter=20, tol=1e-10,
                             residuals=residuals)
            del x_sol
            # Geometric-mean residual-reduction factor per iteration.
            rho = (residuals[-1] / residuals[0])**(1.0 / len(residuals))
            if last_rho == -1.0:
                last_rho = rho
            else:
                # Each successive improve_candidates option should be an
                # improvement on the previous one.
                assert (rho < rho_scale * last_rho)
                last_rho = rho
def setUp(self):
    """Collect (A, B, conv_bound, symmetry, smooth) regression cases."""
    cases = []

    # 1D Poisson
    A = poisson((5000,), format="csr")
    for bound, smooth in [(0.4, ("jacobi", {"omega": 4.0 / 3.0})),
                          (0.4, ("energy", {"krylov": "cg"})),
                          (0.5, ("energy", {"krylov": "gmres"}))]:
        cases.append((A, None, bound, "symmetric", smooth))

    # 2D Poisson
    A = poisson((60, 60), format="csr")
    for bound, smooth in [(0.42, ("jacobi", {"omega": 4.0 / 3.0})),
                          (0.42, ("energy", {"krylov": "cg"})),
                          (0.42, ("energy", {"krylov": "cgnr"}))]:
        cases.append((A, None, bound, "symmetric", smooth))

    # 2D linear elasticity with rigid-body-mode candidates
    A, B = linear_elasticity((50, 50), format="bsr")
    for bound, smooth in [(0.32, ("jacobi", {"omega": 4.0 / 3.0})),
                          (0.22, ("energy", {"krylov": "cg"})),
                          (0.42, ("energy", {"krylov": "cgnr"})),
                          (0.42, ("energy", {"krylov": "gmres"}))]:
        cases.append((A, B, bound, "symmetric", smooth))

    self.cases = cases
def setUp(self):
    """Build energy-smoother cases: (A, B, conv_bound, symmetry, smooth)."""
    A1 = poisson((5000,), format='csr')                  # 1D Poisson
    A2 = poisson((75, 75), format='csr')                 # 2D Poisson
    A3, B3 = linear_elasticity((50, 50), format='bsr')   # 2D elasticity
    self.cases = [
        (A1, None, 0.4, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A1, None, 0.4, 'symmetric', ('energy', {'krylov': 'gmres'})),
        (A2, None, 0.26, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A2, None, 0.30, 'symmetric', ('energy', {'krylov': 'cgnr'})),
        (A3, B3, 0.3, 'symmetric', ('energy', {'krylov': 'cg'})),
        (A3, B3, 0.3, 'symmetric', ('energy', {'krylov': 'cgnr'})),
        (A3, B3, 0.3, 'symmetric', ('energy', {'krylov': 'gmres'})),
    ]
def test_elasticity(self):
    """Adaptive SA (no user candidates) must stay within 1.3x of seeded SA."""
    A, B = linear_elasticity((35, 35), format='bsr')
    smoother = ('gauss_seidel', {'sweep': 'symmetric', 'iterations': 2})
    adaptive, unused_work = adaptive_sa_solver(A, num_candidates=3,
                                               improvement_iters=5,
                                               prepostsmoother=smoother)
    del unused_work
    seeded = smoothed_aggregation_solver(A, B=B)

    b = sp.rand(A.shape[0])

    def conv_factor(solver):
        # Average residual-reduction factor over up to 20 cycles.
        history = []
        solver.solve(b, maxiter=20, tol=1e-10, residuals=history)
        return (history[-1] / history[0]) ** (1.0 / len(history))

    # Evaluate adaptive first, then seeded, matching the original order.
    conv_adaptive = conv_factor(adaptive)
    conv_seeded = conv_factor(seeded)
    assert conv_adaptive < 1.3 * conv_seeded
def setUp(self):
    """Default test problems: 1D/2D Poisson (no candidates) plus elasticity."""
    self.cases = [
        (poisson((100, ), format='csr'), None),
        (poisson((10, 10), format='csr'), None),
        linear_elasticity((7, 7), format='bsr'),  # (A, B) tuple appended as-is
    ]
# Problem setup: build the test matrix A for the selected problem type.
# NOTE(review): `problem_dim` and `N` are defined earlier in this script,
# outside this excerpt — confirm before reusing this fragment standalone.
epsilon = 0.00            # anisotropy strength passed to diffusion_stencil_2d
theta = 3.0 * np.pi / 16  # anisotropy angle passed to diffusion_stencil_2d

# 1d Poisson
if problem_dim == 1:
    grid_dims = [N, 1]
    A = poisson((N, ), format='csr')
# 2d Poisson
elif problem_dim == 2:
    grid_dims = [N, N]
    stencil = diffusion_stencil_2d(epsilon, theta)
    A = stencil_grid(stencil, grid_dims, format='csr')
# Elasticity (don't plot right now, plotting designed for Poisson)
elif problem_dim == -1:
    grid_dims = [N, N]
    [A, B] = linear_elasticity(grid_dims)
    [d, d, A] = symmetric_rescaling(A)

# Debugging scaffolding kept from development:
# W = get_geometric_weights(A, theta, N, N)
# W.eliminate_zeros()
# mmwrite('./test.mtx', A)
# pdb.set_trace()

# ------------------------------------------------------------------------------#
# C = evolution_strength_of_connection(A, epsilon=4.0, k=2)
# C = symmetric_strength_of_connection(W, theta=0.1)
# AggOp, Cpts = standard_aggregation(C)
# ------------------------------------------------------------------------------#
def test_evolution_strength_of_connection(self):
    """Complex-valued evolution SOC vs. the reference implementation.

    Also checks scale invariance of the measure and agreement of the
    'l2' and 'D_A' projections in the single-candidate case.
    """
    cases = []

    # Single near nullspace candidate
    stencil = [[0.0, -1.0, 0.0], [-0.001, 2.002, -0.001], [0.0, -1.0, 0.0]]
    A = 1.0j * stencil_grid(stencil, (4, 4), format='csr')
    B = 1.0j * np.ones((A.shape[0], 1))
    B[0] = 1.2 - 12.0j
    B[11] = -14.2
    cases.append({
        'A': A.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })

    # Multiple near nullspace candidate
    B = 1.0j * np.ones((A.shape[0], 2))
    B[0:-1:2, 0] = 0.0
    B[1:-1:2, 1] = 0.0
    B[-1, 0] = 0.0
    B[11, 1] = -14.2
    B[0, 0] = 1.2 - 12.0j
    cases.append({
        'A': A.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })
    Absr = A.tobsr(blocksize=(2, 2))
    cases.append({
        'A': Absr.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })

    for ca in cases:
        # Re-seed before each call so evolution_soc and the reference
        # consume the same random stream.
        scipy.random.seed(0)  # make results deterministic
        result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'],
                               k=ca['k'], proj_type=ca['proj'],
                               symmetrize_measure=False)
        scipy.random.seed(0)  # make results deterministic
        expected = reference_evolution_soc(ca['A'], ca['B'],
                                           epsilon=ca['epsilon'],
                                           k=ca['k'], proj_type=ca['proj'])
        assert_array_almost_equal(result.todense(), expected.todense())

        # Same comparison with local weighting.
        scipy.random.seed(0)  # make results deterministic
        result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'],
                               k=ca['k'], proj_type=ca['proj'],
                               symmetrize_measure=False, weighting='local')
        scipy.random.seed(0)  # make results deterministic
        expected = reference_evolution_soc(ca['A'], ca['B'],
                                           epsilon=ca['epsilon'],
                                           k=ca['k'], proj_type=ca['proj'],
                                           weighting='local')
        assert_array_almost_equal(result.todense(), expected.todense())

    # Test Scale Invariance for a single candidate
    A = 1.0j * poisson((5, 5), format='csr')
    B = 1.0j * arange(1, A.shape[0] + 1, dtype=float).reshape(-1, 1)
    scipy.random.seed(0)  # make results deterministic
    result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2,
                                    proj_type="D_A",
                                    symmetrize_measure=False)
    # create scaled A
    D = spdiags([arange(A.shape[0], 2 * A.shape[0], dtype=float)], [0],
                A.shape[0], A.shape[0], format='csr')
    Dinv = spdiags([1.0 / arange(A.shape[0], 2 * A.shape[0], dtype=float)],
                   [0], A.shape[0], A.shape[0], format='csr')
    scipy.random.seed(0)  # make results deterministic
    result_scaled = evolution_soc(D * A * D, Dinv * B, epsilon=4.0, k=2,
                                  proj_type="D_A",
                                  symmetrize_measure=False)
    assert_array_almost_equal(result_scaled.todense(),
                              result_unscaled.todense(), decimal=2)

    # Test that the l2 and D_A are the same for the 1 candidate case
    scipy.random.seed(0)  # make results deterministic
    resultDA = evolution_soc(D * A * D, Dinv * B, epsilon=4.0, k=2,
                             proj_type="D_A", symmetrize_measure=False)
    scipy.random.seed(0)  # make results deterministic
    resultl2 = evolution_soc(D * A * D, Dinv * B, epsilon=4.0, k=2,
                             proj_type="l2", symmetrize_measure=False)
    assert_array_almost_equal(resultDA.todense(), resultl2.todense())

    # Test Scale Invariance for multiple candidates
    (A, B) = linear_elasticity((5, 5), format='bsr')
    A = 1.0j * A
    B = 1.0j * B
    scipy.random.seed(0)  # make results deterministic
    result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2,
                                    proj_type="D_A",
                                    symmetrize_measure=False)
    # create scaled A
    D = spdiags([arange(A.shape[0], 2 * A.shape[0], dtype=float)], [0],
                A.shape[0], A.shape[0], format='csr')
    Dinv = spdiags([1.0 / arange(A.shape[0], 2 * A.shape[0], dtype=float)],
                   [0], A.shape[0], A.shape[0], format='csr')
    scipy.random.seed(0)  # make results deterministic
    result_scaled = evolution_soc((D * A * D).tobsr(blocksize=(2, 2)),
                                  Dinv * B, epsilon=4.0, k=2,
                                  proj_type="D_A",
                                  symmetrize_measure=False)
    assert_array_almost_equal(result_scaled.todense(),
                              result_unscaled.todense(), decimal=2)
def test_range(self):
    """Check that P*R=B.

    For each (A, B, smooth) case: classic SA must exactly reproduce the
    candidates B in the range of P; root-node additionally pins the
    identity at the coarse points (Cpts).
    """
    numpy.random.seed(0)  # make tests repeatable
    cases = []

    # Simple, real-valued diffusion problems
    X = load_example('airfoil')
    A = X['A'].tocsr(); B = X['B']
    cases.append((A, B, ('jacobi', {'filter': True, 'weighting': 'local'})))
    cases.append((A, B, ('jacobi', {'filter': True, 'weighting': 'block'})))
    cases.append((A, B, ('energy', {'maxiter': 3})))
    cases.append((A, B, ('energy', {'krylov': 'cgnr'})))
    cases.append((A, B, ('energy', {'krylov': 'gmres', 'degree': 2})))

    A = poisson((10, 10), format='csr')
    B = ones((A.shape[0], 1))
    cases.append((A, B, ('jacobi', {'filter': True,
                                    'weighting': 'diagonal'})))
    cases.append((A, B, ('jacobi', {'filter': True, 'weighting': 'local'})))
    cases.append((A, B, 'energy'))
    cases.append((A, B, ('energy', {'degree': 2})))
    cases.append((A, B, ('energy', {'krylov': 'cgnr', 'degree': 2})))
    cases.append((A, B, ('energy', {'krylov': 'gmres'})))

    # Simple, imaginary-valued problems
    iA = 1.0j * A
    iB = 1.0 + rand(iA.shape[0], 2) + 1.0j * (1.0 + rand(iA.shape[0], 2))
    cases.append((iA, B, ('jacobi', {'filter': True,
                                     'weighting': 'diagonal'})))
    cases.append((iA, B, ('jacobi', {'filter': True,
                                     'weighting': 'block'})))
    cases.append((iA, iB, ('jacobi', {'filter': True,
                                      'weighting': 'local'})))
    cases.append((iA, iB, ('jacobi', {'filter': True,
                                      'weighting': 'block'})))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('jacobi', {'filter': True, 'weighting': 'block'})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('jacobi', {'filter': True, 'weighting': 'block'})))
    cases.append((iA, B, ('energy', {'krylov': 'cgnr', 'degree': 2})))
    cases.append((iA, iB, ('energy', {'krylov': 'cgnr'})))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2,
                              'maxiter': 3})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('energy', {'krylov': 'cgnr'})))
    cases.append((iA, B, ('energy', {'krylov': 'gmres'})))
    cases.append((iA, iB, ('energy', {'krylov': 'gmres', 'degree': 2})))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('energy', {'krylov': 'gmres'})))

    # Simple, imaginary-valued problems (real matrix shifted by 1.0j*I)
    iA = A + 1.0j * scipy.sparse.eye(A.shape[0], A.shape[1])
    cases.append((iA, B, ('jacobi', {'filter': True,
                                     'weighting': 'local'})))
    cases.append((iA, B, ('jacobi', {'filter': True,
                                     'weighting': 'block'})))
    cases.append((iA, iB, ('jacobi', {'filter': True,
                                      'weighting': 'diagonal'})))
    cases.append((iA, iB, ('jacobi', {'filter': True,
                                      'weighting': 'block'})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('jacobi', {'filter': True, 'weighting': 'block'})))
    cases.append((iA, B, ('energy', {'krylov': 'cgnr'})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'cgnr'})))
    cases.append((iA, B, ('energy', {'krylov': 'gmres'})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3})))

    # Gauge Laplacian (complex-valued test matrix)
    A = gauge_laplacian(10, spacing=1.0, beta=0.21)
    B = ones((A.shape[0], 1))
    cases.append((A, iB, ('jacobi', {'filter': True,
                                     'weighting': 'diagonal'})))
    cases.append((A, iB, ('jacobi', {'filter': True,
                                     'weighting': 'local'})))
    cases.append((A, B, ('energy', {'krylov': 'cg'})))
    cases.append((A, iB, ('energy', {'krylov': 'cgnr'})))
    cases.append((A, iB, ('energy', {'krylov': 'gmres'})))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2,
                              'maxiter': 3})))
    cases.append((A.tobsr(blocksize=(2, 2)), iB,
                  ('energy', {'krylov': 'cg'})))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3})))

    # Linear elasticity (BSR, multiple rigid-body-mode candidates)
    A, B = linear_elasticity((10, 10))
    cases.append((A, B, ('jacobi', {'filter': True,
                                    'weighting': 'diagonal'})))
    cases.append((A, B, ('jacobi', {'filter': True,
                                    'weighting': 'local'})))
    cases.append((A, B, ('jacobi', {'filter': True,
                                    'weighting': 'block'})))
    cases.append((A, B, ('energy', {'degree': 2})))
    cases.append((A, B, ('energy', {'krylov': 'cgnr'})))
    cases.append((A, B, ('energy', {'krylov': 'gmres', 'degree': 2})))

    # Classic SA cases
    for A, B, smooth in cases:
        ml = smoothed_aggregation_solver(A, B=B, max_coarse=1,
                                         max_levels=2, smooth=smooth)
        P = ml.levels[0].P
        B = ml.levels[0].B
        R = ml.levels[1].B
        assert_almost_equal(P * R, B)

    def blocksize(A):
        # Helper Function: return the blocksize of a matrix
        if isspmatrix_bsr(A):
            return A.blocksize[0]
        else:
            return 1

    # Root-node cases
    counter = 0
    for A, B, smooth in cases:
        counter += 1
        if isinstance(smooth, tuple):
            smoother = smooth[0]
        else:
            smoother = smooth
        # Root-node is only exercised for energy smoothing with at least
        # blocksize-many candidate vectors.
        if smoother == 'energy' and (B.shape[1] >= blocksize(A)):
            ml = rootnode_solver(A, B=B, max_coarse=1, max_levels=2,
                                 smooth=smooth,
                                 improve_candidates=[('gauss_seidel_nr',
                                                      {'sweep': 'symmetric',
                                                       'iterations': 4}),
                                                     None],
                                 keep=True, symmetry='nonsymmetric')
            T = ml.levels[0].T.tocsr()
            Cpts = ml.levels[0].Cpts
            Bf = ml.levels[0].B
            Bf_H = ml.levels[0].BH
            Bc = ml.levels[1].B
            P = ml.levels[0].P.tocsr()

            # P should preserve B in its range, wherever P
            # has enough nonzeros
            mask = ((P.indptr[1:] - P.indptr[:-1]) >= B.shape[1])
            assert_almost_equal((P * Bc)[mask, :], Bf[mask, :])
            assert_almost_equal((P * Bc)[mask, :], Bf_H[mask, :])

            # P should be the identity at Cpts
            I = eye(T.shape[1], T.shape[1], format='csr', dtype=T.dtype)
            I2 = P[Cpts, :]
            assert_almost_equal(I.data, I2.data)
            assert_equal(I.indptr, I2.indptr)
            assert_equal(I.indices, I2.indices)

            # T should be the identity at Cpts
            I2 = T[Cpts, :]
            assert_almost_equal(I.data, I2.data)
            assert_equal(I.indptr, I2.indptr)
            assert_equal(I.indices, I2.indices)
# Linear Elasticity Example
#
# Solve a 200x200-grid linearized elasticity problem with either the
# smoothed-aggregation or the root-node solver, chosen interactively.
import scipy

from pyamg.gallery import linear_elasticity
from pyamg import smoothed_aggregation_solver, rootnode_solver

from convergence_tools import print_cycle_history

print("Test convergence for a simple 200x200 Grid, "
      "Linearized Elasticity Problem")

# Python 3: input() returns a string, so convert to int before comparing.
choice = int(input('\n Input Choice:\n'
                   '1: Run smoothed_aggregation_solver\n'
                   '2: Run rootnode_solver\n'))

# Create matrix and candidate vectors.  B has 3 columns, representing
# rigid body modes of the mesh.  B[:,0] and B[:,1] are translations in
# the X and Y directions while B[:,2] is a rotation.
A, B = linear_elasticity((200, 200), format='bsr')

# Construct solver using AMG based on Smoothed Aggregation (SA)
if choice == 1:
    mls = smoothed_aggregation_solver(A, B=B, smooth='energy')
elif choice == 2:
    mls = rootnode_solver(A, B=B, smooth='energy')
else:
    raise ValueError("Enter a choice of 1 or 2")

# Display hierarchy information
print(mls)

# Create random right hand side
b = scipy.rand(A.shape[0], 1)
def test_range(self):
    """Check that P*R=B.

    For each (A, B, smooth, name) case: classic SA must exactly
    reproduce the candidates B in the range of P; root-node additionally
    pins the identity at the coarse points (Cpts).
    """
    warnings.filterwarnings('ignore', category=UserWarning,
                            message='Having less target vectors')
    np.random.seed(18410243)  # make tests repeatable
    cases = []

    # Simple, real-valued diffusion problems
    name = 'airfoil'
    X = load_example('airfoil')
    A = X['A'].tocsr()
    B = X['B']
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True, 'weighting': 'local'}),
                  name))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True, 'weighting': 'block'}),
                  name))
    cases.append((A, B, ('energy', {'maxiter': 3}), name))
    cases.append((A, B, ('energy',
                         {'krylov': 'cgnr', 'weighting': 'diagonal'}),
                  name))
    cases.append((A, B, ('energy', {'krylov': 'gmres', 'degree': 2}),
                  name))

    name = 'poisson'
    A = poisson((10, 10), format='csr')
    B = np.ones((A.shape[0], 1))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True, 'weighting': 'diagonal'}),
                  name))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True, 'weighting': 'local'}),
                  name))
    cases.append((A, B, 'energy', name))
    cases.append((A, B, ('energy', {'degree': 2}), name))
    cases.append((A, B, ('energy', {'krylov': 'cgnr', 'degree': 2,
                                    'weighting': 'diagonal'}), name))
    cases.append((A, B, ('energy', {'krylov': 'gmres'}), name))

    # Simple, imaginary-valued problems
    name = 'random imaginary'
    iA = 1.0j * A
    iB = 1.0 + np.random.rand(iA.shape[0], 2)\
        + 1.0j * (1.0 + np.random.rand(iA.shape[0], 2))
    cases.append((iA, B, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'diagonal'}), name))
    cases.append((iA, B, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'block'}), name))
    cases.append((iA, iB, ('jacobi',
                           {'filter_entries': True,
                            'weighting': 'local'}), name))
    cases.append((iA, iB, ('jacobi',
                           {'filter_entries': True,
                            'weighting': 'block'}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('jacobi', {'filter_entries': True,
                              'weighting': 'block'}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('jacobi', {'filter_entries': True,
                              'weighting': 'block'}), name))
    cases.append((iA, B, ('energy',
                          {'krylov': 'cgnr', 'degree': 2,
                           'weighting': 'diagonal'}), name))
    cases.append((iA, iB, ('energy',
                           {'krylov': 'cgnr',
                            'weighting': 'diagonal'}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2, 'maxiter': 3,
                              'weighting': 'diagonal',
                              'postfilter': {'theta': 0.05}}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2, 'maxiter': 3,
                              'weighting': 'diagonal',
                              'prefilter': {'theta': 0.05}}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2,
                              'weighting': 'diagonal',
                              'maxiter': 3}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('energy', {'krylov': 'cgnr',
                              'weighting': 'diagonal'}), name))
    cases.append((iA, B, ('energy', {'krylov': 'gmres'}), name))
    cases.append((iA, iB, ('energy', {'krylov': 'gmres', 'degree': 2}),
                  name))
    cases.append((iA.tobsr(blocksize=(5, 5)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3}), name))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB,
                  ('energy', {'krylov': 'gmres'}), name))

    # Simple, imaginary-valued problems
    name = 'random imaginary + I'
    iA = A + 1.0j * sparse.eye(A.shape[0], A.shape[1])
    cases.append((iA, B, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'local'}), name))
    cases.append((iA, B, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'block'}), name))
    cases.append((iA, iB, ('jacobi',
                           {'filter_entries': True,
                            'weighting': 'diagonal'}), name))
    cases.append((iA, iB, ('jacobi',
                           {'filter_entries': True,
                            'weighting': 'block'}), name))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('jacobi', {'filter_entries': True,
                              'weighting': 'block'}), name))
    cases.append((iA, B, ('energy',
                          {'krylov': 'cgnr',
                           'weighting': 'diagonal'}), name))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'cgnr',
                              'weighting': 'diagonal'}), name))
    cases.append((iA, B, ('energy', {'krylov': 'gmres'}), name))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3}), name))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3,
                              'postfilter': {'theta': 0.05}}), name))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3,
                              'prefilter': {'theta': 0.05}}), name))

    name = 'gauge laplacian'
    A = gauge_laplacian(10, spacing=1.0, beta=0.21)
    B = np.ones((A.shape[0], 1))
    cases.append((A, iB, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'diagonal'}), name))
    cases.append((A, iB, ('jacobi',
                          {'filter_entries': True,
                           'weighting': 'local'}), name))
    cases.append((A, B, ('energy', {'krylov': 'cg'}), name))
    cases.append((A, iB, ('energy',
                          {'krylov': 'cgnr',
                           'weighting': 'diagonal'}), name))
    cases.append((A, iB, ('energy', {'krylov': 'gmres'}), name))

    name = 'gauge laplacian bsr'
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2,
                              'weighting': 'diagonal', 'maxiter': 3,
                              'postfilter': {'theta': 0.05}}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2,
                              'weighting': 'diagonal', 'maxiter': 3,
                              'prefilter': {'theta': 0.05}}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'cgnr', 'degree': 2, 'maxiter': 3,
                              'weighting': 'diagonal'}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), iB,
                  ('energy', {'krylov': 'cg'}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3,
                              'postfilter': {'theta': 0.05}}), name))
    cases.append((A.tobsr(blocksize=(2, 2)), B,
                  ('energy', {'krylov': 'gmres', 'degree': 2,
                              'maxiter': 3,
                              'prefilter': {'theta': 0.05}}), name))

    # Linear elasticity (BSR, multiple rigid-body-mode candidates)
    name = 'linear elasticity'
    A, B = linear_elasticity((10, 10))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True,
                          'weighting': 'diagonal'}), name))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True,
                          'weighting': 'local'}), name))
    cases.append((A, B, ('jacobi',
                         {'filter_entries': True,
                          'weighting': 'block'}), name))
    cases.append((A, B, ('energy', {'degree': 2}), name))
    cases.append((A, B, ('energy', {'degree': 3,
                                    'postfilter': {'theta': 0.05}}), name))
    cases.append((A, B, ('energy', {'degree': 3,
                                    'prefilter': {'theta': 0.05}}), name))
    cases.append((A, B, ('energy', {'krylov': 'cgnr',
                                    'weighting': 'diagonal'}), name))
    cases.append((A, B, ('energy', {'krylov': 'gmres', 'degree': 2}),
                  name))

    # Classic SA cases
    for A, B, smooth, _name in cases:
        ml = smoothed_aggregation_solver(A, B=B, max_coarse=1,
                                         max_levels=2, smooth=smooth)
        P = ml.levels[0].P
        B = ml.levels[0].B
        R = ml.levels[1].B
        assert_almost_equal(P * R, B)

    def _get_blocksize(A):
        # Helper Function: return the blocksize of a matrix
        if sparse.isspmatrix_bsr(A):
            return A.blocksize[0]
        return 1

    # Root-node cases
    counter = 0
    for A, B, smooth, _name in cases:
        counter += 1
        if isinstance(smooth, tuple):
            smoother = smooth[0]
        else:
            smoother = smooth
        # Root-node is only exercised for energy smoothing with at least
        # blocksize-many candidate vectors.
        if smoother == 'energy' and (B.shape[1] >= _get_blocksize(A)):
            ic = [('gauss_seidel_nr',
                   {'sweep': 'symmetric', 'iterations': 4}), None]
            ml = rootnode_solver(A, B=B, max_coarse=1, max_levels=2,
                                 smooth=smooth, improve_candidates=ic,
                                 keep=True, symmetry='nonsymmetric')
            T = ml.levels[0].T.tocsr()
            Cpts = ml.levels[0].Cpts
            Bf = ml.levels[0].B
            Bf_H = ml.levels[0].BH
            Bc = ml.levels[1].B
            P = ml.levels[0].P.tocsr()
            T.eliminate_zeros()
            P.eliminate_zeros()

            # P should preserve B in its range, wherever P
            # has enough nonzeros
            mask = ((P.indptr[1:] - P.indptr[:-1]) >= B.shape[1])
            assert_almost_equal((P*Bc)[mask, :], Bf[mask, :])
            assert_almost_equal((P*Bc)[mask, :], Bf_H[mask, :])

            # P should be the identity at Cpts
            I1 = sparse.eye(T.shape[1], T.shape[1], format='csr',
                            dtype=T.dtype)
            I2 = P[Cpts, :]
            assert_almost_equal(I1.data, I2.data)
            assert_equal(I1.indptr, I2.indptr)
            assert_equal(I1.indices, I2.indices)

            # T should be the identity at Cpts
            I2 = T[Cpts, :]
            assert_almost_equal(I1.data, I2.data)
            assert_equal(I1.indptr, I2.indptr)
            assert_equal(I1.indices, I2.indices)
def setUp(self):
    """Default test problems: 1D/2D Poisson (no candidates) plus elasticity."""
    poisson_cases = [(poisson((100,), format='csr'), None),
                     (poisson((10, 10), format='csr'), None)]
    self.cases = []
    self.cases.extend(poisson_cases)
    # linear_elasticity returns an (A, B) pair, appended directly.
    self.cases.append(linear_elasticity((7, 7), format='bsr'))
def test_evolution_strength_of_connection(self):
    """Real-valued evolution SOC vs. the reference implementation."""
    # Params: A, B, epsilon=4.0, k=2, proj_type="l2"
    cases = []

    # Ensure that isotropic diffusion results in isotropic strength stencil
    for N in [3, 5, 7, 10]:
        A = poisson((N, ), format='csr')
        B = np.ones((A.shape[0], 1))
        cases.append({
            'A': A.copy(),
            'B': B.copy(),
            'epsilon': 4.0,
            'k': 2,
            'proj': 'l2'
        })

    # Ensure that anisotropic diffusion results in an anisotropic
    # strength stencil
    for N in [3, 6, 7]:
        u = np.ones(N * N)
        A = spdiags([-u, -0.001 * u, 2.002 * u, -0.001 * u, -u],
                    [-N, -1, 0, 1, N], N * N, N * N, format='csr')
        B = np.ones((A.shape[0], 1))
        cases.append({
            'A': A.copy(),
            'B': B.copy(),
            'epsilon': 4.0,
            'k': 2,
            'proj': 'l2'
        })

    # Ensure that isotropic elasticity results in an isotropic stencil
    for N in [3, 6, 7]:
        (A, B) = linear_elasticity((N, N), format='bsr')
        cases.append({
            'A': A.copy(),
            'B': B.copy(),
            'epsilon': 32.0,
            'k': 8,
            'proj': 'D_A'
        })

    # Run an example with a non-uniform stencil
    ex = load_example('airfoil')
    A = ex['A'].tocsr()
    B = np.ones((A.shape[0], 1))
    cases.append({
        'A': A.copy(),
        'B': B.copy(),
        'epsilon': 8.0,
        'k': 4,
        'proj': 'D_A'
    })
    Absr = A.tobsr(blocksize=(5, 5))
    cases.append({
        'A': Absr.copy(),
        'B': B.copy(),
        'epsilon': 8.0,
        'k': 4,
        'proj': 'D_A'
    })

    # Different B (two candidate columns)
    B = arange(1, 2 * A.shape[0] + 1, dtype=float).reshape(-1, 2)
    cases.append({
        'A': A.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })
    cases.append({
        'A': Absr.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })

    # Zero row and column
    A.data[A.indptr[4]:A.indptr[5]] = 0.0
    A = A.tocsc()
    A.data[A.indptr[4]:A.indptr[5]] = 0.0
    A.eliminate_zeros()
    A = A.tocsr()
    A.sort_indices()
    cases.append({
        'A': A.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })
    Absr = A.tobsr(blocksize=(5, 5))
    cases.append({
        'A': Absr.copy(),
        'B': B.copy(),
        'epsilon': 4.0,
        'k': 2,
        'proj': 'l2'
    })

    for ca in cases:
        # Re-seed before each call so evolution_soc and the reference
        # consume the same random stream.
        scipy.random.seed(0)  # make results deterministic
        result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'],
                               k=ca['k'], proj_type=ca['proj'],
                               symmetrize_measure=False)
        scipy.random.seed(0)  # make results deterministic
        expected = reference_evolution_soc(ca['A'], ca['B'],
                                           epsilon=ca['epsilon'],
                                           k=ca['k'],
                                           proj_type=ca['proj'])
        assert_array_almost_equal(result.todense(), expected.todense(),
                                  decimal=4)

        # Same comparison with local weighting.
        scipy.random.seed(0)  # make results deterministic
        result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'],
                               k=ca['k'], proj_type=ca['proj'],
                               symmetrize_measure=False,
                               weighting='local')
        scipy.random.seed(0)  # make results deterministic
        expected = reference_evolution_soc(ca['A'], ca['B'],
                                           epsilon=ca['epsilon'],
                                           k=ca['k'],
                                           proj_type=ca['proj'],
                                           weighting='local')
        assert_array_almost_equal(result.todense(), expected.todense(),
                                  decimal=4)

    # Test Scale Invariance for multiple near nullspace candidates
    (A, B) = linear_elasticity((5, 5), format='bsr')
    scipy.random.seed(0)  # make results deterministic
    result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2,
                                    proj_type="D_A",
                                    symmetrize_measure=False)
    # create scaled A
    D = spdiags([arange(A.shape[0], 2 * A.shape[0], dtype=float)], [0],
                A.shape[0], A.shape[0], format='csr')
    Dinv = spdiags([1.0 / arange(A.shape[0], 2 * A.shape[0], dtype=float)],
                   [0], A.shape[0], A.shape[0], format='csr')
    scipy.random.seed(0)  # make results deterministic
    result_scaled = evolution_soc((D * A * D).tobsr(blocksize=(2, 2)),
                                  Dinv * B, epsilon=4.0, k=2,
                                  proj_type="D_A",
                                  symmetrize_measure=False)
    assert_array_almost_equal(result_scaled.todense(),
                              result_unscaled.todense(), decimal=2)
def test_evolution_strength_of_connection(self):
    """Compare evolution_soc against reference_evolution_soc (default weighting).

    Same case battery as the local-weighting variant of this test, but
    only the default weighting is exercised.  Each case dict carries the
    operator ``A``, candidates ``B``, and the parameters ``epsilon``,
    ``k``, and ``proj``.
    """
    # Params: A, B, epsilon=4.0, k=2, proj_type="l2"
    cases = []

    # Ensure that isotropic diffusion results in isotropic strength stencil
    for N in [3, 5, 7, 10]:
        A = poisson((N,), format='csr')
        B = np.ones((A.shape[0], 1))
        cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0,
                      'k': 2, 'proj': 'l2'})

    # Ensure that anisotropic diffusion results in an anisotropic
    # strength stencil (weak 0.001 coupling on the +/-1 offdiagonals)
    for N in [3, 6, 7]:
        u = np.ones(N*N)
        A = spdiags([-u, -0.001*u, 2.002*u, -0.001*u, -u],
                    [-N, -1, 0, 1, N], N*N, N*N, format='csr')
        B = np.ones((A.shape[0], 1))
        cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0,
                      'k': 2, 'proj': 'l2'})

    # Ensure that isotropic elasticity results in an isotropic stencil
    for N in [3, 6, 7]:
        (A, B) = linear_elasticity((N, N), format='bsr')
        cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 32.0,
                      'k': 8, 'proj': 'D_A'})

    # Run an example with a non-uniform stencil
    ex = load_example('airfoil')
    A = ex['A'].tocsr()
    B = np.ones((A.shape[0], 1))
    cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 8.0,
                  'k': 4, 'proj': 'D_A'})
    # Same operator in BSR form
    Absr = A.tobsr(blocksize=(5, 5))
    cases.append({'A': Absr.copy(), 'B': B.copy(), 'epsilon': 8.0,
                  'k': 4, 'proj': 'D_A'})
    # Different B: two candidate vectors instead of one
    B = arange(1, 2*A.shape[0]+1, dtype=float).reshape(-1, 2)
    cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0,
                  'k': 2, 'proj': 'l2'})
    cases.append({'A': Absr.copy(), 'B': B.copy(), 'epsilon': 4.0,
                  'k': 2, 'proj': 'l2'})

    # Zero row and column (index 4), exercising structurally
    # decoupled unknowns
    A.data[A.indptr[4]:A.indptr[5]] = 0.0
    A = A.tocsc()
    A.data[A.indptr[4]:A.indptr[5]] = 0.0
    A.eliminate_zeros()
    A = A.tocsr()
    A.sort_indices()
    cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0,
                  'k': 2, 'proj': 'l2'})
    Absr = A.tobsr(blocksize=(5, 5))
    cases.append({'A': Absr.copy(), 'B': B.copy(), 'epsilon': 4.0,
                  'k': 2, 'proj': 'l2'})

    for ca in cases:
        # symmetrize_measure=False so the raw measure can be compared
        # entry-by-entry against the reference implementation
        scipy.random.seed(0)  # make results deterministic
        result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'],
                               k=ca['k'], proj_type=ca['proj'],
                               symmetrize_measure=False)
        scipy.random.seed(0)  # make results deterministic
        expected = reference_evolution_soc(ca['A'], ca['B'],
                                           epsilon=ca['epsilon'],
                                           k=ca['k'],
                                           proj_type=ca['proj'])
        assert_array_almost_equal(result.todense(), expected.todense(),
                                  decimal=4)

    # Test Scale Invariance for multiple near nullspace candidates:
    # strength computed from D*A*D with candidates D^{-1}*B should
    # (approximately) match the unscaled problem
    (A, B) = linear_elasticity((5, 5), format='bsr')
    scipy.random.seed(0)  # make results deterministic
    result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2,
                                    proj_type="D_A",
                                    symmetrize_measure=False)
    # create scaled A
    D = spdiags([arange(A.shape[0], 2*A.shape[0], dtype=float)], [0],
                A.shape[0], A.shape[0], format='csr')
    Dinv = spdiags([1.0/arange(A.shape[0], 2*A.shape[0], dtype=float)],
                   [0], A.shape[0], A.shape[0], format='csr')
    scipy.random.seed(0)  # make results deterministic
    result_scaled = evolution_soc((D*A*D).tobsr(blocksize=(2, 2)),
                                  Dinv*B, epsilon=4.0, k=2,
                                  proj_type="D_A",
                                  symmetrize_measure=False)
    assert_array_almost_equal(result_scaled.todense(),
                              result_unscaled.todense(), decimal=2)
# Linear Elasticity Example import scipy from pyamg.gallery import linear_elasticity from pyamg import smoothed_aggregation_solver, rootnode_solver from convergence_tools import print_cycle_history print "Test convergence for a simple 200x200 Grid, Linearized Elasticity Problem" choice = input('\n Input Choice:\n' + \ '1: Run smoothed_aggregation_solver\n' + \ '2: Run rootnode_solver\n' ) # Create matrix and candidate vectors. B has 3 columns, representing # rigid body modes of the mesh. B[:,0] and B[:,1] are translations in # the X and Y directions while B[:,2] is a rotation. A,B = linear_elasticity((200,200), format='bsr') # Construct solver using AMG based on Smoothed Aggregation (SA) if choice == 1: mls = smoothed_aggregation_solver(A, B=B, smooth='energy') elif choice == 2: mls = rootnode_solver(A, B=B, smooth='energy') else: raise ValueError("Enter a choice of 1 or 2") # Display hierarchy information print mls # Create random right hand side b = scipy.rand(A.shape[0],1)
def test_range(self):
    """Check that P*R=B"""
    np.random.seed(0)  # make tests repeatable
    cases = []

    # Simple, real-valued diffusion problems
    X = load_example("airfoil")
    A = X["A"].tocsr()
    B = X["B"]
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((A, B, ("energy", {"maxiter": 3})))
    cases.append((A, B, ("energy", {"krylov": "cgnr"})))
    cases.append((A, B, ("energy", {"krylov": "gmres", "degree": 2})))

    A = poisson((10, 10), format="csr")
    B = np.ones((A.shape[0], 1))
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "diagonal"})))
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((A, B, "energy"))
    cases.append((A, B, ("energy", {"degree": 2})))
    cases.append((A, B, ("energy", {"krylov": "cgnr", "degree": 2})))
    cases.append((A, B, ("energy", {"krylov": "gmres"})))

    # Simple, imaginary-valued problems (iA is purely imaginary; iB is a
    # complex two-column candidate set)
    iA = 1.0j * A
    iB = 1.0 + np.random.rand(iA.shape[0], 2) + 1.0j * (1.0 + np.random.rand(iA.shape[0], 2))

    cases.append((iA, B, ("jacobi", {"filter": True, "weighting": "diagonal"})))
    cases.append((iA, B, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA, iB, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((iA, iB, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA.tobsr(blocksize=(5, 5)), B, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA, B, ("energy", {"krylov": "cgnr", "degree": 2})))
    cases.append((iA, iB, ("energy", {"krylov": "cgnr"})))
    cases.append(
        (
            iA.tobsr(blocksize=(5, 5)),
            B,
            ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3, "postfilter": {"theta": 0.05}}),
        )
    )
    cases.append(
        (
            iA.tobsr(blocksize=(5, 5)),
            B,
            ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3, "prefilter": {"theta": 0.05}}),
        )
    )
    cases.append((iA.tobsr(blocksize=(5, 5)), B, ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB, ("energy", {"krylov": "cgnr"})))
    cases.append((iA, B, ("energy", {"krylov": "gmres"})))
    cases.append((iA, iB, ("energy", {"krylov": "gmres", "degree": 2})))
    cases.append((iA.tobsr(blocksize=(5, 5)), B, ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3})))
    cases.append((iA.tobsr(blocksize=(5, 5)), iB, ("energy", {"krylov": "gmres"})))

    # Complex-valued problems: real diffusion operator shifted by 1.0j*I
    iA = A + 1.0j * scipy.sparse.eye(A.shape[0], A.shape[1])
    cases.append((iA, B, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((iA, B, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA, iB, ("jacobi", {"filter": True, "weighting": "diagonal"})))
    cases.append((iA, iB, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((iA, B, ("energy", {"krylov": "cgnr"})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB, ("energy", {"krylov": "cgnr"})))
    cases.append((iA, B, ("energy", {"krylov": "gmres"})))
    cases.append((iA.tobsr(blocksize=(4, 4)), iB, ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3})))
    cases.append(
        (
            iA.tobsr(blocksize=(4, 4)),
            iB,
            ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3, "postfilter": {"theta": 0.05}}),
        )
    )
    cases.append(
        (
            iA.tobsr(blocksize=(4, 4)),
            iB,
            ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3, "prefilter": {"theta": 0.05}}),
        )
    )

    # Gauge Laplacian (complex Hermitian operator)
    A = gauge_laplacian(10, spacing=1.0, beta=0.21)
    B = np.ones((A.shape[0], 1))
    cases.append((A, iB, ("jacobi", {"filter": True, "weighting": "diagonal"})))
    cases.append((A, iB, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((A, B, ("energy", {"krylov": "cg"})))
    cases.append((A, iB, ("energy", {"krylov": "cgnr"})))
    cases.append((A, iB, ("energy", {"krylov": "gmres"})))
    cases.append(
        (
            A.tobsr(blocksize=(2, 2)),
            B,
            ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3, "postfilter": {"theta": 0.05}}),
        )
    )
    cases.append(
        (
            A.tobsr(blocksize=(2, 2)),
            B,
            ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3, "prefilter": {"theta": 0.05}}),
        )
    )
    cases.append((A.tobsr(blocksize=(2, 2)), B, ("energy", {"krylov": "cgnr", "degree": 2, "maxiter": 3})))
    cases.append((A.tobsr(blocksize=(2, 2)), iB, ("energy", {"krylov": "cg"})))
    cases.append((A.tobsr(blocksize=(2, 2)), B, ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3})))
    cases.append(
        (
            A.tobsr(blocksize=(2, 2)),
            B,
            ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3, "postfilter": {"theta": 0.05}}),
        )
    )
    cases.append(
        (
            A.tobsr(blocksize=(2, 2)),
            B,
            ("energy", {"krylov": "gmres", "degree": 2, "maxiter": 3, "prefilter": {"theta": 0.05}}),
        )
    )

    # A, B = linear_elasticity((10, 10))
    # NOTE(review): the commented line above suggests an elasticity problem
    # was intended here; the cases below reuse the gauge Laplacian A, B.
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "diagonal"})))
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "local"})))
    cases.append((A, B, ("jacobi", {"filter": True, "weighting": "block"})))
    cases.append((A, B, ("energy", {"degree": 2})))
    cases.append((A, B, ("energy", {"degree": 3, "postfilter": {"theta": 0.05}})))
    cases.append((A, B, ("energy", {"degree": 3, "prefilter": {"theta": 0.05}})))
    cases.append((A, B, ("energy", {"krylov": "cgnr"})))
    cases.append((A, B, ("energy", {"krylov": "gmres", "degree": 2})))

    # Classic SA cases: the coarse candidates R must be interpolated
    # exactly back to the fine candidates B by P
    for A, B, smooth in cases:
        ml = smoothed_aggregation_solver(A, B=B, max_coarse=1, max_levels=2, smooth=smooth)
        P = ml.levels[0].P
        B = ml.levels[0].B
        R = ml.levels[1].B
        assert_almost_equal(P * R, B)

    def blocksize(A):
        # Helper Function: return the blocksize of a matrix
        if isspmatrix_bsr(A):
            return A.blocksize[0]
        else:
            return 1

    # Root-node cases: only run when the energy smoother is used and there
    # are at least as many candidates as the blocksize
    counter = 0
    for A, B, smooth in cases:
        counter += 1

        if isinstance(smooth, tuple):
            smoother = smooth[0]
        else:
            smoother = smooth

        if smoother == "energy" and (B.shape[1] >= blocksize(A)):
            ic = [("gauss_seidel_nr", {"sweep": "symmetric", "iterations": 4}), None]
            ml = rootnode_solver(
                A,
                B=B,
                max_coarse=1,
                max_levels=2,
                smooth=smooth,
                improve_candidates=ic,
                keep=True,
                symmetry="nonsymmetric",
            )
            T = ml.levels[0].T.tocsr()
            Cpts = ml.levels[0].Cpts
            Bf = ml.levels[0].B
            Bf_H = ml.levels[0].BH
            Bc = ml.levels[1].B
            P = ml.levels[0].P.tocsr()

            # P should preserve B in its range, wherever P
            # has enough nonzeros
            mask = (P.indptr[1:] - P.indptr[:-1]) >= B.shape[1]
            assert_almost_equal((P * Bc)[mask, :], Bf[mask, :])
            assert_almost_equal((P * Bc)[mask, :], Bf_H[mask, :])

            # P should be the identity at Cpts
            I1 = eye(T.shape[1], T.shape[1], format="csr", dtype=T.dtype)
            I2 = P[Cpts, :]
            assert_almost_equal(I1.data, I2.data)
            assert_equal(I1.indptr, I2.indptr)
            assert_equal(I1.indices, I2.indices)

            # T should be the identity at Cpts
            I2 = T[Cpts, :]
            assert_almost_equal(I1.data, I2.data)
            assert_equal(I1.indptr, I2.indptr)
            assert_equal(I1.indices, I2.indices)
def test_evolution_strength_of_connection(self): cases = [] # Single near nullspace candidate stencil = [[0.0, -1.0, 0.0], [-0.001, 2.002, -0.001], [0.0, -1.0, 0.0]] A = 1.0j*stencil_grid(stencil, (4, 4), format='csr') B = 1.0j*np.ones((A.shape[0], 1)) B[0] = 1.2 - 12.0j B[11] = -14.2 cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0, 'k': 2, 'proj': 'l2'}) # Multiple near nullspace candidate B = 1.0j*np.ones((A.shape[0], 2)) B[0:-1:2, 0] = 0.0 B[1:-1:2, 1] = 0.0 B[-1, 0] = 0.0 B[11, 1] = -14.2 B[0, 0] = 1.2 - 12.0j cases.append({'A': A.copy(), 'B': B.copy(), 'epsilon': 4.0, 'k': 2, 'proj': 'l2'}) Absr = A.tobsr(blocksize=(2, 2)) cases.append({'A': Absr.copy(), 'B': B.copy(), 'epsilon': 4.0, 'k': 2, 'proj': 'l2'}) for ca in cases: scipy.random.seed(0) # make results deterministic result = evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'], k=ca['k'], proj_type=ca['proj'], symmetrize_measure=False) scipy.random.seed(0) # make results deterministic expected = reference_evolution_soc(ca['A'], ca['B'], epsilon=ca['epsilon'], k=ca['k'], proj_type=ca['proj']) assert_array_almost_equal(result.todense(), expected.todense()) # Test Scale Invariance for a single candidate A = 1.0j*poisson((5, 5), format='csr') B = 1.0j*arange(1, A.shape[0]+1, dtype=float).reshape(-1, 1) scipy.random.seed(0) # make results deterministic result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2, proj_type="D_A", symmetrize_measure=False) # create scaled A D = spdiags([arange(A.shape[0], 2*A.shape[0], dtype=float)], [0], A.shape[0], A.shape[0], format='csr') Dinv = spdiags([1.0/arange(A.shape[0], 2*A.shape[0], dtype=float)], [0], A.shape[0], A.shape[0], format='csr') scipy.random.seed(0) # make results deterministic result_scaled = evolution_soc(D*A*D, Dinv*B, epsilon=4.0, k=2, proj_type="D_A", symmetrize_measure=False) assert_array_almost_equal(result_scaled.todense(), result_unscaled.todense(), decimal=2) # Test that the l2 and D_A are the same for the 1 candidate case 
scipy.random.seed(0) # make results deterministic resultDA = evolution_soc(D*A*D, Dinv*B, epsilon=4.0, k=2, proj_type="D_A", symmetrize_measure=False) scipy.random.seed(0) # make results deterministic resultl2 = evolution_soc(D*A*D, Dinv*B, epsilon=4.0, k=2, proj_type="l2", symmetrize_measure=False) assert_array_almost_equal(resultDA.todense(), resultl2.todense()) # Test Scale Invariance for multiple candidates (A, B) = linear_elasticity((5, 5), format='bsr') A = 1.0j*A B = 1.0j*B scipy.random.seed(0) # make results deterministic result_unscaled = evolution_soc(A, B, epsilon=4.0, k=2, proj_type="D_A", symmetrize_measure=False) # create scaled A D = spdiags([arange(A.shape[0], 2*A.shape[0], dtype=float)], [0], A.shape[0], A.shape[0], format='csr') Dinv = spdiags([1.0/arange(A.shape[0], 2*A.shape[0], dtype=float)], [0], A.shape[0], A.shape[0], format='csr') scipy.random.seed(0) # make results deterministic result_scaled = evolution_soc((D*A*D).tobsr(blocksize=(2, 2)), Dinv*B, epsilon=4.0, k=2, proj_type="D_A", symmetrize_measure=False) assert_array_almost_equal(result_scaled.todense(), result_unscaled.todense(), decimal=2)
cycle_list=['V'], symmetry='symmetric', definiteness='positive', solver=rootnode_solver) ## # To run the best solver found above, uncomment next two lines #from rot_ani_diff_diagnostic import rot_ani_diff_diagnostic #rot_ani_diff_diagnostic(A) if choice == 3: ## # Try a basic elasticity problem # --> Try V- and W-cycles by specifying cycle_list # --> Don't specify symmetry and definiteness and allow for auto-detection A = gallery.linear_elasticity((30, 30))[0].tobsr(blocksize=(2, 2)) solver_diagnostics(A, fname='elas_diagnostic', cycle_list=['V', 'W']) ## # To run the best solver found above, uncomment next two lines #from elas_diagnostic import elas_diagnostic #elas_diagnostic(A) if choice == 4: ## # Try a basic nonsymmetric recirculating flow problem # --> Only use V-cycles by specifying cycle_list # --> Don't specify symmetry and definiteness and allow for auto-detection # --> Specify the maximum coarse size and coarse grid solver with coarse_size_list # --> Try two different Krylov wrappers and set the maximum number of iterations