P_nii = permute * P_nii
P_nii_dagger = np.linalg.pinv(P_nii.todense(), rcond=tol)
err_nii = identity(n) - P_nii * P_nii_dagger
P_nii_nnz = float(len(P_nii.data)) / np.prod(P_nii.shape)

# Form standard SA prolongation operator
T, B_coarse = fit_candidates(AggOp, B)
P_sa = jacobi_prolongation_smoother(A, T, C, B_coarse)
# P_sa = richardson_prolongation_smoother(A, T, **kwargs)
P_sa_dagger = np.linalg.pinv(P_sa.todense(), rcond=tol)
err_sa = identity(n) - P_sa * P_sa_dagger
P_sa_nnz = float(len(P_sa.data)) / np.prod(P_sa.shape)

# Form RN prolongation operator
T, B_coarse = fit_candidates(AggOp, B[:, 0:1])
Cpt_params = (True, get_Cpt_params(A, Cpts, AggOp, T))
T = scale_T(T, Cpt_params[1]['P_I'], Cpt_params[1]['I_F'])
B_coarse = Cpt_params[1]['P_I'].T * B
P_rn = energy_prolongation_smoother(A, T, C, B_coarse, B,
                                    Cpt_params=Cpt_params, maxiter=8,
                                    degree=2, weighting='local')
P_rn_dagger = np.linalg.pinv(P_rn.todense(), rcond=tol)
err_rn = identity(n) - P_rn * P_rn_dagger
P_rn_nnz = float(len(P_rn.data)) / np.prod(P_rn.shape)
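# --- Hypothetical reporting sketch (not part of the original script) --------
# Summarize the three comparisons above: how far each prolongator P is from
# satisfying P P^+ = I, and how dense each P is.  Assumes the err_* and
# P_*_nnz variables computed above; the Frobenius norm is an arbitrary choice.
for name, err, density in [('NII', err_nii, P_nii_nnz),
                           ('SA', err_sa, P_sa_nnz),
                           ('RN', err_rn, P_rn_nnz)]:
    print('%3s: ||I - P P^+||_F = %.2e   nnz fraction of P = %.3f'
          % (name, np.linalg.norm(err, 'fro'), density))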
def extend_hierarchy(levels, strength, aggregate, smooth, improve_candidates,
                     diagonal_dominance=False, keep=True):
    """Service routine to implement the strength of connection, aggregation,
    tentative prolongation construction, and prolongation smoothing.  Called by
    smoothed_aggregation_solver.
    """

    def unpack_arg(v):
        if isinstance(v, tuple):
            return v[0], v[1]
        else:
            return v, {}

    A = levels[-1].A
    B = levels[-1].B
    if A.symmetry == "nonsymmetric":
        AH = A.H.asformat(A.format)
        BH = levels[-1].BH

    # Compute the strength-of-connection matrix C, where a larger C[i, j]
    # denotes a stronger coupling between i and j.
    fn, kwargs = unpack_arg(strength[len(levels) - 1])
    if fn == 'symmetric':
        C = symmetric_strength_of_connection(A, **kwargs)
    elif fn == 'classical':
        C = classical_strength_of_connection(A, **kwargs)
    elif fn == 'distance':
        C = distance_strength_of_connection(A, **kwargs)
    elif (fn == 'ode') or (fn == 'evolution'):
        if 'B' in kwargs:
            C = evolution_strength_of_connection(A, **kwargs)
        else:
            C = evolution_strength_of_connection(A, B, **kwargs)
    elif fn == 'energy_based':
        C = energy_based_strength_of_connection(A, **kwargs)
    elif fn == 'predefined':
        C = kwargs['C'].tocsr()
    elif fn == 'algebraic_distance':
        C = algebraic_distance(A, **kwargs)
    elif fn == 'affinity':
        C = affinity_distance(A, **kwargs)
    elif fn is None:
        C = A.tocsr()
    else:
        raise ValueError('unrecognized strength of connection method: %s' %
                         str(fn))

    # Avoid coarsening diagonally dominant rows
    flag, kwargs = unpack_arg(diagonal_dominance)
    if flag:
        C = eliminate_diag_dom_nodes(A, C, **kwargs)

    # Compute the aggregation matrix AggOp (i.e., the nodal coarsening of A).
    # AggOp is a boolean matrix, where the sparsity pattern of the k-th column
    # denotes the fine-grid nodes agglomerated into the k-th coarse-grid node.
    fn, kwargs = unpack_arg(aggregate[len(levels) - 1])
    if fn == 'standard':
        AggOp, Cnodes = standard_aggregation(C, **kwargs)
    elif fn == 'naive':
        AggOp, Cnodes = naive_aggregation(C, **kwargs)
    elif fn == 'lloyd':
        AggOp, Cnodes = lloyd_aggregation(C, **kwargs)
    elif fn == 'predefined':
        AggOp = kwargs['AggOp'].tocsr()
        Cnodes = kwargs['Cnodes']
    else:
        raise ValueError('unrecognized aggregation method %s' % str(fn))

    # Improve near nullspace candidates by relaxing on A B = 0
    fn, kwargs = unpack_arg(improve_candidates[len(levels) - 1])
    if fn is not None:
        b = np.zeros((A.shape[0], 1), dtype=A.dtype)
        B = relaxation_as_linear_operator((fn, kwargs), A, b) * B
        levels[-1].B = B
        if A.symmetry == "nonsymmetric":
            BH = relaxation_as_linear_operator((fn, kwargs), AH, b) * BH
            levels[-1].BH = BH

    # Compute the tentative prolongator, T, which is a tentative interpolation
    # matrix from the coarse grid to the fine grid.  T exactly interpolates
    # B_fine[:, 0:blocksize(A)] = T B_coarse[:, 0:blocksize(A)].
    T, dummy = fit_candidates(AggOp, B[:, 0:blocksize(A)])
    del dummy
    if A.symmetry == "nonsymmetric":
        TH, dummyH = fit_candidates(AggOp, BH[:, 0:blocksize(A)])
        del dummyH

    # Create necessary root node matrices
    Cpt_params = (True, get_Cpt_params(A, Cnodes, AggOp, T))
    T = scale_T(T, Cpt_params[1]['P_I'], Cpt_params[1]['I_F'])
    if A.symmetry == "nonsymmetric":
        TH = scale_T(TH, Cpt_params[1]['P_I'], Cpt_params[1]['I_F'])

    # Set the coarse-grid near-nullspace modes to the injected fine-grid
    # near-nullspace modes
    B = Cpt_params[1]['P_I'].T * levels[-1].B
    if A.symmetry == "nonsymmetric":
        BH = Cpt_params[1]['P_I'].T * levels[-1].BH

    # Smooth the tentative prolongator, so that its accuracy is greatly
    # improved for algebraically smooth error.
    fn, kwargs = unpack_arg(smooth[len(levels) - 1])
    if fn == 'energy':
        P = energy_prolongation_smoother(A, T, C, B, levels[-1].B,
                                         Cpt_params=Cpt_params, **kwargs)
    elif fn is None:
        P = T
    else:
        raise ValueError('unrecognized prolongation smoother method %s' %
                         str(fn))

    # Compute the restriction matrix R, which interpolates from the fine grid
    # to the coarse grid.  If A is nonsymmetric, then R must be constructed
    # based on A.H.  Otherwise R = P.H or P.T.
    symmetry = A.symmetry
    if symmetry == 'hermitian':
        R = P.H
    elif symmetry == 'symmetric':
        R = P.T
    elif symmetry == 'nonsymmetric':
        fn, kwargs = unpack_arg(smooth[len(levels) - 1])
        if fn == 'energy':
            R = energy_prolongation_smoother(AH, TH, C, BH, levels[-1].BH,
                                             Cpt_params=Cpt_params, **kwargs)
            R = R.H
        elif fn is None:
            R = T.H
        else:
            raise ValueError('unrecognized prolongation smoother method %s' %
                             str(fn))

    if keep:
        levels[-1].C = C                          # strength of connection matrix
        levels[-1].AggOp = AggOp                  # aggregation operator
        levels[-1].T = T                          # tentative prolongator
        levels[-1].Fpts = Cpt_params[1]['Fpts']   # Fpts
        levels[-1].P_I = Cpt_params[1]['P_I']     # Injection operator
        levels[-1].I_F = Cpt_params[1]['I_F']     # Identity on F-pts
        levels[-1].I_C = Cpt_params[1]['I_C']     # Identity on C-pts

    levels[-1].P = P                              # smoothed prolongator
    levels[-1].R = R                              # restriction operator
    levels[-1].Cpts = Cpt_params[1]['Cpts']       # Cpts (i.e., rootnodes)

    levels.append(multilevel_solver.level())
    A = R * A * P                                 # Galerkin operator
    A.symmetry = symmetry
    levels[-1].A = A
    levels[-1].B = B                              # right near nullspace candidates

    if A.symmetry == "nonsymmetric":
        levels[-1].BH = BH                        # left near nullspace candidates
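# --- Hypothetical driver sketch (simplified stand-in for the solver setup) --
# Shows how extend_hierarchy() is typically called in a loop until the coarse
# operator is small.  max_levels, max_coarse, and the per-level parameter
# lists (strength, aggregate, smooth, improve_candidates) are assumed to be
# supplied by the caller; this is not the actual rootnode_solver code.  The
# real setup also stamps A.symmetry and handles BH for nonsymmetric A.
levels = [multilevel_solver.level()]
levels[-1].A = A                      # fine-grid operator
levels[-1].B = B                      # fine-grid near nullspace candidates
while len(levels) < max_levels and levels[-1].A.shape[0] > max_coarse:
    extend_hierarchy(levels, strength, aggregate, smooth,
                     improve_candidates, diagonal_dominance, keep)
ml = multilevel_solver(levels)        # wrap the finished hierarchy in a solver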
def extend_hierarchy(levels, strength, aggregate, smooth, improve_candidates,
                     diagonal_dominance=False, keep=True):
    """Service routine to implement the strength of connection, aggregation,
    tentative prolongation construction, and prolongation smoothing.  Called by
    smoothed_aggregation_solver.
    """

    A = levels[-1].A
    B = levels[-1].B
    if A.symmetry == "nonsymmetric":
        AH = A.H.asformat(A.format)
        BH = levels[-1].BH

    # Compute the strength-of-connection matrix C, where a larger C[i, j]
    # denotes a stronger coupling between i and j.
    fn, kwargs = unpack_arg(strength[len(levels)-1])
    if fn == 'symmetric':
        C = symmetric_strength_of_connection(A, **kwargs)
    elif fn == 'classical':
        C = classical_strength_of_connection(A, **kwargs)
    elif fn == 'distance':
        C = distance_strength_of_connection(A, **kwargs)
    elif (fn == 'ode') or (fn == 'evolution'):
        if 'B' in kwargs:
            C = evolution_strength_of_connection(A, **kwargs)
        else:
            C = evolution_strength_of_connection(A, B, **kwargs)
    elif fn == 'energy_based':
        C = energy_based_strength_of_connection(A, **kwargs)
    elif fn == 'predefined':
        C = kwargs['C'].tocsr()
    elif fn == 'algebraic_distance':
        C = algebraic_distance(A, **kwargs)
    elif fn == 'affinity':
        C = affinity_distance(A, **kwargs)
    elif fn is None:
        C = A.tocsr()
    else:
        raise ValueError('unrecognized strength of connection method: %s' %
                         str(fn))
    levels[-1].complexity['strength'] = kwargs['cost'][0]

    # Avoid coarsening diagonally dominant rows
    flag, kwargs = unpack_arg(diagonal_dominance)
    if flag:
        C = eliminate_diag_dom_nodes(A, C, **kwargs)
        levels[-1].complexity['diag_dom'] = kwargs['cost'][0]

    # Compute the aggregation matrix AggOp (i.e., the nodal coarsening of A).
    # AggOp is a boolean matrix, where the sparsity pattern of the k-th column
    # denotes the fine-grid nodes agglomerated into the k-th coarse-grid node.
    fn, kwargs = unpack_arg(aggregate[len(levels)-1])
    if fn == 'standard':
        AggOp, Cnodes = standard_aggregation(C, **kwargs)
    elif fn == 'naive':
        AggOp, Cnodes = naive_aggregation(C, **kwargs)
    elif fn == 'lloyd':
        AggOp, Cnodes = lloyd_aggregation(C, **kwargs)
    elif fn == 'predefined':
        AggOp = kwargs['AggOp'].tocsr()
        Cnodes = kwargs['Cnodes']
    else:
        raise ValueError('unrecognized aggregation method %s' % str(fn))
    levels[-1].complexity['aggregation'] = kwargs['cost'][0] * (float(C.nnz)/A.nnz)

    # Improve near nullspace candidates by relaxing on A B = 0
    temp_cost = [0.0]
    fn, kwargs = unpack_arg(improve_candidates[len(levels)-1], cost=False)
    if fn is not None:
        b = np.zeros((A.shape[0], 1), dtype=A.dtype)
        B = relaxation_as_linear_operator((fn, kwargs), A, b, temp_cost) * B
        levels[-1].B = B
        if A.symmetry == "nonsymmetric":
            BH = relaxation_as_linear_operator((fn, kwargs), AH, b, temp_cost) * BH
            levels[-1].BH = BH

        levels[-1].complexity['candidates'] = temp_cost[0] * B.shape[1]

    # Compute the tentative prolongator, T, which is a tentative interpolation
    # matrix from the coarse grid to the fine grid.  T exactly interpolates
    # B_fine[:, 0:blocksize(A)] = T B_coarse[:, 0:blocksize(A)].
    # Orthogonalization complexity ~ 2nk^2, k = blocksize(A).
    temp_cost = [0.0]
    T, dummy = fit_candidates(AggOp, B[:, 0:blocksize(A)], cost=temp_cost)
    del dummy
    if A.symmetry == "nonsymmetric":
        TH, dummyH = fit_candidates(AggOp, BH[:, 0:blocksize(A)], cost=temp_cost)
        del dummyH

    levels[-1].complexity['tentative'] = temp_cost[0]/A.nnz

    # Create necessary root node matrices
    Cpt_params = (True, get_Cpt_params(A, Cnodes, AggOp, T))
    T = scale_T(T, Cpt_params[1]['P_I'], Cpt_params[1]['I_F'])
    levels[-1].complexity['tentative'] += T.nnz / float(A.nnz)
    if A.symmetry == "nonsymmetric":
        TH = scale_T(TH, Cpt_params[1]['P_I'], Cpt_params[1]['I_F'])
        levels[-1].complexity['tentative'] += TH.nnz / float(A.nnz)

    # Set the coarse-grid near-nullspace modes to the injected fine-grid
    # near-nullspace modes
    B = Cpt_params[1]['P_I'].T * levels[-1].B
    if A.symmetry == "nonsymmetric":
        BH = Cpt_params[1]['P_I'].T * levels[-1].BH

    # Smooth the tentative prolongator, so that its accuracy is greatly
    # improved for algebraically smooth error.
    fn, kwargs = unpack_arg(smooth[len(levels)-1])
    if fn == 'energy':
        P = energy_prolongation_smoother(A, T, C, B, levels[-1].B,
                                         Cpt_params=Cpt_params, **kwargs)
    elif fn is None:
        P = T
    else:
        raise ValueError('unrecognized prolongation smoother method %s' %
                         str(fn))
    levels[-1].complexity['smooth_P'] = kwargs['cost'][0]

    # Compute the restriction matrix R, which interpolates from the fine grid
    # to the coarse grid.  If A is nonsymmetric, then R must be constructed
    # based on A.H.  Otherwise R = P.H or P.T.
    symmetry = A.symmetry
    if symmetry == 'hermitian':
        R = P.H
    elif symmetry == 'symmetric':
        R = P.T
    elif symmetry == 'nonsymmetric':
        fn, kwargs = unpack_arg(smooth[len(levels)-1])
        if fn == 'energy':
            R = energy_prolongation_smoother(AH, TH, C, BH, levels[-1].BH,
                                             Cpt_params=Cpt_params, **kwargs)
            R = R.H
            levels[-1].complexity['smooth_R'] = kwargs['cost'][0]
        elif fn is None:
            R = T.H
        else:
            raise ValueError('unrecognized prolongation smoother method %s' %
                             str(fn))

    if keep:
        levels[-1].C = C                          # strength of connection matrix
        levels[-1].AggOp = AggOp                  # aggregation operator
        levels[-1].T = T                          # tentative prolongator
        levels[-1].Fpts = Cpt_params[1]['Fpts']   # Fpts
        levels[-1].P_I = Cpt_params[1]['P_I']     # Injection operator
        levels[-1].I_F = Cpt_params[1]['I_F']     # Identity on F-pts
        levels[-1].I_C = Cpt_params[1]['I_C']     # Identity on C-pts

    levels[-1].P = P                              # smoothed prolongator
    levels[-1].R = R                              # restriction operator
    levels[-1].Cpts = Cpt_params[1]['Cpts']       # Cpts (i.e., rootnodes)

    # Form coarse grid operator, get complexity
    levels[-1].complexity['RAP'] = mat_mat_complexity(R, A) / float(A.nnz)
    RA = R * A
    levels[-1].complexity['RAP'] += mat_mat_complexity(RA, P) / float(A.nnz)
    A = RA * P                                    # Galerkin operator, Ac = RAP
    A.symmetry = symmetry

    levels.append(multilevel_solver.level())
    levels[-1].A = A
    levels[-1].B = B                              # right near nullspace candidates

    if A.symmetry == "nonsymmetric":
        levels[-1].BH = BH                        # left near nullspace candidates
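# --- Hypothetical post-processing sketch ------------------------------------
# Convert the per-level complexity entries recorded above (each normalized by
# that level's A.nnz) into a single setup-cost estimate in fine-grid work
# units.  The normalization convention is inferred from the code above, and
# 'levels' is assumed to be the completed hierarchy with plain-dict
# complexity attributes; check against the actual solver before relying on it.
fine_nnz = float(levels[0].A.nnz)
setup_work_units = 0.0
for lvl in levels[:-1]:                       # coarsest level records no setup cost
    level_cost = sum(lvl.complexity.values())         # in units of lvl.A.nnz
    setup_work_units += level_cost * (lvl.A.nnz / fine_nnz)
print('approximate setup cost: %.1f fine-grid work units' % setup_work_units)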