def sparse_lanczos(A, k):
    """Rank-k approximation of a square matrix A via the Lanczos iteration.

    Builds an orthonormal Krylov basis Q (with full reorthogonalization at
    every step) and returns ``A2 = Q @ (Q.T @ A @ Q) @ Q.T``, a sparse
    rank-k approximation of A.

    Parameters
    ----------
    A : (n, n) sparse or dense matrix
        Matrix to approximate; converted to LIL format internally.
    k : int
        Number of Lanczos steps, i.e. the rank of the approximation.

    Returns
    -------
    (n, n) sparse matrix
        The rank-k approximation of A.

    Notes
    -----
    NOTE(review): the start vector comes from an *unseeded* ``sp.random``
    (default density 0.01), so results are not reproducible, and for small
    n the vector can be entirely zero, making the first normalization
    divide by zero — consider seeding/densifying it.  There is also no
    breakdown guard: if the Krylov space degenerates, ``beta`` becomes 0
    and ``q / beta`` fails.
    """
    q = sp.random(A.shape[0], 1)
    n = A.shape[0]
    Q = sp.lil_matrix(np.zeros((n, k + 1)))
    A = sp.lil_matrix(A)
    Q[:, 0] = q / sparsenorm(q)
    alpha = 0
    beta = 0
    for i in range(k):
        # Three-term recurrence: w = A q_i - beta_i q_{i-1}.
        if i == 0:
            q = A * Q[:, i]
        else:
            q = A * Q[:, i] - beta * Q[:, i - 1]
        alpha = q.T * Q[:, i]  # alpha_i = <w, q_i> (1x1 sparse matrix)
        q = q - Q[:, i] * alpha
        q = q - Q[:, :i] * Q[:, :i].T * q  # full reorthogonalization
        beta = sparsenorm(q)
        Q[:, i + 1] = q / beta
        # (removed leftover debug print of the iteration index)
    Q = Q[:, :k]
    Sigma = Q.T * A * Q  # k x k projection of A onto the Krylov basis
    A2 = Q[:, :k] * Sigma[:k, :k] * Q[:, :k].T
    return A2
def sparse_lanczos(A, k):
    """Rank-k approximation of a square matrix A via the Lanczos iteration.

    Builds an orthonormal Krylov basis Q (with full reorthogonalization at
    every step) and returns ``A2 = Q @ (Q.T @ A @ Q) @ Q.T``, a sparse
    rank-k approximation of A.

    Parameters
    ----------
    A : (n, n) sparse or dense matrix
        Matrix to approximate; converted to LIL format internally.
    k : int
        Number of Lanczos steps, i.e. the rank of the approximation.

    Returns
    -------
    (n, n) sparse matrix
        The rank-k approximation of A.

    Notes
    -----
    NOTE(review): the start vector comes from an *unseeded* ``sp.random``
    (default density 0.01), so results are not reproducible, and for small
    n the vector can be entirely zero, making the first normalization
    divide by zero — consider seeding/densifying it.  There is also no
    breakdown guard: if the Krylov space degenerates, ``beta`` becomes 0
    and ``q / beta`` fails.
    """
    q = sp.random(A.shape[0], 1)
    n = A.shape[0]
    Q = sp.lil_matrix(np.zeros((n, k + 1)))
    A = sp.lil_matrix(A)
    Q[:, 0] = q / sparsenorm(q)
    alpha = 0
    beta = 0
    for i in range(k):
        # Three-term recurrence: w = A q_i - beta_i q_{i-1}.
        if i == 0:
            q = A * Q[:, i]
        else:
            q = A * Q[:, i] - beta * Q[:, i - 1]
        alpha = q.T * Q[:, i]  # alpha_i = <w, q_i> (1x1 sparse matrix)
        q = q - Q[:, i] * alpha
        q = q - Q[:, :i] * Q[:, :i].T * q  # full reorthogonalization
        beta = sparsenorm(q)
        Q[:, i + 1] = q / beta
        # (removed leftover debug print of the iteration index)
    Q = Q[:, :k]
    Sigma = Q.T * A * Q  # k x k projection of A onto the Krylov basis
    A2 = Q[:, :k] * Sigma[:k, :k] * Q[:, :k].T
    return A2
def mix_prop(adj, features, sparseinputs=False):
    """Sampling distribution mixing adjacency column norms with feature row norms.

    Parameters
    ----------
    adj : (m, n) sparse matrix
        Adjacency matrix; per-column 2-norms are taken.
    features : (n, d) array or sparse matrix
        Node features; per-row 2-norms are taken.  Must be sparse iff
        ``sparseinputs`` is True.
    sparseinputs : bool, default False
        Whether ``features`` is a sparse matrix.

    Returns
    -------
    (n,) ndarray
        Probabilities proportional to ``||adj[:, j]|| * ||features[j, :]||``,
        normalized to sum to 1.
    """
    adj_column_norm = sparsenorm(adj, axis=0)
    if sparseinputs:
        features_row_norm = sparsenorm(features, axis=1)
    else:
        features_row_norm = np.linalg.norm(features, axis=1)
    mix_norm = adj_column_norm * features_row_norm
    # ndarray.sum() rather than the builtin sum(): one vectorized C pass
    # instead of a Python-level loop over array elements.
    return mix_norm / mix_norm.sum()
def mix_prop(adj, features, sparseinputs=False):
    """Sampling distribution mixing adjacency column norms with feature row norms.

    Parameters
    ----------
    adj : (m, n) sparse matrix
        Adjacency matrix; per-column 2-norms are taken.
    features : (n, d) array or sparse matrix
        Node features; per-row 2-norms are taken.  Must be sparse iff
        ``sparseinputs`` is True.
    sparseinputs : bool, default False
        Whether ``features`` is a sparse matrix.

    Returns
    -------
    (n,) ndarray
        Probabilities proportional to ``||adj[:, j]|| * ||features[j, :]||``,
        normalized to sum to 1.
    """
    adj_column_norm = sparsenorm(adj, axis=0)
    if sparseinputs:
        features_row_norm = sparsenorm(features, axis=1)
    else:
        features_row_norm = np.linalg.norm(features, axis=1)
    mix_norm = adj_column_norm * features_row_norm
    # ndarray.sum() rather than the builtin sum(): one vectorized C pass
    # instead of a Python-level loop over array elements.
    return mix_norm / mix_norm.sum()
def column_prop(adj):
    """Sampling distribution proportional to the 2-norm of each column of adj.

    Parameters
    ----------
    adj : (m, n) sparse matrix

    Returns
    -------
    (n,) ndarray
        Per-column 2-norms of ``adj`` normalized to sum to 1.
    """
    column_norm = sparsenorm(adj, axis=0)
    # ndarray.sum() rather than the builtin sum(): one vectorized C pass
    # instead of a Python-level loop over array elements.
    return column_norm / column_norm.sum()
def column_prop(adj):
    """Sampling distribution proportional to the 2-norm of each column of adj.

    Parameters
    ----------
    adj : (m, n) sparse matrix

    Returns
    -------
    (n,) ndarray
        Per-column 2-norms of ``adj`` normalized to sum to 1.
    """
    column_norm = sparsenorm(adj, axis=0)
    # ndarray.sum() rather than the builtin sum(): one vectorized C pass
    # instead of a Python-level loop over array elements.
    return column_norm / column_norm.sum()