import numpy as np
import scipy.sparse
import torch
from scipy.sparse import coo_matrix

import coarsening  # cnn_graph-style coarsening utilities (assumed local modules)
import graph

# load_hcp_example, perm_data_time, and create_sq_mesh are assumed to be
# provided elsewhere in this project.


def load_hcp_tcgn(device):
    """Load HCP time series, coarsen the parcellation graph, and build
    rescaled Laplacians as dense torch tensors (sparse variant optional)."""
    time_series, labels, As = load_hcp_example()

    normalized_laplacian = True
    coarsening_levels = 4

    graphs, perm = coarsening.coarsen(As[0], levels=coarsening_levels,
                                      self_connections=False)

    # Dense rescaled Laplacians, one per coarsening level, on the target device.
    L = [
        torch.tensor(
            graph.rescale_L(
                graph.laplacian(A, normalized=normalized_laplacian).todense(),
                lmax=2),
            dtype=torch.float).to(device)
        for A in graphs
    ]

    # The same Laplacians in sparse COO form.
    L_sparse = list()
    for A in graphs:
        g = graph.rescale_L(graph.laplacian(A, normalized=normalized_laplacian),
                            lmax=2)
        coo = coo_matrix(g)
        indices = np.vstack((coo.row, coo.col))
        i = torch.LongTensor(indices)
        v = torch.FloatTensor(coo.data)
        # torch.sparse_coo_tensor replaces the deprecated torch.sparse.FloatTensor.
        a = torch.sparse_coo_tensor(i, v, torch.Size(coo.shape)).to(device)
        L_sparse.append(a)

    # idx_train = range(17*512)
    idx_train = range(int(0.8 * time_series.shape[0]))
    print('Size of train set: {}'.format(len(idx_train)))
    idx_test = range(len(idx_train), time_series.shape[0])
    print('Size of test set: {}'.format(len(idx_test)))
    # idx_train = range(5*512)
    # idx_test = range(len(idx_train), 10*512)

    train_data = time_series[idx_train]
    train_labels = labels[idx_train]
    test_data = time_series[idx_test]
    test_labels = labels[idx_test]

    # Reorder the node dimension to match the coarsening permutation.
    train_data = perm_data_time(train_data, perm)
    test_data = perm_data_time(test_data, perm)

    sparse = False
    if sparse:
        laplacian = L_sparse
    else:
        laplacian = L

    return laplacian, train_data, test_data, train_labels, test_labels
def load_hcp_tcgn():
    """Variant of the loader above that keeps the Laplacians as scipy
    matrices and uses fixed 5*512-sample train and test splits."""
    time_series, labels, As = load_hcp_example()

    normalized_laplacian = True
    coarsening_levels = 4

    graphs, perm = coarsening.coarsen(As[0], levels=coarsening_levels,
                                      self_connections=False)
    L = [graph.laplacian(A, normalized=normalized_laplacian) for A in graphs]

    # idx_train = range(40*512)
    # idx_test = range(len(idx_train), time_series.shape[0])
    idx_train = range(5 * 512)
    idx_test = range(len(idx_train), 10 * 512)

    train_data = time_series[idx_train]
    train_labels = labels[idx_train]
    test_data = time_series[idx_test]
    test_labels = labels[idx_test]

    train_data = perm_data_time(train_data, perm)
    test_data = perm_data_time(test_data, perm)

    return L, train_data, test_data, train_labels, test_labels
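# Usage sketch (an assumption, not part of the original code). Note that both
# loader definitions above share one name, so in a single module the
# zero-argument version is the binding in effect.
def _demo_load_hcp():
    L, train_data, test_data, train_labels, test_labels = load_hcp_tcgn()
    print('{} Laplacians, train {}, test {}'.format(
        len(L), train_data.shape, test_data.shape))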
def create_graph():
    """Build a 28x28 grid graph with 12-nearest-neighbor edges; return scipy
    Laplacians for each coarsening level plus the coarsening permutation."""
    def grid_graph(m, corners=False):
        z = graph.grid(m)
        dist, idx = graph.distance_sklearn_metrics(z, k=number_edges, metric=metric)
        A = graph.adjacency(dist, idx)

        # Connections are only vertical or horizontal on the grid.
        # Corner vertices are connected to 2 neighbors only.
        if corners:
            A = A.toarray()
            A[A < A.max() / 1.5] = 0
            A = scipy.sparse.csr_matrix(A)
            print('{} edges'.format(A.nnz))

        print("{} > {} edges".format(A.nnz // 2, number_edges * m**2 // 2))
        return A

    number_edges = 12
    metric = 'euclidean'
    normalized_laplacian = True
    coarsening_levels = 4

    A = grid_graph(28, corners=False)
    # A = graph.replace_random_edges(A, 0)
    graphs, perm = coarsening.coarsen(A, levels=coarsening_levels,
                                      self_connections=False)
    L = [graph.laplacian(A, normalized=normalized_laplacian) for A in graphs]
    # graph.plot_spectrum(L)
    del A
    return L, perm
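# A small helper sketch (an assumption): with cnn_graph-style coarsening,
# coarsen() returns one adjacency matrix per level plus a permutation over
# the padded finest graph, so the pyramid can be inspected like this.
def _describe_pyramid(graphs, perm):
    for level, A in enumerate(graphs):
        print('Level {}: {} nodes, {} edges'.format(level, A.shape[0], A.nnz // 2))
    print('Permutation covers {} padded nodes'.format(len(perm)))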
def init_GCN_params():
    """Initialize weights and hyper-parameters for a simple spectral GCN
    on a 28x28 square mesh."""
    A = scipy.sparse.csr_matrix(create_sq_mesh(28, 28))
    L = graph.laplacian(A)
    _, U = graph.fourier(L)

    hyper = dict()
    hyper['NFEATURES'] = 28**2
    hyper['NCLASSES'] = 10
    hyper['F'] = 15  # number of spectral feature maps
    hyper['U'] = U   # graph Fourier basis
    hyper['L'] = L

    params = dict()
    params['W1'] = 0.1 * np.random.randn(hyper['NFEATURES'], hyper['F'], 1)
    params['b1'] = 0.001 * np.random.randn(1, hyper['F'], 1)
    params['W2'] = 0.1 * np.random.randn(hyper['F'] * hyper['NFEATURES'],
                                         hyper['NCLASSES'])
    params['b2'] = 0.001 * np.random.randn(hyper['NCLASSES'])
    return params, hyper
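# A minimal sketch (an assumption, not the original training code) of how the
# parameters above could be consumed: a non-parametric spectral filter W1 acts
# in the graph Fourier basis U, followed by a ReLU and a dense classifier W2.
# The function name and flattening order are illustrative choices.
def gcn_forward_sketch(x, params, hyper):
    U = hyper['U']                       # graph Fourier basis, (NFEATURES, NFEATURES)
    x_hat = U.T @ x                      # graph Fourier transform of the signal
    # W1[:, f, 0] holds one spectral filter per feature map f.
    feat = np.stack(
        [U @ (params['W1'][:, f, 0] * x_hat) for f in range(hyper['F'])],
        axis=1)                          # (NFEATURES, F)
    feat = np.maximum(feat + params['b1'][:, :, 0], 0)   # bias + ReLU
    logits = feat.reshape(-1) @ params['W2'] + params['b2']
    return logits                        # (NCLASSES,)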
def create_graph(device):
    """Grid-graph variant with 8-nearest-neighbor edges that returns rescaled
    Laplacians as dense torch tensors on the given device."""
    def grid_graph(m, corners=False):
        z = graph.grid(m)
        dist, idx = graph.distance_sklearn_metrics(z, k=number_edges, metric=metric)
        A = graph.adjacency(dist, idx)
        # A = sp.random(A.shape[0], A.shape[0], density=0.01, format="csr",
        #               data_rvs=lambda s: np.random.uniform(0, 0.5, size=s))

        # Connections are only vertical or horizontal on the grid.
        # Corner vertices are connected to 2 neighbors only.
        if corners:
            A = A.toarray()
            A[A < A.max() / 1.5] = 0
            A = scipy.sparse.csr_matrix(A)
            print('{} edges'.format(A.nnz))

        print("{} > {} edges".format(A.nnz // 2, number_edges * m**2 // 2))
        return A

    number_edges = 8
    metric = 'euclidean'
    normalized_laplacian = True
    coarsening_levels = 4

    A = grid_graph(28, corners=False)
    A = graph.replace_random_edges(A, 0)
    graphs, perm = coarsening.coarsen(A, levels=coarsening_levels,
                                      self_connections=False)
    L = [
        torch.tensor(
            graph.rescale_L(
                graph.laplacian(A, normalized=normalized_laplacian).todense(),
                lmax=2),
            dtype=torch.float).to(device)
        for A in graphs
    ]
    # graph.plot_spectrum(L)
    del A
    return L, perm
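# Usage sketch (an assumption): build the grid-graph pyramid on whatever
# device is available; perm is what perm_data-style helpers use to reorder
# input pixels to match the padded, coarsenable node ordering.
def _demo_create_graph():
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    L, perm = create_graph(device)
    print('{} rescaled Laplacians; finest graph has {} nodes'.format(
        len(L), L[0].shape[0]))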