def __init__(self, graphs, max_nodes, normalize=False):
    """Build padded dense representations for a list of networkx graphs.

    Args:
        graphs: list of networkx graphs. Every node is expected to carry a
            'feat' vector and an 'rwr' row (random-walk scores — presumably
            random walk with restart; confirm against the preprocessing code),
            and each graph a 'label' graph attribute.
        max_nodes: features and RWR matrices are zero-padded up to this node
            count. Assumes every graph has at most max_nodes nodes — TODO
            confirm, otherwise the padded assignments below raise.
        normalize: if True, pass each adjacency matrix through
            normalize_adjacency before storing it.
    """
    self.adjs = []
    self.features = []
    self.rwrs = []
    self.labels = []
    self.max_nodes = max_nodes

    # Feature dimensionality taken from an arbitrary node of the first graph;
    # assumes all nodes share the same 'feat' length — TODO confirm.
    first_graph = graphs[0]
    first_node = next(iter(first_graph.nodes))
    self.feat_dim = len(first_graph.nodes[first_node]['feat'])

    for graph in graphs:
        num_nodes = graph.number_of_nodes()
        # nx.to_numpy_matrix was removed in networkx 3.0; to_numpy_array
        # produces the same values as an ndarray directly, so the extra
        # np.array(...) wrapper is no longer needed.
        adj = nx.to_numpy_array(graph)
        if normalize:
            adj = normalize_adjacency(adj)
        # NOTE(review): adjacency is NOT padded to max_nodes, unlike the
        # feature and RWR matrices — preserved from the original; confirm
        # downstream code expects variable-size adjacencies.
        self.adjs.append(adj)
        self.labels.append(graph.graph['label'])

        # Node features, zero-padded to (max_nodes, feat_dim).
        feat = np.zeros((self.max_nodes, self.feat_dim), dtype=float)
        feat[:num_nodes] = np.array(
            list(nx.get_node_attributes(graph, 'feat').values()))
        self.features.append(feat)

        # RWR scores, zero-padded to (max_nodes, max_nodes), then each row
        # sorted in descending order (-sort(-x) sorts high-to-low).
        rwr = np.zeros((self.max_nodes, self.max_nodes), dtype=float)
        src_array = np.array(
            list(nx.get_node_attributes(graph, 'rwr').values()))
        rwr[:src_array.shape[0], :src_array.shape[0]] = src_array
        self.rwrs.append(-np.sort(-rwr, axis=1))
    # Dropped the original's redundant trailing reassignment of
    # self.feat_dim — features[0].shape[1] equals it by construction.
# Project-local helpers: data loading, accuracy metric, adjacency normalizer.
from utils import load_data, accuracy, normalize_adjacency
from models import GNN

# Hyperparameters
epochs = 100
n_hidden_1 = 64
n_hidden_2 = 32
learning_rate = 0.01
dropout_rate = 0.5

# Load the dataset: node features, adjacency matrix, one-hot class labels.
features, adj, class_labels = load_data()
n = adj.shape[0]  # Number of nodes
n_class = class_labels.shape[1]

# Normalize adjacency matrix
adj = normalize_adjacency(adj)

# Shuffle node indices, then carve out a 60/20/20 train/val/test split.
idx = np.random.permutation(n)
train_end = int(0.6 * n)
val_end = int(0.8 * n)
idx_train = idx[:train_end]
idx_val = idx[train_end:val_end]
idx_test = idx[val_end:]

# Transform the numpy matrices/vectors to torch tensors.
features = torch.FloatTensor(features)
# Class labels: one-hot rows collapsed to integer class ids.
y = torch.LongTensor(np.argmax(class_labels, axis=1))
adj = torch.FloatTensor(adj)
idx_train = torch.LongTensor(idx_train)
idx_val = torch.LongTensor(idx_val)
idx_test = torch.LongTensor(idx_test)