def train_loader(self, index):
    """Build a full-batch training sequence for the nodes in ``index``."""
    y = self.graph.label[index]
    return FullBatchSequence(inputs=[self.cache.X],
                             y=y,
                             out_index=index,
                             device=self.data_device)
def config_train_data(self, index):
    """Full-batch training sequence over the cached features and edge list."""
    y = self.graph.label[index]
    model_inputs = [self.cache.feat, self.cache.edges]
    return FullBatchSequence(model_inputs, y,
                             out_index=index,
                             device=self.data_device)
def config_predict_data(self, edge_index):
    """Full-batch prediction sequence for the given edges (no labels)."""
    if isinstance(edge_index, (list, tuple)):
        # presumably a (positive, negative) pair of edge arrays — merge them
        edge_index = np.hstack(edge_index)
    model_inputs = [self.cache.feat, self.cache.edges]
    return FullBatchSequence(model_inputs,
                             out_index=edge_index,
                             device=self.data_device)
def config_train_data(self, index):
    """Full-batch training sequence; the cached adjacency is unpacked into inputs."""
    y = self.graph.label[index]
    return FullBatchSequence(inputs=[self.cache.feat, *self.cache.adj],
                             y=y,
                             out_index=index,
                             device=self.data_device)
def config_test_data(self, index):
    """Full-batch evaluation sequence over the cached features and graph object.

    ``escape=type(self.cache.g)`` presumably exempts the graph object from
    tensor conversion inside the sequence — TODO confirm.
    """
    y = self.graph.label[index]
    # NOTE(review): sibling methods pass device=self.data_device here;
    # confirm that self.device is intentional for this class.
    return FullBatchSequence([self.cache.feat, self.cache.g],
                             y,
                             out_index=index,
                             device=self.device,
                             escape=type(self.cache.g))
def config_train_data(self, index):
    """Training sequence pairing cached inputs with multiple supervision targets."""
    c = self.cache
    y = self.graph.label[index]
    return FullBatchSequence(inputs=[c.feat, c.adj, c.knn_graph],
                             y=[y, c.pseudo_labels, c.node_pairs],
                             out_index=index,
                             device=self.data_device)
def config_test_data(self, edge_index):
    """Evaluation sequence for link prediction over the given edges."""
    if isinstance(edge_index, (list, tuple)):
        edge_index = np.hstack(edge_index)
    # Ground-truth labels: look up each (u, v) in the adjacency matrix and
    # binarize, so any positive weight counts as an existing edge.
    y = self.graph.adj_matrix[edge_index[0], edge_index[1]].A1
    y[y > 0] = 1
    return FullBatchSequence([self.cache.feat, self.cache.adj],
                             y=y,
                             out_index=edge_index,
                             device=self.data_device)
def config_train_data(self, edge_index):
    """Training sequence for link prediction; caches the training edges."""
    if isinstance(edge_index, (list, tuple)):
        pos_edges = edge_index[0]  # first element holds the positive edges
    else:
        pos_edges = edge_index
    pos_edges = gf.astensor(pos_edges, device=self.data_device)
    self.register_cache(edges=pos_edges)
    return FullBatchSequence([self.cache.feat, pos_edges],
                             out_index=edge_index,
                             device=self.data_device)
def config_train_data(self, edge_index):
    """Training sequence over an adjacency rebuilt from the training edges only."""
    if isinstance(edge_index, (list, tuple)):
        pos_edges = edge_index[0]  # first element holds the positive edges
    else:
        pos_edges = edge_index
    # Recover the original weights of the training edges, build a sparse
    # adjacency from just those edges, and apply the configured transform.
    weights = self.graph.adj_matrix[pos_edges[0], pos_edges[1]].A1
    adj = self.transform.adj_transform(gf.edge_to_sparse_adj(pos_edges, weights))
    adj = gf.astensor(adj, device=self.data_device)
    self.register_cache(adj=adj)
    return FullBatchSequence([self.cache.feat, adj],
                             out_index=edge_index,
                             device=self.data_device)
def config_train_data(self, index):
    """Build the training sequence and initialize the model's label weight.

    The initial ``weight_y`` is the ridge-regression solution
    ``(X^T X + eps*I)^-1 X^T Y`` mapping features to one-hot labels.
    """
    labels = self.graph.label[index]
    # ==========================================================
    # initial weight_y is obtained by (regularized) linear regression
    feat = self.cache.feat.to(self.device)
    labels = gf.astensor(labels, device=self.device)
    # Gram matrix with a 1e-5 ridge term — symmetric positive definite,
    # so a Cholesky factorization always exists.
    A = torch.mm(feat.t(), feat) + 1e-05 * torch.eye(feat.size(1), device=feat.device)
    labels_one_hot = feat.new_zeros(feat.size(0), self.graph.num_classes)
    # NOTE(review): this fills rows 0..len(index)-1 of the one-hot matrix,
    # which only lines up with `feat` if the training nodes are the first
    # rows — confirm against how `index` is produced.
    labels_one_hot[torch.arange(labels.size(0)), labels] = 1
    # Bug fix: torch.cholesky_inverse expects a Cholesky *factor*, not the
    # matrix itself — factor A first so the product is truly A^{-1}.
    A_inv = torch.cholesky_inverse(torch.linalg.cholesky(A))
    self.model.init_weight_y = torch.mm(torch.mm(A_inv, feat.t()), labels_one_hot)
    # ==========================================================
    sequence = FullBatchSequence([self.cache.feat, self.cache.g],
                                 labels,
                                 out_index=index,
                                 device=self.data_device,
                                 escape=type(self.cache.g))
    return sequence
def config_train_data(self, index):
    """Training sequence over only the indexed rows of features and labels."""
    y = self.graph.label[index]
    x = self.cache.feat[index]
    return FullBatchSequence(x, y, device=self.data_device)