def forward(self, nodes, adjs):
    """Two-stage DiffPool forward pass over a dense input graph.

    Args:
        nodes: node feature matrix, shape (N, F); N = nodes.size(0).
            Assumes sage1/sage2 emit width-128 features — TODO confirm.
        adjs: dense adjacency matrix, shape (N, N).

    Returns:
        Tuple (x, link_loss, ent_loss, nodes_out, edge_out):
        x — flattened final embedding from the second pooling stage;
        link_loss / ent_loss — summed DiffPool auxiliary losses;
        nodes_out — tanh-activated node features of the 128-cluster graph;
        edge_out — dense (128, 128) adjacency of the pooled graph.

    Improvements over the previous revision: removed dead commented-out
    code and a no-op triple-quoted string block; behavior is unchanged.
    """
    # Stage 1: SAGE embeddings + soft cluster assignment, pool N -> 128 clusters.
    edge, _ = dense_to_sparse(adjs)
    x = self.sage1(nodes, edge)  # cluster input features
    s = self.sage2(nodes, edge)  # cluster-assignment logits
    s = torch.reshape(s, (1, nodes.size(0), 128))
    x = torch.reshape(x, (1, nodes.size(0), 128))
    adjs = torch.reshape(adjs, (1, nodes.size(0), nodes.size(0)))
    x, edge, link_loss1, ent_loss1 = dense_diff_pool(x, adjs, s)

    # Drop the batch dim; the pooled graph has exactly 128 nodes.
    x = torch.reshape(x, (128, 128))
    edge = torch.reshape(edge, (128, 128))
    edge_out = edge
    edge, _ = dense_to_sparse(edge)

    x = self.sage3(x, edge)  # presumably projects 128 -> 2 features; see reshape below
    nodes_out = torch.tanh(x)

    # Stage 2: pool the 128-node graph into a single cluster (s is all-ones).
    # NOTE(review): the pre-tanh x, not nodes_out, is pooled here — the sibling
    # forward() in this file uses the tanh output instead; confirm intended.
    edge = torch.Tensor(
        convert.to_scipy_sparse_matrix(edge).todense()).cuda()
    edge = torch.reshape(edge, (1, 128, 128))
    x = torch.reshape(x, (1, 128, 2))
    s = torch.ones(1, 128, 1).cuda()
    x, edge, link_loss2, ent_loss2 = dense_diff_pool(x, edge, s)
    x = x.reshape(-1)

    link_loss = link_loss1 + link_loss2
    ent_loss = ent_loss1 + ent_loss2
    return x, link_loss, ent_loss, nodes_out, edge_out
def _prep_batch(self, im, adj):
    """Pack per-sample dense graphs into one PyG Batch.

    Builds a Data object per sample (features from im, edges from the
    dense adjacency) and moves only the batch-assignment vector to the GPU.
    """
    graphs = [
        Data(x=im[idx], edge_index=dense_to_sparse(adj[idx])[0])
        for idx in range(im.shape[0])
    ]
    batch = Batch.from_data_list(graphs)
    batch.batch = batch.batch.cuda(self.gpu, non_blocking=True)
    return batch
def embd_loss(edge_idx, z):
    """Unsupervised embedding loss with uniform negative sampling.

    Positive term pulls embeddings of connected nodes together; the negative
    term pushes each source away from one randomly drawn node per edge.

    Args:
        edge_idx: dense adjacency matrix, converted to edge_index internally.
        z: node embedding matrix, shape (num_nodes, dim).

    Returns:
        Scalar loss tensor (positive + negative term).
    """
    eps = 1e-8
    src, dst = dense_to_sparse(edge_idx)[0]
    # -log sigmoid(<z_src, z_dst>) over real edges.
    pos_score = (z[src] * z[dst]).sum(dim=-1)
    loss_pos = -torch.log(pos_score.sigmoid() + eps).mean()
    # One uniformly random negative endpoint per positive edge.
    neg_dst = torch.randint(z.size(0), (src.size(0), ),
                            dtype=torch.long, device=src.device)
    neg_score = (-(z[src] * z[neg_dst])).sum(dim=-1)
    loss_neg = -torch.log(neg_score.sigmoid() + eps).mean()
    return loss_pos + loss_neg
def download(self):
    """Generate 500 synthetic graphs and save the collated dataset.

    Relies on self.generate() to populate self.labels and self.adj_matrix;
    the collated (data, slices) pair is written to self.raw_paths[0].
    """
    self.generate()
    data_list = []
    for idx in range(500):
        sample = Data()
        sample.y = self.labels[idx].view(-1, 2)
        # Dense adjacency -> COO edge_index.
        sample.edge_index = sp.dense_to_sparse(self.adj_matrix[idx])[0]
        sample.name = idx
        sample.num_nodes = len(self.adj_matrix[idx])
        data_list.append(sample)
    collated, slices = self.collate(data_list)
    torch.save((collated, slices), self.raw_paths[0])
def download(self):
    """Build 1000 random triangle-counting graphs and save the collated dataset.

    Each sample's label y is the triangle count of a random graph generated
    from an incrementing seed; empty graphs (no edges) are re-drawn.
    The collated (data, slices) pair is written to self.raw_paths[0].
    """
    data_list = []
    seed = 0
    for idx in range(1000):
        adj, n_tri = generate_graph(n=self.num_node, seed=seed)
        seed += 1
        # Re-draw until the graph has at least one edge.
        # NOTE(review): the first retry skips one seed value (seed is bumped
        # both after the initial draw and before the retry) — kept as-is.
        while adj.sum() == 0:
            seed += 1
            adj, n_tri = generate_graph(n=self.num_node, seed=seed)
        adj = torch.from_numpy(adj).float()
        sample = Data()
        sample.y = torch.tensor([n_tri]).view(-1, 1)
        sample.edge_index = sp.dense_to_sparse(adj)[0]
        sample.name = idx
        sample.num_nodes = len(adj)
        data_list.append(sample)
    collated, slices = self.collate(data_list)
    torch.save((collated, slices), self.raw_paths[0])
def getSparseData(x, adj, mask):
    """Build a PyG Data object from dense inputs, masking node features.

    NOTE(review): edge_index is built from the FULL adjacency while x keeps
    only masked rows — edge indices may refer to dropped nodes; confirm that
    callers rely on this.
    """
    return Data(x=x[mask], edge_index=sp.dense_to_sparse(adj)[0])
def forward(self, inputs):
    """CNN feature extractor followed by a two-stage DiffPool GNN head.

    Args:
        inputs: image batch; after five conv + 2x2 max-pool stages the
            feature map is reshaped to a fixed (256, 256) node matrix —
            assumes the input spatial size makes this reshape valid.

    Returns:
        Tuple (x, link_loss, ent_loss, nodes_out, edge_out, edge_dense):
        x — flattened final embedding; link/ent losses — summed DiffPool
        auxiliary losses; nodes_out — tanh node features of the pooled
        128-node graph; edge_out — its row-wise binarized dense adjacency;
        edge_dense — despite the name, the sparse edge_index of that graph.

    Improvements over the previous revision: hoisted the loop-invariant
    ones/zeros GPU tensors out of the binarization loop (they were
    allocated on every iteration) and removed dead debug comments;
    behavior is unchanged.
    """
    # Convolutional trunk: five conv + 2x2 max-pool stages.
    x = F.relu(self.conv1(inputs))
    x = F.max_pool2d(x, 2, 2)
    x = F.relu(self.conv2(x))
    x = F.max_pool2d(x, 2, 2)
    x = F.relu(self.conv3(x))
    x = F.max_pool2d(x, 2, 2)
    x = F.relu(self.conv4(x))
    x = F.max_pool2d(x, 2, 2)
    x = F.relu(self.conv5(x))
    x = F.max_pool2d(x, 2, 2)

    # Treat the flattened features as 256 nodes with a fixed prior adjacency
    # (ori_adjacen is a module-level edge list — TODO confirm its shape).
    org = torch.reshape(x, (256, 256))
    edge = torch.Tensor(ori_adjacen).long().t().contiguous().cuda()

    # Stage 1: SAGE embeddings + assignment, pool 256 nodes -> 128 clusters.
    x = self.sage1(org, edge)
    s = self.sage2(org, edge)
    s = torch.reshape(s, (1, 256, 128))
    x = torch.reshape(x, (1, 256, 128))
    edge = torch.Tensor(
        convert.to_scipy_sparse_matrix(edge).todense()).cuda()
    edge = torch.reshape(edge, (1, 256, 256))
    x, edge, link_loss1, ent_loss1 = dense_diff_pool(x, edge, s)
    x = torch.reshape(x, (128, 128))
    edge = torch.reshape(edge, (128, 128))

    # Binarize each row to its max entries. edge_out ALIASES edge, so the
    # in-place writes also binarize the adjacency used downstream.
    edge_out = edge
    ones = torch.ones(1, 128).cuda()    # loop invariants, hoisted
    zeros = torch.zeros(1, 128).cuda()
    for i in range(edge_out.size(0)):
        edge_out[i, :] = torch.where(
            edge_out[i, :] == torch.max(edge_out[i, :]), ones, zeros)
    edge, _ = dense_to_sparse(edge)

    x = self.sage3(x, edge)  # presumably projects 128 -> 2 features; see reshape below
    nodes_out = torch.tanh(x)
    x = nodes_out
    edge_dense = edge  # sparse edge_index of the binarized 128-node graph

    # Stage 2: pool the 128-node graph into one cluster (s is all-ones).
    edge = torch.Tensor(
        convert.to_scipy_sparse_matrix(edge).todense()).cuda()
    edge = torch.reshape(edge, (1, 128, 128))
    x = torch.reshape(x, (1, 128, 2))
    s = torch.ones(1, 128, 1).cuda()
    x, edge, link_loss2, ent_loss2 = dense_diff_pool(x, edge, s)
    x = x.reshape(-1)

    link_loss = link_loss1 + link_loss2
    ent_loss = ent_loss1 + ent_loss2
    return x, link_loss, ent_loss, nodes_out, edge_out, edge_dense