Example #1
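# Note: this snippet comes from a larger script; it assumes the usual imports,
# e.g. `import torch` and `import torch.nn.functional as F`, plus the project's
# own helpers (get_planetoid_dataset, random_planetoid_splits, run, Net) and a
# parsed `args` namespace.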
    def forward(self, data, pos_edge_index, neg_edge_index):
        x, edge_index = data.x, data.train_edge_index
        # layer 0 (input features)
        # masked_node = x[data.cold_mask_node]
        # s1 = self.score1(x, masked_node)
        # layer 1
        x = F.relu(self.conv1(x, edge_index))
        # masked_node = F.relu(self.conv1(masked_node, torch.zeros([2, 1], dtype=edge_index.dtype, device=edge_index.device)))
        # s2 = self.score2(x, masked_node)

        # layer 2
        x = self.conv2(x, edge_index)
        # masked_node = self.conv2(masked_node, torch.zeros([2, 1], dtype=edge_index.dtype, device=edge_index.device))
        # s3 = self.score2(x, masked_node)

        # x[data.cold_mask_node] = masked_node
        total_edge_index = torch.cat([pos_edge_index, neg_edge_index], dim=-1)
        x_j = torch.index_select(x, 0, total_edge_index[0])
        x_i = torch.index_select(x, 0, total_edge_index[1])
        return torch.einsum("ef,ef->e", x_i, x_j)

        # return F.softmax((s1+s2+s3), 1)
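
For reference, the einsum in the return statement computes one dot-product score per candidate edge, i.e. (x_i * x_j).sum(dim=-1). A minimal self-contained sketch (standalone tensors, not part of the original Net):

import torch

x = torch.randn(4, 16)                          # 4 node embeddings of size 16
total_edge_index = torch.tensor([[0, 1, 2],     # source nodes
                                 [1, 2, 3]])    # target nodes
x_j = torch.index_select(x, 0, total_edge_index[0])
x_i = torch.index_select(x, 0, total_edge_index[1])
scores = torch.einsum("ef,ef->e", x_i, x_j)     # shape [3]: one score per edge
assert torch.allclose(scores, (x_i * x_j).sum(dim=-1))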


dataset = get_planetoid_dataset(args.dataset, args.normalize_features)
dataset = dataset.shuffle()
permute_masks = random_planetoid_splits if args.random_splits else None
run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
    args.early_stopping, permute_masks)
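
The forward above returns raw scores (logits) for the concatenated positive and negative edges, so a typical training step would pair them with 1/0 labels and a logistic loss. A hypothetical sketch, not taken from the original project (model, data, pos_edge_index, neg_edge_index and optimizer are assumed to exist):

import torch
import torch.nn.functional as F

logits = model(data, pos_edge_index, neg_edge_index)
labels = torch.cat([torch.ones(pos_edge_index.size(1)),     # positives -> 1
                    torch.zeros(neg_edge_index.size(1))])   # negatives -> 0
loss = F.binary_cross_entropy_with_logits(logits, labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()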
Example #2
        x = F.relu(self.conv1(x, edge_index))

        # masked_node = F.relu(self.conv1(masked_node, torch.zeros([2,1], dtype=edge_index.dtype, device= edge_index.device)))
        # s1 = self.score(x, masked_node)

        # x_j = torch.index_select(x, 0, total_edge_index[0])
        # x_i = torch.index_select(x, 0, total_edge_index[1])
        # s1 = x_i-x_j
        # layer 2
        x = self.conv2(x, edge_index)
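        # Note: total_edge_index is not built in this truncated snippet; in the
        # original forward it is presumably the concatenation of the positive
        # and negative edge indices, as in Example #1:
        #   total_edge_index = torch.cat([pos_edge_index, neg_edge_index], dim=-1)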

        x_j = torch.index_select(x, 0, total_edge_index[0])
        x_i = torch.index_select(x, 0, total_edge_index[1])

        # dist = x_j-x_i
        # out = F.relu(self.score(dist).squeeze())
        # score_loss = torch.matmul(dist, self.score.weight.squeeze()).mean()

        return torch.einsum("ef,ef->e", x_i, x_j), torch.zeros(total_edge_index.size())
        # return out, score_loss

        # return  F.log_softmax(x, dim=1)


dataset = get_planetoid_dataset(args.dataset, args.normalize_features)

run(dataset, Net(dataset), args.runs, args.epochs, args.lr, args.weight_decay,
    args.early_stopping)
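
Both examples instantiate Net(dataset), but the class definition itself is not included in these snippets. A plausible minimal encoder matching the two-layer forward passes above (an assumption, not the original code), using torch_geometric's GCNConv:

import torch
from torch_geometric.nn import GCNConv

class Net(torch.nn.Module):
    # Hypothetical reconstruction: the original conv1/conv2 layer types and
    # hidden size are not shown in the snippets above.
    def __init__(self, dataset, hidden=64):
        super().__init__()
        self.conv1 = GCNConv(dataset.num_features, hidden)
        self.conv2 = GCNConv(hidden, hidden)
    # forward(...) would be the link-scoring method shown in Example #1.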
Example #3
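        # Snippet truncated: x, added_index, z and r are produced earlier in the
        # original forward; only the final conv2_ssl layer and the return are shown.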
        x = self.conv2_ssl(x, added_index)  # layer 2
        return F.log_softmax(x, dim=1), z, r.size(-1)


# ego-Facebook com-Amazon ego-gplus ego-twitter


# name = "ego-Facebook"
# path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', name)
# dataset = SNAPDataset(path, name)
# # pdb.set_trace()
# run(dataset, DoubleNet(dataset, dataset[0].num_feature, int(dataset[0].num_class)), args.runs, args.epochs, args.lr, args.weight_decay,
#         args.early_stopping)


motif_size = 3
max_value = 1000
gen_interval = n_epochs_gen = 3
dis_interval = n_epochs_dis = 3
batch_size_gen = 64
batch_size_dis = 64
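
The interval and epoch settings above suggest an alternating generator/discriminator training schedule. A hypothetical sketch of how such settings are commonly used (train_gen and train_dis are placeholders, not functions from this project):

for epoch in range(args.epochs):
    if epoch % dis_interval == 0:
        for _ in range(n_epochs_dis):
            train_dis(batch_size_dis)   # update the discriminator
    if epoch % gen_interval == 0:
        for _ in range(n_epochs_gen):
            train_gen(batch_size_gen)   # update the generator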

# "CiteSeer", "PubMed"
names = ["CiteSeer"]
for name in names:
    dataset = get_planetoid_dataset(name, args.normalize_features)
    dataset = dataset.shuffle()
    # pdb.set_trace()
    run(dataset, DoubleNet(dataset, dataset.num_features, dataset.num_classes), args.runs, args.epochs, args.lr, args.weight_decay,
        args.early_stopping)