def test_erdos_renyi_graph():
    """Check that seeded Erdos-Renyi sampling is reproducible."""
    # Fix the RNG so both sampled graphs are deterministic.
    torch.manual_seed(1234)

    # Undirected draw: each edge shows up in both directions.
    undirected = erdos_renyi_graph(5, 0.2, directed=False)
    expected_undirected = [
        [0, 1, 1, 1, 2, 4],
        [1, 0, 2, 4, 1, 1],
    ]
    assert undirected.tolist() == expected_undirected

    # Directed draw with a higher edge probability.
    directed = erdos_renyi_graph(5, 0.5, directed=True)
    expected_directed = [
        [1, 1, 2, 2, 3, 4, 4, 4],
        [0, 3, 0, 4, 0, 0, 1, 3],
    ]
    assert directed.tolist() == expected_directed
# ---- Example #2 ----
def gen_undirected_random_index(node_num=100,
                                sparsity=0.5,
                                device=None,
                                **kwargs):
    """Sample a random undirected edge index from an Erdos-Renyi model.

    :param node_num: Number of nodes in the generated graph.
    :param sparsity: Fraction of absent edges; the edge probability
        passed to the sampler is ``1 - sparsity``.
    :param device: Target device for the edge index; defaults to
        ``getDevice()`` when omitted.
    :param kwargs: Ignored; accepted only for call-site compatibility.
    :return: Tuple ``(edge_index, node_num)`` where ``edge_index`` is a
        ``torch.long`` tensor on ``device``.
    """
    device = device or getDevice()
    edge_index = erdos_renyi_graph(node_num, 1 - sparsity, directed=False)
    # One .to() call moves the tensor and casts its dtype in a single
    # step; the previous .to(device).type(torch.long) chained two
    # conversions (and Tensor.type is the legacy casting API).
    return edge_index.to(device, dtype=torch.long), node_num
# ---- Example #3 ----
def test_sampler():
    """Smoke-test NeighborSampler iteration over a small random graph."""
    num_nodes = 10
    # Sparse random graph (1% edge probability) wrapped in a Data object.
    data = Data(edge_index=erdos_renyi_graph(num_nodes, 0.1))
    data.num_nodes = num_nodes

    # Two-hop sampler with mixed size spec (NOTE(review): presumably the
    # int is an absolute neighbor count and the float a ratio — confirm
    # against the NeighborSampler docs).
    loader = NeighborSampler(data, size=[4, 0.5], num_hops=2, batch_size=2,
                             shuffle=True)

    for data_flow in loader():
        assert data_flow.__repr__()[:8] == 'DataFlow'
        # Seed node ids match the batch size.
        assert data_flow.n_id.size() == (2, )
        assert data_flow.batch_size == 2
        # One block per hop.
        assert len(data_flow) == 2
        block = data_flow[0]  # indexing yields a single Block
        assert block.__repr__()[:5] == 'Block'
        for block in data_flow:  # a DataFlow is also iterable over blocks
            pass
        data_flow = data_flow.to(torch.long)
        break  # one batch is enough for this smoke test
    # The loader must also accept an explicit subset of seed node ids.
    for data_flow in loader(torch.tensor([0, 1, 2, 3, 4])):
        pass

    # Second configuration: drop incomplete batches, deterministic order,
    # and self-loops added during sampling.
    loader = NeighborSampler(data, size=[4, 0.5], num_hops=2, batch_size=3,
                             drop_last=True, shuffle=False,
                             add_self_loops=True)

    for data_flow in loader():
        pass
    for data_flow in loader(torch.tensor([0, 1, 2, 3, 4])):
        pass
    # Legacy uint8 boolean mask input must still be accepted as a seed set.
    mask = torch.tensor([0, 1, 0, 1, 0, 1, 0, 1, 0, 1], dtype=torch.uint8)
    for data_flow in loader(mask):
        pass
# ---- Example #4 ----
def gen_synth_data(count=200, nl=None, nu=50, p=0.5, kl=None, ku=2):
    """
    Build a synthetic GED dataset from Erdos-Renyi graphs.

    :param count: Number of graph pairs to generate.
    :param nl: Minimum number of nodes in a source graph.
    :param nu: Maximum number of nodes in a source graph.
    :param p: Probability of an edge.
    :param kl: Minimum number of insert/remove edge operations on a graph.
    :param ku: Maximum number of insert/remove edge operations on a graph.
    """
    # Missing lower bounds collapse to their upper counterparts.
    nl = nu if nl is None else nl
    kl = ku if kl is None else kl

    originals = []
    perturbed = []
    mat = torch.full((count, count), float("inf"))
    norm_mat = torch.full((count, count), float("inf"))

    for idx in range(count):
        num_nodes = random.randint(nl, nu)
        source = Data(x=torch.ones(num_nodes, 1),
                      edge_index=erdos_renyi_graph(num_nodes, p),
                      i=torch.tensor([idx]))
        target, ged = gen_pair(source, kl, ku)

        originals.append(source)
        perturbed.append(target)
        # Only the diagonal is known: each graph is paired with its own
        # perturbation; all cross-pair distances stay at infinity.
        mat[idx, idx] = ged
        norm_mat[idx, idx] = ged / (0.5 * (source.num_nodes + target.num_nodes))

    return originals, perturbed, mat, norm_mat
# ---- Example #5 ----
def feature_graph():
    """Generate and save the synthetic 'feature' dataset: a random graph
    over 2400 nodes plus three Gaussian clusters of 20-dim features with
    labels 0/1/2, shuffled by a shared permutation."""
    # Random structure: 2400 nodes, 1% edge probability.
    edge_index = erdos_renyi_graph(2400, 0.01)
    torch.save(edge_index, './synthdata/feature/edge_index.pt')

    dim = 20
    identity_cov = np.diag([1 for _ in range(dim)])

    # Three cluster centers drawn uniformly from [-1, 1.5)^dim, then 800
    # samples per cluster from a unit-covariance Gaussian at each center.
    centers = [2.5 * np.random.random(size=dim) - 1 for _ in range(3)]
    clusters = [
        multivariate_normal.rvs(mean=center, cov=identity_cov, size=800)
        for center in centers
    ]
    data = np.vstack(clusters)

    label = np.array([cls for cls in range(3) for _ in range(800)])

    # Shuffle features and labels with the same permutation.
    permutation = np.random.permutation(label.shape[0])
    data = data[permutation, :]
    label = label[permutation]

    x = torch.from_numpy(data).float()
    y = torch.from_numpy(label).long()
    torch.save(x, './synthdata/feature/x.pt')
    torch.save(y, './synthdata/feature/y.pt')
# ---- Example #6 ----
def perturb_graph(G, perturb="none", erdos_flip_p=0.5, erdos_flip_p_neg=1e-2):
    """Optionally replace a graph's edge set in place and return it.

    ``perturb`` selects the mode: 'none' keeps the edges unchanged,
    'erdos' resamples them from an Erdos-Renyi model with edge
    probability ``erdos_flip_p``, and 'flip' is accepted but currently a
    no-op (``erdos_flip_p_neg`` is reserved for its future
    implementation via dropout_adj + negative_sampling, which was
    sketched here but never finished).
    """
    assert perturb in ['none', 'erdos', 'flip']
    node_features, new_edges = G.x, G.edge_index
    if perturb == 'erdos':
        # Keep the node count, discard the original topology entirely.
        new_edges = erdos_renyi_graph(num_nodes=node_features.shape[0],
                                      edge_prob=erdos_flip_p)
    G.edge_index = new_edges
    return G
def test_neighbor_sampler():
    """Check NeighborSampler batches reference valid node and edge ids."""
    torch.manual_seed(12345)
    edge_index = erdos_renyi_graph(num_nodes=10, edge_prob=0.5)
    num_edges = edge_index.size(1)

    loader = NeighborSampler(edge_index, sizes=[2, 4], batch_size=2)
    assert loader.__repr__() == 'NeighborSampler(sizes=[2, 4])'
    assert len(loader) == 5  # 10 nodes / batch_size of 2

    for batch_size, n_id, adjs in loader:
        assert batch_size == 2
        # Sampled node ids are in range and unique.
        assert all(np.isin(n_id, torch.arange(10)).tolist())
        assert n_id.unique().size(0) == n_id.size(0)
        for edge_index, e_id, size in adjs:
            # Local edge endpoints fit within the bipartite block sizes.
            assert int(edge_index[0].max() + 1) <= size[0]
            assert int(edge_index[1].max() + 1) <= size[1]
            # Original edge ids are in range and unique.
            assert all(np.isin(e_id, torch.arange(num_edges)).tolist())
            assert e_id.unique().size(0) == e_id.size(0)
            # Each hop narrows (or keeps) the node set.
            assert size[0] >= size[1]

    # Sampling an explicit seed-node list yields the usual 3-tuple.
    out = loader.sample([1, 2])
    assert len(out) == 3
def test_erdos_renyi_graph():
    """A fixed seed makes the sampled undirected graph deterministic."""
    expected = [[0, 1, 2, 2, 2, 3, 3, 4],
                [2, 3, 0, 3, 4, 1, 2, 2]]
    edge_index = erdos_renyi_graph(5, 0.2, seed=12345, directed=False)
    assert edge_index.tolist() == expected