Ejemplo n.º 1
0
def test_to_dense():
    """Exercise the ToDense transform, with and without an explicit num_nodes."""
    edge_index = torch.tensor([[0, 0, 0, 1, 2, 3], [1, 2, 3, 0, 0, 0]])
    edge_attr = torch.Tensor([1, 2, 3, 4, 5, 6])
    n = edge_index.max().item() + 1
    x = torch.randn((n, 4))
    pos = torch.randn((n, 3))
    y = torch.randint(0, 4, (n, ), dtype=torch.long)

    # Dense conversion sized from the data itself.
    transform = ToDense()
    assert repr(transform) == 'ToDense()'
    out = transform(
        Data(x=x, pos=pos, edge_index=edge_index, edge_attr=edge_attr, y=y))
    assert len(out) == 5
    assert out.x.tolist() == x.tolist()
    assert out.pos.tolist() == pos.tolist()
    assert out.y.tolist() == y.tolist()
    assert out.adj.size() == (n, n)
    expected_adj = [
        [0, 1, 2, 3],
        [4, 0, 0, 0],
        [5, 0, 0, 0],
        [6, 0, 0, 0],
    ]
    assert out.adj.tolist() == expected_adj
    assert out.mask.tolist() == [1, 1, 1, 1]

    # Dense conversion padded up to a fixed number of nodes.
    transform = ToDense(num_nodes=5)
    assert repr(transform) == 'ToDense(num_nodes=5)'
    out = transform(
        Data(x=x, pos=pos, edge_index=edge_index, edge_attr=edge_attr, y=y))
    assert len(out) == 5
    assert out.x.size() == (5, 4)
    assert out.x[:4].tolist() == x.tolist()
    assert out.x[4].tolist() == [0, 0, 0, 0]
    assert out.pos.size() == (5, 3)
    assert out.pos[:4].tolist() == pos.tolist()
    assert out.pos[4].tolist() == [0, 0, 0]
    assert out.y.size() == (5, )
    assert out.y[:4].tolist() == y.tolist()
    assert out.y[4].tolist() == 0
    assert out.adj.size() == (5, 5)
    # The 5x5 adjacency is the 4x4 one padded with a zero column and row.
    assert out.adj.tolist() == [row + [0] for row in expected_adj] + [[0] * 5]
    assert out.mask.tolist() == [1, 1, 1, 1, 0]
Ejemplo n.º 2
0
def test_enzymes():
    """Download ENZYMES into a throwaway /tmp directory, smoke-test loading,
    indexing, and the three loader flavours, then clean up."""
    root = osp.join('/', 'tmp', str(random.randrange(sys.maxsize)))
    dataset = TUDataset(root, 'ENZYMES')

    assert len(dataset) == 600
    assert dataset.num_features == 3
    assert dataset.num_classes == 6
    assert repr(dataset) == 'ENZYMES(600)'

    # Slicing, shuffling, and mask-based selection.
    assert len(dataset[0]) == 3
    assert len(dataset.shuffle()) == 600
    assert len(dataset.shuffle(return_perm=True)) == 2
    assert len(dataset[:100]) == 100
    assert len(dataset[torch.arange(100, dtype=torch.long)]) == 100
    mask = torch.zeros(600, dtype=torch.bool)
    mask[:100] = True
    assert len(dataset[mask]) == 100

    # One mini-batch holding every graph at once.
    for data in DataLoader(dataset, batch_size=len(dataset)):
        assert data.num_graphs == 600

        assert pytest.approx(data.num_nodes / data.num_graphs,
                             abs=1e-2) == 32.63
        assert pytest.approx(data.num_edges / (2 * data.num_graphs),
                             abs=1e-2) == 62.14

        assert len(data) == 5
        assert list(data.x.size()) == [data.num_nodes, 3]
        assert list(data.y.size()) == [data.num_graphs]
        assert data.y.max() + 1 == 6
        assert list(data.batch.size()) == [data.num_nodes]
        assert data.ptr.numel() == data.num_graphs + 1

        assert data.has_isolated_nodes()
        assert not data.has_self_loops()
        assert data.is_undirected()

    for data_list in DataListLoader(dataset, batch_size=len(dataset)):
        assert len(data_list) == 600

    # Dense batches padded to 126 nodes per graph.
    dataset.transform = ToDense(num_nodes=126)
    for data in DenseDataLoader(dataset, batch_size=len(dataset)):
        assert len(data) == 4
        assert list(data.x.size()) == [600, 126, 3]
        assert list(data.adj.size()) == [600, 126, 126]
        assert list(data.mask.size()) == [600, 126]
        assert list(data.y.size()) == [600, 1]

    # Re-load keeping the continuous node attributes.
    dataset = TUDataset(root, 'ENZYMES', use_node_attr=True)
    assert dataset.num_node_features == 21
    assert dataset.num_features == 21
    assert dataset.num_edge_features == 0

    shutil.rmtree(root)
Ejemplo n.º 3
0
def test_enzymes(get_dataset):
    """Validate ENZYMES via the shared dataset fixture: stats, indexing,
    and the standard/list/dense loaders."""
    dataset = get_dataset(name='ENZYMES')
    assert len(dataset) == 600
    assert dataset.num_features == 3
    assert dataset.num_classes == 6
    assert str(dataset) == 'ENZYMES(600)'

    # Slicing, permutation, and mask-based selection.
    assert len(dataset[0]) == 3
    assert len(dataset.shuffle()) == 600
    assert len(dataset.shuffle(return_perm=True)) == 2
    assert len(dataset[:100]) == 100
    assert len(dataset[torch.arange(100, dtype=torch.long)]) == 100
    selector = torch.zeros(600, dtype=torch.bool)
    selector[:100] = True
    assert len(dataset[selector]) == 100

    # One mini-batch holding the full dataset.
    for batch in DataLoader(dataset, batch_size=len(dataset)):
        assert batch.num_graphs == 600

        assert pytest.approx(batch.num_nodes / batch.num_graphs,
                             abs=1e-2) == 32.63
        assert pytest.approx(batch.num_edges / (2 * batch.num_graphs),
                             abs=1e-2) == 62.14

        assert len(batch) == 5
        assert list(batch.x.size()) == [batch.num_nodes, 3]
        assert list(batch.y.size()) == [batch.num_graphs]
        assert batch.y.max() + 1 == 6
        assert list(batch.batch.size()) == [batch.num_nodes]
        assert batch.ptr.numel() == batch.num_graphs + 1

        assert batch.has_isolated_nodes()
        assert not batch.has_self_loops()
        assert batch.is_undirected()

    for graphs in DataListLoader(dataset, batch_size=len(dataset)):
        assert len(graphs) == 600

    # Dense batches padded to 126 nodes per graph.
    dataset.transform = ToDense(num_nodes=126)
    for dense in DenseDataLoader(dataset, batch_size=len(dataset)):
        assert len(dense) == 4
        assert list(dense.x.size()) == [600, 126, 3]
        assert list(dense.adj.size()) == [600, 126, 126]
        assert list(dense.mask.size()) == [600, 126]
        assert list(dense.y.size()) == [600, 1]
def test_enzymes():
    """Smoke-test ENZYMES against an older PyG API surface (contains_* checks,
    no ``ptr``): download into a throwaway directory, check stats, indexing,
    and both standard and dense batching, then clean up."""
    root = osp.join('/', 'tmp', str(random.randrange(sys.maxsize)), 'test')
    dataset = TUDataset(root, 'ENZYMES')

    assert len(dataset) == 600
    # NOTE(review): 21 features implies node attributes are included by
    # default in the PyG version this snippet targets; sibling tests expect
    # 3 — confirm the intended version.
    assert dataset.num_features == 21
    assert dataset.num_classes == 6
    assert repr(dataset) == 'ENZYMES(600)'

    # Use the indexing protocol instead of calling __getitem__ directly.
    assert len(dataset[0]) == 3

    assert len(dataset.shuffle()) == 600
    assert len(dataset[:100]) == 100
    assert len(dataset[torch.arange(100, dtype=torch.long)]) == 100
    # torch.bool is the supported dtype for mask indexing (uint8 masks are
    # deprecated since PyTorch 1.2) and matches the sibling tests.
    mask = torch.zeros(600, dtype=torch.bool)
    mask[:100] = True
    assert len(dataset[mask]) == 100

    loader = DataLoader(dataset, batch_size=len(dataset))
    for data in loader:
        assert data.num_graphs == 600

        avg_num_nodes = data.num_nodes / data.num_graphs
        assert pytest.approx(avg_num_nodes, abs=1e-2) == 32.63

        avg_num_edges = data.num_edges / (2 * data.num_graphs)
        assert pytest.approx(avg_num_edges, abs=1e-2) == 62.14

        assert len(data) == 4
        assert list(data.x.size()) == [data.num_nodes, 21]
        assert list(data.y.size()) == [data.num_graphs]
        assert data.y.max() + 1 == 6
        assert list(data.batch.size()) == [data.num_nodes]

        # Older PyG API: contains_* rather than has_*.
        assert data.contains_isolated_nodes()
        assert not data.contains_self_loops()
        assert data.is_undirected()

    # Dense batches padded to 126 nodes per graph.
    dataset.transform = ToDense(num_nodes=126)
    loader = DenseDataLoader(dataset, batch_size=len(dataset))
    for data in loader:
        assert len(data) == 4
        assert list(data.x.size()) == [600, 126, 21]
        assert list(data.adj.size()) == [600, 126, 126]
        assert list(data.mask.size()) == [600, 126]
        assert list(data.y.size()) == [600, 1]

    shutil.rmtree(root)
Ejemplo n.º 5
0
    def __init__(self, max_nodes, nhid):
        """Build three DenseGCN + DIFFPOOL stages and a 3-layer linear head.

        Layer construction order is preserved exactly so that parameter
        initialization consumes the global RNG in the same sequence.
        """
        super(Net, self).__init__()

        # Cluster counts shrink geometrically by args.ratio per pooling stage.
        sizes = [ceil(args.ratio**k * max_nodes) for k in (1, 2, 3)]

        self.conv1 = DenseGCN(dataset.num_features, nhid)
        self.pool1 = DIFFPOOL(nhid, nhid, sizes[0])

        self.conv2 = DenseGCN(nhid, nhid)
        self.pool2 = DIFFPOOL(nhid, nhid, sizes[1])

        self.conv3 = DenseGCN(nhid, nhid)
        self.pool3 = DIFFPOOL(nhid, nhid, sizes[2])

        half = int(nhid / 2)
        self.lin1 = torch.nn.Linear(nhid * 2, nhid)
        self.lin2 = torch.nn.Linear(nhid, half)
        self.lin3 = torch.nn.Linear(half, dataset.num_classes)
        self.to_dense = ToDense()
Ejemplo n.º 6
0
 def __init__(self, min_nodes, max_nodes, distance_threshold):
     """Store filter parameters and a ToDense transform built from max_nodes."""
     # NOTE(review): max_nodes is captured only inside the ToDense transform
     # and never stored on self — confirm no caller expects self.max_nodes.
     self.min_nodes = min_nodes
     self.distance_threshold = distance_threshold
     self.to_dense = ToDense(max_nodes)