Beispiel #1
0
def test_loader_first():
    """``first()`` yields a single GraphBatch containing ``batch_size`` graphs."""
    examples = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    loader = GraphDataLoader(examples, batch_size=32, shuffle=True)

    first_batch = loader.first()
    assert isinstance(first_batch, GraphBatch)
    assert first_batch.shape == (5, 4, 3)
    assert first_batch.num_graphs == 32
Beispiel #2
0
def test_train_shortest_path():
    """Train ``Network`` on batched shortest-path examples for a few epochs.

    Fixes over the original:
    - ``optimizer.zero_grad()`` is now called before each backward pass; the
      original never cleared gradients, so they accumulated across every batch
      and epoch, corrupting the updates.
    - Removed the unused ``agg`` lambda (it was never referenced).
    """
    graphs = [
        generate_shorest_path_example(100, 0.01, 1000) for _ in range(10)
    ]
    input_data = [
        GraphData.from_networkx(g, feature_key="_features") for g in graphs
    ]
    target_data = [
        GraphData.from_networkx(g, feature_key="_target") for g in graphs
    ]

    loader = GraphDataLoader(input_data,
                             target_data,
                             batch_size=32,
                             shuffle=True)

    network = Network()

    # One forward pass so lazily-sized (Flex) dimensions resolve before training.
    for input_batch, _ in loader:
        network(input_batch, 10)
        break

    loss_fn = torch.nn.BCELoss()
    optimizer = torch.optim.AdamW(network.parameters())
    for _ in range(10):
        for input_batch, target_batch in loader:
            optimizer.zero_grad()  # clear gradients accumulated by the previous step
            output = network(input_batch, 10)[0]
            x, y = output.x, target_batch.x
            loss = loss_fn(x.flatten(), y[:, 0].flatten())
            loss.backward()
            print(loss.detach())
            optimizer.step()
Beispiel #3
0
def test_loader_dataset():
    """A GraphDataset is batched by GraphDataLoader into GraphBatch objects."""
    dataset = GraphDataset([GraphData.random(5, 4, 3) for _ in range(4 * 32)])

    loader = GraphDataLoader(dataset, shuffle=True, batch_size=32)
    for b in loader:
        print(b.size)
        assert isinstance(b, GraphBatch)
        assert b.size[-1] == 32
Beispiel #4
0
def test_loader_zipped():
    """Two parallel datasets yield a pair of distinct GraphBatch objects per step."""
    first = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    second = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    loader = GraphDataLoader(first, second, batch_size=32, shuffle=True)

    for left, right in loader:
        assert isinstance(left, GraphBatch)
        assert isinstance(right, GraphBatch)
        assert left is not right
Beispiel #5
0
    def sigmoid_circuit(cls, data_size, batch_size):
        """Build a loader of random DAG "sigmoid circuits".

        Each example is a random tree with randomized edge directions; roots
        get a fixed target and every other node's target is
        ``1 - sigmoid(sum of parent targets)``, propagated in topological
        order.

        Bug fix: the original stored root targets as 0-d arrays
        (``np.array(10.0)``) and derived targets as plain floats;
        ``np.concatenate`` rejects 0-d inputs, so both are now 1-element
        arrays (matching the sibling ``boolean_network`` builder).
        """
        import math

        def func(x):
            # Inverted sigmoid: 1 - sigmoid(x).
            return 1 - 1.0 / (1 + math.exp(-x))

        input_data = []
        output_data = []
        for _ in range(data_size):
            n_size = np.random.randint(2, 20)
            tree = nx.random_tree(n_size)

            # Randomize the direction of each tree edge.
            g = nx.DiGraph()
            for n1, n2, edata in tree.edges(data=True):
                i = np.random.randint(2)
                if i % 2 == 0:
                    g.add_edge(n1, n2)
                else:
                    g.add_edge(n2, n1)
            cls._default_g(g)

            # Roots receive a fixed large input signal.
            for n in nx_utils.iter_roots(g):
                ndata = g.nodes[n]
                # 1-element array so downstream np.concatenate works.
                ndata["target"] = np.array([10.0])

            # Propagate targets downstream in topological order.
            for n in nx.topological_sort(g):
                ndata = g.nodes[n]
                if "target" not in ndata:
                    incoming = []
                    for p in g.predecessors(n):
                        pdata = g.nodes[p]
                        incoming.append(pdata["target"])
                    incoming = np.concatenate(incoming)
                    i = incoming.sum()
                    o = func(i)
                    # Wrap the scalar so children can concatenate it too.
                    ndata["target"] = np.array([o])

            input_data.append(
                GraphData.from_networkx(g, feature_key="features"))
            output_data.append(GraphData.from_networkx(g,
                                                       feature_key="target"))

        return GraphDataLoader(list(zip(input_data, output_data)),
                               batch_size=batch_size)
Beispiel #6
0
    def boolean_network(cls, data_size, batch_size):
        """Build a loader of random boolean (NOT-gate) networks.

        Each example is a random tree with randomized edge directions.
        Roots start "on" (target 1.0); every other node, visited in
        topological order, negates the max of its parents' targets.
        """

        inputs = []
        outputs = []
        for _ in range(data_size):
            num_nodes = np.random.randint(2, 20)
            undirected = nx.random_tree(num_nodes)

            # Assign each tree edge a random direction.
            g = nx.DiGraph()
            for u, v, _edata in undirected.edges(data=True):
                if np.random.randint(2) % 2 == 0:
                    g.add_edge(u, v)
                else:
                    g.add_edge(v, u)
            cls._default_g(g)

            # Roots start "on".
            for root in nx_utils.iter_roots(g):
                g.nodes[root]["target"] = np.array([1.0])

            # Each remaining node is the negation of the max of its parents.
            for node in nx.topological_sort(g):
                node_data = g.nodes[node]
                if "target" in node_data:
                    continue
                parent_targets = []
                for parent in g.predecessors(node):
                    parent_targets.append(g.nodes[parent]["target"])
                signal = np.concatenate(parent_targets).max()
                if signal == 1:
                    node_data["target"] = np.array([0.0])
                else:
                    node_data["target"] = np.array([1.0])

            inputs.append(GraphData.from_networkx(g, feature_key="features"))
            outputs.append(GraphData.from_networkx(g, feature_key="target"))

        return GraphDataLoader(list(zip(inputs, outputs)),
                               batch_size=batch_size)
Beispiel #7
0
    def est_density(cls, data_size, batch_size):
        """Build a loader mapping random global features to graph-density targets.

        Each example is a random directed tree whose global (graph-level)
        attributes carry a random scalar feature and ``nx.density(g)`` as the
        regression target.

        Fix: removed the unused local ``s = 2`` left over from a sibling
        builder.
        """
        input_data = []
        output_data = []
        for _ in range(data_size):
            n_size = np.random.randint(2, 20)
            g = nx.to_directed(nx.random_tree(n_size))
            cls._default_g(g)

            # Graph-level attributes: random feature, density as the label.
            gdata = g.get_global()
            gdata["features"] = np.random.randn(1)
            gdata["target"] = np.array([nx.density(g)])

            input_data.append(
                GraphData.from_networkx(g, feature_key="features"))
            output_data.append(GraphData.from_networkx(g,
                                                       feature_key="target"))

        return GraphDataLoader(input_data, output_data, batch_size=batch_size)
Beispiel #8
0
def create_loader(generator, graphs, batch_size, shuffle, pin_memory=False):
    """Build a GraphDataLoader of (input, target) pairs from networkx graphs.

    Inputs use ``generator.n_parts`` node features; targets are read from the
    ``"target"`` feature key. ``batch_size=None`` means one batch holding the
    entire dataset.
    """
    inputs = GraphBatch.from_networkx_list(
        graphs,
        n_edge_feat=1,
        n_node_feat=generator.n_parts,
        n_glob_feat=1,
    ).to_data_list()
    targets = GraphBatch.from_networkx_list(
        graphs,
        n_edge_feat=16,
        n_node_feat=1,
        n_glob_feat=1,
        feature_key="target",
    ).to_data_list()

    # None => a single batch containing every example.
    effective_batch_size = len(inputs) if batch_size is None else batch_size
    return GraphDataLoader(
        list(zip(inputs, targets)),
        batch_size=effective_batch_size,
        shuffle=shuffle,
        pin_memory=pin_memory,
    )
Beispiel #9
0
    def in_degree(cls, data_size, batch_size):
        """Build a loader mapping random node features to node in-degrees.

        Each example is a random directed tree; every node carries a random
        scalar feature and its in-degree as the regression target.

        Fix: removed the unused local ``s = 2`` left over from a sibling
        builder.
        """
        input_data = []
        output_data = []
        for _ in range(data_size):
            n_size = np.random.randint(2, 20)
            g = nx.to_directed(nx.random_tree(n_size))
            cls._default_g(g)

            for n, ndata in g.nodes(data=True):
                ndata["features"] = np.random.randn(1)
                ndata["target"] = np.array([g.in_degree(n)])

            input_data.append(
                GraphData.from_networkx(g, feature_key="features"))
            output_data.append(GraphData.from_networkx(g,
                                                       feature_key="target"))

        return GraphDataLoader(list(zip(input_data, output_data)),
                               batch_size=batch_size)
Beispiel #10
0
    def random_graph_red_black_edges(cls, data_size, batch_size):
        """Build a loader of graphs whose edges carry a two-class ("red/black")
        one-hot feature and a per-class scalar target.

        Bug fix: the original drew ``np.random.randint(0, 1, (1,))`` — the
        upper bound of ``randint`` is exclusive, so every edge got class 0 and
        the "black" branch was unreachable. The range is now ``[0, 2)``.
        """
        input_data = []
        output_data = []
        s = 2  # number of edge classes (one-hot width)
        for _ in range(data_size):
            g = nx.to_directed(nx.random_tree(10))
            cls._default_g(g)
            for _, _, edata in g.edges(data=True):
                # Draw 0 or 1 uniformly (randint's high bound is exclusive).
                i = np.random.randint(0, 2, (1, ))
                edata["features"] = to_one_hot(i, s)
                if i % 2 == 0:
                    target = np.array([0.5])
                else:
                    target = np.zeros((1, ))
                edata["target"] = target

            input_data.append(
                GraphData.from_networkx(g, feature_key="features"))
            output_data.append(GraphData.from_networkx(g,
                                                       feature_key="target"))

        return GraphDataLoader(input_data, output_data, batch_size=batch_size)
Beispiel #11
0
def test_loader():
    """Every batch produced by the loader contains ``batch_size`` graphs."""
    examples = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    loader = GraphDataLoader(examples, batch_size=32, shuffle=True)

    for graph_batch in loader:
        assert graph_batch.size[2] == 32
Beispiel #12
0
 def random_loader(data_size, batch_size):
     """Return a GraphDataLoader over ``data_size`` random GraphData(5, 5, 5).

     Bug fix: ``batch_size`` is now passed by keyword. Every other call site
     in this file passes it as a keyword, and the second *positional* slot of
     ``GraphDataLoader`` takes a second dataset (see the zipped-loader usage),
     so the positional call likely misconfigured the loader.
     """
     datalist = [GraphData.random(5, 5, 5) for _ in range(data_size)]
     return GraphDataLoader(datalist, batch_size=batch_size)
Beispiel #13
0
def test_loader_limit_mem_sizes():
    """``limit_mem_size`` filters out batches that exceed the memory budget."""
    examples = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    loader = GraphDataLoader(examples, batch_size=1, shuffle=True)

    # A tiny budget excludes every batch; a generous one keeps some.
    assert len(list(loader(limit_mem_size=10))) == 0
    assert len(list(loader(limit_mem_size=1000))) > 0
Beispiel #14
0
def test_loader_mem_sizes():
    """Smoke test: ``mem_sizes()`` is printable and castable to float for std()."""
    examples = [GraphData.random(5, 4, 3) for _ in range(5 * 32)]
    loader = GraphDataLoader(examples, batch_size=1, shuffle=True)
    print(loader.mem_sizes())
    print(loader.mem_sizes().to(torch.float).std())