Example No. 1
    def test_train_layer3(self):
        """
        Test sampling with 3 hubs, using a different support size per layer.
        """
        graph = gb.create_directed_barbell(4, 4)
        gae = GraphAutoEncoder(graph,
                               support_size=[3, 4, 5],
                               dims=[2, 3, 3, 3, 3, 2],
                               batch_size=3,
                               max_total_steps=1,
                               verbose=False,
                               seed=2,
                               act=tf.nn.relu)

        exp = [
            153.83647, 309.56152, 311.00153, 459.34726, 484.33817, 504.59387
        ]
        for i in range(6):
            res = gae.train_layer(i + 1)
            self.assertAlmostEqual(
                res['l'][0], exp[i], 4,
                f"loss of layer {i+1} does not match with expectations")

        res = gae.train_layer(6, all_layers=True)
        self.assertAlmostEqual(
            res['l'][0], 504.55478, 4,
            "loss of the layer 6 all traning does not match with expectations")
Example No. 2
    def test_weight_label(self):
        """
        Test if setting a custom weight label works correctly.
        Checks that the right label is selected and placed in the first column of the sample
        weight tensors, and that the ordering based on the custom weight label values is correct.
        """
        graph = gb.create_directed_barbell(4, 4)
        edge_weight = [1, 1, 1]
        edge_lbl1_in = [0.22, 2.198239366963403, 3.1873590504451044]
        edge_lbl1_out = [91.12909091, 0.71726504, 0.55211558]
        for in_node, out_node, lbl in graph.edges(data=True):
            lbl['edge_lbl1'] = in_node / (out_node + 0.011) + 0.22
        data_feeders = [
            DataFeederNx(graph),
            DataFeederNx(graph, weight_label='edge_lbl1')
        ]
        for nr, data_feeder in enumerate(data_feeders):
            in_weight = data_feeder.in_sample_weight
            out_weight = data_feeder.out_sample_weight
            if nr == 1:
                edge_lbl1_in.sort(reverse=True)

            for i in range(3):
                self.assertAlmostEqual(in_weight[1][i][nr], edge_weight[i], 4)
                self.assertAlmostEqual(in_weight[1][i][abs(nr - 1)],
                                       edge_lbl1_in[i], 4)

                self.assertAlmostEqual(out_weight[1][i][nr], edge_weight[i], 4)
                self.assertAlmostEqual(out_weight[1][i][abs(nr - 1)],
                                       edge_lbl1_out[i], 4)
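
For reference, the expected values in this example follow directly from the label formula used in the loop above: edge_lbl1 = in_node / (out_node + 0.011) + 0.22. A minimal plain-Python sketch (the node ids below are illustrative, not the actual barbell edge list):

in_node, out_node = 0, 1                          # illustrative endpoints only
edge_lbl1 = in_node / (out_node + 0.011) + 0.22
print(edge_lbl1)                                  # 0.22 whenever in_node == 0,
                                                  # matching the first expected in-sample value above
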
Example No. 3
    def test_reconstruct_graph(self):
        """
        Test the reconstruction of an input layer.
        """
        graph = gb.create_directed_barbell(10, 10)
        random.seed(2)
        for u in graph.nodes(data=True):
            u[1]['label1'] = int(u[0])
            u[1]['label2'] = random.uniform(0.0, 1.0)
        gae = GraphAutoEncoder(graph,
                               learning_rate=0.01,
                               support_size=[5, 5],
                               dims=[3, 5, 7, 6, 2],
                               batch_size=12,
                               max_total_steps=100,
                               verbose=True)

        l1_struct, graph2 = gae.get_l1_structure(15, show_graph=False)
        # check if the number of nodes in the reconstructed graph equals 5
        self.assertEqual(
            graph2.number_of_nodes(), 5,
            "Number of nodes in reconstructed graph does not match with expectations"
        )

        # check if the returned nodes are correct by summing the node values.
        sum_values = np.sum(l1_struct, 1)
        self.assertAlmostEqual(
            sum_values[0, 1], 120, 4,
            "sum of nodes ids in reconstructed graph does not match with expectations"
        )
        self.assertAlmostEqual(
            sum_values[0, 0], 2.399999, 4,
            "sum of edges in reconstructed graph does not match with expectations"
        )
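
As a follow-up to the assertions above, the objects returned by get_l1_structure can be inspected directly. A minimal sketch, assuming (as the test suggests) that graph2 is a regular networkx graph and l1_struct a numpy array:

l1_struct, graph2 = gae.get_l1_structure(15, show_graph=False)
print(graph2.number_of_nodes(), graph2.number_of_edges())
for node, attrs in graph2.nodes(data=True):      # node attributes of the reconstructed subgraph
    print(node, attrs)
print(np.sum(l1_struct, 1))                      # the aggregation checked by the assertions above
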
Example No. 4
    def test_save_load(self):
        """
        Test if saving and loading the model in a new object gives the same results
        """
        filename = os.getcwd() + "/data/test_save_load"
        graph = gb.create_directed_barbell(4, 4)
        gae = GraphAutoEncoder(graph,
                               learning_rate=0.01,
                               support_size=[5, 5],
                               dims=[3, 5, 7, 6, 2],
                               batch_size=12,
                               max_total_steps=50,
                               verbose=True)
        gae.fit(graph)
        embed = gae.calculate_embeddings()
        gae.save_model(filename)

        gae2 = GraphAutoEncoder(graph,
                                learning_rate=0.01,
                                support_size=[5, 5],
                                dims=[3, 5, 7, 6, 2],
                                batch_size=12,
                                max_total_steps=50,
                                verbose=True)
        gae2.load_model(filename, graph)
        embed2 = gae2.calculate_embeddings()

        embed3 = np.subtract(embed, embed2)
        self.assertAlmostEqual(
            np.sum(embed3), 0, 4,
            "loaded model gives different result then original")
Example No. 5
    def test_train_layer2(self):
        """
        Test if the loss is reduced during training
        """
        graph = gb.create_directed_barbell(4, 4)
        gae = GraphAutoEncoder(graph,
                               support_size=[3, 3],
                               dims=[2, 3, 3, 2],
                               batch_size=3,
                               max_total_steps=10,
                               verbose=False,
                               seed=2,
                               act=tf.nn.relu)
        res = gae.train_layer(1, learning_rate=0.0001)
        self.assertTrue(res['val_l'][0] > res['val_l'][-1],
                        "loss has not decreased while training layer 1")

        res = gae.train_layer(2, learning_rate=0.0001)
        self.assertTrue(res['val_l'][0] > res['val_l'][-1],
                        "loss has not decreased while training layer 2")

        res = gae.train_layer(3, learning_rate=0.0001)
        self.assertTrue(res['val_l'][0] > res['val_l'][-1],
                        "loss has not decreased while training layer 3")

        res = gae.train_layer(4, learning_rate=0.0001)
        self.assertTrue(res['val_l'][0] > res['val_l'][-1],
                        "loss has not decreased while training layer 4")
Example No. 6
    def test_fit(self):
        """
        Test if the fit function gives the same results as training the layers separately
        """
        graph = gb.create_directed_barbell(4, 4)
        gae = GraphAutoEncoder(graph,
                               learning_rate=0.01,
                               support_size=[5, 5],
                               dims=[3, 5, 7, 6, 2],
                               batch_size=12,
                               max_total_steps=50,
                               verbose=True)

        train_res = {}
        for i in range(len(gae.dims)):
            train_res["l" + str(i + 1)] = gae.train_layer(i + 1)

        train_res['all'] = gae.train_layer(len(gae.dims),
                                           all_layers=True,
                                           dropout=None)
        embed = gae.calculate_embeddings()

        gae2 = GraphAutoEncoder(graph,
                                learning_rate=0.01,
                                support_size=[5, 5],
                                dims=[3, 5, 7, 6, 2],
                                batch_size=12,
                                max_total_steps=50,
                                verbose=True)
        gae2.fit(graph)
        embed2 = gae2.calculate_embeddings()
        embed3 = np.subtract(embed, embed2)
        self.assertAlmostEqual(
            np.sum(embed3), 0, 4,
            "fit method results in a different model when trained separately")
Example No. 7
    def test_train_layer5(self):
        """
        Test using a final combination layer. Checks if training works correctly and if the
        calculation of the embeddings works correctly.
        """
        graph = gb.create_directed_barbell(4, 4)
        for in_node, out_node, lbl in graph.edges(data=True):
            lbl['edge_lbl1'] = in_node / (out_node + 0.011) + 0.22

        gae = GraphAutoEncoder(graph,
                               support_size=[3, 3],
                               dims=[2, 3, 3, 2, 2],
                               batch_size=3,
                               max_total_steps=10,
                               verbose=False,
                               seed=2,
                               weight_label='edge_lbl1',
                               act=tf.nn.relu)

        for i in range(len(gae.dims)):
            res = gae.train_layer(i + 1, act=tf.nn.relu)

        self.assertAlmostEqual(
            res['l'][0], 134.9637, 4,
            "loss of the last layer does not match with expectations using a "
            "final combination layer")

        res = gae.train_layer(len(gae.dims), all_layers=True, act=tf.nn.relu)
        embed = gae.calculate_embeddings()
        self.assertAlmostEqual(
            embed[0][2], 38.221458435058594, 4,
            "embedding of the first batch node differs from expected value")
Example No. 8
    def test_consistency_checks(self):
        """
        Test the consistency checks performed during initialization.
        """
        graph = gb.create_directed_barbell(10, 10)
        with self.assertRaises(AssertionError):
            gae = GraphAutoEncoder(graph,
                                   support_size=[5, 5],
                                   dims=[2, 6, 6],
                                   batch_size=1024,
                                   max_total_steps=10,
                                   verbose=True,
                                   seed=2)
Example No. 9
    def test_train_layer(self):
        """
        Test if the loss of the initial setup is correct.
        """
        graph = gb.create_directed_barbell(4, 4)
        # add node ids to the graph as a label
        labels3 = [(i, i) for i in range(13)]
        labels3 = dict(labels3)
        nx.set_node_attributes(graph, labels3, 'label3')
        gae = GraphAutoEncoder(graph,
                               support_size=[3, 3],
                               dims=[2, 3, 3, 2],
                               batch_size=3,
                               max_total_steps=1,
                               verbose=False,
                               seed=2,
                               act=tf.nn.relu)
        res = gae.train_layer(1)
        self.assertAlmostEqual(
            res['l'][0], 2158.0686, 4,
            "loss of the initial setup does not match with expectations")

        res = gae.train_layer(2)
        self.assertAlmostEqual(
            res['l'][0], 2613.2725, 4,
            "loss of the initial setup does not match with expectations")

        res = gae.train_layer(3)
        self.assertAlmostEqual(
            res['l'][0], 2693.6736, 4,
            "loss of the initial setup does not match with expectations")

        res = gae.train_layer(4)
        self.assertAlmostEqual(
            res['l'][0], 2842.3582, 3,
            "loss of the initial setup does not match with expectations")

        res = gae.train_layer(4, all_layers=True)
        self.assertAlmostEqual(
            res['l'][0], 2842.1409, 4,
            "loss of the initial setup does not match with expectations")
Example No. 10
    def test_train_layer4(self):
        """
        Test using multiple edge labels in combination with a custom weight label. The test
        checks if the weights are calculated correctly.
        """
        graph = gb.create_directed_barbell(4, 4)
        for in_node, out_node, lbl in graph.edges(data=True):
            lbl['edge_lbl1'] = in_node / (out_node + 0.011) + 0.22

        gae = GraphAutoEncoder(graph,
                               support_size=[3, 3],
                               dims=[2, 3, 3, 2],
                               batch_size=3,
                               max_total_steps=10,
                               verbose=False,
                               seed=2,
                               weight_label='edge_lbl1',
                               act=tf.nn.relu)
        res = gae.train_layer(1, learning_rate=0.0001)
        self.assertAlmostEqual(
            res['l'][0], 49.392754, 4,
            "loss of layer 1 does not match with expectations using a "
            "custom edge label")
Example No. 11
    def setUp(self):
        G = gb.create_directed_barbell(4, 4)
        self.data_feeder = DataFeederNx(G)
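
The fixture above only builds the feeder; the attributes exercised elsewhere in this suite can be inspected on it directly. A minimal standalone sketch, reusing only names that appear in these examples:

G = gb.create_directed_barbell(4, 4)
feeder = DataFeederNx(G)
print(feeder.in_sample_weight[1])     # in-sample weight tensor slice, as used in the weight-label test
print(feeder.out_sample_weight[1])    # out-sample weight tensor slice
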