    def test_predict_proba(self):
        pickle_fname = 'abp_CV_fold_1_tlXlY_trn.pkl'
        gcn_graph = GCNDataset.load_transkribus_pickle(pickle_fname)
        gcn_graph_train = [gcn_graph[8], gcn_graph[18], gcn_graph[29]]

        node_dim = gcn_graph[0].X.shape[1]
        edge_dim = gcn_graph[0].E.shape[1] - 2  # the first two columns of E hold the edge endpoints
        nb_class = gcn_graph[0].Y.shape[1]

        gcn_model = GraphAttNet(node_dim, nb_class, num_layers=1, learning_rate=0.01,
                                node_indim=-1, nb_attention=3)
        gcn_model.dropout_rate_node = 0.2
        gcn_model.dropout_rate_attention = 0.2
        gcn_model.create_model()

        with tf.Session() as session:
            session.run([gcn_model.init])
            # Train on the full graph list, then check the predicted probabilities on one graph
            gcn_model.train_lG(session, gcn_graph)
            g_proba = gcn_model.prediction_prob(session, gcn_graph_train[1])
            print(g_proba.shape)
            print(type(g_proba))
            print(gcn_graph_train[1].X.shape)
            # One probability distribution per node, over the 5 classes of this dataset
            self.assertTrue(g_proba.shape == (gcn_graph_train[1].X.shape[0], 5))
    def test_graphattnet_train_dropout(self):
        pickle_fname = 'abp_CV_fold_1_tlXlY_trn.pkl'
        gcn_graph = GCNDataset.load_transkribus_pickle(pickle_fname)
        gcn_graph_train = [gcn_graph[8], gcn_graph[18], gcn_graph[29]]

        node_dim = gcn_graph[0].X.shape[1]
        edge_dim = gcn_graph[0].E.shape[1] - 2  # the first two columns of E hold the edge endpoints
        nb_class = gcn_graph[0].Y.shape[1]

        gcn_model = GraphAttNet(node_dim, nb_class, num_layers=1, learning_rate=0.01,
                                node_indim=-1, nb_attention=3)
        gcn_model.dropout_rate_node = 0.2
        gcn_model.dropout_rate_attention = 0.2
        gcn_model.create_model()

        with tf.Session() as session:
            session.run([gcn_model.init])
            # Accuracy before training ...
            g_acc, node_acc = gcn_model.test_lG(session, gcn_graph_train)
            print('Mean Accuracy', g_acc, node_acc)
            # ... and after training on the full graph list
            gcn_model.train_lG(session, gcn_graph)
            g_acc, node_acc = gcn_model.test_lG(session, gcn_graph_train)
            print('Mean Accuracy', g_acc, node_acc)
    def test_train_ensemble_NN_model(self):
        # TODO Make a proper synthetic dataset for test purposes
        pickle_fname = 'abp_CV_fold_1_tlXlY_trn.pkl'
        gcn_graph = GCNDataset.load_transkribus_pickle(pickle_fname)
        gcn_graph_train = [gcn_graph[8], gcn_graph[18], gcn_graph[29]]

        node_dim = gcn_graph[0].X.shape[1]
        edge_dim = gcn_graph[0].E.shape[1] - 2  # the first two columns of E hold the edge endpoints
        nb_class = gcn_graph[0].Y.shape[1]

        gat_model = GraphAttNet(node_dim, nb_class, num_layers=1, learning_rate=0.01,
                                node_indim=-1, nb_attention=3)
        gat_model.dropout_rate_node = 0.2
        gat_model.dropout_rate_attention = 0.2
        gat_model.create_model()

        nb_layers = 3
        lr = 0.001
        nb_conv = 2
        ecn_model = EdgeConvNet(node_dim, edge_dim, nb_class,
                                num_layers=nb_layers, learning_rate=lr, mu=0.0,
                                node_indim=-1, nconv_edge=nb_conv)
        ecn_model.create_model()

        # Check Graphs: are we recopying the models and graph definition implicitly?
        ensemble = EnsembleGraphNN([ecn_model, gat_model])

        with tf.Session() as session:
            session.run([ensemble.models[0].init])
            # Train the ensemble for a fixed number of iterations
            for _ in range(500):
                ensemble.train_lG(session, gcn_graph_train)

            prediction = ensemble.predict_lG(session, gcn_graph_train)
            print(prediction)
            self.assertTrue(len(prediction) == len(gcn_graph_train))

            print('Ensemble Prediction')
            accs = ensemble.test_lG(session, gcn_graph_train)
            print(accs)
            print('Base Predictions')
            for m in ensemble.models:
                model_accs = m.test_lG(session, gcn_graph_train)
                print(model_accs)