def test(graph, model, args):
    """Build the evaluation pipeline over TEST-masked nodes.

    Args:
        graph: The GraphLearn graph object to query.
        model: EgoGraph-based model exposing ``forward``.
        args: Extra arguments forwarded to the project-level ``query`` helper.

    Returns:
        (iterator, accuracy_op): dataset iterator and the accuracy tensor
        computed from the model's embeddings vs. source-node labels.
    """
    # Switch global mode off training so layers (dropout etc.) run in eval mode.
    tfg.conf.training = False
    eval_query = query(graph, gl.Mask.TEST, args)
    eval_dataset = tfg.Dataset(eval_query, window=5)
    ego = eval_dataset.get_egograph('test')
    embeddings = model.forward(ego)
    acc = accuracy(embeddings, ego.src.labels)
    return eval_dataset.iterator, acc
def train(graph, model, args):
    """Build the training pipeline over TRAIN-masked nodes.

    Args:
        graph: The GraphLearn graph object to query.
        model: EgoGraph-based model exposing ``forward``.
        args: Extra arguments forwarded to the project-level ``query`` helper.

    Returns:
        (iterator, loss_op): dataset iterator and the supervised loss tensor.
    """
    # Enable training mode globally (dropout, batch-norm behavior, etc.).
    tfg.conf.training = True
    train_query = query(graph, gl.Mask.TRAIN, args)
    train_dataset = tfg.Dataset(train_query, window=5)
    ego = train_dataset.get_egograph('train')
    embeddings = model.forward(ego)
    train_loss = supervised_loss(embeddings, ego.src.labels)
    return train_dataset.iterator, train_loss
def save_node_embedding(graph, model):
    """Build the inference pipeline that emits embeddings for all 'i' nodes.

    Args:
        graph: The GraphLearn graph object to traverse.
        model: EgoGraph-based model exposing ``forward``.

    Returns:
        (iterator, ids, embeddings): dataset iterator, the source-node id
        tensor, and the corresponding embedding tensor.
    """
    # Inference: make sure training-only behavior is disabled.
    tfg.conf.training = False
    root = graph.V('i').batch(FLAGS.batch_size).alias('i')
    # FLAGS.nbrs_num is a JSON-encoded list of per-hop neighbor counts.
    hop_counts = json.loads(FLAGS.nbrs_num)
    save_query = meta_path_sample(root, 'i', hop_counts, FLAGS.sampler).values()
    save_dataset = tfg.Dataset(save_query, window=1)
    ego = save_dataset.get_egograph('i')
    embeddings = model.forward(ego)
    return save_dataset.iterator, ego.src.ids, embeddings
def train(graph, model):
    """Build the training pipeline using the data-dict (non-ego) interface.

    Args:
        graph: The GraphLearn graph object to query.
        model: Model whose ``forward`` takes a feature list and neighbor counts.

    Returns:
        (iterator, loss_op): dataset iterator and the supervised loss tensor.
    """
    tfg.conf.training = True
    train_query = query(graph, gl.Mask.TRAIN)
    train_dataset = tfg.Dataset(train_query, window=5)
    data = train_dataset.get_data_dict()
    handler = tfg.FeatureHandler(
        'feature_handler',
        train_query.get_node("train").decoder.feature_spec)
    feature_list = reformat_node_feature(data, train_query.list_alias(), handler)
    # NOTE(review): `nbrs_num` is not defined in this function — presumably a
    # module-level value parsed from flags; confirm it is in scope at call time.
    embeddings = model.forward(feature_list, nbrs_num)
    train_loss = supervised_loss(embeddings, data['train'].labels)
    return train_dataset.iterator, train_loss
def test(graph, model):
    """Build the evaluation pipeline using the data-dict (non-ego) interface.

    Args:
        graph: The GraphLearn graph object to query.
        model: Model whose ``forward`` takes a feature list and neighbor counts.

    Returns:
        (iterator, accuracy_op, ids, labels, probs): dataset iterator, the
        accuracy tensor, test-node ids, their labels, and softmax probabilities.
    """
    tfg.conf.training = False
    eval_query = query(graph, gl.Mask.TEST)
    eval_dataset = tfg.Dataset(eval_query, window=5)
    data = eval_dataset.get_data_dict()
    handler = tfg.FeatureHandler(
        'feature_handler',
        eval_query.get_node("test").decoder.feature_spec)
    feature_list = reformat_node_feature(data, eval_query.list_alias(), handler)
    # NOTE(review): `nbrs_num` is not defined in this function — presumably a
    # module-level value parsed from flags; confirm it is in scope at call time.
    embeddings = model.forward(feature_list, nbrs_num)
    acc = accuracy(embeddings, data['test'].labels)
    return (eval_dataset.iterator,
            acc,
            data['test'].ids,
            data['test'].labels,
            tf.nn.softmax(embeddings))
def train(graph, model, config):
    """Build the unsupervised (src/dst/negative) training pipeline.

    Args:
        graph: The GraphLearn graph object to query.
        model: EgoGraph-based model exposing ``forward``.
        config: Dict providing at least a 'temperature' entry for the loss.

    Returns:
        (iterator, loss_op): dataset iterator and the sampled-softmax loss.
    """
    tfg.conf.training = True
    train_query = query(graph, config)
    train_dataset = tfg.Dataset(train_query, window=5)
    # Three ego-graphs: anchor, positive neighbor, and sampled negatives.
    src_graph = train_dataset.get_egograph('src')
    dst_graph = train_dataset.get_egograph('dst')
    neg_graph = train_dataset.get_egograph('neg_dst')
    src_vec = model.forward(src_graph)
    dst_vec = model.forward(dst_graph)
    neg_vec = model.forward(neg_graph)
    # use sampled softmax loss with temperature.
    train_loss = tfg.unsupervised_softmax_cross_entropy_loss(
        src_vec, dst_vec, neg_vec, temperature=config['temperature'])
    return train_dataset.iterator, train_loss
def test(g, model, predictor, config, edge_type='test'):
    """Build the link-prediction evaluation pipeline over ``edge_type`` edges.

    Args:
        g: The GraphLearn graph object to traverse.
        model: BatchGraph-based model exposing ``forward(batchgraph=...)``.
        predictor: Callable scoring the element-wise product of src/dst
            embeddings into logits.
        config: Dict with 'batch_size', 'nbrs_num' and 'strut_label_spec'.
        edge_type: Edge set to evaluate on; defaults to 'test'.

    Returns:
        (iterator, logits): dataset iterator and the predicted edge logits.
    """
    tfg.conf.training = False
    seed = g.E(edge_type).batch(config['batch_size'])
    # One hop of full-neighbor sampling on each endpoint of the edge.
    src_nodes = seed.outV().alias(tfg.SubKeys.POS_SRC)
    src_nodes.outV('train').sample(config['nbrs_num']).by('full').alias('src_hop1')
    dst_nodes = seed.inV().alias(tfg.SubKeys.POS_DST)
    dst_nodes.outV('train').sample(config['nbrs_num']).by('full').alias('dst_hop1')
    eval_query = seed.values()
    eval_dataset = tfg.Dataset(
        eval_query,
        induce_func=induce_graph_cn_with_edge,
        induce_additional_spec=config['strut_label_spec'])
    pos_graph = eval_dataset.get_batchgraph(tfg.SubKeys.POS_SRC)
    pos_src, pos_dst = model.forward(batchgraph=pos_graph)
    logits = predictor(pos_src * pos_dst)
    return eval_dataset.iterator, logits
def train(g, model, config):
    """Build the link-prediction training pipeline with negative sampling.

    Args:
        g: The GraphLearn graph object to traverse.
        model: BatchGraph-based model exposing ``forward(batchgraph=...)``.
        config: Dict with 'batch_size' and 'nbrs_num'.

    Returns:
        (iterator, loss_op): dataset iterator and the sigmoid cross-entropy
        loss over positive vs. negative edge scores.
    """
    # FIX: every sibling pipeline builder sets tfg.conf.training explicitly;
    # this one previously did not, so it silently inherited whatever mode the
    # last-built pipeline left behind (e.g. False after building a test graph).
    tfg.conf.training = True
    seed = g.E('train').batch(config['batch_size']).shuffle(traverse=True)
    # One hop of full-neighbor sampling on each endpoint of the positive edge.
    src = seed.outV().alias(tfg.SubKeys.POS_SRC)
    src.outV('train').sample(config['nbrs_num']).by('full').alias('src_hop1')
    dst = seed.inV().alias(tfg.SubKeys.POS_DST)
    dst.outV('train').sample(config['nbrs_num']).by('full').alias('dst_hop1')
    # One random negative destination per source, plus its neighborhood.
    src.outNeg('train').sample(1).by('random').alias(tfg.SubKeys.NEG_DST).\
        outV('train').sample(config['nbrs_num']).by('full').alias('neg_hop1')
    query = seed.values()
    dataset = tfg.Dataset(query, induce_func=induce_graph_with_edge)
    pos_graph = dataset.get_batchgraph(tfg.SubKeys.POS_SRC)
    neg_graph = dataset.get_batchgraph(tfg.SubKeys.NEG_DST)
    pos_src, pos_dst = model.forward(batchgraph=pos_graph)
    neg_src, neg_dst = model.forward(batchgraph=neg_graph)
    # Score each edge by the dot product of its endpoint embeddings.
    pos_h = tf.reduce_sum(pos_src * pos_dst, axis=-1)
    neg_h = tf.reduce_sum(neg_src * neg_dst, axis=-1)
    loss = tfg.sigmoid_cross_entropy_loss(pos_h, neg_h)
    return dataset.iterator, loss
def train(g, model, predictor, config):
    """Build the predictor-based link-prediction training pipeline.

    Args:
        g: The GraphLearn graph object to traverse.
        model: BatchGraph-based model exposing ``forward(batchgraph=...)``.
        predictor: Callable scoring the element-wise product of src/dst
            embeddings into logits.
        config: Dict with 'batch_size', 'nbrs_num' and 'strut_label_spec'.

    Returns:
        (iterator, loss_op): dataset iterator and the sigmoid cross-entropy
        loss over positive vs. negative edge scores.
    """
    tfg.conf.training = True
    seed = g.E('train').batch(config['batch_size']).shuffle(traverse=True)
    # One hop of full-neighbor sampling on each endpoint of the positive edge.
    src_nodes = seed.outV().alias(tfg.SubKeys.POS_SRC)
    src_nodes.outV('train').sample(config['nbrs_num']).by('full').alias('src_hop1')
    dst_nodes = seed.inV().alias(tfg.SubKeys.POS_DST)
    dst_nodes.outV('train').sample(config['nbrs_num']).by('full').alias('dst_hop1')
    # One random negative destination per source, plus its neighborhood.
    negatives = src_nodes.outNeg('train').sample(1).by('random').alias(tfg.SubKeys.NEG_DST)
    negatives.outV('train').sample(config['nbrs_num']).by('full').alias('neg_hop1')
    train_query = seed.values()
    train_dataset = tfg.Dataset(
        train_query,
        induce_func=induce_graph_cn_with_edge,
        induce_additional_spec=config['strut_label_spec'])
    pos_graph = train_dataset.get_batchgraph(tfg.SubKeys.POS_SRC)
    neg_graph = train_dataset.get_batchgraph(tfg.SubKeys.NEG_DST)
    pos_src, pos_dst = model.forward(batchgraph=pos_graph)
    neg_src, neg_dst = model.forward(batchgraph=neg_graph)
    pos_h = predictor(pos_src * pos_dst)
    neg_h = predictor(neg_src * neg_dst)
    # train loss
    train_loss = tfg.sigmoid_cross_entropy_loss(pos_h, neg_h)
    return train_dataset.iterator, train_loss