if "," in FLAGS.model: FLAGS.model = FLAGS.model.split(",") FLAGS.observed_sets = FLAGS.observed_sets.split(",") assert (not FLAGS.batch_train or FLAGS.ckpt_its <= -1 ), "Do not define checkpoint iterations when doing batch training." if FLAGS.batch_train: print("Batch training!") random.seed(FLAGS.random_seed) tf.set_random_seed(FLAGS.random_seed) kb = load_fb15k(FLAGS.fb15k_dir, with_text=not FLAGS.kb_only) if FLAGS.subsample_kb > 0: kb = subsample_kb(kb, FLAGS.subsample_kb) if FLAGS.type_constraint: print("Loading type constraints...") load_fb15k_type_constraints(kb, os.path.join(FLAGS.fb15k_dir, "types")) num_kb = 0 num_text = 0 for f in kb.get_all_facts(): if f[2] == "train": num_kb += 1 elif f[2] == "train_text": num_text += 1
import os

import tensorflow as tf

from data.load_fb15k237 import load_fb15k, load_fb15k_type_constraints
from model.models import *

# data loading specifics
tf.app.flags.DEFINE_string('fb15k_dir', None,
                           'Data directory containing the FB15k dataset files.')
# model parameters
tf.app.flags.DEFINE_integer('size', 10, 'Size of the model hidden dimension.')
# Evaluation
tf.app.flags.DEFINE_string("model_path", None, "Path to the trained model.")
tf.app.flags.DEFINE_integer("batch_size", 20000,
                            "Number of examples in each batch.")
tf.app.flags.DEFINE_boolean("type_constraint", False,
                            "Use type constraints during sampling.")

FLAGS = tf.app.flags.FLAGS

kb = load_fb15k(FLAGS.fb15k_dir, with_text=False)
print("Loaded data.")
if FLAGS.type_constraint:
    print("Loading type constraints...")
    load_fb15k_type_constraints(kb, os.path.join(FLAGS.fb15k_dir, "types"))

with tf.Session() as sess:
    model = DistMult(kb, FLAGS.size, FLAGS.batch_size, is_train=False)
    # os.path.join with a single argument is a no-op, so restore directly
    # from the given checkpoint path.
    model.saver.restore(sess, FLAGS.model_path)
    print("Loaded model.")
    # Evaluate on all test triples (facts of arity 2); use a list
    # comprehension so the code also works under Python 3, where map
    # returns a lazy iterator.
    eval_triples(sess, kb, model,
                 [x[0] for x in kb.get_all_facts_of_arity(2, "test")],
                 verbose=True)
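# The eval_triples implementation is not shown here. For reference, a minimal
# sketch of the filtered ranking protocol commonly used for FB15k link
# prediction (mean rank / Hits@10 over corrupted objects) could look as
# follows; score_fn, the triple layout, and all names below are illustrative
# assumptions, not this repository's API.

def filtered_object_ranking(score_fn, test_triples, all_entities, known_triples):
    """Filtered mean rank and Hits@10 for object prediction."""
    known = set(known_triples)
    ranks = []
    for s, r, o in test_triples:
        true_score = score_fn(s, r, o)
        rank = 1
        for cand in all_entities:
            if cand == o or (s, r, cand) in known:
                continue  # filtered setting: skip other known true triples
            if score_fn(s, r, cand) > true_score:
                rank += 1
        ranks.append(rank)
    mean_rank = sum(ranks) / float(len(ranks))
    hits_at_10 = sum(1 for rk in ranks if rk <= 10) / float(len(ranks))
    return mean_rank, hits_at_10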
# Training setup (fragment): parse flags, seed the RNGs, and load the KB.
FLAGS = tf.app.flags.FLAGS

if "," in FLAGS.model:
    FLAGS.model = FLAGS.model.split(",")
FLAGS.observed_sets = FLAGS.observed_sets.split(",")

assert not FLAGS.batch_train or FLAGS.ckpt_its <= -1, \
    "Do not define checkpoint iterations when doing batch training."
if FLAGS.batch_train:
    print("Batch training!")

random.seed(FLAGS.random_seed)
tf.set_random_seed(FLAGS.random_seed)

kb = load_fb15k(FLAGS.fb15k_dir, with_text=not FLAGS.kb_only)
if FLAGS.subsample_kb > 0:
    kb = subsample_kb(kb, FLAGS.subsample_kb)
if FLAGS.type_constraint:
    print("Loading type constraints...")
    load_fb15k_type_constraints(kb, os.path.join(FLAGS.fb15k_dir, "types"))

# Count KB facts vs. textual-mention facts in the training split.
num_kb = 0
num_text = 0
for f in kb.get_all_facts():
    if f[2] == "train":
        num_kb += 1
    elif f[2] == "train_text":
        num_text += 1
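# To illustrate the type_constraint flag: a minimal sketch of
# type-constrained negative sampling on toy data. All names here are
# illustrative assumptions, not the repository's API.
import random

entities_by_type = {
    "person": ["alice", "bob", "carol"],
    "city": ["berlin", "tokyo", "lima"],
}
all_entities = [e for pool in entities_by_type.values() for e in pool]
object_type_of = {"born_in": "city", "knows": "person"}

def corrupt_object(triple, use_type_constraint=True):
    """Replace the object with a random other entity; with the type
    constraint, candidates are limited to the relation's object type."""
    s, r, o = triple
    pool = entities_by_type.get(object_type_of.get(r)) if use_type_constraint else None
    candidates = [e for e in (pool or all_entities) if e != o]
    return (s, r, random.choice(candidates))

print(corrupt_object(("alice", "born_in", "berlin")))  # corrupted object stays a city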