"""Evaluate a trained DSSM model on the test set (TensorFlow 1.x session API)."""
import os
import sys

# Make the package root importable when this script is run directly.
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))

import tensorflow as tf

from qaPairsRelationClassification.DSSM.model import DSSM
from qaPairsRelationClassification.utils.load_data import load_char_data

# Work around "duplicate OpenMP runtime" crashes (common with MKL on macOS).
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"

# Load the full character-level test set (data_size=None -> no truncation).
# p/h are the two text sides of each pair, y the label.
p, h, y = load_char_data('data/test.csv', data_size=None)

model = DSSM()
saver = tf.train.Saver()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Restore the checkpoint saved after epoch 49.
    saver.restore(sess, '../output/dssm/dssm_49.ckpt')
    loss, acc = sess.run(
        [model.loss, model.acc],
        feed_dict={
            model.p: p,
            model.h: h,
            model.y: y,
            model.keep_prob: 1,  # disable dropout at evaluation time
        })
    print('loss: ', loss, ' acc:', acc)
"""Evaluate a trained ABCNN model on the test set (TensorFlow 1.x session API)."""
import os
import sys

# Make the package root importable when this script is run directly.
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))

import tensorflow as tf

from qaPairsRelationClassification.ABCNN_V1.model import ABCNN
from qaPairsRelationClassification.utils.load_data import load_char_data

# Work around "duplicate OpenMP runtime" crashes (common with MKL on macOS).
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
# Restrict TensorFlow to these GPU devices.
os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'

# Load the full character-level test set (data_size=None -> no truncation).
# NOTE(review): this script reads from 'input/', the sibling DSSM script from
# 'data/' — confirm which directory layout is current.
p, h, y = load_char_data('input/test.csv', data_size=None)

# Both boolean flags enabled; their meaning is defined by the ABCNN model
# (presumably the two attention variants — verify against model.py).
model = ABCNN(True, True)
saver = tf.train.Saver()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Restore the checkpoint saved after epoch 23.
    saver.restore(sess, '../output/abcnn/abcnn_23.ckpt')
    loss, acc = sess.run(
        [model.loss, model.acc],
        feed_dict={
            model.p: p,
            model.h: h,
            model.y: y,
            model.keep_prob: 1,  # disable dropout at evaluation time
        })
    print('loss: ', loss, ' acc:', acc)
"""Training setup for the ABCNN model (TensorFlow 1.x feedable-Dataset pipeline).

Builds placeholders and an initializable iterator so the same graph can be
re-fed with new arrays; the training loop follows below (outside this chunk).
"""
import os
import sys

# Make the package root importable when this script is run directly.
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))

import tensorflow as tf

from qaPairsRelationClassification.ABCNN_V1 import args
from qaPairsRelationClassification.ABCNN_V1.model import ABCNN
from qaPairsRelationClassification.utils.load_data import load_char_data

# Full training set; a fixed 1000-example slice of dev for periodic eval.
p, h, y = load_char_data('data/train.csv', data_size=None)
p_eval, h_eval, y_eval = load_char_data('data/dev.csv', data_size=1000)

# Placeholders feed the Dataset so the iterator can be (re)initialized
# with the numpy arrays above instead of baking them into the graph.
p_holder = tf.placeholder(dtype=tf.int32, shape=(None, args.seq_length), name='p')
h_holder = tf.placeholder(dtype=tf.int32, shape=(None, args.seq_length), name='h')
y_holder = tf.placeholder(dtype=tf.int32, shape=None, name='y')

dataset = tf.data.Dataset.from_tensor_slices((p_holder, h_holder, y_holder))
dataset = dataset.batch(args.batch_size).repeat(args.epochs)
iterator = dataset.make_initializable_iterator()
next_element = iterator.get_next()

# Both boolean flags enabled; their meaning is defined by the ABCNN model
# (presumably the two attention variants — verify against model.py).
model = ABCNN(True, True)
saver = tf.train.Saver()
config = tf.ConfigProto()
"""Training setup for the CONVNET model (TensorFlow 1.x feedable-Dataset pipeline).

Builds placeholders and an initializable iterator so the same graph can be
re-fed with new arrays; the training loop follows below (outside this chunk).
"""
import os
import sys

# Make the package root importable when this script is run directly.
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))

import tensorflow as tf

from qaPairsRelationClassification.CONVNET import args
from qaPairsRelationClassification.CONVNET.model import CONVNET
from qaPairsRelationClassification.utils.load_data import load_char_data

# Full training set; one batch-sized slice of dev for periodic eval.
# NOTE(review): this script reads from 'input/', the ABCNN trainer from
# 'data/' — confirm which directory layout is current.
p, h, y = load_char_data('input/train.csv', data_size=None)
p_eval, h_eval, y_eval = load_char_data('input/dev.csv', data_size=args.batch_size)

# Placeholders feed the Dataset so the iterator can be (re)initialized
# with the numpy arrays above instead of baking them into the graph.
p_holder = tf.placeholder(dtype=tf.int32, shape=(None, args.seq_length), name='p')
h_holder = tf.placeholder(dtype=tf.int32, shape=(None, args.seq_length), name='h')
y_holder = tf.placeholder(dtype=tf.int32, shape=None, name='y')

dataset = tf.data.Dataset.from_tensor_slices((p_holder, h_holder, y_holder))
dataset = dataset.batch(args.batch_size).repeat(args.epochs)
iterator = dataset.make_initializable_iterator()
next_element = iterator.get_next()

model = CONVNET()
saver = tf.train.Saver()
config = tf.ConfigProto()