import sys
import traceback
from datetime import datetime

import tensorflow as tf

# VocabUtil, NMTModel and get_answer are project-local helpers assumed to be
# defined or imported elsewhere in this code base.


def main(text, checkpoint_path):
    q_vocabutil = VocabUtil("./data/xhj_q.vocab")
    a_vocabutil = VocabUtil("./data/xhj_a.vocab")
    tf.reset_default_graph()
    # Define the recurrent neural network model used for training.
    with tf.variable_scope("nmt_model", reuse=None):
        model = NMTModel()
    print(datetime.now(), text)
    # Convert the test sentence to word IDs using the question vocabulary.
    text_ids = q_vocabutil.get_ids_word(text)
    print(datetime.now(), text_ids)
    # Build the computation graph needed for decoding.
    output_op = model.inference(text_ids)
    sess = tf.Session()
    saver = tf.train.Saver()
    saver.restore(sess, checkpoint_path)
    # Run decoding to get the output word IDs.
    output_ids = sess.run(output_op)
    print(datetime.now(), output_ids)
    # Convert the output IDs back to text using the answer vocabulary.
    output_text = a_vocabutil.get_text(output_ids)
    # Print the decoded answer.
    print(datetime.now(), output_text)
    sess.close()
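# Usage sketch for main() above (the checkpoint path below is a placeholder, an
# assumption, not a path taken from this listing). Each call rebuilds the graph
# and restores the checkpoint, so it is only suitable for one-off tests:
#
#     main("你好", "./model/xhj.ckpt")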
def main(test_text, checkpoint_path):
    tf.reset_default_graph()
    # Define the recurrent neural network model used for training.
    with tf.variable_scope("nmt_model", reuse=None):
        model = NMTModel()
    # Define a test sentence.
    # test_en_text = "This is a test . <eos>"
    # print(test_en_text)
    vocabutil = VocabUtil("./data/xhj.vocab")
    # Convert the test sentence to word IDs using the vocabulary.
    test_ids = vocabutil.get_ids_word(test_text)
    print(test_ids)
    # Build the computation graph needed for decoding.
    output_op = model.inference(test_ids)
    sess = tf.Session()
    saver = tf.train.Saver()
    saver.restore(sess, checkpoint_path)
    # Run decoding to get the output word IDs.
    output_ids = sess.run(output_op)
    print(output_ids)
    # Convert the output IDs back to text using the vocabulary.
    answer = vocabutil.get_text(output_ids)
    # Print the decoded answer.
    print(datetime.now(), answer.encode('utf8').decode(sys.stdout.encoding))
    sess.close()
class TestModel():
    def __init__(self, vocabfile, checkpoint_path):
        self.vocabutil = VocabUtil(vocabfile)
        tf.reset_default_graph()
        # Define the recurrent neural network model used for training.
        with tf.variable_scope("nmt_model", reuse=tf.AUTO_REUSE):
            self.model = NMTModel()
        # Build the computation graph needed for decoding.
        text_ids = self.vocabutil.get_ids_word("你是谁")
        output_op = self.model.inference(text_ids)
        # self.saver = tf.train.import_meta_graph(checkpoint_path + ".meta")
        self.sess = tf.Session()
        self.saver = tf.train.Saver()
        self.saver.restore(self.sess, checkpoint_path)
        # self.sess.run(tf.global_variables_initializer())
        answer = self.vocabutil.get_text(self.sess.run(output_op))
        print(answer)
        # print(self.predict("你是谁"))

    def close(self):
        self.sess.close()

    def predict(self, q):
        # tf.reset_default_graph()
        text_ids = self.vocabutil.get_ids_word(q)
        output_op = self.model.inference(text_ids)
        # Run decoding and convert the resulting word IDs back to text.
        output_ids = self.sess.run(output_op)
        answer = self.vocabutil.get_text(output_ids)
        return answer
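# Usage sketch for TestModel (the checkpoint path is a placeholder, an
# assumption). Unlike main(), the graph, session and checkpoint are loaded once
# in __init__, so repeated predict() calls skip the slow restore step:
#
#     tm = TestModel("./data/xhj.vocab", "./model/xhj.ckpt")
#     print(tm.predict("你好"))
#     tm.close()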
def talk_word(checkpoint_path):
    vocabutil = VocabUtil("./data/xhj.vocab")          # time consumption: 00.002783
    try:
        while True:
            q = _prompt_input()
            if q.lower() == 'exit':
                break
            ids = vocabutil.get_ids_word(q)            # time consumption: 00.000027
            answer = get_answer(ids, checkpoint_path)  # time consumption: 03.158722
            answer = vocabutil.get_text(answer)        # time consumption: 0:00:00.000066
            print(answer)
    except Exception:
        traceback.print_exc()
    except KeyboardInterrupt:
        print("Ctrl+c exit.")
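# _prompt_input() is referenced above but not defined in this listing. A minimal
# sketch of what it might look like (an assumption, not the project's actual helper):
def _prompt_input():
    # Read one question from the console for the interactive loop.
    return input("> ").strip()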