# # for a, p, l in zip(batch[0].tolist(), batch[1].tolist(), batch[2].tolist()):
# #     print(voc.id2word[a], voc.id2word[p], l)
# print(time.asctime( time.localtime(time.time()) ))
#
# sys.exit()

with tf.Session() as sess:
    # Initialize every variable in the graph before any training step runs.
    sess.run(tf.global_variables_initializer())
    # Dump the graph definition for TensorBoard; `summary_writer` is also
    # presumably used later to write the summaries produced below -- the
    # writing calls are outside this view, confirm downstream.
    summary_writer = tf.summary.FileWriter(graph_saving_path, graph=sess.graph)

    # Restore from checkpoint
    # saver.restore(sess, ckpt_path)
    # sess.graph.as_default()

    # Main training loop over epochs.
    for e in range(epochs):
        # First batch of the epoch. `reader.next_batch()` appears to yield
        # an (in_words, out_words, labels) triple and to signal exhaustion
        # by returning None (see the while-condition) -- TODO confirm
        # against the reader implementation.
        batch = reader.next_batch()
        first_batch = batch  # kept around for the commented-out eval below

        # NOTE(review): no re-fetch of `batch` is visible inside this loop
        # in the shown portion of the file; verify the loop tail advances
        # the reader, otherwise this would never terminate.
        while batch is not None:

            in_words, out_words, labels = batch

            # One optimizer step; `adder_` looks like a running batch
            # counter op whose value is returned as `batch_count` --
            # NOTE(review): confirm its exact semantics.
            _, batch_count = sess.run([train_, adder_], {
                in_words_: in_words,
                out_words_: out_words,
                labels_: labels
            })

            # Every 1000 batches: compute loss and a loss summary
            # (`saveloss_` is presumably a tf.summary op -- verify) on the
            # current batch. Statement continues past this chunk.
            if batch_count % 1000 == 0:
                # in_words, out_words, labels = first_batch
                loss_val, summary, _ = sess.run([loss_, saveloss_, final_], {