import tensorflow as tf
from seq2seq import Seq2seq
from data_handler import Data
from train import FLAGS


def experiment_fn(run_config, params):
    data = Data(FLAGS)
    data.initialize_word_vectors()

    model = Seq2seq(data.vocab_size, FLAGS, data.embeddings_mat)
    estimator = tf.estimator.Estimator(
        model_fn=model.make_graph,
        # model_dir=FLAGS.model_dir,
        config=run_config,
        params=FLAGS)

    # Each input_fn builds placeholder inputs for the graph; the paired
    # feed_fn supplies batches at run time through a FeedFnHook.
    train_input_fn, train_feed_fn = data.make_input_fn('train')
    eval_input_fn, eval_feed_fn = data.make_input_fn('test')

    # Tensors to log during training; extra debug tensors stay commented out.
    print_vars = [
        'source', 'predict'
        # 'decoder_output',
        # 'actual'
    ]
    # Print the source and predicted sequences every print_every steps,
    # rendered as readable text by the data formatter.
    print_inputs = tf.train.LoggingTensorHook(print_vars,
                                              every_n_iter=FLAGS.print_every,
                                              formatter=data.get_formatter(
                                                  ['source', 'predict']))

    # Bundle the estimator, input functions, and hooks into an Experiment.
    experiment = tf.contrib.learn.Experiment(
        estimator=estimator,
        train_input_fn=train_input_fn,
        eval_input_fn=eval_input_fn,
        train_steps=FLAGS.iterations,
        min_eval_frequency=FLAGS.print_every,
        train_monitors=[tf.train.FeedFnHook(train_feed_fn), print_inputs],
        eval_hooks=[tf.train.FeedFnHook(eval_feed_fn)],
        eval_steps=10)
    return experiment
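
The Experiment returned above is normally handed to learn_runner, which drives
the train/eval schedule. A minimal usage sketch, assuming the same FLAGS object
doubles as hparams (the 'train_and_evaluate' schedule is the standard
tf.contrib.learn option, not something shown in the original snippet):

from tensorflow.contrib.learn import learn_runner

run_config = tf.contrib.learn.RunConfig().replace(model_dir=FLAGS.model_dir)
learn_runner.run(experiment_fn=experiment_fn,
                 run_config=run_config,
                 schedule='train_and_evaluate',
                 hparams=FLAGS)
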
Example #2
import tensorflow as tf
from seq2seq import Seq2seq
from data_handler import Data
from train import FLAGS

from tensorflow.python import debug as tf_debug
from tensorflow.contrib.learn import learn_runner

tf.logging.set_verbosity(tf.logging.INFO)

# Point the estimator's checkpoints and summaries at the experiment directory.
run_config = tf.contrib.learn.RunConfig()
run_config = run_config.replace(model_dir=FLAGS.experiment_dir)

data = Data(FLAGS)
data.initialize_word_vectors()

# Build the placeholder input graph; the feed_fn is unused here because
# prediction feeds phrases directly through predict_feed_fn below.
input_fn, _ = data.make_input_fn()
# Map a sequence of token ids back to a space-joined string.
format_fn = lambda seq: ' '.join([data.rev_vocab.get(x, '<UNK>') for x in seq])

model = Seq2seq(data.vocab_size, FLAGS, data.embeddings_mat)
estimator = tf.estimator.Estimator(model_fn=model.make_graph,
                                   config=run_config,
                                   params=FLAGS)


def predict_feed_fn(phrase):
    # Tokenize the phrase and terminate it with the end-of-sequence token.
    tokens = data.tokenize_and_map(phrase, mode='test') + [data.END_TOKEN]

    def feed_fn():
        # Feed the single tokenized phrase into the 'source_in' placeholder.
        return {'source_in:0': [tokens]}

    return feed_fn
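
A hedged sketch of how this feed function might be used for prediction,
assuming the script continues along these lines (the sample phrase is
illustrative, and each yielded prediction is assumed to be a sequence of
token ids that format_fn can render):

predictions = estimator.predict(
    input_fn=input_fn,
    hooks=[tf.train.FeedFnHook(predict_feed_fn('how are you'))])
print(format_fn(next(predictions)))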