def main(_):
  """Trains Language Model."""
  tf.logging.set_verbosity(tf.logging.INFO)
  # Place variables on parameter servers when running distributed.
  with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
    lm = graphs.get_model()
    train_op, loss, global_step = lm.language_model_training()
    train_utils.run_training(train_op, loss, global_step)
def main(_):
  """Trains LSTM classification model."""
  tf.logging.set_verbosity(tf.logging.INFO)
  # Place variables on parameter servers when running distributed.
  with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
    classifier = graphs.get_model()
    train_op, loss, global_step = classifier.classifier_training()
    # Restore pretrained (e.g. language-model) weights before training.
    train_utils.run_training(
        train_op,
        loss,
        global_step,
        variables_to_restore=classifier.pretrained_variables,
        pretrained_model_dir=FLAGS.pretrained_model_dir)
def main(_):
  """Builds the eval graph and evaluates, looping unless run_once is set."""
  tf.logging.set_verbosity(tf.logging.INFO)
  tf.gfile.MakeDirs(FLAGS.eval_dir)

  tf.logging.info('Building eval graph...')
  eval_ops, moving_averaged_variables = graphs.get_model().eval_graph(
      FLAGS.eval_data)

  # Saver restores the moving-averaged variables for evaluation.
  saver = tf.train.Saver(moving_averaged_variables)
  writer = tf.summary.FileWriter(FLAGS.eval_dir, graph=tf.get_default_graph())

  while True:
    run_eval(eval_ops, writer, saver)
    if FLAGS.run_once:
      break
    time.sleep(FLAGS.eval_interval_secs)
def main(_):
  """Repeatedly evaluates the model; exits after one pass if run_once."""
  tf.logging.set_verbosity(tf.logging.INFO)
  tf.gfile.MakeDirs(FLAGS.eval_dir)

  tf.logging.info('Building eval graph...')
  graph_output = graphs.get_model().eval_graph(FLAGS.eval_data)
  eval_ops, moving_averaged_variables = graph_output

  # Evaluation restores the moving-averaged shadow variables.
  checkpoint_saver = tf.train.Saver(moving_averaged_variables)
  summary_writer = tf.summary.FileWriter(
      FLAGS.eval_dir, graph=tf.get_default_graph())

  while True:
    run_eval(eval_ops, summary_writer, checkpoint_saver)
    if FLAGS.run_once:
      break
    time.sleep(FLAGS.eval_interval_secs)