def create_model(session, reverse_target_vocab_table, is_training):
    """Create the BiGRU model and initialize or load parameters in session.

    Args:
        session: TensorFlow session used to restore variables or run the
            global initializer.
        reverse_target_vocab_table: lookup table passed through to the model
            (maps target ids back to tokens; used by the model for decoding).
        is_training: whether the model graph is built in training mode.

    Returns:
        A ``Model.BiGRUModel`` whose parameters are either restored from a
        checkpoint or freshly initialized.
    """
    model = Model.BiGRUModel(
        FLAGS.doc_vocab_size,
        FLAGS.sum_vocab_size,
        _buckets,
        FLAGS.size,
        FLAGS.embsize,
        FLAGS.max_gradient,
        FLAGS.batch_size,
        FLAGS.beam_size,
        reverse_target_vocab_table,
        is_training,
        FLAGS.learning_rate)

    # An explicitly supplied --checkpoint path takes priority over the
    # latest checkpoint recorded in the training directory.
    if FLAGS.checkpoint != "":
        ckpt = FLAGS.checkpoint
    else:
        ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
        if ckpt:
            ckpt = ckpt.model_checkpoint_path

    if ckpt and tf.train.checkpoint_exists(ckpt):
        # Lazy %-style args so the message is only formatted when the
        # record is actually emitted (fixes eager "%" formatting).
        logging.info("Reading model parameters from %s", ckpt)
        model.saver.restore(session, ckpt)
    else:
        logging.info("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model