# Example #1 (score: 0)
def create_model(session, vocab_size, forward_only):
    """Build an NLCModel and load or initialize its parameters.

    Restores weights from the latest checkpoint state in FLAGS.train_dir when
    one is found; otherwise initializes all variables from scratch.

    Args:
        session: active TensorFlow session used for restore/initialization.
        vocab_size: size of the model vocabulary.
        forward_only: if True, build the model for inference only.

    Returns:
        The constructed NLCModel, restored or freshly initialized.
    """
    model = nlc_model.NLCModel(
        vocab_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
        FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.dropout,
        forward_only=forward_only)
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    # NOTE(review): restore is attempted whenever checkpoint *state* exists,
    # without verifying the checkpoint file itself is present (a
    # tf.gfile.Exists(ckpt.model_checkpoint_path) guard was removed here).
    # Confirm the file is guaranteed to exist, or re-add the guard.
    if ckpt:
        print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
        model.saver.restore(session, ckpt.model_checkpoint_path)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model
# Example #2 (score: 0)
def create_model(session, vocab_size, forward_only):
  """Build an NLCModel and load or initialize its parameters.

  Restores weights from the checkpoint in FLAGS.train_dir when both the
  checkpoint state and the checkpoint file exist; otherwise initializes all
  variables from scratch and logs the parameter count.

  Args:
    session: active TensorFlow session used for restore/initialization.
    vocab_size: size of the model vocabulary.
    forward_only: if True, build the model for inference only.

  Returns:
    The constructed NLCModel, restored or freshly initialized.
  """
  model = nlc_model.NLCModel(
      vocab_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
      FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.dropout,
      forward_only=forward_only, optimizer=FLAGS.optimizer)
  ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
  if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path):
    # Lazy %-args: the message is only formatted if this level is enabled.
    logging.info("Reading model parameters from %s", ckpt.model_checkpoint_path)
    model.saver.restore(session, ckpt.model_checkpoint_path)
  else:
    logging.info("Created model with fresh parameters.")
    # tf.initialize_all_variables() is deprecated (removed after TF 0.12);
    # use the modern equivalent, consistent with the other create_model variants.
    session.run(tf.global_variables_initializer())
    logging.info("Num params: %d",
                 sum(v.get_shape().num_elements() for v in tf.trainable_variables()))
  return model
# Example #3 (score: 0)
def create_model(session, vocab_size, forward_only):
    """Build an NLCModel and restore it from the latest "best" checkpoint.

    Scans FLAGS.train_dir for files matching ``best.ckpt-<step>`` (ignoring
    ``.meta`` companions) and restores from the one with the highest step.

    Args:
        session: active TensorFlow session used for the restore.
        vocab_size: size of the model vocabulary.
        forward_only: if True, build the model for inference only.

    Returns:
        The constructed NLCModel with restored parameters.

    Raises:
        IOError: if no ``best.ckpt-*`` checkpoint is found in FLAGS.train_dir,
            or if the selected checkpoint file does not exist. (Previously
            these failure paths were bare ``assert`` statements, which are
            stripped under ``python -O``.)
    """
    model = nlc_model.NLCModel(vocab_size,
                               FLAGS.size,
                               FLAGS.num_layers,
                               FLAGS.max_gradient_norm,
                               FLAGS.batch_size,
                               FLAGS.learning_rate,
                               FLAGS.learning_rate_decay_factor,
                               FLAGS.dropout,
                               forward_only=forward_only)
    # Candidate checkpoint files, skipping the ".meta" graph companions.
    ckpt_paths = [f for f in os.listdir(FLAGS.train_dir)
                  if re.search(r"best\.ckpt-\d+", f) and not f.endswith("meta")]
    if not ckpt_paths:
        raise IOError("no best.ckpt-* checkpoint found in %s" % FLAGS.train_dir)
    # Pick the checkpoint with the highest global step (numeric suffix).
    latest = max(ckpt_paths, key=lambda f: int(f.split("-")[-1]))
    ckpt_path = os.path.join(FLAGS.train_dir, latest)
    if not tf.gfile.Exists(ckpt_path):
        raise IOError("checkpoint file %s does not exist" % ckpt_path)
    print("Reading model parameters from %s" % ckpt_path)
    model.saver.restore(session, ckpt_path)
    return model