# Exemplo n.º 1
# 0
def create_model(session, sent_vocab_size, slot_vocab_size, intent_vocab_size):
    """Build paired train/eval models over shared weights; restore or init them.

    Both models live in the "model_tylh" variable scope; the second is built
    with reuse=True so it shares every variable with the first and differs
    only in forward_only (no training ops).

    Args:
        session: active tf.Session used for restore/initialization.
        sent_vocab_size: size of the input-sentence vocabulary.
        slot_vocab_size: size of the slot-label vocabulary.
        intent_vocab_size: size of the intent-label vocabulary.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    # Keyword arguments common to both the training and the evaluation model.
    shared_kwargs = dict(
        learning_rate=FLAGS.learning_rate,
        alpha=FLAGS.alpha,
        dropout_keep_prob=FLAGS.dropout_keep_prob,
        use_lstm=True,
    )

    with tf.variable_scope("model_tylh", reuse=None):
        model_train = multi_task_model.MultiTaskModel(
            sent_vocab_size, slot_vocab_size, intent_vocab_size,
            FLAGS.max_sequence_length, FLAGS.word_embedding_size,
            FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm,
            FLAGS.batch_size, forward_only=False, **shared_kwargs)
    with tf.variable_scope("model_tylh", reuse=True):
        model_test = multi_task_model.MultiTaskModel(
            sent_vocab_size, slot_vocab_size, intent_vocab_size,
            FLAGS.max_sequence_length, FLAGS.word_embedding_size,
            FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm,
            FLAGS.batch_size, forward_only=True, **shared_kwargs)

    # Warm-start from the latest checkpoint when one is on disk; otherwise
    # initialize every variable from scratch.
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path):
        print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
        model_train.saver.restore(session, ckpt.model_checkpoint_path)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model_train, model_test
def create_model(session, source_vocab_size, target_vocab_size, label_vocab_size):
  """Create paired train/test models and initialize or load parameters.

  The test model is built with reuse=True in the same "model" variable
  scope, so it shares all weights with the training model.

  Args:
    session: active tf.Session used for restore/initialization.
    source_vocab_size: size of the source (word) vocabulary.
    target_vocab_size: size of the target (tag) vocabulary.
    label_vocab_size: size of the intent/label vocabulary.

  Returns:
    (model_train, model_test) tuple of MultiTaskModel instances.
  """
  with tf.variable_scope("model", reuse=None):
    model_train = multi_task_model.MultiTaskModel(
          source_vocab_size, target_vocab_size, label_vocab_size, _buckets,
          FLAGS.word_embedding_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
          dropout_keep_prob=FLAGS.dropout_keep_prob, use_lstm=True,
          forward_only=False,
          use_attention=FLAGS.use_attention,
          bidirectional_rnn=FLAGS.bidirectional_rnn,
          task=task)
  with tf.variable_scope("model", reuse=True):
    model_test = multi_task_model.MultiTaskModel(
          source_vocab_size, target_vocab_size, label_vocab_size, _buckets,
          FLAGS.word_embedding_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
          dropout_keep_prob=FLAGS.dropout_keep_prob, use_lstm=True,
          forward_only=True,
          use_attention=FLAGS.use_attention,
          bidirectional_rnn=FLAGS.bidirectional_rnn,
          task=task)

  # Restore from the latest checkpoint if one exists; otherwise start fresh.
  ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
  if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path):
    print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
    model_train.saver.restore(session, ckpt.model_checkpoint_path)
  else:
    print("Created model with fresh parameters.")
    # tf.initialize_all_variables() was deprecated and later removed;
    # use the modern equivalent (consistent with the rest of this file).
    session.run(tf.global_variables_initializer())
  return model_train, model_test
# Exemplo n.º 3
# 0
def create_model(session, source_vocab_size, target_vocab_size,
                 label_vocab_size):
    """Build weight-sharing train/test models; restore a checkpoint if found.

    Args:
        session: active tf.Session used for restore/initialization.
        source_vocab_size: size of the source (word) vocabulary.
        target_vocab_size: size of the target (tag) vocabulary.
        label_vocab_size: size of the intent/label vocabulary.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    # Arguments identical for both models; only forward_only differs.
    common = dict(
        dropout_keep_prob=FLAGS.dropout_keep_prob,
        use_lstm=True,
        use_attention=FLAGS.use_attention,
        bidirectional_rnn=FLAGS.bidirectional_rnn,
        task=task,
    )
    with tf.variable_scope("model", reuse=None):
        model_train = multi_task_model.MultiTaskModel(
            source_vocab_size, target_vocab_size, label_vocab_size, _buckets,
            FLAGS.word_embedding_size, FLAGS.size, FLAGS.num_layers,
            FLAGS.max_gradient_norm, FLAGS.batch_size,
            forward_only=False, **common)
    with tf.variable_scope("model", reuse=True):
        model_test = multi_task_model.MultiTaskModel(
            source_vocab_size, target_vocab_size, label_vocab_size, _buckets,
            FLAGS.word_embedding_size, FLAGS.size, FLAGS.num_layers,
            FLAGS.max_gradient_norm, FLAGS.batch_size,
            forward_only=True, **common)

    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if ckpt:
        tf.logging.info("Reading model parameters from %s" %
                        ckpt.model_checkpoint_path)
        # Initialize everything first so variables absent from the
        # checkpoint still get values, then overwrite from the checkpoint.
        session.run(tf.group(tf.global_variables_initializer(),
                             tf.local_variables_initializer()))
        model_train.saver.restore(session, ckpt.model_checkpoint_path)
    else:
        tf.logging.info("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model_train, model_test
# Exemplo n.º 4
# 0
def create_model(session, source_vocab_size, target_vocab_size,
                 label_vocab_size, lm_vocab_size):
    """Build weight-sharing train/test models (LM variant); restore or init.

    Args:
        session: active tf.Session used for restore/initialization.
        source_vocab_size: size of the source (word) vocabulary.
        target_vocab_size: size of the target (tag) vocabulary.
        label_vocab_size: size of the intent/label vocabulary.
        lm_vocab_size: size of the language-model output vocabulary.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    def _build(forward_only):
        # One builder for both models; they differ only in forward_only.
        return multi_task_model.MultiTaskModel(
            source_vocab_size, lm_vocab_size, target_vocab_size,
            label_vocab_size, _buckets, FLAGS.word_embedding_size,
            FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            lm_cost_weight=FLAGS.lm_cost_weight,
            forward_only=forward_only,
            DNN_at_output=FLAGS.DNN_at_output,
            dnn_hidden_layer_size=FLAGS.dnn_hidden_layer_size,
            output_emb_size=FLAGS.output_emb_size,
            zero_intent_thres=FLAGS.zero_intent_thres)

    with tf.variable_scope("model", reuse=None):
        model_train = _build(forward_only=False)
    with tf.variable_scope("model", reuse=True):
        model_test = _build(forward_only=True)

    # Warm-start from disk when a checkpoint is present; otherwise init fresh.
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path):
        print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
        model_train.saver.restore(session, ckpt.model_checkpoint_path)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model_train, model_test
def create_model(session, source_vocab_size, target_vocab_size,
                 label_vocab_size):
    """Create paired train/test models and initialize or load parameters.

    Args:
        session: active tf.Session used for restore/initialization.
        source_vocab_size: size of the source (word) vocabulary.
        target_vocab_size: size of the target (tag) vocabulary.
        label_vocab_size: size of the intent/label vocabulary.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    # 4-2-1. Create train model.
    with tf.variable_scope("model", reuse=None):
        model_train = multi_task_model.MultiTaskModel(
            source_vocab_size,
            target_vocab_size,
            label_vocab_size,
            _buckets,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=False,
            use_attention=FLAGS.use_attention,
            bidirectional_rnn=FLAGS.bidirectional_rnn,
            task=task)

    # 4-2-2. Create test model (reuse=True shares weights with model_train).
    with tf.variable_scope("model", reuse=True):
        model_test = multi_task_model.MultiTaskModel(
            source_vocab_size,
            target_vocab_size,
            label_vocab_size,
            _buckets,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=True,
            use_attention=FLAGS.use_attention,
            bidirectional_rnn=FLAGS.bidirectional_rnn,
            task=task)

    # 4-2-3. Get model parameters or initialize them.
    # 4-2-3-1. Get model parameters.
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    # Also verify the checkpoint files still exist: the checkpoint-state
    # file can outlive deleted model files, and restoring then would fail.
    if ckpt and tf.gfile.Exists(ckpt.model_checkpoint_path):
        print("Reading model parameters from %s" % ckpt.model_checkpoint_path)
        model_train.saver.restore(session, ckpt.model_checkpoint_path)
    # 4-2-3-2. Initialize the model parameters.
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model_train, model_test
# Exemplo n.º 6
# 0
def create_model(session, source_vocab_size, target_vocab_size,
                 label_vocab_size):
    """Create paired train/test models and restore from a fixed checkpoint.

    Side effect: binds the test model to the module-level name ``model_test``
    (callers elsewhere appear to rely on that global).

    Args:
        session: active tf.Session used for restore/initialization.
        source_vocab_size: size of the source (word) vocabulary.
        target_vocab_size: size of the target (tag) vocabulary.
        label_vocab_size: size of the intent/label vocabulary.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    with tf.variable_scope("model", reuse=None):
        model_train = multi_task_model.MultiTaskModel(
            source_vocab_size,
            target_vocab_size,
            label_vocab_size,
            _buckets,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=False,
            use_attention=FLAGS.use_attention,
            bidirectional_rnn=FLAGS.bidirectional_rnn,
            task=task)
    with tf.variable_scope("model", reuse=True):
        global model_test
        model_test = multi_task_model.MultiTaskModel(
            source_vocab_size,
            target_vocab_size,
            label_vocab_size,
            _buckets,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=True,
            use_attention=FLAGS.use_attention,
            bidirectional_rnn=FLAGS.bidirectional_rnn,
            task=task)

    # TODO(review): hard-coded checkpoint path — should come from a flag.
    restorationPath = "./model_tmp/model_final.ckpt"
    # Was `if True:`, which made the fresh-parameters branch unreachable;
    # check for the checkpoint instead so both branches can actually run.
    if tf.train.checkpoint_exists(restorationPath):
        print("Reading model parameters from %s" % restorationPath)
        # Restoring through model_train's saver is enough: model_test was
        # built with reuse=True, so it shares the same variables.
        model_train.saver.restore(session, restorationPath)
    else:
        print("Created model with fresh parameters.")
        # Replaces removed tf.initialize_all_variables().
        session.run(tf.global_variables_initializer())
    return model_train, model_test
# Exemplo n.º 7
# 0
def create_model(session,
                 source_vocab_size, 
                 target_vocab_size, 
                 label_vocab_size):
    """Create the model and initialize or load its parameters in `session`.

    Builds a training model and a weight-sharing evaluation model (the
    second uses reuse=True in the same "model" scope), then either restores
    the latest checkpoint from FLAGS.train_dir or initializes fresh weights.

    Returns:
        (model_train, model_test) tuple of MultiTaskModel instances.
    """
    def _make(forward_only):
        # Single construction helper; the two models differ only here.
        return multi_task_model.MultiTaskModel(
            source_vocab_size,
            target_vocab_size,
            label_vocab_size,
            _buckets,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=forward_only,
            use_attention=FLAGS.use_attention,
            bidirectional_rnn=FLAGS.bidirectional_rnn,
            task=task)

    with tf.variable_scope("model", reuse=None):
        model_train = _make(forward_only=False)
    with tf.variable_scope("model", reuse=True):
        model_test = _make(forward_only=True)

    # Decide between restoring a saved model and starting from scratch.
    checkpoint = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if checkpoint:
        print("读取模型参数: %s" % checkpoint.model_checkpoint_path)
        model_train.saver.restore(session, checkpoint.model_checkpoint_path)
    else:
        print("使用新的参数创建模型。")
        session.run(tf.global_variables_initializer())
    return model_train, model_test
# Exemplo n.º 8
# 0
def load_nlu_model():
    """Build an inference-only NLU model and restore it from the latest checkpoint.

    Relies on module-level globals for the vocabulary sizes
    (sent_vocab_size, slot_vocab_size, intent_vocab_size) and on FLAGS.

    Returns:
        (session, model_test): a live tf.Session and the restored
        forward-only MultiTaskModel.

    Raises:
        ValueError: if no checkpoint is found in FLAGS.train_dir.
    """
    session = tf.Session()
    with tf.variable_scope("model", reuse=None):
        model_test = multi_task_model.MultiTaskModel(
            sent_vocab_size,
            slot_vocab_size,
            intent_vocab_size,
            FLAGS.max_sequence_length,
            FLAGS.word_embedding_size,
            FLAGS.size,
            FLAGS.num_layers,
            FLAGS.max_gradient_norm,
            FLAGS.batch_size,
            learning_rate=FLAGS.learning_rate,
            alpha=FLAGS.alpha,
            dropout_keep_prob=FLAGS.dropout_keep_prob,
            use_lstm=True,
            forward_only=True)

    # Initialize AFTER the graph is built (the original ran the initializer
    # before any variables existed, which was a no-op for the model), then
    # overwrite initialized values with the checkpoint.
    session.run(tf.global_variables_initializer())
    ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
    if not ckpt:
        # Fail with a clear message instead of an AttributeError on None.
        raise ValueError("No checkpoint found in %s" % FLAGS.train_dir)
    model_test.saver.restore(session, ckpt.model_checkpoint_path)

    return session, model_test