Code example #1
File: stt.py Project: inikdom/rnn-speech
def build_language_training_rnn(sess, hyper_params, prog_params, train_set, test_set):
    model = LanguageModel(hyper_params["num_layers"], hyper_params["hidden_size"], hyper_params["batch_size"],
                          hyper_params["max_input_seq_length"], hyper_params["max_target_seq_length"],
                          hyper_params["char_map_length"])

    # Create a Dataset from the train_set and the test_set
    train_dataset = model.build_dataset(train_set, hyper_params["batch_size"], hyper_params["max_input_seq_length"],
                                        hyper_params["char_map"])

    v_iterator = None
    if not test_set:  # no validation data supplied
        t_iterator = model.add_dataset_input(train_dataset)
        sess.run(t_iterator.initializer)
    else:
        test_dataset = model.build_dataset(test_set, hyper_params["batch_size"], hyper_params["max_input_seq_length"],
                                           hyper_params["char_map"])

        # Build the input stream from the different datasets
        t_iterator, v_iterator = model.add_datasets_input(train_dataset, test_dataset)
        sess.run(t_iterator.initializer)
        sess.run(v_iterator.initializer)

    # Create the model
    model.create_training_rnn(hyper_params["dropout_input_keep_prob"], hyper_params["dropout_output_keep_prob"],
                              hyper_params["grad_clip"], hyper_params["learning_rate"],
                              hyper_params["lr_decay_factor"], use_iterator=True)
    model.add_tensorboard(sess, hyper_params["tensorboard_dir"], prog_params["tb_name"], prog_params["timeline"])
    model.initialize(sess)
    model.restore(sess, hyper_params["checkpoint_dir"] + "/language/")

    # Override the learning rate if given on the command line
    if prog_params["learn_rate"] is not None:
        model.set_learning_rate(sess, prog_params["learn_rate"])

    return model, t_iterator, v_iterator
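
build_language_training_rnn reads a fixed set of keys from hyper_params and prog_params. The sketch below lists those keys with illustrative values so the function can be called on its own; every value and path is an assumption for demonstration, not a default from either project, and it assumes LanguageModel and build_language_training_rnn above are importable.

import tensorflow as tf

# All values below are illustrative placeholders.
hyper_params = {
    "num_layers": 2,
    "hidden_size": 512,
    "batch_size": 32,
    "max_input_seq_length": 100,
    "max_target_seq_length": 100,
    "char_map": list("abcdefghijklmnopqrstuvwxyz '"),  # placeholder character map
    "char_map_length": 28,                             # len(char_map)
    "dropout_input_keep_prob": 0.8,
    "dropout_output_keep_prob": 0.5,
    "grad_clip": 5,
    "learning_rate": 3e-4,
    "lr_decay_factor": 0.33,
    "tensorboard_dir": "/tmp/tensorboard",
    "checkpoint_dir": "/tmp/checkpoints",
}
prog_params = {
    "tb_name": "language_model",
    "timeline": False,
    "learn_rate": None,  # set a float here to override hyper_params["learning_rate"]
}

with tf.Session() as sess:
    model, t_iterator, v_iterator = build_language_training_rnn(
        sess, hyper_params, prog_params,
        train_set=["the brown lazy fox", "the red quick fox"],
        test_set=[])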
Code example #2
File: stt.py Project: templeblock/speech-demo
def build_language_training_rnn(sess, hyper_params, prog_params, train_set, test_set):
    model = LanguageModel(hyper_params["num_layers"], hyper_params["hidden_size"], hyper_params["batch_size"],
                          hyper_params["max_input_seq_length"], hyper_params["max_target_seq_length"],
                          hyper_params["char_map_length"])

    # Create a Dataset from the train_set and the test_set
    train_dataset = model.build_dataset(train_set, hyper_params["batch_size"], hyper_params["max_input_seq_length"],
                                        hyper_params["char_map"])

    v_iterator = None
    if not test_set:  # no validation data supplied
        t_iterator = model.add_dataset_input(train_dataset)
        sess.run(t_iterator.initializer)
    else:
        test_dataset = model.build_dataset(test_set, hyper_params["batch_size"], hyper_params["max_input_seq_length"],
                                           hyper_params["char_map"])

        # Build the input stream from the different datasets
        t_iterator, v_iterator = model.add_datasets_input(train_dataset, test_dataset)
        sess.run(t_iterator.initializer)
        sess.run(v_iterator.initializer)

    # Create the model
    model.create_training_rnn(hyper_params["dropout_input_keep_prob"], hyper_params["dropout_output_keep_prob"],
                              hyper_params["grad_clip"], hyper_params["learning_rate"],
                              hyper_params["lr_decay_factor"], use_iterator=True)
    model.add_tensorboard(sess, hyper_params["tensorboard_dir"], prog_params["tb_name"], prog_params["timeline"])
    model.initialize(sess)
    model.restore(sess, hyper_params["checkpoint_dir"] + "/language/")

    # Override the learning rate if given on the command line
    if prog_params["learn_rate"] is not None:
        model.set_learning_rate(sess, prog_params["learn_rate"])

    return model, t_iterator, v_iterator
Code example #3
    def test_create_training_rnn(self):
        tf.reset_default_graph()
        with tf.Session():
            model = LanguageModel(self.num_layers, self.hidden_size, self.batch_size, self.max_input_seq_length,
                                  self.max_target_seq_length, self.input_dim)
            model.create_training_rnn(self.input_keep_prob, self.output_keep_prob, self.grad_clip,
                                      self.learning_rate, self.lr_decay_factor)
Code example #4
    def test_create_training_rnn(self):
        tf.reset_default_graph()
        with tf.Session():
            model = LanguageModel(self.num_layers, self.hidden_size,
                                  self.batch_size, self.max_input_seq_length,
                                  self.max_target_seq_length, self.input_dim)
            model.create_training_rnn(self.input_keep_prob,
                                      self.output_keep_prob, self.grad_clip,
                                      self.learning_rate, self.lr_decay_factor)
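
Both test variants above reference fixture attributes (self.num_layers, self.input_keep_prob, and so on) that are defined elsewhere in the test class. A minimal setUp along these lines would make the snippets self-contained; the class name and all values are illustrative assumptions, not those of the original test suites.

import unittest
import tensorflow as tf

class LanguageModelTest(unittest.TestCase):  # hypothetical class name
    def setUp(self):
        # Small illustrative values so the graph builds quickly.
        self.num_layers = 1
        self.hidden_size = 50
        self.batch_size = 2
        self.max_input_seq_length = 30
        self.max_target_seq_length = 30
        self.input_dim = 28            # size of the character map
        self.input_keep_prob = 0.8
        self.output_keep_prob = 0.5
        self.grad_clip = 5
        self.learning_rate = 3e-4
        self.lr_decay_factor = 0.33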
Code example #5
    def test_create_training_rnn_with_iterators(self):
        tf.reset_default_graph()

        with tf.Session():
            model = LanguageModel(self.num_layers, self.hidden_size, self.batch_size, self.max_input_seq_length,
                                  self.max_target_seq_length, self.input_dim)

            # Create a Dataset from the train_set
            train_dataset = model.build_dataset(["the brown lazy fox", "the red quick fox"], self.batch_size,
                                                self.max_input_seq_length, ENGLISH_CHAR_MAP)
            model.add_dataset_input(train_dataset)
            model.create_training_rnn(self.input_keep_prob, self.output_keep_prob, self.grad_clip,
                                      self.learning_rate, self.lr_decay_factor, use_iterator=True)
Code example #6
    def test_create_training_rnn_with_iterators(self):
        tf.reset_default_graph()

        with tf.Session():
            model = LanguageModel(self.num_layers, self.hidden_size,
                                  self.batch_size, self.max_input_seq_length,
                                  self.max_target_seq_length, self.input_dim)

            # Create a Dataset from the train_set
            train_dataset = model.build_dataset(
                ["the brown lazy fox", "the red quick fox"], self.batch_size,
                self.max_input_seq_length, ENGLISH_CHAR_MAP)
            model.add_dataset_input(train_dataset)
            model.create_training_rnn(self.input_keep_prob,
                                      self.output_keep_prob,
                                      self.grad_clip,
                                      self.learning_rate,
                                      self.lr_decay_factor,
                                      use_iterator=True)
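
Examples #5 and #6 build the training graph with use_iterator=True, the same mode used by build_language_training_rnn in examples #1 and #2, presumably so that batches are pulled straight from the tf.data pipeline rather than fed through placeholders. A common way to drive such a graph is to reinitialize the training iterator at the start of each epoch and step until the dataset is exhausted. The loop below is only a sketch of that pattern: it reuses the hyper_params/prog_params dicts from the first sketch, and run_train_step is a hypothetical method name, since the projects' actual training-loop code is not shown here.

with tf.Session() as sess:
    model, t_iterator, v_iterator = build_language_training_rnn(
        sess, hyper_params, prog_params, train_set, test_set)

    for epoch in range(10):
        sess.run(t_iterator.initializer)       # rewind the training dataset
        while True:
            try:
                model.run_train_step(sess)     # hypothetical per-batch training step
            except tf.errors.OutOfRangeError:
                break                          # end of epoch: dataset exhausted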