Example #1
def test_create_forward_rnn(self):
    tf.reset_default_graph()
    with tf.Session():
        model = LanguageModel(self.num_layers, self.hidden_size,
                              self.batch_size, self.max_input_seq_length,
                              self.max_target_seq_length, self.input_dim)
        model.create_forward_rnn()
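The test above reads its hyper-parameters from attributes on self, so it is meant to live inside a test case class that defines them beforehand. A minimal sketch of such a setup is shown below; the class name and the concrete values are illustrative assumptions, not taken from the project.

import unittest

class LanguageModelTest(unittest.TestCase):
    def setUp(self):
        # Illustrative values only; the real test suite may use different ones.
        self.num_layers = 2
        self.hidden_size = 128
        self.batch_size = 4
        self.max_input_seq_length = 50
        self.max_target_seq_length = 50
        self.input_dim = 40

    # test_create_forward_rnn from Example #1 would be defined here and
    # operate on the attributes set up above.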
Example #2
def generate_text(hyper_params):
    with tf.Session() as sess:
        # Create model
        model = LanguageModel(hyper_params["num_layers"],
                              hyper_params["hidden_size"], 1, 1,
                              hyper_params["max_target_seq_length"],
                              hyper_params["char_map_length"])
        model.create_forward_rnn()
        model.initialize(sess)
        model.restore(sess, hyper_params["checkpoint_dir"] + "/language/")

        # Start with a letter
        text = "O"

        for _ in range(10):
            print(text, end="")
            # Convert the text to a one-hot encoded vector
            input_vec = dataprocessor.DataProcessor.get_str_to_one_hot_encoded(
                hyper_params["char_map"], text, add_eos=False)
            feat_vec = np.array(input_vec)
            (a, b) = feat_vec.shape
            feat_vec = feat_vec.reshape((a, 1, b))
            prediction = model.process_input(sess, feat_vec, [1])
            text = dataprocessor.DataProcessor.get_labels_str(
                hyper_params["char_map"], prediction[0])
        print(text)
        return
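For reference, generate_text expects a hyper_params dictionary holding the model configuration, the character map, and the checkpoint location; the keys it reads above show what is required. A minimal sketch of a call is given below, with placeholder values that are assumptions rather than the project's real configuration.

# Placeholder configuration for illustration only; the real values, the
# char_map contents and the checkpoint directory depend on the trained model.
char_map = list("abcdefghijklmnopqrstuvwxyz .,'")
hyper_params = {
    "num_layers": 2,
    "hidden_size": 128,
    "max_target_seq_length": 50,
    "char_map": char_map,
    "char_map_length": len(char_map),  # may need to account for special symbols such as EOS
    "checkpoint_dir": "checkpoints",   # directory containing the "language" checkpoint
}
generate_text(hyper_params)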