def build_training_and_verification_sets():
    """Build the training and verification datasets.

    Depending on the configuration, the datasets are built either from the
    raw source files (fresh run) or restored from previously pickled files
    (``Config.Train.restore`` is truthy).

    Side effects:
        - Populates the train/verification data and char-to-int map on Config.
        - Sets ``Config.word_count`` (fresh run only) and ``Config.dataset_size``.
        - Logs basic text statistics via ``_print_basic_text_statistics``.
    """
    if not Config.Train.restore:
        # Fresh run: build everything from the raw input text.
        input_str = read_input()
        create_examples(input_str)

        # Character to integer map required during text generation
        Config.export_character_to_integer_map()

        # Export the training and verification data in case
        # the previous setup will be trained on again
        Config.export_train_and_verification_data()

        # NOTE(review): splits on a single space only — multiple spaces or
        # other whitespace inflate/deflate the count; str.split() would be
        # more robust but would change the recorded value.
        Config.word_count = len(input_str.split(" "))
    else:
        # Restore run: load the previously exported artifacts.
        Config.import_character_to_integer_map()
        Config.import_train_and_verification_data()

    Config.dataset_size = Config.Train.size() + Config.Validation.size()

    _print_basic_text_statistics()
feed_dict={ x: input_x, seq_len: phrase_seq_len }) pred_char_id = Config.DecisionEngine.function(sess, softmax_out) pred_char = Config.Generate.int2char()[pred_char_id] generated_text.append(pred_char) if cur_seq_len == Config.sequence_length: # Delete off the front of the list if it has reached the specified sequence length del input_x[0][0] else: # Shave last dummy element off since fixed batch size del input_x[0][Config.sequence_length - 1] cur_seq_len += 1 input_x[0].insert(cur_seq_len - 1, pred_char_id) Config.Generate.prev_char = pred_char logging.info("Output Text: " + Config.Generate.seed_text + "".join(generated_text)) if __name__ == "__main__": Config.parse_args() Config.import_character_to_integer_map() Config.Generate.build_int2char() Config.Generate.build_seed_x() generate_text()