from keras.models import load_model, model_from_json
import helper
import numpy as np
import sys

SEQUENCE_LENGTH = 40
SEQUENCE_STEP = 3
PATH_TO_CORPUS = "pink_floyd_lyrics.txt"
EPOCHS = 20
DIVERSITY = 1.0

# Read the corpus and rebuild the character/index mappings used during training.
text = helper.read_corpus(PATH_TO_CORPUS)
chars = helper.extract_characters(text)
sequences, next_chars = helper.create_sequences(text, SEQUENCE_LENGTH, SEQUENCE_STEP)
char_to_index, indices_char = helper.get_chars_index_dicts(chars)

# Load the JSON description of the architecture and create the model.
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)

# Load weights into the new model. Note that the second call overwrites the
# first, so only "new_weights.h5" is actually used.
loaded_model.load_weights("lyrical_lstm_weights.h5")
loaded_model.load_weights("new_weights.h5")
print("Loaded model from disk")

# Sanity check: predict on an all-zero input of shape
# (1, SEQUENCE_LENGTH, vocabulary size).
x = np.zeros((1, 40, 49))
print(loaded_model.predict(x, verbose=0)[0])


def preprocess(input_text):
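    # The original body of preprocess() is missing; the lines below are a hedged
    # sketch of what it presumably does: one-hot encode the last SEQUENCE_LENGTH
    # characters of the input text into the (1, sequence length, vocabulary size)
    # tensor the model expects.
    input_text = input_text[-SEQUENCE_LENGTH:]
    vector = np.zeros((1, SEQUENCE_LENGTH, len(chars)))
    for t, char in enumerate(input_text):
        if char in char_to_index:
            vector[0, t, char_to_index[char]] = 1.0
    return vector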
PATH_TO_CORPUS = "leevi_corpus.txt" EPOCHS = 5 DIVERSITY = 1.0 """ Read the corpus and get unique characters from the corpus. """ text = helper.read_corpus(PATH_TO_CORPUS) words = text.split() unique_words = helper.extract_characters(words) """ Create sequences that will be used as the input to the network. Create next_chars array that will serve as the labels during the training. """ word_sequences, next_words = helper.create_word_sequences( words, WORD_SEQUENCE_LENGTH, WORD_SEQUENCE_STEP) word_to_index, indices_word = helper.get_chars_index_dicts(unique_words) # """ # The network is not able to work with characters and strings, we need to vectorise. # """ X, y = helper.vectorize(word_sequences, WORD_SEQUENCE_LENGTH, unique_words, word_to_index, next_words) # """ # Define the structure of the model. # """ model = helper.build_model(WORD_SEQUENCE_LENGTH, unique_words) # """ # Train the model # """