import math
import random


def get_text_sample(corpus_folder, min_sample_size):
    """Write a random sample of each corpus document to samples1.txt.

    The sample is roughly 5% of the document's sentences, but never fewer
    than min_sample_size sentences.
    """
    corpus_docs = read_corpus(corpus_folder)
    with open("samples1.txt", "w") as sample_file:
        for doc in corpus_docs:
            sentences = splitToSentences(doc)
            corpus_sentences = [remove_char(sentence, '\n') for sentence in sentences]

            text_size = len(corpus_sentences)
            sample_size = math.ceil(text_size * 0.05)
            if sample_size < min_sample_size:
                sample_size = min_sample_size

            # Pick a random starting sentence; randint's upper bound is
            # text_size - 1 so the start index is always valid.
            random_position = random.randint(0, text_size - 1)
            if random_position + sample_size < text_size:
                sample_text = get_substring(corpus_sentences, random_position,
                                            random_position + sample_size)
            else:
                # Wrap around to the start of the document if the sample
                # would run past the end.
                sample_text = get_substring(corpus_sentences, random_position, text_size)
                sample_text += get_substring(corpus_sentences, 0,
                                             sample_size - (text_size - random_position))

            sample_file.write(sample_text)
            sample_file.write('\n\n')
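# The sampler relies on read_corpus, splitToSentences, remove_char and
# get_substring, which are defined elsewhere in the project. The versions
# below are only a minimal sketch based on how they are called: read_corpus
# is assumed to return one string per .txt file in the folder, and the
# sentence splitting is assumed to be a simple punctuation-based split.
import os
import re


def read_corpus(corpus_folder):
    """Return the text of every .txt file in corpus_folder as a list of strings."""
    docs = []
    for name in sorted(os.listdir(corpus_folder)):
        if name.endswith(".txt"):
            with open(os.path.join(corpus_folder, name), encoding="utf-8") as f:
                docs.append(f.read())
    return docs


def splitToSentences(text):
    """Naively split a document into sentences on ., ! and ?."""
    return [s.strip() for s in re.split(r'(?<=[.!?])\s+', text) if s.strip()]


def remove_char(text, char):
    """Remove every occurrence of char from text."""
    return text.replace(char, '')


def get_substring(sentences, start, end):
    """Join the sentences in the half-open range [start, end) into one block of text."""
    return ' '.join(sentences[start:end])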
import sys

import numpy as np

import helper
import five_words as five  # project-local module; not used in the code shown here
from keras.models import load_model, model_from_json

SEQUENCE_LENGTH = 40
SEQUENCE_STEP = 3
PATH_TO_CORPUS = "pink_floyd_lyrics.txt"
EPOCHS = 20
DIVERSITY = 1.0

# Rebuild the character vocabulary and sequence data exactly as during training,
# so the character <-> index mappings line up with the saved model.
text = helper.read_corpus(PATH_TO_CORPUS)
chars = helper.extract_characters(text)
sequences, next_chars = helper.create_sequences(text, SEQUENCE_LENGTH, SEQUENCE_STEP)
char_to_index, indices_char = helper.get_chars_index_dicts(chars)

# Option 1: load the full model (architecture + weights) from a single HDF5 file.
M = load_model("lyrical_lstm.h5")

# Option 2: load the architecture from JSON, then load the weights separately.
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights("lyrical_lstm_weights.h5")
# Note: a second call such as loaded_model.load_weights("new_weights.h5")
# would simply overwrite the weights loaded above.
print("Loaded model from disk")

# One-hot input placeholder: one sample, SEQUENCE_LENGTH time steps,
# 49 distinct characters in the training vocabulary.
x = np.zeros((1, SEQUENCE_LENGTH, 49))
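# helper.py is not shown in this listing. The sketch below reconstructs the four
# functions used above purely from how they are called; the project's real
# implementations may differ in details such as lowercasing or filtering. Note
# that this read_corpus takes a single file path, unlike the folder-based
# sampler earlier.

# helper.py (sketch)
def read_corpus(path):
    """Read the corpus file and return its contents as one lowercase string."""
    with open(path, encoding="utf-8") as f:
        return f.read().lower()


def extract_characters(text):
    """Return the sorted list of distinct characters in the corpus."""
    return sorted(set(text))


def get_chars_index_dicts(chars):
    """Build char -> index and index -> char lookup tables."""
    char_to_index = {c: i for i, c in enumerate(chars)}
    indices_char = {i: c for i, c in enumerate(chars)}
    return char_to_index, indices_char


def create_sequences(text, sequence_length, step):
    """Cut the corpus into overlapping sequences plus the character that follows each one."""
    sequences, next_chars = [], []
    for i in range(0, len(text) - sequence_length, step):
        sequences.append(text[i: i + sequence_length])
        next_chars.append(text[i + sequence_length])
    return sequences, next_chars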
import sys
import warnings
from os.path import basename, splitext
from pathlib import Path

import numpy as np
import tensorflow as tf
from keras.models import load_model

import helper

# Silence FutureWarnings and TensorFlow's info/warning logging.
warnings.simplefilter(action='ignore', category=FutureWarning)
tf.logging.set_verbosity(tf.logging.ERROR)

# Usage: python generate.py <path_to_model> <path_to_corpus> <diversity>
PATH_TO_MODEL = sys.argv[1]
CORPUS = helper.read_corpus(sys.argv[2])
DIVERSITY = float(sys.argv[3])
GEN_LENGTH = 400

CHARS = helper.extract_characters(CORPUS)
char_to_index, indices_char = helper.get_chars_index_dicts(CHARS)

# Load the model. GEN_LENGTH needs to be the same value that was used when
# the model was saved.
modelFile = Path(PATH_TO_MODEL)
if modelFile.is_file():
    model = load_model(PATH_TO_MODEL)
else:
    sys.exit("Model file not found: " + PATH_TO_MODEL)

generated = ''
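# The script above ends right after initializing `generated`. Below is a sketch
# of a typical character-level generation loop in the standard Keras char-RNN
# style, using the DIVERSITY and GEN_LENGTH settings already defined. The
# sample() helper, the choice of seed and SEQUENCE_LENGTH = 40 are assumptions,
# not code recovered from this project.
SEQUENCE_LENGTH = 40


def sample(preds, temperature=1.0):
    """Draw a character index from the softmax output, reweighted by temperature (diversity)."""
    preds = np.asarray(preds).astype('float64')
    preds = np.log(preds + 1e-8) / temperature
    exp_preds = np.exp(preds)
    preds = exp_preds / np.sum(exp_preds)
    return np.argmax(np.random.multinomial(1, preds, 1))


# Seed the generator with the first SEQUENCE_LENGTH characters of the corpus.
sentence = CORPUS[:SEQUENCE_LENGTH]
for _ in range(GEN_LENGTH):
    # One-hot encode the current window of characters.
    x = np.zeros((1, SEQUENCE_LENGTH, len(CHARS)))
    for t, char in enumerate(sentence):
        x[0, t, char_to_index[char]] = 1.0

    # Predict the next-character distribution and sample from it.
    preds = model.predict(x, verbose=0)[0]
    next_char = indices_char[sample(preds, DIVERSITY)]

    generated += next_char
    sentence = sentence[1:] + next_char  # slide the window forward one character

print(generated)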