# ----- Example 1 -----
import sys

import numpy as np
import tensorflow as tf

from pathlib import Path
from os.path import splitext
from os.path import basename
from keras.models import load_model

import helper  # project-local corpus/vocabulary utilities

# Silence TensorFlow info/warning chatter.
# NOTE(review): tf.logging was removed in TF 2.x — this line presumably
# targets TF 1.x; confirm the pinned TensorFlow version.
tf.logging.set_verbosity(tf.logging.ERROR)

# Fail fast with a usage message instead of an IndexError on sys.argv.
if len(sys.argv) < 4:
    sys.exit("usage: generate.py MODEL_PATH CORPUS_PATH DIVERSITY")

PATH_TO_MODEL = sys.argv[1]              # path to a saved Keras model
CORPUS = helper.read_corpus(sys.argv[2])  # raw training/seed text
DIVERSITY = float(sys.argv[3])           # sampling temperature
GEN_LENGTH = 400                         # seed/generation length (chars)
CHARS = helper.extract_characters(CORPUS)
char_to_index, indices_char = helper.get_chars_index_dicts(CHARS)
"""
  Load the model
"""
modelFile = Path(PATH_TO_MODEL)
if modelFile.is_file():
    model = load_model(PATH_TO_MODEL)
"""
  GEN_LENGTH needs to be the same that was used when model was saved
"""
generated = ''
sentence = CORPUS[0:GEN_LENGTH]
sentence = sentence.lower()
generated += sentence
# Project-local module; not referenced below in this fragment —
# presumably imported for later use or side effects. TODO confirm.
import five_words as five

# Load a pre-trained LSTM from a hard-coded path in the working directory.
M = load_model("lyrical_lstm.h5")

import sys

import numpy as np

from keras.models import load_model

import helper

# Sequence settings — presumably must match the values used when the model
# was trained (confirm against the training script).
SEQUENCE_LENGTH = 40    # characters per input sequence
SEQUENCE_STEP = 3       # stride between consecutive sequences
PATH_TO_CORPUS = "pink_floyd_lyrics.txt"
EPOCHS = 20
DIVERSITY = 1.0

# Read the corpus, derive the character vocabulary, and build the
# (sequence, next-character) training pairs plus lookup tables.
text = helper.read_corpus(PATH_TO_CORPUS)
chars = helper.extract_characters(text)
sequences, next_chars = helper.create_sequences(
    text, SEQUENCE_LENGTH, SEQUENCE_STEP)
char_to_index, indices_char = helper.get_chars_index_dicts(chars)

# `model_from_json` was used below without ever being imported (only
# `load_model` is imported at the top of the file) — import it here.
from keras.models import model_from_json

# Load the architecture (JSON) and the trained weights separately.
# `with` guarantees the file handle is closed even if reading fails.
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
# Load weights into the reconstructed model.  The original loaded
# "lyrical_lstm_weights.h5" and then immediately overwrote every weight
# with "new_weights.h5"; only the final load has any effect, so the
# redundant first load is dropped.
loaded_model.load_weights("new_weights.h5")
print("Loaded model from disk")

# Smoke-test prediction on an all-zero batch.
# Shape (1, 40, 49): one sample, 40 timesteps (= SEQUENCE_LENGTH),
# 49 features — presumably the vocabulary size; confirm against len(chars).
x = np.zeros((1, 40, 49))
print(loaded_model.predict(x, verbose=0)[0])
# ----- Example 3 -----
import sys

from keras.models import load_model

# `helper` is used throughout this fragment but was never imported here.
import helper

"""
    Define global variables.
"""
WORD_SEQUENCE_LENGTH = 20   # words per input sequence
WORD_SEQUENCE_STEP = 1      # stride between consecutive sequences
PATH_TO_CORPUS = "leevi_corpus.txt"
EPOCHS = 5
DIVERSITY = 1.0
"""
    Read the corpus and get unique characters from the corpus.
"""
text = helper.read_corpus(PATH_TO_CORPUS)
words = text.split()
# extract_characters doubles as a generic unique-item extractor, here
# applied to the word list rather than to characters.
unique_words = helper.extract_characters(words)
"""
    Create sequences that will be used as the input to the network.
    Create next_chars array that will serve as the labels during the training.
"""
word_sequences, next_words = helper.create_word_sequences(
    words, WORD_SEQUENCE_LENGTH, WORD_SEQUENCE_STEP)
word_to_index, indices_word = helper.get_chars_index_dicts(unique_words)

# The network cannot work with words/strings directly — vectorise into
# the numeric arrays (X = inputs, y = labels) used for training.
X, y = helper.vectorize(word_sequences, WORD_SEQUENCE_LENGTH, unique_words,
                        word_to_index, next_words)
# """