    return np.argmax(np.random.multinomial(1, a, 1))

# train the model, output generated text after each iteration
for iteration in range(1, 60):
    print()
    print('-' * 50)
    print('Iteration', iteration)
    model.fit(X, y, batch_size=128, nb_epoch=1)'''


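# Sample a character index from a predicted probability distribution.
# Dividing the log-probabilities by `temperature` before re-normalising
# sharpens the distribution when temperature < 1 (safer choices) and
# flattens it when temperature > 1 (more surprising choices).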
def sample(a, temperature=1.0):
    a = np.log(a) / temperature
    a = np.exp(a) / np.sum(np.exp(a))
    return np.argmax(np.random.multinomial(1, a, 1))

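# Restore the trained character-level model from disk via the `ok` wrapper.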
model = ok.load_model('keras_shakespeare_model.pk')

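# Seed generation with a random maxlen-character window from the corpus
# and a fixed diversity (sampling temperature) of 0.5.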
start_index = random.randint(0, len(text) - maxlen - 1)

diversity = 0.5
generated = ''
sentence = text[start_index: start_index + maxlen]
generated += sentence
sys.stdout.write(generated)

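# Generate 400 characters: one-hot encode the current window, predict the
# next-character distribution, sample from it, and slide the window
# forward by one character.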
for iteration in range(400):
    x = np.zeros((1, maxlen, len(chars)))
    for t, char in enumerate(sentence):
        x[0, t, char_indices[char]] = 1.
    preds = model.predict(x, verbose=0)[0]
    try:
        next_index = sample(preds, diversity)
        next_char = indices_char[next_index]
        generated += next_char
        sentence = sentence[1:] + next_char
        sys.stdout.write(next_char)
        sys.stdout.flush()
    except ValueError:
        # np.random.multinomial can raise ValueError when floating-point
        # error pushes the probabilities slightly over 1
        print("Value Error")

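# Train on the prepared sequences; `predict` (defined earlier in the
# script) is passed as a per-epoch callback, presumably to print a
# generated sample after every epoch.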
model.train(X_train, y_train, num_epochs=num_epochs,
            epoch_callback=predict)
end_time_1 = time.clock()
t1 = end_time_1 - start_time_1

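# Round-trip the trained model through the `ok` wrapper's save/load
# before continuing training.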
ok.save_model(model)
del model
model = ok.load_model()

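# Continue training the reloaded model on the same data, again with the
# per-epoch `predict` callback.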
model.reinforce(X_train, y_train, num_epochs=num_epochs,
                epoch_callback=predict)

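# The block below is an alternative Keras implementation written against
# the pre-2.0 Keras API (init=, nb_epoch=, BatchNormalization(mode=...));
# it is wrapped in a string literal, so it never executes.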
'''start_time_2 = time.clock()
model_keras = Sequential()
model_keras.add(GRU(h_layer_size, input_dim=len(chars),
                init='normal', return_sequences=False))
model_keras.add(BatchNormalization(mode=1))
model_keras.add(PReLU())
model_keras.add(Dropout(dropout_p))
model_keras.add(Dense(d_layer_size, init='normal'))
model_keras.add(BatchNormalization(mode=1))
model_keras.add(PReLU())
model_keras.add(Dropout(dropout_p))