Example no. 1
def stateless_fit(model, X, y, Xtest, ytest, params):
    """
    Train the model passed as 1st argument, and return the train_loss
    X and Y Training values are passed.
    Parameters dictionary is also necessary.
    """
    for i in range(params['lstm_num_epochs']):
        model.fit(
            X, y,
            epochs=1,
            validation_data=(Xtest, ytest),
            verbose=params['keras_verbose_level'],
            shuffle=False,
            batch_size=params['lstm_batch_size'])
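        # Clear the LSTM's internal state between epochs (stateful layers
        # would otherwise carry it over).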
        model.reset_states()
    return model
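For context, a minimal sketch of how stateless_fit might be invoked. The model architecture, data shapes and the concrete values in params are assumptions (only the keys read inside the function come from the example), and the sketch assumes the TF 2.x tf.keras API, where Model.reset_states() and batch_input_shape are available.

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense

params = {
    'lstm_num_epochs': 5,
    'lstm_batch_size': 16,
    'lstm_timesteps': 10,
    'keras_verbose_level': 0,
}
n_features = 4

# Synthetic data; sample counts are multiples of the batch size, as required
# by a stateful LSTM.
X = np.random.rand(160, params['lstm_timesteps'], n_features)
y = np.random.rand(160, 1)
Xtest = np.random.rand(32, params['lstm_timesteps'], n_features)
ytest = np.random.rand(32, 1)

model = Sequential([
    LSTM(32, stateful=True,
         batch_input_shape=(params['lstm_batch_size'],
                            params['lstm_timesteps'], n_features)),
    Dense(1),
])
model.compile(loss='mse', optimizer='adam')
model = stateless_fit(model, X, y, Xtest, ytest, params)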
Example no. 2
from numpy import zeros

def range_predict(model, X_test, Y_test, params, batch_size=1):
    """
    Make a prediction for a range of input values, by saturating the lstm
    returns the predictions (unscaled) and the number of errors
    """
    input_shape = (1, params['lstm_timesteps'], len(params['columNames']))
    preds = zeros(X_test.shape[0])
    for i in range(0, X_test.shape[0]):
        input_vector = X_test[i].reshape(input_shape)
        # Make a prediction, feeding the same window 'num_saturations' times
        for k in range(0, params['num_saturations']):
            y_hat = model.predict(input_vector, batch_size=batch_size)
        model.reset_states()
        preds[i] = y_hat
    rmse, num_errors = compute.error(Y_test, preds)
    return (preds, rmse, num_errors)
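The compute.error helper above is project-specific and not shown in this listing. A hypothetical stand-in that matches how it is used here, returning an RMSE and an error count, could look like the following; the definition of an "error" as a prediction whose sign disagrees with the target is purely an assumption, not the project's implementation.

import numpy as np

def error(y_true, y_pred):
    """Hypothetical stand-in for the project's compute.error helper."""
    y_true = np.asarray(y_true).ravel()
    y_pred = np.asarray(y_pred).ravel()
    rmse = float(np.sqrt(np.mean((y_true - y_pred) ** 2)))
    # "Errors" are assumed here to be predictions whose sign (direction)
    # disagrees with the target; the real definition may differ.
    num_errors = int(np.sum(np.sign(y_true) != np.sign(y_pred)))
    return rmse, num_errors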
Example no. 3
import tensorflow as tf

def generate_text(model, start_sequence, num_generate):
    input_eval = [char2idx[s] for s in start_sequence]
    input_eval = tf.expand_dims(input_eval, 0)

    text_generated = []

    temperature = 0.6

    model.reset_states()
    for i in range(num_generate):
        predictions = model(input_eval)
        predictions = tf.squeeze(predictions, 0)
        predictions = predictions / temperature
        predicted_id = tf.random.categorical(predictions,
                                             num_samples=1)[-1, 0].numpy()
        input_eval = tf.expand_dims([predicted_id], 0)
        text_generated.append(int2char[predicted_id])
    return text_generated
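A minimal sketch of how the module-level char2idx/int2char lookups might be built and the generator above invoked. The toy corpus and the tiny model are assumptions, and the sketch again assumes the TF 2.x tf.keras API (stateful RNN layers, batch_input_shape and Model.reset_states()).

import tensorflow as tf

text = "hello world, hello lstm"
vocab = sorted(set(text))
char2idx = {c: i for i, c in enumerate(vocab)}
int2char = {i: c for i, c in enumerate(vocab)}

# Tiny stateful character model: model(input_eval) returns logits of shape
# (batch=1, sequence_length, vocab_size), which is what generate_text expects.
model = tf.keras.Sequential([
    tf.keras.layers.Embedding(len(vocab), 8, batch_input_shape=[1, None]),
    tf.keras.layers.GRU(16, return_sequences=True, stateful=True),
    tf.keras.layers.Dense(len(vocab)),
])

print(''.join(generate_text(model, "hello", num_generate=20)))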
Example no. 4
import tensorflow as tf

def generate_text(model, char2idx, start_string):
    # Evaluation step (generating text using the learned model)

    # Number of characters to generate
    num_generate = 1000

    # Converting our start string to numbers (vectorizing)
    input_eval = [char2idx[s] for s in start_string]
    input_eval = tf.expand_dims(input_eval, 0)

    # List of characters to accumulate the result, seeded with the start string
    text_generated = list(start_string)

    # Low temperatures result in more predictable text.
    # Higher temperatures result in more surprising text.
    # Experiment to find the best setting (a standalone temperature sketch
    # follows this example).
    temperature = 1.0

    # Here batch size == 1
    model.reset_states()

    tf.random.set_seed(42)
    for i in range(num_generate):
        # if i > 200:
        #    print(str(i) + text_generated[-1])
        predictions = model(input_eval)
        # remove the batch dimension
        predictions = tf.squeeze(predictions, 0)

        # using a categorical distribution to predict the character returned by the model
        predictions = predictions / temperature
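        # Sample over classes 1..N-1 (class 0 is excluded from the logits),
        # then add 1 to map the sampled index back to the full vocabulary.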
        predicted_id = tf.random.categorical(predictions[:, 1:],
                                             num_samples=1)[-1, 0].numpy() + 1

        # We pass the predicted character as the next input to the model
        # along with the previous hidden state
        input_eval = tf.expand_dims([predicted_id], 0)
        text_generated.append(idx2char[predicted_id])
    return ''.join(text_generated)
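As a standalone illustration of the temperature comment above (not taken from the source): dividing the logits by a temperature below 1 sharpens the sampling distribution, while a temperature above 1 flattens it.

import tensorflow as tf

logits = tf.constant([[2.0, 1.0, 0.1]])
for temperature in (0.5, 1.0, 2.0):
    probs = tf.nn.softmax(logits / temperature)
    sample = tf.random.categorical(logits / temperature, num_samples=1)
    print(temperature, probs.numpy().round(3), int(sample[0, 0]))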