def main():
    import problem_unittests as tests

    tests.test_get_init_cell(get_init_cell)
    tests.test_get_embed(get_embed)
    tests.test_build_rnn(build_rnn)
    tests.test_build_nn(build_nn)
    tests.test_get_batches(get_batches)
    tests.test_get_tensors(get_tensors)
    tests.test_pick_word(pick_word)

    print(get_batches([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20],
                      batch_size=3,
                      seq_length=2))
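    # Illustrative note (not in the original source): with the split-based
    # get_batches shown in Example 3 below, this call would return an array of
    # shape (3, 2, 3, 2) -- 3 batches, each pairing an input block and a target
    # block of shape (batch_size=3, seq_length=2); the first batch would be
    # [[[1, 2], [7, 8], [13, 14]], [[2, 3], [8, 9], [14, 15]]].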
def run_test():
    import problem_unittests as t

    t.test_create_lookup_tables(create_lookup_tables)
    t.test_get_batches(get_batches)
    t.test_tokenize(token_lookup)
    t.test_get_inputs(get_inputs)
    t.test_get_init_cell(get_init_cell)
    t.test_get_embed(get_embed)
    t.test_build_rnn(build_rnn)
    t.test_build_nn(build_nn)
    t.test_get_tensors(get_tensors)
    t.test_pick_word(pick_word)
Example 3

import numpy as np
import problem_unittests as tests


def get_batches(int_text, batch_size, seq_length):
    """
    Return batches of input and target data as a NumPy array with shape
    (n_batches, 2, batch_size, seq_length).
    """
    # An earlier, loop-based attempt was left incomplete in the source:
    #     targets.append(int_text[loc + 1 : loc + seq_length + 1])
    #     outputs.append([inputs, targets])
    #     return np.array(outputs)
    n_batches = len(int_text) // (batch_size * seq_length)
    xdata = np.array(int_text[:n_batches * batch_size * seq_length])
    ydata = np.array(int_text[1:n_batches * batch_size * seq_length + 1])
    # ydata[-1] = xdata[0]  # uncomment to wrap the last target back to the first input and satisfy the unit test
    x_batches = np.split(xdata.reshape(batch_size, -1), n_batches, 1)
    y_batches = np.split(ydata.reshape(batch_size, -1), n_batches, 1)
    return np.array(list(zip(x_batches, y_batches)))


"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_get_batches(get_batches)
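
# Illustrative note (an assumption, not part of the original notebook): because each
# element of the returned array pairs an input block with its target block, a
# training loop can unpack batches directly, for example:
#
#     for epoch_i in range(num_epochs):
#         for batch_i, (x, y) in enumerate(get_batches(int_text, batch_size, seq_length)):
#             ...  # feed x as inputs and y as targets for one optimization step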

# ## Neural Network Training
# ### Hyperparameters
# Tune the following parameters (an illustrative configuration sketch follows this list):
#
# - Set `num_epochs` to the number of epochs.
# - Set `batch_size` to the batch size.
# - Set `rnn_size` to the size of the RNNs.
# - Set `embed_dim` to the size of the embedding.
# - Set `seq_length` to the length of each sequence.
# - Set `learning_rate` to the learning rate.
# - Set `show_every_n_batches` to the number of batches after which the neural network should print training progress.
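#
# An illustrative configuration sketch (these values are assumptions chosen for
# demonstration, not the settings used in the original notebook):
#
#     num_epochs = 50
#     batch_size = 128
#     rnn_size = 512
#     embed_dim = 256
#     seq_length = 16
#     learning_rate = 0.001
#     show_every_n_batches = 50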

# In[53]: