Example no. 1
0
def synthesize(X_seq):
    """Synthesize 200 characters of text from the RNN.

    Seeds the network with the first column of *X_seq* (a one-hot
    encoded character) and decodes each one-hot output column back to
    a character via the global ``ind_to_char`` mapping.

    Args:
        X_seq: 2-D array whose columns are one-hot encoded characters;
            only the first column is used as the seed.

    Returns:
        str: the 200-character synthesized text.
    """
    x0 = X_seq[:, 0:1]
    synth = RNN.synthesize(x0, 200)
    # Join once instead of repeated `text += ...` concatenation,
    # which is quadratic in the number of characters.
    return "".join(ind_to_char[np.argmax(column)] for column in synth.T)
Example no. 2
0
    # np.random.seed(400)  # TODO: remove
    # compare_gradients()

    RNN = RNN(K, m, eta, seq_length, init='xavier')

    save = True
    smooth_loss = -1
    step = -1
    last_epoch = 0
    if save:
        smooth_loss, step, last_epoch = RNN.load()
        print('last smooth_loss: %f \t last step: %d \t last epoch: %d' %
              (smooth_loss, step, last_epoch))

    synth = RNN.synthesize(make_one_hot([char_to_ind['.']], K), 1000)
    text = ""
    for column in synth.T:
        text += ind_to_char[np.argmax(column)]
    print(text.encode('ascii', 'ignore').decode('ascii'))
    exit()

    losses = []
    f = open(
        'synthesized-' + str(
            datetime.datetime.fromtimestamp(
                time.time()).strftime('%Y-%m-%d %H:%M:%S')), 'w+')
    for epoch in range(n_epoch):
        print("\t\t---NEW EPOCH--- number: %d" % (epoch + last_epoch))
        RNN.h0 = np.zeros((m, 1))
        for X_seq, Y_seq in get_batch():