Example #1
        # NOTE: the source snippet begins mid-statement here; the enclosing
        # sess.run call is reconstructed, and the fetch/placeholder names
        # (train_op, loss_op, translations, encoder_inputs, decoder_inputs)
        # are assumptions, not names from the original code. np.int (removed
        # in NumPy 1.24) is replaced with np.int64.
        _, loss, tran = sess.run(
            [train_op, loss_op, translations],
            feed_dict={
                encoder_inputs: batch_x,
                decoder_inputs: np.concatenate(
                    # prepend a column of <go> ids; drop the last target step
                    (np.ones((batch_y.shape[0], 1), dtype=np.int64) *
                     ch2ind['<go>'],
                     batch_y[:, :-1]),
                    axis=1),
                y_real_len: by,
                x_real_len: bx,
                y_max_len: max(by)
            })
        train_loss_list.append(loss)
        #tmp_train_acc = cal_acc(tran,batch_y)
        #train_acc_list.append(tmp_train_acc)
        # exponential moving average of the loss for a smoother progress display
        exp_loss = loss if exp_loss is None else alpha * exp_loss + (
            1 - alpha) * loss
        pb.info = "iter {} loss:{} lr:{}".format(i + 1, exp_loss, lr)
        with open('val/{}/train_loss.txt'.format(model_path), 'a') as whdl:
            whdl.write("{}\t{}\t{}\n".format(one_epoch, one_batch, loss))
        # validate four times per epoch, on subsampled test and train sets
        val_step = worksum // 4
        if j % val_step == 0 and j != 0:
            test_loss, test_acc, bleu_score, predict_list, target_list, source_list = calc_test_loss(
                test_x[::4], test_y[::4])
            _, train_acc, train_bleu_score, train_predict_list, train_target_list, train_source_list = calc_test_loss(
                train_x[::1000], train_y[::1000])
            predict_texts = get_all_text(predict_list)
            target_texts = get_all_text(target_list)
            source_texts = get_all_en_text(source_list)

            train_predict_texts = get_all_text(train_predict_list)
            train_target_texts = get_all_text(train_target_list)
            train_source_texts = get_all_en_text(train_source_list)
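The feed in Example #1 builds the decoder input by prepending a column of <go> token ids and dropping the last target step, which is standard teacher forcing. Below is a minimal standalone sketch of that shift; the toy ch2ind vocabulary and sequences are made up for illustration.

import numpy as np

# hypothetical toy vocabulary; only '<go>' matters for the shift
ch2ind = {'<go>': 1, '<eos>': 2, 'a': 3, 'b': 4}

# two padded target sequences: "a b <eos>" and "b a <eos>"
batch_y = np.array([[3, 4, 2],
                    [4, 3, 2]])

# prepend a column of <go> ids and drop the last step of each target
go_col = np.ones((batch_y.shape[0], 1), dtype=np.int64) * ch2ind['<go>']
decoder_in = np.concatenate((go_col, batch_y[:, :-1]), axis=1)

print(decoder_in)
# [[1 3 4]
#  [1 4 3]]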
Example #2
for one_epoch in range(1, 10):
    # progress bar tracks how many training samples this epoch has processed
    pb = ProgressBar(worksum=len(X_train))
    pb.startjob()
    for one_batch in range(0, len(X_train), batch_size):
        batch_x, batch_y = next(gen)  # pull the next (inputs, labels) batch
        batch_x_len = np.asarray([len(x) for x in batch_x])
        batch_lr = beginning_lr

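        # one optimization step: feed the padded batch, the true sequence
        # lengths, and the (constant) learning rate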
        _, batch_loss = sess.run([optimizer, loss],
                                 feed_dict={
                                     X: batch_x,
                                     y: batch_y,
                                     X_len: batch_x_len,
                                     learning_rate: batch_lr
                                 })
    pb.info = "EPOCH {} batch{} lr {} loss {}".format(one_epoch, one_batch,
                                                      batch_lr, batch_loss)
    pb.complete(batch_size)
    losses.append(batch_loss)
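
# inference: score the full test set in one pass and keep the first output
# column as the positive-class probability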
batch_predict = sess.run(model_pred, feed_dict={
    X: test_X,
    X_len: test_X_len
})[:, 0]
# threshold the sigmoid outputs at 0.5 to get hard 0/1 labels
batch_predict = [int(x > 0.5) for x in batch_predict]

# In[]
# =============================================================================
# Plot Training Losses and Export Predictions
# =============================================================================
pd.DataFrame(losses).plot()
df = pd.DataFrame({'id': testData.id, 'pred': batch_predict})
pred_path = os.path.join(dir_path, 'TF_RNN.csv')
df.to_csv(pred_path, index=False)  # assumed final step: write the predictions
Example #3
        batch_x, batch_y_ori, a, t, s = voice_flow_train.next()['data']
        # CTC targets must be fed as a (indices, values, shape) sparse triple
        batch_y = sparse_tuple_from(batch_y_ori)
        # halve input lengths, presumably to match a stride-2 downsampling layer
        batch_seq_len = [i // 2 for i in s]
        batch_target_len = [len(i) for i in batch_y_ori]

        # learning rate decay: divide the rate by 10 every DECAY_EPOCH epochs
        batch_lr = begining_learning_rate * 10**-(one_epoch // DECAY_EPOCH)

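        # one training step: run the update op and also fetch the loss,
        # summaries, global step, and dis (presumably an edit-distance metric)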
        step_dis, _, step_loss, step_summary, step_value = sess.run(
            [dis, train_op, loss, summary_op, global_step],
            feed_dict={
                input_sound: batch_x,
                targets: batch_y,
                learning_rate: batch_lr,
                voicelength: batch_seq_len,
                seqlength: batch_target_len,
                max_grad: clip_norm,
                training: True
            })

        # exponentially smoothed metrics for the progress display
        expdis.update(step_dis)
        exploss.update(step_loss)
        pb.info = "EPOCH {} STEP {} LOSS {} DIS {} ".format(
            one_epoch, one_batch, exploss.getval(), expdis.getval())
        train_summary_writer.add_summary(step_summary, step_value)
        pb.complete(BATCH_SIZE)

    print()
    # note: constructing the Saver once, outside the epoch loop, would avoid
    # adding duplicate save/restore ops to the graph every epoch
    saver = tf.train.Saver(tf.global_variables())
    saver.save(sess, "models/{}/model_{}".format(model_name, one_epoch))
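Example #3 relies on sparse_tuple_from, which is not shown in the source. In the usual TF1 CTC setup this helper packs a batch of dense label sequences into the (indices, values, dense_shape) triple that a tf.sparse_placeholder expects. A minimal sketch under that assumption:

import numpy as np

def sparse_tuple_from(sequences, dtype=np.int32):
    """Convert a list of label sequences into the (indices, values,
    dense_shape) triple expected by a tf.sparse_placeholder; assumed to
    match the helper used in Example #3."""
    indices, values = [], []
    for n, seq in enumerate(sequences):
        indices.extend([n, t] for t in range(len(seq)))
        values.extend(seq)
    indices = np.asarray(indices, dtype=np.int64)
    values = np.asarray(values, dtype=dtype)
    dense_shape = np.asarray(
        [len(sequences), max((len(seq) for seq in sequences), default=0)],
        dtype=np.int64)
    return indices, values, dense_shape

# e.g. sparse_tuple_from([[1, 2], [3]]) ->
#   indices [[0, 0], [0, 1], [1, 0]], values [1, 2, 3], dense_shape [2, 2]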