Example 1
if not args.gen:
    print('Start pre-training generator...')
    start = time.time()
    C = 0  # KL annealing weight, ramped up over pre-training
    for epoch in range(args.gen_pretrain_epoch):

        # pre-train
        pre_train_loss = []
        sum_g_loss = []
        sum_kl_loss = []
        perm = np.random.permutation(train_num)
        # Linear KL annealing: C grows from 0 toward 1 across epochs
        # (relies on Python 3 true division; Python 2 would floor to 0).
        C = epoch / args.gen_pretrain_epoch
        for i in range(0, train_num, batch_size):
            batch = train_comment_data[perm[i:i + batch_size]]
            if args.vae:
                g_loss, kl_loss = generator.pretrain_step_vrae(batch)
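                # Annealed objective: reconstruction loss plus a KL term
                # whose weight C ramps up as pre-training proceeds.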
                loss = g_loss + C * kl_loss

                # Clear stale gradients on both optimizers, then backprop
                # and update the encoder and generator jointly (old-style
                # Chainer zero_grads API).
                enc_optimizer.zero_grads()
                gen_optimizer.zero_grads()
                loss.backward()
                enc_optimizer.update()
                gen_optimizer.update()

                pre_train_loss.append(float(loss.data))
                sum_g_loss.append(float(g_loss.data))
                sum_kl_loss.append(float(kl_loss.data))
            else:
                # Plain autoencoder pre-training: same joint update as the
                # VAE branch, but with no KL term to anneal.
                g_loss = generator.pretrain_step_autoencoder(batch)
                enc_optimizer.zero_grads()
                gen_optimizer.zero_grads()
                g_loss.backward()
                enc_optimizer.update()
                gen_optimizer.update()

                pre_train_loss.append(float(g_loss.data))
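Both branches above scale the KL term by an annealing coefficient C. As a minimal, self-contained sketch of Example 1's linear schedule, the loop below combines stand-in loss values the same way; gen_pretrain_epoch and the random losses are placeholders, not values from the repository:

import numpy as np

gen_pretrain_epoch = 10  # placeholder epoch count
rng = np.random.default_rng(0)

for epoch in range(gen_pretrain_epoch):
    C = epoch / gen_pretrain_epoch   # linear ramp: 0.0, 0.1, ..., 0.9
    g_loss = rng.uniform(1.0, 2.0)   # stand-in reconstruction loss
    kl_loss = rng.uniform(0.5, 1.0)  # stand-in KL divergence
    loss = g_loss + C * kl_loss      # annealed objective, as in the code above
    print(f'epoch {epoch}: C={C:.1f} loss={loss:.3f}')

Starting C at zero lets the decoder first learn to reconstruct before the KL penalty pulls the approximate posterior toward the prior, a common guard against posterior collapse in sentence VAEs.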
Example 2
    pre_train_loss = []
    sum_g_loss = []
    sum_kl_loss = []
    perm = np.random.permutation(train_num)
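    # Incremental schedule: raise the KL weight C by a fixed ratio each
    # epoch, instead of tying it to the epoch count as in Example 1.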
    if args.kl_anneal:
        C += args.kl_anneal_ratio

    for i in range(0, train_num, batch_size):
        x_batch = train_comment_data[perm[i:i + batch_size]]
        tag_batch = train_tag_data[perm[i:i + batch_size]]
        if args.vae:
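            # Conditional variant: the tag batch conditions the encoder and
            # decoder; word_drop is the decoder word-dropout rate (see the
            # sketch after this example).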
            if args.use_tag:
                g_loss, kl_loss = generator.pretrain_step_vrae_tag(
                    x_batch, tag_batch, args.word_drop)
            else:
                g_loss, kl_loss = generator.pretrain_step_vrae(
                    x_batch, args.word_drop)

            loss = g_loss + C * kl_loss

            enc_optimizer.zero_grads()
            gen_optimizer.zero_grads()
            loss.backward()
            enc_optimizer.update()
            gen_optimizer.update()

            pre_train_loss.append(float(loss.data))
            sum_g_loss.append(float(g_loss.data))
            sum_kl_loss.append(float(kl_loss.data))
        else:
            # Autoencoder pre-training without the KL term, mirroring the
            # non-VAE branch in Example 1.
            g_loss = generator.pretrain_step_autoencoder(x_batch)
            enc_optimizer.zero_grads()
            gen_optimizer.zero_grads()
            g_loss.backward()
            enc_optimizer.update()
            gen_optimizer.update()

            pre_train_loss.append(float(g_loss.data))
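Example 2 also passes args.word_drop into the pre-training steps. The implementation behind that argument is not shown in these snippets, so the helper below is only an assumption about what decoder word dropout typically looks like (in the style of Bowman et al., 2016): each input token id is replaced by an <unk> id with probability word_drop. UNK_ID and word_dropout are hypothetical names, not identifiers from the repository.

import numpy as np

UNK_ID = 1  # assumed id of the <unk> token

def word_dropout(batch, word_drop, rng=np.random.default_rng(0)):
    # Replace each token id with UNK_ID with probability word_drop,
    # weakening the decoder so it must rely on the latent code.
    batch = np.asarray(batch)
    mask = rng.random(batch.shape) < word_drop
    return np.where(mask, UNK_ID, batch)

tokens = np.array([[5, 8, 2, 9], [7, 3, 4, 6]])
print(word_dropout(tokens, 0.3))  # roughly 30% of the ids become UNK_ID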