Example #1
0
        # Convert each numpy array of the second batch into a LongTensor
        # Variable, moved to the GPU when use_cuda is set.
        # (Variable/t/use_cuda are defined earlier in the enclosing function.)
        input_2 = [Variable(t.from_numpy(var)) for var in input_2]
        input_2 = [var.long() for var in input_2]
        input_2 = [var.cuda() if use_cuda else var for var in input_2]

        # Unpack the five tensors of the second batch: encoder word/char
        # inputs, decoder word/char inputs, and the target sequence.
        [
            encoder_word_input_2, encoder_character_input_2,
            decoder_word_input_2, decoder_character_input_2, target
        ] = input_2
        ''' ================================================== Forward pass ===========================================================
        '''
        # exit()

        # NOTE(review): the first encoder pair comes from the FIRST batch
        # (encoder_word_input / encoder_character_input, defined above this
        # chunk), while the rest comes from input_2 — presumably the model
        # encodes both the original sentence and its paraphrase. Confirm
        # against rvae.forward's signature. First positional arg 0. looks
        # like a dropout probability — TODO confirm.
        logits, _, kld, mu, std = rvae.forward(0.,
                                               encoder_word_input,
                                               encoder_character_input,
                                               encoder_word_input_2,
                                               encoder_character_input_2,
                                               decoder_word_input_2,
                                               decoder_character_input_2,
                                               z=None)
        ''' ================================================================================================================================
        '''

        # print '============'
        # Show the i-th sentence from both datasets (original and paraphrase
        # presumably — data/data_2 are defined outside this chunk).
        print(data[i])
        print(data_2[i])
        # print '------------------------------------'

        # Draw num_sample latent vectors from a standard normal prior,
        # one per sampling iteration.
        for iteration in range(args.num_sample):
            # seed = np.random.normal(size=[1, parameters.latent_variable_size])
            seed = Variable(t.randn([1, parameters.latent_variable_size]))
            # seed = Variable(t.from_numpy(seed).float())
Example #2
0
    initial_state = None
    # Reconstruct the source sentence and its reference paraphrase as text:
    # map word indices back through batch_loader.idx_to_word and reverse the
    # token order ([::-1]) — presumably the encoder consumed words reversed;
    # verify against the batch loader.
    # NOTE(review): "paraphrse" is a typo in a name defined outside this
    # chunk; it must be kept as-is here to match its definition.
    sentence = " ".join(
        list(
            map(lambda x: batch_loader.idx_to_word[x],
                original_encoder_word_input[0].tolist()))[::-1])
    reference = " ".join(
        list(
            map(lambda x: batch_loader.idx_to_word[x],
                paraphrse_encoder_word_input[0].tolist()))[::-1])
    # Decode up to seq_len words, one forward call per step; initial_state
    # threads the decoder's recurrent state between steps, and z=seed fixes
    # the latent sample for the whole sentence.
    for i in range(seq_len):
        logits, initial_state, _ = rvae.forward(
            0.,
            original_encoder_word_input[0:1],
            original_encoder_character_input[0:1],
            paraphrse_encoder_word_input[0:1],
            paraphrse_encoder_character_input[0:1],
            decoder_word_input[0:1],
            decoder_character_input[0:1],
            z=seed,
            initial_state=initial_state)

        # Flatten to (steps, vocab) and turn logits into a distribution.
        logits = logits.view(-1, rvae.params.word_vocab_size)
        # NOTE(review): F.softmax without an explicit dim relies on the
        # deprecated implicit-dim behavior of old PyTorch — confirm the
        # intended axis (likely dim=-1 over the vocabulary).
        prediction = F.softmax(logits)

        # Sample the next word from the distribution at the last time step.
        word = batch_loader.sample_word_from_distribution(
            prediction.data.cpu().numpy()[-1])

        # Stop at the end-of-sentence token.
        if word == batch_loader.end_token:
            break

        # result is accumulated across iterations; defined outside this chunk.
        result += ' ' + word