Example #1
SOS_token = 0
EOS_token = 1
MAX_LENGTH = 10
lang_dataset = TextDataset()
# batch_size = 1
lang_dataloader = DataLoader(lang_dataset, shuffle=True)

# number of words in the input language
input_size = lang_dataset.input_lang_words
hidden_size = 256
# number of words in the output language
output_size = lang_dataset.output_lang_words
total_epoch = 20

encoder = EncoderRNN(input_size, hidden_size)
decoder = DecoderRNN(hidden_size, output_size, n_layers=2)
attn_decoder = AttnDecoderRNN(hidden_size, output_size, n_layers=2)
use_attn = True

if torch.cuda.is_available():
    encoder = encoder.cuda()
    decoder = decoder.cuda()
    attn_decoder = attn_decoder.cuda()


def show_plot(points):
    plt.figure()
    x = np.arange(len(points))
    plt.plot(x, points)
    plt.show()
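
Nothing above actually runs the training. A minimal sketch of the epoch loop these objects imply, assuming a per-pair training step (train_pair below is a hypothetical helper, not part of the example):

# Hypothetical epoch loop over the DataLoader defined above.
for epoch in range(total_epoch):
    running_loss = 0.0
    for in_lang, out_lang in lang_dataloader:
        if torch.cuda.is_available():
            in_lang, out_lang = in_lang.cuda(), out_lang.cuda()
        active_decoder = attn_decoder if use_attn else decoder
        # train_pair (assumed) runs one encode/decode pass and returns the loss.
        running_loss += train_pair(encoder, active_decoder, in_lang, out_lang)
    print('epoch {}: avg loss {:.4f}'.format(
        epoch + 1, running_loss / len(lang_dataset)))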

Example #2
            decoder_attentions[di] = decoder_attention.data
            # Greedy decoding: pick the single most likely next token.
            topv, topi = decoder_output.data.topk(1)
            if topi.item() == EOS_token:
                decoded_words.append('<EOS>')
                break
            else:
                decoded_words.append(output_lang.index2word[topi.item()])

            # Detach so the predicted token is fed back in as a plain input.
            decoder_input = topi.squeeze().detach()

        return decoded_words, decoder_attentions[:di + 1]


hidden_size = 256  # assumed; matches the value used in the other examples
encoder1 = EncoderRNN(input_lang.n_words, hidden_size).to(device)
attn_decoder1 = AttnDecoderRNN(hidden_size, output_lang.n_words,
                               dropout_p=0.1).to(device)


def evaluateRandomly(encoder, decoder, n=10):
    for i in range(n):
        pair = random.choice(pairs)
        print('>', pair[0])
        print('=', pair[1])
        output_words, attentions = evaluate(encoder, decoder, pair[0])
        output_sentence = ' '.join(output_words)
        print('<', output_sentence)
        print('')


evaluateRandomly(encoder1, attn_decoder1)
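
The attention matrix that evaluate() returns plots naturally as a heatmap, one row per decoded word and one column per input word. A sketch in the style of the PyTorch seq2seq tutorial (the French sample sentence is an assumption):

import matplotlib.pyplot as plt
import matplotlib.ticker as ticker


def showAttention(input_sentence, output_words, attentions):
    fig = plt.figure()
    ax = fig.add_subplot(111)
    cax = ax.matshow(attentions.cpu().numpy(), cmap='bone')
    fig.colorbar(cax)
    # Offset the labels by one: matshow places the first tick before the
    # first cell.
    ax.set_xticklabels([''] + input_sentence.split(' ') + ['<EOS>'],
                       rotation=90)
    ax.set_yticklabels([''] + output_words)
    ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
    plt.show()


output_words, attentions = evaluate(encoder1, attn_decoder1, 'je suis content .')
showAttention('je suis content .', output_words, attentions)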
Example #3
N_LAYERS = 2
ENC_DROPOUT = 0.5
DEC_DROPOUT = 0.5


'''
enc = Encoder(INPUT_DIM, ENC_EMB_DIM, HID_DIM, N_LAYERS, ENC_DROPOUT)
dec = Decoder(OUTPUT_DIM, DEC_EMB_DIM, HID_DIM, N_LAYERS, DEC_DROPOUT)
model = Seq2Seq(enc, dec)
'''

hidden_size = 32
MAX_LENGTH = 10
n_layers = 1
encoder = EncoderRNN(INPUT_DIM, hidden_size, n_layers=n_layers)
decoder = AttnDecoderRNN(hidden_size, OUTPUT_DIM, dropout_p=0.5,
                         max_length=MAX_LENGTH, n_layers=n_layers)

learning_rate = 0.0001
encoder_optimizer = torch.optim.Adam(encoder.parameters(), lr=learning_rate)
decoder_optimizer = torch.optim.Adam(decoder.parameters(), lr=learning_rate)

criterion = nn.NLLLoss()
teacher_forcing_ratio = 0.5


def train(encoder, decoder, train_loader, encoder_optimizer,
          decoder_optimizer, criterion):
    # Put both halves of the model in training mode.
    encoder.train()
    decoder.train()
    print_loss_total = 0
    for i, batch in enumerate(train_loader):
        src = batch[0][:, 0:10]  # clip source sequences to MAX_LENGTH tokens
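
The train() body is cut off above. A sketch of the per-pair step it presumably performs, in the style of the PyTorch seq2seq tutorial; initHidden(), the decoder's three return values, the SOS_token/EOS_token constants, and all shapes below are assumptions:

import random

import torch


def train_pair(encoder, decoder, src, trg, encoder_optimizer,
               decoder_optimizer, criterion, max_length=MAX_LENGTH):
    encoder_hidden = encoder.initHidden()
    encoder_optimizer.zero_grad()
    decoder_optimizer.zero_grad()

    # Run the encoder one token at a time, collecting outputs for attention.
    encoder_outputs = torch.zeros(max_length, encoder.hidden_size)
    for ei in range(src.size(0)):
        encoder_output, encoder_hidden = encoder(src[ei], encoder_hidden)
        encoder_outputs[ei] = encoder_output[0, 0]

    decoder_input = torch.tensor([[SOS_token]])
    decoder_hidden = encoder_hidden
    loss = 0.0
    use_teacher_forcing = random.random() < teacher_forcing_ratio

    for di in range(trg.size(0)):
        decoder_output, decoder_hidden, _ = decoder(
            decoder_input, decoder_hidden, encoder_outputs)
        loss = loss + criterion(decoder_output, trg[di])
        if use_teacher_forcing:
            decoder_input = trg[di]           # feed the ground-truth token
        else:
            _, topi = decoder_output.topk(1)  # feed the model's own prediction
            decoder_input = topi.squeeze().detach()
            if decoder_input.item() == EOS_token:
                break

    loss.backward()
    encoder_optimizer.step()
    decoder_optimizer.step()
    return loss.item() / trg.size(0)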
Example #4
            output_words, attentions = evaluate(encoder, decoder, in_lang)
        else:
            output_words = evaluate(encoder, decoder, in_lang)
        output_sentence = ' '.join(output_words)
        print('<', output_sentence)
        print('')


input_size = lang_dataset.input_lang_words
hidden_size = 256
output_size = lang_dataset.output_lang_words

encoder = EncoderRNN(input_size, hidden_size)
encoder.load_state_dict(torch.load('./encoder.pth'))
if use_attn:
    decoder = AttnDecoderRNN(hidden_size, output_size, n_layers=2)
    decoder.load_state_dict(torch.load('./attn_decoder.pth'))
else:
    decoder = DecoderRNN(hidden_size, output_size, n_layers=2)
    decoder.load_state_dict(torch.load('./decoder.pth'))

if use_cuda:
    encoder = encoder.cuda()
    decoder = decoder.cuda()

evaluateRandomly(encoder, decoder)
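
For the load_state_dict calls above to succeed, matching checkpoints must have been written at the end of training. A minimal sketch of the save side (the paths mirror the ones loaded here; which decoder object is saved depends on use_attn, as above):

torch.save(encoder.state_dict(), './encoder.pth')
if use_attn:
    torch.save(decoder.state_dict(), './attn_decoder.pth')
else:
    torch.save(decoder.state_dict(), './decoder.pth')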

if use_attn:
    pair_idx = random.choice(list(range(len(lang_dataset))))
    pairs = lang_dataset.pairs[pair_idx]
    print('>')
SOS_token = 0
EOS_token = 1
MAX_LENGTH = 10
lang_dataset = TextDataset()
lang_dataloader = DataLoader(lang_dataset, shuffle=True)
print()

input_size = lang_dataset.input_lang_words
hidden_size = 256
output_size = lang_dataset.output_lang_words
total_epoch = 20

encoder = EncoderRNN(input_size, hidden_size)
decoder = DecoderRNN(hidden_size, output_size, n_layers=2)
attn_decoder = AttnDecoderRNN(hidden_size, output_size, n_layers=2)
use_attn = True

if torch.cuda.is_available():
    encoder = encoder.cuda()
    decoder = decoder.cuda()
    attn_decoder = attn_decoder.cuda()


def showPlot(points):
    plt.figure()
    x = np.arange(len(points))
    plt.plot(x, points)
    plt.show()
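
showPlot expects a plain list of loss values. A hypothetical usage, assuming the history is appended to once per logging interval during training:

plot_losses = [4.2, 3.1, 2.6, 2.2, 1.9]  # assumed loss history, not real output
showPlot(plot_losses)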