Example #1
0
def do_translate(src_sentence: str) -> str:
    """Translate *src_sentence* with beam search and return the decoded text.

    @param src_sentence: the source-language input passed to `beam_search`
    @returns: the top hypothesis for each decoded sentence, one per line,
        each terminated by '\n'
    """
    hypotheses = beam_search(model, src_sentence,
                             beam_size=5,
                             max_decoding_time_step=70)

    # BUG FIX: the original zipped `hypotheses` against the *string*
    # `src_sentence`, which iterates its characters and silently truncates
    # the output to len(src_sentence) hypotheses — and the zipped source
    # value was never used. Iterate the hypotheses directly instead.
    # hyps[0] is the highest-scoring hypothesis; its .value is a token list.
    tgt_lines = [' '.join(hyps[0].value) + '\n' for hyps in hypotheses]
    return ''.join(tgt_lines)
Example #2
0
def send_ASL():
    """Translate the English text in the JSON request body and return it.

    Expects a JSON body with an 'englishInputText' key; responds with a
    dict containing 'outputText' (the translation, or a fallback message
    on failure).
    """
    # Read the input once instead of performing two request.json lookups.
    line = request.json['englishInputText']
    print(line)

    try:
        # beam_search returns a list of hypothesis lists; [0][0] is the
        # top hypothesis for the (single) input sentence.
        outputText = ' '.join(
            beam_search(nmt_model, [nltk.word_tokenize(line)],
                        beam_size=5,
                        max_decoding_time_step=70)[0][0].value)
    except Exception as e:
        # Best-effort boundary: log and return a friendly message rather
        # than letting the request handler raise.
        print(e)
        outputText = 'Could not translate...'
    return {'outputText': outputText}
Example #3
0
def decode(args: Dict[str, str]):
    """Run beam-search decoding on the autograder test set.

    Make sure to run this code before submitting to the autograder.
    @param args (Dict): args from cmd line
    """
    test_data_src = read_corpus(args['SOURCE_FILE'], source='src')
    model = NMT.load(args['MODEL_PATH'])

    if args['CUDA']:
        model = model.to(torch.device("cuda:0"))

    beam = int(args['BEAM_SIZE'])
    max_steps = int(args['MAX_DECODING_TIME_STEP'])
    hypotheses = beam_search(model,
                             test_data_src,
                             beam_size=beam,
                             max_decoding_time_step=max_steps)

    # Write the top hypothesis for every source sentence, one per line.
    with open(args['OUTPUT_FILE'], 'w') as out:
        out.writelines(' '.join(hyps[0].value) + '\n'
                       for _, hyps in zip(test_data_src, hypotheses))