def predecir(self, frase):
    # Load the preprocessed vocabularies and the path to the trained checkpoint.
    _, (source_vocab_to_int, target_vocab_to_int), (
        source_int_to_vocab,
        target_int_to_vocab) = helper.load_preprocess()
    load_path = helper.load_params()
    tests.test_sentence_to_seq(sentence_to_seq)

    # Keep the original English sentence and convert it to a sequence of word ids.
    translate_sentence = frase
    pIngles = translate_sentence
    translate_sentence = sentence_to_seq(translate_sentence,
                                         source_vocab_to_int)

    # Assumption: batch_size is not defined anywhere in this snippet; 512 matches
    # the value used when the model was trained (see traducir below).
    batch_size = 512

    loaded_graph = tf.Graph()
    with tf.Session(graph=loaded_graph) as sess:
        # Load saved model
        loader = tf.train.import_meta_graph(load_path + '.meta')
        loader.restore(sess, load_path)

        input_data = loaded_graph.get_tensor_by_name('input:0')
        logits = loaded_graph.get_tensor_by_name('predictions:0')
        target_sequence_length = loaded_graph.get_tensor_by_name(
            'target_sequence_length:0')
        source_sequence_length = loaded_graph.get_tensor_by_name(
            'source_sequence_length:0')
        keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')

        # Feed the sentence batch_size times; the target length is twice the
        # source length to give the decoder room for a longer output.
        translate_logits = sess.run(
            logits, {
                input_data: [translate_sentence] * batch_size,
                target_sequence_length:
                [len(translate_sentence) * 2] * batch_size,
                source_sequence_length:
                [len(translate_sentence)] * batch_size,
                keep_prob: 1.0
            })[0]
    """
    print('Input')
    print('  Word Ids:      {}'.format([i for i in translate_sentence]))
    print('  English Words: {}'.format([source_int_to_vocab[i] for i in translate_sentence]))

    print('\nPrediction')
    print('  Word Ids:      {}'.format([i for i in translate_logits]))
    print('  Spanish Words: {}'.format(" ".join([target_int_to_vocab[i] for i in translate_logits])))
    """
    variableRetornar = " ".join(
        [target_int_to_vocab[i] for i in translate_logits])
    print('Result for ', pIngles)
    print(variableRetornar)

    # Persist the translation so other components can read it from disk.
    miTxt = open("BorderOut\\IA\\respuesta.txt", 'w')
    miTxt.write(variableRetornar)
    miTxt.close()

    return variableRetornar
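A minimal call sketch for predecir; the enclosing class name Traductor is an assumption, since only the method itself is shown here.

traductor = Traductor()  # hypothetical class holding the predecir method above
respuesta = traductor.predecir('he saw a old yellow truck .')
print(respuesta)  # the same text is also written to BorderOut\IA\respuesta.txt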
Example 2
def run_tests():

    import problem_unittests as t

    t.test_decoding_layer(decoding_layer)
    t.test_decoding_layer_infer(decoding_layer_infer)
    t.test_decoding_layer_train(decoding_layer_train)
    t.test_encoding_layer(encoding_layer)
    t.test_model_inputs(model_inputs)
    t.test_process_encoding_input(process_decoder_input)
    t.test_sentence_to_seq(sentence_to_seq)
    t.test_seq2seq_model(seq2seq_model)
    t.test_text_to_ids(text_to_ids)
Example 3
def run_all_tests():
    tests.test_text_to_ids(text_to_ids)

    check_tensorflow_gpu()
    tests.test_model_inputs(model_inputs)

    tests.test_process_encoding_input(process_decoder_input)

    from importlib import reload  # imp.reload is deprecated; importlib.reload is equivalent
    reload(tests)
    tests.test_encoding_layer(encoding_layer)

    tests.test_decoding_layer_train(decoding_layer_train)
    tests.test_decoding_layer_infer(decoding_layer_infer)
    tests.test_decoding_layer(decoding_layer)
    tests.test_seq2seq_model(seq2seq_model)

    tests.test_sentence_to_seq(sentence_to_seq)
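A possible entry point for either test runner above, assuming the layer functions they reference (model_inputs, encoding_layer, decoding_layer, seq2seq_model, and so on) are defined in the same module:

if __name__ == '__main__':
    # run_all_tests() could equally be swapped for run_tests()
    run_all_tests()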
Example 4
    """
    # TODO: Implement Function
    word_ids = []
    for word in sentence.split(' '):
        word = word.lower()
        if word not in vocab_to_int:
            word_ids.append(vocab_to_int['<UNK>'])
        else:
            word_ids.append(vocab_to_int[word])
    return word_ids


"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_sentence_to_seq(sentence_to_seq)
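# A quick sanity check of sentence_to_seq with a toy vocabulary; the ids below
# are made up for illustration, a real source_vocab_to_int comes from
# helper.load_preprocess().
toy_vocab_to_int = {'<UNK>': 2, 'he': 10, 'saw': 11, 'a': 12, 'truck': 13, '.': 14}
print(sentence_to_seq('He saw a YELLOW truck .', toy_vocab_to_int))
# Prints [10, 11, 12, 2, 13, 14]; 'yellow' is unknown, so it maps to <UNK>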

# ## Translation
#
# Translate `translate_sentence` from English to French.

# In[20]:

translate_sentence = 'he saw a old yellow truck .'
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
translate_sentence = sentence_to_seq(translate_sentence, source_vocab_to_int)

loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
Example 5
def unit_test(self):
    tests.test_sentence_to_seq(self.sentence_to_seq)
Example 6
def traducir(frase):
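    # Hyperparameters carried over from the training configuration; only
    # batch_size is actually used by the inference code below.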
    # Number of Epochs
    epochs = 10
    # Batch Size
    batch_size = 512
    # RNN Size
    rnn_size = 128
    # Number of Layers
    num_layers = 2
    # Embedding Size
    encoding_embedding_size = 128
    decoding_embedding_size = 128
    # Learning Rate
    learning_rate = 0.001
    # Dropout Keep Probability
    keep_probability = 0.55
    display_step = True

    _, (source_vocab_to_int,
        target_vocab_to_int), (source_int_to_vocab,
                               target_int_to_vocab) = helper.load_preprocess()
    load_path = helper.load_params()
    tests.test_sentence_to_seq(sentence_to_seq)
    translate_sentence = frase
    pIngles = translate_sentence
    translate_sentence = sentence_to_seq(translate_sentence,
                                         source_vocab_to_int)
    loaded_graph = tf.Graph()
    with tf.Session(graph=loaded_graph) as sess:
        # Load saved model
        loader = tf.train.import_meta_graph(load_path + '.meta')
        loader.restore(sess, load_path)

        input_data = loaded_graph.get_tensor_by_name('input:0')
        logits = loaded_graph.get_tensor_by_name('predictions:0')
        target_sequence_length = loaded_graph.get_tensor_by_name(
            'target_sequence_length:0')
        source_sequence_length = loaded_graph.get_tensor_by_name(
            'source_sequence_length:0')
        keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')

        # Feed the sentence batch_size times; the target length is set to twice
        # the source length to give the decoder room for a longer output, and
        # keep_prob is 1.0 so no dropout is applied at inference time.
        translate_logits = sess.run(
            logits, {
                input_data: [translate_sentence] * batch_size,
                target_sequence_length:
                [len(translate_sentence) * 2] * batch_size,
                source_sequence_length: [len(translate_sentence)] * batch_size,
                keep_prob: 1.0
            })[0]
    """
    print('Input')
    print('  Word Ids:      {}'.format([i for i in translate_sentence]))
    print('  English Words: {}'.format([source_int_to_vocab[i] for i in translate_sentence]))

    print('\nPrediction')
    print('  Word Ids:      {}'.format([i for i in translate_logits]))
    print('  Spanish Words: {}'.format(" ".join([target_int_to_vocab[i] for i in translate_logits])))
    """
    variableRetornar = " ".join(
        [target_int_to_vocab[i] for i in translate_logits])
    return variableRetornar
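A minimal usage sketch for traducir; it assumes the preprocessed data and the trained checkpoint that helper.load_preprocess() and helper.load_params() point to already exist on disk:

# Assumes the saved preprocessing data and model checkpoint are available on disk.
traduccion = traducir('he saw a old yellow truck .')
print(traduccion)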