Example No. 1
    def test_words_from_current_pos(self):
        sentence = Sentence(self._bot.brain.tokenizer, "One Two Three")
        self.assertIsNotNone(sentence)
        self.assertEqual("One Two Three", sentence.words_from_current_pos(0))
        self.assertEqual("Two Three", sentence.words_from_current_pos(1))
        self.assertEqual("Three", sentence.words_from_current_pos(2))
        with self.assertRaises(Exception):
            self.assertEqual("Three", sentence.words_from_current_pos(3))
        self.assertEqual("One Two Three", sentence.text())
Example No. 2
    def match_sentence(self, bot, clientid, pattern_sentence, topic_pattern, that_pattern):

        topic_sentence = Sentence(bot.brain.tokenizer, topic_pattern)
        that_sentence = Sentence(bot.brain.tokenizer, that_pattern)

        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logging.debug("AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                          pattern_sentence.text(), topic_pattern, that_pattern)

        sentence = Sentence(bot.brain.tokenizer)
        sentence.append_sentence(pattern_sentence)
        sentence.append_word('__TOPIC__')
        sentence.append_sentence(topic_sentence)
        sentence.append_word('__THAT__')
        sentence.append_sentence(that_sentence)
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logging.debug("Matching [%s]", sentence.words_from_current_pos(0))

        context = MatchContext(max_search_depth=bot.configuration.max_search_depth,
                               max_search_timeout=bot.configuration.max_search_timeout,
                               tokenizer=bot.brain.tokenizer)

        template = self._pattern_parser._root_node.match(bot, clientid, context, sentence)

        if template is not None:
            context._template_node = template

            context.list_matches()

            # Save the matched context for the associated sentence
            pattern_sentence.matched_context = context

            return context

        return None
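This variant takes `bot` and `clientid` separately and logs through the standard `logging` module. The sentence it matches is the input words followed by `__TOPIC__`, the topic pattern, `__THAT__` and the that pattern, so for input "HELLO" with topic and that patterns of "*" the graph sees `HELLO __TOPIC__ * __THAT__ *`.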
Example No. 3
    def match_sentence(self, client_context, pattern_sentence, topic_pattern, that_pattern):

        topic_sentence = Sentence(client_context.brain.tokenizer, topic_pattern)
        that_sentence = Sentence(client_context.brain.tokenizer, that_pattern)

        YLogger.debug(client_context, "AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                          pattern_sentence.text(), topic_pattern, that_pattern)

        sentence = Sentence(client_context.brain.tokenizer)
        sentence.append_sentence(pattern_sentence)
        sentence.append_word('__TOPIC__')
        sentence.append_sentence(topic_sentence)
        sentence.append_word('__THAT__')
        sentence.append_sentence(that_sentence)
        YLogger.debug(client_context, "Matching [%s]", sentence.words_from_current_pos(0))

        context = MatchContext(max_search_depth=client_context.bot.configuration.max_search_depth,
                               max_search_timeout=client_context.bot.configuration.max_search_timeout,
                               tokenizer=client_context.brain.tokenizer)

        template = self._pattern_parser._root_node.match(client_context, context, sentence)

        if template is not None:
            context._template_node = template

            context.list_matches(client_context)

            # Save the matched context for the associated sentence
            pattern_sentence.matched_context = context

            return context

        return None
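Functionally the same matching logic, but the bot, brain and client id now travel in a single `client_context`, and logging goes through `YLogger`, which removes the explicit `isEnabledFor(logging.DEBUG)` guards.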
Example No. 4
    def test_split_into_words(self):
        sentence = Sentence(self._bot.brain.tokenizer, "HELLO")
        self.assertIsNotNone(sentence)
        self.assertEqual(1, sentence.num_words())
        self.assertEqual("HELLO", sentence.word(0))
        self.assertEqual("HELLO", sentence.words_from_current_pos(0))
        with self.assertRaises(Exception):
            sentence.word(1)  # index 1 is out of range for a one-word sentence
        self.assertEqual("HELLO", sentence.text())
Example No. 5
    def match_sentence(self, client_context, pattern_sentence, topic_pattern,
                       that_pattern):

        topic_sentence = Sentence(client_context.brain.tokenizer,
                                  topic_pattern)
        that_sentence = Sentence(client_context.brain.tokenizer, that_pattern)
        YLogger.debug(
            client_context,
            "AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
            pattern_sentence.text(), topic_pattern, that_pattern)

        sentence = Sentence(client_context.brain.tokenizer)
        sentence.append_sentence(pattern_sentence)
        sentence.append_word('__TOPIC__')
        sentence.append_sentence(topic_sentence)
        sentence.append_word('__THAT__')
        sentence.append_sentence(that_sentence)

        YLogger.debug(client_context, "Matching [%s]",
                      sentence.words_from_current_pos(0))

        context = MatchContext(
            max_search_depth=client_context.bot.configuration.max_search_depth,
            max_search_timeout=client_context.bot.configuration.max_search_timeout,
            tokenizer=client_context.brain.tokenizer)

        template = self._pattern_parser._root_node.match(
            client_context, context, sentence)

        if template is not None:
            context._template_node = template

            context.list_matches(client_context)  # log the match scores

            # Save the matched context for the associated sentence
            pattern_sentence.matched_context = context

            return context

        return None
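Putting the pieces together, a minimal usage sketch might look like the following. This is an assumption-laden sketch, not part of the examples above: the `Sentence` import path, the `parser` object (an AIML parser with a loaded pattern graph) and the `client_context` are assumed and vary between program-y versions; only the `match_sentence` signature and the `Sentence` constructor come from the examples.

# Minimal usage sketch (assumed names; adjust to your program-y version).
from programy.dialog.sentence import Sentence  # assumed module path

def match_user_input(parser, client_context, text):
    """Match raw user text against the loaded AIML graph with wildcard
    topic/that patterns, returning a MatchContext or None."""
    # Build a Sentence from the raw input using the brain's tokenizer,
    # exactly as the examples above do for the topic and that patterns.
    pattern_sentence = Sentence(client_context.brain.tokenizer, text)

    # Delegate to match_sentence(); on success the MatchContext is also
    # stored back onto pattern_sentence.matched_context.
    return parser.match_sentence(client_context, pattern_sentence,
                                 topic_pattern="*", that_pattern="*")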