def match_sentence(self, bot, clientid, pattern_sentence, topic_pattern, that_pattern):
    """Match a user sentence against the pattern graph, scoped by topic and that.

    Builds a combined match sentence of the form
    ``<words> __TOPIC__ <topic words> __THAT__ <that words>`` and walks the
    pattern tree from its root.

    :param bot: the bot whose brain/graph is being matched
    :param clientid: id of the client asking the question
    :param pattern_sentence: Sentence holding the user's words
    :param topic_pattern: current topic as a pattern string
    :param that_pattern: previous bot response as a pattern string
    :return: a MatchContext with the matched template, or None if no match
    """
    topic_sentence = Sentence(topic_pattern)
    that_sentence = Sentence(that_pattern)

    logging.debug("AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                  pattern_sentence.text(), topic_pattern, that_pattern)

    sentence = Sentence()
    sentence.append_sentence(pattern_sentence)
    sentence.append_word('__TOPIC__')
    sentence.append_sentence(topic_sentence)
    sentence.append_word('__THAT__')
    sentence.append_sentence(that_sentence)

    # Lazy %-args (not eager "%" formatting) so the string is only built
    # when DEBUG logging is actually enabled.
    logging.debug("Matching [%s]", sentence.words_from_current_pos(0))

    context = MatchContext()

    template = self.pattern_parser._root_node.match(bot, clientid, context, sentence)

    if template is not None:
        context._template_node = template
        context.list_matches()
        # Save the matched context for the associated sentence
        pattern_sentence.matched_context = context
        return context

    return None
def match_sentence(self, bot, clientid, pattern_sentence, topic_pattern, that_pattern):
    """Match a user sentence against the pattern graph, scoped by topic and that.

    Concatenates the user's words with ``__TOPIC__`` and ``__THAT__`` marker
    sections, then matches the combined sentence from the graph root.

    :param bot: the bot whose brain/graph is being matched
    :param clientid: id of the client asking the question
    :param pattern_sentence: Sentence holding the user's words
    :param topic_pattern: current topic as a pattern string
    :param that_pattern: previous bot response as a pattern string
    :return: a MatchContext with the matched template, or None if no match
    """
    topic_sentence = Sentence(topic_pattern)
    that_sentence = Sentence(that_pattern)

    logging.debug("AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                  pattern_sentence.text(), topic_pattern, that_pattern)

    sentence = Sentence()
    sentence.append_sentence(pattern_sentence)
    sentence.append_word('__TOPIC__')
    sentence.append_sentence(topic_sentence)
    sentence.append_word('__THAT__')
    sentence.append_sentence(that_sentence)

    # Fixed: was eager '%' formatting; lazy args defer the work unless
    # DEBUG logging is enabled, matching the call above.
    logging.debug("Matching [%s]", sentence.words_from_current_pos(0))

    context = MatchContext()

    template = self.pattern_parser._root_node.match(bot, clientid, context, sentence)

    if template is not None:
        context._template_node = template
        context.list_matches()
        # Save the matched context for the associated sentence
        pattern_sentence.matched_context = context
        return context

    return None
def match_sentence(self, bot, clientid, pattern_sentence, topic_pattern, that_pattern):
    """Match a user sentence against the pattern graph, scoped by topic and that.

    The user's words, the topic words and the that words are stitched into a
    single sentence separated by ``__TOPIC__`` / ``__THAT__`` markers, which
    is then matched from the root of the pattern tree.

    :param bot: bot supplying the tokenizer and search configuration
    :param clientid: id of the client asking the question
    :param pattern_sentence: Sentence holding the user's words
    :param topic_pattern: current topic as a pattern string
    :param that_pattern: previous bot response as a pattern string
    :return: a MatchContext with the matched template, or None if no match
    """
    tokenizer = bot.brain.tokenizer
    topic_sentence = Sentence(tokenizer, topic_pattern)
    that_sentence = Sentence(tokenizer, that_pattern)

    # Guard the debug call so text() is not evaluated unless needed.
    if logging.getLogger().isEnabledFor(logging.DEBUG):
        logging.debug("AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                      pattern_sentence.text(), topic_pattern, that_pattern)

    combined = Sentence(tokenizer)
    combined.append_sentence(pattern_sentence)
    combined.append_word('__TOPIC__')
    combined.append_sentence(topic_sentence)
    combined.append_word('__THAT__')
    combined.append_sentence(that_sentence)

    if logging.getLogger().isEnabledFor(logging.DEBUG):
        logging.debug("Matching [%s]", combined.words_from_current_pos(0))

    match_context = MatchContext(max_search_depth=bot.configuration.max_search_depth,
                                 max_search_timeout=bot.configuration.max_search_timeout,
                                 tokenizer=tokenizer)

    matched = self._pattern_parser._root_node.match(bot, clientid, match_context, combined)

    # Guard clause: no template found means no match at all.
    if matched is None:
        return None

    match_context._template_node = matched
    match_context.list_matches()
    # Save the matched context for the associated sentence
    pattern_sentence.matched_context = match_context
    return match_context
def match_sentence(self, client_context, pattern_sentence, topic_pattern, that_pattern):
    """Match a user sentence against the pattern graph, scoped by topic and that.

    Builds one combined sentence — user words, ``__TOPIC__`` section, then a
    ``__THAT__`` section — and matches it from the root of the pattern tree.

    :param client_context: per-client context carrying brain, bot config and logging
    :param pattern_sentence: Sentence holding the user's words
    :param topic_pattern: current topic as a pattern string
    :param that_pattern: previous bot response as a pattern string
    :return: a MatchContext with the matched template, or None if no match
    """
    tokenizer = client_context.brain.tokenizer
    topic_sentence = Sentence(tokenizer, topic_pattern)
    that_sentence = Sentence(tokenizer, that_pattern)

    YLogger.debug(client_context, "AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                  pattern_sentence.text(), topic_pattern, that_pattern)

    combined = Sentence(tokenizer)
    combined.append_sentence(pattern_sentence)
    combined.append_word('__TOPIC__')
    combined.append_sentence(topic_sentence)
    combined.append_word('__THAT__')
    combined.append_sentence(that_sentence)

    YLogger.debug(client_context, "Matching [%s]", combined.words_from_current_pos(0))

    bot_config = client_context.bot.configuration
    match_context = MatchContext(max_search_depth=bot_config.max_search_depth,
                                 max_search_timeout=bot_config.max_search_timeout,
                                 tokenizer=tokenizer)

    matched = self._pattern_parser._root_node.match(client_context, match_context, combined)

    # Guard clause: no template found means no match at all.
    if matched is None:
        return None

    match_context._template_node = matched
    match_context.list_matches(client_context)
    # Save the matched context for the associated sentence
    pattern_sentence.matched_context = match_context
    return match_context
def match_sentence(self, client_context, pattern_sentence, topic_pattern, that_pattern):
    """Match a user sentence against the pattern graph, scoped by topic and that.

    Empty topic/that word lists fall back to the wildcard ``'*'``. The user's
    words, topic words and that words are stitched into one sentence separated
    by ``__TOPIC__`` / ``__THAT__`` markers and matched from the graph root.

    :param client_context: per-client context carrying brain, bot config, NLU flag
                           and question start time
    :param pattern_sentence: Sentence holding the user's words
    :param topic_pattern: current topic as a pattern string
    :param that_pattern: previous bot response as a pattern string
    :return: a MatchContext with the matched template, or None if no match
    """
    tokenizer = client_context.brain.tokenizer

    topic_sentence = Sentence(tokenizer, topic_pattern)
    if not topic_sentence.words:
        # An empty topic must still match something: fall back to wildcard.
        topic_sentence.words.append('*')

    that_sentence = Sentence(tokenizer, that_pattern)
    if not that_sentence.words:
        that_sentence.words.append('*')

    # NLU mode logs without the raw sentence text; identity check on True
    # is deliberate (flag must be the boolean True, not merely truthy).
    if client_context.match_nlu is True:
        YLogger.debug(client_context, "AIML Parser NLU matching topic=[%s], that=[%s] ",
                      topic_pattern, that_pattern)
    else:
        YLogger.debug(client_context,
                      "AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                      pattern_sentence.text(), topic_pattern, that_pattern)

    combined = Sentence(tokenizer)
    combined.append_sentence(pattern_sentence)
    combined.append_word('__TOPIC__')
    combined.append_sentence(topic_sentence)
    combined.append_word('__THAT__')
    combined.append_sentence(that_sentence)

    YLogger.debug(client_context, "Matching [%s]", combined.words_from_current_pos(0))

    bot_config = client_context.bot.configuration
    match_context = MatchContext(max_search_depth=bot_config.max_search_depth,
                                 max_search_timeout=bot_config.max_search_timeout,
                                 tokenizer=tokenizer,
                                 start_time=client_context.question_start_time)

    matched = self._pattern_parser._root_node.match(client_context, match_context, combined)

    # Guard clause: no template found means no match at all.
    if matched is None:
        return None

    match_context._template_node = matched
    match_context.list_matches(client_context)
    # Save the matched context for the associated sentence
    pattern_sentence.matched_context = match_context
    return match_context