def test_list_matches_no_template(self):
    """list_matches with include_template=False omits the template from the listing."""
    global total_str
    topic = PatternOneOrMoreWildCardNode("*")
    word1 = PatternWordNode("Hi")
    word2 = PatternWordNode("There")
    context = MatchContext(max_search_depth=100,
                           max_search_timeout=60,
                           template_node=PatternTemplateNode(TemplateWordNode("Hello")),
                           sentence="HELLO",
                           response="Hi there")
    context.add_match(Match(Match.TOPIC, topic, None))
    context.add_match(Match(Match.WORD, word1, "Hi"))
    context.add_match(Match(Match.WORD, word2, "There"))
    total_str = ""
    context.list_matches(self._client_context, output_func=collector, include_template=False)
    # assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual(
        "\tMatches... Asked: HELLO 1: Match=(Topic) Node=(ONEORMORE [*]) Matched=() "
        "2: Match=(Word) Node=(WORD [Hi]) Matched=(Hi) "
        "3: Match=(Word) Node=(WORD [There]) Matched=(There) "
        "Match score 100.00", total_str)
def test_match_context_in_db(self):
    """Round-trip a MatchContext's scalar fields through the SQL conversation store."""
    engine = SQLStorageEngine(SQLStorageConfiguration())
    engine.initialise()
    store = SQLConversationStore(engine)
    store.empty()

    client = TestClient()
    client_context = client.create_client_context("user1")

    # Write a context with no matched nodes and no template
    written = MatchContext(100, 100, sentence="Hello", response="Hi There")
    written._matched_nodes = []
    written._template_node = None
    store._write_match_context_to_db(client_context, 1, written)
    store.commit()

    # Read it back into a fresh context and verify the fields survived
    restored = MatchContext(100, 100)
    store._read_match_context_from_db(client_context, 1, restored)
    self.assertEqual(100, restored.max_search_timeout)
    self.assertEqual(100, restored.max_search_depth)
    self.assertEqual("Hello", restored.sentence)
    self.assertEqual("Hi There", restored.response)

    store.empty()
def test_attrib_with_html(self):
    """An XML attribute containing AIML (<star />) parses into template nodes and resolves."""
    template = ET.fromstring(""" <template> <a target="_new" href="http://www.google.com/search?q=<star />"> Google Search </a> </template> """)

    conversation = Conversation(self._client_context)
    question = Question.create_from_text(self._client_context, "GOOGLE AIML",
                                         self._client_context.bot.sentence_splitter)
    question.current_sentence()._response = "OK"
    conversation.record_dialog(question)

    wildcard = PatternOneOrMoreWildCardNode("*")
    matched = MatchContext(max_search_depth=100, max_search_timeout=-1)
    matched.add_match(Match(Match.WORD, wildcard, "AIML"))
    question.current_sentence()._matched_context = matched
    self._client_context.bot._conversation_mgr._conversations["testid"] = conversation

    ast = self._graph.parse_template_expression(template)
    self.assertIsNotNone(ast)
    self.assertIsInstance(ast, TemplateNode)
    self.assertIsNotNone(ast.children)
    self.assertEqual(len(ast.children), 1)

    xml_node = ast.children[0]
    self.assertIsNotNone(xml_node)
    self.assertIsInstance(xml_node, TemplateXMLNode)

    attribs = xml_node.attribs
    self.assertEqual(2, len(attribs))

    # Plain attribute value -> single word node
    target = attribs['target']
    self.assertIsInstance(target, TemplateWordNode)
    self.assertEqual(len(target.children), 0)
    self.assertEqual("_new", target.word)

    # Attribute with embedded <star /> -> word + star + trailing word
    href = attribs['href']
    self.assertIsInstance(href, TemplateNode)
    self.assertEqual(len(href.children), 3)
    self.assertIsInstance(href.children[0], TemplateWordNode)
    self.assertEqual('http://www.google.com/search?q=', href.children[0].word)
    self.assertIsInstance(href.children[1], TemplateNode)
    self.assertEqual(1, len(href.children[1].children))
    self.assertIsInstance(href.children[1].children[0], TemplateStarNode)
    self.assertIsInstance(href.children[2], TemplateWordNode)
    self.assertEqual('', href.children[2].word)

    result = xml_node.resolve(self._client_context)
    self.assertIsNotNone(result)
    self.assertEqual(result, '<a target="_new" href="http://www.google.com/search?q=AIML">Google Search</a>')
def match_sentence(self, client_context, pattern_sentence, topic_pattern, that_pattern):
    """Match a sentence (plus topic and that patterns) against the pattern graph.

    Returns the MatchContext on a successful match, otherwise None. On success the
    context is also attached to the pattern sentence for later template resolution.
    """
    topic_sentence = Sentence(client_context, topic_pattern)
    that_sentence = Sentence(client_context, that_pattern)

    YLogger.debug(client_context, "AIML Parser matching sentence [%s], topic=[%s], that=[%s] ",
                  pattern_sentence.text(client_context), topic_pattern, that_pattern)

    # Build the composite search sentence: words __TOPIC__ topic __THAT__ that
    full_sentence = Sentence(client_context)
    full_sentence.append_sentence(pattern_sentence)
    full_sentence.append_word('__TOPIC__')
    full_sentence.append_sentence(topic_sentence)
    full_sentence.append_word('__THAT__')
    full_sentence.append_sentence(that_sentence)

    YLogger.debug(client_context, "Matching [%s]", full_sentence.words_from_current_pos(client_context, 0))

    bot_config = client_context.bot.configuration
    context = MatchContext(max_search_depth=bot_config.max_search_depth,
                           max_search_timeout=bot_config.max_search_timeout)

    template = self._pattern_parser.root.match(client_context, context, full_sentence)
    if template is None:
        return None

    context.template_node = template
    context.list_matches(client_context)
    # Save the matched context for the associated sentence
    pattern_sentence.matched_context = context
    return context
def test_resolve_no_defaults_inside_topic(self):
    """TemplateTopicStarNode resolves to the topic matched for the current sentence."""
    root = TemplateNode()
    self.assertIsNotNone(root)
    self.assertIsNotNone(root.children)
    self.assertEqual(len(root.children), 0)

    star_node = TemplateTopicStarNode(index=1)
    self.assertIsNotNone(star_node)
    self.assertIsInstance(star_node.index, TemplateNode)
    root.append(star_node)
    self.assertEqual(len(root.children), 1)

    conversation = Conversation(self._client_context)

    question = Question.create_from_text(self._client_context, "Hello world")
    question.current_sentence()._response = "Hello matey"
    conversation.record_dialog(question)

    question = Question.create_from_text(self._client_context, "How are you")
    question.current_sentence()._response = "Very well thanks"
    conversation.record_dialog(question)

    wildcard = PatternOneOrMoreWildCardNode("*")
    matched = MatchContext(max_search_depth=100, max_search_timeout=-1)
    matched.add_match(Match(Match.TOPIC, wildcard, "Matched"))
    question.current_sentence()._matched_context = matched
    conversation.record_dialog(question)

    self._client_context.bot._conversation_mgr._conversations["testid"] = conversation

    self.assertEqual("Matched", star_node.resolve(self._client_context))
def test_node_with_star(self):
    """TemplateThatStarNode resolves to the 'that' matched for the current sentence."""
    root = TemplateNode()
    root.append(TemplateThatStarNode())

    conversation = Conversation(self._client_context)

    question = Question.create_from_text(self._client_context, "Hello world")
    question.current_sentence()._response = "Hello matey"
    conversation.record_dialog(question)

    question = Question.create_from_text(self._client_context, "How are you")
    question.current_sentence()._response = "Very well thanks"
    conversation.record_dialog(question)

    wildcard = PatternOneOrMoreWildCardNode("*")
    matched = MatchContext(max_search_depth=100, max_search_timeout=-1)
    matched.add_match(Match(Match.THAT, wildcard, "Matched"))
    question.current_sentence()._matched_context = matched
    conversation.record_dialog(question)

    self._client_context.bot._conversation_mgr._conversations["testid"] = conversation

    self.assertEqual("Matched", root.resolve(self._client_context))
def test_match_context_depth(self):
    """A context reports matched() only once a template node has been attached."""
    ctx = MatchContext(max_search_depth=100, max_search_timeout=60)
    self.assertEqual(100, ctx.max_search_depth)
    self.assertEqual(60, ctx.max_search_timeout)
    self.assertFalse(ctx.matched())

    ctx.template_node = PatternTemplateNode(template=TemplateNode())
    self.assertIsNotNone(ctx.template_node)
    self.assertTrue(ctx.matched())
def test_set_get_matched_nodes(self):
    """set_matched_nodes replaces the context's matched node list."""
    context1 = MatchContext(max_search_depth=100, max_search_timeout=60, matched_nodes=[])
    # assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual([], context1.matched_nodes)
    context1.set_matched_nodes(
        [Match(Match.WORD, PatternWordNode("Hello"), "Hello")])
    self.assertEqual(1, len(context1.matched_nodes))
def test_list_matches_empty(self):
    """list_matches on an empty context reports a zero score and no response."""
    global total_str
    context = MatchContext(max_search_depth=100, max_search_timeout=60, matched_nodes=[])
    total_str = ""
    context.list_matches(self._client_context, output_func=collector)
    # assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual("\tMatches...\tMatch score 0.00\t\tResponse: None", total_str)
def test_get_set_matched_nodes(self):
    """The matched_nodes property reflects the internal _matched_nodes list."""
    ctx = MatchContext(max_search_depth=100, max_search_timeout=60)
    ctx._matched_nodes = [
        Match(Match.WORD, PatternWordNode("Hello"), "Hello"),
        Match(Match.WORD, PatternWordNode("There"), "There"),
    ]
    self.assertEqual(2, len(ctx.matched_nodes))

    nodes = ctx.matched_nodes
    self.assertEqual(2, len(nodes))
def test_time_functions(self):
    """search_time_exceeded honours -1 (never), 0 (always) and elapsed-time checks."""
    context = MatchContext(max_search_depth=100, max_search_timeout=-1)
    self.assertEqual(-1, context.max_search_timeout)
    self.assertFalse(context.search_time_exceeded())

    context = MatchContext(max_search_depth=100, max_search_timeout=0)
    self.assertEqual(0, context.max_search_timeout)
    self.assertTrue(context.search_time_exceeded())

    context = MatchContext(max_search_depth=100, max_search_timeout=60)
    time_now = datetime.datetime.now()
    # Bug fix: the original subtracted timedelta(seconds=-70), which placed the
    # search start 70 seconds in the FUTURE. The intent ("prev_time") is a start
    # 70 seconds in the past so that the 60-second timeout has been exceeded.
    prev_time = time_now - datetime.timedelta(seconds=70)
    context._total_search_start = prev_time
    self.assertTrue(context.search_time_exceeded())
def assert_just_conversation_storage(self, store, can_empty=True, test_load=True):
    """Store a single-question conversation in the given store (shared test helper)."""
    if can_empty is True:
        store.empty()

    client = TestClient()
    client_context = client.create_client_context("user1")

    conversation = Conversation(client_context)

    question1 = Question.create_from_text(client_context, "Hello There")
    question1.sentence(0).response = "Hi"
    question1.sentence(0).matched_context = MatchContext(
        max_search_depth=99,
        max_search_timeout=99,
        matched_nodes=[Match(Match.WORD, PatternWordNode("Hello"), "Hello")],
        template_node=None,
        sentence="HELLO",
        response="Hi There")
    conversation.record_dialog(question1)

    store.store_conversation(client_context, conversation)
def test_check_child_is_wildcard_star(self):
    """check_child_is_wildcard with a one-or-more '*' child: match vs no-match cases."""
    wildcard = MockPatternWildCardNode("*")
    self.assertIsNotNone(wildcard)
    wildcard._1ormore_star = PatternOneOrMoreWildCardNode('*')
    wildcard._1ormore_star._template = PatternTemplateNode(TemplateNode())

    # Two-word sentence: expected to produce a match
    ctx = MatchContext(max_search_depth=100, max_search_timeout=-1)
    two_words = Sentence(self._client_context, "TEST SENTENCE")
    self.assertIsNotNone(
        wildcard.check_child_is_wildcard("", self._client_context, ctx, two_words, 0, Match.WORD, 0))

    # Single-word sentence: expected to produce no match
    ctx = MatchContext(max_search_depth=100, max_search_timeout=-1)
    one_word = Sentence(self._client_context, "TEST")
    self.assertIsNone(
        wildcard.check_child_is_wildcard("", self._client_context, ctx, one_word, 0, Match.WORD, 0))
def setUp(self):
    """Build a client context and a sentence pre-loaded with matched wildcard nodes."""
    self._client = TemplateGraphClient()
    self._client_context = self._client.create_client_context("testid")
    self._graph = self._client_context.bot.brain.aiml_parser.template_parser

    self.test_sentence = Sentence(self._client_context, "test sentence")

    test_node = PatternOneOrMoreWildCardNode("*")
    self.test_sentence._matched_context = MatchContext(
        max_search_depth=100, max_search_timeout=-1)
    matches = [Match(Match.WORD, test_node, word)
               for word in ('one', 'two', 'three', 'four', 'five', 'six')]
    matches.append(Match(Match.TOPIC, test_node, '*'))
    matches.append(Match(Match.THAT, test_node, '*'))
    self.test_sentence._matched_context._matched_nodes = matches

    conversation = self._client_context.bot.get_conversation(self._client_context)
    question = Question.create_from_sentence(self.test_sentence)
    conversation._questions.append(question)
def test_init_match_nodes_empty(self):
    """Constructing with an empty matched_nodes list leaves it empty."""
    ctx = MatchContext(max_search_depth=100, max_search_timeout=60, matched_nodes=[])
    self.assertEqual(100, ctx.max_search_depth)
    self.assertEqual(60, ctx.max_search_timeout)
    self.assertEqual([], ctx.matched_nodes)
    self.assertIsInstance(ctx.total_search_start, datetime.datetime)
def test_check_child_is_wildcard_no_wildcard_children(self):
    """Without any wildcard children, check_child_is_wildcard returns None."""
    wildcard = MockPatternWildCardNode("*")
    self.assertIsNotNone(wildcard)

    ctx = MatchContext(max_search_depth=100, max_search_timeout=-1)
    words = Sentence(self._client_context, "TEST SENTENCE")
    result = wildcard.check_child_is_wildcard("", self._client_context, ctx, words, 0, Match.WORD, 0)
    self.assertIsNone(result)
def test_read_write_matches_in_db(self):
    """Round-trip a context's matched nodes through the SQL conversation store."""
    engine = SQLStorageEngine(SQLStorageConfiguration())
    engine.initialise()
    store = SQLConversationStore(engine)
    store.empty()

    client = TestClient()
    client_context = client.create_client_context("user1")

    written = MatchContext(100, 100, sentence="Hello", response="Hi There")
    written.matched_nodes.append(Match(Match.WORD, PatternWordNode("Hello"), "Hello"))
    store._write_matches_to_db(client_context, written, 1)
    store.commit()

    loaded = MatchContext(0, 0)
    store._read_matches_from_db(client_context, loaded, 1)
    self.assertEqual(1, len(loaded.matched_nodes))

    node = loaded.matched_nodes[0]
    self.assertEqual(Match.WORD, node.matched_node_type)
    self.assertEqual("WORD [Hello]", node.matched_node_str)
    self.assertFalse(node.matched_node_multi_word)
    self.assertFalse(node.matched_node_wildcard)
    self.assertEqual(1, len(node.matched_node_words))
    self.assertEqual(["Hello"], node.matched_node_words)

    store.empty()
def test_invalid_topic_or_that(self):
    """invalid_topic_or_that is True for TOPIC/THAT markers, False for other text."""
    wildcard = MockPatternWildCardNode("*")
    self.assertIsNotNone(wildcard)

    ctx = MatchContext(max_search_depth=100, max_search_timeout=-1)
    matches_added = 1

    self.assertTrue(wildcard.invalid_topic_or_that(
        "", self._client_context, PatternTopicNode.TOPIC, ctx, matches_added))
    self.assertTrue(wildcard.invalid_topic_or_that(
        "", self._client_context, PatternTopicNode.THAT, ctx, matches_added))
    self.assertFalse(wildcard.invalid_topic_or_that(
        "", self._client_context, "TEST", ctx, matches_added))
def test_to_json(self):
    """to_json serialises depth, timeout, start time and every matched node."""
    topic = PatternOneOrMoreWildCardNode("*")
    word1 = PatternWordNode("Hi")
    word2 = PatternWordNode("There")
    context = MatchContext(max_search_depth=100,
                           max_search_timeout=60,
                           template_node=PatternTemplateNode(TemplateWordNode("Hello")))
    context.add_match(Match(Match.TOPIC, topic, None))
    context.add_match(Match(Match.WORD, word1, "Hi"))
    context.add_match(Match(Match.WORD, word2, "There"))

    json_data = context.to_json()
    self.assertIsNotNone(json_data)
    # assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual(json_data["max_search_depth"], 100)
    self.assertEqual(json_data["max_search_timeout"], 60)
    self.assertIsInstance(json_data["total_search_start"], datetime.datetime)
    self.assertEqual(3, len(json_data["matched_nodes"]))
    self.assertEqual(json_data["matched_nodes"][0], {
        'multi_word': True, 'node': 'ONEORMORE [*]', 'type': 'Topic',
        'wild_card': True, 'words': []})
    self.assertEqual(json_data["matched_nodes"][1], {
        'multi_word': False, 'node': 'WORD [Hi]', 'type': 'Word',
        'wild_card': False, 'words': ["Hi"]})
    self.assertEqual(json_data["matched_nodes"][2], {
        'multi_word': False, 'node': 'WORD [There]', 'type': 'Word',
        'wild_card': False, 'words': ["There"]})
def test_init_match_nodes_not_empty(self):
    """The constructor accepts a pre-populated matched_nodes list."""
    nodes = [Match(Match.WORD, PatternWordNode("Hello"), "Hello"),
             Match(Match.WORD, PatternWordNode("There"), "There")]
    ctx = MatchContext(max_search_depth=100, max_search_timeout=60, matched_nodes=nodes)
    self.assertEqual(100, ctx.max_search_depth)
    self.assertEqual(60, ctx.max_search_timeout)
    self.assertEqual(2, len(ctx.matched_nodes))
    self.assertIsInstance(ctx.total_search_start, datetime.datetime)
def from_json(client_context, json_data):
    """Rebuild a Sentence from its JSON dictionary representation."""
    sentence = Sentence(client_context)
    for attr in ('words', 'response', 'positivity', 'subjectivity'):
        setattr(sentence, attr, json_data[attr])
    # matched_context is optional in the serialised form
    if 'matched_context' in json_data:
        sentence.matched_context = MatchContext.from_json(json_data["matched_context"])
    return sentence
def test_consume_out_of_words_without_template(self):
    """consume returns None when no words remain and the node has no template."""
    node = PatternNode()
    self.assertIsNotNone(node)

    ctx = MatchContext(max_search_depth=100, max_search_timeout=100)
    empty_words = Sentence(self._client_context)
    self.assertIsNone(
        node.consume(self._client_context, ctx, empty_words, 0, Match.WORD, 1, parent=False))
def test_consume_search_depth_exceeded(self):
    """consume returns None when max_search_depth is already exhausted (0)."""
    node = PatternZeroOrMoreWildCardNode("^")
    self.assertIsNotNone(node)

    ctx = MatchContext(max_search_depth=0, max_search_timeout=100)
    words = Sentence(self._client_context)
    self.assertIsNone(
        node.consume(self._client_context, ctx, words, 0, Match.WORD, 1, parent=False))
def test_from_json(self):
    """from_json restores depth, timeout, start time, sentence, response and nodes."""
    time1 = datetime.datetime(2019, 8, 29, 17, 25, 7, 141098).strftime("%d/%m/%Y, %H:%M:%S")
    json_data = {
        'max_search_depth': 100,
        'max_search_timeout': 60,
        'total_search_start': time1,
        'sentence': 'Hello',
        'response': 'Hi there',
        'matched_nodes': [
            {'type': 'Topic', 'node': 'ONEORMORE [*]', 'words': [],
             'multi_word': True, 'wild_card': True},
            {'type': 'Word', 'node': 'WORD [Hi]', 'words': ['Hi'],
             'multi_word': False, 'wild_card': False},
            {'type': 'Word', 'node': 'WORD [There]', 'words': ['There'],
             'multi_word': False, 'wild_card': False},
        ]
    }

    match_context = MatchContext.from_json(json_data)
    self.assertIsNotNone(match_context)
    # assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual(match_context._max_search_depth, 100)
    self.assertEqual(match_context._max_search_timeout, 60)
    time2 = match_context._total_search_start.strftime("%d/%m/%Y, %H:%M:%S")
    self.assertEqual(time1, time2)
    self.assertEqual(match_context.template_node, None)
    self.assertEqual(match_context.sentence, "Hello")
    self.assertEqual(match_context.response, "Hi there")
    self.assertEqual(3, len(match_context._matched_nodes))
def test_to_json(self):
    """Sentence.to_json succeeds when a matched context is attached."""
    wildcard = PatternOneOrMoreWildCardNode("*")
    hi_word = PatternWordNode("Hi")
    there_word = PatternWordNode("There")
    context = MatchContext(max_search_depth=100, max_search_timeout=60,
                           template_node=TemplateWordNode("Hello"))
    context.add_match(Match(Match.TOPIC, wildcard, None))
    context.add_match(Match(Match.WORD, hi_word, "Hi"))
    context.add_match(Match(Match.WORD, there_word, "There"))

    sentence = Sentence(self._client_context, "One Two Three", matched_context=context)
    self.assertIsNotNone(sentence.to_json())
def test_consume_that(self):
    """consume returns None when the sentence is only the __THAT__ marker."""
    node = PatternNode()
    self.assertIsNotNone(node)
    node.add_that(PatternThatNode())

    ctx = MatchContext(max_search_depth=100, max_search_timeout=100)
    words = Sentence(self._client_context, text="__THAT__")
    self.assertIsNone(
        node.consume(self._client_context, ctx, words, 0, Match.WORD, 1, parent=False))
def _read_sentences_from_db(self, client_context, questiondid, question):
    """Load every sentence for the given question id and attach them to the question."""
    query = self._storage_engine.session.query(SentenceDAO)
    for dao in query.filter(SentenceDAO.questionid == questiondid):
        YLogger.debug(client_context, "Loading sentence %s", dao)
        sentence = Sentence(client_context, dao.sentence)
        sentence._response = dao.response
        sentence._positivity = float(dao.positivity)
        sentence._subjectivity = float(dao.subjectivity)
        # Placeholder context, populated from the DB below
        sentence._matched_context = MatchContext(0, 0)
        question.sentences.append(sentence)
        self._read_match_context_from_db(client_context, dao.id, sentence._matched_context)
def test_consume_topic(self):
    """consume returns None when the sentence is only the __TOPIC__ marker."""
    node = PatternZeroOrMoreWildCardNode("^")
    self.assertIsNotNone(node)
    node.add_topic(PatternTopicNode())

    ctx = MatchContext(max_search_depth=100, max_search_timeout=100)
    words = Sentence(self._client_context, text="__TOPIC__")
    self.assertIsNone(
        node.consume(self._client_context, ctx, words, 0, Match.WORD, 1, parent=False))
def test_consume_with_priority_mismatch(self):
    """consume returns None when the priority word does not match the sentence."""
    node = PatternZeroOrMoreWildCardNode("^")
    priority = PatternPriorityWordNode("$TEST")
    node.add_child(priority)
    priority.add_template(PatternTemplateNode(TemplateWordNode("word")))

    ctx = MatchContext(max_search_depth=100, max_search_timeout=100)
    words = Sentence(self._client_context, text="THIS $TEST2")
    self.assertIsNone(
        node.consume(self._client_context, ctx, words, 0, Match.WORD, 1, parent=False))
def test_to_json(self):
    """Serialise a Conversation to JSON and verify a faithful round-trip via from_json."""
    client = TestClient()
    client_context = ClientContext(client, "testid")
    client_context.bot = Bot(BotConfiguration(), client)
    client_context.bot.configuration.conversations._max_histories = 3
    client_context.brain = client_context.bot.brain

    conversation1 = Conversation(client_context)
    conversation1.properties["convo1"] = "value1"

    matched_context = MatchContext(100, 100)
    matched_context.matched_nodes.append(Match(Match.WORD, PatternWordNode("Hello"), "Hello"))
    sentence = Sentence(client_context, text="Hi", response="Hello there",
                        matched_context=matched_context)
    question1 = Question.create_from_sentence(sentence)
    question1.properties['quest1'] = "value2"
    conversation1.record_dialog(question1)

    json_data = conversation1.to_json()
    self.assertIsNotNone(json_data)
    self.assertEqual("testclient", json_data['client_context']['clientid'])
    self.assertEqual("testid", json_data['client_context']['userid'])
    self.assertEqual("bot", json_data['client_context']['botid'])
    self.assertEqual("brain", json_data['client_context']['brainid'])
    self.assertEqual(0, json_data['client_context']['depth'])

    conversation2 = Conversation.from_json(client_context, json_data)

    self.assertEqual(conversation1._client_context.client.id,
                     conversation2._client_context.client.id)
    self.assertEqual(conversation1._client_context.userid,
                     conversation2._client_context.userid)
    self.assertEqual(conversation1._client_context.bot.id,
                     conversation2._client_context.bot.id)
    self.assertEqual(conversation1._client_context.brain.id,
                     conversation2._client_context.brain.id)
    self.assertEqual(conversation1._client_context._question_start_time,
                     conversation2._client_context._question_start_time)
    self.assertEqual(conversation1._client_context._question_depth,
                     conversation2._client_context._question_depth)
    self.assertEqual(conversation1._client_context._id,
                     conversation2._client_context._id)

    self.assertEqual(conversation1.properties, conversation2.properties)
    self.assertEqual(conversation1.max_histories, conversation2.max_histories)

    # assertEquals/assertNotEquals are deprecated unittest aliases;
    # use assertEqual/assertNotEqual throughout
    self.assertNotEqual(0, len(conversation1.questions))
    self.assertNotEqual(0, len(conversation2.questions))
    self.assertEqual(len(conversation1.questions), len(conversation2.questions))

    for i in range(len(conversation2.questions)):
        q1 = conversation1.questions[i]
        q2 = conversation2.questions[i]
        self.assertEqual(q1.srai, q2.srai)
        self.assertEqual(q1._current_sentence_no, q2._current_sentence_no)
        self.assertEqual(q1.properties, q2.properties)

        self.assertNotEqual(0, len(q1.sentences))
        self.assertNotEqual(0, len(q2.sentences))
        self.assertEqual(len(q1.sentences), len(q2.sentences))

        for j in range(len(q2.sentences)):
            s1 = q1.sentences[j]
            s2 = q2.sentences[j]
            self.assertEqual(s1.words, s2.words)
            self.assertEqual(s1.response, s2.response)
            self.assertEqual(s1.positivity, s2.positivity)
            self.assertEqual(s1.subjectivity, s2.subjectivity)

            mc1 = s1.matched_context
            mc2 = s2.matched_context
            self.assertEqual(mc1.template_node, mc2.template_node)
            self.assertEqual(mc1.max_search_depth, mc2.max_search_depth)
            self.assertEqual(mc1.max_search_timeout, mc2.max_search_timeout)
            # Start times only round-trip to second precision
            time1 = mc1._total_search_start.strftime("%d/%m/%Y, %H:%M:%S")
            time2 = mc2._total_search_start.strftime("%d/%m/%Y, %H:%M:%S")
            self.assertEqual(time1, time2)

            self.assertNotEqual(0, len(mc1.matched_nodes))
            self.assertNotEqual(0, len(mc2.matched_nodes))
            self.assertEqual(len(mc1.matched_nodes), len(mc2.matched_nodes))

            for k in range(len(mc1.matched_nodes)):
                mn1 = mc1.matched_nodes[k]
                mn2 = mc2.matched_nodes[k]
                self.assertEqual(mn1._matched_node_str, mn2._matched_node_str)
                self.assertEqual(mn1._matched_node_type, mn2._matched_node_type)
                self.assertEqual(mn1._matched_node_multi_word, mn2._matched_node_multi_word)
                self.assertEqual(mn1._matched_node_wildcard, mn2._matched_node_wildcard)
                self.assertEqual(mn1._matched_node_words, mn2._matched_node_words)