def assert_upload_from_file(self, store):
    """Upload the bundled pattern-node config through *store*, reload it,
    and verify the expected node set is present."""
    conf_path = os.sep.join([os.path.dirname(__file__), "data", "nodes", "pattern_nodes.conf"])
    store.upload_from_file(conf_path)
    factory = PatternNodeFactory()
    store.load(factory)
    # The bundled config declares exactly 12 node types.
    self.assertEqual(12, len(factory.nodes))
    self.assertTrue(factory.exists("zeroormore"))
def test_load_variables(self):
    """Load pattern nodes through a file storage engine and check the result."""
    conf_path = os.sep.join([os.path.dirname(__file__), "data", "nodes", "pattern_nodes.conf"])
    storage_config = FileStorageConfiguration()
    storage_config._pattern_nodes_storage = FileStoreConfiguration(
        file=conf_path, format="text", encoding="utf-8", delete_on_start=False)
    storage_engine = FileStorageEngine(storage_config)
    storage_engine.initialise()
    node_store = FilePatternNodeStore(storage_engine)
    factory = PatternNodeFactory()
    node_store.load(factory)
    self.assertEqual(12, len(factory.nodes))
    self.assertTrue(factory.exists("zeroormore"))
def __init__(self, aiml_parser, root_node=None):
    """Build the pattern graph: load node classes from the parser's brain
    configuration and install *root_node* (or the factory default) as root.

    Raises ParserException if a caller-supplied root is not a root node.
    """
    self._aiml_parser = aiml_parser
    nodes_config = aiml_parser.brain.configuration.nodes.pattern_nodes
    factory = PatternNodeFactory()
    factory.load_nodes_config_from_file(nodes_config)
    self._pattern_factory = factory
    if root_node is not None:
        # Caller-supplied roots must honour the root-node contract.
        if root_node.is_root() is False:
            raise ParserException("Root node needs to be of base type PatternRootNode")
        self._root_node = root_node
    else:
        YLogger.debug(self, "Defaulting root to PatternRootNode")
        self._root_node = self._pattern_factory.get_root_node()
def __init__(self, aiml_parser=None, root_node=None):
    """Build the pattern graph; the parser (and its brain config) is optional.

    With no parser/brain available, the factory is loaded with a None config
    path. Raises ParserException if a supplied root is not a root node.
    """
    self._aiml_parser = aiml_parser
    nodes_config = None
    if aiml_parser is not None and aiml_parser.brain is not None:
        nodes_config = aiml_parser.brain.configuration.nodes.pattern_nodes
    self._pattern_factory = PatternNodeFactory()
    self._pattern_factory.load_nodes_config_from_file(nodes_config)
    if root_node is not None:
        if root_node.is_root() is False:
            raise ParserException("Root node needs to be of base type PatternRootNode")
        self._root_node = root_node
    else:
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logging.debug("Defaulting root to PatternRootNode")
        self._root_node = self._pattern_factory.get_root_node()
def __init__(self, root_node=None, pattern_factory=None):
    """Build the pattern graph from an optional root node and node factory.

    Defaults: a fresh PatternRootNode, and a PatternNodeFactory loaded from
    the dummy config path. Raises ParserException for an invalid root node
    or a factory that is not a PatternNodeFactory.
    """
    if root_node is not None:
        if root_node.is_root() is False:
            raise ParserException("Root node needs to be of base type PatternRootNode")
        self._root_node = root_node
    else:
        logging.debug("Defaulting root to PatternRootNode")
        self._root_node = PatternRootNode()
    if pattern_factory is not None:
        if isinstance(pattern_factory, PatternNodeFactory) is False:
            raise ParserException("Pattern factory needs to be base class of PatternNodeFactory")
        self._pattern_factory = pattern_factory
    else:
        logging.debug("Defaulting node factory to PatternNodeFactory")
        self._pattern_factory = PatternNodeFactory()
        self._pattern_factory.load_nodes_config_from_file("dummy_config.conf")
class PatternGraph(object):
    """AIML pattern-side parse graph.

    Builds and owns the tree of pattern nodes (word / wildcard / priority /
    topic / that / template nodes) that the matcher walks when resolving a
    user sentence.  Node classes are created through a PatternNodeFactory
    loaded from the brain configuration, so deployments can substitute
    their own node implementations.
    """

    def __init__(self, aiml_parser, root_node=None):
        # Parser that owns this graph; also gives access to the brain's
        # configuration and tokenizer.
        self._aiml_parser = aiml_parser
        self.load_pattern_node_factory()
        if root_node is None:
            YLogger.debug(self, "Defaulting root to PatternRootNode")
            self._root_node = self._pattern_factory.get_root_node()
        else:
            # A caller-supplied root must satisfy the root-node contract.
            if root_node.is_root() is False:
                raise ParserException("Root node needs to be of base type PatternRootNode")
            self._root_node = root_node

    def load_pattern_node_factory(self):
        """Create the node factory and load node classes from the brain config."""
        pattern_nodes = self._aiml_parser.brain.configuration.nodes.pattern_nodes
        self._pattern_factory = PatternNodeFactory()
        self._pattern_factory.load_nodes_config_from_file(pattern_nodes)

    @property
    def root(self):
        # Root node of the pattern tree.
        return self._root_node

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def pattern_factory(self):
        return self._pattern_factory

    def empty(self):
        """Discard all learned patterns and reset to a fresh root node."""
        YLogger.debug(self, "Defaulting root to PatternRootNode")
        self._empty_children(self._root_node)
        self._root_node = self._pattern_factory.get_root_node()

    def _empty_children(self, node):
        # Depth-first clear of every child's children.
        for child in node.children:
            self._empty_children(child)
            child.children.clear()

    def node_from_text(self, word, userid="*"):
        """Map a single pattern token to a node instance.

        ``$WORD`` -> priority node (leading ``$`` stripped); zero-or-more /
        one-or-more wildcard symbols -> the matching wildcard node; anything
        else -> a plain word node.
        """
        if word.startswith("$"):
            node_class = self._pattern_factory.new_node_class('priority')
            return node_class(word[1:], userid)
        elif PatternZeroOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('zeroormore')
            return node_class(word, userid)
        elif PatternOneOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('oneormore')
            return node_class(word, userid)
        node_class = self._pattern_factory.new_node_class('word')
        return node_class(word, userid)

    def node_from_element(self, element, userid="*"):
        """Map an embedded XML element (e.g. <set>, <bot>) to a node instance.

        Raises ParserException when the factory does not know the tag name.
        """
        node_name = TextUtils.tag_from_text(element.tag)
        if self._pattern_factory.exists(node_name) is False:
            raise ParserException("Unknown node name [%s]" % node_name)
        text = None
        if element.text is not None:
            text = TextUtils.strip_whitespace(element.text)
        node_class_instance = self._pattern_factory.new_node_class(node_name)
        node_instance = node_class_instance(element.attrib, text, userid)
        return node_instance

    def _parse_text(self, pattern_text, current_node, userid="*"):
        """Tokenize *pattern_text* and append one node per word under
        *current_node*; return the last node added (or *current_node*
        unchanged when no words were produced)."""
        stripped = pattern_text.strip()
        words = self._aiml_parser.brain.tokenizer.texts_to_words(stripped)
        for word in words:
            if word != '':  # Blank nodes add no value, ignore them
                word = TextUtils.strip_whitespace(word)
                new_node = self.node_from_text(word, userid=userid)
                current_node = current_node.add_child(new_node)
        return current_node

    def get_text_from_element(self, element):
        """Return the element's stripped leading text, or None when absent/empty."""
        text = element.text
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def get_tail_from_element(self, element):
        """Return the element's stripped tail text, or None when absent/empty."""
        text = element.tail
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def add_pattern_to_node(self, pattern_element, userid="*"):
        """Parse a <pattern> element into the graph below the root.

        Handles mixed content: leading text, embedded child elements, and
        the tail text following each child.  Returns the last node of the
        pattern path.  A ParserException is re-raised with the offending
        XML element attached for diagnostics.
        """
        try:
            head_text = self.get_text_from_element(pattern_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, self._root_node, userid=userid)
            else:
                current_node = self._root_node
            for sub_element in pattern_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = pattern_element
            raise parser_excep

    def add_topic_to_node(self, topic_element, base_node, userid="*"):
        """Append a topic branch (topic marker plus the topic's own pattern)
        under *base_node*; raises if the topic element has no content."""
        try:
            current_node = self._pattern_factory.new_node_class('topic')(userid)
            current_node = base_node.add_topic(current_node)
            head_text = self.get_text_from_element(topic_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, current_node)
            added_child = False
            for sub_element in topic_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("Topic node text is empty", xml_element=topic_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = topic_element
            raise parser_excep

    def add_that_to_node(self, that_element, base_node, userid="*"):
        """Append a that branch under *base_node*.

        Head text has all punctuation stripped before tokenizing; raises if
        the that element has no content.
        """
        try:
            current_node = self._pattern_factory.new_node_class('that')(userid)
            current_node = base_node.add_that(current_node)
            head_text = self.get_text_from_element(that_element)
            if head_text is not None:
                current_node = self._parse_text(TextUtils.strip_all_punctuation(head_text), current_node)
            added_child = False
            for sub_element in that_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("That node text is empty", xml_element=that_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = that_element
            raise parser_excep

    def add_template_to_node(self, template_graph_root, current_node, userid="*"):
        """Attach (or replace) the template node that terminates a pattern path."""
        template_node = self._pattern_factory.new_node_class('template')(template_graph_root, userid)
        current_node = current_node.add_child(template_node, replace_existing=True)
        return current_node

    def add_pattern_to_graph(self, pattern_element, topic_element, that_element, template_graph_root, learn=False, userid="*"):
        """Add a full category (pattern / topic / that / template) to the graph.

        A duplicate path raises DuplicateGrammarException unless *learn* is
        True, in which case the existing template is replaced with a warning.
        Returns the final 'that' node of the inserted path.
        """
        pattern_node = self.add_pattern_to_node(pattern_element, userid=userid)
        topic_node = self.add_topic_to_node(topic_element, pattern_node, userid=userid)
        that_node = self.add_that_to_node(that_element, topic_node, userid=userid)
        if that_node.has_template() is True:
            if learn is False:
                # NOTE(review): "Dupicate" typo exists in the original
                # messages; left untouched since callers/tests may match it.
                if pattern_element.text is not None:
                    raise DuplicateGrammarException("Dupicate grammar tree found [%s]" % (pattern_element.text.strip()))
                else:
                    raise DuplicateGrammarException("Dupicate grammar tree found for bot/set")
            else:
                if pattern_element.text is not None:
                    YLogger.warning(self, "Dupicate grammar tree found [%s] in learn, replacing existing", pattern_element.text.strip())
                else:
                    YLogger.warning(self, "Dupicate grammar tree found for bot/set in learn, replacing existing")
                self.add_template_to_node(template_graph_root, that_node)
        else:
            self.add_template_to_node(template_graph_root, that_node)
        return that_node

    def count_words_in_patterns(self):
        """Return the total number of nodes in the graph."""
        counter = [0]  # one-element list so the recursion can mutate it
        self._count_words_in_children(self._root_node, counter)
        return counter[0]

    def _count_words_in_children(self, node, counter):
        for child in node.children:
            counter[0] += 1
            self._count_words_in_children(child, counter)

    def dump(self, output_func=YLogger.debug, eol="", verbose=True):
        """Write a textual dump of the whole graph through *output_func*."""
        self.root.dump("", output_func, eol, verbose)
        output_func(self, "")

    def save_braintree(self, client_context, filename, content):
        """Persist the graph to *filename* as 'txt' (dump format) or 'xml';
        any other content type is logged as an error."""
        if content == 'txt':
            with open(filename, "w+", encoding="utf-8") as dump_file:
                self.dump(output_func=dump_file.write, eol="\n")
        elif content == 'xml':
            braintree = '<?xml version="1.0" encoding="UTF-8"?>\n'
            braintree += '<aiml>\n'
            braintree += self.root.to_xml(client_context)
            braintree += '</aiml>\n'
            with open(filename, "w+", encoding="utf-8") as dump_file:
                dump_file.write(braintree)
        else:
            YLogger.error(client_context, "Unknown braintree content type [%s]", content)
def test_load_nodes_config_from_file_invalid_filename(self):
    """Loading from a non-existent file must still leave a usable factory."""
    node_factory = PatternNodeFactory()
    # Bogus path: the factory should fall back rather than blow up.
    node_factory.load_nodes_config_from_file("some_rubbish.txt")
    self.assert_nodes(node_factory)
def test_load_nodes_config_from_file(self):
    """Nodes load correctly from the config file next to this test module."""
    conf_path = os.sep.join([os.path.dirname(__file__), "pattern_nodes.conf"])
    node_factory = PatternNodeFactory()
    node_factory.load_nodes_config_from_file(conf_path)
    self.assert_nodes(node_factory)
def test_default_config_file(self):
    """The factory's built-in config path points into the parser package."""
    node_factory = PatternNodeFactory()
    default_path = node_factory.default_config_file()
    self.assertTrue(default_path.endswith("/programy/parser/pattern/pattern_nodes.conf"))
class Brain(object):
    """Aggregates everything a bot "knows": the AIML parse graph, the many
    lookup collections (sets, maps, normalisations, properties, ...),
    security, OOB handling and the optional NLU matching pipeline.
    """

    # Sentinel sentence text used when re-matching against stored NLU results.
    NLU_UTTERANCE = 'NLU_Matching'

    def __init__(self, bot, configuration: BrainConfiguration):
        assert (bot is not None)
        assert (configuration is not None)
        self._bot = bot
        self._configuration = configuration
        self._binaries = BinariesManager(configuration.binaries)
        self._braintree = BraintreeManager(configuration.braintree)
        self._tokenizer = Tokenizer.load_tokenizer(configuration)
        # Collections record load errors into this shared dict only when
        # debug error saving is switched on; None disables the bookkeeping.
        if configuration.debugfiles.save_errors_collection is True:
            errors_dict = {}
        else:
            errors_dict = None
        self._denormal_collection = DenormalCollection(errors_dict)
        self._normal_collection = NormalCollection(errors_dict)
        self._gender_collection = GenderCollection(errors_dict)
        self._person_collection = PersonCollection(errors_dict)
        self._person2_collection = Person2Collection(errors_dict)
        self._rdf_collection = RDFCollection(errors_dict)
        self._sets_collection = SetCollection(errors_dict)
        self._maps_collection = MapCollection(errors_dict)
        self._properties_collection = PropertiesCollection(errors_dict)
        self._default_variables_collection = DefaultVariablesCollection(errors_dict)
        self._botnames_collection = BotNamesCollection(errors_dict)
        self._preprocessors = PreProcessorCollection(errors_dict)
        self._postprocessors = PostProcessorCollection(errors_dict)
        # Node factories are populated later by load_aiml_parser().
        self._pattern_factory = None
        self._template_factory = None
        self._security = SecurityManager(configuration.security)
        self._oobhandler = OOBHandler(configuration.oob)
        self._regex_templates = RegexTemplatesCollection(errors_dict)
        self._dynamics_collection = DynamicsCollection()
        self._aiml_parser = self.load_aiml_parser()
        self._nlu_collection = NluCollection(bot.client, configuration.nlu, errors_dict)
        self._nlu = NluRequest.load_nlu(configuration.nlu)
        self._nlu_utterance = None
        self.load(self.configuration)
        # Persist any accumulated load errors once everything has loaded.
        if configuration.debugfiles.save_errors_collection is True:
            storage_factory = self.bot.client.storage_factory
            if storage_factory.entity_storage_engine_available(StorageFactory.ERRORS_COLLECTION) is True:
                errors_collection_engine = storage_factory.entity_storage_engine(StorageFactory.ERRORS_COLLECTION)
                errors_collection_store = errors_collection_engine.errors_collection_store()
                errors_collection_store.save_errors_collection(errors_dict)

    def ylogger_type(self):
        # Tag used by YLogger to categorise log lines from this object.
        return "brain"

    # -- Read-only accessors over the owned collections and services ------

    @property
    def id(self):
        return self._configuration.section_name

    @property
    def bot(self):
        return self._bot

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def rdf(self):
        return self._rdf_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def default_variables(self):
        return self._default_variables_collection

    @property
    def botnames(self):
        return self._botnames_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def pattern_factory(self):
        return self._pattern_factory

    @property
    def template_factory(self):
        return self._template_factory

    @property
    def regex_templates(self):
        return self._regex_templates

    @property
    def dynamics(self):
        return self._dynamics_collection

    @property
    def tokenizer(self):
        return self._tokenizer

    @property
    def nlu(self):
        return self._nlu

    @property
    def security(self):
        return self._security

    def load_aiml_parser(self):
        """Build the AIML parser after loading the pattern/template node factories."""
        self._load_pattern_nodes()
        self._load_template_nodes()
        return AIMLParser(self)

    def load_aiml(self):
        YLogger.debug(self, "Loading aiml source brain")
        self._aiml_parser.load_aiml()

    def reload_aimls(self):
        """Empty the parser's graphs and reload all AIML from source."""
        YLogger.debug(self, "Loading aiml source brain")
        self._aiml_parser.empty()
        self._aiml_parser.load_aiml()

    def load(self, configuration: BrainConfiguration):
        """Load properties, collections, dynamics, services and the AIML
        grammar itself (from binary cache when so configured)."""
        self._load_properties()
        # Tokenizer tuning can be overridden through bot properties.
        if self.properties.has_property("punctuation_chars") is True:
            self.tokenizer.set_configuration_punctuation_chars(self.properties.property("punctuation_chars"))
        if self.properties.has_property("before_concatenation_rule") is True:
            self.tokenizer.set_configuration_before_concatenation_rule(self.properties.property("before_concatenation_rule"))
        if self.properties.has_property("after_concatenation_rule") is True:
            self.tokenizer.set_configuration_after_concatenation_rule(self.properties.property("after_concatenation_rule"))
        YLogger.debug(self, "Loading collections")
        self.load_collections()
        YLogger.debug(self, "Loading dynamics sets, maps and vars")
        self.load_dynamics()
        YLogger.debug(self, "Loading services")
        self.load_services(configuration)
        load_aiml = True
        if self.configuration.binaries.load_binary is True:
            # Prefer the pre-parsed binary brain; fall back to source AIML.
            load_aiml = self._binaries.load_binary(self.bot.client.storage_factory)
            if load_aiml is False:
                self._aiml_parser = self._binaries.get_aiml_parser()
        if load_aiml is True:
            self.load_aiml()
            self._binaries.set_aiml_parser(self._aiml_parser)
        if configuration.binaries.save_binary is True:
            self._binaries.save_binary(self.bot.client.storage_factory)
        YLogger.debug(self, "Loading security services")
        self.load_security_services()
        YLogger.debug(self, "Loading oob processors")
        self._oobhandler.load_oob_processors()

    def dump_brain_tree(self, client_context):
        self._braintree.dump_brain_tree(client_context)

    # -- Collection (re)loading: empty then reload from storage ------------

    def _load_denormals(self):
        self._denormal_collection.empty()
        self._denormal_collection.load(self.bot.client.storage_factory)

    def _load_normals(self):
        self._normal_collection.empty()
        self._normal_collection.load(self.bot.client.storage_factory)

    def _load_genders(self):
        self._gender_collection.empty()
        self._gender_collection.load(self.bot.client.storage_factory)

    def _load_persons(self):
        self._person_collection.empty()
        self._person_collection.load(self.bot.client.storage_factory)

    def _load_person2s(self):
        self._person2_collection.empty()
        self._person2_collection.load(self.bot.client.storage_factory)

    def _load_botnames(self):
        self._botnames_collection.empty()
        self._botnames_collection.load(self.bot.client.storage_factory)

    def _load_properties(self):
        self._properties_collection.empty()
        self._properties_collection.load(self.bot.client.storage_factory)

    def _load_default_variables(self):
        self._default_variables_collection.empty()
        self._default_variables_collection.load(self.bot.client.storage_factory)
        self._set_system_defined()

    def _set_system_defined(self):
        # Seed system-defined sentiment defaults.
        self.set_sentiment_scores(0.0, 0.5)

    def set_sentiment_scores(self, positivity, subjectivity):
        """Set default sentiment variables without overwriting existing values."""
        if self._default_variables_collection.has_variable("positivity") is False:
            self._default_variables_collection.set_value("positivity", str(positivity))
        if self._default_variables_collection.has_variable("subjectivity") is False:
            self._default_variables_collection.set_value("subjectivity", str(subjectivity))

    def _load_maps(self):
        self._maps_collection.empty()
        self._maps_collection.load(self.bot.client.storage_factory)

    def reload_map(self, mapname):
        # Reload a single named map only when it is already known.
        if self._maps_collection.contains(mapname):
            self._maps_collection.reload(self.bot.client.storage_factory, mapname)

    def _load_sets(self):
        self._sets_collection.empty()
        self._sets_collection.load(self.bot.client.storage_factory)

    def reload_set(self, setname):
        if self._sets_collection.contains(setname):
            self._sets_collection.reload(self.bot.client.storage_factory, setname)

    def _load_rdfs(self):
        self._rdf_collection.empty()
        self._rdf_collection.load(self.bot.client.storage_factory)

    def reload_rdf(self, rdfname):
        if self._rdf_collection.contains(rdfname):
            self._rdf_collection.reload(self.bot.client.storage_factory, rdfname)

    def _load_regex_templates(self):
        # NOTE(review): unlike its siblings this loader does not call
        # empty() first — confirm whether that is intentional.
        self._regex_templates.load(self.bot.client.storage_factory)

    def _load_preprocessors(self):
        self._preprocessors.empty()
        self._preprocessors.load(self.bot.client.storage_factory)

    def _load_postprocessors(self):
        self._postprocessors.empty()
        self._postprocessors.load(self.bot.client.storage_factory)

    def _load_pattern_nodes(self):
        self._pattern_factory = PatternNodeFactory()
        self._pattern_factory.load(self.bot.client.storage_factory)

    def _load_template_nodes(self):
        self._template_factory = TemplateNodeFactory()
        self._template_factory.load(self.bot.client.storage_factory)

    def load_collections(self):
        """Reload every lookup collection from storage."""
        self._load_denormals()
        self._load_normals()
        self._load_genders()
        self._load_persons()
        self._load_person2s()
        self._load_default_variables()
        self._load_botnames()
        self._load_rdfs()
        self._load_sets()
        self._load_maps()
        self._load_regex_templates()
        self._load_preprocessors()
        self._load_postprocessors()

    def load_services(self, configuration):
        ServiceFactory.preload_services(configuration.services)

    def load_security_services(self):
        self._security.load_security_services(self.bot.client)

    def load_dynamics(self):
        if self.configuration.dynamics is not None:
            self._dynamics_collection.load_from_configuration(self.configuration.dynamics)
        else:
            YLogger.debug(self, "No dynamics configuration defined...")

    def pre_process_question(self, client_context, question):
        return self.preprocessors.process(client_context, question)

    def post_process_response(self, client_context, response: str):
        return self.postprocessors.process(client_context, response)

    def failed_authentication(self, client_context):
        return self._security.failed_authentication(client_context)

    def authenticate_user(self, client_context):
        return self._security.authenticate_user(client_context)

    def resolve_matched_template(self, client_context, match_context):
        """Resolve the matched template into response text, routing OOB
        (out-of-band) content through the OOB handler when enabled."""
        assert (client_context is not None)
        assert (match_context is not None)
        template_node = match_context.template_node()
        YLogger.debug(client_context, "AIML Parser evaluating template [%s]", template_node.to_string())
        response = template_node.template.resolve(client_context)
        if self._oobhandler.oob_in_response(response) is True:
            if client_context.brain.template_factory.exists('oob') is True:
                response = self._oobhandler.handle(client_context, response)
            else:
                # NOTE(review): "disable" (sic) is the original log text.
                YLogger.debug(client_context, "OOB function is disable [%s]", response)
        return response

    def ask_question(self, client_context, sentence, srai=False, default_srai=False):
        """Match *sentence* against the pattern graph and resolve the
        winning template into a response string.

        srai=True marks a recursive (srai) call; default_srai=True marks
        the default-response srai pass.  Falls back to NLU matching when
        the AIML match is only a bare wildcard and NLU is enabled.
        Returns the answer text or None when nothing matched.
        """
        assert (client_context is not None)
        assert (client_context.bot is not None)
        assert (self._aiml_parser is not None)
        client_context.brain = self
        authenticated = self.authenticate_user(client_context)
        if authenticated is not None:
            # Authentication produced a canned response; short-circuit.
            return authenticated
        conversation = client_context.bot.get_conversation(client_context)
        answer = None
        if conversation is not None:
            if srai is False and default_srai is False:
                # Fresh top-level question: forget any cached NLU utterance.
                self._nlu_utterance = None
            # Best-effort user-info hook; failures must not break matching.
            try:
                client_context.userInfo.userInfoPreProcessor(client_context, srai)
            except Exception:
                pass
            topic_pattern = conversation.get_topic_pattern(client_context)
            that_pattern = conversation.get_that_pattern(client_context, srai)
            # Select which internal-data dict this pass writes into.
            original_base = None
            if default_srai is True and conversation.internal_base is not None:
                original_base = conversation.internal_base
                base = original_base
                if 'srai_histories' not in base:
                    base['srai_histories'] = []
                default_srai_root = base['srai_histories']
                default_srai_root.append({})
                conversation.internal_base = default_srai_root[-1]
                conversation.add_internal_data(conversation.internal_base, 'default_srai', True)
            elif srai is False:
                conversation.internal_data.append({})
                conversation.internal_base = conversation.internal_data[-1]
            base = conversation.internal_base
            texts = self.tokenizer.words_to_texts(sentence.words)
            conversation.add_internal_data(base, 'question', texts)
            conversation.add_internal_data(base, 'topic', topic_pattern)
            conversation.add_internal_data(base, 'that', that_pattern)
            current_variables = ConversationVariables(conversation)
            match_context = self._aiml_parser.match_sentence(client_context, sentence, topic_pattern=topic_pattern, that_pattern=that_pattern)
            if match_context is not None:
                # A 3-node match whose first node is a wildcard is a bare
                # catch-all ("*") match.
                if len(match_context.matched_nodes) == 3 and match_context.matched_nodes[0].matched_node.is_wildcard() is True:
                    if self._aiml_parser.pattern_parser.use_nlu is False:
                        # NOTE(review): "Catgeory" typo is original log text.
                        YLogger.debug(client_context, "Matched Catgeory (*): file[%s] line[%s-%s]", match_context._template_node._filename, match_context._template_node._start_line, match_context._template_node._end_line)
                        conversation.add_internal_matched(base, match_context._template_node)
                        answer = self.resolve_matched_template(client_context, match_context)
                    # else: wildcard match is deferred to NLU matching below.
                else:
                    YLogger.debug(client_context, "Matched Catgeory : file[%s] line[%s-%s]", match_context._template_node._filename, match_context._template_node._start_line, match_context._template_node._end_line)
                    try:
                        conversation.add_internal_matched(base, match_context._template_node)
                        answer = self.resolve_matched_template(client_context, match_context)
                    except Exception:
                        # Record diagnostics before propagating the failure.
                        self.set_match_context_info(conversation, match_context)
                        before, after = current_variables.different_variables(conversation)
                        conversation.add_internal_data(base, 'before_variables', before)
                        conversation.add_internal_data(base, 'after_variables', after)
                        raise
            if answer is None and self._aiml_parser.pattern_parser.use_nlu is True:
                assert (self._nlu is not None)
                try:
                    utterance = client_context.brain.tokenizer.words_to_texts(sentence.words)
                    if default_srai is False or self.bot.configuration.default_response_srai != utterance:
                        nluResult = conversation.current_question().property("__SYSTEM_NLUDATA__")
                        if nluResult is None or nluResult == "" or self._nlu_utterance != utterance:
                            # New utterance: call out to the NLU servers.
                            self._nlu_utterance = utterance
                            match_context = self.multi_nlu_match(client_context, conversation, topic_pattern, that_pattern)
                        else:
                            # NLU data already stored: re-match against the
                            # sentinel NLU sentence instead of calling again.
                            client_context.match_nlu = True
                            sentence = Sentence(client_context.brain.tokenizer, self.NLU_UTTERANCE)
                            match_context = self._aiml_parser.match_sentence(client_context, sentence, topic_pattern=topic_pattern, that_pattern=that_pattern)
                            client_context.match_nlu = False
                        if match_context is not None:
                            YLogger.debug(client_context, "Matched Catgeory (NLU): file[%s] line[%s-%s]", match_context._template_node._filename, match_context._template_node._start_line, match_context._template_node._end_line)
                            conversation.add_internal_matched(base, match_context._template_node)
                            answer = self.resolve_matched_template(client_context, match_context)
                except NotImplementedError:
                    # NLU unavailable: fall back to any wildcard match found above.
                    if match_context is not None:
                        YLogger.debug(client_context, "Matched Catgeory (*): file[%s] line[%s-%s]", match_context._template_node._filename, match_context._template_node._start_line, match_context._template_node._end_line)
                        conversation.add_internal_matched(base, match_context._template_node)
                        answer = self.resolve_matched_template(client_context, match_context)
                except Exception:
                    self.set_match_context_info(conversation, match_context)
                    before, after = current_variables.different_variables(conversation)
                    conversation.add_internal_data(base, 'before_variables', before)
                    conversation.add_internal_data(base, 'after_variables', after)
                    raise
            self.set_match_context_info(conversation, match_context)
            before, after = current_variables.different_variables(conversation)
            conversation.add_internal_data(base, 'before_variables', before)
            conversation.add_internal_data(base, 'after_variables', after)
            conversation.add_internal_data(base, 'processing_result', answer)
            if original_base is not None:
                # Restore the caller's internal base after a default-srai pass.
                conversation.internal_base = original_base
                base = conversation.internal_base
            conversation.add_internal_data(base, 'response', answer)
        # Best-effort user-info post-processing hook.
        try:
            client_context.userInfo.userInfoPostProcessor(client_context)
        except Exception:
            pass
        return answer

    def set_match_context_info(self, conversation, match_context):
        """Record the matched template's source location on the current sentence."""
        if match_context is not None:
            json_matchedNode = {
                'file_name': match_context._template_node._filename,
                'start_line': match_context._template_node._start_line,
                'end_line': match_context._template_node._end_line
            }
            question = conversation.current_question()
            if question is not None:
                question.sentences[question._current_sentence_no].matched_node = json_matchedNode

    def multi_nlu_match(self, client_context, conversation, topic_pattern, that_pattern):
        """Try each configured NLU server in turn until one yields a
        non-wildcard match; return the match context, or None on failure."""
        nlu_list = self._nlu_collection.servers
        match_context = None
        for server_name in nlu_list:
            try:
                server = self._nlu_collection.server_info(server_name)
                nluResult = self._nlu.nluCall(client_context, server.url, server.apikey, self._nlu_utterance)
            except NotImplementedError:
                raise
            conversation.current_question().set_property("__SYSTEM_NLUDATA__", nluResult)
            if nluResult is not None:
                client_context.match_nlu = True
                sentence = Sentence(client_context.brain.tokenizer, self.NLU_UTTERANCE)
                match_context = self._aiml_parser.match_sentence(client_context, sentence, topic_pattern=topic_pattern, that_pattern=that_pattern)
                client_context.match_nlu = False
                if match_context is not None:
                    # Stop at the first match that is not a bare wildcard.
                    if len(match_context.matched_nodes) != 3 or \
                            match_context.matched_nodes[0].matched_node.is_wildcard() is False:
                        break
        if match_context is None:
            # All servers failed: clear stored NLU data and run the
            # best-effort user-info post-processing hook.
            conversation.current_question().set_property("__SYSTEM_NLUDATA__", None)
            try:
                client_context.userInfo.userInfoPostProcessor(client_context)
            except Exception:
                pass
        return match_context
def load_pattern_node_factory(self):
    """Create the pattern-node factory and load its node-class configuration
    from the brain configuration attached to our AIML parser."""
    nodes_config = self._aiml_parser.brain.configuration.nodes.pattern_nodes
    factory = PatternNodeFactory()
    factory.load_nodes_config_from_file(nodes_config)
    self._pattern_factory = factory
class PatternGraph(object):
    """Graph of AIML pattern nodes rooted at a PatternRootNode.

    Builds the pattern-matching tree from parsed AIML XML elements:
    pattern, topic and that elements are converted into chains of nodes,
    terminated by a template node.
    """

    def __init__(self, root_node=None, pattern_factory=None):
        # Either accept a caller-supplied root (must satisfy is_root()) or
        # default to a new PatternRootNode.
        if root_node is None:
            logging.debug("Defaulting root to PatternRootNode")
            self._root_node = PatternRootNode()
        else:
            if root_node.is_root() is False:
                raise ParserException(
                    "Root node needs to be of base type PatternRootNode")
            self._root_node = root_node
        # Either accept a caller-supplied factory (must be a
        # PatternNodeFactory) or build a default one.
        # NOTE(review): the default loads "dummy_config.conf" — presumably a
        # placeholder config path; confirm this is intentional.
        if pattern_factory is None:
            logging.debug("Defaulting node factory to PatternNodeFactory")
            self._pattern_factory = PatternNodeFactory()
            self._pattern_factory.load_nodes_config_from_file(
                "dummy_config.conf")
        else:
            if isinstance(pattern_factory, PatternNodeFactory) is False:
                raise ParserException(
                    "Pattern factory needs to be base class of PatternNodeFactory"
                )
            self._pattern_factory = pattern_factory

    @property
    def root(self):
        # Root node of the pattern tree.
        return self._root_node

    def node_from_text(self, word):
        """Create the appropriate pattern node for a single pattern word.

        "$" prefix -> priority node (prefix stripped); zero-or-more /
        one-or-more wildcard symbols -> wildcard nodes; anything else ->
        plain word node.
        """
        if word.startswith("$"):
            node_class = self._pattern_factory.new_node_class('priority')
            return node_class(word[1:])
        elif PatternZeroOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('zeroormore')
            return node_class(word)
        elif PatternOneOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('oneormore')
            return node_class(word)
        else:
            node_class = self._pattern_factory.new_node_class('word')
            return node_class(word)

    def node_from_element(self, element):
        """Create a pattern node from an XML sub-element (e.g. set/bot).

        The element tag must name a node type known to the factory.
        Raises ParserException for unknown tags.
        """
        node_name = element.tag
        if self._pattern_factory.exists(node_name) is False:
            raise ParserException("Unknown node name [%s]" % node_name)
        node_instance = self._pattern_factory.new_node_class(node_name)
        # Prefer the 'name' attribute; fall back to the element's text.
        if 'name' in element.attrib:
            return node_instance(element.attrib['name'])
        else:
            return node_instance(TextUtils.strip_whitespace(element.text))

    def _parse_text(self, pattern_text, current_node):
        """Split pattern text on spaces and append a node per word.

        Returns the last node added so the caller can continue the chain.
        """
        stripped = pattern_text.strip()
        words = stripped.split(" ")
        for word in words:
            if word != '':  # Blank nodes add no value, ignore them
                word = TextUtils.strip_whitespace(word)
                new_node = self.node_from_text(word)
                current_node = current_node.add_child(new_node)
        return current_node

    def get_text_from_element(self, element):
        """Return the element's stripped text, or None if absent/empty."""
        text = element.text
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def get_tail_from_element(self, element):
        """Return the element's stripped tail text, or None if absent/empty."""
        text = element.tail
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def add_pattern_to_node(self, pattern_element):
        """Add the pattern element's words/sub-elements under the root.

        Interleaves head text, sub-elements and their tail text in document
        order. On ParserException, records the offending element on the
        exception before re-raising.
        """
        try:
            head_text = self.get_text_from_element(pattern_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, self._root_node)
            else:
                current_node = self._root_node
            for sub_element in pattern_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
            return current_node
        except ParserException as parser_excep:
            parser_excep._xml_element = pattern_element
            raise parser_excep

    def add_topic_to_node(self, topic_element, base_node):
        """Append a topic branch to base_node and parse its content.

        Raises ParserException if the topic has neither text nor children.
        """
        try:
            current_node = self._pattern_factory.new_node_class('topic')()
            current_node = base_node.add_topic(current_node)
            head_text = self.get_text_from_element(topic_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, current_node)
            added_child = False
            for sub_element in topic_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("Topic node text is empty",
                                          xml_element=topic_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep._xml_element = topic_element
            raise parser_excep

    def add_that_to_node(self, that_element, base_node):
        """Append a that branch to base_node and parse its content.

        Raises ParserException if the that element has neither text nor
        children.
        """
        try:
            current_node = self._pattern_factory.new_node_class('that')()
            current_node = base_node.add_that(current_node)
            head_text = self.get_text_from_element(that_element)
            if head_text is not None:
                current_node = self._parse_text(
                    TextUtils.strip_whitespace(head_text), current_node)
            added_child = False
            for sub_element in that_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("That node text is empty",
                                          xml_element=that_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep._xml_element = that_element
            raise parser_excep

    def add_template_to_node(self, template_graph_root, current_node):
        """Terminate a pattern chain with a template node."""
        template_node = self._pattern_factory.new_node_class('template')(
            template_graph_root)
        current_node = current_node.add_child(template_node)
        return current_node

    def add_pattern_to_graph(self, pattern_element, topic_element,
                             that_element, template_graph_root):
        """Add a full category (pattern/topic/that/template) to the graph.

        Raises DuplicateGrammarException if the resulting leaf already has
        a template attached.
        """
        current_node = self.add_pattern_to_node(pattern_element)
        current_node = self.add_topic_to_node(topic_element, current_node)
        current_node = self.add_that_to_node(that_element, current_node)
        if current_node.has_template():
            raise DuplicateGrammarException(
                "Duplicate grammar tree found [%s] in [%s] with template [%s]" %
                (pattern_element.text, tostring(pattern_element),
                 current_node.template.template.to_xml(None, None)))
        else:
            self.add_template_to_node(template_graph_root, current_node)

    def count_words_in_patterns(self):
        """Return the total number of nodes in the tree (root excluded)."""
        # Single-element list used as a mutable accumulator for recursion.
        counter = [0]
        self._count_words_in_children(self._root_node, counter)
        return counter[0]

    def _count_words_in_children(self, node, counter):
        # Depth-first count of every child node.
        for child in node.children:
            counter[0] += 1
            self._count_words_in_children(child, counter)

    def dump(self, output_func=logging.debug, eol="", verbose=True):
        """Dump the whole tree via output_func, one call per node."""
        self.root.dump("", output_func, eol, verbose)
        output_func("")

    def dump_to_file(self, filename):
        """Dump the tree as text to the given file."""
        with open(filename, "w+") as dump_file:
            self.dump(output_func=dump_file.write, eol="\n")
class Brain(object):
    """Core knowledge container for a bot.

    Owns the AIML parser, all word/substitution/set/map/property
    collections, pre/post processors, security, OOB handling and dynamic
    collections, and answers questions by matching sentences against the
    parsed AIML grammar tree.
    """

    def __init__(self, bot, configuration: BrainConfiguration):
        # NOTE(review): asserts are stripped under `python -O`; these are
        # validation checks, preserved as-is.
        assert (bot is not None)
        assert (configuration is not None)
        self._questions = 0
        self._bot = bot
        self._configuration = configuration
        # Managers for binary brain persistence and braintree dumping.
        self._binaries = BinariesManager(configuration.binaries)
        self._braintree = BraintreeManager(configuration.braintree)
        self._tokenizer = Tokenizer.load_tokenizer(configuration)
        # Word/substitution collections.
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        self._person2_collection = Person2Collection()
        self._rdf_collection = RDFCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._default_variables_collection = DefaultVariablesCollection()
        # Processor pipelines.
        self._preprocessors = PreProcessorCollection()
        self._postprocessors = PostProcessorCollection()
        self._postquestionprocessors = PostQuestionProcessorCollection()
        # Node factories are populated by load_aiml_parser() below.
        self._pattern_factory = None
        self._template_factory = None
        self._security = SecurityManager(configuration.security)
        self._oobhandler = OOBHandler(configuration.oob)
        self._openchatbots = OpenChatBotCollection()
        self._regex_templates = RegexTemplatesCollection()
        self._dynamics_collection = DynamicsCollection()
        # Build the parser (loads pattern/template node factories first),
        # then load everything else.
        self._aiml_parser = self.load_aiml_parser()
        self.load(self.configuration)

    def ylogger_type(self):
        # Identifier used by YLogger when attributing log lines.
        return "brain"

    # -- simple read-only accessors -------------------------------------

    @property
    def num_questions(self):
        return self._questions

    @property
    def id(self):
        return self._configuration.section_name

    @property
    def bot(self):
        return self._bot

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def rdf(self):
        return self._rdf_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def default_variables(self):
        return self._default_variables_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def postquestionprocessors(self):
        return self._postquestionprocessors

    @property
    def pattern_factory(self):
        return self._pattern_factory

    @property
    def template_factory(self):
        return self._template_factory

    @property
    def regex_templates(self):
        return self._regex_templates

    @property
    def dynamics(self):
        return self._dynamics_collection

    @property
    def tokenizer(self):
        return self._tokenizer

    @property
    def openchatbots(self):
        return self._openchatbots

    @property
    def security(self):
        return self._security

    # -- loading ---------------------------------------------------------

    def load_aiml_parser(self):
        """Load node factories from storage, then build the AIML parser."""
        self._load_pattern_nodes()
        self._load_template_nodes()
        return AIMLParser(self)

    def load_aiml(self):
        """Load AIML source into the parser."""
        YLogger.info(self, "Loading aiml source brain")
        self._aiml_parser.load_aiml()

    def reload_aimls(self):
        """Empty the parser and re-load AIML source from scratch."""
        YLogger.info(self, "Loading aiml source brain")
        self._aiml_parser.empty()
        self._aiml_parser.load_aiml()

    def load(self, configuration: BrainConfiguration):
        """Load the full brain: AIML (binary or source) plus all collections,
        services, security, OOB processors, regex templates and dynamics.

        If binary loading is enabled and succeeds, AIML source loading is
        skipped; optionally the brain is re-saved as binary afterwards.
        """
        # NOTE(review): the load check reads self.configuration while the
        # save check reads the configuration parameter — callers pass
        # self.configuration from __init__ so they coincide there, but the
        # asymmetry looks unintentional; confirm before relying on it.
        load_aiml = True
        if self.configuration.binaries.load_binary is True:
            # load_binary returns whether AIML source still needs loading.
            load_aiml = self._binaries.load_binary(
                self.bot.client.storage_factory)
        if load_aiml is True:
            self.load_aiml()
        if configuration.binaries.save_binary is True:
            self._binaries.save_binary(self.bot.client.storage_factory)
        YLogger.info(self, "Loading collections")
        self.load_collections()
        YLogger.info(self, "Loading services")
        self.load_services(configuration)
        YLogger.info(self, "Loading openchat bots")
        self.load_openchatbots(configuration)
        YLogger.info(self, "Loading security services")
        self.load_security_services()
        YLogger.info(self, "Loading oob processors")
        self._oobhandler.load_oob_processors()
        YLogger.info(self, "Loading regex templates")
        self.load_regex_templates()
        YLogger.info(self, "Loading dynamics sets, maps and vars")
        self.load_dynamics()

    def dump_brain_tree(self, client_context):
        """Delegate braintree dumping to the braintree manager."""
        self._braintree.dump_brain_tree(client_context)

    # Each _load_* helper empties its collection then repopulates it from
    # the client's storage factory.

    def _load_denormals(self):
        self._denormal_collection.empty()
        self._denormal_collection.load(self.bot.client.storage_factory)

    def _load_normals(self):
        self._normal_collection.empty()
        self._normal_collection.load(self.bot.client.storage_factory)

    def _load_genders(self):
        self._gender_collection.empty()
        self._gender_collection.load(self.bot.client.storage_factory)

    def _load_persons(self):
        self._person_collection.empty()
        self._person_collection.load(self.bot.client.storage_factory)

    def _load_person2s(self):
        self._person2_collection.empty()
        self._person2_collection.load(self.bot.client.storage_factory)

    def _load_properties(self):
        self._properties_collection.empty()
        self._properties_collection.load(self.bot.client.storage_factory)

    def _load_default_variables(self):
        self._default_variables_collection.empty()
        self._default_variables_collection.load(
            self.bot.client.storage_factory)
        # Ensure system-defined defaults exist after loading.
        self._set_system_defined()

    def _set_system_defined(self):
        self.set_sentiment_scores(0.0, 0.5)

    def set_sentiment_scores(self, positivity, subjectivity):
        """Set default sentiment variables, without overwriting values the
        loaded configuration already provided."""
        if self._default_variables_collection.has_variable(
                "positivity") is False:
            self._default_variables_collection.set_value(
                "positivity", str(positivity))
        if self._default_variables_collection.has_variable(
                "subjectivity") is False:
            self._default_variables_collection.set_value(
                "subjectivity", str(subjectivity))

    def _load_maps(self):
        self._maps_collection.empty()
        self._maps_collection.load(self.bot.client.storage_factory)

    def reload_map(self, mapname):
        """Reload a single named map; logs an error for unknown names."""
        if self._maps_collection.contains(mapname):
            self._maps_collection.reload(self.bot.client.storage_factory,
                                         mapname)
        else:
            YLogger.error(self, "Unknown map name [%s], unable to reload ",
                          mapname)

    def _load_sets(self):
        self._sets_collection.empty()
        self._sets_collection.load(self.bot.client.storage_factory)

    def reload_set(self, setname):
        """Reload a single named set; logs an error for unknown names."""
        if self._sets_collection.contains(setname):
            self._sets_collection.reload(self.bot.client.storage_factory,
                                         setname)
        else:
            YLogger.error(self, "Unknown set name [%s], unable to reload ",
                          setname)

    def _load_rdfs(self):
        self._rdf_collection.empty()
        self._rdf_collection.load(self.bot.client.storage_factory)

    def reload_rdf(self, rdfname):
        """Reload a single named RDF collection; logs an error for unknown
        names."""
        if self._rdf_collection.contains(rdfname):
            self._rdf_collection.reload(self.bot.client.storage_factory,
                                        rdfname)
        else:
            YLogger.error(self, "Unknown rdf name [%s], unable to reload ",
                          rdfname)

    def _load_preprocessors(self):
        self._preprocessors.empty()
        self._preprocessors.load(self.bot.client.storage_factory)

    def _load_postprocessors(self):
        self._postprocessors.empty()
        self._postprocessors.load(self.bot.client.storage_factory)

    def _load_postquestionprocessors(self):
        self._postquestionprocessors.empty()
        self._postquestionprocessors.load(self.bot.client.storage_factory)

    def _load_pattern_nodes(self):
        self._pattern_factory = PatternNodeFactory()
        self._pattern_factory.load(self.bot.client.storage_factory)

    def _load_template_nodes(self):
        self._template_factory = TemplateNodeFactory()
        self._template_factory.load(self.bot.client.storage_factory)

    def load_collections(self):
        """Load every collection from storage in a fixed order."""
        self._load_denormals()
        self._load_normals()
        self._load_genders()
        self._load_persons()
        self._load_person2s()
        self._load_properties()
        self._load_default_variables()
        self._load_rdfs()
        self._load_sets()
        self._load_maps()
        self._load_preprocessors()
        self._load_postprocessors()
        self._load_postquestionprocessors()

    def load_services(self, configuration):
        ServiceFactory.preload_services(configuration.services)

    def load_openchatbots(self, configuration):
        self._openchatbots.load_from_configuration(configuration.openchatbots)

    def load_security_services(self):
        self._security.load_security_services(self.bot.client)

    def load_dynamics(self):
        """Load dynamic sets/maps/vars if configured, else log and skip."""
        if self.configuration.dynamics is not None:
            self._dynamics_collection.load_from_configuration(
                self.configuration.dynamics)
        else:
            YLogger.debug(self, "No dynamics configuration defined...")

    def load_regex_templates(self):
        self._regex_templates.load(self.bot.client.storage_factory)

    # -- question processing ---------------------------------------------

    def pre_process_question(self, client_context, question):
        """Run the question through the pre-processor pipeline."""
        return self.preprocessors.process(client_context, question)

    def post_process_response(self, client_context, response: str):
        """Run the response through the post-processor pipeline."""
        return self.postprocessors.process(client_context, response)

    def post_process_question(self, client_context, question: str):
        """Run the question through the post-question-processor pipeline."""
        return self.postquestionprocessors.process(client_context, question)

    def failed_authentication(self, client_context):
        return self._security.failed_authentication(client_context)

    def authenticate_user(self, client_context):
        return self._security.authenticate_user(client_context)

    def resolve_matched_template(self, client_context, match_context):
        """Resolve the matched category's template to a response string,
        dispatching to the OOB handler when the response contains OOB
        content."""
        assert (client_context is not None)
        assert (match_context is not None)
        template_node = match_context.template_node()
        YLogger.debug(client_context, "AIML Parser evaluating template [%s]",
                      template_node.to_string())
        response = template_node.template.resolve(client_context)
        if self._oobhandler.oob_in_response(response) is True:
            response = self._oobhandler.handle(client_context, response)
        return response

    def ask_question(self, client_context, sentence, srai=False):
        """Match a sentence against the grammar tree and resolve its
        template.

        Returns the security response if authentication fails, the resolved
        template text on a match, or None when nothing matches.
        """
        assert (client_context is not None)
        assert (client_context.bot is not None)
        assert (self._aiml_parser is not None)
        client_context.brain = self
        authenticated = self.authenticate_user(client_context)
        if authenticated is not None:
            return authenticated
        conversation = client_context.bot.get_conversation(client_context)
        if conversation is not None:
            self._questions += 1
        # NOTE(review): conversation is used unconditionally below even
        # though it was None-checked above — presumably get_conversation
        # never returns None in practice; confirm.
        topic_pattern = conversation.get_topic_pattern(client_context)
        that_pattern = conversation.get_that_pattern(client_context, srai)
        match_context = self._aiml_parser.match_sentence(
            client_context, sentence,
            topic_pattern=topic_pattern,
            that_pattern=that_pattern)
        if match_context is not None:
            return self.resolve_matched_template(client_context,
                                                 match_context)
        return None
def test_load_nodes_config_from_file(self):
    """A factory loaded from pattern_nodes.conf passes the shared node checks."""
    conf_path = os.path.dirname(__file__) + os.sep + "pattern_nodes.conf"
    node_factory = PatternNodeFactory()
    node_factory.load_nodes_config_from_file(conf_path)
    self.assert_nodes(node_factory)
def _load_pattern_nodes(self):
    """Replace the pattern node factory and populate it from client storage."""
    self._pattern_factory = PatternNodeFactory()
    storage = self.bot.client.storage_factory
    self._pattern_factory.load(storage)
def test_init(self):
    """A freshly constructed factory has an empty node config and type 'Pattern'."""
    factory = PatternNodeFactory()
    self.assertIsNotNone(factory)
    # Fixed: was the deprecated assertEquals alias (removed in Python 3.12);
    # use assertEqual consistently, matching the assertion on the next line.
    self.assertEqual({}, factory._nodes_config)
    self.assertEqual("Pattern", factory._type)
class PatternGraph(object):
    """Graph of AIML pattern nodes rooted at the factory's root node.

    Builds and maintains the pattern-matching tree: pattern, topic and
    that XML elements are converted into chains of nodes terminated by a
    template node. Supports per-user nodes via the userid parameter and
    template replacement for learn-ed categories.

    Fixes applied: corrected the misspelled "Dupicate grammar tree"
    exception and warning messages to "Duplicate grammar tree".
    """

    def __init__(self, aiml_parser, root_node=None):
        self._aiml_parser = aiml_parser
        self.load_pattern_node_factory()
        # Either accept a caller-supplied root (must satisfy is_root()) or
        # default to the factory's root node.
        if root_node is None:
            YLogger.debug(self, "Defaulting root to PatternRootNode")
            self._root_node = self._pattern_factory.get_root_node()
        else:
            if root_node.is_root() is False:
                raise ParserException("Root node needs to be of base type PatternRootNode")
            self._root_node = root_node

    def load_pattern_node_factory(self):
        """Build a fresh PatternNodeFactory from the brain's node config file."""
        pattern_nodes = self._aiml_parser.brain.configuration.nodes.pattern_nodes
        self._pattern_factory = PatternNodeFactory()
        self._pattern_factory.load_nodes_config_from_file(pattern_nodes)

    @property
    def root(self):
        return self._root_node

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def pattern_factory(self):
        return self._pattern_factory

    def empty(self):
        """Clear the whole tree and install a fresh root node."""
        YLogger.debug(self, "Defaulting root to PatternRootNode")
        self._empty_children(self._root_node)
        self._root_node = self._pattern_factory.get_root_node()

    def _empty_children(self, node):
        # Depth-first clear: empty each child's subtree, then its child list.
        for child in node.children:
            self._empty_children(child)
            child.children.clear()

    def node_from_text(self, word, userid="*"):
        """Create the appropriate pattern node for a single pattern word.

        "$" prefix -> priority node (prefix stripped); zero-or-more /
        one-or-more wildcard symbols -> wildcard nodes; anything else ->
        plain word node. userid scopes the node to a user ("*" = all).
        """
        if word.startswith("$"):
            node_class = self._pattern_factory.new_node_class('priority')
            return node_class(word[1:], userid)
        elif PatternZeroOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('zeroormore')
            return node_class(word, userid)
        elif PatternOneOrMoreWildCardNode.is_wild_card(word):
            node_class = self._pattern_factory.new_node_class('oneormore')
            return node_class(word, userid)
        node_class = self._pattern_factory.new_node_class('word')
        return node_class(word, userid)

    def node_from_element(self, element, userid="*"):
        """Create a pattern node from an XML sub-element (e.g. set/bot/iset).

        The (namespace-stripped) tag must name a node type known to the
        factory; raises ParserException otherwise. The node receives the
        element's attributes, its stripped text (or None) and the userid.
        """
        node_name = TextUtils.tag_from_text(element.tag)
        if self._pattern_factory.exists(node_name) is False:
            raise ParserException("Unknown node name [%s]"%node_name)
        text = None
        if element.text is not None:
            text = TextUtils.strip_whitespace(element.text)
        node_class_instance = self._pattern_factory.new_node_class(node_name)
        node_instance = node_class_instance(element.attrib, text, userid)
        return node_instance

    def _parse_text(self, pattern_text, current_node, userid="*"):
        """Tokenize pattern text with the brain's tokenizer and append a node
        per word. Returns the last node added so the caller can continue the
        chain."""
        stripped = pattern_text.strip()
        words = self._aiml_parser.brain.tokenizer.texts_to_words(stripped)
        for word in words:
            if word != '':  # Blank nodes add no value, ignore them
                word = TextUtils.strip_whitespace(word)
                new_node = self.node_from_text(word, userid=userid)
                current_node = current_node.add_child(new_node)
        return current_node

    def get_text_from_element(self, element):
        """Return the element's stripped text, or None if absent/empty."""
        text = element.text
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def get_tail_from_element(self, element):
        """Return the element's stripped tail text, or None if absent/empty."""
        text = element.tail
        if text is not None:
            text = TextUtils.strip_whitespace(text)
            if text == "":
                return None
            return text
        return None

    def add_pattern_to_node(self, pattern_element, userid="*"):
        """Add the pattern element's words/sub-elements under the root.

        Interleaves head text, sub-elements and their tail text in document
        order. On ParserException, records the offending element on the
        exception before re-raising.
        """
        try:
            head_text = self.get_text_from_element(pattern_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, self._root_node, userid=userid)
            else:
                current_node = self._root_node
            for sub_element in pattern_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = pattern_element
            raise parser_excep

    def add_topic_to_node(self, topic_element, base_node, userid="*"):
        """Append a topic branch to base_node and parse its content.

        Raises ParserException if the topic has neither text nor children.
        """
        try:
            current_node = self._pattern_factory.new_node_class('topic')(userid)
            current_node = base_node.add_topic(current_node)
            head_text = self.get_text_from_element(topic_element)
            if head_text is not None:
                current_node = self._parse_text(head_text, current_node)
            added_child = False
            for sub_element in topic_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("Topic node text is empty", xml_element=topic_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = topic_element
            raise parser_excep

    def add_that_to_node(self, that_element, base_node, userid="*"):
        """Append a that branch to base_node and parse its content.

        Raises ParserException if the that element has neither text nor
        children.
        """
        try:
            current_node = self._pattern_factory.new_node_class('that')(userid)
            current_node = base_node.add_that(current_node)
            head_text = self.get_text_from_element(that_element)
            if head_text is not None:
                current_node = self._parse_text(TextUtils.strip_whitespace(head_text), current_node)
            added_child = False
            for sub_element in that_element:
                new_node = self.node_from_element(sub_element)
                current_node = current_node.add_child(new_node)
                tail_text = self.get_tail_from_element(sub_element)
                if tail_text is not None:
                    current_node = self._parse_text(tail_text, current_node)
                added_child = True
            if head_text is None:
                if added_child is False:
                    raise ParserException("That node text is empty", xml_element=that_element)
            return current_node
        except ParserException as parser_excep:
            parser_excep.xml_element = that_element
            raise parser_excep

    def add_template_to_node(self, template_graph_root, current_node, userid="*"):
        """Terminate a pattern chain with a template node, replacing any
        existing template on that node."""
        template_node = self._pattern_factory.new_node_class('template')(template_graph_root, userid)
        current_node = current_node.add_child(template_node, replace_existing=True)
        return current_node

    def add_pattern_to_graph(self, pattern_element, topic_element, that_element, template_graph_root, learn=False, userid="*"):
        """Add a full category (pattern/topic/that/template) to the graph.

        If the leaf already holds a template: raise
        DuplicateGrammarException unless learn is True, in which case the
        existing template is replaced with a warning. Returns the that-node
        the template was attached to.
        """
        pattern_node = self.add_pattern_to_node(pattern_element, userid=userid)
        topic_node = self.add_topic_to_node(topic_element, pattern_node, userid=userid)
        that_node = self.add_that_to_node(that_element, topic_node, userid=userid)
        if that_node.has_template() is True:
            if learn is False:
                if pattern_element.text is not None:
                    raise DuplicateGrammarException("Duplicate grammar tree found [%s]"%(pattern_element.text.strip()))
                else:
                    raise DuplicateGrammarException("Duplicate grammar tree found for bot/set")
            else:
                if pattern_element.text is not None:
                    YLogger.warning(self, "Duplicate grammar tree found [%s] in learn, replacing existing", pattern_element.text.strip())
                else:
                    YLogger.warning(self, "Duplicate grammar tree found for bot/set in learn, replacing existing")
                self.add_template_to_node(template_graph_root, that_node)
        else:
            self.add_template_to_node(template_graph_root, that_node)
        return that_node

    def count_words_in_patterns(self):
        """Return the total number of nodes in the tree (root excluded)."""
        # Single-element list used as a mutable accumulator for recursion.
        counter = [0]
        self._count_words_in_children(self._root_node, counter)
        return counter[0]

    def _count_words_in_children(self, node, counter):
        # Depth-first count of every child node.
        for child in node.children:
            counter[0] += 1
            self._count_words_in_children(child, counter)

    def dump(self, output_func=YLogger.debug, eol="", verbose=True):
        """Dump the whole tree via output_func.

        NOTE(review): output_func is called as output_func(self, "") to
        match YLogger.debug's (caller, message) signature — presumably
        callers passing a plain writer (e.g. file.write via save_braintree)
        rely on node-level dump behavior; confirm the expected callable
        signature before changing.
        """
        self.root.dump("", output_func, eol, verbose)
        output_func(self, "")

    def save_braintree(self, client_context, filename, content):
        """Persist the tree to filename as plain text ('txt') or AIML XML
        ('xml'); any other content type is logged as an error."""
        if content == 'txt':
            with open(filename, "w+", encoding="utf-8") as dump_file:
                self.dump(output_func=dump_file.write, eol="\n")
        elif content == 'xml':
            braintree = '<?xml version="1.0" encoding="UTF-8"?>\n'
            braintree += '<aiml>\n'
            braintree += self.root.to_xml(client_context)
            braintree += '</aiml>\n'
            with open(filename, "w+", encoding="utf-8") as dump_file:
                dump_file.write(braintree)
        else:
            YLogger.error(client_context, "Unknown braintree content type [%s]", content)