def test_load_from_file(self):
    """Loading sets from a file storage engine registers TEST_SET and its store."""
    # Wire a file-based storage engine pointing at the test sets directory.
    factory = StorageFactory()
    config = FileStorageConfiguration()
    sets_dir = os.path.join(os.path.dirname(__file__), "test_files", "sets")
    config._sets_storage = FileStoreConfiguration(dirs=[sets_dir])
    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.SETS] = engine
    factory._store_to_engine_map[StorageFactory.SETS] = engine

    collection = SetCollection()
    self.assertIsNotNone(collection)

    # Loading should find at least one set, registered under its store.
    self.assertTrue(collection.load(factory) > 0)
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 1)
    self.assertIsNotNone(collection._stores)
    self.assertEqual(len(collection._stores), 1)
    self.assertTrue("TEST_SET" in collection._sets)
    self.assertTrue("TEST_SET" in collection._stores)
    self.assertTrue(collection.contains('TEST_SET'))

    # Spot-check one multi-word entry of the loaded set.
    aset = collection.set('TEST_SET')
    self.assertIsNotNone(aset)
    values = aset['AIR']
    self.assertIsNotNone(values)
    self.assertTrue(['Air', 'Force', 'blue'] in values)
def __init__(self, bot, configuration: BrainConfiguration):
    """Create a Brain bound to its owning bot: build the tokenizer, AIML
    parser and every (initially empty) lookup collection, then populate
    them all via load() and dump the brain tree."""
    self._bot = bot
    self._configuration = configuration
    # Tokenizer and parser are configuration-driven, so build them first.
    self._tokenizer = self.load_tokenizer()
    self._aiml_parser = self.load_aiml_parser()
    # Word/phrase substitution collections.
    self._denormal_collection = DenormalCollection()
    self._normal_collection = NormalCollection()
    self._gender_collection = GenderCollection()
    self._person_collection = PersonCollection()
    # NOTE(review): person2 reuses PersonCollection rather than a dedicated
    # Person2Collection — confirm this is intended.
    self._person2_collection = PersonCollection()
    # Knowledge collections.
    self._rdf_collection = RDFCollection()
    self._sets_collection = SetCollection()
    self._maps_collection = MapCollection()
    self._properties_collection = PropertiesCollection()
    self._variables_collection = PropertiesCollection()
    # Pre/post processing chains.
    self._preprocessors = ProcessorLoader()
    self._postprocessors = ProcessorLoader()
    # Security services are optional; populated later during load().
    self._authentication = None
    self._authorisation = None
    # Out-of-band command handlers, keyed by oob tag name.
    self._default_oob = None
    self._oob = {}
    self._regex_templates = {}
    self._dynamics_collection = DynamicsCollection()
    self.load(self.configuration)
    self.dump_brain_tree()
def __init__(self, configuration: BrainConfiguration):
    """Create a Brain: build the AIML parser and every (initially empty)
    lookup collection, then populate them all via load()."""
    self._configuration = configuration
    self._aiml_parser = AIMLParser(self)
    # Word/phrase substitution collections.
    self._denormal_collection = DenormalCollection()
    self._normal_collection = NormalCollection()
    self._gender_collection = GenderCollection()
    self._person_collection = PersonCollection()
    # NOTE(review): person2 reuses PersonCollection — confirm intended.
    self._person2_collection = PersonCollection()
    # Knowledge collections.
    self._rdf_collection = RDFCollection()
    self._sets_collection = SetCollection()
    self._maps_collection = MapCollection()
    self._properties_collection = PropertiesCollection()
    # Pre/post processing chains.
    self._preprocessors = ProcessorLoader()
    self._postprocessors = ProcessorLoader()
    # Optional services; populated later during load().
    self._authentication = None
    self._authorisation = None
    self._default_oob = None
    self._oob = {}
    self._regex_templates = {}
    self._dynamics_collection = DynamicsCollection()
    self.load(self._configuration)
def test_reload_from_file_jp(self):
    """Load a Japanese (CJK) set, then reload it and verify the contents survive."""
    factory = StorageFactory()
    config = FileStorageConfiguration()
    jp_dir = os.path.join(os.path.dirname(__file__), "test_files", "sets_jp")
    config._sets_storage = FileStoreConfiguration(dirs=[jp_dir])
    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.SETS] = engine
    factory._store_to_engine_map[StorageFactory.SETS] = engine

    collection = SetCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    # Initial load: exactly one set, flagged as CJK, keyed by its store.
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 1)
    self.assertIsNotNone(collection._stores)
    self.assertEqual(len(collection._stores), 1)
    self.assertTrue("TEST_SET" in collection._sets)
    self.assertTrue("TEST_SET" in collection._stores)
    self.assertTrue("TEST_SET" in collection._is_cjk)
    self.assertTrue(collection.contains('TEST_SET'))
    self.assertTrue(collection.is_cjk('TEST_SET'))

    aset = collection.set_list('TEST_SET')
    self.assertIsNotNone(aset)
    values = aset['千']
    self.assertIsNotNone(values)
    self.assertEqual(['千葉', '千葉県', '千葉県下'], values)

    # Reload and verify the same single set and values are still present.
    collection.reload(factory, "TEST_SET")
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 1)
    self.assertIsNotNone(collection._stores)
    self.assertEqual(len(collection._stores), 1)
    self.assertTrue("TEST_SET" in collection._sets)
    self.assertTrue("TEST_SET" in collection._stores)
    self.assertTrue(collection.contains('TEST_SET'))
    values = aset['千']
    self.assertIsNotNone(values)
    self.assertEqual(['千葉', '千葉県', '千葉県下'], values)
def test_reload_no_storage(self):
    """reload() must be a no-op (return 0) when no sets engine is registered."""
    storage_factory = StorageFactory()
    file_store_config = FileStorageConfiguration()
    file_store_config._sets_storage = FileStoreConfiguration(
        dirs=[os.path.join(os.path.dirname(__file__), "test_files", "sets")])
    # Deliberately no engine registration: the factory has nothing to
    # reload from, so the call below must load zero sets.
    collection = SetCollection()
    self.assertIsNotNone(collection)
    # assertEqual reports the actual count on failure, unlike assertTrue(x == 0).
    self.assertEqual(0, collection.reload(storage_factory, "TEST_SET"))
def assert_upload_from_csv_file(self, store):
    """Upload a CSV sets file into an emptied store and verify its values load back."""
    store.empty()
    csv_path = os.path.join(os.path.dirname(__file__), "data", "sets", "csv", "testset.csv")
    store.upload_from_file(csv_path, fileformat=Store.CSV_FORMAT)

    collection = SetCollection()
    store.load(collection, 'TESTSET')
    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)
def assert_upload_text_files_from_directory_no_subdir(self, store):
    """Upload text sets from a flat directory (no recursion) and verify the values."""
    store.empty()
    text_dir = os.path.join(os.path.dirname(__file__), "data", "sets", "text")
    store.upload_from_directory(text_dir, subdir=False)

    collection = SetCollection()
    store.load(collection, 'TESTSET')
    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)
def assert_upload_from_text_file(self, store):
    """Upload a plain-text sets file (default format) and verify its values load back."""
    store.empty()
    txt_path = os.path.join(os.path.dirname(__file__), "data", "sets", "text", "testset.txt")
    store.upload_from_file(txt_path)

    collection = SetCollection()
    store.load(collection, 'TESTSET')
    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)
def assert_upload_csv_files_from_directory_with_subdir(self, store):
    """Upload CSV sets recursively from a directory and verify both sets load back."""
    store.empty()
    csv_dir = os.path.join(os.path.dirname(__file__), "data", "sets", "csv")
    # NOTE(review): upload_from_file() is called elsewhere with 'fileformat=';
    # confirm upload_from_directory() really takes 'format=' — inconsistent
    # keyword names across these helpers look suspicious.
    store.upload_from_directory(csv_dir, format=Store.CSV_FORMAT)

    collection = SetCollection()
    store.load(collection, 'TESTSET')
    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)

    # The subdirectory walk must also have found the second set.
    collection = SetCollection()
    store.load(collection, 'TESTSET2')
    self.assertTrue(collection.contains('TESTSET2'))
    loaded = collection.set('TESTSET2')
    self.assertEqual(4, len(loaded))
    for expected in ('VAL5', 'VAL6', 'VAL7', 'VAL8'):
        self.assertTrue(expected in loaded)
def __init__(self, bot, configuration: BrainConfiguration):
    """Create a Brain bound to its owning bot: build the tokenizer, AIML
    parser and every (initially empty) lookup collection, then populate
    them all via load()."""
    self._bot = bot
    self._configuration = configuration
    # Tokenizer and parser are configuration-driven, so build them first.
    self._tokenizer = self.load_tokenizer()
    self._aiml_parser = self.load_aiml_parser()
    # Word/phrase substitution collections.
    self._denormal_collection = DenormalCollection()
    self._normal_collection = NormalCollection()
    self._gender_collection = GenderCollection()
    self._person_collection = PersonCollection()
    # NOTE(review): person2 reuses PersonCollection — confirm intended.
    self._person2_collection = PersonCollection()
    # Knowledge collections.
    self._rdf_collection = RDFCollection()
    self._sets_collection = SetCollection()
    self._maps_collection = MapCollection()
    self._properties_collection = PropertiesCollection()
    self._variables_collection = PropertiesCollection()
    # Pre/post processing chains.
    self._preprocessors = ProcessorLoader()
    self._postprocessors = ProcessorLoader()
    # Optional services; populated later during load().
    self._authentication = None
    self._authorisation = None
    self._default_oob = None
    self._oob = {}
    self._regex_templates = {}
    self._dynamics_collection = DynamicsCollection()
    self.load(self.configuration)
def test_load_with_exception(self):
    """load() must return 0 sets when the store fails to deliver any."""
    storage_factory = StorageFactory()
    file_store_config = FileStorageConfiguration()
    file_store_config._sets_storage = FileStoreConfiguration(
        dirs=[os.path.join(os.path.dirname(__file__), "test_files", "sets")])
    storage_engine = FileStorageEngine(file_store_config)
    storage_factory._storage_engines[StorageFactory.SETS] = storage_engine
    storage_factory._store_to_engine_map[StorageFactory.SETS] = storage_engine

    collection = SetCollection()
    self.assertIsNotNone(collection)
    # assertEqual reports the actual count on failure, unlike assertTrue(x == 0).
    self.assertEqual(0, collection.load(storage_factory))
def __init__(self, bot, configuration: BrainConfiguration):
    """Build a Brain for the given bot: managers (binaries, braintree,
    security), tokenizer, every lookup collection, processor chains,
    services and OOB handling, then populate them all via load()."""
    # NOTE(review): assert is stripped under python -O; confirm these are
    # not the only validation of the constructor arguments.
    assert bot is not None
    assert configuration is not None
    # Number of questions this brain has handled so far.
    self._questions = 0
    self._bot = bot
    self._configuration = configuration
    # Pattern/template factories are populated later during load().
    self._pattern_factory = None
    self._template_factory = None
    self._binaries = BinariesManager(configuration.binaries)
    self._braintree = BraintreeManager(configuration.braintree)
    self._tokenizer = Tokenizer.load_tokenizer(configuration.tokenizer)
    # Word/phrase substitution collections.
    self._denormal_collection = DenormalCollection()
    self._normal_collection = NormalCollection()
    self._gender_collection = GenderCollection()
    self._person_collection = PersonCollection()
    self._person2_collection = Person2Collection()
    # Knowledge collections.
    self._rdf_collection = RDFCollection()
    self._sets_collection = SetCollection()
    self._maps_collection = MapCollection()
    self._properties_collection = PropertiesCollection()
    self._default_variables_collection = DefaultVariablesCollection()
    self._regex_templates = RegexTemplatesCollection()
    self._dynamics_collection = DynamicsCollection()
    # Processing chains around question handling.
    self._preprocessors = PreProcessorCollection()
    self._postprocessors = PostProcessorCollection()
    self._postquestionprocessors = PostQuestionProcessorCollection()
    self._services = ServiceHandler()
    self._oobhandler = OOBHandler()
    self._security = SecurityManager(configuration.security)
    # Parser last: it may reference the collections above via self.
    self._aiml_parser = self.load_aiml_parser()
    self.load(self.configuration)
def assert_upload_from_text(self, store):
    """Upload four values as raw text (one value per line) into an emptied
    store and verify they load back as the TESTSET set."""
    store.empty()
    store.upload_from_text('TESTSET', """
VAL1
VAL2
VAL3
VAL4
""")
    set_collection = SetCollection()
    store.load(set_collection, 'TESTSET')
    self.assertTrue(set_collection.contains('TESTSET'))
    values = set_collection.set('TESTSET')
    self.assertTrue('VAL1' in values)
    self.assertTrue('VAL2' in values)
    self.assertTrue('VAL3' in values)
    self.assertTrue('VAL4' in values)
def test_load(self):
    """A single text sets file loads into the collection as TESTSET."""
    engine = FileStorageEngine(FileStorageConfiguration())
    engine.initialise()
    store = FileSetsStore(engine)

    collection = SetCollection()
    testset_path = os.path.join(os.path.dirname(__file__), "data", "sets", "text", "testset.txt")
    self.assertTrue(store.load(collection, testset_path))

    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)
def __init__(self, configuration: BrainConfiguration):
    """Create a Brain with empty substitution and knowledge collections
    plus processor loaders, then populate everything via load()."""
    self._configuration = configuration
    # NOTE(review): AIMLParser() is built without a brain reference here,
    # unlike other versions that pass self — confirm the expected signature.
    self._aiml_parser = AIMLParser()
    # Word/phrase substitution collections.
    self._denormal_collection = DenormalCollection()
    self._normal_collection = NormalCollection()
    self._gender_collection = GenderCollection()
    self._person_collection = PersonCollection()
    # NOTE(review): person2 reuses PersonCollection — confirm intended.
    self._person2_collection = PersonCollection()
    # Knowledge collections.
    self._predicates_collection = PredicatesCollection()
    self._pronouns_collection = PronounsCollection()
    self._triples_collection = TriplesCollection()
    self._sets_collection = SetCollection()
    self._maps_collection = MapCollection()
    self._properties_collection = PropertiesCollection()
    # Pre/post processing chains.
    self._preprocessors = ProcessorLoader()
    self._postprocessors = ProcessorLoader()
    self.load(self._configuration)
def test_load_with_exception(self):
    """store.load() must return False when the load fails for the given name."""
    engine = FileStorageEngine(FileStorageConfiguration())
    engine.initialise()
    store = FileSetsStore(engine)

    collection = SetCollection()
    # NOTE(review): a full file path is passed where sibling tests pass a
    # set name; presumably that mismatch is what triggers the failure path
    # this test exercises — confirm against FileSetsStore.load().
    testset_path = os.path.join(os.path.dirname(__file__), "data", "sets", "text", "testset.txt")
    self.assertFalse(store.load(collection, testset_path))
def test_collection_duplicate(self):
    """Adding a set under an already-registered name must raise."""
    collection = SetCollection()
    first_entries = {"A": [["A", "A B", "A C"]], "D": [["D"]], "E": [["E", "E F"]]}
    first_values = {"A": "A", "A B": "A B", "A C": "A C",
                    "D": "D", "E": "E", "E F": "E F"}
    collection.add_set("TESTSET", first_entries, "teststore", False, first_values)

    duplicate_entries = {"1": [["1", "1 2", "1 3"]], "4": [["4"]], "5": [["5", "5 6"]]}
    duplicate_values = {"1": "1", "1 2": "1 2", "1 3": "1 3",
                        "4": "4", "5": "5", "5 6": "5 6"}
    # Re-registering the same set name is an error.
    with self.assertRaises(Exception):
        collection.add_set("TESTSET", duplicate_entries, "teststore", False, duplicate_values)
def test_load_from_test_dir_with_subdir(self):
    """load_all() walks subdirectories and loads every text set it finds."""
    config = FileStorageConfiguration()
    text_dir = os.path.join(os.path.dirname(__file__), "data", "sets", "text")
    config._sets_storage = FileStoreConfiguration(dirs=[text_dir],
                                                 extension="txt",
                                                 subdirs=True,
                                                 format="text",
                                                 encoding="utf-8",
                                                 delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileSetsStore(engine)

    collection = SetCollection()
    store.load_all(collection)

    # Top-level set.
    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set_list('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)

    # Set found only via the subdirectory walk.
    self.assertTrue(collection.contains('TESTSET2'))
    loaded = collection.set_list('TESTSET2')
    self.assertEqual(4, len(loaded))
    for expected in ('VAL5', 'VAL6', 'VAL7', 'VAL8'):
        self.assertTrue(expected in loaded)
def test_collection(self):
    """SetLoader.load_from_text() turns one value per line into a sets dict
    that SetCollection can query via contains()."""
    loader = SetLoader()
    self.assertIsNotNone(loader)
    collection = SetCollection()
    self.assertIsNotNone(collection)
    # Assign the loaded dict directly onto the collection internals.
    collection._sets = loader.load_from_text("""
VAL1
VAL2
VAL3
VAL4
""")
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 4)
    self.assertTrue(collection.contains('VAL1'))
    self.assertTrue(collection.contains('VAL2'))
    self.assertTrue(collection.contains('VAL3'))
    self.assertTrue(collection.contains('VAL4'))
    # A value that was never loaded must not be found.
    self.assertFalse(collection.contains('VAL5'))
def test_load_single_file(self):
    """A sets storage configured with a single file path loads just that file."""
    config = FileStorageConfiguration()
    testset_path = os.path.join(os.path.dirname(__file__), "data", "sets", "text", "testset.txt")
    config._sets_storage = FileStoreConfiguration(dirs=[testset_path],
                                                 extension="txt",
                                                 format="text",
                                                 encoding="utf-8",
                                                 delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileSetsStore(engine)

    collection = SetCollection()
    store.load(collection)

    self.assertTrue(collection.contains('TESTSET'))
    loaded = collection.set_list('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in loaded)
def test_add_set_exists(self):
    """A second add_set() under an already-registered name must raise."""
    collection = SetCollection()
    original = {"A": [["A", "B", "C"]], "D": [["D"]], "E": [["E", "F"]]}
    collection.add_set("TESTSET", original, "teststore")

    replacement = {"1": [["1", "2", "3"]], "4": [["4"]], "5": [["5", "6"]]}
    with self.assertRaises(Exception):
        collection.add_set("TESTSET", replacement, "teststore")
def test_collection_duplicate(self):
    """Adding a duplicate set name keeps the original content unchanged."""
    collection = SetCollection()
    first_entries = {"A": [["A", "A B", "A C"]], "D": [["D"]], "E": [["E", "E F"]]}
    first_values = {"A": "A", "A B": "A B", "A C": "A C",
                    "D": "D", "E": "E", "E F": "E F"}
    collection.add_set("TESTSET", first_entries, "teststore", False, first_values)

    second_entries = {"1": [["1", "1 2", "1 3"]], "4": [["4"]], "5": [["5", "5 6"]]}
    second_values = {"1": "1", "1 2": "1 2", "1 3": "1 3",
                     "4": "4", "5": "5", "5 6": "5 6"}
    collection.add_set("TESTSET", second_entries, "teststore", False, second_values)

    aset = collection.set_list('TESTSET')
    self.assertIsNotNone(aset)
    values = aset['A']
    self.assertIsNotNone(values)
    self.assertTrue(["A", "A B", "A C"] in values)
    self.assertTrue("D" in aset)
    self.assertTrue("E" in aset)
    # The duplicate add must not have replaced the original entries.
    self.assertFalse("1" in aset)
    self.assertFalse("4" in aset)
    self.assertFalse("5" in aset)
def test_collection(self):
    """Mixed-case values loaded from text are found via upper-case queries.

    NOTE(review): the loaded values are Val1..Val4 but contains() is queried
    with VAL1..VAL4 — presumably the loader/collection normalises case;
    confirm against SetLoader.load_from_text().
    """
    loader = SetLoader()
    self.assertIsNotNone(loader)
    collection = SetCollection()
    self.assertIsNotNone(collection)
    collection._sets = loader.load_from_text("""
Val1
Val2
Val3
Val4
""")
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 4)
    self.assertTrue(collection.contains('VAL1'))
    self.assertTrue(collection.contains('VAL2'))
    self.assertTrue(collection.contains('VAL3'))
    self.assertTrue(collection.contains('VAL4'))
    self.assertFalse(collection.contains('VAL5'))
class Brain(object):
    """Aggregates the AIML parser plus every lookup collection and service
    needed to answer a client question: substitution collections
    (denormal/normal/gender/person), knowledge collections (predicates,
    pronouns, triples, sets, maps, properties), pre/post processors,
    security services and out-of-band (OOB) command handlers."""

    def __init__(self, configuration: BrainConfiguration):
        self._configuration = configuration
        self._aiml_parser = AIMLParser(self)
        # Word/phrase substitution collections.
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        # NOTE(review): person2 reuses PersonCollection — confirm intended.
        self._person2_collection = PersonCollection()
        # Knowledge collections.
        self._predicates_collection = PredicatesCollection()
        self._pronouns_collection = PronounsCollection()
        self._triples_collection = TriplesCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        # Pre/post processing chains.
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        # Optional services; populated during load().
        self._authentication = None
        self._authorisation = None
        self._default_oob = None
        self._oob = {}
        self.load(self._configuration)

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def predicates(self):
        return self._predicates_collection

    @property
    def pronouns(self):
        return self._pronouns_collection

    @property
    def triples(self):
        return self._triples_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def authentication(self):
        return self._authentication

    @property
    def authorisation(self):
        return self._authorisation

    @property
    def default_oob(self):
        return self._default_oob

    @property
    def oobs(self):
        return self._oob

    def load_binary(self, brain_configuration):
        """Load a previously pickled AIML parser from disk.

        Returns False when the binary loaded successfully, True when the
        caller should fall back to parsing the AIML source (load failed
        and the configuration allows the fallback). Re-raises on failure
        when no fallback is allowed.
        """
        logging.info("Loading binary brain from [%s]" % brain_configuration.binaries.binary_filename)
        try:
            start = datetime.datetime.now()
            # Pausing the GC speeds up unpickling a large object graph;
            # the finally guarantees it is re-enabled even if load fails.
            gc.disable()
            try:
                with open(brain_configuration.binaries.binary_filename, "rb") as f:
                    self._aiml_parser = pickle.load(f)
            finally:
                gc.enable()
            stop = datetime.datetime.now()
            diff = stop - start
            logging.info("Brain load took a total of %.2f sec" % diff.total_seconds())
            # Fix: the original assigned a local load_aiml flag here and
            # never returned it, so load() could not skip the source parse.
            return False
        except Exception as e:
            logging.exception(e)
            if brain_configuration.binaries.load_aiml_on_binary_fail is True:
                return True
            else:
                raise e

    def load_aiml(self, brain_configuration):
        """Parse the AIML source files named by the configuration."""
        logging.info("Loading aiml source brain")
        self._aiml_parser.load_aiml(brain_configuration)

    def save_binary(self, brain_configuration):
        """Pickle the parsed AIML tree so later startups can skip parsing."""
        logging.info("Saving binary brain to [%s]" % brain_configuration.binaries.binary_filename)
        start = datetime.datetime.now()
        with open(brain_configuration.binaries.binary_filename, "wb") as f:
            pickle.dump(self._aiml_parser, f)
        stop = datetime.datetime.now()
        diff = stop - start
        logging.info("Brain save took a total of %.2f sec" % diff.total_seconds())

    def load(self, brain_configuration: BrainConfiguration):
        """Build the brain: AIML (binary or source), then collections,
        services, security services and OOB processors."""
        if brain_configuration.binaries.load_binary is True:
            load_aiml = self.load_binary(brain_configuration)
        else:
            load_aiml = True
        # Only parse AIML source when no usable binary was loaded.
        if load_aiml is True:
            self.load_aiml(brain_configuration)
        if brain_configuration.binaries.save_binary is True:
            self.save_binary(brain_configuration)
        logging.info("Loading collections")
        self.load_collections(brain_configuration)
        logging.info("Loading services")
        self.load_services(brain_configuration)
        logging.info("Loading security services")
        self.load_security_services(brain_configuration)
        logging.info("Loading oob processors")
        self.load_oob_processors(brain_configuration)

    def _load_denormals(self, brain_configuration):
        if brain_configuration.files.denormal is not None:
            total = self._denormal_collection.load_from_filename(
                brain_configuration.files.denormal)
            logging.info("Loaded a total of %d denormalisations", total)
        else:
            logging.warning("No configuration setting for denormal")

    def _load_normals(self, brain_configuration):
        if brain_configuration.files.normal is not None:
            total = self._normal_collection.load_from_filename(
                brain_configuration.files.normal)
            logging.info("Loaded a total of %d normalisations", total)
        else:
            logging.warning("No configuration setting for normal")

    def _load_genders(self, brain_configuration):
        if brain_configuration.files.gender is not None:
            total = self._gender_collection.load_from_filename(
                brain_configuration.files.gender)
            logging.info("Loaded a total of %d genderisations", total)
        else:
            logging.warning("No configuration setting for gender")

    def _load_persons(self, brain_configuration):
        if brain_configuration.files.person is not None:
            total = self._person_collection.load_from_filename(
                brain_configuration.files.person)
            logging.info("Loaded a total of %d persons", total)
        else:
            logging.warning("No configuration setting for person")

    def _load_person2s(self, brain_configuration):
        if brain_configuration.files.person2 is not None:
            total = self._person2_collection.load_from_filename(
                brain_configuration.files.person2)
            logging.info("Loaded a total of %d person2s", total)
        else:
            logging.warning("No configuration setting for person2")

    def _load_predicates(self, brain_configuration):
        if brain_configuration.files.predicates is not None:
            total = self._predicates_collection.load_from_filename(
                brain_configuration.files.predicates)
            logging.info("Loaded a total of %d predicates", total)
        else:
            logging.warning("No configuration setting for predicates")

    def _load_pronouns(self, brain_configuration):
        if brain_configuration.files.pronouns is not None:
            total = self._pronouns_collection.load_from_filename(
                brain_configuration.files.pronouns)
            logging.info("Loaded a total of %d pronouns", total)
        else:
            logging.warning("No configuration setting for pronouns")

    def _load_properties(self, brain_configuration):
        if brain_configuration.files.properties is not None:
            total = self._properties_collection.load_from_filename(
                brain_configuration.files.properties)
            logging.info("Loaded a total of %d properties", total)
        else:
            logging.warning("No configuration setting for properties")

    def _load_triples(self, brain_configuration):
        if brain_configuration.files.triples is not None:
            # Fix: triples were being loaded into the properties collection,
            # leaving self._triples_collection permanently empty.
            total = self._triples_collection.load_from_filename(
                brain_configuration.files.triples)
            logging.info("Loaded a total of %d triples", total)
        else:
            logging.warning("No configuration setting for triples")

    def _load_sets(self, brain_configuration):
        total = self._sets_collection.load(brain_configuration.files.set_files)
        logging.info("Loaded a total of %d sets files", total)

    def _load_maps(self, brain_configuration):
        total = self._maps_collection.load(brain_configuration.files.map_files)
        logging.info("Loaded a total of %d maps files", total)

    def _load_preprocessors(self, brain_configuration):
        if brain_configuration.files.preprocessors is not None:
            total = self._preprocessors.load(
                brain_configuration.files.preprocessors)
            logging.info("Loaded a total of %d pre processors", total)
        else:
            logging.warning("No configuration setting for pre processors")

    def _load_postprocessors(self, brain_configuration):
        if brain_configuration.files.postprocessors is not None:
            total = self._postprocessors.load(
                brain_configuration.files.postprocessors)
            logging.info("Loaded a total of %d post processors", total)
        else:
            logging.warning("No configuration setting for post processors")

    def load_collections(self, brain_configuration):
        """Load every substitution/knowledge collection from configuration."""
        self._load_denormals(brain_configuration)
        self._load_normals(brain_configuration)
        self._load_genders(brain_configuration)
        self._load_persons(brain_configuration)
        self._load_person2s(brain_configuration)
        self._load_predicates(brain_configuration)
        self._load_pronouns(brain_configuration)
        self._load_properties(brain_configuration)
        self._load_triples(brain_configuration)
        self._load_sets(brain_configuration)
        self._load_maps(brain_configuration)
        self._load_preprocessors(brain_configuration)
        self._load_postprocessors(brain_configuration)

    def load_services(self, brain_configuration):
        """Pre-load every configured external service."""
        ServiceFactory.preload_services(brain_configuration.services)

    def load_security_services(self, brain_configuration):
        """Instantiate the configured authentication/authorisation classes.

        Instantiation failures are logged and swallowed so a bad security
        class does not prevent the brain from starting (best-effort).
        """
        if brain_configuration.security is not None:
            if brain_configuration.security.authentication is not None:
                if brain_configuration.security.authentication.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            brain_configuration.security.authentication.classname)
                        self._authentication = classobject(
                            brain_configuration.security.authentication)
                    except Exception as excep:
                        logging.exception(excep)
            else:
                logging.debug("No authentication configuration defined")
            if brain_configuration.security.authorisation is not None:
                if brain_configuration.security.authorisation.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            brain_configuration.security.authorisation.classname)
                        self._authorisation = classobject(
                            brain_configuration.security.authorisation)
                    except Exception as excep:
                        logging.exception(excep)
            else:
                logging.debug("No authorisation configuration defined")
        else:
            logging.debug("No security configuration defined, running open...")

    def pre_process_question(self, bot, clientid, question):
        """Run the question through the pre-processor chain."""
        return self.preprocessors.process(bot, clientid, question)

    def ask_question(self, bot, clientid, sentence) -> str:
        """Match a sentence against the AIML tree and resolve its template.

        Returns the resolved response string (with any OOB section executed
        and its output appended), or None when nothing matched.
        """
        if self.authentication is not None:
            if self.authentication.authenticate(clientid) is False:
                # Fix: the original %s placeholder had no argument.
                logging.error("[%s] failed authentication!", clientid)
                return self.authentication.configuration.denied_srai

        conversation = bot.get_conversation(clientid)

        topic_pattern = conversation.predicate("topic")
        if topic_pattern is None:
            logging.info("No Topic pattern default to [*]")
            topic_pattern = "*"
        else:
            logging.info("Topic pattern = [%s]", topic_pattern)

        try:
            that_question = conversation.nth_question(2)
            that_sentence = that_question.current_sentence()
            # If the last response was valid, i.e. not None and not an empty
            # string, use it as the that_pattern, otherwise default to '*'.
            if that_sentence.response is not None and that_sentence.response != '':
                that_pattern = TextUtils.strip_all_punctuation(that_sentence.response)
                logging.info("That pattern = [%s]", that_pattern)
            else:
                logging.info("That pattern, no response, default to [*]")
                that_pattern = "*"
        except Exception:
            logging.info("No That pattern default to [*]")
            that_pattern = "*"

        match_context = self._aiml_parser.match_sentence(
            bot, clientid, sentence,
            topic_pattern=topic_pattern,
            that_pattern=that_pattern)
        if match_context is not None:
            template_node = match_context.template_node()
            logging.debug("AIML Parser evaluating template [%s]",
                          template_node.to_string())
            response = template_node.template.resolve(bot, clientid)
            if "<oob>" in response:
                response, oob = self.strip_oob(response)
                if oob is not None:
                    oob_response = self.process_oob(bot, clientid, oob)
                    response = response + " " + oob_response
            return response
        return None

    def load_oob_processors(self, brain_configuration):
        """Instantiate the default and named OOB handlers (best-effort)."""
        if brain_configuration.oob is not None:
            if brain_configuration.oob.default() is not None:
                try:
                    logging.info("Loading default oob")
                    classobject = ClassLoader.instantiate_class(
                        brain_configuration.oob.default().classname)
                    self._default_oob = classobject()
                except Exception as excep:
                    logging.exception(excep)
            for oob_name in brain_configuration.oob.oobs():
                try:
                    logging.info("Loading oob: %s" % oob_name)
                    classobject = ClassLoader.instantiate_class(
                        brain_configuration.oob.oob(oob_name).classname)
                    self._oob[oob_name] = classobject()
                except Exception as excep:
                    logging.exception(excep)

    def strip_oob(self, response):
        """Split a response into (visible text, oob xml).

        Returns (response, None) when the response holds no <oob> section.
        """
        # Raw string: the original non-raw "\/" is an invalid escape.
        m = re.compile(r"(.*)(<\s*oob\s*>.*<\/\s*oob\s*>)(.*)")
        g = m.match(response)
        if g is not None:
            front = g.group(1).strip()
            back = g.group(3).strip()
            response = ""
            if front != "":
                response = front + " "
            response += back
            oob = g.group(2)
            return response, oob
        return response, None

    def process_oob(self, bot, clientid, oob_command):
        """Dispatch an <oob> XML command to its registered handler."""
        oob_content = ET.fromstring(oob_command)
        if oob_content.tag == 'oob':
            for tag in oob_content:
                # Fix: the original tested the Element object itself against
                # the dict's string keys, so a registered handler could never
                # match; compare the element's tag name instead.
                if tag.tag in self._oob:
                    oob_class = self._oob[tag.tag]
                    return oob_class.process_out_of_bounds(bot, clientid, tag)
                else:
                    return self._default_oob.process_out_of_bounds(bot, clientid, tag)
        return ""

    def post_process_response(self, bot, clientid, response: str):
        """Run the response through the post-processor chain."""
        return self.postprocessors.process(bot, clientid, response)

    def dump_tree(self):
        """Dump the parsed AIML pattern tree to the log for debugging."""
        self._aiml_parser.pattern_parser.root.dump(tabs="")
class Brain(object):
    """Central knowledge store for a single bot.

    Owns the AIML parser plus every lookup collection (normalisation,
    gender/person substitutions, RDF triples, sets, maps, properties),
    the pre/post processor chains, security services, OOB handlers and
    regex templates.  Everything is loaded from a BrainConfiguration
    at construction time.
    """

    def __init__(self, bot, configuration: BrainConfiguration):
        """Create the brain, load all collections/services from configuration,
        then optionally dump the pattern graph (see dump_brain_tree)."""
        self._bot = bot
        self._configuration = configuration
        self._tokenizer = self.load_tokenizer()
        self._aiml_parser = self.load_aiml_parser()
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        # NOTE(review): person2 reuses PersonCollection - appears intentional,
        # both hold person-substitution pairs loaded from different files.
        self._person2_collection = PersonCollection()
        self._rdf_collection = RDFCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._variables_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        self._authentication = None
        self._authorisation = None
        self._default_oob = None
        self._oob = {}
        self._regex_templates = {}
        self._dynamics_collection = DynamicsCollection()
        self.load(self.configuration)
        self.dump_brain_tree()

    def ylogger_type(self):
        # Tag used by YLogger to identify this object in log output.
        return "brain"

    # ------------------------------------------------------------------
    # Read-only accessors for configuration and loaded collections.
    # ------------------------------------------------------------------

    @property
    def id(self):
        return self._configuration.section_name

    @property
    def bot(self):
        return self._bot

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def rdf(self):
        return self._rdf_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def variables(self):
        return self._variables_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def authentication(self):
        return self._authentication

    @property
    def authorisation(self):
        return self._authorisation

    @property
    def default_oob(self):
        return self._default_oob

    @property
    def oobs(self):
        return self._oob

    @property
    def regex_templates(self):
        return self._regex_templates

    @property
    def dynamics(self):
        return self._dynamics_collection

    @property
    def tokenizer(self):
        return self._tokenizer

    def load_tokenizer(self):
        """Instantiate the configured tokenizer class, falling back to the
        default Tokenizer when no classname is configured."""
        if self.configuration is not None and self.configuration.tokenizer.classname is not None:
            YLogger.info(self, "Loading tokenizer from class [%s]",
                         self.configuration.tokenizer.classname)
            tokenizer_class = ClassLoader.instantiate_class(
                self.configuration.tokenizer.classname)
            return tokenizer_class(self.configuration.tokenizer.split_chars)
        else:
            return Tokenizer(self.configuration.tokenizer.split_chars)

    def load_aiml_parser(self):
        """Create the AIML parser bound to this brain."""
        return AIMLParser(self)

    def load_binary(self, configuration):
        """Load a previously pickled AIML parser from disk.

        Returns False when the binary loaded (caller can skip the AIML
        source load); returns True when the load failed and the
        configuration permits falling back to parsing AIML source.
        Re-raises the failure otherwise.
        """
        YLogger.info(self, "Loading binary brain from [%s]",
                     configuration.binaries.binary_filename)
        try:
            start = datetime.datetime.now()
            # GC disabled while unpickling the (large) parser graph to speed up the load.
            gc.disable()
            bin_file = open(configuration.binaries.binary_filename, "rb")
            self._aiml_parser = pickle.load(bin_file)
            gc.enable()
            bin_file.close()
            stop = datetime.datetime.now()
            diff = stop - start
            YLogger.info(self, "Brain load took a total of %.2f sec",
                         diff.total_seconds())
            return False  # Tell caller, load succeeded and skip aiml load
        except Exception as excep:
            YLogger.exception(self, excep)
            if configuration.binaries.load_aiml_on_binary_fail is True:
                return True  # Tell caller, load failed and to load aiml directly
            else:
                raise excep

    def load_aiml(self, configuration):
        """Parse the AIML source files into the pattern graph."""
        YLogger.info(self, "Loading aiml source brain")
        self._aiml_parser.load_aiml(configuration)

    def save_binary(self, configuration):
        """Pickle the parsed AIML graph so later startups can skip parsing."""
        YLogger.info(self, "Saving binary brain to [%s]",
                     configuration.binaries.binary_filename)
        start = datetime.datetime.now()
        bin_file = open(configuration.binaries.binary_filename, "wb")
        pickle.dump(self._aiml_parser, bin_file)
        bin_file.close()
        stop = datetime.datetime.now()
        diff = stop - start
        YLogger.info(self, "Brain save took a total of %.2f sec",
                     diff.total_seconds())

    def load(self, configuration: BrainConfiguration):
        """Load the whole brain: AIML (binary or source), then every
        collection and service declared in the configuration."""
        load_aiml = True
        if self.configuration.binaries.load_binary is True:
            # load_binary returns False on success, True to request a source load.
            load_aiml = self.load_binary(configuration)
        if load_aiml is True:
            self.load_aiml(configuration)
            # Only re-save the binary when we actually re-parsed the source.
            if configuration.binaries.save_binary is True:
                self.save_binary(configuration)
        YLogger.info(self, "Loading collections")
        self.load_collections(configuration)
        YLogger.info(self, "Loading services")
        self.load_services(configuration)
        YLogger.info(self, "Loading security services")
        self.load_security_services(configuration)
        YLogger.info(self, "Loading oob processors")
        self.load_oob_processors(configuration)
        YLogger.info(self, "Loading regex templates")
        self.load_regex_templates(configuration)
        YLogger.info(self, "Loading dynamics sets, maps and vars")
        self.load_dynamics(configuration)

    def dump_brain_tree(self):
        """Write the pattern graph to the configured braintree file, if any."""
        if self.configuration.braintree.file is not None:
            YLogger.debug(self, "Dumping AIML Graph as tree to [%s]",
                          self._configuration.braintree.file)
            # NOTE(review): Brain defines no 'clientid' attribute or property in
            # this class - this call looks like it would raise AttributeError
            # when a braintree file is configured. Confirm against save_braintree's
            # expected signature.
            self.aiml_parser.pattern_parser.save_braintree(
                self.bot, self.clientid, self.configuration.braintree.file,
                self.configuration.braintree.content)

    def _load_denormals(self, configuration):
        # Denormalisation pairs (e.g. expanding contractions back out).
        if configuration.files.denormal is not None:
            total = self._denormal_collection.load_from_filename(
                configuration.files.denormal)
            YLogger.info(self, "Loaded a total of %d denormalisations", total)
        else:
            YLogger.warning(self, "No configuration setting for denormal")

    def _load_normals(self, configuration):
        if configuration.files.normal is not None:
            total = self._normal_collection.load_from_filename(
                configuration.files.normal)
            YLogger.info(self, "Loaded a total of %d normalisations", total)
        else:
            YLogger.warning(self, "No configuration setting for normal")

    def _load_genders(self, configuration):
        if configuration.files.gender is not None:
            total = self._gender_collection.load_from_filename(
                configuration.files.gender)
            YLogger.info(self, "Loaded a total of %d genderisations", total)
        else:
            YLogger.warning(self, "No configuration setting for gender")

    def _load_persons(self, configuration):
        if configuration.files.person is not None:
            total = self._person_collection.load_from_filename(
                configuration.files.person)
            YLogger.info(self, "Loaded a total of %d persons", total)
        else:
            YLogger.warning(self, "No configuration setting for person")

    def _load_person2s(self, configuration):
        if configuration.files.person2 is not None:
            total = self._person2_collection.load_from_filename(
                configuration.files.person2)
            YLogger.info(self, "Loaded a total of %d person2s", total)
        else:
            YLogger.warning(self, "No configuration setting for person2")

    def _load_properties(self, configuration):
        if configuration.files.properties is not None:
            total = self._properties_collection.load_from_filename(
                configuration.files.properties)
            YLogger.info(self, "Loaded a total of %d properties", total)
        else:
            YLogger.warning(self, "No configuration setting for properties")

    def _load_variables(self, configuration):
        if configuration.files.variables is not None:
            total = self._variables_collection.load_from_filename(
                configuration.files.variables)
            YLogger.info(self, "Loaded a total of %d variables", total)
        else:
            YLogger.warning(self, "No configuration setting for variables")

    def _load_rdf(self, configuration):
        # Prefer a directory of rdf files; fall back to a single triples file.
        if configuration.files.rdf_files is not None and configuration.files.rdf_files.files:
            total = self._rdf_collection.load(configuration.files.rdf_files)
            YLogger.info(self, "Loaded a total of %d rdf files", total)
        elif configuration.files.triples is not None:
            total = self._rdf_collection.load_from_filename(
                configuration.files.triples)
            YLogger.info(self, "Loaded a total of %d triples", total)
        else:
            YLogger.warning(self, "No configuration setting for triples")

    def _load_sets(self, configuration):
        total = self._sets_collection.load(configuration.files.set_files)
        YLogger.info(self, "Loaded a total of %d sets files", total)

    def _load_maps(self, configuration):
        total = self._maps_collection.load(configuration.files.map_files)
        YLogger.info(self, "Loaded a total of %d maps files", total)

    def _load_preprocessors(self, configuration):
        if configuration.files.preprocessors is not None:
            total = self._preprocessors.load(configuration.files.preprocessors)
            YLogger.info(self, "Loaded a total of %d pre processors", total)
        else:
            YLogger.warning(self, "No configuration setting for pre processors")

    def _load_postprocessors(self, configuration):
        if configuration.files.postprocessors is not None:
            total = self._postprocessors.load(
                configuration.files.postprocessors)
            YLogger.info(self, "Loaded a total of %d post processors", total)
        else:
            YLogger.warning(self, "No configuration setting for post processors")

    def load_collections(self, configuration):
        """Load every file-backed collection in a fixed order."""
        self._load_denormals(configuration)
        self._load_normals(configuration)
        self._load_genders(configuration)
        self._load_persons(configuration)
        self._load_person2s(configuration)
        self._load_properties(configuration)
        self._load_variables(configuration)
        self._load_rdf(configuration)
        self._load_sets(configuration)
        self._load_maps(configuration)
        self._load_preprocessors(configuration)
        self._load_postprocessors(configuration)

    def load_services(self, configuration):
        """Pre-load external services (SRAIX etc.) declared in configuration."""
        ServiceFactory.preload_services(configuration.services)

    def load_security_services(self, configuration):
        """Instantiate configured authentication/authorisation services.

        Instantiation failures are logged and the service is simply left
        as None, i.e. security degrades to open access on error.
        """
        if configuration.security is not None:
            if configuration.security.authentication is not None:
                if configuration.security.authentication.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            configuration.security.authentication.classname)
                        self._authentication = classobject(
                            configuration.security.authentication)
                    except Exception as excep:
                        YLogger.exception(self, excep)
            else:
                YLogger.debug(self, "No authentication configuration defined")
            if configuration.security.authorisation is not None:
                if configuration.security.authorisation.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            configuration.security.authorisation.classname)
                        self._authorisation = classobject(
                            configuration.security.authorisation)
                    except Exception as excep:
                        YLogger.exception(self, excep)
            else:
                YLogger.debug(self, "No authorisation configuration defined")
        else:
            YLogger.debug(
                self, "No security configuration defined, running open...")

    def load_dynamics(self, configuration):
        """Load dynamic sets/maps/vars from configuration, if present."""
        if configuration.dynamics is not None:
            self._dynamics_collection.load_from_configuration(
                configuration.dynamics)
        else:
            YLogger.debug(self, "No dynamics configuration defined...")

    def pre_process_question(self, client_context, question):
        """Run the question through the pre-processor chain."""
        return self.preprocessors.process(client_context, question)

    def load_oob_processors(self, configuration):
        """Instantiate the default and named out-of-band (OOB) handlers.

        Each handler failure is logged and skipped so one bad handler
        does not prevent the rest from loading.
        """
        if configuration.oob is not None:
            if configuration.oob.default() is not None:
                try:
                    YLogger.info(self, "Loading default oob")
                    classobject = ClassLoader.instantiate_class(
                        configuration.oob.default().classname)
                    self._default_oob = classobject()
                except Exception as excep:
                    YLogger.exception(self, excep)
            for oob_name in configuration.oob.oobs():
                try:
                    YLogger.info(self, "Loading oob: %s", oob_name)
                    classobject = ClassLoader.instantiate_class(
                        configuration.oob.oob(oob_name).classname)
                    self._oob[oob_name] = classobject()
                except Exception as excep:
                    YLogger.exception(self, excep)

    def load_regex_templates(self, configuration):
        """Compile named regex templates (case-insensitive) from a properties file.

        Invalid patterns are logged and skipped.
        """
        if configuration.files.regex_templates is not None:
            collection = PropertiesCollection()
            total = collection.load_from_filename(
                configuration.files.regex_templates)
            YLogger.info(self, "Loaded a total of %d regex templates", total)
            for pair in collection.pairs:
                name = pair[0]
                pattern = pair[1]
                try:
                    self._regex_templates[name] = re.compile(
                        pattern, re.IGNORECASE)
                except Exception:
                    YLogger.error(self, "Invalid regex template [%s]", pattern)

    def regex_template(self, name):
        """Return the compiled regex template for name, or None if unknown."""
        if name in self._regex_templates:
            return self._regex_templates[name]
        return None

    def strip_oob(self, response):
        """Split an <oob>...</oob> fragment out of a response.

        Returns (response_without_oob, oob_fragment) when a fragment is
        found, otherwise (response, None).
        """
        match = re.compile(r"(.*)(<\s*oob\s*>.*<\/\s*oob\s*>)(.*)")
        groupings = match.match(response)
        if groupings is not None:
            front = groupings.group(1).strip()
            back = groupings.group(3).strip()
            response = ""
            if front != "":
                response = front + " "
            response += back
            oob = groupings.group(2)
            return response, oob
        return response, None

    def process_oob(self, client_context, oob_command):
        """Dispatch an OOB XML command to its registered handler.

        Unregistered tags fall through to the default OOB handler.
        NOTE(review): both branches return on the FIRST child element, so
        additional children of <oob> are never processed - confirm this
        single-command behaviour is intended.
        """
        oob_content = ET.fromstring(oob_command)
        if oob_content.tag == 'oob':
            for child in oob_content.findall('./'):
                if child.tag in self._oob:
                    oob_class = self._oob[child.tag]
                    return oob_class.process_out_of_bounds(
                        client_context, child)
                return self._default_oob.process_out_of_bounds(
                    client_context, child)
        return ""

    def post_process_response(self, client_context, response: str):
        """Run the response through the post-processor chain."""
        return self.postprocessors.process(client_context, response)

    def failed_authentication(self, client_context):
        """Produce the access-denied response for an unauthenticated client."""
        # NOTE(review): "[%s]" has no argument supplied - the client id is
        # never interpolated into this log message.
        YLogger.error(client_context, "[%s] failed authentication!")
        # If we have an SRAI defined, then use that
        if self.authentication.configuration.denied_srai is not None:
            match_context = self._aiml_parser.match_sentence(
                client_context,
                Sentence(self._bot.brain.tokenizer,
                         self.authentication.configuration.denied_srai),
                topic_pattern="*",
                that_pattern="*")
            # If the SRAI matched then return the result
            if match_context is not None:
                return self.resolve_matched_template(client_context,
                                                     match_context)
        # Otherwise return the static text, which is either
        # user defined via config.yaml
        # or the default value BrainSecurityConfiguration.DEFAULT_ACCESS_DENIED
        return self.authentication.configuration.denied_text

    def authenticate_user(self, client_context):
        """Return the denied response when authentication fails, else None."""
        if self.authentication is not None:
            if self.authentication.authenticate(client_context) is False:
                return self.failed_authentication(client_context)
        return None

    def resolve_matched_template(self, client_context, match_context):
        """Resolve the matched template to text, handling any embedded OOB."""
        template_node = match_context.template_node()
        YLogger.debug(client_context, "AIML Parser evaluating template [%s]",
                      template_node.to_string())
        response = template_node.template.resolve(client_context)
        if "<oob>" in response:
            response, oob = self.strip_oob(response)
            if oob is not None:
                oob_response = self.process_oob(client_context, oob)
                response = response + " " + oob_response
        return response

    def ask_question(self, client_context, sentence):
        """Match a sentence against the pattern graph and return the
        resolved response text, or None when nothing matches.

        Authentication is checked first; a non-None denied response is
        returned verbatim.
        """
        client_context.brain = self
        authenticated = self.authenticate_user(client_context)
        if authenticated is not None:
            return authenticated
        conversation = client_context.bot.get_conversation(client_context)
        topic_pattern = conversation.get_topic_pattern()
        that_pattern = conversation.get_that_pattern()
        match_context = self._aiml_parser.match_sentence(
            client_context, sentence,
            topic_pattern=topic_pattern,
            that_pattern=that_pattern)
        if match_context is not None:
            return self.resolve_matched_template(client_context, match_context)
        return None
def test_collection_operations_jp(self):
    """Exercise add/query/remove/empty on SetCollection with CJK sets.

    Uses two Japanese-language sets flagged is_cjk=True and verifies
    membership, store-name lookup, cjk flag, word counting, removal of
    one set, and that empty() clears both sets and stores.
    """
    collection = SetCollection()

    set_dict = {"千": ["千葉", "千葉県"], "東": ["東京", "東京都"]}
    values = {"千葉": "千葉", "千葉県": "千葉県", "東京": "東京", "東京都": "東京都"}
    collection.add_set("TESTSET", set_dict, "teststore", True, values)

    set_dict = {"神": ["神奈川", "神戸", "神田"], "大": ["大阪", "大分", "大津"]}
    values = {
        "神奈川": "神奈川",
        "神戸": "神戸",
        "神田": "神田",
        "大阪": "大阪",
        "大分": "大分",
        "大津": "大津"
    }
    collection.add_set("TESTSET2", set_dict, "teststore", True, values)

    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertTrue(collection.contains("TESTSET"))
    self.assertTrue(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))
    self.assertEqual(collection.store_name("TESTSET"), "teststore")
    self.assertEqual(collection.store_name("TESTSET2"), "teststore")
    self.assertIsNone(collection.store_name("TESTSET3"))
    self.assertTrue(collection.is_cjk("TESTSET"))
    self.assertTrue(collection.is_cjk("TESTSET2"))
    self.assertIsNone(collection.is_cjk("TESTSET3"))
    aset = collection.set_list("TESTSET")
    self.assertIsNotNone(aset)
    # 4 words in TESTSET + 6 in TESTSET2.
    self.assertEqual(10, collection.count_words_in_sets())

    collection.remove("TESTSET2")
    self.assertTrue(collection.contains("TESTSET"))
    self.assertFalse(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))

    collection.empty()
    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertFalse(collection.contains("TESTSET"))
    self.assertIsNone(collection.set_list("TESTSET"))
    # Fix: assertNotEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertNotEqual.
    self.assertNotEqual(collection.store_name("TESTSET"), "teststore")
def test_collection_operations(self):
    """Exercise add/query/remove/empty on SetCollection with non-CJK sets.

    Adds two multi-word-phrase sets flagged is_cjk=False and verifies
    membership, store-name lookup, cjk flag, word counting, removal of
    one set, and that empty() clears both sets and stores.
    """
    collection = SetCollection()

    set_dict = {
        "A": [["A", "A B", "A C"]],
        "D": [["D"]],
        "E": [["E", "E F"]]
    }
    values = {
        "A": "A",
        "A B": "A B",
        "A C": "A C",
        "D": "D",
        "E": "E",
        "E F": "E F"
    }
    collection.add_set("TESTSET", set_dict, "teststore", False, values)

    set_dict = {
        "1": [["1", "1 2", "1 3"]],
        "4": [["4"]],
        "5": [["5", "5 6"]]
    }
    values = {
        "1": "1",
        "1 2": "1 2",
        "1 3": "1 3",
        "4": "4",
        "5": "5",
        "5 6": "5 6"
    }
    collection.add_set("TESTSET2", set_dict, "teststore", False, values)

    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertTrue(collection.contains("TESTSET"))
    self.assertTrue(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))
    self.assertEqual(collection.store_name("TESTSET"), "teststore")
    self.assertEqual(collection.store_name("TESTSET2"), "teststore")
    self.assertIsNone(collection.store_name("TESTSET3"))
    self.assertFalse(collection.is_cjk("TESTSET"))
    self.assertFalse(collection.is_cjk("TESTSET2"))
    self.assertIsNone(collection.is_cjk("TESTSET3"))
    aset = collection.set_list("TESTSET")
    self.assertIsNotNone(aset)
    # 6 words in each of the two sets.
    self.assertEqual(12, collection.count_words_in_sets())

    collection.remove("TESTSET2")
    self.assertTrue(collection.contains("TESTSET"))
    self.assertFalse(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))

    collection.empty()
    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertFalse(collection.contains("TESTSET"))
    self.assertIsNone(collection.set_list("TESTSET"))
    # Fix: assertNotEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertNotEqual.
    self.assertNotEqual(collection.store_name("TESTSET"), "teststore")
def test_collection_operations(self):
    """Exercise add/query/remove/empty on the older SetCollection API
    (add_set without is_cjk/values, storename(), set()).

    NOTE(review): this method shares its name with another
    test_collection_operations in this file; if both live in the same
    TestCase class, the later definition shadows the earlier one.
    """
    collection = SetCollection()

    collection.add_set("TESTSET", {
        "A": [["A", "B", "C"]],
        "D": [["D"]],
        "E": [["E", "F"]]
    }, "teststore")
    collection.add_set("TESTSET2", {
        "1": [["1", "2", "3"]],
        "4": [["4"]],
        "5": [["5", "6"]]
    }, "teststore")

    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertTrue(collection.contains("TESTSET"))
    self.assertTrue(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))
    self.assertEqual("teststore", collection.storename("TESTSET"))
    self.assertIsNone(collection.storename("TESTMAP4"))
    aset = collection.set("TESTSET")
    self.assertIsNotNone(aset)
    # 6 distinct words across both sets.
    self.assertEqual(6, collection.count_words_in_sets())

    collection.remove("TESTSET2")
    self.assertTrue(collection.contains("TESTSET"))
    self.assertFalse(collection.contains("TESTSET2"))
    self.assertFalse(collection.contains("TESTSET3"))

    collection.empty()
    self.assertIsNotNone(collection.sets)
    self.assertIsNotNone(collection.stores)
    self.assertFalse(collection.contains("TESTSET"))
    self.assertIsNone(collection.set("TESTSET"))
    # Fix: assertNotEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertNotEqual.
    self.assertNotEqual(collection.storename("TESTSET"), "teststore")
def test_initialise_collection(self):
    """A freshly constructed SetCollection exposes its sets, stores and
    is_cjk accessors straight away."""
    fresh = SetCollection()
    self.assertIsNotNone(fresh)
    self.assertIsNotNone(fresh.sets)
    self.assertIsNotNone(fresh.stores)
    self.assertIsNotNone(fresh.is_cjk)
class Brain(object):
    """Earlier, logging-module-based revision of the Brain class.

    Same responsibilities as the YLogger revision: owns the AIML parser,
    the substitution/RDF/set/map/property collections, processor chains,
    security services, OOB handlers and regex templates, all loaded from
    a BrainConfiguration at construction time.
    """

    def __init__(self, configuration: BrainConfiguration):
        self._configuration = configuration
        self._aiml_parser = AIMLParser(self)
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        # person2 reuses PersonCollection; loaded from a separate file.
        self._person2_collection = PersonCollection()
        self._rdf_collection = RDFCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        self._authentication = None
        self._authorisation = None
        self._default_oob = None
        self._oob = {}
        self._regex_templates = {}
        self._dynamics_collection = DynamicsCollection()
        self.load(self._configuration)

    # ------------------------------------------------------------------
    # Read-only accessors for configuration and loaded collections.
    # ------------------------------------------------------------------

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def rdf(self):
        return self._rdf_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def authentication(self):
        return self._authentication

    @property
    def authorisation(self):
        return self._authorisation

    @property
    def default_oob(self):
        return self._default_oob

    @property
    def oobs(self):
        return self._oob

    @property
    def regex_templates(self):
        return self._regex_templates

    @property
    def dynamics(self):
        return self._dynamics_collection

    def load_binary(self, brain_configuration):
        """Load a pickled AIML parser from disk.

        NOTE(review): the local flag load_aiml is assigned here but never
        returned or stored, so the caller cannot see whether the binary
        load succeeded - in the newer revision load_binary returns this
        flag. Looks like a latent bug in this revision; confirm.
        """
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading binary brain from [%s]" %
                         brain_configuration.binaries.binary_filename)
        try:
            start = datetime.datetime.now()
            # GC disabled while unpickling the large parser graph to speed the load.
            gc.disable()
            f = open(brain_configuration.binaries.binary_filename, "rb")
            self._aiml_parser = pickle.load(f)
            gc.enable()
            f.close()
            stop = datetime.datetime.now()
            diff = stop - start
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Brain load took a total of %.2f sec" %
                             diff.total_seconds())
            load_aiml = False
        except Exception as e:
            logging.exception(e)
            if brain_configuration.binaries.load_aiml_on_binary_fail is True:
                load_aiml = True
            else:
                raise e

    def load_aiml(self, brain_configuration):
        """Parse the AIML source files into the pattern graph."""
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading aiml source brain")
        self._aiml_parser.load_aiml(brain_configuration)

    def save_binary(self, brain_configuration):
        """Pickle the parsed AIML graph so later startups can skip parsing."""
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Saving binary brain to [%s]" %
                         brain_configuration.binaries.binary_filename)
        start = datetime.datetime.now()
        f = open(brain_configuration.binaries.binary_filename, "wb")
        pickle.dump(self._aiml_parser, f)
        f.close()
        stop = datetime.datetime.now()
        diff = stop - start
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Brain save took a total of %.2f sec" %
                         diff.total_seconds())

    def load(self, brain_configuration: BrainConfiguration):
        """Load the whole brain: binary (optional), AIML source, then every
        collection and service declared in the configuration.

        NOTE(review): AIML source is parsed even after a successful binary
        load because load_binary discards its success flag - see
        load_binary above.
        """
        if brain_configuration.binaries.load_binary is True:
            self.load_binary(brain_configuration)
        self.load_aiml(brain_configuration)
        if brain_configuration.binaries.save_binary is True:
            self.save_binary(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading collections")
        self.load_collections(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading services")
        self.load_services(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading security services")
        self.load_security_services(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading oob processors")
        self.load_oob_processors(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading regex templates")
        self.load_regex_templates(brain_configuration)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loading dynamics sets, maps and vars")
        self.load_dynamics(brain_configuration)

    def _load_denormals(self, brain_configuration):
        # Denormalisation pairs (e.g. expanding contractions back out).
        if brain_configuration.files.denormal is not None:
            total = self._denormal_collection.load_from_filename(
                brain_configuration.files.denormal)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d denormalisations", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for denormal")

    def _load_normals(self, brain_configuration):
        if brain_configuration.files.normal is not None:
            total = self._normal_collection.load_from_filename(
                brain_configuration.files.normal)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d normalisations", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for normal")

    def _load_genders(self, brain_configuration):
        if brain_configuration.files.gender is not None:
            total = self._gender_collection.load_from_filename(
                brain_configuration.files.gender)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d genderisations", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for gender")

    def _load_persons(self, brain_configuration):
        if brain_configuration.files.person is not None:
            total = self._person_collection.load_from_filename(
                brain_configuration.files.person)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d persons", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for person")

    def _load_person2s(self, brain_configuration):
        if brain_configuration.files.person2 is not None:
            total = self._person2_collection.load_from_filename(
                brain_configuration.files.person2)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d person2s", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for person2")

    def _load_properties(self, brain_configuration):
        if brain_configuration.files.properties is not None:
            total = self._properties_collection.load_from_filename(
                brain_configuration.files.properties)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d properties", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for properties")

    def _load_rdf(self, brain_configuration):
        # Prefer a directory of rdf files; fall back to a single triples file.
        if brain_configuration.files.rdf_files is not None and brain_configuration.files.rdf_files.files is not None:
            total = self._rdf_collection.load(
                brain_configuration.files.rdf_files)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d rdf files", total)
        elif brain_configuration.files.triples is not None:
            total = self._rdf_collection.load_from_filename(
                brain_configuration.files.triples)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d triples", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for triples")

    def _load_sets(self, brain_configuration):
        total = self._sets_collection.load(brain_configuration.files.set_files)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loaded a total of %d sets files", total)

    def _load_maps(self, brain_configuration):
        total = self._maps_collection.load(brain_configuration.files.map_files)
        if logging.getLogger().isEnabledFor(logging.INFO):
            logging.info("Loaded a total of %d maps files", total)

    def _load_preprocessors(self, brain_configuration):
        if brain_configuration.files.preprocessors is not None:
            total = self._preprocessors.load(
                brain_configuration.files.preprocessors)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d pre processors", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for pre processors")

    def _load_postprocessors(self, brain_configuration):
        if brain_configuration.files.postprocessors is not None:
            total = self._postprocessors.load(
                brain_configuration.files.postprocessors)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d post processors", total)
        else:
            if logging.getLogger().isEnabledFor(logging.WARNING):
                logging.warning("No configuration setting for post processors")

    def load_collections(self, brain_configuration):
        """Load every file-backed collection in a fixed order.

        Note this revision has no variables collection (added later).
        """
        self._load_denormals(brain_configuration)
        self._load_normals(brain_configuration)
        self._load_genders(brain_configuration)
        self._load_persons(brain_configuration)
        self._load_person2s(brain_configuration)
        self._load_properties(brain_configuration)
        self._load_rdf(brain_configuration)
        self._load_sets(brain_configuration)
        self._load_maps(brain_configuration)
        self._load_preprocessors(brain_configuration)
        self._load_postprocessors(brain_configuration)

    def load_services(self, brain_configuration):
        """Pre-load external services (SRAIX etc.) declared in configuration."""
        ServiceFactory.preload_services(brain_configuration.services)

    def load_security_services(self, brain_configuration):
        """Instantiate configured authentication/authorisation services.

        Instantiation failures are logged and the service is left as
        None, i.e. security degrades to open access on error.
        """
        if brain_configuration.security is not None:
            if brain_configuration.security.authentication is not None:
                if brain_configuration.security.authentication.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            brain_configuration.security.authentication.classname)
                        self._authentication = classobject(
                            brain_configuration.security.authentication)
                    except Exception as excep:
                        logging.exception(excep)
            else:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    logging.debug("No authentication configuration defined")
            if brain_configuration.security.authorisation is not None:
                if brain_configuration.security.authorisation.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            brain_configuration.security.authorisation.classname)
                        self._authorisation = classobject(
                            brain_configuration.security.authorisation)
                    except Exception as excep:
                        logging.exception(excep)
            else:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    logging.debug("No authorisation configuration defined")
        else:
            if logging.getLogger().isEnabledFor(logging.DEBUG):
                logging.debug(
                    "No security configuration defined, running open...")

    def load_dynamics(self, brain_configuration):
        """Load dynamic sets/maps/vars from configuration, if present."""
        if brain_configuration.dynamics is not None:
            self._dynamics_collection.load_from_configuration(
                brain_configuration.dynamics)
        else:
            if logging.getLogger().isEnabledFor(logging.DEBUG):
                logging.debug("No dynamics configuration defined...")

    def pre_process_question(self, bot, clientid, question):
        """Run the question through the pre-processor chain."""
        return self.preprocessors.process(bot, clientid, question)

    def parse_last_sentences_from_response(self, response):
        """Extract the last sentence of a response, punctuation-stripped,
        for use as the 'that' pattern of the next match."""
        # Normalise <br/> variants to sentence breaks before splitting.
        response = re.sub(r'<\s*br\s*/>\s*', ".", response)
        response = re.sub(r'<br></br>*', ".", response)
        sentences = response.split(".")
        sentences = [x for x in sentences if x]
        last_sentence = sentences[-1]
        that_pattern = TextUtils.strip_all_punctuation(last_sentence)
        that_pattern = that_pattern.strip()
        return that_pattern

    def load_oob_processors(self, brain_configuration):
        """Instantiate the default and named out-of-band (OOB) handlers;
        failures are logged and skipped individually."""
        if brain_configuration.oob is not None:
            if brain_configuration.oob.default() is not None:
                try:
                    if logging.getLogger().isEnabledFor(logging.INFO):
                        logging.info("Loading default oob")
                    classobject = ClassLoader.instantiate_class(
                        brain_configuration.oob.default().classname)
                    self._default_oob = classobject()
                except Exception as excep:
                    logging.exception(excep)
            for oob_name in brain_configuration.oob.oobs():
                try:
                    if logging.getLogger().isEnabledFor(logging.INFO):
                        logging.info("Loading oob: %s" % oob_name)
                    classobject = ClassLoader.instantiate_class(
                        brain_configuration.oob.oob(oob_name).classname)
                    self._oob[oob_name] = classobject()
                except Exception as excep:
                    logging.exception(excep)

    def load_regex_templates(self, brain_configuration):
        """Compile named regex templates from a properties file.

        Invalid patterns are logged and skipped.
        NOTE(review): the error log is guarded by an INFO-level check
        rather than ERROR, and unlike the newer revision the compile is
        case-sensitive (no re.IGNORECASE) - confirm both are intended.
        """
        if brain_configuration.files.regex_templates is not None:
            collection = PropertiesCollection()
            total = collection.load_from_filename(
                brain_configuration.files.regex_templates)
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Loaded a total of %d regex templates", total)
            for pair in collection.pairs:
                name = pair[0]
                pattern = pair[1]
                try:
                    self._regex_templates[name] = re.compile(pattern)
                except Exception:
                    if logging.getLogger().isEnabledFor(logging.INFO):
                        logging.error("Invalid regex template [%s]" % pattern)

    def regex_template(self, name):
        """Return the compiled regex template for name, or None if unknown."""
        if name in self._regex_templates:
            return self._regex_templates[name]
        else:
            return None

    def strip_oob(self, response):
        """Split an <oob>...</oob> fragment out of a response.

        Returns (response_without_oob, oob_fragment) when a fragment is
        found, otherwise (response, None).
        """
        m = re.compile("(.*)(<\s*oob\s*>.*<\/\s*oob\s*>)(.*)")
        g = m.match(response)
        if g is not None:
            front = g.group(1).strip()
            back = g.group(3).strip()
            response = ""
            if front != "":
                response = front + " "
            response += back
            oob = g.group(2)
            return response, oob
        return response, None

    def process_oob(self, bot, clientid, oob_command):
        """Dispatch an OOB XML command to its registered handler, falling
        back to the default handler for unregistered tags.

        Both branches return on the first child element, so only one
        command per <oob> block is ever processed.
        """
        oob_content = ET.fromstring(oob_command)
        if oob_content.tag == 'oob':
            for child in oob_content.findall('./'):
                if child.tag in self._oob:
                    oob_class = self._oob[child.tag]
                    return oob_class.process_out_of_bounds(
                        bot, clientid, child)
                else:
                    return self._default_oob.process_out_of_bounds(
                        bot, clientid, child)
        return ""

    def post_process_response(self, bot, clientid, response: str):
        """Run the response through the post-processor chain."""
        return self.postprocessors.process(bot, clientid, response)

    def dump_tree(self):
        """Print the pattern graph to stdout/log for debugging."""
        self._aiml_parser.pattern_parser.root.dump(tabs="")

    def ask_question(self, bot, clientid, sentence, srai=False,
                     brain_question_context=None):
        """Match a sentence against the pattern graph and return the
        resolved response text, or None when nothing matches.

        :param srai: True when this question originates from an SRAI call.
        :param brain_question_context: optional diagnostic object that is
            populated with the clientid, patterns, match context and
            raw/oob responses as matching proceeds.
        """
        if brain_question_context is not None:
            brain_question_context.clientid = clientid
            brain_question_context.srai = srai
            brain_question_context.sentence = sentence
        if self.authentication is not None:
            if self.authentication.authenticate(clientid) is False:
                if logging.getLogger().isEnabledFor(logging.ERROR):
                    # NOTE(review): "[%s]" has no argument - the clientid is
                    # never interpolated into this log message.
                    logging.error("[%s] failed authentication!")
                return self.authentication.configuration.denied_srai
        conversation = bot.get_conversation(clientid)
        topic_pattern = conversation.property("topic")
        if topic_pattern is None:
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("No Topic pattern default to [*]")
            topic_pattern = "*"
        else:
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("Topic pattern = [%s]", topic_pattern)
        if brain_question_context is not None:
            brain_question_context.topic = topic_pattern
        try:
            that_question = conversation.previous_nth_question(1)
            that_sentence = that_question.current_sentence()
            # If the last response was valid, i.e not none and not empty string, then use
            # that as the that_pattern, otherwise we default to '*' as pattern
            if that_sentence.response is not None and that_sentence.response != '':
                that_pattern = self.parse_last_sentences_from_response(
                    that_sentence.response)
                if logging.getLogger().isEnabledFor(logging.INFO):
                    logging.info("That pattern = [%s]", that_pattern)
            else:
                if logging.getLogger().isEnabledFor(logging.INFO):
                    logging.info("That pattern, no response, default to [*]")
                that_pattern = "*"
        except Exception:
            # No previous question available (e.g. first question asked).
            if logging.getLogger().isEnabledFor(logging.INFO):
                logging.info("No That pattern default to [*]")
            that_pattern = "*"
        if brain_question_context is not None:
            brain_question_context.that = that_pattern
        match_context = self._aiml_parser.match_sentence(
            bot, clientid, sentence,
            topic_pattern=topic_pattern,
            that_pattern=that_pattern)
        if match_context is not None:
            if brain_question_context is not None:
                brain_question_context.match_context = match_context
            template_node = match_context.template_node()
            if logging.getLogger().isEnabledFor(logging.DEBUG):
                logging.debug("AIML Parser evaluating template [%s]",
                              template_node.to_string())
            response = template_node.template.resolve(bot, clientid)
            if brain_question_context is not None:
                brain_question_context.raw_response = response
            if "<oob>" in response:
                response, oob = self.strip_oob(response)
                if oob is not None:
                    oob_response = self.process_oob(bot, clientid, oob)
                    if brain_question_context is not None:
                        brain_question_context.raw_response = response
                        brain_question_context.oob_response = oob_response
                    response = response + " " + oob_response
            return response
        return None
class Brain(object):
    """Aggregates everything a bot knows how to do: the AIML parser, the
    substitution/lookup collections (normals, genders, sets, maps, RDF, ...),
    pre/post processors, security services, OOB handlers, regex templates and
    dynamic collections. Everything is loaded from a BrainConfiguration at
    construction time via load()."""

    def __init__(self, bot, configuration: BrainConfiguration):
        self._bot = bot
        self._configuration = configuration
        self._tokenizer = self.load_tokenizer()
        self._aiml_parser = self.load_aiml_parser()
        # Substitution and lookup collections — all start empty and are
        # populated by load() below.
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        self._person2_collection = PersonCollection()
        self._rdf_collection = RDFCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        # Variables reuse the properties collection type (name/value pairs)
        self._variables_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        # Security services stay None unless configured in load_security_services()
        self._authentication = None
        self._authorisation = None
        # OOB (out-of-band) handlers: tag name -> handler instance, plus a default
        self._default_oob = None
        self._oob = {}
        self._regex_templates = {}
        self._dynamics_collection = DynamicsCollection()
        self.load(self.configuration)

    def ylogger_type(self):
        # Tag used by YLogger when attributing log lines to this object
        return "brain"

    # ------------------------------------------------------------------
    # Read-only accessors over the internal collections and services
    # ------------------------------------------------------------------

    @property
    def id(self):
        return self._configuration.section_name

    @property
    def bot(self):
        return self._bot

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def rdf(self):
        return self._rdf_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def variables(self):
        return self._variables_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    @property
    def authentication(self):
        return self._authentication

    @property
    def authorisation(self):
        return self._authorisation

    @property
    def default_oob(self):
        return self._default_oob

    @property
    def oobs(self):
        return self._oob

    @property
    def regex_templates(self):
        return self._regex_templates

    @property
    def dynamics(self):
        return self._dynamics_collection

    @property
    def tokenizer(self):
        return self._tokenizer

    def load_tokenizer(self):
        """Instantiate the configured tokenizer class, or fall back to the
        default Tokenizer when no classname is configured."""
        if self.configuration is not None and self.configuration.tokenizer.classname is not None:
            YLogger.info(self, "Loading tokenizer from class [%s]", self.configuration.tokenizer.classname)
            tokenizer_class = ClassLoader.instantiate_class(self.configuration.tokenizer.classname)
            return tokenizer_class(self.configuration.tokenizer.split_chars)
        else:
            return Tokenizer(self.configuration.tokenizer.split_chars)

    def load_aiml_parser(self):
        # Parser keeps a back-reference to this brain
        return AIMLParser(self)

    def load_aiml(self, configuration):
        """Parse AIML category files into the pattern graph."""
        YLogger.info(self, "Loading aiml source brain")
        self._aiml_parser.load_aiml(configuration)

    def reload_aimls(self):
        """Empty the pattern graph and re-parse all AIML from configuration."""
        YLogger.info(self, "Loading aiml source brain")
        self._aiml_parser.empty()
        self._aiml_parser.load_aiml(self.configuration)

    def load_binary(self, configuration):
        """Load a previously pickled AIML parser from disk.

        Returns False on success (caller should skip the AIML source load),
        True when the load failed and load_aiml_on_binary_fail is set;
        otherwise re-raises the failure.
        """
        YLogger.info(self, "Loading binary brain from [%s]", configuration.binaries.binary_filename)
        try:
            start = datetime.datetime.now()
            # GC disabled during unpickle to speed up loading large graphs
            gc.disable()
            bin_file = open(configuration.binaries.binary_filename, "rb")
            self._aiml_parser = pickle.load(bin_file)
            # Re-attach the brain back-reference the pickle cannot carry
            self._aiml_parser._brain = self
            gc.enable()
            bin_file.close()
            stop = datetime.datetime.now()
            diff = stop - start
            YLogger.info(self, "Brain load took a total of %.2f sec", diff.total_seconds())
            return False  # Tell caller, load succeeded and skip aiml load
        except Exception as excep:
            YLogger.exception(self, "Failed to load binary file", excep)
            if configuration.binaries.load_aiml_on_binary_fail is True:
                return True  # Tell caller, load failed and to load aiml directly
            else:
                raise excep

    def save_binary(self, configuration):
        """Pickle the current AIML parser to the configured binary file."""
        YLogger.info(self, "Saving binary brain to [%s]", configuration.binaries.binary_filename)
        start = datetime.datetime.now()
        bin_file = open(configuration.binaries.binary_filename, "wb")
        pickle.dump(self._aiml_parser, bin_file)
        bin_file.close()
        stop = datetime.datetime.now()
        diff = stop - start
        YLogger.info(self, "Brain save took a total of %.2f sec", diff.total_seconds())

    def load(self, configuration: BrainConfiguration):
        """Load the full brain: AIML (binary or source), then every collection
        and service, in a fixed order."""
        load_aiml = True
        if self.configuration.binaries.load_binary is True:
            # load_binary() returns False when the binary load succeeded
            load_aiml = self.load_binary(configuration)
        if load_aiml is True:
            self.load_aiml(configuration)
        if configuration.binaries.save_binary is True:
            self.save_binary(configuration)
        YLogger.info(self, "Loading collections")
        self.load_collections(configuration)
        YLogger.info(self, "Loading services")
        self.load_services(configuration)
        YLogger.info(self, "Loading security services")
        self.load_security_services(configuration)
        YLogger.info(self, "Loading oob processors")
        self.load_oob_processors(configuration)
        YLogger.info(self, "Loading regex templates")
        self.load_regex_templates(configuration)
        YLogger.info(self, "Loading dynamics sets, maps and vars")
        self.load_dynamics(configuration)

    def dump_brain_tree(self):
        """Write the pattern graph as a braintree file, if configured."""
        if self.configuration.braintree.file is not None:
            YLogger.debug(self, "Dumping AIML Graph as tree to [%s]", self._configuration.braintree.file)
            client_context = self.bot.client.create_client_context("system")
            self.aiml_parser.pattern_parser.save_braintree(
                client_context,
                self.configuration.braintree.file,
                self.configuration.braintree.content)

    # ------------------------------------------------------------------
    # Collection loaders — each empties then refills its collection from
    # the configured file(s), logging the count, or warns when unconfigured.
    # ------------------------------------------------------------------

    def _load_denormals(self, configuration):
        if configuration.files.denormal is not None:
            self._denormal_collection.empty()
            total = self._denormal_collection.load_from_filename(configuration.files.denormal)
            YLogger.info(self, "Loaded a total of %d denormalisations", total)
        else:
            YLogger.warning(self, "No configuration setting for denormal")

    def _load_normals(self, configuration):
        if configuration.files.normal is not None:
            self._normal_collection.empty()
            total = self._normal_collection.load_from_filename(configuration.files.normal)
            YLogger.info(self, "Loaded a total of %d normalisations", total)
        else:
            YLogger.warning(self, "No configuration setting for normal")

    def _load_genders(self, configuration):
        if configuration.files.gender is not None:
            self._gender_collection.empty()
            total = self._gender_collection.load_from_filename(configuration.files.gender)
            YLogger.info(self, "Loaded a total of %d genderisations", total)
        else:
            YLogger.warning(self, "No configuration setting for gender")

    def _load_persons(self, configuration):
        if configuration.files.person is not None:
            self._person_collection.empty()
            total = self._person_collection.load_from_filename(configuration.files.person)
            YLogger.info(self, "Loaded a total of %d persons", total)
        else:
            YLogger.warning(self, "No configuration setting for person")

    def _load_person2s(self, configuration):
        if configuration.files.person2 is not None:
            self._person2_collection.empty()
            total = self._person2_collection.load_from_filename(configuration.files.person2)
            YLogger.info(self, "Loaded a total of %d person2s", total)
        else:
            YLogger.warning(self, "No configuration setting for person2")

    def _load_properties(self, configuration):
        if configuration.files.properties is not None:
            self._properties_collection.empty()
            total = self._properties_collection.load_from_filename(configuration.files.properties)
            YLogger.info(self, "Loaded a total of %d properties", total)
        else:
            YLogger.warning(self, "No configuration setting for properties")

    def _load_variables(self, configuration):
        if configuration.files.variables is not None:
            self._variables_collection.empty()
            total = self._variables_collection.load_from_filename(configuration.files.variables)
            YLogger.info(self, "Loaded a total of %d variables", total)
        else:
            YLogger.warning(self, "No configuration setting for variables")

    def _load_maps(self, configuration):
        # NOTE: unlike the other loaders, no None-guard on map_files here
        self._maps_collection.empty()
        total = self._maps_collection.load(configuration.files.map_files)
        YLogger.info(self, "Loaded a total of %d maps files", total)

    def reload_map(self, mapname):
        """Reload a single named map from its backing file, if known."""
        if self._maps_collection.contains(mapname):
            filename = self._maps_collection.filename(mapname)
            self._maps_collection.reload_file(filename)

    def _load_sets(self, configuration):
        # NOTE: unlike the other loaders, no None-guard on set_files here
        self._sets_collection.empty()
        total = self._sets_collection.load(configuration.files.set_files)
        YLogger.info(self, "Loaded a total of %d sets files", total)

    def reload_set(self, setname):
        """Reload a single named set from its backing file, if known."""
        if self._sets_collection.contains(setname):
            filename = self._sets_collection.filename(setname)
            self._sets_collection.reload_file(filename)

    def _load_rdfs(self, configuration):
        """Prefer rdf_files; fall back to a single triples file."""
        if configuration.files.rdf_files is not None and configuration.files.rdf_files.files:
            self._rdf_collection.empty()
            total = self._rdf_collection.load(configuration.files.rdf_files)
            YLogger.info(self, "Loaded a total of %d rdf files", total)
        elif configuration.files.triples is not None:
            self._rdf_collection.empty()
            total = self._rdf_collection.load_from_filename(configuration.files.triples)
            YLogger.info(self, "Loaded a total of %d triples", total)
        else:
            YLogger.warning(self, "No configuration setting for triples")

    def reload_rdf(self, rdfname):
        """Reload a single named RDF source, if known."""
        if self._rdf_collection.contains(rdfname):
            self._rdf_collection.reload_file(rdfname)

    def _load_preprocessors(self, configuration):
        if configuration.files.preprocessors is not None:
            self._preprocessors.empty()
            total = self._preprocessors.load(configuration.files.preprocessors)
            YLogger.info(self, "Loaded a total of %d pre processors", total)
        else:
            YLogger.warning(self, "No configuration setting for pre processors")

    def _load_postprocessors(self, configuration):
        if configuration.files.postprocessors is not None:
            self._postprocessors.empty()
            total = self._postprocessors.load(configuration.files.postprocessors)
            YLogger.info(self, "Loaded a total of %d post processors", total)
        else:
            YLogger.warning(self, "No configuration setting for post processors")

    def load_collections(self, configuration):
        """Load every collection in a fixed order."""
        self._load_denormals(configuration)
        self._load_normals(configuration)
        self._load_genders(configuration)
        self._load_persons(configuration)
        self._load_person2s(configuration)
        self._load_properties(configuration)
        self._load_variables(configuration)
        self._load_rdfs(configuration)
        self._load_sets(configuration)
        self._load_maps(configuration)
        self._load_preprocessors(configuration)
        self._load_postprocessors(configuration)

    def load_services(self, configuration):
        ServiceFactory.preload_services(configuration.services)

    def load_security_services(self, configuration):
        """Instantiate optional authentication/authorisation services from
        their configured classnames; instantiation failures are logged and
        leave the corresponding service as None."""
        if configuration.security is not None:
            if configuration.security.authentication is not None:
                if configuration.security.authentication.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            configuration.security.authentication.classname)
                        self._authentication = classobject(configuration.security.authentication)
                    except Exception as excep:
                        YLogger.exception(self, "Failed to load security services", excep)
            else:
                YLogger.debug(self, "No authentication configuration defined")
            if configuration.security.authorisation is not None:
                if configuration.security.authorisation.classname is not None:
                    try:
                        classobject = ClassLoader.instantiate_class(
                            configuration.security.authorisation.classname)
                        self._authorisation = classobject(configuration.security.authorisation)
                    except Exception as excep:
                        YLogger.exception(self, "Failed to instatiate authorisation class", excep)
            else:
                YLogger.debug(self, "No authorisation configuration defined")
        else:
            YLogger.debug(self, "No security configuration defined, running open...")

    def load_dynamics(self, configuration):
        if configuration.dynamics is not None:
            self._dynamics_collection.load_from_configuration(configuration.dynamics)
        else:
            YLogger.debug(self, "No dynamics configuration defined...")

    def pre_process_question(self, client_context, question):
        """Run the question through all configured pre-processors."""
        return self.preprocessors.process(client_context, question)

    def load_oob_processors(self, configuration):
        """Instantiate the default OOB handler and one handler per named OOB
        tag; each failure is logged and that handler is skipped."""
        if configuration.oob is not None:
            if configuration.oob.default() is not None:
                try:
                    YLogger.info(self, "Loading default oob")
                    classobject = ClassLoader.instantiate_class(configuration.oob.default().classname)
                    self._default_oob = classobject()
                except Exception as excep:
                    YLogger.exception(self, "Failed to load OOB Processor", excep)
            for oob_name in configuration.oob.oobs():
                try:
                    YLogger.info(self, "Loading oob: %s", oob_name)
                    classobject = ClassLoader.instantiate_class(configuration.oob.oob(oob_name).classname)
                    self._oob[oob_name] = classobject()
                except Exception as excep:
                    YLogger.exception(self, "Failed to load OOB", excep)

    def load_regex_templates(self, configuration):
        """Compile the configured name->pattern pairs into case-insensitive
        regexes; invalid patterns are logged and skipped."""
        if configuration.files.regex_templates is not None:
            collection = PropertiesCollection()
            total = collection.load_from_filename(configuration.files.regex_templates)
            YLogger.info(self, "Loaded a total of %d regex templates", total)
            self._regex_templates.clear()
            for pair in collection.pairs:
                name = pair[0]
                pattern = pair[1]
                try:
                    self._regex_templates[name] = re.compile(pattern, re.IGNORECASE)
                except Exception:
                    YLogger.error(self, "Invalid regex template [%s]", pattern)

    def regex_template(self, name):
        """Return the compiled regex for name, or None when unknown."""
        if name in self._regex_templates:
            return self._regex_templates[name]
        return None

    def strip_oob(self, response):
        """Split an <oob>...</oob> section out of a response string.

        Returns (response_without_oob, oob_text) or (response, None) when no
        OOB section is present.
        """
        match = re.compile(r"(.*)(<\s*oob\s*>.*<\/\s*oob\s*>)(.*)")
        groupings = match.match(response)
        if groupings is not None:
            front = groupings.group(1).strip()
            back = groupings.group(3).strip()
            response = ""
            if front != "":
                response = front + " "
            response += back
            oob = groupings.group(2)
            return response, oob
        return response, None

    def process_oob(self, client_context, oob_command):
        """Dispatch the XML OOB command to its registered handler, falling
        back to the default handler; returns "" when not an <oob> element."""
        oob_content = ET.fromstring(oob_command)
        if oob_content.tag == 'oob':
            for child in oob_content.findall('./'):
                if child.tag in self._oob:
                    oob_class = self._oob[child.tag]
                    return oob_class.process_out_of_bounds(client_context, child)
                return self._default_oob.process_out_of_bounds(client_context, child)
        return ""

    def post_process_response(self, client_context, response: str):
        """Run the response through all configured post-processors."""
        return self.postprocessors.process(client_context, response)

    def failed_authentication(self, client_context):
        """Return the access-denied response for an unauthenticated client."""
        # NOTE(review): the "%s" placeholder has no argument — the client id
        # was presumably meant to be logged here; confirm and fix the call.
        YLogger.error(client_context, "[%s] failed authentication!")
        # If we have an SRAI defined, then use that
        if self.authentication.configuration.denied_srai is not None:
            match_context = self._aiml_parser.match_sentence(
                client_context,
                Sentence(self._bot.brain.tokenizer, self.authentication.configuration.denied_srai),
                topic_pattern="*",
                that_pattern="*")
            # If the SRAI matched then return the result
            if match_context is not None:
                return self.resolve_matched_template(client_context, match_context)
        # Otherswise return the static text, which is either
        # User defined via config.yaml
        # Or use the default value BrainSecurityConfiguration.DEFAULT_ACCESS_DENIED
        return self.authentication.configuration.denied_text

    def authenticate_user(self, client_context):
        """Return the denied response when authentication fails, else None."""
        if self.authentication is not None:
            if self.authentication.authenticate(client_context) is False:
                return self.failed_authentication(client_context)
        return None

    def resolve_matched_template(self, client_context, match_context):
        """Resolve the matched template to text, handling any embedded OOB."""
        template_node = match_context.template_node()
        YLogger.debug(client_context, "AIML Parser evaluating template [%s]", template_node.to_string())
        response = template_node.template.resolve(client_context)
        if "<oob>" in response:
            response, oob = self.strip_oob(response)
            if oob is not None:
                oob_response = self.process_oob(client_context, oob)
                response = response + " " + oob_response
        return response

    def ask_question(self, client_context, sentence, srai=False):
        """Match sentence against the pattern graph and return the resolved
        response text, or None when nothing matched. Returns the denied
        response immediately when authentication fails."""
        client_context.brain = self
        authenticated = self.authenticate_user(client_context)
        if authenticated is not None:
            return authenticated
        conversation = client_context.bot.get_conversation(client_context)
        topic_pattern = conversation.get_topic_pattern(client_context)
        that_pattern = conversation.get_that_pattern(client_context, srai)
        match_context = self._aiml_parser.match_sentence(client_context, sentence,
                                                         topic_pattern=topic_pattern,
                                                         that_pattern=that_pattern)
        if match_context is not None:
            return self.resolve_matched_template(client_context, match_context)
        return None
class Brain(object):
    """Earlier Brain variant: owns the AIML parser and the word/lookup
    collections (including predicates, pronouns and triples), loading all of
    them from a BrainConfiguration at construction time."""

    def __init__(self, configuration: BrainConfiguration):
        self._configuration = configuration
        self._aiml_parser = AIMLParser()
        # Collections start empty; populated by load() below
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        self._person2_collection = PersonCollection()
        self._predicates_collection = PredicatesCollection()
        self._pronouns_collection = PronounsCollection()
        self._triples_collection = TriplesCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        self.load(self._configuration)

    # --- read-only accessors ---

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def predicates(self):
        return self._predicates_collection

    # NOTE(review): "pronounds" looks like a typo for "pronouns", but renaming
    # would break existing callers — confirm usage before changing.
    @property
    def pronounds(self):
        return self._pronouns_collection

    @property
    def triples(self):
        return self._triples_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    def load(self, brain_configuration: BrainConfiguration):
        """Load AIML sources, then all collections, then services."""
        self._aiml_parser.load_aiml(brain_configuration)
        self.load_collections(brain_configuration)
        self.load_services(brain_configuration)

    # ------------------------------------------------------------------
    # Collection loaders — each fills its collection from the configured
    # file, logging the count, or warns when unconfigured.
    # ------------------------------------------------------------------

    def _load_denormals(self, brain_configuration):
        if brain_configuration.denormal is not None:
            total = self._denormal_collection.load_from_filename(
                brain_configuration.denormal)
            logging.info("Loaded a total of %d denormalisations", total)
        else:
            logging.warning("No configuration setting for denormal")

    def _load_normals(self, brain_configuration):
        if brain_configuration.normal is not None:
            total = self._normal_collection.load_from_filename(
                brain_configuration.normal)
            logging.info("Loaded a total of %d normalisations", total)
        else:
            logging.warning("No configuration setting for normal")

    def _load_genders(self, brain_configuration):
        if brain_configuration.gender is not None:
            total = self._gender_collection.load_from_filename(
                brain_configuration.gender)
            logging.info("Loaded a total of %d genderisations", total)
        else:
            logging.warning("No configuration setting for gender")

    def _load_persons(self, brain_configuration):
        if brain_configuration.person is not None:
            total = self._person_collection.load_from_filename(
                brain_configuration.person)
            logging.info("Loaded a total of %d persons", total)
        else:
            logging.warning("No configuration setting for person")

    def _load_person2s(self, brain_configuration):
        if brain_configuration.person2 is not None:
            total = self._person2_collection.load_from_filename(
                brain_configuration.person2)
            logging.info("Loaded a total of %d person2s", total)
        else:
            logging.warning("No configuration setting for person2")

    def _load_predicates(self, brain_configuration):
        if brain_configuration.predicates is not None:
            total = self._predicates_collection.load_from_filename(
                brain_configuration.predicates)
            logging.info("Loaded a total of %d predicates", total)
        else:
            logging.warning("No configuration setting for predicates")

    def _load_pronouns(self, brain_configuration):
        if brain_configuration.pronouns is not None:
            total = self._pronouns_collection.load_from_filename(
                brain_configuration.pronouns)
            logging.info("Loaded a total of %d pronouns", total)
        else:
            logging.warning("No configuration setting for pronouns")

    def _load_properties(self, brain_configuration):
        if brain_configuration.properties is not None:
            total = self._properties_collection.load_from_filename(
                brain_configuration.properties)
            logging.info("Loaded a total of %d properties", total)
        else:
            logging.warning("No configuration setting for properties")

    def _load_triples(self, brain_configuration):
        if brain_configuration.triples is not None:
            # NOTE(review): this loads triples into the *properties* collection,
            # not self._triples_collection — looks like a copy/paste bug; the
            # triples collection is never populated. Confirm before fixing.
            total = self._properties_collection.load_from_filename(
                brain_configuration.triples)
            logging.info("Loaded a total of %d triples", total)
        else:
            logging.warning("No configuration setting for triples")

    def _load_sets(self, brain_configuration):
        if brain_configuration.set_files is not None:
            total = self._sets_collection.load(brain_configuration.set_files)
            logging.info("Loaded a total of %d sets files", total)
        else:
            logging.warning("No configuration setting for set files")

    def _load_maps(self, brain_configuration):
        if brain_configuration.map_files is not None:
            total = self._maps_collection.load(brain_configuration.map_files)
            logging.info("Loaded a total of %d maps files", total)
        else:
            logging.warning("No configuration setting for map files")

    def _load_preprocessors(self, brain_configuration):
        if brain_configuration.preprocessors is not None:
            total = self._preprocessors.load(brain_configuration.preprocessors)
            logging.info("Loaded a total of %d pre processors", total)
        else:
            logging.warning("No configuration setting for pre processors")

    def _load_postprocessors(self, brain_configuration):
        if brain_configuration.postprocessors is not None:
            total = self._postprocessors.load(
                brain_configuration.postprocessors)
            logging.info("Loaded a total of %d post processors", total)
        else:
            logging.warning("No configuration setting for post processors")

    def load_collections(self, brain_configuration):
        """Load every collection in a fixed order."""
        self._load_denormals(brain_configuration)
        self._load_normals(brain_configuration)
        self._load_genders(brain_configuration)
        self._load_persons(brain_configuration)
        self._load_person2s(brain_configuration)
        self._load_predicates(brain_configuration)
        self._load_pronouns(brain_configuration)
        self._load_properties(brain_configuration)
        self._load_triples(brain_configuration)
        self._load_sets(brain_configuration)
        self._load_maps(brain_configuration)
        self._load_preprocessors(brain_configuration)
        self._load_postprocessors(brain_configuration)

    def load_services(self, brain_configuration):
        ServiceFactory.preload_services(brain_configuration.services)

    def pre_process_question(self, bot, clientid, question):
        """Run the question through all configured pre-processors."""
        return self.preprocessors.process(bot, clientid, question)

    def ask_question(self, bot, clientid, sentence) -> str:
        """Match sentence against the pattern graph using the conversation's
        topic and previous-response ("that") context; return the resolved
        template text or None when nothing matched."""
        conversation = bot.get_conversation(clientid)
        topic_pattern = conversation.predicate("topic")
        if topic_pattern is None:
            logging.info("No Topic pattern default to [*]")
            topic_pattern = "*"
        else:
            logging.info("Topic pattern = [%s]", topic_pattern)
        try:
            that_question = conversation.nth_question(2)
            that_sentence = that_question.current_sentence()
            # If the last response was valid, i.e not none and not empty string, then use
            # that as the that_pattern, otherwise we default to '*' as pattern
            if that_sentence.response is not None and that_sentence.response != '':
                that_pattern = that_sentence.response
                logging.info("That pattern = [%s]", that_pattern)
            else:
                logging.info("That pattern, no response, default to [*]")
                that_pattern = "*"
        except Exception:
            # No previous question available — fall back to wildcard
            logging.info("No That pattern default to [*]")
            that_pattern = "*"
        match_context = self._aiml_parser.match_sentence(
            bot, clientid, sentence,
            topic_pattern=topic_pattern,
            that_pattern=that_pattern)
        if match_context is not None:
            template_node = match_context.template_node()
            logging.debug("AIML Parser evaluating template [%s]", template_node.to_string())
            return template_node.template.resolve(bot, clientid)
        return None

    def post_process_response(self, bot, clientid, response: str):
        """Run the response through all configured post-processors."""
        return self.postprocessors.process(bot, clientid, response)

    def dump_tree(self):
        # Debug helper: print the whole pattern graph
        self._aiml_parser.pattern_parser.root.dump(tabs="")

    def write_learnf_to_file(self, bot, clientid, pattern, topic, that, template):
        """Append a learnf category (pattern/topic/that/template) to the
        learnf AIML file, creating the file with an empty <aiml> root first
        if it does not exist."""
        learnf_path = "%s/learnf%s" % (
            self._configuration.aiml_files.files,
            self._configuration.aiml_files.extension)
        logging.debug("Writing learnf to %s", learnf_path)
        if os.path.isfile(learnf_path) is False:
            file = open(learnf_path, "w+")
            file.write('<?xml version="1.0" encoding="UTF-8"?>\n')
            file.write('<aiml>\n')
            file.write('</aiml>\n')
            file.close()
        tree = ET.parse(learnf_path)
        root = tree.getroot()
        # Add our new element
        child = ET.Element("category")
        child.append(pattern)
        child.append(topic)
        child.append(that)
        child.append(template.xml_tree(bot, clientid))
        root.append(child)
        tree.write(learnf_path, method="xml")
class Brain(object):
    """Oldest Brain variant: AIML parser plus word/lookup collections, with
    collection loading done inline in load_collections() rather than via
    per-collection helper methods."""

    def __init__(self, configuration: BrainConfiguration):
        self._configuration = configuration
        self._aiml_parser = AIMLParser()
        # Collections start empty; populated by load() below
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        self._person2_collection = PersonCollection()
        self._predicates_collection = PredicatesCollection()
        self._pronouns_collection = PronounsCollection()
        self._triples_collection = TriplesCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        self.load(self._configuration)

    # --- read-only accessors ---

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def predicates(self):
        return self._predicates_collection

    # NOTE(review): "pronounds" looks like a typo for "pronouns", but renaming
    # would break existing callers — confirm usage before changing.
    @property
    def pronounds(self):
        return self._pronouns_collection

    @property
    def triples(self):
        return self._triples_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    def load(self, brain_configuration: BrainConfiguration):
        """Load AIML sources, then all collections."""
        self._aiml_parser.load_aiml(brain_configuration)
        self.load_collections(brain_configuration)

    def load_collections(self, brain_configuration):
        """Load every collection inline from its configured file, logging the
        count loaded or warning when a setting is absent."""
        if brain_configuration.denormal is not None:
            total = self._denormal_collection.load_from_filename(
                brain_configuration.denormal)
            logging.info("Loaded a total of %d denormalisations" % (total))
        else:
            logging.warning("No configuration setting for denormal")
        if brain_configuration.normal is not None:
            total = self._normal_collection.load_from_filename(
                brain_configuration.normal)
            logging.info("Loaded a total of %d normalisations" % (total))
        else:
            logging.warning("No configuration setting for normal")
        if brain_configuration.gender is not None:
            total = self._gender_collection.load_from_filename(
                brain_configuration.gender)
            logging.info("Loaded a total of %d genderisations" % (total))
        else:
            logging.warning("No configuration setting for gender")
        if brain_configuration.person is not None:
            total = self._person_collection.load_from_filename(
                brain_configuration.person)
            logging.info("Loaded a total of %d persons" % (total))
        else:
            logging.warning("No configuration setting for person")
        if brain_configuration.person2 is not None:
            total = self._person2_collection.load_from_filename(
                brain_configuration.person2)
            logging.info("Loaded a total of %d person2s" % (total))
        else:
            logging.warning("No configuration setting for person2")
        if brain_configuration.predicates is not None:
            total = self._predicates_collection.load_from_filename(
                brain_configuration.predicates)
            logging.info("Loaded a total of %d predicates" % (total))
        else:
            logging.warning("No configuration setting for predicates")
        if brain_configuration.pronouns is not None:
            total = self._pronouns_collection.load_from_filename(
                brain_configuration.pronouns)
            logging.info("Loaded a total of %d pronouns" % (total))
        else:
            logging.warning("No configuration setting for pronouns")
        if brain_configuration.properties is not None:
            total = self._properties_collection.load_from_filename(
                brain_configuration.properties)
            logging.info("Loaded a total of %d properties" % (total))
        else:
            logging.warning("No configuration setting for properties")
        if brain_configuration.triples is not None:
            # NOTE(review): triples are loaded into the *properties* collection,
            # not self._triples_collection — looks like a copy/paste bug; the
            # triples collection is never populated. Confirm before fixing.
            total = self._properties_collection.load_from_filename(
                brain_configuration.triples)
            logging.info("Loaded a total of %d triples" % (total))
        else:
            logging.warning("No configuration setting for triples")
        if brain_configuration.set_files is not None:
            total = self._sets_collection.load(brain_configuration.set_files)
            logging.info("Loaded a total of %d sets files" % (total))
        else:
            logging.warning("No configuration setting for set files")
        if brain_configuration.map_files is not None:
            total = self._maps_collection.load(brain_configuration.map_files)
            logging.info("Loaded a total of %d maps files" % (total))
        else:
            logging.warning("No configuration setting for map files")
        if brain_configuration.preprocessors is not None:
            total = self._preprocessors.load(brain_configuration.preprocessors)
            logging.info("Loaded a total of %d pre processors" % (total))
        else:
            logging.warning("No configuration setting for pre processors")
        if brain_configuration.postprocessors is not None:
            total = self._postprocessors.load(
                brain_configuration.postprocessors)
            logging.info("Loaded a total of %d post processors" % (total))
        else:
            logging.warning("No configuration setting for post processors")

    def pre_process_question(self, question):
        """Run the question through all configured pre-processors."""
        return self.preprocessors.process(question)

    def ask_question(self, bot, clientid, sentence) -> str:
        """Match sentence against the pattern graph using the conversation's
        topic and previous-sentence context; returns the raw match result
        from the parser (may be None)."""
        conversation = bot.get_conversation(clientid)
        # NOTE(review): bare except catches everything (including
        # KeyboardInterrupt/SystemExit); a narrower exception type would be
        # safer — confirm which exceptions predicate()/nth_question() raise.
        try:
            topic_pattern = conversation.predicate("topic")
        except:
            topic_pattern = "*"
        try:
            that_question = conversation.nth_question(2)
            that_sentence = that_question.current_sentence()
            that_pattern = that_sentence.text()
        except:
            that_pattern = "*"
        return self._aiml_parser.match_sentence(bot, clientid, sentence,
                                                topic_pattern=topic_pattern,
                                                that_pattern=that_pattern)

    def post_process_response(self, response: str):
        """Run the response through all configured post-processors."""
        return self.postprocessors.process(response)

    def dump_tree(self):
        # Debug helper: print the whole pattern graph
        self._aiml_parser.pattern_parser.root.dump(tabs="")

    def write_learnf_to_file(self, bot, clientid, pattern, topic, that, template):
        """Append a learnf category (pattern/topic/that/template) to the
        learnf AIML file, creating the file with an empty <aiml> root first
        if it does not exist."""
        learnf_path = "%s/learnf%s" % (
            self._configuration.aiml_files.files,
            self._configuration.aiml_files.extension)
        logging.debug("Writing learnf to %s" % learnf_path)
        import os.path
        if os.path.isfile(learnf_path) is False:
            file = open(learnf_path, "w+")
            file.write('<?xml version="1.0" encoding="UTF-8"?>\n')
            file.write('<aiml>\n')
            file.write('</aiml>\n')
            file.close()
        tree = ET.parse(learnf_path)
        root = tree.getroot()
        # Add our new element
        child = ET.Element("category")
        child.append(pattern)
        child.append(topic)
        child.append(that)
        child.append(template.xml_tree(bot, clientid))
        root.append(child)
        tree.write(learnf_path, method="xml")
class Brain(object):
    """Aggregates the AIML parser, the word/substitution/set/map collections
    and the pre/post processor chains, and answers questions for a
    bot/client pair.

    Construction immediately loads everything from the supplied
    configuration via :meth:`load`.
    """

    # Annotations are quoted (PEP 563 style) so the class can be defined
    # even when BrainConfiguration is only needed for type checking.
    def __init__(self, configuration: "BrainConfiguration"):
        self._configuration = configuration
        self._aiml_parser = AIMLParser()
        self._denormal_collection = DenormalCollection()
        self._normal_collection = NormalCollection()
        self._gender_collection = GenderCollection()
        self._person_collection = PersonCollection()
        self._person2_collection = PersonCollection()
        self._predicates_collection = PredicatesCollection()
        self._pronouns_collection = PronounsCollection()
        self._triples_collection = TriplesCollection()
        self._sets_collection = SetCollection()
        self._maps_collection = MapCollection()
        self._properties_collection = PropertiesCollection()
        self._preprocessors = ProcessorLoader()
        self._postprocessors = ProcessorLoader()
        self.load(self._configuration)

    @property
    def configuration(self):
        return self._configuration

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def denormals(self):
        return self._denormal_collection

    @property
    def normals(self):
        return self._normal_collection

    @property
    def genders(self):
        return self._gender_collection

    @property
    def persons(self):
        return self._person_collection

    @property
    def person2s(self):
        return self._person2_collection

    @property
    def predicates(self):
        return self._predicates_collection

    @property
    def pronounds(self):
        # Kept (typo and all) for backward compatibility; prefer 'pronouns'.
        return self._pronouns_collection

    @property
    def pronouns(self):
        # Correctly-spelled alias for the 'pronounds' property above.
        return self._pronouns_collection

    @property
    def triples(self):
        return self._triples_collection

    @property
    def sets(self):
        return self._sets_collection

    @property
    def maps(self):
        return self._maps_collection

    @property
    def properties(self):
        return self._properties_collection

    @property
    def preprocessors(self):
        return self._preprocessors

    @property
    def postprocessors(self):
        return self._postprocessors

    def load(self, brain_configuration: "BrainConfiguration"):
        """Load the AIML grammar, every collection, and the services."""
        self._aiml_parser.load_aiml(brain_configuration)
        self.load_collections(brain_configuration)
        self.load_services(brain_configuration)

    # Each _load_* helper follows the same shape: load the collection from
    # its configuration entry when present, otherwise log a warning.

    def _load_denormals(self, brain_configuration):
        if brain_configuration.denormal is not None:
            total = self._denormal_collection.load_from_filename(brain_configuration.denormal)
            logging.info("Loaded a total of %d denormalisations", total)
        else:
            logging.warning("No configuration setting for denormal")

    def _load_normals(self, brain_configuration):
        if brain_configuration.normal is not None:
            total = self._normal_collection.load_from_filename(brain_configuration.normal)
            logging.info("Loaded a total of %d normalisations", total)
        else:
            logging.warning("No configuration setting for normal")

    def _load_genders(self, brain_configuration):
        if brain_configuration.gender is not None:
            total = self._gender_collection.load_from_filename(brain_configuration.gender)
            logging.info("Loaded a total of %d genderisations", total)
        else:
            logging.warning("No configuration setting for gender")

    def _load_persons(self, brain_configuration):
        if brain_configuration.person is not None:
            total = self._person_collection.load_from_filename(brain_configuration.person)
            logging.info("Loaded a total of %d persons", total)
        else:
            logging.warning("No configuration setting for person")

    def _load_person2s(self, brain_configuration):
        if brain_configuration.person2 is not None:
            total = self._person2_collection.load_from_filename(brain_configuration.person2)
            logging.info("Loaded a total of %d person2s", total)
        else:
            logging.warning("No configuration setting for person2")

    def _load_predicates(self, brain_configuration):
        if brain_configuration.predicates is not None:
            total = self._predicates_collection.load_from_filename(brain_configuration.predicates)
            logging.info("Loaded a total of %d predicates", total)
        else:
            logging.warning("No configuration setting for predicates")

    def _load_pronouns(self, brain_configuration):
        if brain_configuration.pronouns is not None:
            total = self._pronouns_collection.load_from_filename(brain_configuration.pronouns)
            logging.info("Loaded a total of %d pronouns", total)
        else:
            logging.warning("No configuration setting for pronouns")

    def _load_properties(self, brain_configuration):
        if brain_configuration.properties is not None:
            total = self._properties_collection.load_from_filename(brain_configuration.properties)
            logging.info("Loaded a total of %d properties", total)
        else:
            logging.warning("No configuration setting for properties")

    def _load_triples(self, brain_configuration):
        if brain_configuration.triples is not None:
            # Bug fix: this previously loaded the triples file into
            # self._properties_collection, leaving the triples collection
            # permanently empty.
            total = self._triples_collection.load_from_filename(brain_configuration.triples)
            logging.info("Loaded a total of %d triples", total)
        else:
            logging.warning("No configuration setting for triples")

    def _load_sets(self, brain_configuration):
        if brain_configuration.set_files is not None:
            total = self._sets_collection.load(brain_configuration.set_files)
            logging.info("Loaded a total of %d sets files", total)
        else:
            logging.warning("No configuration setting for set files")

    def _load_maps(self, brain_configuration):
        if brain_configuration.map_files is not None:
            total = self._maps_collection.load(brain_configuration.map_files)
            logging.info("Loaded a total of %d maps files", total)
        else:
            logging.warning("No configuration setting for map files")

    def _load_preprocessors(self, brain_configuration):
        if brain_configuration.preprocessors is not None:
            total = self._preprocessors.load(brain_configuration.preprocessors)
            logging.info("Loaded a total of %d pre processors", total)
        else:
            logging.warning("No configuration setting for pre processors")

    def _load_postprocessors(self, brain_configuration):
        if brain_configuration.postprocessors is not None:
            total = self._postprocessors.load(brain_configuration.postprocessors)
            logging.info("Loaded a total of %d post processors", total)
        else:
            logging.warning("No configuration setting for post processors")

    def load_collections(self, brain_configuration):
        """Load every collection, in a fixed order, from configuration."""
        self._load_denormals(brain_configuration)
        self._load_normals(brain_configuration)
        self._load_genders(brain_configuration)
        self._load_persons(brain_configuration)
        self._load_person2s(brain_configuration)
        self._load_predicates(brain_configuration)
        self._load_pronouns(brain_configuration)
        self._load_properties(brain_configuration)
        self._load_triples(brain_configuration)
        self._load_sets(brain_configuration)
        self._load_maps(brain_configuration)
        self._load_preprocessors(brain_configuration)
        self._load_postprocessors(brain_configuration)

    def load_services(self, brain_configuration):
        """Pre-load the external services declared in configuration."""
        ServiceFactory.preload_services(brain_configuration.services)

    def pre_process_question(self, bot, clientid, question):
        # Run the question through the pre-processor chain before matching.
        return self.preprocessors.process(bot, clientid, question)

    def ask_question(self, bot, clientid, sentence) -> str:
        """Match sentence against the grammar and resolve the template.

        The topic pattern comes from the conversation's 'topic' predicate
        and the that pattern from the previous response (punctuation
        stripped); either defaults to '*' when unavailable. Returns the
        resolved template text, or None when nothing matched.
        """
        conversation = bot.get_conversation(clientid)

        topic_pattern = conversation.predicate("topic")
        if topic_pattern is None:
            logging.info("No Topic pattern default to [*]")
            topic_pattern = "*"
        else:
            logging.info("Topic pattern = [%s]", topic_pattern)

        try:
            that_question = conversation.nth_question(2)
            that_sentence = that_question.current_sentence()
            # If the last response was valid, i.e not none and not empty string, then use
            # that as the that_pattern, otherwise we default to '*' as pattern
            if that_sentence.response is not None and that_sentence.response != '':
                that_pattern = TextUtils.strip_all_punctuation(that_sentence.response)
                logging.info("That pattern = [%s]", that_pattern)
            else:
                logging.info("That pattern, no response, default to [*]")
                that_pattern = "*"
        except Exception:
            logging.info("No That pattern default to [*]")
            that_pattern = "*"

        match_context = self._aiml_parser.match_sentence(bot, clientid, sentence,
                                                         topic_pattern=topic_pattern,
                                                         that_pattern=that_pattern)
        if match_context is not None:
            template_node = match_context.template_node()
            logging.debug("AIML Parser evaluating template [%s]", template_node.to_string())
            return template_node.template.resolve(bot, clientid)
        return None

    def post_process_response(self, bot, clientid, response: str):
        # Run the raw response through the post-processor chain.
        return self.postprocessors.process(bot, clientid, response)

    def dump_tree(self):
        # Dump the parsed AIML pattern graph from its root node.
        self._aiml_parser.pattern_parser.root.dump(tabs="")

    def write_learnf_to_file(self, bot, clientid, pattern, topic, that, template):
        """Append a learnt category (pattern/topic/that/template) to the
        learnf AIML file, creating an empty aiml document first if absent.
        """
        learnf_path = "%s/learnf%s" % (self._configuration.aiml_files.files,
                                       self._configuration.aiml_files.extension)
        logging.debug("Writing learnf to %s", learnf_path)

        if os.path.isfile(learnf_path) is False:
            # Seed an empty <aiml/> document so ET.parse below succeeds; the
            # with-block guarantees the handle is closed even if a write fails
            # (the original open/close pair leaked it on error).
            with open(learnf_path, "w+") as learnf_file:
                learnf_file.write('<?xml version="1.0" encoding="UTF-8"?>\n')
                learnf_file.write('<aiml>\n')
                learnf_file.write('</aiml>\n')

        tree = ET.parse(learnf_path)
        root = tree.getroot()

        # Add our new element
        child = ET.Element("category")
        child.append(pattern)
        child.append(topic)
        child.append(that)
        child.append(template.xml_tree(bot, clientid))
        root.append(child)

        tree.write(learnf_path, method="xml")