def test_load(self):
    """Load the normal collection from a file store and verify string and
    pattern normalisation lookups.

    Fix: replace deprecated ``assertEquals`` alias (removed in Python 3.12)
    with ``assertEqual``.
    """
    storage_factory = StorageFactory()

    file_store_config = FileStorageConfiguration()
    # NOTE(review): sibling tests pass format="text" — confirm expected kwarg name
    file_store_config._normal_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "normal.txt",
        fileformat="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    storage_engine = FileStorageEngine(file_store_config)
    storage_factory._storage_engines[StorageFactory.NORMAL] = storage_engine
    storage_factory._store_to_engine_map[StorageFactory.NORMAL] = storage_engine

    collection = NormalCollection()
    self.assertIsNotNone(collection)
    self.assertTrue(collection.load(storage_factory))

    self.assertEqual(collection.normalise_string("keithsterling.COM"),
                     "keithsterling dot com")
    # Known entry returns the compiled pattern / replacement pair.
    self.assertEqual([
        re.compile('(^\\.COM|\\.COM|\\.COM$)', re.IGNORECASE), ' DOT COM '
    ], collection.normalise(".COM"))
    # Unknown entry yields None.
    self.assertEqual(None, collection.normalise(".XXX"))
def test_load_duplicates(self):
    """Round-trip duplicate records through the file duplicates store,
    then empty the store and remove the working directory."""
    storage_config = FileStorageConfiguration()
    duplicates_dir = os.path.dirname(__file__) + os.sep + "duplicates"
    duplicates_file = duplicates_dir + os.sep + "duplicates.txt"
    storage_config.duplicates_storage._dirs = [duplicates_file]
    storage_config.duplicates_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    duplicates_store = FileDuplicatesStore(file_engine)

    # Start from a clean directory so the save below creates it fresh.
    storage_dir = duplicates_store._get_dir_from_path(
        duplicates_store._get_storage_path())
    if os.path.exists(storage_dir):
        shutil.rmtree(storage_dir)
    self.assertFalse(os.path.exists(storage_dir))

    duplicates_store.empty()

    rows = [["aiml1.xml", "10", "20", "1", "5", "Duplicate_1"]]
    duplicates_store.save_duplicates(rows)
    self.assertTrue(os.path.exists(duplicates_store._get_storage_path()))

    loaded_info = duplicates_store.load_duplicates()
    self.assertIsNotNone(loaded_info)
    self.assertIsNotNone(loaded_info['duplicates'])
    first_entry = loaded_info['duplicates'][0]
    self.assertEqual("aiml1.xml", first_entry['file'])

    duplicates_store.empty()
    self.assertFalse(os.path.exists(duplicates_store._get_storage_path()))

    shutil.rmtree(duplicates_dir)
    self.assertFalse(os.path.exists(duplicates_dir))
def test_reload(self):
    """Verify normalisation still works after the collection is reloaded."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._normal_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "normal.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.NORMAL] = engine
    factory._store_to_engine_map[StorageFactory.NORMAL] = engine

    collection = NormalCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertEqual(
        collection.normalise_string(None, "keithsterling.com"),
        "keithsterling dot com")

    # Reloading must leave the lookups intact.
    collection.reload(factory)
    self.assertEqual(
        collection.normalise_string(None, "keithsterling.com"),
        "keithsterling dot com")
def test_store_logs(self):
    """Append log entries of each level to a conversation and store them."""
    self.tearDown()

    storage_config = FileStorageConfiguration()
    storage_config.logs_storage._dirs = [self._tmpdir]
    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()

    logs_store = FileLogsStore(file_engine)
    self.assertEqual(logs_store.storage_engine, file_engine)
    logs_store.empty()

    test_client = TestClient()
    context = test_client.create_client_context("user1")
    convo = Conversation(context)

    # One entry per log level.
    for entry in ({"error": "logger ERROR log"},
                  {"warning": "logger WARNING log"},
                  {"info": "logger INFO log"},
                  {"debug": "logger DEBUG log"}):
        convo.append_log(entry)

    logs_store.store_logs(context, convo)
    logs_store.empty()
def test_rdf_updates_other_case(self):
    """Exercise the RDF updates store when configured with multiple dirs."""
    storage_config = FileStorageConfiguration()
    storage_config._rdf_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "data" + os.sep + "rdfs" +
        os.sep + "text"
    ], extension="rdf", subdirs=True, format="text", encoding="utf-8",
        delete_on_start=False)

    updates_dir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    # Same path listed twice — presumably the "other case" this test covers
    # (multi-dir configuration); confirm against store implementation.
    storage_config.rdf_updates_storage._dirs = [updates_dir, updates_dir]
    storage_config.rdf_updates_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()

    store = FileRDFUpdatesStore(file_engine)
    updates_file = store._updates_filename(store._get_storage_path())

    store.empty()
    self.assertFalse(os.path.exists(updates_file))
    self.assertEqual(store.get_storage(),
                     storage_config.rdf_updates_storage)
def test_load(self):
    """Load person2 substitutions from file and verify personalisation."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._person2_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "person2.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.PERSON2] = engine
    factory._store_to_engine_map[StorageFactory.PERSON2] = engine

    collection = Person2Collection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertEqual(collection.personalise_string(None, "I was"),
                     "he or she was")
    self.assertEqual(
        collection.personalise_string(None, "hello he was over there"),
        "hello I was over there")
def add_default_stores(amap):
    """Populate *amap* with the default configuration for every storage
    backend (sqlite, mongo, redis, file, logger).

    Each entry maps a backend key to a dict holding its 'type' and its
    freshly created 'config'.
    """
    sql_cfg = SQLStorageConfiguration()
    amap['sqlite'] = {
        'type': 'sql',
        'config': sql_cfg.create_sqlstorage_config()
    }

    mongo_cfg = MongoStorageConfiguration()
    amap['mongo'] = {
        'type': 'mongo',
        'config': mongo_cfg.create_mongostorage_config()
    }

    redis_cfg = RedisStorageConfiguration()
    amap['redis'] = {
        'type': 'redis',
        'config': redis_cfg.create_redisstorage_config()
    }

    file_cfg = FileStorageConfiguration()
    amap['file'] = {
        'type': 'file',
        'config': file_cfg.create_filestorage_config()
    }

    logger_cfg = LoggerStorageConfiguration()
    amap['logger'] = {
        'type': 'logger',
        'config': logger_cfg.create_loggerstorage_config()
    }
def test_load_from_test_dir_with_subdir(self):
    """Load all map files from a directory tree (subdirs enabled) and
    verify entries from two different maps."""
    storage_config = FileStorageConfiguration()
    storage_config._maps_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "data" + os.sep + "maps" +
        os.sep + "text"
    ], extension="txt", subdirs=True, format="text", encoding="utf-8",
        delete_on_start=False)

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    maps_store = FileMapsStore(file_engine)

    collection = MapCollection()
    maps_store.load_all(collection)

    self.assertTrue(collection.contains('TESTMAP'))
    loaded_map = collection.map('TESTMAP')
    self.assertIsNotNone(loaded_map)
    self.assertEqual("6", loaded_map['ANT'])

    self.assertTrue(collection.contains('TESTMAP2'))
    loaded_map = collection.map('TESTMAP2')
    self.assertIsNotNone(loaded_map)
    self.assertEqual("grrrrr", loaded_map['BEAR'])
def test_reload_jp(self):
    """Japanese gender substitutions survive a collection reload."""
    factory = StorageFactory()
    tokenizer = TokenizerJP()

    config = FileStorageConfiguration()
    config._gender_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "gender_jp.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.GENDER] = engine
    factory._store_to_engine_map[StorageFactory.GENDER] = engine

    collection = GenderCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertEqual(collection.gender("彼"), '彼女')
    self.assertEqual(collection.genderise_string(tokenizer, "彼が来た"),
                     "彼女が来た")

    # Same assertions must hold after a reload.
    collection.reload(factory)
    self.assertEqual(collection.gender("彼"), '彼女')
    self.assertEqual(collection.genderise_string(tokenizer, "彼が来た"),
                     "彼女が来た")
def test_reload_from_file(self):
    """Deleting an entity then reloading restores it from file storage."""
    file_config = FileStorageConfiguration()
    file_config._rdf_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "rdfs"
    ])

    storage_factory = StorageFactory()
    engine = FileStorageEngine(file_config)
    storage_factory._storage_engines[StorageFactory.RDF] = engine
    storage_factory._store_to_engine_map[StorageFactory.RDF] = engine
    engine.initialise()

    rdf_collection = RDFCollection()
    self.assertIsNotNone(rdf_collection)
    rdf_collection.load(storage_factory)

    self.assertTrue(rdf_collection.has_subject("TEST1"))
    self.assertTrue(rdf_collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(rdf_collection.has_object("TEST1", "HASPURPOSE", "to test"))

    # Remove the triple, confirm it is gone...
    rdf_collection.delete_entity("TEST1", "HASPURPOSE", "to test")
    self.assertFalse(
        rdf_collection.has_object("TEST1", "HASPURPOSE", "to test"))

    # ...then a reload brings it back from the backing file.
    rdf_collection.reload(storage_factory, "TESTDATA")
    self.assertTrue(rdf_collection.has_subject("TEST1"))
    self.assertTrue(rdf_collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(rdf_collection.has_object("TEST1", "HASPURPOSE", "to test"))
def test_reload(self):
    """Smoke test: a denormal collection can be loaded and then reloaded."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._denormal_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "denormal.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.DENORMAL] = engine
    factory._store_to_engine_map[StorageFactory.DENORMAL] = engine

    collection = DenormalCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)
    collection.reload(factory)
def test_save_learnf_with_exception(self):
    """save_learnf returns False when the category cannot be serialised
    (pattern/topic/that are raw Elements rather than strings)."""
    storage_config = FileStorageConfiguration()
    learnf_dir = os.path.dirname(__file__) + os.sep + "learnf"
    storage_config.learnf_storage._dirs = [learnf_dir]

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    learnf_store = FileLearnfStore(file_engine)

    client = TestClient()
    context = client.create_client_context("test1")

    base_path = learnf_store._get_storage_path()
    full_path = learnf_store.create_learnf_path(context, base_path)
    # Make sure no stale file masks the failure case.
    if os.path.exists(full_path):
        os.remove(full_path)
    self.assertFalse(os.path.exists(full_path))

    pattern_elem = ET.Element('pattern')
    pattern_elem.text = "HELLO"
    topic_elem = ET.Element('topic')
    topic_elem.text = '*'
    that_elem = ET.Element('that')
    that_elem.text = '*'

    template_node = TemplateNode()
    template_node.append(TemplateWordNode("Hello"))

    category = LearnCategory(pattern_elem, topic_elem, that_elem,
                             template_node)
    self.assertFalse(learnf_store.save_learnf(context, category))
def test_apply_updates(self):
    """Load an RDF collection and apply pending updates from the updates
    store without error."""
    storage_config = FileStorageConfiguration()
    storage_config._rdf_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "rdfs"
    ])

    updates_dir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    storage_config.rdf_updates_storage._dirs = [updates_dir]
    storage_config.rdf_updates_storage._has_single_file = True

    factory = StorageFactory()
    engine = FileStorageEngine(storage_config)
    # The one engine serves both the RDF data and the RDF updates.
    for store_name in (StorageFactory.RDF, StorageFactory.RDF_UPDATES):
        factory._storage_engines[store_name] = engine
        factory._store_to_engine_map[store_name] = engine
    engine.initialise()

    updates_engine = factory.entity_storage_engine(
        StorageFactory.RDF_UPDATES)
    updates_store = updates_engine.rdf_updates_store()
    updates_store.empty()

    collection = RDFCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertTrue(collection.has_subject("TEST1"))
    self.assertTrue(collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(collection.has_object("TEST1", "HASPURPOSE", "to test"))

    collection.apply_updates()
def test_load_triggers(self):
    """Load triggers from a text file via the local trigger manager and
    verify the expected count per event type.

    Fixes: deprecated ``assertEquals`` alias (removed in Python 3.12)
    replaced with ``assertEqual``; the local name ``config`` was reused
    for two unrelated configuration objects — now named distinctly.
    """
    trigger_config = TriggerConfiguration()
    trigger_config._manager = TriggerConfiguration.LOCAL_MANAGER
    mgr = TriggerManager.load_trigger_manager(trigger_config)

    trigger_file = os.path.dirname(__file__) + os.sep + "triggers.txt"
    self.assertTrue(os.path.exists(trigger_file))

    storage_config = FileStorageConfiguration()
    storage_config._triggers_storage = FileStoreConfiguration(
        file=trigger_file,
        format="text",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(storage_config)
    engine.initialise()

    storage_factory = StorageFactory()
    storage_factory._storage_engines[StorageFactory.TRIGGERS] = engine
    storage_factory._store_to_engine_map[StorageFactory.TRIGGERS] = engine

    mgr.load_triggers(storage_factory)

    # Expected counts come from the triggers.txt fixture.
    triggers = mgr.get_triggers("SYSTEM_STARTUP")
    self.assertIsNotNone(triggers)
    self.assertEqual(2, len(triggers))

    triggers = mgr.get_triggers("SYSTEM_SHUTDOWN")
    self.assertIsNotNone(triggers)
    self.assertEqual(1, len(triggers))

    triggers = mgr.get_triggers("CONVERSATION_START")
    self.assertIsNotNone(triggers)
    self.assertEqual(1, len(triggers))
def test_save_load_binaries(self):
    """Save a parser as a binary braintree and load it back intact."""
    storage_config = FileStorageConfiguration()
    braintree_dir = os.path.dirname(__file__) + os.sep + "braintree"
    braintree_file = braintree_dir + os.sep + "braintree.bin"
    storage_config.binaries_storage._dirs = [braintree_file]
    storage_config.binaries_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    binaries_store = FileBinariesStore(file_engine)

    parser = PretendAimlParser("pretend1")

    # Ensure a clean slate before saving.
    storage_dir = binaries_store._get_dir_from_path(
        binaries_store._get_storage_path())
    if os.path.exists(storage_dir):
        shutil.rmtree(storage_dir)
    self.assertFalse(os.path.exists(storage_dir))

    binaries_store.save_binary(parser)
    self.assertTrue(os.path.exists(binaries_store._get_storage_path()))

    reloaded = binaries_store.load_binary()
    self.assertIsNotNone(reloaded)
    self.assertEqual(reloaded._name, "pretend1")

    shutil.rmtree(braintree_dir)
    self.assertFalse(os.path.exists(braintree_dir))
def test_load_from_test_dir_with_subdir(self):
    """Load all set files from a directory tree (subdirs enabled) and
    verify the values of two different sets."""
    storage_config = FileStorageConfiguration()
    storage_config._sets_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "data" + os.sep + "sets" +
        os.sep + "text"
    ], extension="txt", subdirs=True, format="text", encoding="utf-8",
        delete_on_start=False)

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    sets_store = FileSetsStore(file_engine)

    collection = SetCollection()
    sets_store.load_all(collection)

    self.assertTrue(collection.contains('TESTSET'))
    members = collection.set_list('TESTSET')
    for expected in ('VAL1', 'VAL2', 'VAL3', 'VAL4'):
        self.assertTrue(expected in members)

    self.assertTrue(collection.contains('TESTSET2'))
    members = collection.set_list('TESTSET2')
    self.assertEqual(4, len(members))
    for expected in ('VAL5', 'VAL6', 'VAL7', 'VAL8'):
        self.assertTrue(expected in members)
def test_reload_jp(self):
    """Japanese person2 substitutions survive a collection reload."""
    factory = StorageFactory()
    tokenizer = TokenizerJP()

    config = FileStorageConfiguration()
    config._person2_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "person2_jp.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.PERSON2] = engine
    factory._store_to_engine_map[StorageFactory.PERSON2] = engine

    collection = Person2Collection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertEqual(collection.personalise_string(tokenizer, "私"), "彼か彼女")
    self.assertEqual(collection.personalise_string(tokenizer, "彼か彼女が来た"),
                     "私か私が来た")

    # Same assertions must hold after a reload.
    collection.reload(factory)
    self.assertEqual(collection.personalise_string(tokenizer, "私"), "彼か彼女")
    self.assertEqual(collection.personalise_string(tokenizer, "彼か彼女が来た"),
                     "私か私が来た")
def test_load_from_file(self):
    """Load the set collection from a directory of set files and verify
    its internal structures and values."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._sets_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "sets"
    ])

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.SETS] = engine
    factory._store_to_engine_map[StorageFactory.SETS] = engine

    collection = SetCollection()
    self.assertIsNotNone(collection)
    self.assertTrue(collection.load(factory) > 0)

    # Exactly one set is loaded, tracked in both _sets and _stores.
    self.assertIsNotNone(collection._sets)
    self.assertEqual(len(collection._sets), 1)
    self.assertIsNotNone(collection._stores)
    self.assertEqual(len(collection._stores), 1)
    self.assertTrue("TEST_SET" in collection._sets)
    self.assertTrue("TEST_SET" in collection._stores)

    self.assertTrue(collection.contains('TEST_SET'))
    loaded_set = collection.set('TEST_SET')
    self.assertIsNotNone(loaded_set)
    air_values = loaded_set['AIR']
    self.assertIsNotNone(air_values)
    self.assertTrue(['Air', 'Force', 'blue'] in air_values)
def test_load(self):
    """Load person substitutions from file and verify personalisation."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._person_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "person.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.PERSON] = engine
    factory._store_to_engine_map[StorageFactory.PERSON] = engine

    collection = PersonCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    # Expected values come from the person.txt fixture.
    self.assertEqual(collection.personalise_string(" with me "),
                     "with you2")
    self.assertEqual(
        collection.personalise_string("Hello are you with me"),
        "Hello am i2 with you2")
def test_save_learnf(self):
    """A valid learn category is written out to the learnf file."""
    storage_config = FileStorageConfiguration()
    learnf_dir = os.path.dirname(__file__) + os.sep + "learnf"
    storage_config.learnf_storage._dirs = [learnf_dir]

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    learnf_store = FileLearnfStore(file_engine)

    client = TestClient()
    context = client.create_client_context("test1")

    base_path = learnf_store._get_storage_path()
    full_path = learnf_store.create_learnf_path(context, base_path)
    # Remove any stale file so the save proves it creates one.
    if os.path.exists(full_path):
        os.remove(full_path)
    self.assertFalse(os.path.exists(full_path))

    template_node = TemplateNode()
    template_node.append(TemplateWordNode("Hello"))
    category = LearnCategory("HELLO *", "*", "*", template_node)

    learnf_store.save_learnf(context, category)
    self.assertTrue(os.path.exists(full_path))

    shutil.rmtree(learnf_dir)
    self.assertFalse(os.path.exists(learnf_dir))
def test_load(self):
    """Load gender substitutions from file and verify lookup and
    genderised string replacement."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    # NOTE(review): sibling tests pass format="text" — confirm expected kwarg name
    config._gender_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "gender.txt",
        fileformat="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.GENDER] = engine
    factory._store_to_engine_map[StorageFactory.GENDER] = engine

    collection = GenderCollection()
    self.assertIsNotNone(collection)
    self.assertTrue(collection.load(factory))

    # Lookup returns the compiled pattern / replacement pair.
    self.assertEqual(collection.gender(" WITH HIM "), [
        re.compile('(^WITH HIM | WITH HIM | WITH HIM$)', re.IGNORECASE),
        ' WITH HER '
    ])
    self.assertEqual(collection.genderise_string("This is with him "),
                     "This is with her")
def test_load_from_file(self):
    """Load bot properties from file and verify present/absent keys."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    config._properties_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "properties.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.PROPERTIES] = engine
    factory._store_to_engine_map[StorageFactory.PROPERTIES] = engine

    collection = PropertiesCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    # "name" exists in the fixture; "age" does not.
    self.assertTrue(collection.has_property("name"))
    self.assertFalse(collection.has_property("age"))
    self.assertEqual("KeiffBot 1.0", collection.property("name"))
    self.assertIsNone(collection.property("age"))
def test_apply_rdf_updates_No_files(self):
    """Applying RDF updates when no updates file exists is a no-op."""
    storage_config = FileStorageConfiguration()
    storage_config._rdf_storage = FileStoreConfiguration(dirs=[
        os.path.dirname(__file__) + os.sep + "data" + os.sep + "rdfs" +
        os.sep + "text"
    ], extension="rdf", subdirs=True, format="text", encoding="utf-8",
        delete_on_start=False)

    updates_dir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    storage_config.rdf_updates_storage._dirs = [updates_dir]
    storage_config.rdf_updates_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()

    rdf_store = FileRDFStore(file_engine)
    updates_store = FileRDFUpdatesStore(file_engine)
    updates_file = updates_store._updates_filename(
        updates_store._get_storage_path())

    updates_store.empty_updates()
    self.assertFalse(os.path.exists(updates_file))

    collection = RDFCollection()
    rdf_store.load_all(collection)

    # No updates file present — must not raise.
    updates_store.apply_rdf_updates(collection, None)

    updates_store.empty_updates()
    self.assertFalse(os.path.exists(updates_file))
def test_reload_jp(self):
    """Japanese normalisation survives a collection reload."""
    factory = StorageFactory()
    tokenizer = TokenizerJP()

    config = FileStorageConfiguration()
    config._normal_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "normal_jp.txt",
        format="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.NORMAL] = engine
    factory._store_to_engine_map[StorageFactory.NORMAL] = engine

    collection = NormalCollection()
    self.assertIsNotNone(collection)
    collection.load(factory)

    self.assertEqual("丸1の回答",
                     collection.normalise_string(tokenizer, "①の回答"))

    # Same assertion must hold after a reload.
    collection.reload(factory)
    self.assertEqual("丸1の回答",
                     collection.normalise_string(tokenizer, "①の回答"))
def test_load(self):
    """Load person substitutions and verify both string personalisation
    and raw pattern lookup."""
    factory = StorageFactory()

    config = FileStorageConfiguration()
    # NOTE(review): sibling tests pass format="text" — confirm expected kwarg name
    config._person_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "person.txt",
        fileformat="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.PERSON] = engine
    factory._store_to_engine_map[StorageFactory.PERSON] = engine

    collection = PersonCollection()
    self.assertIsNotNone(collection)
    self.assertTrue(collection.load(factory))

    self.assertEqual(collection.personalise_string(" with me "),
                     "with you2")
    self.assertEqual(
        collection.personalise_string("Hello are you with me"),
        "Hello am i2 with you2")

    # Known entry returns the compiled pattern / replacement pair.
    self.assertEqual([
        re.compile('(^WITH YOU | WITH YOU | WITH YOU$)', re.IGNORECASE),
        ' WITH ME2 '
    ], collection.person(" WITH YOU "))
    # Unknown entry yields None.
    self.assertEqual(None, collection.person(" WITH XXX "))
def test_load(self):
    """Load person2 substitutions and verify personalisation and raw
    pattern lookup.

    Fix: replace deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual``.
    """
    storage_factory = StorageFactory()

    file_store_config = FileStorageConfiguration()
    # NOTE(review): sibling tests pass format="text" — confirm expected kwarg name
    file_store_config._person2_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "person2.txt",
        fileformat="text",
        extension="txt",
        encoding="utf-8",
        delete_on_start=False)

    storage_engine = FileStorageEngine(file_store_config)
    storage_factory._storage_engines[StorageFactory.PERSON2] = storage_engine
    storage_factory._store_to_engine_map[StorageFactory.PERSON2] = storage_engine

    collection = Person2Collection()
    self.assertIsNotNone(collection)
    self.assertTrue(collection.load(storage_factory))

    self.assertEqual(collection.personalise_string("I was"),
                     "he or she was")
    self.assertEqual(
        collection.personalise_string("hello he was over there"),
        "hello i was over there")

    # Known entry returns the compiled pattern / replacement pair.
    self.assertEqual([
        re.compile('(^I WAS | I WAS | I WAS$)', re.IGNORECASE),
        ' HE OR SHE WAS '
    ], collection.person(" I WAS "))
    # Unknown entry yields None.
    self.assertEqual(None, collection.person(" I XXX "))
def test_save_braintree(self):
    """Save the pattern graph as an XML braintree file and clean up."""
    storage_config = FileStorageConfiguration()
    storage_config._categories_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "categories"],
        extension="aiml", subdirs=False, format="xml", encoding="utf-8",
        delete_on_start=False)

    braintree_dir = os.path.dirname(__file__) + os.sep + "braintree"
    braintree_file = braintree_dir + os.sep + "braintree.xml"
    storage_config.braintree_storage._dirs = [braintree_file]
    storage_config.braintree_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    braintree_store = FileBraintreeStore(file_engine)

    # Ensure a clean slate before saving.
    storage_dir = braintree_store._get_dir_from_path(
        braintree_store._get_storage_path())
    if os.path.exists(storage_dir):
        shutil.rmtree(storage_dir)
    self.assertFalse(os.path.exists(storage_dir))

    client = TestClient()
    context = client.create_client_context("test1")
    pattern_graph = context.brain.aiml_parser.pattern_parser

    braintree_store.save_braintree(context, pattern_graph)
    self.assertTrue(os.path.exists(braintree_store._get_storage_path()))

    shutil.rmtree(braintree_dir)
    self.assertFalse(os.path.exists(braintree_dir))
def test_load_errors(self):
    """Round-trip error records through the file errors store, then
    empty the store and remove the working directory."""
    storage_config = FileStorageConfiguration()
    errors_dir = os.path.dirname(__file__) + os.sep + "errors"
    errors_file = errors_dir + os.sep + "errors.txt"
    storage_config.errors_storage._dirs = [errors_file]
    storage_config.errors_storage._has_single_file = True

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()
    errors_store = FileErrorsStore(file_engine)

    # Start from a clean directory so the save below creates it fresh.
    storage_dir = errors_store._get_dir_from_path(
        errors_store._get_storage_path())
    if os.path.exists(storage_dir):
        shutil.rmtree(storage_dir)
    self.assertFalse(os.path.exists(storage_dir))

    errors_store.empty()

    rows = [["aiml1.xml", "10", "20", "1", "5", "node", "Error_1"]]
    errors_store.save_errors(rows)
    self.assertTrue(os.path.exists(errors_store._get_storage_path()))

    loaded_info = errors_store.load_errors()
    self.assertIsNotNone(loaded_info)
    self.assertIsNotNone(loaded_info['errors'])
    first_entry = loaded_info['errors'][0]
    self.assertEqual("aiml1.xml", first_entry['file'])

    errors_store.empty()
    self.assertFalse(os.path.exists(errors_store._get_storage_path()))

    shutil.rmtree(errors_dir)
    self.assertFalse(os.path.exists(errors_dir))
def test_collection_update_to_updates_file(self):
    """Adding and deleting an entity on a collection wired to an updates
    store keeps the in-memory view consistent."""
    storage_config = FileStorageConfiguration()
    updates_dir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    storage_config.rdf_updates_storage._dirs = [updates_dir]
    storage_config.rdf_updates_storage._has_single_file = True

    factory = StorageFactory()
    engine = FileStorageEngine(storage_config)
    factory._storage_engines[StorageFactory.RDF_UPDATES] = engine
    factory._store_to_engine_map[StorageFactory.RDF_UPDATES] = engine

    updates_engine = factory.entity_storage_engine(
        StorageFactory.RDF_UPDATES)
    updates_store = updates_engine.rdf_updates_store()
    updates_store.empty()

    collection = RDFCollection()
    self.assertIsNotNone(collection)
    collection._storage_factory = factory

    # NOTE(review): "BANIKING" looks like a typo for "BANKING" in the
    # fixture data — confirm against add_entity's parameter semantics.
    collection.add_entity("ACCOUNT", "hasSize", "0", "BANKING", "BANIKING")
    self.assertTrue(collection.has_subject('ACCOUNT'))
    self.assertTrue(collection.has_predicate('ACCOUNT', 'hasSize'))
    self.assertTrue(collection.has_object('ACCOUNT', 'hasSize', "0"))

    collection.delete_entity("ACCOUNT", "hasSize", "0")
    self.assertFalse(collection.has_subject('ACCOUNT'))

    updates_store.empty()
def test_initialise(self):
    """A conversation store built on an initialised file engine exposes
    that engine via its storage_engine attribute."""
    storage_config = FileStorageConfiguration()
    storage_config.conversation_storage._dirs = [self._tmpdir]

    file_engine = FileStorageEngine(storage_config)
    file_engine.initialise()

    conversation_store = FileConversationStore(file_engine)
    self.assertEqual(conversation_store.storage_engine, file_engine)