def test_save_learnf_with_exception(self):
    config = FileStorageConfiguration()
    tmpdir = os.path.dirname(__file__) + os.sep + "learnf"
    config.learnf_storage._dirs = [tmpdir]
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileLearnfStore(engine)

    test_client = TestClient()
    client_context = test_client.create_client_context("test1")

    learnf_path = store._get_storage_path()
    learnf_fullpath = store.create_learnf_path(client_context, learnf_path)
    if os.path.exists(learnf_fullpath):
        os.remove(learnf_fullpath)
    self.assertFalse(os.path.exists(learnf_fullpath))

    pattern = ET.Element('pattern')
    pattern.text = "HELLO"
    topic = ET.Element('topic')
    topic.text = '*'
    that = ET.Element('that')
    that.text = '*'
    template = TemplateNode()
    template.append(TemplateWordNode("Hello"))
    category = LearnCategory(pattern, topic, that, template)

    # save_learnf is expected to fail and return False
    self.assertFalse(store.save_learnf(client_context, category))
def test_reload_from_file(self):
    config = FileStorageConfiguration()
    config._rdf_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "rdfs"])

    factory = StorageFactory()
    storage_engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.RDF] = storage_engine
    factory._store_to_engine_map[StorageFactory.RDF] = storage_engine
    storage_engine.initialise()

    collection = RDFCollection()
    self.assertIsNotNone(collection)

    collection.load(factory)
    self.assertTrue(collection.has_subject("TEST1"))
    self.assertTrue(collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(collection.has_object("TEST1", "HASPURPOSE", "to test"))

    # Deleting the entity removes the object from the in-memory collection ...
    collection.delete_entity("TEST1", "HASPURPOSE", "to test")
    self.assertFalse(collection.has_object("TEST1", "HASPURPOSE", "to test"))

    # ... and reloading from file restores it
    collection.reload(factory, "TESTDATA")
    self.assertTrue(collection.has_subject("TEST1"))
    self.assertTrue(collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(collection.has_object("TEST1", "HASPURPOSE", "to test"))
def test_load_triggers(self):
    config = TriggerConfiguration()
    config._manager = TriggerConfiguration.LOCAL_MANAGER
    mgr = TriggerManager.load_trigger_manager(config)

    trigger_file = os.path.dirname(__file__) + os.sep + "triggers.txt"
    self.assertTrue(os.path.exists(trigger_file))

    config = FileStorageConfiguration()
    config._triggers_storage = FileStoreConfiguration(
        file=trigger_file,
        format="text",
        encoding="utf-8",
        delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()

    storage_factory = StorageFactory()
    storage_factory._storage_engines[StorageFactory.TRIGGERS] = engine
    storage_factory._store_to_engine_map[StorageFactory.TRIGGERS] = engine

    mgr.load_triggers(storage_factory)

    triggers = mgr.get_triggers("SYSTEM_STARTUP")
    self.assertIsNotNone(triggers)
    self.assertEqual(2, len(triggers))

    triggers = mgr.get_triggers("SYSTEM_SHUTDOWN")
    self.assertIsNotNone(triggers)
    self.assertEqual(1, len(triggers))

    triggers = mgr.get_triggers("CONVERSATION_START")
    self.assertIsNotNone(triggers)
    self.assertEqual(1, len(triggers))
def test_apply_updates(self):
    config = FileStorageConfiguration()
    config._rdf_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "test_files" + os.sep + "rdfs"])

    tmpdir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    config.rdf_updates_storage._dirs = [tmpdir]
    config.rdf_updates_storage._has_single_file = True

    factory = StorageFactory()
    storage_engine = FileStorageEngine(config)
    factory._storage_engines[StorageFactory.RDF] = storage_engine
    factory._store_to_engine_map[StorageFactory.RDF] = storage_engine
    factory._storage_engines[StorageFactory.RDF_UPDATES] = storage_engine
    factory._store_to_engine_map[StorageFactory.RDF_UPDATES] = storage_engine
    storage_engine.initialise()

    updates_engine = factory.entity_storage_engine(StorageFactory.RDF_UPDATES)
    updates_store = updates_engine.rdf_updates_store()
    updates_store.empty()

    collection = RDFCollection()
    self.assertIsNotNone(collection)

    collection.load(factory)
    self.assertTrue(collection.has_subject("TEST1"))
    self.assertTrue(collection.has_predicate("TEST1", "HASPURPOSE"))
    self.assertTrue(collection.has_object("TEST1", "HASPURPOSE", "to test"))

    # Applying the pending updates should complete without error
    collection.apply_updates()
def test_apply_rdf_updates_no_files(self):
    config = FileStorageConfiguration()
    config._rdf_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "rdfs" + os.sep + "text"],
        extension="rdf",
        subdirs=True,
        format="text",
        encoding="utf-8",
        delete_on_start=False)

    tmpdir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    config.rdf_updates_storage._dirs = [tmpdir]
    config.rdf_updates_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()

    rdf_store = FileRDFStore(engine)
    updates_store = FileRDFUpdatesStore(engine)

    filepath = updates_store._updates_filename(updates_store._get_storage_path())
    updates_store.empty_updates()
    self.assertFalse(os.path.exists(filepath))

    map_collection = RDFCollection()
    rdf_store.load_all(map_collection)

    # With no updates file present, applying updates should be a no-op
    updates_store.apply_rdf_updates(map_collection, None)

    updates_store.empty_updates()
    self.assertFalse(os.path.exists(filepath))
def test_rdf_updates_other_case(self):
    config = FileStorageConfiguration()
    config._rdf_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "rdfs" + os.sep + "text"],
        extension="rdf",
        subdirs=True,
        format="text",
        encoding="utf-8",
        delete_on_start=False)

    tmpdir = os.path.dirname(__file__) + os.sep + "rdf_updates"
    config.rdf_updates_storage._dirs = [tmpdir, tmpdir]
    config.rdf_updates_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()

    updates_store = FileRDFUpdatesStore(engine)

    filepath = updates_store._updates_filename(updates_store._get_storage_path())
    updates_store.empty()
    self.assertFalse(os.path.exists(filepath))

    self.assertEqual(updates_store.get_storage(), config.rdf_updates_storage)
def test_load_errors(self):
    config = FileStorageConfiguration()
    tmpdir = os.path.dirname(__file__) + os.sep + "errors"
    tmpfile = tmpdir + os.sep + "errors.txt"
    config.errors_storage._dirs = [tmpfile]
    config.errors_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileErrorsStore(engine)

    path = store._get_dir_from_path(store._get_storage_path())
    if os.path.exists(path):
        shutil.rmtree(path)
    self.assertFalse(os.path.exists(path))

    store.empty()

    # A single saved error record should round-trip through save/load
    errors = [["aiml1.xml", "10", "20", "1", "5", "node", "Error_1"]]
    store.save_errors(errors)
    self.assertTrue(os.path.exists(store._get_storage_path()))

    error_info = store.load_errors()
    self.assertIsNotNone(error_info)
    self.assertIsNotNone(error_info['errors'])
    errors = error_info['errors']
    error = errors[0]
    self.assertEqual("aiml1.xml", error['file'])

    store.empty()
    self.assertFalse(os.path.exists(store._get_storage_path()))

    shutil.rmtree(tmpdir)
    self.assertFalse(os.path.exists(tmpdir))
def test_load_duplicates(self):
    config = FileStorageConfiguration()
    tmpdir = os.path.dirname(__file__) + os.sep + "duplicates"
    tmpfile = tmpdir + os.sep + "duplicates.txt"
    config.duplicates_storage._dirs = [tmpfile]
    config.duplicates_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileDuplicatesStore(engine)

    path = store._get_dir_from_path(store._get_storage_path())
    if os.path.exists(path):
        shutil.rmtree(path)
    self.assertFalse(os.path.exists(path))

    store.empty()

    # A single saved duplicate record should round-trip through save/load
    duplicates = [["aiml1.xml", "10", "20", "1", "5", "Duplicate_1"]]
    store.save_duplicates(duplicates)
    self.assertTrue(os.path.exists(store._get_storage_path()))

    duplicate_info = store.load_duplicates()
    self.assertIsNotNone(duplicate_info)
    self.assertIsNotNone(duplicate_info['duplicates'])
    duplicates = duplicate_info['duplicates']
    duplicate = duplicates[0]
    self.assertEqual("aiml1.xml", duplicate['file'])

    store.empty()
    self.assertFalse(os.path.exists(store._get_storage_path()))

    shutil.rmtree(tmpdir)
    self.assertFalse(os.path.exists(tmpdir))
def test_load_from_test_dir_with_subdir(self):
    config = FileStorageConfiguration()
    config._maps_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "maps" + os.sep + "text"],
        extension="txt",
        subdirs=True,
        format="text",
        encoding="utf-8",
        delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileMapsStore(engine)

    map_collection = MapCollection()
    store.load_all(map_collection)

    self.assertTrue(map_collection.contains('TESTMAP'))
    the_map = map_collection.map('TESTMAP')
    self.assertIsNotNone(the_map)
    self.assertEqual("6", the_map['ANT'])

    self.assertTrue(map_collection.contains('TESTMAP2'))
    the_map = map_collection.map('TESTMAP2')
    self.assertIsNotNone(the_map)
    self.assertEqual("grrrrr", the_map['BEAR'])
def test_save_braintree(self):
    config = FileStorageConfiguration()
    config._categories_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "categories"],
        extension="aiml",
        subdirs=False,
        format="xml",
        encoding="utf-8",
        delete_on_start=False)

    tmpdir = os.path.dirname(__file__) + os.sep + "braintree"
    tmpfile = tmpdir + os.sep + "braintree.xml"
    config.braintree_storage._dirs = [tmpfile]
    config.braintree_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileBraintreeStore(engine)

    path = store._get_dir_from_path(store._get_storage_path())
    if os.path.exists(path):
        shutil.rmtree(path)
    self.assertFalse(os.path.exists(path))

    test_client = TestClient()
    client_context = test_client.create_client_context("test1")
    pattern_graph = client_context.brain.aiml_parser.pattern_parser

    store.save_braintree(client_context, pattern_graph)
    self.assertTrue(os.path.exists(store._get_storage_path()))

    shutil.rmtree(tmpdir)
    self.assertFalse(os.path.exists(tmpdir))
def test_save_load_binaries(self):
    config = FileStorageConfiguration()
    tmpdir = os.path.dirname(__file__) + os.sep + "braintree"
    tmpfile = tmpdir + os.sep + "braintree.bin"
    config.binaries_storage._dirs = [tmpfile]
    config.binaries_storage._has_single_file = True

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileBinariesStore(engine)

    aiml_parser = PretendAimlParser("pretend1")

    path = store._get_dir_from_path(store._get_storage_path())
    if os.path.exists(path):
        shutil.rmtree(path)
    self.assertFalse(os.path.exists(path))

    store.save_binary(aiml_parser)
    self.assertTrue(os.path.exists(store._get_storage_path()))

    aiml_parser2 = store.load_binary()
    self.assertIsNotNone(aiml_parser2)
    self.assertEqual(aiml_parser2._name, "pretend1")

    shutil.rmtree(tmpdir)
    self.assertFalse(os.path.exists(tmpdir))
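# For reference, a minimal sketch of the PretendAimlParser test double used in
# test_save_load_binaries above. It is not defined in this section; this assumes
# it only needs to expose the _name attribute that the test inspects and to
# survive the binary save/load round trip. The exact implementation is an
# assumption.
class PretendAimlParser:

    def __init__(self, name):
        # Only _name is checked after the round trip
        self._name = name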
def test_save_learnf(self):
    config = FileStorageConfiguration()
    tmpdir = os.path.dirname(__file__) + os.sep + "learnf"
    config.learnf_storage._dirs = [tmpdir]

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileLearnfStore(engine)

    test_client = TestClient()
    client_context = test_client.create_client_context("test1")

    learnf_path = store._get_storage_path()
    learnf_fullpath = store.create_learnf_path(client_context, learnf_path)
    if os.path.exists(learnf_fullpath):
        os.remove(learnf_fullpath)
    self.assertFalse(os.path.exists(learnf_fullpath))

    template = TemplateNode()
    template.append(TemplateWordNode("Hello"))
    category = LearnCategory("HELLO *", "*", "*", template)

    store.save_learnf(client_context, category)
    self.assertTrue(os.path.exists(learnf_fullpath))

    shutil.rmtree(tmpdir)
    self.assertFalse(os.path.exists(tmpdir))
def test_load_from_test_dir_with_subdir(self):
    config = FileStorageConfiguration()
    config._sets_storage = FileStoreConfiguration(
        dirs=[os.path.dirname(__file__) + os.sep + "data" + os.sep + "sets" + os.sep + "text"],
        extension="txt",
        subdirs=True,
        format="text",
        encoding="utf-8",
        delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileSetsStore(engine)

    set_collection = SetCollection()
    store.load_all(set_collection)

    self.assertTrue(set_collection.contains('TESTSET'))
    values = set_collection.set_list('TESTSET')
    self.assertTrue('VAL1' in values)
    self.assertTrue('VAL2' in values)
    self.assertTrue('VAL3' in values)
    self.assertTrue('VAL4' in values)

    self.assertTrue(set_collection.contains('TESTSET2'))
    values = set_collection.set_list('TESTSET2')
    self.assertEqual(4, len(values))
    self.assertTrue('VAL5' in values)
    self.assertTrue('VAL6' in values)
    self.assertTrue('VAL7' in values)
    self.assertTrue('VAL8' in values)
def test_store_logs(self):
    self.tearDown()

    config = FileStorageConfiguration()
    config.logs_storage._dirs = [self._tmpdir]

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileLogsStore(engine)
    self.assertEqual(store.storage_engine, engine)

    store.empty()

    client = TestClient()
    client_context = client.create_client_context("user1")
    conversation = Conversation(client_context)

    error_log = {"error": "logger ERROR log"}
    warning_log = {"warning": "logger WARNING log"}
    info_log = {"info": "logger INFO log"}
    debug_log = {"debug": "logger DEBUG log"}
    conversation.append_log(error_log)
    conversation.append_log(warning_log)
    conversation.append_log(info_log)
    conversation.append_log(debug_log)

    store.store_logs(client_context, conversation)

    store.empty()
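# A minimal sketch of the setUp/tearDown pair that tests such as test_store_logs
# and test_initialise appear to rely on: self._tmpdir is referenced above but not
# defined in this section. The directory name used here is hypothetical.
def setUp(self):
    self._tmpdir = os.path.dirname(__file__) + os.sep + "tmp"

def tearDown(self):
    if os.path.exists(self._tmpdir):
        shutil.rmtree(self._tmpdir)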
def test_load_users_groups_with_exception(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileUserGroupStore(engine)

    usersgroupsauthorisor = BasicUserGroupAuthorisationService(config)
    self.assertFalse(store.load_usergroups(usersgroupsauthorisor))
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileUserGroupStore(engine)

    self.assertEqual('/tmp/security/usergroups.yaml', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileCategoryStore(engine)

    self.assertEqual(['/tmp/categories'], store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FilePersonStore(engine)

    self.assertEqual('/tmp/lookups/person.txt', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FilePostQuestionProcessorsStore(engine)

    self.assertEqual('/tmp/processing/postquestionprocessors.conf', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileRegexStore(engine)

    self.assertEqual('/tmp/regex/regex-templates.txt', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_initialise(self):
    config = FileStorageConfiguration()
    config.conversation_storage._dirs = [self._tmpdir]

    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileConversationStore(engine)
    self.assertEqual(store.storage_engine, engine)
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileServiceStore(engine)

    self.assertEqual('/tmp/services', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_load_spelling_with_exception(self):
    config = FileStorageConfiguration()
    config._spelling_storage = FileStoreConfiguration(
        file=os.path.dirname(__file__) + os.sep + "data" + os.sep + "spelling" + os.sep + "corpus.txt",
        fileformat="text",
        encoding="utf-8",
        delete_on_start=False)
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileSpellingStore(engine)

    spelling_checker = NorvigSpellingChecker()
    self.assertFalse(store.load_spelling(spelling_checker))
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileDefaultVariablesStore(engine)

    self.assertEqual('/tmp/properties/defaults.txt', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_load_file_contents(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileProcessorsStore(engine)

    collection = ProcessorCollection()
    count = store._load_file_contents(
        collection,
        os.path.dirname(__file__) + os.sep + "data" + os.sep + "processors" + os.sep + "preprocessors.conf")
    self.assertEqual(2, count)
def test_process_line_with_exception(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileProcessorsStore(engine)

    collection = ProcessorCollection()
    count = store._process_line("XXXXXX", collection, 0)
    self.assertEqual(0, count)
def test_process_line(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FilePropertyStore(engine)

    # Blank lines and comment lines are ignored; name:value lines are processed
    self.assertFalse(store._process_line("", {}))
    self.assertFalse(store._process_line("#name:Y-Bot", {}))
    self.assertTrue(store._process_line("name:Y-Bot", {}))
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileTemplateNodeStore(engine)

    self.assertEqual('/tmp/nodes/template_nodes.conf', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)
def test_process_config_line_with_exception(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileNodeStore(engine)

    node_factory = MockNodeFactory()
    self.assertFalse(store.process_config_line(node_factory,
                                               "root=programy.parser.pattern.nodes.root.PatternRootNode",
                                               "nodes.txt"))
def test_storage_path(self):
    config = FileStorageConfiguration()
    engine = FileStorageEngine(config)
    engine.initialise()
    store = FileDuplicatesStore(engine)

    self.assertEqual('/tmp/debug/duplicates.txt', store._get_storage_path())
    self.assertIsInstance(store.get_storage(), FileStoreConfiguration)