def test_collection_invalid(self):
    """Lookups for a key that was never added must all come back empty."""
    collection = DenormalCollection()
    self.assertIsNotNone(collection)

    collection.add_to_lookup("dot com", ".com ")

    # "dot co" was not registered, so every accessor should miss.
    self.assertFalse(collection.has_keyVal("dot co"))
    self.assertIsNone(collection.value("dot co"))
    self.assertIsNone(collection.denormalise("dot co"))
    # An unmatched string passes through denormalise_string unchanged.
    self.assertEqual(collection.denormalise_string(None, "www.dot.co"), "www.dot.co")
def test_collection_invalid_jp(self):
    """Japanese variant: unknown keys miss and input text is left untouched."""
    collection = DenormalCollection()
    self.assertIsNotNone(collection)

    collection.add_to_lookup("丸1", "①")
    tokenizer = TokenizerJP()

    # "丸" alone is not a registered key.
    self.assertFalse(collection.has_keyVal("丸"))
    self.assertIsNone(collection.value("丸"))
    self.assertIsNone(collection.denormalise("丸"))
    # With no matching key the original sentence is returned as-is.
    self.assertEqual(collection.denormalise_string(tokenizer, "丸の回答"), "丸の回答")
def test_collection_operations_JP(self):
    """A registered Japanese key is found and substituted inside a sentence."""
    collection = DenormalCollection()
    self.assertIsNotNone(collection)

    collection.add_to_lookup("丸1", "①")
    tokenizer = TokenizerJP()

    self.assertTrue(collection.has_keyVal("丸1"))
    self.assertEqual("①", collection.value("丸1"))
    # The key embedded in running text is replaced by its denormal form.
    self.assertEqual(collection.denormalise_string(tokenizer, "丸1の回答"), "①の回答")
    # A partial key still misses.
    self.assertIsNone(collection.denormalise("丸"))
def test_collection_operations(self):
    """Entries loaded from CSV-style text are looked up and substituted."""
    denormal_text = """
        "dot ac",".ac "
        "dot au",".au "
        "dot ca",".ca "
        "dot ch",".ch "
        "dot co",".co "
        "dot com",".com "
    """
    collection = DenormalCollection()
    self.assertIsNotNone(collection)

    collection.load_from_text(denormal_text)

    self.assertTrue(collection.has_keyVal("dot com"))
    self.assertEqual(".com ", collection.value("dot com"))
    # The spoken form is replaced by the denormal form within the string.
    self.assertEqual(
        collection.denormalise_string(None, "keithsterling dot com"),
        "keithsterling.com")
    # A key absent from the loaded table yields None.
    self.assertIsNone(collection.denormalise("dot cox"))