def test_collection_duplicate(self):
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        collection.add_to_lookup("dot com", ".com ")
        collection.add_to_lookup("dot com", ".co ")

        self.assertEqual(collection.denormalise("dot com"), '.com ')
    def test_collection_duplicate_jp(self):
        """A duplicate Japanese key does not replace the value stored first."""
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        # Second add_to_lookup for the same key should be a no-op.
        collection.add_to_lookup("丸1", "①")
        collection.add_to_lookup("丸1", "②")

        self.assertEqual('①', collection.denormalise("丸1"))
    # (removed scraper residue here: a stray "Пример #3" / "0" banner from a
    # code-aggregation site had been pasted between the test methods)
    def test_collection_operations(self):
        """A [compiled-regex, replacement] pair stored under a key is retrievable
        and is applied by denormalise_string; an unknown key denormalises to None."""
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        stored_pair = [re.compile("(^DOT COM | DOT COM | DOT COM$)", re.IGNORECASE), ".com"]
        collection.add_to_lookup(" DOT COM ", stored_pair)

        # NOTE(review): this test calls has_key while the sibling tests call
        # has_keyVal — confirm which name is the current collection API.
        self.assertTrue(collection.has_key(" DOT COM "))
        self.assertEqual(stored_pair, collection.value(" DOT COM "))

        # NOTE(review): called with a single argument here, but with
        # (tokenizer, text) in the other tests — verify the signature.
        self.assertEqual(collection.denormalise_string("keithsterling dot com"), "keithsterling.com")
        self.assertIsNone(collection.denormalise(" dot cox "))
    def test_collection_invalid(self):
        """Lookups for a key that was never added return nothing, and
        denormalise_string leaves unmatched text untouched."""
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        collection.add_to_lookup("dot com", ".com ")

        missing = "dot co"
        self.assertFalse(collection.has_keyVal(missing))
        self.assertIsNone(collection.value(missing))
        self.assertIsNone(collection.denormalise(missing))

        # No tokenizer supplied; the input string comes back unchanged.
        self.assertEqual("www.dot.co",
                         collection.denormalise_string(None, "www.dot.co"))
    def test_collection_invalid_jp(self):
        """An unknown Japanese key yields no value, and denormalise_string
        returns text containing no known keys unchanged."""
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        collection.add_to_lookup("丸1", "①")

        unknown = "丸"
        self.assertFalse(collection.has_keyVal(unknown))
        self.assertIsNone(collection.value(unknown))
        self.assertIsNone(collection.denormalise(unknown))

        # Text without a stored key passes through the JP tokenizer untouched.
        jp_tokenizer = TokenizerJP()
        self.assertEqual("丸の回答",
                         collection.denormalise_string(jp_tokenizer, "丸の回答"))
    def test_collection_operations_JP(self):
        """A stored Japanese mapping is retrievable and substituted into text
        by denormalise_string; a partial key match denormalises to None."""
        collection = DenormalCollection()
        self.assertIsNotNone(collection)

        jp_tokenizer = TokenizerJP()
        collection.add_to_lookup("丸1", "①")

        self.assertTrue(collection.has_keyVal("丸1"))
        self.assertEqual("①", collection.value("丸1"))

        # "丸1" inside a longer phrase is rewritten; "丸" alone is not a key.
        self.assertEqual("①の回答",
                         collection.denormalise_string(jp_tokenizer, "丸1の回答"))
        self.assertIsNone(collection.denormalise("丸"))