def test_basic_tokenizer_lower(self):
    """Lower-casing BasicTokenizer: collapses whitespace, splits punctuation, strips accents."""
    lower_tok = tokenization.BasicTokenizer(do_lower_case=True)

    # Tabs, newlines, and runs of spaces are treated as separators;
    # punctuation is split into its own tokens and letters are lowercased.
    expected_mixed = ["hello", "!", "how", "are", "you", "?"]
    self.assertAllEqual(
        lower_tok.tokenize(u" \tHeLLo!how  \n Are yoU?  "),
        expected_mixed)
    # U+00E9 (e with acute accent) is reduced to plain ascii "e".
    self.assertAllEqual(lower_tok.tokenize(u"H\u00E9llo"), ["hello"])
    def test_basic_tokenizer_no_lower(self):
        tokenizer = tokenization.BasicTokenizer(do_lower_case=False)

        self.assertAllEqual(tokenizer.tokenize(u" \tHeLLo!how  \n Are yoU?  "),
                            ["HeLLo", "!", "how", "Are", "yoU", "?"])
    def test_chinese(self):
        tokenizer = tokenization.BasicTokenizer()

        self.assertAllEqual(tokenizer.tokenize(u"ah\u535A\u63A8zz"),
                            [u"ah", u"\u535A", u"\u63A8", u"zz"])