def test_tokenize(self):
    """Verify that a policy string is split into the expected tokens."""
    # Exercises nested parentheses, the And/or/not keywords, a check
    # containing a %(...)s substitution, and both quoting styles.
    exemplar = ("(( ( ((() And)) or ) (check:%(miss)s) not)) "
                "'a-string' \"another-string\"")
    expected = [
        ('(', '('), ('(', '('),
        ('(', '('),
        ('(', '('), ('(', '('), ('(', '('),
        (')', ')'),
        ('and', 'And'),
        (')', ')'), (')', ')'),
        ('or', 'or'),
        (')', ')'),
        ('(', '('),
        ('check', 'check:%(miss)s'),
        (')', ')'),
        ('not', 'not'),
        (')', ')'), (')', ')'),
        ('string', 'a-string'),
        ('string', 'another-string'),
    ]

    result = list(policy._parse_tokenize(exemplar))

    self.assertEqual(result, expected)