def test_normalize(self):
    """Base._normalize(): lowercases and resolves backslash escapes.

    Checks both the bound call (via a ``Base`` instance) and the
    static/unbound call (via the class) return the same result.
    """
    b = Base()
    tests = {
        # case folding, including non-ASCII letters
        'abcdefg ABCDEFG äöü߀ AÖÜ': 'abcdefg abcdefg äöü߀ aöü',
        # backslash escapes: \g -> g, \G -> g, \\ kept, trailing "\ " kept
        r'\ga\Ga\\\ ': r'gaga\ ',
        # digits pass through unchanged
        r'0123456789': '0123456789',
        # unicode escape seqs should have been done by
        # the tokenizer...
    }
    # Iterate the items view directly — the dict is not mutated in the
    # loop, so wrapping it in list() was a needless materialization.
    for test, exp in tests.items():
        self.assertEqual(b._normalize(test), exp)
        # static too
        self.assertEqual(Base._normalize(test), exp)
class BaseTestCase(basetest.BaseTestCase): def test_normalize(self): "Base._normalize()" b = Base() tests = {u'abcdefg ABCDEFG äöü߀ AÖÜ': u'abcdefg abcdefg äöü߀ aöü', ur'\ga\Ga\\\ ': ur'gaga\ ', ur'0123456789': u'0123456789', # unicode escape seqs should have been done by # the tokenizer... } for test, exp in tests.items(): self.assertEqual(b._normalize(test), exp) # static too self.assertEqual(Base._normalize(test), exp) def test_tokenupto(self): "Base._tokensupto2()" # tests nested blocks of {} [] or () b = Base() tests = [ ('default', u'a[{1}]({2}) { } NOT', u'a[{1}]({2}) { }', False), ('default', u'a[{1}]({2}) { } NOT', u'a[{1}]func({2}) { }', True), ('blockstartonly', u'a[{1}]({2}) { NOT', u'a[{1}]({2}) {', False), ('blockstartonly', u'a[{1}]({2}) { NOT', u'a[{1}]func({2}) {', True), ('propertynameendonly', u'a[(2)1] { }2 : a;', u'a[(2)1] { }2 :', False), ('propertynameendonly', u'a[(2)1] { }2 : a;', u'a[func(2)1] { }2 :', True), ('propertyvalueendonly', u'a{;{;}[;](;)}[;{;}[;](;)](;{;}[;](;)) 1; NOT',