Example #1
 def test_iter_compat(self):
     # Untokenizer.compat() and untokenize() should accept an iterator
     # of tokens, not only a list.
     u = Untokenizer()
     token = (NAME, 'Hello')
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
Example #2
 def test_iter_compat(self):
     # Same checks as Example #1, plus an ENCODING token: the
     # Untokenizer records the encoding, and the module-level
     # untokenize() uses it to return encoded bytes.
     u = Untokenizer()
     token = (NAME, 'Hello')
     tokens = [(ENCODING, 'utf-8'), token]
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter(tokens)), 'Hello ')
     self.assertEqual(u.encoding, 'utf-8')
     self.assertEqual(untokenize(iter(tokens)), b'Hello ')
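These method bodies are not runnable on their own: they assume that Untokenizer, untokenize, NAME, and ENCODING come from the standard-library tokenize module and that the method lives inside a unittest.TestCase subclass. Below is a minimal self-contained sketch under those assumptions; the class name UntokenizeIterTest is chosen here only for illustration, and Untokenizer with its compat() method is an internal detail of tokenize rather than documented API.

import unittest
from tokenize import ENCODING, NAME, Untokenizer, untokenize


class UntokenizeIterTest(unittest.TestCase):
    # Hypothetical wrapper class; the examples above show only the method body.
    def test_iter_compat(self):
        u = Untokenizer()
        token = (NAME, 'Hello')
        tokens = [(ENCODING, 'utf-8'), token]
        # compat() consumes an iterator of tokens after the first token.
        u.compat(token, iter([]))
        self.assertEqual(u.tokens, ["Hello "])
        u = Untokenizer()
        self.assertEqual(u.untokenize(iter([token])), 'Hello ')
        u = Untokenizer()
        # With an ENCODING token first, the encoding is recorded and the
        # module-level untokenize() returns encoded bytes.
        self.assertEqual(u.untokenize(iter(tokens)), 'Hello ')
        self.assertEqual(u.encoding, 'utf-8')
        self.assertEqual(untokenize(iter(tokens)), b'Hello ')


if __name__ == '__main__':
    unittest.main()

Running the file directly (or via python -m unittest) should pass on CPython 3.x, where untokenize() returns bytes whenever the token stream begins with an ENCODING token.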