Example #1
0
 def test_iter_compat(self):
     u = Untokenizer()
     token = (NAME, 'Hello')
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
Example #2
0
 def test_iter_compat(self):
     u = Untokenizer()
     token = (NAME, 'Hello')
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
Example #3
0
 def test_iter_compat(self):
     u = Untokenizer()
     token = (NAME, 'Hello')
     tokens = [(ENCODING, 'utf-8'), token]
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter(tokens)), 'Hello ')
     self.assertEqual(u.encoding, 'utf-8')
     self.assertEqual(untokenize(iter(tokens)), b'Hello ')
Example #4
0
 def test_iter_compat(self):
     u = Untokenizer()
     token = (NAME, 'Hello')
     tokens = [(ENCODING, 'utf-8'), token]
     u.compat(token, iter([]))
     self.assertEqual(u.tokens, ["Hello "])
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter([token])), 'Hello ')
     u = Untokenizer()
     self.assertEqual(u.untokenize(iter(tokens)), 'Hello ')
     self.assertEqual(u.encoding, 'utf-8')
     self.assertEqual(untokenize(iter(tokens)), b'Hello ')
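For context, these examples are variants of the same unittest method exercising the standard-library tokenize module. The sketch below is one self-contained way to run the fullest variant (Example #4) on its own; the wrapper class name is hypothetical, and it assumes Untokenizer, untokenize, NAME, and ENCODING are imported from tokenize, which is where those names live.

 import unittest
 from tokenize import ENCODING, NAME, Untokenizer, untokenize

 # Hypothetical wrapper; the original examples show only the method body.
 class UntokenizeIterTest(unittest.TestCase):
     def test_iter_compat(self):
         u = Untokenizer()
         token = (NAME, 'Hello')
         tokens = [(ENCODING, 'utf-8'), token]
         # compat() accepts any iterable of tokens, including a bare iterator.
         u.compat(token, iter([]))
         self.assertEqual(u.tokens, ["Hello "])
         # Untokenizer.untokenize() also accepts an iterator of 2-tuples
         # and returns a string.
         u = Untokenizer()
         self.assertEqual(u.untokenize(iter([token])), 'Hello ')
         # An ENCODING token sets u.encoding but is not emitted in the output.
         u = Untokenizer()
         self.assertEqual(u.untokenize(iter(tokens)), 'Hello ')
         self.assertEqual(u.encoding, 'utf-8')
         # The module-level untokenize() encodes its result with that encoding.
         self.assertEqual(untokenize(iter(tokens)), b'Hello ')

 if __name__ == '__main__':
     unittest.main()

Running this with python -m unittest exercises the same assertions the examples above make.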