Example #1
    def test_tokenize(self, code: str, expected: Sequence[Token]) -> None:
        tokens = tuple(tokenize(code, _PY38))
        self.assertSequenceEqual(tokens, expected)
        for a, b in zip(tokens, tokens[1:]):
            # These must be the same object, so if whitespace gets consumed
            # (mutated) at the end of token a, it shows up at the beginning
            # of token b.
            self.assertIs(a.whitespace_after, b.whitespace_before)
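Note that test_tokenize receives code and expected as parameters, which suggests a parameterizing decorator (such as a data provider) supplies the cases; the excerpt does not show it. The zip(tokens, tokens[1:]) loop walks consecutive token pairs, and assertIs asserts object identity rather than equality: the whitespace state trailing one token must be the very same object leading the next.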
Example #2
    def test_error_dedent(self) -> None:
        with self.assertRaisesRegex(ParserSyntaxError,
                                    "Inconsistent indentation"):
            # Create some inconsistent indents to generate an ERROR_DEDENT token.
            tuple(tokenize("    a\n  b", _PY38))
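Here the first line is indented four spaces and the second only two, so the dedent matches no open indentation level; the tokenizer emits an ERROR_DEDENT token, which is reported as a ParserSyntaxError with the message "Inconsistent indentation".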
Example #3
    def test_errortoken(self) -> None:
        with self.assertRaisesRegex(ParserSyntaxError, "not a valid token"):
            # Use tuple() to read everything; the copyright symbol (U+00A9)
            # is not a valid token.
            tuple(tokenize("\u00a9", _PY38))
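All three tests lean on names defined elsewhere in the test module. A minimal sketch of the scaffolding they appear to assume follows; the import paths and the _PY38 constant are assumptions based on LibCST's parso-derived tokenizer and are not shown in the excerpts:

    from typing import Sequence
    from unittest import TestCase

    # Assumed import locations; the excerpts above do not show them.
    from libcst._exceptions import ParserSyntaxError
    from libcst._parser.parso.python.tokenize import Token, tokenize
    from libcst._parser.parso.utils import parse_version_string

    # Tokenization is pinned to a fixed grammar version throughout.
    _PY38 = parse_version_string("3.8")

    class TokenizeTest(TestCase):
        # test_tokenize, test_error_dedent, and test_errortoken
        # (shown above) would be methods of this class.
        ...

Because tokenize is a generator, errors such as the inconsistent-indentation ParserSyntaxError only surface as the output is consumed, which is why each test wraps the call in tuple().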