Example #1
    def test_literal_escape_big_u(self):
        """Test whether Lexer.lex_string() can tokenize interpreted string
        literal with a big u escape sequence."""

        end, lit = Lexer.lex_string('"\\U00000000"', 0)
        self.assertEqual(end, 12)
        self.assertEqual(lit, '\0')

        end, lit = Lexer.lex_string('"\\U0001ffff"', 0)
        self.assertEqual(end, 12)
        self.assertEqual(lit, py3_str(u'\U0001ffff'))

        tests = [
            '"\\U',
            '"\\U"        ',
            '"\\U0"       ',
            '"\\Ug"       ',
            '"\\U0g"      ',
            '"\\U00g"     ',
            '"\\U000g"    ',
            '"\\U000g"    ',
            '"\\U0000g"   ',
            '"\\U00000g"  ',
            '"\\U000000g" ',
            '"\\U0000000g"',
        ]

        for test in tests:
            with self.assertRaises(LexerError) as ctx:
                Lexer.lex_string(test, 0)
            self.assertEqual(ctx.exception.line, 1)
            self.assertEqual(ctx.exception.column, 2)
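
The error cases above all reject a \U escape that is not followed by exactly eight hexadecimal digits, and the two passing cases show those digits becoming a single code point. The helper below reproduces just that conversion in plain Python as an illustrative sketch; decode_big_u_escape and its regex are made up here and are not the Lexer's actual implementation.

import re

_BIG_U_RE = re.compile(r'\\U([0-9A-Fa-f]{8})')

def decode_big_u_escape(s, pos):
    """Decode a \\U escape whose backslash sits at s[pos].

    Returns (next_pos, char); raises ValueError if eight hexadecimal
    digits do not follow, mirroring the LexerError cases above.
    """
    match = _BIG_U_RE.match(s, pos)
    if match is None:
        raise ValueError('invalid \\U escape at index %d' % pos)
    return match.end(), chr(int(match.group(1), 16))

# In '"\U0001ffff"' the escape starts right after the opening quote.
assert decode_big_u_escape('"\\U0001ffff"', 1) == (11, '\U0001ffff')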
Example #2
    def test_literal_escape_hex(self):
        """Test whether Lexer.lex_string() can tokenize interpreted string
        literal with a hexadecimal escape sequence."""

        end, lit = Lexer.lex_string('"\\x00"', 0)
        self.assertEqual(end, 6)
        self.assertEqual(lit, '\0')

        end, lit = Lexer.lex_string('"\\xff"', 0)
        self.assertEqual(end, 6)
        self.assertEqual(lit, '\xff')

        tests = [
            '"\\x',
            '"\\x"  ',
            '"\\x0" ',
            '"\\xg" ',
            '"\\x0g"',
        ]

        for test in tests:
            with self.assertRaises(LexerError) as ctx:
                Lexer.lex_string(test, 0)
            self.assertEqual(ctx.exception.line, 1)
            self.assertEqual(ctx.exception.column, 2)
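
The hexadecimal escape follows the same pattern with exactly two hex digits, and '\xff' in the last assertion is the single character with code 0xFF. A minimal stand-alone version of that conversion, purely for illustration and not the Lexer's own code:

def decode_hex_escape(digits):
    """Turn the two hex digits of a \\x escape into one character."""
    if len(digits) != 2 or not all(c in '0123456789abcdefABCDEF' for c in digits):
        raise ValueError('\\x escape needs exactly two hexadecimal digits')
    return chr(int(digits, 16))

assert decode_hex_escape('00') == '\0'
assert decode_hex_escape('ff') == '\xff'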
Example #3
    def test_interpreted_string_literal(self):
        """Test whether Lexer.lex_string() can tokenize interpreted string
        literal."""

        end, lit = Lexer.lex_string('"a"', 0)
        self.assertEqual(end, 3)
        self.assertEqual(lit, 'a')

        end, lit = Lexer.lex_string('"n"', 0)
        self.assertEqual(end, 3)
        self.assertEqual(lit, 'n')

        with self.assertRaises(LexerError) as ctx:
            Lexer.lex_string('"\\', 0)
        self.assertEqual(ctx.exception.line, 1)
        self.assertEqual(ctx.exception.column, 2)
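
Besides the literal forms themselves, this example shows the calling convention the rest of the examples rely on: Lexer.lex_string(buf, offset) starts lexing at offset, returns the end offset together with the decoded literal, and raises LexerError with 1-based line and column attributes for malformed input. A hypothetical call site might look like the sketch below; only the Lexer.lex_string signature and the exception attributes come from the examples, the surrounding helper is invented.

# Assumed import -- these snippets do not show which module defines
# Lexer and LexerError, so the path has to be filled in by the reader:
# from <module under test> import Lexer, LexerError

def collect_string_literals(buf, offsets):
    """Lex a string literal at each offset, skipping malformed ones."""
    literals = []
    for offset in offsets:
        try:
            end, lit = Lexer.lex_string(buf, offset)
        except LexerError as err:
            # line and column are 1-based, as the assertions above show.
            print('bad literal at line %d, column %d' % (err.line, err.column))
            continue
        literals.append((end, lit))
    return literals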
Example #4
    def test_literal_escape_octal(self):
        """Test whether Lexer.lex_string() can tokenize interpreted string
        literal with an octal escape sequence."""

        end, lit = Lexer.lex_string('"\\000"', 0)
        self.assertEqual(end, 6)
        self.assertEqual(lit, '\0')

        end, lit = Lexer.lex_string('"\\377"', 0)
        self.assertEqual(end, 6)
        self.assertEqual(lit, '\377')

        tests = [
            '"\\0',
            '"\\0"  ',
            '"\\09" ',
            '"\\009"',
        ]

        for test in tests:
            with self.assertRaises(LexerError) as ctx:
                Lexer.lex_string(test, 0)
            self.assertEqual(ctx.exception.line, 1)
            self.assertEqual(ctx.exception.column, 2)
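
An octal escape consists of exactly three digits, each in the range 0-7, which is why '"\\09"' and '"\\009"' are rejected, and '\377' in the second assertion is the same character as '\xff' (code 255). A small illustrative decoder, not the Lexer's implementation:

def decode_octal_escape(digits):
    """Turn the three octal digits of a \\NNN escape into one character."""
    if len(digits) != 3 or not all(c in '01234567' for c in digits):
        raise ValueError('octal escape needs exactly three digits in 0-7')
    return chr(int(digits, 8))

assert decode_octal_escape('000') == '\0'
assert decode_octal_escape('377') == '\xff'  # 0o377 == 255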
Example #5
    def test_raw_string_lit(self):
        """Test whether Lexer.lex_string() can tokenize raw string literal."""

        end, lit = Lexer.lex_string('`a`', 0)
        self.assertEqual(end, 3)
        self.assertEqual(lit, 'a')

        end, lit = Lexer.lex_string('`a\nb`', 0)
        self.assertEqual(end, 5)
        self.assertEqual(lit, 'a\nb')

        end, lit = Lexer.lex_string('"a""b"', 3)
        self.assertEqual(end, 6)
        self.assertEqual(lit, 'b')

        with self.assertRaises(LexerError) as ctx:
            Lexer.lex_string('`a', 0)
        self.assertEqual(ctx.exception.line, 1)
        self.assertEqual(ctx.exception.column, 3)

        with self.assertRaises(LexerError) as ctx:
            Lexer.lex_string('"a\nb"', 0)
        self.assertEqual(ctx.exception.line, 1)
        self.assertEqual(ctx.exception.column, 3)
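
Two behaviours are pinned down here: a backquoted raw literal keeps its contents verbatim, newlines included, up to the closing backquote, while a bare newline inside a double-quoted literal is an error (the reported column 3 points at the newline). The third positive case also shows the start-offset argument, lexing the second literal of '"a""b"' from offset 3. The function below sketches only the raw-literal part, as an illustration rather than the Lexer's code:

def lex_raw_string(buf, pos):
    """Return (end, contents) for a backquoted raw literal at buf[pos].

    Everything up to the next backquote is kept as-is, newlines included;
    an unterminated literal is rejected.
    """
    if buf[pos] != '`':
        raise ValueError('not a raw string literal')
    end = buf.find('`', pos + 1)
    if end == -1:
        raise ValueError('unterminated raw string literal')
    return end + 1, buf[pos + 1:end]

assert lex_raw_string('`a\nb`', 0) == (5, 'a\nb')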
Example #6
    def test_literal_escape_char(self):
        """Test whether Lexer.lex_string() can tokenize interpreted string
        literal with an escaped character."""

        end, lit = Lexer.lex_string('"\\a"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\a')

        end, lit = Lexer.lex_string('"\\b"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\b')

        end, lit = Lexer.lex_string('"\\f"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\f')

        end, lit = Lexer.lex_string('"\\n"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\n')

        end, lit = Lexer.lex_string('"\\r"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\r')

        end, lit = Lexer.lex_string('"\\t"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\t')

        end, lit = Lexer.lex_string('"\\v"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\v')

        end, lit = Lexer.lex_string('"\\\\"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\\')

        end, lit = Lexer.lex_string('"\\\'"', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\'')

        end, lit = Lexer.lex_string('"\\\""', 0)
        self.assertEqual(end, 4)
        self.assertEqual(lit, '\"')

        with self.assertRaises(LexerError) as ctx:
            Lexer.lex_string('"\\?"', 0)
        self.assertEqual(ctx.exception.line, 1)
        self.assertEqual(ctx.exception.column, 2)
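
The single-character escapes exercised here form a small fixed table, and anything else after the backslash (such as the '?' in the final case) is rejected. The dict below sketches that mapping; it is illustrative and not the Lexer's own data structure.

SIMPLE_ESCAPES = {
    'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r',
    't': '\t', 'v': '\v', '\\': '\\', "'": "'", '"': '"',
}

def decode_simple_escape(ch):
    """Map the character following a backslash to its escaped value."""
    try:
        return SIMPLE_ESCAPES[ch]
    except KeyError:
        raise ValueError('unknown escape character: %r' % ch)

assert decode_simple_escape('n') == '\n'
assert decode_simple_escape('"') == '"'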