Example #1
    def test_lex_non_trivial(self):
        lexer = ninja.Lexer(['$name'])
        with self.assertRaises(ninja.ParseError):
            lexer.lex()
        lexer = ninja.Lexer(['${name}'])
        with self.assertRaises(ninja.ParseError):
            lexer.lex()
Example #2
    def test_lex_str_escape_char_bad(self):
        lexer = ninja.Lexer(['$'])
        with self.assertRaises(ninja.ParseError):
            lexer.lex_string()

        lexer = ninja.Lexer(['$%'])
        with self.assertRaises(ninja.ParseError):
            lexer.lex_string()
Example #3
    def test_peek_skip_empty_line(self):
        lexer = ninja.Lexer([' \n'])
        tok = lexer.peek()
        self.assertEqual(ninja.TK.NEWLINE, tok.kind)

        lexer = ninja.Lexer(['\t\n'])
        tok = lexer.peek()
        self.assertEqual(ninja.TK.NEWLINE, tok.kind)

        lexer = ninja.Lexer([' \t\n'])
        tok = lexer.peek()
        self.assertEqual(ninja.TK.NEWLINE, tok.kind)
Example #4
    def test_lex_skip_space_newline_escape(self):
        lexer = ninja.Lexer(['build $\n', ' \texample'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(2, tok.line)
        self.assertEqual(3, tok.column)

        lexer = ninja.Lexer(['build $\n', 'example'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(2, tok.line)
        self.assertEqual(1, tok.column)

        lexer = ninja.Lexer(['build a:$\n', 'example'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(7, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.COLON, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(8, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(2, tok.line)
        self.assertEqual(1, tok.column)

        # Multiple newline escapes.
        lexer = ninja.Lexer(['build $\n', '$\n', '$\n', 'example'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(4, tok.line)
        self.assertEqual(1, tok.column)
Example #5
    def test_lex_path_var(self):
        lexer = ninja.Lexer(['$a'])
        tok = lexer.lex_path()
        self.assertIs(type(tok.value), ninja.EvalString)
        self.assertEqual(('v', 'a',), tok.value)

        lexer = ninja.Lexer(['${a}'])
        tok = lexer.lex_path()
        self.assertIs(type(tok.value), ninja.EvalString)
        self.assertEqual(('v', 'a',), tok.value)

        lexer = ninja.Lexer(['path/${a}'])
        tok = lexer.lex_path()
        self.assertIs(type(tok.value), ninja.EvalString)
        self.assertEqual(('tv', 'path/', 'a'), tok.value)
Example #6
    def test_peek_space_after_newline(self):
        lexer = ninja.Lexer(['a b\n', ' c'])

        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)

        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(3, tok.column)

        tok = lexer.lex()
        self.assertEqual(ninja.TK.NEWLINE, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(4, tok.column)

        # A space token must be emitted.
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)
        self.assertEqual(2, tok.line)
        self.assertEqual(1, tok.column)

        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(2, tok.line)
        self.assertEqual(2, tok.column)
Example #7
    def test_lex_str_char(self):
        lexer = ninja.Lexer(['string with spaces'])
        tok = lexer.lex_string()
        self.assertEqual(ninja.TK.STRING, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        self.assertEqual(('t', 'string with spaces'), tok.value)
Example #8
    def test_lex_str_escape_char(self):
        for char in ' \t$:':
            lexer = ninja.Lexer(['$' + char])
            tok = lexer.lex_string()
            self.assertEqual(ninja.TK.STRING, tok.kind)
            self.assertEqual(1, tok.line)
            self.assertEqual(1, tok.column)
            self.assertEqual(('t', char), tok.value)
Example #9
    def test_lex_path_end_char(self):
        for char in ' \t\n:|':
            lexer = ninja.Lexer(['path' + char])
            tok = lexer.lex_path()
            self.assertEqual(ninja.TK.PATH, tok.kind)
            self.assertEqual(1, tok.line)
            self.assertEqual(1, tok.column)
            self.assertEqual(('t', 'path'), tok.value)
Example #10
    def test_peek_space(self):
        lexer = ninja.Lexer([' a'])

        tok = lexer.peek()
        self.assertEqual(ninja.TK.SPACE, tok.kind)
        tok = lexer.peek()  # Again
        self.assertEqual(ninja.TK.SPACE, tok.kind)  # Not changed

        tok = lexer.lex()  # Consume
        self.assertEqual(ninja.TK.SPACE, tok.kind)  # Not changed
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
Example #11
    def test_lex_skip_space(self):
        lexer = ninja.Lexer(['a b'])

        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)

        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(3, tok.column)
Example #12
    def test_lex_space(self):
        lexer = ninja.Lexer([' '])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)

        lexer = ninja.Lexer(['\t'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)

        lexer = ninja.Lexer(['\t '])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)

        lexer = ninja.Lexer([' \t'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)

        lexer = ninja.Lexer([' a'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.SPACE, tok.kind)
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
Example #13
    def test_lex_path_char(self):
        lexer = ninja.Lexer(['path1 path2'])

        tok = lexer.lex_path()
        self.assertEqual(ninja.TK.PATH, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(1, tok.column)
        self.assertEqual(('t', 'path1'), tok.value)

        tok = lexer.lex_path()
        self.assertEqual(ninja.TK.PATH, tok.kind)
        self.assertEqual(1, tok.line)
        self.assertEqual(7, tok.column)
        self.assertEqual(('t', 'path2'), tok.value)
Example #14
    def test_lex_ident(self):
        lexer = ninja.Lexer(['abcdefghijklmnopqrstuvwxyz'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)

        lexer = ninja.Lexer(['ABCDEFGHIJKLMNOPQRSTUVWXYZ'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)

        lexer = ninja.Lexer(['0123456789'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)

        lexer = ninja.Lexer(['.'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)

        lexer = ninja.Lexer(['-'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)

        lexer = ninja.Lexer(['_'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.IDENT, tok.kind)
Example #15
    def test_lex_assign(self):
        lexer = ninja.Lexer(['='])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.ASSIGN, tok.kind)
Example #16
    def test_lex_match(self):
        lexer = ninja.Lexer(['ident'])
        with self.assertRaises(ninja.ParseError):
            lexer.lex_match({ninja.TK.PIPE})
Example #17
    def test_peek_newline(self):
        lexer = ninja.Lexer(['\n'])
        tok = lexer.peek()
        self.assertEqual(ninja.TK.NEWLINE, tok.kind)
Example #18
    def test_lex_pipe2(self):
        lexer = ninja.Lexer(['||'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.PIPE2, tok.kind)
Example #19
    def test_lex_colon(self):
        lexer = ninja.Lexer([':'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.COLON, tok.kind)
Example #20
    def test_peek_skip_comment(self):
        lexer = ninja.Lexer(['#comment'])
        tok = lexer.peek()
        self.assertEqual(ninja.TK.EOF, tok.kind)
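
All of the examples above are methods of a unittest.TestCase subclass, so running any of them requires a test-class scaffold and an import of the ninja module under test. A minimal sketch is shown below, assuming ninja is a local module that provides Lexer, TK, ParseError, and EvalString; the class name LexerTest and the single pasted-in method are illustrative assumptions, not part of the original examples.

import unittest

import ninja  # assumed: local module providing Lexer, TK, ParseError, EvalString


class LexerTest(unittest.TestCase):  # hypothetical container for the methods above
    # Paste any of the example methods here, e.g.:
    def test_lex_colon(self):
        lexer = ninja.Lexer([':'])
        tok = lexer.lex()
        self.assertEqual(ninja.TK.COLON, tok.kind)


if __name__ == '__main__':
    unittest.main()

With the methods collected this way, the whole suite can be run with python -m unittest from the directory that contains the ninja module.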