Example #1
def verify(self,
           src: str,
           kind: TokenType,
           value: TokenValue = None,
           nonl: bool = True):
    # Tokenize the whole source and assert it yields exactly one token
    # of the expected kind and value, followed by the end-of-input token.
    tk = Tokenizer(src, '<test>')
    token = tk.next(ignore_nl=nonl)
    self.assertEqual(kind, token.kind, 'wrong token kind: %s' % token)
    self.assertEqual(value, token.value, 'wrong token value: %s' % token)
    eof = tk.next()
    self.assertEqual(TokenType.End, eof.kind, 'should be EOF: %s' % eof)
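A call to this helper might look like the sketch below; the inputs and expected tokens are illustrative assumptions, since the snippet does not include the tests that use it.

# Hypothetical invocations; only TokenType members that appear
# elsewhere in these examples are used.
self.verify('foo', TokenType.Name, 'foo')        # a single identifier
self.verify('...', TokenType.Operator, '...')    # a single operator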
Example #2
def _run_test(self, src, seq):
    # Compare the token stream against a sequence of expected
    # (kind, value, row, col, srow, scol) tuples, also checking the
    # tokenizer's saved and current positions after every token.
    tk = Tokenizer(src, '<test>')
    while seq and not tk.is_eof:
        token = tk.next()
        kind, value, row, col, srow, scol = seq[0]
        self.assertEqual(kind, token.kind, repr(seq[0]))
        self.assertEqual(value, token.value, repr(seq[0]))
        self.assertEqual(row, tk.save.row, repr(seq[0]))
        self.assertEqual(col, tk.save.col, repr(seq[0]))
        self.assertEqual(srow, tk.state.row, repr(seq[0]))
        self.assertEqual(scol, tk.state.col, repr(seq[0]))
        self.assertEqual(row, token.row, repr(seq[0]))
        self.assertEqual(col, token.col, repr(seq[0]))
        seq = seq[1:]
    self.assertTrue(not seq, 'tokenizer hit EOF with expectations left: %r' % (seq,))
    self.assertTrue(tk.is_eof, 'tokens remain after the expected sequence')
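Each entry in seq pairs a token's kind and value with the tokenizer's saved and current positions. A call could look like this sketch, where the row/column numbers are assumptions about the tokenizer's counting conventions:

# Hypothetical expected sequence for the two-token source 'a b';
# the position fields are illustrative, not verified values.
self._run_test('a b', [
    (TokenType.Name, 'a', 1, 1, 1, 2),
    (TokenType.Name, 'b', 1, 3, 1, 4),
])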
Example #3
def test_line_number(self):
    # Parse a Go fragment whose import block contains blank lines,
    # exercising the tokenizer's line-number tracking.
    src = r"""package parser

import (
    `io`
    `net/url`
    `reflect`

    `code.example.org/chenzhuoyu/infra-kernels/utils`

    _ `git.example.org/ee/people/infra/gateway/biz/dispatch/service`
)
"""
    print(Parser(Tokenizer(src, 'test.go')).parse())
Example #4
def test_ellipsis(self):
    # 'a.....' should tokenize greedily: the five dots split into the
    # longest operator '...' followed by two single '.' operators.
    tk = Tokenizer('a.....', '<test>')
    token = tk.next()
    self.assertEqual(TokenType.Name, token.kind)
    self.assertEqual('a', token.value)
    token = tk.next()
    self.assertEqual(TokenType.Operator, token.kind)
    self.assertEqual('...', token.value)
    token = tk.next()
    self.assertEqual(TokenType.Operator, token.kind)
    self.assertEqual('.', token.value)
    token = tk.next()
    self.assertEqual(TokenType.Operator, token.kind)
    self.assertEqual('.', token.value)
    # The tokenizer apparently synthesizes a final LF before end-of-input.
    token = tk.next()
    self.assertEqual(TokenType.LF, token.kind)
    token = tk.next()
    self.assertEqual(TokenType.End, token.kind)
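The same expectations can be written as a table-driven loop; this is a minimal sketch assuming the same Tokenizer API as above:

def test_ellipsis_compact(self):
    # Same assertions as test_ellipsis, driven by an expected-token table.
    expected = [
        (TokenType.Name, 'a'),
        (TokenType.Operator, '...'),
        (TokenType.Operator, '.'),
        (TokenType.Operator, '.'),
        (TokenType.LF, None),
        (TokenType.End, None),
    ]
    tk = Tokenizer('a.....', '<test>')
    for kind, value in expected:
        token = tk.next()
        self.assertEqual(kind, token.kind)
        if value is not None:    # LF/End values are not asserted above either
            self.assertEqual(value, token.value)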
Example #5
def verify(self,
           src: str,
           kind: TokenType,
           value: TokenValue = None,
           nonl: bool = True):
    tk = Tokenizer(src, '<test>')
    token = tk.next()
    # Skip leading comments (and newlines, when nonl is set); a comment
    # that spans a line break counts as an end-of-line when newlines matter.
    while token.kind == TokenType.Comments or (nonl and token.kind == TokenType.LF):
        if token.kind == TokenType.Comments and not nonl and '\n' in token.value:
            token = Token.eol(tk)
            break
        else:
            token = tk.next()
    self.assertEqual(kind, token.kind, 'wrong token kind: %s' % token)
    self.assertEqual(value, token.value, 'wrong token value: %s' % token)
    # Everything after the verified token should be comments or newlines up to EOF.
    token = tk.next()
    while token.kind in (TokenType.Comments, TokenType.LF):
        token = tk.next()
    self.assertEqual(TokenType.End, token.kind, 'should be EOF: %s' % token)
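Unlike the helper in Example #1, this variant tolerates comments around the token under test. Assuming the tokenizer accepts Go-style comments, a call such as the following should still verify the identifier:

# Hypothetical: surrounding comments are skipped, not reported as errors.
self.verify('/* leading */ foo  // trailing', TokenType.Name, 'foo')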
Example #6
def invalid(self, src: str, exc: str):
    # Invalid input should raise a SyntaxError whose message matches
    # the given regular expression on the very first next() call.
    self.assertRaisesRegex(SyntaxError, exc,
                           lambda: Tokenizer(src, '<test>').next())
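Usage would presumably pair an ill-formed source with a regex matching the tokenizer's error message; both the input and the pattern below are assumptions:

# Hypothetical: an unterminated string literal should fail to tokenize.
self.invalid('"unterminated', 'string')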
Example #7
def test_const(self):
    print(Parser(Tokenizer(_const_src, 'test.go')).parse())
Example #8
def test_import(self):
    Parser(Tokenizer(_import_src, 'test.go')).parse()
Example #9
def main():
    with open(FNAME, 'r', newline=None) as fp:
        Parser(Tokenizer(fp.read(), FNAME)).parse()
Example #10
def main():
    # fp.read() pulls the whole file into memory, so the tokenizer can
    # keep producing tokens after the file handle is closed.
    with open(FNAME, 'r', newline=None) as fp:
        tk = Tokenizer(fp.read(), FNAME)
    while not tk.is_eof:
        print(tk.next())
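Because the source is read eagerly, the file can be closed before tokenization begins, as the snippet above relies on. A variant that collects the tokens instead of printing them, under the same assumed API, might read:

def tokens(fname):
    # Read the file eagerly, then drain the tokenizer into a list.
    with open(fname, 'r', newline=None) as fp:
        tk = Tokenizer(fp.read(), fname)
    out = []
    while not tk.is_eof:
        out.append(tk.next())
    return out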