def test_skip_whitespace_comments(self):
    l = Lexer('''
        def foo  # this is a comment
        # another comment
        \t\t\t10
        ''')
    self._assert_toks(
        list(l.tokens()),
        ['DEF', 'IDENTIFIER', 'NUMBER', 'EOF'])
def parse_generator(self, buf):
    '''
    Given a string, yield one AST node per top-level construct found in it.
    '''
    self.token_generator = Lexer(buf).tokens()
    self.cur_tok = None
    self._get_next_token()

    while self.cur_tok.kind != TokenKind.EOF:
        self.top_return = False
        yield self._generate_toplevel()
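# Illustrative usage sketch (not from the original source). It assumes the
# enclosing class is named Parser and can be constructed without arguments;
# both the class name and the sample input are assumptions made only for
# this example.
#
#     p = Parser()
#     for node in p.parse_generator('def add(a b) a + b'):
#         print(node)   # one AST node per top-level construct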
def test_var_assignments(self):
    l = Lexer('10. 10 10.0 1b 10B')
    toks = list(l.tokens())

    pos = Position(l.buf, 1, 0)
    self.assertEqual(toks[0], Token(TokenKind.NUMBER, '10.', VarTypes.f64, pos))
    pos = Position(l.buf, 1, 4)
    self.assertEqual(toks[1], Token(TokenKind.NUMBER, '10', VarTypes.i32, pos))
    pos = Position(l.buf, 1, 7)
    self.assertEqual(toks[2], Token(TokenKind.NUMBER, '10.0', VarTypes.f64, pos))
    pos = Position(l.buf, 1, 12)
    self.assertEqual(toks[3], Token(TokenKind.NUMBER, '1', VarTypes.bool, pos))
    pos = Position(l.buf, 1, 15)
    self.assertEqual(toks[4], Token(TokenKind.NUMBER, '10', VarTypes.i8, pos))
def run(self):
    # Help is handled manually, so print usage and stop here when requested.
    if self.args.help:
        self.parser.print_help()
        return

    # Read the source file named on the command line.
    with open(sys.argv[1], "rt") as f:
        source = f.read()

    # Build the lexer and the evaluator over its output.
    lexer = Lexer(source)
    evaluator = Evaluator(lexer.tokens, lexer.keys_map)

    # Write the evaluator's result to the output file given by -o.
    with open(self.args.o, "w+") as f:
        f.write(evaluator.run())
def test_token_kinds(self):
    l = Lexer('10.1 def der extern foo var (')
    self._assert_toks(
        list(l.tokens()),
        ['NUMBER', 'DEF', 'IDENTIFIER', 'EXTERN', 'IDENTIFIER',
         'VAR', 'PUNCTUATOR', 'EOF'])

    l = Lexer('+- 1 2 22 22.4 a b2 C3d')
    self._assert_toks(
        list(l.tokens()),
        ['OPERATOR', 'OPERATOR', 'NUMBER', 'NUMBER', 'NUMBER',
         'NUMBER', 'IDENTIFIER', 'IDENTIFIER', 'IDENTIFIER', 'EOF'])
def test_lexer_simple_tokens_and_values(self):
    l = Lexer('a+1.')
    toks = list(l.tokens())

    pos = Position(l.buf, 1, 0)
    self.assertEqual(toks[0], Token(TokenKind.IDENTIFIER, 'a', None, pos))
    pos = Position(l.buf, 1, 1)
    self.assertEqual(toks[1], Token(TokenKind.OPERATOR, '+', None, pos))
    pos = Position(l.buf, 1, 2)
    self.assertEqual(toks[2], Token(TokenKind.NUMBER, '1.', VarTypes.f64, pos))
    pos = Position(l.buf, 1, 3)
    self.assertEqual(toks[3], Token(TokenKind.EOF, '', None, pos))

    l = Lexer('0.1519')
    toks = list(l.tokens())
    pos = Position(l.buf, 1, 0)
    self.assertEqual(toks[0], Token(TokenKind.NUMBER, '0.1519', VarTypes.f64, pos))
import sys
sys.path.append('../')

from core.lexer import Lexer

print("Shell for quick lexer tests.\nAuthor: Leonam Teixeira de Vasconcelos")

try:
    while True:
        comando = input('> ')
        print(comando)
        lexer = Lexer(comando)
        try:
            tokens = lexer.tokenizar()
            for token in tokens:
                print(token)
        except Exception as ex:
            print(ex)
except KeyboardInterrupt:
    print('\b\bBye')
except EOFError:
    print('Bye')
def test_string_assignment(self):
    l = Lexer('"Hello world"')
    toks = list(l.tokens())

    pos = Position(l.buf, 1, 1)
    self.assertEqual(
        toks[0],
        Token(TokenKind.STRING, 'Hello world', VarTypes.str, pos))
import argparse
import os

from core.lexer import Lexer  # import path as used in the project's test shell

parser = argparse.ArgumentParser(
    description="Lexer for the ANSI C language. Leonam Teixeira de Vasconcelos.",
    add_help=False)
parser.add_argument(
    'f',
    nargs='+',
    action='store',
    help='List of files on which the lexical analysis will be performed')
parser.add_argument(
    '-h', '--help',
    action='help',
    help='Show this message and exit.')
args = parser.parse_args()

if args.f:
    for file in args.f:
        if not os.path.isfile(file):
            print('File not found: ', file)
            continue
        print('Tokenizing file: ', file, '...')
        content = open(file, 'r').read()
        lex = Lexer(content)
        tokens = lex.tokenizar()
        for token in tokens:
            print(token)
else:
    print('You must provide the input files.')
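# Illustrative invocation (the script name "lexer_cli.py" is a hypothetical
# placeholder, not taken from the original source):
#
#     $ python lexer_cli.py program.c other.c   # tokenize each file in turn
#     $ python lexer_cli.py -h                  # show the help message and exit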