# NOTE: import paths are assumed here; adjust them to the project's layout.
from den.lexer import DenLexer
from den.parser import DenParser


def test_id():
    lexer = DenLexer()
    data = "hellotest testty test test"
    tokens = list(lexer.tokenize(data))
    assert len(tokens) == 4
    # Every whitespace-separated word should lex to a single ID token.
    for token, content in zip(tokens, data.split(" ")):
        assert token.type == "ID"
        assert token.value == content

def test_error():
    lexer = DenLexer()
    data = "~"
    # "~" is not a valid token: the lexer should either skip it
    # (yielding no tokens) or raise a SyntaxError.
    try:
        tokens = list(lexer.tokenize(data))
        assert len(tokens) == 0
    except SyntaxError:
        pass

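# A stricter variant of test_error, shown only as a sketch: if DenLexer is
# known to raise SyntaxError on illegal characters (rather than silently
# skipping them, which the try/except above deliberately allows),
# pytest.raises makes the expectation explicit:
#
#     import pytest
#
#     def test_error_strict():
#         lexer = DenLexer()
#         with pytest.raises(SyntaxError):
#             list(lexer.tokenize("~"))
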
def test_parser_1():
    lexer = DenLexer()
    parser = DenParser()
    text = """
    int add() => {
    }
    """
    result = parser.parse(lexer.tokenize(text))
    assert result.block.statements[0].return_type.name == "int"
    assert result.block.statements[0].name.name == "add"

def test_parser_2():
    lexer = DenLexer()
    parser = DenParser()
    text = """
    int add() => {
        int: x = -10+10*10-(20%213)-10/10;
        int: y = x;
    }
    """
    result = parser.parse(lexer.tokenize(text))
    assert result.block.statements[0].return_type.name == "int"
    assert result.block.statements[0].name.name == "add"

def test_literal():
    lexer = DenLexer()
    # Single-character literals lex to tokens whose type is the character itself.
    for literal in "{}():,=+-*/&":
        assert list(lexer.tokenize(literal))[0].type == literal

def test_newline():
    lexer = DenLexer()
    data = "\n\n\n\n"
    tokens = list(lexer.tokenize(data))
    # Newlines should be consumed without producing any tokens.
    assert len(tokens) == 0

def test_comment():
    lexer = DenLexer()
    data = "# I should be ignored"
    # Comments are discarded entirely by the lexer.
    assert len(list(lexer.tokenize(data))) == 0

def test_fat_arrow():
    lexer = DenLexer()
    data = "=>"
    assert list(lexer.tokenize(data))[0].type == "FAT_ARROW"

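# Every test above constructs its own DenLexer. A shared pytest fixture is one
# way to factor that out; this is a refactoring sketch, not part of the
# original suite (the fixture name `den_lexer` is illustrative).
import pytest


@pytest.fixture
def den_lexer():
    return DenLexer()


def test_fat_arrow_with_fixture(den_lexer):
    # Same assertion as test_fat_arrow, written against the shared fixture.
    assert list(den_lexer.tokenize("=>"))[0].type == "FAT_ARROW"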