Example #1
    def test_op_or(self):
        lexer = Lexer('||')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_OR, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #2
    def test_op_le(self):
        lexer = Lexer('<=')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_LE, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #3
    def test_op_and(self):
        lexer = Lexer('&&')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_AND, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
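Examples #1 to #3 all exercise two-character operators. A common way to lex these is maximal munch: try the two-character spelling first, then fall back to a single character. A minimal sketch under that assumption (the tables and match_operator are illustrative, not the Lexer's internals):

# Illustrative maximal-munch tables; not the Lexer's actual internals.
TWO_CHAR_OPS = {'||': 'OP_OR', '&&': 'OP_AND', '<=': 'OP_LE'}
ONE_CHAR_OPS = {'>': 'OP_GT', '%': 'OP_MOD', '-': 'OP_MINUS'}

def match_operator(src, i):
    """Prefer the longer operator spelling at position i."""
    if src[i:i + 2] in TWO_CHAR_OPS:
        return TWO_CHAR_OPS[src[i:i + 2]], i + 2
    if src[i] in ONE_CHAR_OPS:
        return ONE_CHAR_OPS[src[i]], i + 1
    return None, i

Because '>>' has no entry in the two-character table, it falls back to two consecutive OP_GT tokens, which is exactly what Example #22 below asserts.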
Example #4
    def test_op_mod(self):
        lexer = Lexer('%')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_MOD, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #5
    def test_op_pov(self):
        lexer = Lexer('^')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_POV, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #6
    def test_op_minus(self):
        lexer = Lexer('-')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_MINUS, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #7
    def test_hex_with_many_signs_constant(self):
        self.assertEqual(
            Lexer("-+-+-+-+-+-+-+-0Xe4Ae2").check_tokens(),
            "<MINUS><PLUS><MINUS><PLUS><MINUS>" +
            "<PLUS><MINUS><PLUS><MINUS><PLUS>" +
            "<MINUS><PLUS><MINUS><PLUS><MINUS>" + "<CONSTANT=0Xe4Ae2>\n",
        )
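The expected string above implies that each sign lexes as its own token while the 0X… literal is consumed whole. A hypothetical regex rendering of that behaviour, independent of the Lexer under test:

import re

# Hypothetical sketch, not the Lexer's implementation: signs are single
# tokens and a hex constant such as 0Xe4Ae2 is matched in one piece.
TOKEN_RE = re.compile(r'(?P<CONSTANT>0[xX][0-9a-fA-F]+)|(?P<MINUS>-)|(?P<PLUS>\+)')

def render_tokens(source):
    parts = []
    for m in TOKEN_RE.finditer(source):
        kind = m.lastgroup
        parts.append('<%s=%s>' % (kind, m.group()) if kind == 'CONSTANT'
                     else '<%s>' % kind)
    return ''.join(parts) + '\n'

# render_tokens('-+-0Xe4Ae2') == '<MINUS><PLUS><MINUS><CONSTANT=0Xe4Ae2>\n'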
Example #8
    def test_parsing_infix_expressions(self):
        tests = [
            ["5 + 5;", 5, "+", 5],
            ["5 - 5;", 5, "-", 5],
            ["5 * 5;", 5, "*", 5],
            ["5 / 5;", 5, "/", 5],
            ["5 > 5;", 5, ">", 5],
            ["5 < 5;", 5, "<", 5],
            ["5 == 5;", 5, "==", 5],
            ["5 != 5;", 5, "!=", 5],
            ["foobar + barfoo;", "foobar", "+", "barfoo"],
            ["foobar - barfoo;", "foobar", "-", "barfoo"],
            ["foobar * barfoo;", "foobar", "*", "barfoo"],
            ["foobar / barfoo;", "foobar", "/", "barfoo"],
            ["foobar > barfoo;", "foobar", ">", "barfoo"],
            ["foobar < barfoo;", "foobar", "<", "barfoo"],
            ["foobar == barfoo;", "foobar", "==", "barfoo"],
            ["foobar != barfoo;", "foobar", "!=", "barfoo"],
            ["true == true", True, "==", True],
            ["true != false", True, "!=", False],
            ["false == false", False, "==", False],
        ]

        for tt in tests:
            lexer = Lexer(tt[0])
            parser = Parser(lexer)
            program = parser.parse_program()

            self.assert_parser_errors(parser)
            self.assertEqual(1, len(program.statements))

            exp = program.statements[0].expression

            self.assert_infix_expression(tt[1], tt[2], tt[3], exp)
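assert_parser_errors and assert_infix_expression are shared helpers that this section does not show. For orientation, a hypothetical assert_infix_expression consistent with the call above; the attribute names (.left, .operator, .right, and a .value on each operand node) are assumptions, not the project's actual AST API:

def assert_infix_expression(self, left, operator, right, exp):
    # Hypothetical helper; attribute names are assumed, not confirmed
    # by this section.
    self.assertEqual(left, exp.left.value)
    self.assertEqual(operator, exp.operator)
    self.assertEqual(right, exp.right.value)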
Example #9
    def test_assigns_correct_line_number_to_token(self):
        lut = Lexer("""12345
        54321
        /*
        long
        comment
        */
        12345
        """)
        integer = lut.get_next_token()
        self.assertEqual(integer.line_number, 1)

        eol = lut.get_next_token()
        self.assertEqual(eol.line_number, 1)

        integer2 = lut.get_next_token()
        self.assertEqual(integer2.line_number, 2)

        eol = lut.get_next_token()
        self.assertEqual(eol.line_number, 2)

        comment = lut.get_next_token()
        self.assertEqual(comment.line_number, 6)

        eol = lut.get_next_token()
        self.assertEqual(eol.line_number, 6)

        integer3 = lut.get_next_token()
        self.assertEqual(integer3.line_number, 7)
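These expectations say a token carries the line it is emitted on, so the lexer has to count newlines even while inside a multi-line comment. A minimal sketch of that bookkeeping, assuming a character-at-a-time lexer (LineCounter is an illustrative name, not the Lexer's API):

class LineCounter:
    """Illustrative newline bookkeeping, not the Lexer's actual API."""
    def __init__(self, source):
        self.source = source
        self.pos = 0
        self.line_number = 1

    def advance(self):
        ch = self.source[self.pos]
        self.pos += 1
        if ch == '\n':
            self.line_number += 1   # counts newlines inside comments too
        return ch

Under that scheme the comment token above can report line 6: by the time the closing */ has been consumed, the three newlines inside the comment have already been counted.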
Example #10
    def test_single_comment(self):
        lexer = Lexer('+ // test + test - test')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_PLUS, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #11
    def test_continues_after_reserved_word(self):
        lut = Lexer('FUNCTION main')

        lut.get_next_token()
        result = lut.get_next_token()

        self.assertEqual(result.type, TOKEN_MAIN)
Example #12
    def test_op_plus(self):
        lexer = Lexer('+')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_PLUS, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #13
    def test_void_type(self):
        lexer = Lexer('void')

        lexer.lex_all()

        self.assertEqual(TokenType.PRIMITIVE_VOID, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #14
    def test_to_stdout(self):
        lexer = Lexer('-->')

        lexer.lex_all()

        self.assertEqual(TokenType.KW_TO_STDOUT, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #15
    def test_add_token_rollback(self):
        lexer = Lexer('123')
        lexer.offset = 4

        lexer.add_token(TokenType.OP_PLUS, rollback=True)

        self.assertEqual(lexer.offset, 3)
Example #16
    def test_helper(self):
        lexer = Lexer('>include')

        lexer.lex_all()

        self.assertEqual(TokenType.HELPER_INCLUDE, lexer.tokens[0].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #17
    def test_file_name_is_set(self):
        lexer = Lexer('2 + 2', 'main.f12')

        lexer.lex_all()

        for t in lexer.tokens:
            self.assertEqual('main.f12', t.file_name)
Example #18
    def test_lit_float(self):
        lexer = Lexer(
            '123 12. 12.45 78.80E5 789.4e7 852.78E+50 369.78e-789 .789 .789E-70'
        )

        lexer.lex_all()

        self.assertEqual(TokenType.LIT_INT, lexer.tokens[0].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[1].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[2].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[3].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[4].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[5].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[6].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[7].type)
        self.assertEqual(TokenType.LIT_FLOAT, lexer.tokens[8].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[9].type)

        self.assertEqual('123', lexer.tokens[0].value)
        self.assertEqual('12.', lexer.tokens[1].value)
        self.assertEqual('12.45', lexer.tokens[2].value)
        self.assertEqual('78.80E5', lexer.tokens[3].value)
        self.assertEqual('789.4e7', lexer.tokens[4].value)
        self.assertEqual('852.78E+50', lexer.tokens[5].value)
        self.assertEqual('369.78e-789', lexer.tokens[6].value)
        self.assertEqual('.789', lexer.tokens[7].value)
        self.assertEqual('.789E-70', lexer.tokens[8].value)
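Taken together, the assertions accept three float shapes: digits with a trailing dot (12.), a full decimal (12.45), and a leading-dot fraction (.789), each optionally followed by a signed exponent, while a bare integer stays LIT_INT. A hypothetical regex capturing exactly those shapes (a sketch, not the Lexer's grammar):

import re

# Sketch of the float shapes exercised above; not the Lexer's grammar.
FLOAT_RE = re.compile(r'(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?\Z')

assert FLOAT_RE.match('12.')
assert FLOAT_RE.match('852.78E+50')
assert FLOAT_RE.match('.789E-70')
assert not FLOAT_RE.match('123')   # stays LIT_INT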
Example #19
    def test_i_fun_declaration(self):
        lexer = Lexer('fun add(int x[], int y) => int {\nret x[0] + y;\n}')

        lexer.lex_all()

        self.assertEqual(TokenType.KW_FUN, lexer.tokens[0].type)
        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[1].type)
        self.assertEqual(TokenType.C_ROUND_L, lexer.tokens[2].type)
        self.assertEqual(TokenType.PRIMITIVE_INT, lexer.tokens[3].type)
        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[4].type)
        self.assertEqual(TokenType.C_SQUARE_L, lexer.tokens[5].type)
        self.assertEqual(TokenType.C_SQUARE_R, lexer.tokens[6].type)
        self.assertEqual(TokenType.C_COMMA, lexer.tokens[7].type)
        self.assertEqual(TokenType.PRIMITIVE_INT, lexer.tokens[8].type)
        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[9].type)
        self.assertEqual(TokenType.C_ROUND_R, lexer.tokens[10].type)
        self.assertEqual(TokenType.KW_FAT_ARROW, lexer.tokens[11].type)
        self.assertEqual(TokenType.PRIMITIVE_INT, lexer.tokens[12].type)
        self.assertEqual(TokenType.C_CURLY_L, lexer.tokens[13].type)
        self.assertEqual(TokenType.KW_RETURN, lexer.tokens[14].type)
        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[15].type)
        self.assertEqual(TokenType.C_SQUARE_L, lexer.tokens[16].type)
        self.assertEqual(TokenType.LIT_INT, lexer.tokens[17].type)
        self.assertEqual(TokenType.C_SQUARE_R, lexer.tokens[18].type)
        self.assertEqual(TokenType.OP_PLUS, lexer.tokens[19].type)
        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[20].type)
        self.assertEqual(TokenType.C_SEMI, lexer.tokens[21].type)
        self.assertEqual(TokenType.C_CURLY_R, lexer.tokens[22].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[23].type)
Example #20
    def test_div(self):
        lexer = Lexer('/ /')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_DIV, lexer.tokens[0].type)
        self.assertEqual(TokenType.OP_DIV, lexer.tokens[1].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[2].type)
Example #21
    def test_op_mul(self):
        lexer = Lexer('* /* + */ *')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_MUL, lexer.tokens[0].type)
        self.assertEqual(TokenType.OP_MUL, lexer.tokens[1].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[2].type)
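Examples #10 and #21 jointly pin down the comment rules: // runs to the end of the line and /* … */ is skipped, with no token emitted for either. A sketch of that skipping under those rules (skip_comment is an illustrative helper, not the Lexer's API):

def skip_comment(src, i):
    """Return the index just past a comment starting at i, or i if none."""
    if src.startswith('//', i):
        j = src.find('\n', i)
        return len(src) if j == -1 else j + 1
    if src.startswith('/*', i):
        j = src.find('*/', i + 2)
        return len(src) if j == -1 else j + 2
    return i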
Example #22
    def test_op_gt(self):
        lexer = Lexer('>>')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_GT, lexer.tokens[0].type)
        self.assertEqual(TokenType.OP_GT, lexer.tokens[1].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[2].type)
Example #23
    def test_lit_int(self):
        lexer = Lexer('1234567890')

        lexer.lex_all()

        self.assertEqual(TokenType.LIT_INT, lexer.tokens[0].type)
        self.assertEqual('1234567890', lexer.tokens[0].value)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #24
    def test_handles_multi_line_comment(self):
        comment = """this is
        going to be a comment
        that spans many lines"""
        lut = Lexer('  /*{comment}*/  '.format(comment=comment))
        result = lut.get_next_token()
        self.assertEqual(result.type, TOKEN_COMMENT)
        self.assertEqual(result.value, comment)
Example #25
    def test_lit_str(self):
        lexer = Lexer('"hello"')

        lexer.lex_all()

        self.assertEqual(TokenType.LIT_STR, lexer.tokens[0].type)
        self.assertEqual('hello', lexer.tokens[0].value)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
Example #26
    def test_op_access(self):
        lexer = Lexer('..')

        lexer.lex_all()

        self.assertEqual(TokenType.OP_ACCESS, lexer.tokens[0].type)
        self.assertEqual(TokenType.OP_ACCESS, lexer.tokens[1].type)
        self.assertEqual(TokenType.EOF, lexer.tokens[2].type)
Example #27
    def test_begin_tokenizing_new_state_to_buffer(self):
        lexer = Lexer('123')
        lexer.current_char = '1'

        lexer.begin_tokenizing(LexingState.LIT_STR, to_buffer=True)

        self.assertEqual(LexingState.LIT_STR, lexer.state)
        self.assertEqual('1', lexer.token_buffer)
Example #28
    def test_lit_str_escape(self):
        lexer = Lexer('"hello \\n \\t \\\" ha"')

        lexer.lex_all()

        self.assertEqual(TokenType.LIT_STR, lexer.tokens[0].type)
        self.assertEqual('hello \n \t \" ha', lexer.tokens[0].value)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)
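The expectations above (\n, \t and \" in the source resolving to real newline, tab and quote characters) fit a simple lookup applied while scanning a string literal. A minimal sketch of that mapping (ESCAPES and unescape are illustrative names, not the Lexer's internals):

ESCAPES = {'n': '\n', 't': '\t', '"': '"', '\\': '\\'}

def unescape(raw):
    """Resolve backslash escapes the way Example #28 expects."""
    out, i = [], 0
    while i < len(raw):
        if raw[i] == '\\' and i + 1 < len(raw):
            out.append(ESCAPES.get(raw[i + 1], raw[i + 1]))
            i += 2
        else:
            out.append(raw[i])
            i += 1
    return ''.join(out)

# unescape('hello \\n \\t \\" ha') == 'hello \n \t " ha'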
Example #29
def eat_tokens(line):
    """Render every token in line until EOF or ERROR is reached."""
    lex = Lexer(line)
    rendered = ""
    while lex.get_next_token():
        rendered += lex.peek_token().test()
        if lex.peek_token().type in ["EOF", "ERROR"]:
            break
    return rendered
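A note on the loop above: each token's rendering is appended before the EOF/ERROR check, so the final EOF (or ERROR) token is included in the returned string. Assuming Token.test() renders tokens in the <TYPE> style seen in Example #7 (its exact format is not shown in this section), eat_tokens collapses a whole source line into one comparable string.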
Example #30
    def test_identifier(self):
        lexer = Lexer('_function_name')

        lexer.lex_all()

        self.assertEqual(TokenType.IDENTIFIER, lexer.tokens[0].type)
        self.assertEqual('_function_name', lexer.tokens[0].value)
        self.assertEqual(TokenType.EOF, lexer.tokens[1].type)