Example #1
    def test_function_call(self) -> None:

        source: str = """
            variable resultado = suma(dos, tres);
        """

        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(10):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, "variable"),
            Token(TokenType.IDENT, "resultado"),
            Token(TokenType.ASSIGN, "="),
            Token(TokenType.IDENT, "suma"),
            Token(TokenType.LPAREN, "("),
            Token(TokenType.IDENT, "dos"),
            Token(TokenType.COMMA, ","),
            Token(TokenType.IDENT, "tres"),
            Token(TokenType.RPAREN, ")"),
            Token(TokenType.SEMICOLON, ";"),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #2
    def test_eof(self) -> None:
        source: str = '+'
        
        tokens = self._load_n_tokens(source, len(source) + 1)

        expected_tokens: List[Token] = [
            Token(TokenType.PLUS, '+', 1),
            Token(TokenType.EOF, '', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
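Several of these tests call a _load_n_tokens helper that is never shown in this section. A minimal sketch of what it plausibly looks like, assuming the Lexer and Token classes live in an lpp package and the tests sit on a TestCase subclass (both are assumptions, not confirmed by the snippets):

    from typing import List
    from unittest import TestCase

    from lpp.lexer import Lexer   # assumed module path
    from lpp.token import Token   # assumed module path


    class LexerTest(TestCase):

        def _load_n_tokens(self, source: str, n: int) -> List[Token]:
            # Run a fresh lexer over the source and collect the first n tokens.
            lexer: Lexer = Lexer(source)
            return [lexer.next_token() for _ in range(n)]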
Example #3
    def test_variable_with_number(self) -> None:
        source: str = 'variable valor_1;'
        tokens: List[Token] = self._load_n_tokens(source, 3)

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'variable', 1),
            Token(TokenType.IDENT, 'valor_1', 1),
            Token(TokenType.SEMICOLON, ';', 1),
        ]
        self.assertEqual(tokens, expected_tokens)
Example #4
    def test_return_statement(self) -> None:
        program: Program = Program(statements=[
            ReturnStatement(
                token=Token(TokenType.RETURN, literal="regresa"),
                return_value=Identifier(
                    token=Token(TokenType.IDENT, literal="mi_var"),
                    value="mi_var",
                ),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, "regresa mi_var;")
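This assertion only holds if ReturnStatement renders the keyword literal, a space, the return value, and a semicolon. A self-contained sketch of that string conversion, with hypothetical minimal classes standing in for the real AST nodes:

    from dataclasses import dataclass


    @dataclass
    class Identifier:
        value: str

        def __str__(self) -> str:
            return self.value


    @dataclass
    class ReturnStatement:
        keyword_literal: str  # e.g. "regresa"
        return_value: Identifier

        def __str__(self) -> str:
            # keyword, space, rendered value, semicolon
            return f"{self.keyword_literal} {self.return_value};"


    assert str(ReturnStatement("regresa", Identifier("mi_var"))) == "regresa mi_var;"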
Example #5
    def test_illegal(self) -> None:
        source: str = '¡¿@'

        tokens = self._load_tokens(source)

        expected_tokens: List[Token] = [
            Token(TokenType.ILLEGAL, '¡', 1),
            Token(TokenType.ILLEGAL, '¿', 1),
            Token(TokenType.ILLEGAL, '@', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
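The _load_tokens variant (no explicit count) is also undefined here. Every test that uses it expects exactly one token per character of the source, so a plausible sketch, sitting on the same test class as _load_n_tokens above:

    def _load_tokens(self, source: str) -> List[Token]:
        # Hypothetical companion to _load_n_tokens: one token per source
        # character, matching how the tests in this section use it.
        lexer: Lexer = Lexer(source)
        return [lexer.next_token() for _ in range(len(source))]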
Example #6
    def test_eof(self) -> None:
        source: str = '+'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source) + 1):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.PLUS, '+'),
            Token(TokenType.EOF, ''),
        ]
        self.assertEqual(tokens, expected_tokens)
Example #7
    def test_three_character_operator(self) -> None:

        source: str = """
            10 === 10;
            10 !== 9;
        """

        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(8):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.INT, "10"),
            Token(TokenType.SIMILAR, "==="),
            Token(TokenType.INT, "10"),
            Token(TokenType.SEMICOLON, ";"),
            Token(TokenType.INT, "10"),
            Token(TokenType.DIFF, "!=="),
            Token(TokenType.INT, "9"),
            Token(TokenType.SEMICOLON, ";"),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #8
    def test_let_statements(self) -> None:
        program: Program = Program(statements=[
            LetStatement(
                token=Token(TokenType.LET, literal='variable', line=1),
                name=Identifier(token=Token(TokenType.IDENT, literal='mi_var', line=1), value='mi_var'),
                value=Identifier(token=Token(TokenType.IDENT, literal='otra_variable', line=1), value='otra_var'),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, 'variable mi_var = otra_var;')
Example #9
    def test_integer_expressions(self) -> None:
        program: Program = Program(statements=[
            ExpressionStatement(
                token=Token(TokenType.INT, literal='5'),
                expression=Integer(
                    token=Token(TokenType.INT, literal='5'),
                    value=5,
                ),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, '5')
Example #10
    def test_assignment(self) -> None:
        source = 'variable cinco = 5 ;'

        tokens: List[Token] = self._load_n_tokens(source, 5)

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'variable', 1),
            Token(TokenType.IDENT, 'cinco', 1),
            Token(TokenType.ASSIGN, '=', 1),
            Token(TokenType.INT, '5', 1),
            Token(TokenType.SEMICOLON, ';', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #11
    def test_let_statement(self) -> None:
        program: Program = Program(statements=[
            LetStatement(
                token=Token(TokenType.LET, literal="variable"),
                name=Identifier(
                    token=Token(TokenType.IDENT, literal="mi_var"),
                    value="mi_var",
                ),
                value=Identifier(
                    token=Token(TokenType.IDENT, literal="otra_variable"),
                    value="otra_variable",
                ),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, "variable mi_var = otra_variable;")
Example #12
    def test_return_statement(self) -> None:
        program: Program = Program(statements=[
            ReturnStatement(
                token=Token(TokenType.RETURN, literal='regresa', line=1),
                return_value=Identifier(
                    token=Token(TokenType.IDENT, literal='otra_variable', line=1),
                    value='5',
                ),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, 'regresa 5;')
Example #13
    def test_illegal(self) -> None:
        source: str = '¡¿@'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ILLEGAL, '¡'),
            Token(TokenType.ILLEGAL, '¿'),
            Token(TokenType.ILLEGAL, '@'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #14
    def test_expression_statement(self) -> None:

        program: Program = Program(statements=[
            ExpressionStatement(
                token=Token(TokenType.IDENT, literal="foobar"),
                expression=Identifier(
                    token=Token(TokenType.IDENT, literal="foobar"),
                    value="foobar",
                ),
            ),
            ExpressionStatement(
                token=Token(TokenType.INT, literal="5"),
                expression=Identifier(
                    token=Token(TokenType.INT, literal="5"),
                    value="5",
                ),
            ),
        ])

        program_str = str(program)
        self.assertEqual(program_str, "foobar5")
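The expected value "foobar5" implies that Program concatenates the string forms of its statements with no separator. A minimal illustrative sketch (not the real class):

    from typing import List


    class Program:
        def __init__(self, statements: List[object]) -> None:
            self.statements = statements

        def __str__(self) -> str:
            # Join rendered statements with no separator, which is why
            # "foobar" and "5" run together as "foobar5" in the test.
            return "".join(str(statement) for statement in self.statements)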
Example #15
    def _make_two_character_token(self, token_type: TokenType) -> Token:
        # Capture the current character, advance once, and capture the
        # next one, so '==' or '!=' becomes a single two-character token.
        prefix = self._character
        self._read_character()
        suffix = self._character

        return Token(token_type, f"{prefix}{suffix}")
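This helper assumes the caller has already peeked one character ahead before committing to a two-character token. A hedged sketch of such a call site, where _peek_character() is an assumed lexer internal that is not shown in this section:

    def _dispatch_equals(self) -> Token:
        # Hypothetical branch inside next_token(): emit '==' when the next
        # character is another '=', otherwise a plain assignment token.
        if self._peek_character() == '=':
            return self._make_two_character_token(TokenType.EQ)
        return Token(TokenType.ASSIGN, self._character)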
Example #16
    def test_string(self) -> None:
        source: str = '''
            "foo";
            "No hay mejor escuela que la que uno se genera";
        '''

        tokens = self._load_n_tokens(source, 4)

        expected_tokens: List[Token] = [
            Token(TokenType.STRING, 'foo', 2),
            Token(TokenType.SEMICOLON, ';', 2),
            Token(TokenType.STRING, 'No hay mejor escuela que la que uno se genera', 3),
            Token(TokenType.SEMICOLON, ';', 3),
        ]

        self.assertEqual(tokens, expected_tokens)
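Both string tests expect the surrounding quotes to be stripped from STRING literals. A hedged sketch of a _read_string routine with that behavior, assuming _source, _position, and _read_character internals like the ones the helpers above rely on:

    def _read_string(self) -> str:
        # Skip the opening quote, then consume characters up to (but not
        # including) the closing quote; the quotes never reach the literal.
        self._read_character()
        start = self._position
        while self._character != '"' and self._character != '':
            self._read_character()
        return self._source[start:self._position]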
Example #17
    def test_assignment(self) -> None:
        source: str = 'variable cinco = 5;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(5):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'variable'),
            Token(TokenType.IDENT, 'cinco'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.INT, '5'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #18
    def _make_three_character_token(self, token_type: TokenType) -> Token:
        # Same idea as _make_two_character_token, extended one step
        # further to cover '===' and '!=='.
        first = self._character
        self._read_character()
        second = self._character
        self._read_character()
        third = self._character

        return Token(token_type, f"{first}{second}{third}")
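Telling '=', '==' and '===' apart requires looking two characters ahead. A hedged sketch extending the two-character dispatch shown after Example #15, assuming a hypothetical _peek_character(offset) that can look past the immediate next character:

    def _dispatch_equals(self) -> Token:
        # Hypothetical dispatch for the '===' variant of the lexer.
        if self._peek_character() != '=':
            return Token(TokenType.ASSIGN, self._character)              # '='
        if self._peek_character(offset=2) == '=':
            return self._make_three_character_token(TokenType.SIMILAR)   # '==='
        return self._make_two_character_token(TokenType.EQ)              # '=='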
Example #19
    def test_string(self) -> None:
        source: str = '''
            "foo";
            "Platzi es la mejor escuela de CS";
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(4):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.STRING, 'foo'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.STRING, 'Platzi es la mejor escuela de CS'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #20
    def test_function_call(self) -> None:
        source: str = 'variable resultado = suma(dos, tres);'

        tokens: List[Token] = self._load_n_tokens(source, 10)

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'variable', 1),
            Token(TokenType.IDENT, 'resultado', 1),
            Token(TokenType.ASSIGN, '=', 1),
            Token(TokenType.IDENT, 'suma', 1),
            Token(TokenType.LPAREN, '(', 1),
            Token(TokenType.IDENT, 'dos', 1),
            Token(TokenType.COMMA, ',', 1),
            Token(TokenType.IDENT, 'tres', 1),
            Token(TokenType.RPAREN, ')', 1),
            Token(TokenType.SEMICOLON, ';', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #21
    def test_assignment(self) -> None:

        source: str = "variable cinco = 5;"
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(5):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, "variable"),
            Token(TokenType.IDENT, "cinco"),
            Token(TokenType.ASSIGN, "="),
            Token(TokenType.INT, "5"),
            Token(TokenType.SEMICOLON, ";"),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #22
    def test_function_call(self) -> None:
        source: str = 'variable resultado = suma(dos, tres);'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(10):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'variable'),
            Token(TokenType.IDENT, 'resultado'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.IDENT, 'suma'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.IDENT, 'dos'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENT, 'tres'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #23
    def test_one_character_operator(self) -> None:
        source: str = '=+-/*<>!'
        
        tokens = self._load_tokens(source)

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, '=', 1),
            Token(TokenType.PLUS, '+', 1),
            Token(TokenType.MINUS, '-', 1),
            Token(TokenType.DIVISION, '/', 1),
            Token(TokenType.MULTIPLICATION, '*', 1),
            Token(TokenType.LT, '<', 1),
            Token(TokenType.GT, '>', 1),
            Token(TokenType.NEGATION, '!', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #24
    def test_two_characters_operator(self) -> None:
        source: str = '''
            10 == 10;
            12 != 11;
        '''
        tokens: List[Token] = self._load_n_tokens(source, 8)

        expected_tokens: List[Token] = [
            Token(TokenType.INT, '10', 2),
            Token(TokenType.EQ, '==', 2),
            Token(TokenType.INT, '10', 2),
            Token(TokenType.SEMICOLON, ';', 2),
            Token(TokenType.INT, '12', 3),
            Token(TokenType.NOT_EQ, '!=', 3),
            Token(TokenType.INT, '11', 3),
            Token(TokenType.SEMICOLON, ';', 3),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #25
    def test_one_character_operator(self) -> None:
        source: str = '=+-/*<>!'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.MINUS, '-'),
            Token(TokenType.DIVISION, '/'),
            Token(TokenType.MULTIPLICATION, '*'),
            Token(TokenType.LT, '<'),
            Token(TokenType.GT, '>'),
            Token(TokenType.NEGATION, '!'),
        ]
        self.assertEqual(tokens, expected_tokens)
Example #26
    def test_delimiters(self) -> None:
        source = '(){},;'

        tokens = self._load_tokens(source)

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, '(', 1),
            Token(TokenType.RPAREN, ')', 1),
            Token(TokenType.LBRACE, '{', 1),
            Token(TokenType.RBRACE, '}', 1),
            Token(TokenType.COMMA, ',', 1),
            Token(TokenType.SEMICOLON, ';', 1),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #27
    def test_two_character_operator(self) -> None:
        source: str = '''
            10 == 10;
            10 != 9;
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(8):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.INT, '10'),
            Token(TokenType.EQ, '=='),
            Token(TokenType.INT, '10'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.INT, '10'),
            Token(TokenType.NOT_EQ, '!='),
            Token(TokenType.INT, '9'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #28
    def test_one_character_operator(self) -> None:

        source: str = "=+-/*<>!"
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, "="),
            Token(TokenType.PLUS, "+"),
            Token(TokenType.MINUS, "-"),
            Token(TokenType.DIVISION, "/"),
            Token(TokenType.MULTIPLICATION, "*"),
            Token(TokenType.LT, "<"),
            Token(TokenType.GT, ">"),
            Token(TokenType.NEGATION, "!"),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #29
    def test_delimiters(self) -> None:
        source: str = '(){},;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, '('),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.SEMICOLON, ';'),
        ]
        self.assertEqual(tokens, expected_tokens)
Example #30
    def test_delimiters(self) -> None:

        source: str = "(){},;"
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, "("),
            Token(TokenType.RPAREN, ")"),
            Token(TokenType.LBRACE, "{"),
            Token(TokenType.RBRACE, "}"),
            Token(TokenType.COMMA, ","),
            Token(TokenType.SEMICOLON, ";"),
        ]

        self.assertEqual(tokens, expected_tokens)