def test_function_call(self) -> None:
    """Lexing a variable assignment whose value is a call yields 10 tokens.

    Fixed: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual``.
    """
    source: str = """
        variable resultado = suma(dos, tres);
    """
    lexer: Lexer = Lexer(source)
    # Pull exactly the 10 tokens the statement is expected to produce.
    tokens: List[Token] = [lexer.next_token() for _ in range(10)]
    expected_tokens: List[Token] = [
        Token(TokenType.LET, "variable"),
        Token(TokenType.IDENT, "resultado"),
        Token(TokenType.ASSIGN, "="),
        Token(TokenType.IDENT, "suma"),
        Token(TokenType.LPAREN, "("),
        Token(TokenType.IDENT, "dos"),
        Token(TokenType.COMMA, ","),
        Token(TokenType.IDENT, "tres"),
        Token(TokenType.RPAREN, ")"),
        Token(TokenType.SEMICOLON, ";"),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_eof(self) -> None:
    """Once the input is exhausted the lexer emits an EOF token."""
    source: str = '+'
    expected: List[Token] = [
        Token(TokenType.PLUS, '+', 1),
        Token(TokenType.EOF, '', 1),
    ]
    # Request one token more than there are characters to reach EOF.
    actual = self._load_n_tokens(source, len(source) + 1)
    self.assertEqual(actual, expected)
def test_variable_witch_number(self) -> None:
    """Identifiers may contain digits and underscores (e.g. ``valor_1``).

    Fixed: the annotation wrongly said ``List[TokenType]`` when
    ``_load_n_tokens`` returns ``Token`` objects, and a leftover debug
    ``print(tokens)`` was removed.
    """
    source: str = 'variable valor_1;'
    tokens: List[Token] = self._load_n_tokens(source, 3)
    expected_tokens: List[Token] = [
        Token(TokenType.LET, 'variable', 1),
        Token(TokenType.IDENT, 'valor_1', 1),
        Token(TokenType.SEMICOLON, ';', 1),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_return_statement(self) -> None:
    """A ReturnStatement with an identifier renders as 'regresa mi_var;'.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    program: Program = Program(statements=[
        ReturnStatement(
            token=Token(TokenType.RETURN, literal="regresa"),
            return_value=Identifier(
                token=Token(TokenType.IDENT, literal="mi_var"),
                value="mi_var",
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, "regresa mi_var;")
def test_illegal(self) -> None:
    """Characters outside the language lex as ILLEGAL tokens.

    Fixed: removed an unused local ``Lexer`` instance — the tokens come
    from ``self._load_tokens``, which builds its own lexer.
    """
    source: str = '¡¿@'
    tokens = self._load_tokens(source)
    expected_tokens: List[Token] = [
        Token(TokenType.ILLEGAL, '¡', 1),
        Token(TokenType.ILLEGAL, '¿', 1),
        Token(TokenType.ILLEGAL, '@', 1),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_eof(self) -> None:
    """Lexing one past the end of the input yields an EOF token.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '+'
    lexer: Lexer = Lexer(source)
    # One extra iteration past the input length reaches the EOF token.
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source) + 1)]
    expected_tokens: List[Token] = [
        Token(TokenType.PLUS, '+'),
        Token(TokenType.EOF, ''),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_three_character_operator(self) -> None:
    """'===' and '!==' lex as single SIMILAR / DIFF tokens.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = """
        10 === 10;
        10 !== 9;
    """
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(8)]
    expected_tokens: List[Token] = [
        Token(TokenType.INT, "10"),
        Token(TokenType.SIMILAR, "==="),
        Token(TokenType.INT, "10"),
        Token(TokenType.SEMICOLON, ";"),
        Token(TokenType.INT, "10"),
        Token(TokenType.DIFF, "!=="),
        Token(TokenType.INT, "9"),
        Token(TokenType.SEMICOLON, ";"),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_let_statements(self) -> None:
    """A LetStatement renders as 'variable <name> = <value>;'.

    Fixed: removed a leftover debug ``print`` and replaced deprecated
    ``assertEquals`` with ``assertEqual``.
    """
    program: Program = Program(statements=[
        LetStatement(
            token=Token(TokenType.LET, literal='variable', line=1),
            name=Identifier(
                token=Token(TokenType.IDENT, literal='mi_var', line=1),
                value='mi_var',
            ),
            # The expected output contains 'otra_var' — str() uses the
            # Identifier's value, not the token literal 'otra_variable'.
            value=Identifier(
                token=Token(TokenType.IDENT, literal='otra_variable', line=1),
                value='otra_var',
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, 'variable mi_var = otra_var;')
def test_integer_expressions(self) -> None:
    """An ExpressionStatement wrapping an Integer renders its literal.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    program: Program = Program(statements=[
        ExpressionStatement(
            token=Token(TokenType.INT, literal='5'),
            expression=Integer(
                token=Token(TokenType.INT, literal='5'),
                value=5,
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, '5')
def test_assignment(self) -> None:
    """A let binding of an integer lexes into five line-1 tokens."""
    source = 'variable cinco = 5 ;'
    expected: List[Token] = [
        Token(TokenType.LET, 'variable', 1),
        Token(TokenType.IDENT, 'cinco', 1),
        Token(TokenType.ASSIGN, '=', 1),
        Token(TokenType.INT, '5', 1),
        Token(TokenType.SEMICOLON, ';', 1),
    ]
    actual: List[Token] = self._load_n_tokens(source, len(expected))
    self.assertEqual(actual, expected)
def test_let_statement(self) -> None:
    """A LetStatement with an identifier value renders 'variable a = b;'.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    program: Program = Program(statements=[
        LetStatement(
            token=Token(TokenType.LET, literal="variable"),
            name=Identifier(
                token=Token(TokenType.IDENT, literal="mi_var"),
                value="mi_var",
            ),
            value=Identifier(
                token=Token(TokenType.IDENT, literal="otra_variable"),
                value="otra_variable",
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, "variable mi_var = otra_variable;")
def test_return_statement(self) -> None:
    """A ReturnStatement renders 'regresa <value>;' from the value field.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    program: Program = Program(statements=[
        ReturnStatement(
            token=Token(TokenType.RETURN, literal='regresa', line=1),
            # Expected output is 'regresa 5;' — the Identifier's value
            # ('5'), not its token literal, is what gets rendered.
            return_value=Identifier(
                Token(TokenType.IDENT, literal='otra_variable', line=1),
                value='5',
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, 'regresa 5;')
def test_illegal(self) -> None:
    """Unknown characters produce ILLEGAL tokens, one per character.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '¡¿@'
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source))]
    expected_tokens: List[Token] = [
        Token(TokenType.ILLEGAL, '¡'),
        Token(TokenType.ILLEGAL, '¿'),
        Token(TokenType.ILLEGAL, '@'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_expression_statement(self) -> None:
    """Consecutive expression statements render back-to-back ('foobar5').

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    program: Program = Program(statements=[
        ExpressionStatement(
            token=Token(TokenType.IDENT, literal="foobar"),
            expression=Identifier(
                token=Token(TokenType.IDENT, literal="foobar"),
                value="foobar",
            ),
        ),
        ExpressionStatement(
            token=Token(TokenType.INT, literal="5"),
            expression=Identifier(
                token=Token(TokenType.INT, literal="5"),
                value="5",
            ),
        ),
    ])
    program_str = str(program)
    self.assertEqual(program_str, "foobar5")
def _make_two_character_token(self, token_type: TokenType) -> Token:
    """Consume the current character plus the next one and build a
    single two-character token of the given type."""
    first = self._character
    self._read_character()
    # After advancing, self._character holds the second character.
    return Token(token_type, f"{first}{self._character}")
def test_string(self) -> None:
    """Double-quoted strings lex to STRING tokens without the quotes.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '''
        "foo";
        "No hay mejor escuela que la que uno se genera";
    '''
    tokens = self._load_n_tokens(source, 4)
    # Line numbers 2 and 3 match the statements' lines in the literal.
    expected_tokens: List[Token] = [
        Token(TokenType.STRING, 'foo', 2),
        Token(TokenType.SEMICOLON, ';', 2),
        Token(TokenType.STRING, 'No hay mejor escuela que la que uno se genera', 3),
        Token(TokenType.SEMICOLON, ';', 3),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_assignment(self) -> None:
    """A let binding lexes into LET, IDENT, ASSIGN, INT, SEMICOLON.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = 'variable cinco = 5;'
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(5)]
    expected_tokens: List[Token] = [
        Token(TokenType.LET, 'variable'),
        Token(TokenType.IDENT, 'cinco'),
        Token(TokenType.ASSIGN, '='),
        Token(TokenType.INT, '5'),
        Token(TokenType.SEMICOLON, ';'),
    ]
    self.assertEqual(tokens, expected_tokens)
def _make_three_character_token(self, token_type: TokenType) -> Token:
    """Consume three consecutive characters and build a single
    three-character token of the given type."""
    chars = [self._character]
    self._read_character()
    chars.append(self._character)
    self._read_character()
    chars.append(self._character)
    return Token(token_type, ''.join(chars))
def test_string(self) -> None:
    """String literals lex to STRING tokens holding their inner text.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '''
        "foo";
        "Platzi es la mejor escuela de CS";
    '''
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(4)]
    expected_tokens: List[Token] = [
        Token(TokenType.STRING, 'foo'),
        Token(TokenType.SEMICOLON, ';'),
        Token(TokenType.STRING, 'Platzi es la mejor escuela de CS'),
        Token(TokenType.SEMICOLON, ';'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_function_call(self) -> None:
    """A call expression lexes its callee and arguments as IDENT tokens."""
    source: str = 'variable resultado = suma(dos, tres);'
    expected: List[Token] = [
        Token(TokenType.LET, 'variable', 1),
        Token(TokenType.IDENT, 'resultado', 1),
        Token(TokenType.ASSIGN, '=', 1),
        Token(TokenType.IDENT, 'suma', 1),
        Token(TokenType.LPAREN, '(', 1),
        Token(TokenType.IDENT, 'dos', 1),
        Token(TokenType.COMMA, ',', 1),
        Token(TokenType.IDENT, 'tres', 1),
        Token(TokenType.RPAREN, ')', 1),
        Token(TokenType.SEMICOLON, ';', 1),
    ]
    actual: List[Token] = self._load_n_tokens(source, len(expected))
    self.assertEqual(actual, expected)
def test_assignment(self) -> None:
    """A let binding lexes into LET, IDENT, ASSIGN, INT, SEMICOLON.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = "variable cinco = 5;"
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(5)]
    expected_tokens: List[Token] = [
        Token(TokenType.LET, "variable"),
        Token(TokenType.IDENT, "cinco"),
        Token(TokenType.ASSIGN, "="),
        Token(TokenType.INT, "5"),
        Token(TokenType.SEMICOLON, ";"),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_function_call(self) -> None:
    """A call expression lexes into LET/IDENT/ASSIGN plus call tokens.

    Two defects fixed: (1) the expected tokens did not match the source
    text — 'suma' is a plain identifier and the arguments are 'dos' and
    'tres', not FUNCTION/'x'/'y' (consistent with the other
    test_function_call variants in this file); (2) the final assertion
    was missing entirely, so the test could never fail.
    """
    source: str = 'variable resultado = suma(dos, tres);'
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(10)]
    expected_tokens: List[Token] = [
        Token(TokenType.LET, 'variable'),
        Token(TokenType.IDENT, 'resultado'),
        Token(TokenType.ASSIGN, '='),
        Token(TokenType.IDENT, 'suma'),
        Token(TokenType.LPAREN, '('),
        Token(TokenType.IDENT, 'dos'),
        Token(TokenType.COMMA, ','),
        Token(TokenType.IDENT, 'tres'),
        Token(TokenType.RPAREN, ')'),
        Token(TokenType.SEMICOLON, ';'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_one_character_operator(self) -> None:
    """Each single-character operator lexes to its dedicated token type."""
    source: str = '=+-/*<>!'
    expected: List[Token] = [
        Token(TokenType.ASSIGN, '=', 1),
        Token(TokenType.PLUS, '+', 1),
        Token(TokenType.MINUS, '-', 1),
        Token(TokenType.DIVISION, '/', 1),
        Token(TokenType.MULTIPLICATION, '*', 1),
        Token(TokenType.LT, '<', 1),
        Token(TokenType.GT, '>', 1),
        Token(TokenType.NEGATION, '!', 1),
    ]
    actual = self._load_tokens(source)
    self.assertEqual(actual, expected)
def test_two_characters_operator(self) -> None:
    """'==' and '!=' lex as single two-character tokens with line info."""
    source: str = '''
        10 == 10;
        12 != 11;
    '''
    expected: List[Token] = [
        Token(TokenType.INT, '10', 2),
        Token(TokenType.EQ, '==', 2),
        Token(TokenType.INT, '10', 2),
        Token(TokenType.SEMICOLON, ';', 2),
        Token(TokenType.INT, '12', 3),
        Token(TokenType.NOT_EQ, '!=', 3),
        Token(TokenType.INT, '11', 3),
        Token(TokenType.SEMICOLON, ';', 3),
    ]
    actual: List[Token] = self._load_n_tokens(source, len(expected))
    self.assertEqual(actual, expected)
def test_one_character_operator(self) -> None:
    """Each single-character operator lexes to its dedicated token type.

    Fixed: '-' was expected as ``TokenType.LESS``; the sibling
    one-character-operator tests in this file expect ``TokenType.MINUS``
    for '-'. Also replaced deprecated ``assertEquals``.
    """
    source: str = '=+-/*<>!'
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source))]
    expected_tokens: List[Token] = [
        Token(TokenType.ASSIGN, '='),
        Token(TokenType.PLUS, '+'),
        Token(TokenType.MINUS, '-'),  # was TokenType.LESS — inconsistent
        Token(TokenType.DIVISION, '/'),
        Token(TokenType.MULTIPLICATION, '*'),
        Token(TokenType.LT, '<'),
        Token(TokenType.GT, '>'),
        Token(TokenType.NEGATION, '!'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_delimeters(self) -> None:
    """Delimiters (){},; each lex to their dedicated token types."""
    source = '(){},;'
    expected: List[Token] = [
        Token(TokenType.LPAREN, '(', 1),
        Token(TokenType.RPAREN, ')', 1),
        Token(TokenType.LBRACE, '{', 1),
        Token(TokenType.RBRACE, '}', 1),
        Token(TokenType.COMMA, ',', 1),
        Token(TokenType.SEMICOLON, ';', 1),
    ]
    actual = self._load_tokens(source)
    self.assertEqual(actual, expected)
def test_two_character_operator(self) -> None:
    """'==' and '!=' lex as single EQ / NOT_EQ tokens.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '''
        10 == 10;
        10 != 9;
    '''
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(8)]
    expected_tokens: List[Token] = [
        Token(TokenType.INT, '10'),
        Token(TokenType.EQ, '=='),
        Token(TokenType.INT, '10'),
        Token(TokenType.SEMICOLON, ';'),
        Token(TokenType.INT, '10'),
        Token(TokenType.NOT_EQ, '!='),
        Token(TokenType.INT, '9'),
        Token(TokenType.SEMICOLON, ';'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_one_character_operator(self) -> None:
    """Each single-character operator lexes to its dedicated token type.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = "=+-/*<>!"
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source))]
    expected_tokens: List[Token] = [
        Token(TokenType.ASSIGN, "="),
        Token(TokenType.PLUS, "+"),
        Token(TokenType.MINUS, "-"),
        Token(TokenType.DIVISION, "/"),
        Token(TokenType.MULTIPLICATION, "*"),
        Token(TokenType.LT, "<"),
        Token(TokenType.GT, ">"),
        Token(TokenType.NEGATION, "!"),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_delimiters(self) -> None:
    """Delimiters (){},; each lex to their dedicated token types.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = '(){},;'
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source))]
    expected_tokens: List[Token] = [
        Token(TokenType.LPAREN, '('),
        Token(TokenType.RPAREN, ')'),
        Token(TokenType.LBRACE, '{'),
        Token(TokenType.RBRACE, '}'),
        Token(TokenType.COMMA, ','),
        Token(TokenType.SEMICOLON, ';'),
    ]
    self.assertEqual(tokens, expected_tokens)
def test_delimiters(self) -> None:
    """Delimiters (){},; each lex to their dedicated token types.

    Fixed: deprecated ``assertEquals`` replaced with ``assertEqual``.
    """
    source: str = "(){},;"
    lexer: Lexer = Lexer(source)
    tokens: List[Token] = [lexer.next_token() for _ in range(len(source))]
    expected_tokens: List[Token] = [
        Token(TokenType.LPAREN, "("),
        Token(TokenType.RPAREN, ")"),
        Token(TokenType.LBRACE, "{"),
        Token(TokenType.RBRACE, "}"),
        Token(TokenType.COMMA, ","),
        Token(TokenType.SEMICOLON, ";"),
    ]
    self.assertEqual(tokens, expected_tokens)