Code example #1
File: ast_test.py  Project: cantte/CantteLanguage
    def test_expression_statement(self) -> None:
        program: Program = Program(statements=[
            ExpressionStatement(token=Token(TokenType.INT, literal='7'),
                                expression=Integer(token=Token(TokenType.INT,
                                                               literal='7'),
                                                   value=7))
        ])

        program_str = str(program)

        self.assertEqual(program_str, '7')
Code example #2
File: ast_test.py  Project: cantte/CantteLanguage
    def test_return_statement(self) -> None:
        program: Program = Program(statements=[
            ReturnStatement(token=Token(TokenType.RETURN, 'return'),
                            return_value=Identifier(
                                # the identifier node carries its own token,
                                # not the 'return' keyword's token
                                token=Token(TokenType.IDENTIFIER, 'num'),
                                value='num'))
        ])

        program_str = str(program)

        self.assertEqual(program_str, 'return num;')
Code example #3
File: ast_test.py  Project: cantte/CantteLanguage
    def test_let_statement(self) -> None:
        program: Program = Program(statements=[
            LetStatement(
                token=Token(TokenType.LET, 'let'),
                name=Identifier(token=Token(TokenType.IDENTIFIER, 'num'),
                                value='num'),
                value=Identifier(token=Token(TokenType.IDENTIFIER, 'other'),
                                 value='other'))
        ])

        program_str = str(program)

        self.assertEqual(program_str, 'let num = other;')
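Taken together, the three AST tests above pin down the textual form of every node. A stripped-down sketch of __str__ methods that would satisfy them, using dataclass stand-ins with the Token plumbing omitted (the real cantte.ast definitions will differ):

from dataclasses import dataclass
from typing import List


@dataclass
class Identifier:
    value: str

    def __str__(self) -> str:
        return self.value


@dataclass
class ExpressionStatement:
    expression: object

    def __str__(self) -> str:
        # An expression statement prints as its bare expression, e.g. '7'.
        return str(self.expression)


@dataclass
class ReturnStatement:
    return_value: Identifier

    def __str__(self) -> str:
        return f'return {self.return_value};'


@dataclass
class LetStatement:
    name: Identifier
    value: Identifier

    def __str__(self) -> str:
        return f'let {self.name} = {self.value};'


@dataclass
class Program:
    statements: List[object]

    def __str__(self) -> str:
        # A program prints as the plain concatenation of its statements.
        return ''.join(str(s) for s in self.statements)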
Code example #4
    def test_eof(self) -> None:
        source: str = '+'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source) + 1):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.PLUS, '+'),
            Token(TokenType.EOF, '')
        ]

        self.assertEqual(tokens, expected_tokens)
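Note that the loop runs len(source) + 1 times: the lexer must keep producing a token after the input is exhausted. That only works if advancing past the last character yields an empty character, which next_token can then map to TokenType.EOF. A standalone rendering of that guard (the real logic presumably lives in a Lexer._read_character method; the function and parameter names here are assumptions):

def read_character(source: str, read_position: int) -> str:
    # Past the end of the source, return '' so the caller can emit
    # Token(TokenType.EOF, '') instead of raising an IndexError.
    if read_position >= len(source):
        return ''

    return source[read_position]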
Code example #5
    def test_illegal(self) -> None:
        source: str = '¡¿@'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ILLEGAL, '¡'),
            Token(TokenType.ILLEGAL, '¿'),
            Token(TokenType.ILLEGAL, '@'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #6
    def test_assignment(self) -> None:
        source: str = 'let five = 5;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(5):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENTIFIER, 'five'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.INT, '5'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #7
    def test_function_call(self) -> None:
        source: str = 'let result = sum(dos, tres);'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(10):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENTIFIER, 'result'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.IDENTIFIER, 'sum'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.IDENTIFIER, 'dos'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENTIFIER, 'tres'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #8
    def next_token(self) -> Token:
        self._skip_whitespace()

        if self._is_letter(self._character):
            # A word: read it in full, then decide keyword vs. identifier.
            ident_literal: str = self._read_identifier()
            token_type = lookup_token_type(ident_literal)
            token = Token(token_type, ident_literal)
        elif self._is_number(self._character):
            # An integer literal: consume the run of digits.
            num_literal: str = self._read_number()
            token = Token(TokenType.INT, num_literal)
        else:
            token_type = self._get_token_type()

            if token_type in (TokenType.EQUAL, TokenType.NOT_EQUAL):
                # '==' and '!=' span two characters.
                token = self._make_two_character_token(token_type)
            elif token_type == TokenType.STRING:
                # Only the contents are kept; the quotes are dropped.
                literal = self._read_string()
                token = Token(token_type, literal)
            else:
                # A single-character operator or delimiter (this branch also
                # covers ILLEGAL characters and the empty EOF character).
                token = Token(token_type, self._character)

            self._read_character()  # advance past the token just built

        return token
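Callers normally pump next_token() in a loop until EOF instead of counting tokens by hand as the tests do. A small driver in that style; tokens_from is a hypothetical helper, and the token_type field name is assumed from similar Monkey-style lexers:

from typing import List

from cantte.lexer import Lexer
from cantte.token import Token, TokenType


def tokens_from(source: str) -> List[Token]:
    # Hypothetical convenience wrapper: drain the lexer until it reports
    # end-of-input, keeping the trailing EOF token as a sentinel.
    lexer = Lexer(source)
    tokens: List[Token] = []

    while (token := lexer.next_token()).token_type != TokenType.EOF:
        tokens.append(token)
    tokens.append(token)

    return tokens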
Code example #9
    def test_one_character_operator(self) -> None:
        source: str = '=+-/*<>!'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.MINUS, '-'),
            Token(TokenType.DIVISION, '/'),
            Token(TokenType.MULTIPLICATION, '*'),
            Token(TokenType.LESS_THAN, '<'),
            Token(TokenType.GREATER_THAN, '>'),
            Token(TokenType.NEGATION, '!'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #10
    def test_two_character_operator(self) -> None:
        source: str = '''
            10 == 10;
            10 != 9;
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(8):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.INT, '10'),
            Token(TokenType.EQUAL, '=='),
            Token(TokenType.INT, '10'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.INT, '10'),
            Token(TokenType.NOT_EQUAL, '!='),
            Token(TokenType.INT, '9'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #11
    def test_delimiters(self) -> None:
        source: str = '(){},;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, '('),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.SEMICOLON, ';')
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #12
    def test_string(self) -> None:
        source: str = '''
            "foo";
            "This is a string";
            'Other string';
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(6):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.STRING, 'foo'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.STRING, 'This is a string'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.STRING, 'Other string'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
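The test feeds both double- and single-quoted literals, and the expected tokens carry the contents without the delimiters, so _read_string has to accept either quote character and strip it. A standalone sketch of that shape (the real method works against the Lexer's internal cursor, and escape sequences are not handled here):

def read_string(source: str, start: int) -> str:
    # source[start] is assumed to be the opening quote: either '"' or "'".
    quote = source[start]
    end = start + 1
    while end < len(source) and source[end] != quote:
        end += 1

    return source[start + 1:end]  # contents only, delimiters dropped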
Code example #13
    def _make_two_character_token(self, token_type: TokenType) -> Token:
        # Glue the current character to the next one, e.g. '=' + '=' -> '=='.
        prefix = self._character
        self._read_character()
        suffix = self._character

        # The caller's trailing _read_character() advances past the suffix.
        return Token(token_type, f'{prefix}{suffix}')
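_make_two_character_token only assembles the literal; deciding that a two-character operator is coming has to happen earlier, in _get_token_type, by peeking one character ahead without consuming it. A hypothetical rendering of that decision (two_character_type is not part of the project):

from typing import Optional

from cantte.token import TokenType


def two_character_type(character: str, peek: str) -> Optional[TokenType]:
    # A one-character lookahead separates '=' from '==' and '!' from '!=';
    # None means no two-character operator starts here.
    if character == '=' and peek == '=':
        return TokenType.EQUAL
    if character == '!' and peek == '=':
        return TokenType.NOT_EQUAL

    return None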
Code example #14
from typing import List

from cantte.ast import Program
from cantte.lexer import Lexer
from cantte.token import Token, TokenType
from cantte.parser import Parser
from cantte.evaluator import evaluate
from cantte.object import Environment

EOF_TOKEN: Token = Token(TokenType.EOF, '')


def _print_parse_errors(errors: List[str]):
    for error in errors:
        print(error)


def start_repl() -> None:
    scanned: List[str] = []

    while (source := input('>> ')) != 'exit()':
        scanned.append(source)
        lexer: Lexer = Lexer(' '.join(scanned))
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()
        env: Environment = Environment()

        if len(parser.errors) > 0:
            _print_parse_errors(parser.errors)
            scanned.pop()  # discard the line that failed to parse
            continue

        # NOTE: the scraped snippet is cut off after the error check; the
        # tail below is an assumed reconstruction in the usual Monkey-style
        # REPL shape (evaluate the program, print the result via inspect()).
        evaluated = evaluate(program, env)

        if evaluated is not None:
            print(evaluated.inspect())
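Two design choices here are worth noting: the loop re-lexes the entire accumulated history each turn, which is what keeps earlier let bindings visible to later lines, and a line that fails to parse is popped so it cannot poison subsequent turns. Wiring the REPL up as an entry point is then a single call (the __main__ guard is added for illustration; it is not part of the snippet):

if __name__ == '__main__':
    start_repl()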
Code example #15
    def test_function_declaration(self) -> None:
        source: str = '''
            let sum = func(x, y) {
                x + y;
            };
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(16):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENTIFIER, 'sum'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.FUNCTION, 'func'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.IDENTIFIER, 'x'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENTIFIER, 'y'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.IDENTIFIER, 'x'),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.IDENTIFIER, 'y'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #16
    def test_control_statement(self) -> None:
        source: str = '''
            if (5 < 10) {
                return true;
            } else {
                return false;
            }
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for i in range(17):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.IF, 'if'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.INT, '5'),
            Token(TokenType.LESS_THAN, '<'),
            Token(TokenType.INT, '10'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RETURN, 'return'),
            Token(TokenType.TRUE, 'true'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.ELSE, 'else'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RETURN, 'return'),
            Token(TokenType.FALSE, 'false'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}')
        ]

        self.assertEqual(tokens, expected_tokens)
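Across these tests the lexer recognizes exactly seven keywords: let, func, return, if, else, true, and false. The lookup_token_type called from next_token is therefore most plausibly a dictionary lookup with an IDENTIFIER fallback; a sketch under that assumption (the real table lives in cantte.token and may differ):

from cantte.token import TokenType

# Assumed keyword table, inferred from the tokens the tests expect.
KEYWORDS = {
    'let': TokenType.LET,
    'func': TokenType.FUNCTION,
    'return': TokenType.RETURN,
    'if': TokenType.IF,
    'else': TokenType.ELSE,
    'true': TokenType.TRUE,
    'false': TokenType.FALSE,
}


def lookup_token_type(literal: str) -> TokenType:
    # Any word that is not a keyword is a plain identifier.
    return KEYWORDS.get(literal, TokenType.IDENTIFIER)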