Exemple #1
0
def run_prompt(prompt: str = "lox> ") -> None:
    """Interactive read-eval-print loop; exits on EOF (Ctrl-D).

    Each line is scanned and parsed; statement lists are executed,
    while a bare expression is evaluated and its value echoed back.
    """
    global gHadError

    while True:
        # Reset the error flag so one bad line does not poison the next.
        gHadError = False

        try:
            source = input(prompt)
        except EOFError:
            break

        tokens = Scanner(source).scan_tokens()
        syntax = Parser(tokens).parse_repl()

        # A syntax error was already reported; skip evaluation of this line.
        if gHadError:
            continue

        if isinstance(syntax, list):
            gInterpreter.interpret(syntax)
        elif isinstance(syntax, Expr):
            value = gInterpreter.interpret_repl(syntax)
            if value is not None:
                print(f"= {value}")
Exemple #2
0
 def test_var(self):
     """Declaring a variable and printing it interprets without error."""
     source = 'var beverage = "espresso";\nprint beverage;'
     tokens = Scanner(source).scan_tokens()
     statements = Parser(tokens, "").parse()
     Interpreter().interpret(statements)
Exemple #3
0
    def test_not_equal(self):
        """'!=' scans as a single BANG_EQUAL token (plus EOF)."""
        tokens = list(Scanner('!=').scan_tokens())

        self.assertEqual(len(tokens), 2)
        self.assertEqual(tokens[0].type, TokenTypes.BANG_EQUAL)
Exemple #4
0
    def test_unterminated_string(self):
        """An unterminated string literal yields a ScannerError, then EOF."""
        tokens = list(Scanner('"this string is missing a trailing quote').scan_tokens())

        self.assertEqual(len(tokens), 2)
        error, eof = tokens
        self.assertIsInstance(error, ScannerError)
        self.assertEqual(eof.type, TokenTypes.EOF)
Exemple #5
0
 def test_parens(self):
     """'()' scans to a LEFT_PAREN token followed by a RIGHT_PAREN token."""
     scanner = Scanner("()")
     scanner.scan_tokens()
     # Read token attributes directly instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '('
     assert scanner.tokens[0].type == TokenType.LEFT_PAREN
     assert scanner.tokens[1].lexeme == ')'
     assert scanner.tokens[1].type == TokenType.RIGHT_PAREN
Exemple #6
0
 def test_semicolon(self):
     """';' scans to a SEMICOLON token followed by the empty-lexeme EOF token."""
     scanner = Scanner(';')
     scanner.scan_tokens()
     # Read token attributes directly instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == ';'
     assert scanner.tokens[0].type == TokenType.SEMICOLON
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #7
0
 def test_keywords(self):
     """Scanning "5" yields a NUMBER token and a trailing EOF."""
     # NOTE(review): despite its name, this exercises number scanning,
     # not keywords — renaming would change the public test name.
     tokens = Scanner("5").scan_tokens()
     self.assertTokens(tokens, [
         (TokenType.NUMBER, "5", 5),
         (TokenType.EOF, "", None),
     ])
Exemple #8
0
 def test_star(self):
     """'*' scans to a STAR token followed by the empty-lexeme EOF token."""
     scanner = Scanner('*')
     scanner.scan_tokens()
     # Read token attributes directly instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '*'
     assert scanner.tokens[0].type == TokenType.STAR
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #9
0
 def test_slash(self):
     """'/' scans to a SLASH token followed by the empty-lexeme EOF token."""
     scanner = Scanner('/')
     scanner.scan_tokens()
     # Read token attributes directly instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '/'
     assert scanner.tokens[0].type == TokenType.SLASH
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #10
0
    def test_div_equal(self):
        """'/=' scans as separate SLASH and EQUAL tokens (plus EOF)."""
        tokens = list(Scanner('/=').scan_tokens())

        self.assertEqual(len(tokens), 3)
        self.assertEqual(tokens[0].type, TokenTypes.SLASH)
        self.assertEqual(tokens[1].type, TokenTypes.EQUAL)
Exemple #11
0
    def test_not_equal_greater_equal(self):
        """Adjacent two-char operators '!=>=' scan as BANG_EQUAL then GREATER_EQUAL."""
        tokens = list(Scanner('!=>=').scan_tokens())

        self.assertEqual(len(tokens), 3)
        self.assertEqual(tokens[0].type, TokenTypes.BANG_EQUAL)
        self.assertEqual(tokens[1].type, TokenTypes.GREATER_EQUAL)
Exemple #12
0
 def test_dot(self):
     """'.' scans to a DOT token followed by the empty-lexeme EOF token."""
     scanner = Scanner(".")
     scanner.scan_tokens()
     # Dropped leftover debug print(scanner); read attributes directly
     # instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '.'
     assert scanner.tokens[0].type == TokenType.DOT
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #13
0
 def test_minus(self):
     """'-' scans to a MINUS token followed by the empty-lexeme EOF token."""
     scanner = Scanner("-")
     scanner.scan_tokens()
     # Dropped leftover debug print(scanner); read attributes directly
     # instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '-'
     assert scanner.tokens[0].type == TokenType.MINUS
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #14
0
 def test_plus(self):
     """'+' scans to a PLUS token followed by the empty-lexeme EOF token."""
     scanner = Scanner("+")
     scanner.scan_tokens()
     # Dropped leftover debug print(scanner); read attributes directly
     # instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '+'
     assert scanner.tokens[0].type == TokenType.PLUS
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #15
0
    def test_identifier_made_of_two_keywords(self):
        """Two keywords fused together ('orclass') scan as one IDENTIFIER."""
        tokens = list(Scanner('orclass').scan_tokens())

        self.assertEqual(len(tokens), 2)
        token = tokens[0]
        self.assertEqual(token.type, TokenTypes.IDENTIFIER)
        self.assertEqual(token.lexeme, 'orclass')
        self.assertEqual(token.literal, None)
Exemple #16
0
 def test_comma(self):
     """',' scans to a COMMA token followed by the empty-lexeme EOF token."""
     scanner = Scanner(",")
     scanner.scan_tokens()
     # Dropped leftover debug print(scanner); read attributes directly
     # instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == ','
     assert scanner.tokens[0].type == TokenType.COMMA
     assert scanner.tokens[1].lexeme == ''
     assert scanner.tokens[1].type == TokenType.EOF
Exemple #17
0
    def test_single_invalid_char_should_return_scanner_error(self):
        """An unrecognized character ('@') produces a ScannerError on line 1, then EOF."""
        tokens = list(Scanner('@').scan_tokens())

        self.assertEqual(len(tokens), 2)
        error, eof = tokens
        self.assertIsInstance(error, ScannerError)
        self.assertEqual(error.line, 1)
        self.assertEqual(eof.type, TokenTypes.EOF)
Exemple #18
0
    def test_multiple_chars_with_underscore(self):
        """An underscore is a valid interior identifier character ('a_b')."""
        tokens = list(Scanner('a_b').scan_tokens())

        self.assertEqual(len(tokens), 2)
        token = tokens[0]
        self.assertEqual(token.type, TokenTypes.IDENTIFIER)
        self.assertEqual(token.lexeme, 'a_b')
        self.assertEqual(token.literal, None)
Exemple #19
0
    def test_identifier_starting_with_keyword(self):
        """A word merely starting with a keyword ('orchid') is an IDENTIFIER."""
        tokens = list(Scanner('orchid').scan_tokens())

        self.assertEqual(len(tokens), 2)
        token = tokens[0]
        self.assertEqual(token.type, TokenTypes.IDENTIFIER)
        self.assertEqual(token.lexeme, 'orchid')
        self.assertEqual(token.literal, None)
Exemple #20
0
    def test_fractional_number(self):
        """A number with a fractional part scans as one NUMBER with a float literal."""
        tokens = list(Scanner('100.12').scan_tokens())

        self.assertEqual(len(tokens), 2)
        number = tokens[0]
        self.assertEqual(number.type, TokenTypes.NUMBER)
        self.assertEqual(number.lexeme, '100.12')
        self.assertEqual(number.literal, 100.12)
Exemple #21
0
    def test_multi_digit(self):
        """Several consecutive digits scan as a single NUMBER token."""
        tokens = list(Scanner('234').scan_tokens())

        self.assertEqual(len(tokens), 2)
        number = tokens[0]
        self.assertEqual(number.type, TokenTypes.NUMBER)
        self.assertEqual(number.lexeme, '234')
        self.assertEqual(number.literal, 234)
Exemple #22
0
 def test_braces(self):
     """'{}' scans to a LEFT_BRACE token followed by a RIGHT_BRACE token."""
     scanner = Scanner("{}")
     scanner.scan_tokens()
     # Dropped leftover debug print(scanner); read attributes directly
     # instead of poking at __dict__.
     assert scanner.tokens[0].lexeme == '{'
     assert scanner.tokens[0].type == TokenType.LEFT_BRACE
     assert scanner.tokens[1].lexeme == '}'
     assert scanner.tokens[1].type == TokenType.RIGHT_BRACE
Exemple #23
0
    def test_two_newlines(self):
        """Each newline produces a NEWLINE token carrying its own line number."""
        tokens = list(Scanner('\n\n').scan_tokens())

        self.assertEqual(len(tokens), 3)
        for line_number, token in enumerate(tokens[:2], start=1):
            self.assertEqual(token.type, TokenTypes.NEWLINE)
            self.assertEqual(token.line, line_number)
Exemple #24
0
    def test_multiple_single_char_tokens(self):
        """A run of single-char lexemes '(-*' scans to one token each, then EOF."""
        tokens = list(Scanner('(-*').scan_tokens())

        self.assertEqual(len(tokens), 4)
        expected_types = [
            TokenTypes.LEFT_PAREN,
            TokenTypes.MINUS,
            TokenTypes.STAR,
            TokenTypes.EOF,
        ]
        for token, expected in zip(tokens, expected_types):
            self.assertEqual(token.type, expected)
Exemple #25
0
    def test_string_with_newlines(self):
        """A string literal may span lines; its token reports the closing line."""
        tokens = list(Scanner('"this string is \nover two lines"').scan_tokens())

        self.assertEqual(len(tokens), 2)
        string_token = tokens[0]
        self.assertEqual(string_token.type, TokenTypes.STRING)
        self.assertEqual(string_token.lexeme, '"this string is \nover two lines"')
        self.assertEqual(string_token.literal, 'this string is \nover two lines')
        self.assertEqual(string_token.line, 2)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)
Exemple #26
0
    def test_or_keyword(self):
        """The word 'or' scans as the OR keyword token, not an identifier."""
        tokens = list(Scanner('or').scan_tokens())

        self.assertEqual(len(tokens), 2)
        keyword = tokens[0]
        self.assertEqual(keyword.type, TokenTypes.OR)
        self.assertEqual(keyword.lexeme, 'or')
        self.assertEqual(keyword.literal, None)
        self.assertEqual(keyword.line, 1)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)
Exemple #27
0
    def test_single_char(self):
        """A lone letter scans as an IDENTIFIER with no literal value."""
        tokens = list(Scanner('a').scan_tokens())

        self.assertEqual(len(tokens), 2)
        ident = tokens[0]
        self.assertEqual(ident.type, TokenTypes.IDENTIFIER)
        self.assertEqual(ident.lexeme, 'a')
        self.assertEqual(ident.literal, None)
        self.assertEqual(ident.line, 1)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)
Exemple #28
0
    def test_single_nonempty_string(self):
        """A quoted word scans as STRING; the literal drops the quotes."""
        tokens = list(Scanner('"asdf"').scan_tokens())

        self.assertEqual(len(tokens), 2)
        string_token = tokens[0]
        self.assertEqual(string_token.type, TokenTypes.STRING)
        self.assertEqual(string_token.lexeme, '"asdf"')
        self.assertEqual(string_token.literal, 'asdf')
        self.assertEqual(string_token.line, 1)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)
Exemple #29
0
    def test_slash_slash(self):
        """'//' scans as a COMMENT token whose literal is the (empty) comment text."""
        tokens = list(Scanner('//').scan_tokens())

        self.assertEqual(len(tokens), 2)
        comment = tokens[0]
        self.assertEqual(comment.type, TokenTypes.COMMENT)
        self.assertEqual(comment.lexeme, '//')
        self.assertEqual(comment.literal, '')
        self.assertEqual(comment.line, 1)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)
Exemple #30
0
    def test_single_digit(self):
        """A lone digit scans as a NUMBER token with a numeric literal."""
        tokens = list(Scanner('2').scan_tokens())

        self.assertEqual(len(tokens), 2)
        number = tokens[0]
        self.assertEqual(number.type, TokenTypes.NUMBER)
        self.assertEqual(number.lexeme, '2')
        self.assertEqual(number.literal, 2)
        self.assertEqual(number.line, 1)
        self.assertEqual(tokens[1].type, TokenTypes.EOF)