Example #1
def test_interpreter():
    from lox.scanner import Scanner
    from lox.parser import Parser
    from lox.ast.ast_printer import ASTPrinter
    from lox.interpreter import Interpreter

    source = """ 1 + 999 """

    lox_scan = Scanner(source)

    tokens = lox_scan.scanTokens()

    print("Tokens:")
    for tok in tokens:
        print(tok)
    print("\n")

    lox_parser = Parser(tokens)
    result = lox_parser.parse()

    printer = ASTPrinter()

    print(printer.print(result))

    print(Interpreter().evaluate(result))
Example #2
def test_scanner():
    from lox.scanner import Scanner

    lox_scanner = Scanner("""
        var x = 12.1
        if else 
        for 
        // kdjkdkkd
        /
        {}
        ()
        print 
        "fias'' // "

        class
        @ 
        " student 
        
        // this is a comment
        (( )){} // grouping stuff
        !*+-/=<> <= == // operators
    """)

    token_list = lox_scanner.scanTokens()

    for tok in token_list:
        print(tok)
Example #3
    def run(self, source):
        scanner = Scanner(source, self)
        tokens = scanner.scan_tokens()
        statements = Parser(tokens, self).parse()
        if self.had_error:
            return
        # static resolution pass over the parsed statements
        self.resolver.resolve(statements)
        if self.had_error:
            return
        self.interpreter.interpret(statements)
Example #4
    def run_code(self, source_code):
        scanner = Scanner(source_code, self.error)
        tokens = scanner.scan_tokens()

        parser = Parser(tokens, self.error_token)
        expr = parser.parse()

        if Lox.has_error:
            return

        # print(AstPrinter().print(expr))
        self.interpreter.interpret(expr)
Example #5
def run(source):
    scan = Scanner(source)
    tokens = scan.scanTokens()

    # for token in tokens:
    #     print(token)

    pars = Parser(tokens)
    result = pars.parse()

    print(ASTPrinter().print(result))
    print(Interpreter().interpret(result))
Example #6
File: lox.py Project: ehomrich/lox
    def run(source: str) -> None:
        try:
            scanner = Scanner(source)
            tokens = scanner.scan_tokens()
            parser = Parser(tokens)
            stmts = parser.parse()

            Lox.interpreter.interpret(stmts)
        except ParseError as pe:
            Lox.error(pe.token, str(pe))
        except LoxRuntimeError as lre:
            Lox.runtime_error(lre)
Example #7
def test_parser():
    from lox.scanner import Scanner
    from lox.parser import Parser
    from lox.ast.ast_printer import ASTPrinter

    source = " 1 + 2 >= 133 + \" gggg \""

    lox_scan = Scanner(source)

    tokens = lox_scan.scanTokens()

    print("Tokens:")
    for tok in tokens:
        print(tok)
    print("\n")

    lox_parser = Parser(tokens)
    result = lox_parser.parse()

    printer = ASTPrinter()

    print(printer.print(result))
Example #8
def get_tokens(code, l):
    # l: the error-reporting runner the Scanner is constructed with
    # (the other examples above pass the Lox/runner instance in this position)
    return Scanner(code, l).scan_tokens()
Example #9
    def __init__(self, source, debugging=True):
        self.parser = Parser()
        self.scanner = Scanner(source)
        # The chunk of bytecode we are currently assembling
        self.chunk = Chunk()
        self.DEBUG_PRINT_CODE = debugging
Example #10
class Compiler(object):
    """
    A single pass compiler using Pratt’s parsing technique.
    Note it might be interesting to create a version that
    parses to an AST, then a code generator traverses the AST
    and outputs bytecode.
    """
    def __init__(self, source, debugging=True):
        self.parser = Parser()
        self.scanner = Scanner(source)
        # The chunk of bytecode we are currently assembling
        self.chunk = Chunk()
        self.DEBUG_PRINT_CODE = debugging

    def compile(self):

        # todo could be scanner.advance
        self.advance()
        self.expression()

        self.consume(TokenTypes.EOF, "Expect end of expression.")
        self.end_compiler()

        return not self.parser.had_error

    def _error_at(self, token, msg):
        if self.parser.panic_mode:
            # suppress subsequent errors while panicking
            return
        # record that we are now in panic mode so follow-on errors are suppressed
        self.parser.panic_mode = True
        print("[line %d] Error" % token.line, end="")

        if token.type == TokenTypes.EOF:
            print(" at end", end="")
        elif token.type == TokenTypes.ERROR:
            pass
        else:
            print(" at %s" % self.scanner.get_token_string(token), end="")
        print(": %s" % msg)

        self.parser.had_error = True

    def error_at_current(self, msg):
        self._error_at(self.parser.current, msg)

    def error(self, msg):
        self._error_at(self.parser.previous, msg)

    def end_compiler(self):
        self._emit_return()

        if self.DEBUG_PRINT_CODE and not self.parser.had_error:
            self.current_chunk().disassemble("code")

    def advance(self):
        self.parser.previous = self.parser.current

        while True:
            self.parser.current = self.scanner.scan_token()
            if self.parser.current.type != TokenTypes.ERROR:
                break
            self.error_at_current(self.parser.current.message)

    def consume(self, token_type, msg):
        if self.parser.current.type == token_type:
            self.advance()
            return

        self.error_at_current(msg)

    def make_constant(self, value):
        chunk = self.current_chunk()
        constant = chunk.add_constant(value)
        if constant > 255:
            self.error("Too many constants in one chunk.")
            return 0
        return constant

    def current_chunk(self):
        assert self.chunk is not None
        return self.chunk

    def emit_byte(self, byte):
        self.current_chunk().write_chunk(byte, self.parser.previous.line)

    def emit_bytes(self, byte_a, byte_b):
        self.emit_byte(byte_a)
        self.emit_byte(byte_b)

    def _emit_constant(self, value):
        self.emit_bytes(OpCode.OP_CONSTANT, self.make_constant(value))

    def _emit_return(self):
        self.emit_byte(OpCode.OP_RETURN)

    def grouping(self):
        self.expression()
        self.consume(TokenTypes.RIGHT_PAREN, "Expected ')' after expression.")

    def unary(self):
        op_type = self.parser.previous.type
        # Compile the operand
        self.parse_precedence(Precedence.UNARY)
        # Emit the operator instruction
        if op_type == TokenTypes.MINUS:
            self.emit_byte(OpCode.OP_NEGATE)

    def binary(self):
        op_type = self.parser.previous.type

        # As binary ops are "infix" we've already
        # consumed the left operand.

        # Compile the right operand
        rule = self._get_rule(op_type)
        self.parse_precedence(rule.precedence + 1)

        # Emit the operator instruction
        if op_type == TokenTypes.PLUS: self.emit_byte(OpCode.OP_ADD)
        if op_type == TokenTypes.MINUS: self.emit_byte(OpCode.OP_SUBTRACT)
        if op_type == TokenTypes.STAR: self.emit_byte(OpCode.OP_MULTIPLY)
        if op_type == TokenTypes.SLASH: self.emit_byte(OpCode.OP_DIVIDE)

    def parse_precedence(self, precedence):
        # parses any expression of a given precedence level or higher
        self.advance()
        prefix_rule = self._get_rule(self.parser.previous.type).prefix
        if prefix_rule is None:
            self.error("Expected expression.")
            return
        else:
            prefix_rule(self)

        while precedence <= self._get_rule(
                self.parser.current.type).precedence:
            self.advance()
            infix_method = self._get_rule(self.parser.previous.type).infix
            infix_method(self)

    def number(self):
        value = float(self.scanner.get_token_string(self.parser.previous))
        self._emit_constant(value)

    def expression(self):
        self.parse_precedence(Precedence.ASSIGNMENT)

    @staticmethod
    def _get_rule(op_type):
        return rules[op_type]
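
The class above leaves its `rules` lookup table implicit (`_get_rule` simply indexes `rules[op_type]`). Below is a minimal sketch of what such a table could look like, assuming `TokenTypes`, `Precedence`, and `Compiler` are the objects defined in this module; `ParseRule` and the precedence names `TERM`, `FACTOR`, and `NONE` are illustrative guesses in the style of clox, not this project's actual definitions. It also illustrates why `binary()` asks for `rule.precedence + 1`: parsing the right operand one level above the operator's own precedence keeps each binary operator left-associative.

# Hypothetical sketch of the Pratt rule table that _get_rule() indexes.
# TokenTypes, Precedence and Compiler are assumed to come from this module;
# ParseRule and the specific entries are illustrative, not the real table.
from collections import defaultdict, namedtuple

ParseRule = namedtuple("ParseRule", ["prefix", "infix", "precedence"])

# Tokens with no registered handlers fall back to "no rule, lowest precedence",
# so parse_precedence() stops advancing when it reaches them (e.g. EOF).
rules = defaultdict(lambda: ParseRule(None, None, Precedence.NONE))
rules.update({
    TokenTypes.LEFT_PAREN: ParseRule(Compiler.grouping, None, Precedence.NONE),
    TokenTypes.MINUS: ParseRule(Compiler.unary, Compiler.binary, Precedence.TERM),
    TokenTypes.PLUS: ParseRule(None, Compiler.binary, Precedence.TERM),
    TokenTypes.SLASH: ParseRule(None, Compiler.binary, Precedence.FACTOR),
    TokenTypes.STAR: ParseRule(None, Compiler.binary, Precedence.FACTOR),
    TokenTypes.NUMBER: ParseRule(Compiler.number, None, Precedence.NONE),
})

With a table like this in place, `Compiler("1 + 2 * 3").compile()` would scan, parse, and emit bytecode for the expression, returning False if any error was reported.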
Example #11
    def run(self, source) -> None:
        scanner = Scanner(source)
        self.tokens = scanner.scan_tokens()
        # pp: presumably the pprint module, imported at module level
        pp.pprint(self.tokens)