Beispiel #1
0
    def compile_subroutine_dec(self):
        """Compile zero or more subroutine declarations.

        Loops while the current token is one of the subroutine kind
        keywords (constructor, function, method) and emits a
        SUBROUTINE_DEC non-terminal for each declaration.
        """
        while self.tokenizer.current_token in [CONSTRUCTOR, FUNCTION, METHOD]:
            self.openNonTerminal(SUBROUTINE_DEC)

            # subroutine kind keyword: 'constructor' | 'function' | 'method'
            self.eat(self.tokenizer.current_token)

            # return type: 'void' or any valid type
            return_type = self.tokenizer.current_token
            if return_type != VOID and not self.is_valid_type(return_type):
                raise CompilationError(
                    "Expected valid subroutine return type or void but was " +
                    return_type)
            self.eat(return_type)

            # subroutine name must be a valid identifier
            subroutine_name = self.tokenizer.current_token
            if not lexical_elements.is_identifier(subroutine_name):
                raise CompilationError(
                    "Expected valid subroutine name but was " +
                    subroutine_name)
            self.eat(subroutine_name)

            # '(' parameterList ')' followed by the subroutine body
            self.compile_parameter_list()
            self.compile_subroutine_body()

            self.closeNonTerminal(SUBROUTINE_DEC)
Beispiel #2
0
    def compile_subroutine_call(self):
        """Compile a subroutine call.

        Handles both forms: `name(expressionList)` and
        `prefix.name(expressionList)`.
        """
        # leading identifier: subroutine name, class name, or var name
        callee = self.tokenizer.current_token
        if not lexical_elements.is_identifier(callee):
            raise CompilationError(
                "Expected valid identifier in subroutine call but found " +
                callee)
        self.eat(callee)

        lookahead = self.tokenizer.current_token
        if lookahead == ".":
            # qualified call: consume '.' and the subroutine name
            self.eat(".")
            member = self.tokenizer.current_token
            if not lexical_elements.is_identifier(member):
                raise CompilationError(
                    "Expected valid identifier in subroutine call but found " +
                    member)
            self.eat(member)
        elif lookahead != "(":
            raise CompilationError(
                "Expected '(' or '.' in subroutine call but found " +
                lookahead)

        # argument list is common to both call forms
        self.eat("(")
        self.compile_expression_list()
        self.eat(")")
Beispiel #3
0
 def eat_var_dec(self):
     """Compile a single `type varName` pair of a variable declaration."""
     # type must be a valid Jack type
     type_token = self.tokenizer.current_token
     if not self.is_valid_type(type_token):
         raise CompilationError("Expected valid var type but found " +
                                type_token)
     self.eat(type_token)

     # variable name must be a valid identifier
     name_token = self.tokenizer.current_token
     if not lexical_elements.is_identifier(name_token):
         raise CompilationError("Expected valid var name but found " +
                                name_token)
     self.eat(name_token)
Beispiel #4
0
 def eat(self, token):
     """Consume the expected `token` and advance the tokenizer.

     Raises CompilationError when the tokenizer's current token does
     not match the expected one.
     """
     actual = self.tokenizer.current_token
     if actual == token:
         self.tokenizer.advance()
         return
     raise CompilationError(
         "Expected to find token '{0:}' but found '{1:}'".format(
             token, actual))
Beispiel #5
0
    def compile_class(self):
        """Compile a complete class:
        'class' className '{' classVarDec* subroutineDec* '}'.
        """
        self.tokenizer.advance()
        self.openNonTerminal(CLASS)

        # 'class' keyword
        self.eat(CLASS)

        # class name must be a valid identifier
        class_name = self.tokenizer.current_token
        if not lexical_elements.is_identifier(class_name):
            raise CompilationError(
                "Class name must be a valid identifier but was " +
                class_name)
        self.eat(class_name)

        # class body: '{' classVarDec* subroutineDec* '}'
        self.eat("{")
        self.compile_class_var_dec()
        self.compile_subroutine_dec()
        self.eat("}")

        self.closeNonTerminal(CLASS)
Beispiel #6
0
 def eat(self, token):
     """Consume the expected `token`, write its XML terminal element to
     the output file, and advance the tokenizer.

     Raises CompilationError when the tokenizer's current token does
     not match the expected one.
     """
     actual = self.tokenizer.current_token
     if actual != token:
         raise CompilationError(
             "Expected to find token '{0:}' but found '{1:}'".format(
                 token, actual))
     # emit e.g. "<symbol> ; </symbol>\n" for the consumed terminal
     tag = self.tokenizer.token_type(token)
     value = self.tokenizer.get_token_value(actual)
     self.output_file.write("<{0:}> {1:} </{0:}>".format(tag, value) + "\n")
     self.tokenizer.advance()
Beispiel #7
0
 def eat_var_sequence(self):
     """Compile a variable sequence: `type varName (',' varName)*`."""
     # first declaration carries the type
     self.eat_var_dec()
     # further names in the same declaration are comma-separated
     while self.tokenizer.current_token == ",":
         self.eat(",")
         var_name = self.tokenizer.current_token
         if not lexical_elements.is_identifier(var_name):
             raise CompilationError(
                 "Expected valid variable name but was " +
                 var_name)
         self.eat(var_name)
Beispiel #8
0
 def compile_file(self, file_path):
     """Compile a single source file to `<name>__.xml` in the same directory.

     Opens the output file, runs the compilation engine, and re-raises
     any CompilationError with an "ERROR: " prefix, preserving the
     original exception as the cause.

     Bug fix: the tokenizer was previously closed only when a
     CompilationError occurred, leaking it on success and on any other
     exception; it is now closed on every path via `finally`.
     """
     print("Compiling", file_path, "...")
     file_name = os.path.splitext(os.path.basename(file_path))[0]
     dir_name = os.path.split(file_path)[0]
     output_file_name = os.path.join(dir_name, file_name + "__.xml")
     with open(output_file_name, "w") as output_file:
         tokenizer = Tokenizer(file_path)
         try:
             compiler = CompilationEngine(tokenizer, output_file)
             compiler.compile()
             print("Compilation successful!", file_path, "=>",
                   output_file_name)
         except CompilationError as err:
             # chain the cause so the original traceback is not lost;
             # `err.message` is a project attribute of CompilationError
             raise CompilationError("ERROR: " + err.message) from err
         finally:
             # NOTE(review): assumes Tokenizer.close() is safe to call
             # after a successful compile — confirm against Tokenizer
             tokenizer.close()
Beispiel #9
0
    def compile_let_statement(self):
        """Compile a let statement:
        'let' varName ('[' expression ']')? '=' expression ';'.
        """
        self.openNonTerminal(LET_STATEMENT)

        # 'let' keyword
        self.eat(LET)

        # assignment target must be a valid identifier
        target = self.tokenizer.current_token
        if not lexical_elements.is_identifier(target):
            raise CompilationError("Expected valid variable name but was " +
                                   target)
        self.eat(target)

        # optional array index: '[' expression ']'
        if self.tokenizer.current_token == "[":
            self.eat("[")
            self.compile_expression()
            self.eat("]")

        # '=' expression ';'
        self.eat("=")
        self.compile_expression()
        self.eat(";")

        self.closeNonTerminal(LET_STATEMENT)