def gen_tree(ast):
    """Build and validate a typed Program tree from *ast*.

    Constructs a Program, validates its definitions, and runs the type
    checker.  On any CompilationException the error is reported to the
    user and the process exits; compilation errors are fatal.

    Returns:
        Program: the validated program tree.
    """
    try:
        prog = Program(ast)
        prog.validate_defs()
        type_check(prog, prog.def_types, prog.tlt_name_map)
    except CompilationException as e:
        # Report and abort: there is no recovery from a compile error.
        # (Converted from Python 2 print statements to print() calls.)
        print("=" * 10, "COMPILATION ERROR", "=" * 10)
        print(e.reason)
        print("Terminating Compilation")
        sys.exit()
    return prog
def program(self):
    """Parse the top-level rule:

        <program> ::= PROGRAM <variable> SEMI <block> DOT

    Returns the Program AST node for the whole source.
    """
    self.eat(TokenType.PROGRAM)
    name_node = self.variable()
    self.eat(TokenType.SEMI)
    # The program node pairs the declared name with its block body.
    node = Program(name_node.value, self.block())
    self.eat(TokenType.DOT)
    return node
def run(self): """ Compiles the code """ if len(self.tokens) == 0: #case when the program is empty self.compiled = ['.'] return preserved_tokens = self.tokens[:] #preserve the tokens because they're consumed during compilation self.program = Program(self).build() #begin recursive descent parsing self.tokens = preserved_tokens #restore the tokens self.program.clean() #begin the recursive clean/modification process (to translate to PL/0) self.program.compile() #generate the compiled PL/0 code
class Compiler():
    """Recursive-descent compiler driving parse, clean, and code generation
    over a list of Token objects produced by the lexer/parser.
    """

    # Token types that may appear in a <Statement>.
    valid_statement_tokens = [
        Token.LBLOCK, Token.VAR, Token.CONST, Token.IDENTIFIER,
        Token.IF, Token.INPUT, Token.OUTPUT, Token.WHILE, Token.SEMI,
    ]

    # Token types that may appear in an <Expression>.
    valid_expression_tokens = [
        Token.NUMBER, Token.IDENTIFIER, Token.LPAREN, Token.RPAREN,
        Token.PLUS, Token.MINUS, Token.DIV, Token.MUL,
        Token.TRUE, Token.FALSE,
    ]

    # Token types that may serve as a <Comparison> operator.
    valid_comparison_tokens = [
        Token.LTEQ, Token.GTEQ, Token.GT, Token.LT,
        Token.EQUAL, Token.NOTEQUAL,
    ]

    def __init__(self, tokens):
        """Initialize the compiler.

        tokens (list): Tokens produced by the parser.
        """
        self.tokens = tokens
        self.compiled = list()  # compiled code generated by run()

    def run(self):
        """Compile the token stream held by this instance."""
        if not self.tokens:
            # Empty program: the compiled output is just the terminator.
            self.compiled = ['.']
            return
        # Parsing consumes the token list, so keep a copy to restore after.
        saved = list(self.tokens)
        self.program = Program(self).build()  # recursive-descent parse
        self.tokens = saved
        self.program.clean()    # recursive clean/translate pass (to PL/0)
        self.program.compile()  # emit the compiled PL/0 code

    def error(self, text):
        """Report a fatal grammar error and terminate.

        text (string): Error message to report to the user.
        """
        tok = self.tokens[0]
        quit('LINE %s (Token: "%s") ERROR <%s> : %s'
             % (tok.line, tok.text, callee().lstrip('_').upper(), text))

    def next(self, pos=0):
        """Return the type of the token *pos* positions ahead, or the
        sentinel "<NONE>" when the lookahead runs past the stream.

        pos (int, optional): Lookahead distance into the token list.
        """
        if pos >= len(self.tokens):
            return "<NONE>"
        return self.tokens[pos].type

    def skip(self, pos=1):
        """Discard *pos* tokens from the front of the stream.

        pos (int, optional): Number of tokens to skip/discard.
        """
        self.tokens = self.tokens[pos:]

    def __str__(self):
        """Concatenated string form of the compiled output."""
        return ''.join(map(str, self.compiled))
# Grammar nonterminals declared as forward references (SForward) so they can
# refer to one another before their productions are attached elsewhere.  The
# second argument, where present, is the parse action building the AST node.

# A function definition: toks[0] is the FunctionHeader, the rest is the body.
function = SForward(
    'function',
    lambda s, loc, toks: create_function(toks[0].name, toks[0].args, toks[1:])
)

# An import statement: toks[1] is the imported name.
simport = SForward(
    'import',
    lambda s, loc, toks: create_import(toks[1])
)

# Function header: toks[1] is the name, toks[3:-1] the argument tokens
# (skipping the keyword/punctuation tokens around them).
function_header = SForward(
    'function_header',
    lambda s, loc, toks: FunctionHeader(toks[1], toks[3:-1])
)

arguments = SForward('arguments')
statement = SForward('statement')
# .setFailAction(
#     syntax_error('Invalid statement')
# )
statements = SForward('statements', listify)
block_statement = SForward('block_statement')
normal_statement = SForward('normal_statement')
string = SForward('string')
comment = SForward('comment')
imports = SForward('imports', listify)
functions = SForward('functions', listify)

# Top-level program: toks[0] / toks[1] feed the Program node; comments are
# ignored at this level rather than per-rule.
program = (
    SForward('program', lambda s, loc, toks: Program(toks[0], toks[1]))
    .ignore(comment)
)

# forloop.setFailAction(lambda loc: syntax_error(loc, 'Invalid for loop'))