def __init__(self, stdin, stdout=sys.stdout, stderr=sys.stderr, mode=mode_execute):
    """Set up the front-end state shared by all pipeline stages.

    The streams are eventually closed by the pipeline (see the original
    note "Those streams will be closed at last").

    :param stdin: the source code input stream
    :param stdout: the standard output stream
    :param stderr: the standard error stream
    :param mode: one of the ``mode_*`` constants selecting how far to run
    """
    # Remember the operating mode and the three I/O channels.
    self.mode = mode
    self.stdin = stdin
    self.stdout = stdout
    self.stderr = stderr
    # The lexer reads the same source stream and reports on the same
    # output/error streams as this object.
    self.lexer = Lexer(stdin, stdout=stdout, stderr=stderr)
    # Analysis state, filled in as the pipeline advances.
    self.tokenTree = TokenTree()   # token tree built while lexing
    self.stable = STable()         # root symbol table
    self.rootNode = None           # syntax-tree root (set by parse)
    self.ahead = None              # the token just read (lookahead)
    self.buff = []                 # unget buffer for pushed-back tokens
    self.currentLine = 0           # controls echoing of lexer analysis result
class Parser(object):
    """Driver chaining lexical, syntactic and semantic analysis.

    The ``mode_*`` constants choose how far the pipeline runs and which
    intermediate representation is echoed to ``stdout``.
    """

    mode_lexer = 0    # stop after lexical analysis
    mode_parser = 1   # stop after syntax analysis (print syntax tree)
    mode_stable = 2   # stop after semantic analysis (print symbol table)
    mode_compile = 3  # compile only
    mode_execute = 4  # full pipeline including execution

    def __init__(self, stdin, stdout=sys.stdout, stderr=sys.stderr, mode=mode_execute):
        """
        Those streams will be closed at last.
        (Only ``stdin`` is closed in code visible here, by :meth:`parse`;
        presumably later stages close the others — confirm against callers.)

        :param stdin: the source code input stream
        :param stdout: the standard output stream
        :param stderr: the standard error stream
        :param mode: one of the ``mode_*`` constants
        """
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        # The lexer shares this object's I/O streams.
        self.lexer = Lexer(stdin, stdout=stdout, stderr=stderr)
        self.mode = mode
        self.tokenTree = TokenTree()
        self.rootNode = None     # syntax-tree root, set by parse()
        self.stable = STable()   # root symbol table
        self.ahead = None        # The token just read
        self.buff = []           # unget buffer
        self.currentLine = 0     # controller for printing lexer analysis result

    def lexse(self):
        """Run the lexer over the whole input.

        Accumulates the token-tree echo in a buffer and flushes it to
        ``stdout`` in one write on success.

        :return: the token-tree root node, or ``None`` when the lexer
            raises :class:`InvalidTokenError` (the partial echo is
            discarded and nothing is written to ``stdout``).
        """
        echo = StringIO.StringIO()
        try:
            self.ahead = self.lexer.next_token()
            while self.ahead:
                echo.write(self._build_token_tree())
                try:
                    self.ahead = self.lexer.next_token()
                except InvalidTokenError:
                    # Abort quietly; buffered echo is dropped.
                    return None
            self.stdout.write(echo.getvalue())
        finally:
            # FIX: the buffer previously leaked on the error path.
            echo.close()
        return self.tokenTree.rootNode

    def parse(self):
        """Run the parser (the lexer is driven on demand).

        The input stream is always closed, even on error.

        :return: ``(syntax_tree_root_node, token_tree_root_node)``, or
            ``None`` when lexing fails with :class:`InvalidTokenError`.
        """
        try:
            self.rootNode = self._parse_exter_stmts()
        except InvalidTokenError:
            return None
        else:
            if self.mode == Parser.mode_parser:
                self.stdout.write('%s\n' % self.rootNode.gen_tree())
            return self.rootNode, self.tokenTree.rootNode
        finally:
            self.stdin.close()

    def semantic(self):
        """Semantic analysing using DFS.

        Runs :meth:`parse` first, registers the built-in ``read`` and
        ``write`` functions, then walks the syntax tree iteratively,
        building symbol tables as it goes.

        :return: ``(root_stable, root_node, root_token_node)``, or
            ``None`` on any parse or semantic error.
        """
        # do parse first
        parse_result = self.parse()
        if not parse_result:
            return None
        # add `read` and `write` function to stable
        self.stable.symbol_append(Symbol('read', STypeFunc(SType(tokens.Token_INT), [])))
        # FIX: the parameter type used the bare name `Token_INT`; every
        # sibling reference qualifies it as `tokens.Token_INT`.
        self.stable.symbol_append(
            Symbol('write', STypeFunc(SType(tokens.Token_VOID), [SType(tokens.Token_INT)])))
        # Each stack entry pairs a node with the direct symbol table it is in.
        stack = [(self.rootNode, self.stable)]
        while stack:
            node, stable = stack.pop()
            try:
                table = node.gen_stable(stable)
            # FIX: legacy `except E, e` comma syntax replaced with `as`
            # (valid from Python 2.6 and required by Python 3).
            except SemanticsError as e:
                self.stderr.write('%s %s\n' % (str(e), node.gen_location()))
                return None
            else:
                # Reverse so children are popped (visited) in source order;
                # a child lives in the table its parent created, if any.
                children = list(node.childItems)
                children.reverse()
                stack += [(child, table or stable) for child in children]
        # check main function
        error = self.stable.check_main()
        if error:
            self.stderr.write('%s\n' % error)
            return None
        elif self.mode == Parser.mode_stable:
            self.stdout.write(self.stable.gen_tree())
        return self.stable, parse_result[0], parse_result[1]