def lexer():
    """Tokenize the module-level ``content`` and return its tokens as strings.

    Returns:
        list[str]: one '(type, value)' string per token produced by the lexer.
    """
    # NOTE: the original used `global content` here, but `content` is only
    # read (never assigned), so the global statement was a no-op and is removed.
    # Renamed the local from `lexer` to `lex`: it shadowed the function's own name.
    lex = Lexer()
    lex.main(content)
    # Format each token as "(type, value)" for display/inspection.
    return ['(%s, %s)' % (token.type, token.value) for token in lex.tokens]
def __init__(self, content):
    """Run the lexer over *content* and initialize parser state.

    Args:
        content: raw source text to tokenize before parsing.
    """
    lex = Lexer()
    lex.main(content)
    # Token stream to be parsed.
    self.tokens = lex.tokens
    # Current read position within self.tokens.
    self.index = 0
    # Syntax tree built up as parsing proceeds.
    self.tree = SyntaxTree()