def __init__(self, file):
    """Drive compilation of *file*.

    Builds a lexer over the input, creates a fresh symbol table and VM
    writer, opens the output stream, runs the full class compilation,
    and closes the output.
    """
    self.lex = Lex(file)
    self.symbols = SymbolTable()
    self.vm = VMWriter()
    # Output must be open before compile_class() emits anything.
    self.openout(file)
    self.compile_class()
    self.closeout()
def __init__(self, input, debug=False):
    """Takes code, creates a lexeme table, and generates assembly code.

    Args:
        input: Source handed to the lexical analyser.
        debug: When True, also dump the generated assembly to
            ``assembler_output.txt`` in the current working directory.
    """
    self.analyser = Lex.Lex(input, debug)
    self.jumpnum = 0
    self.i = 0  # Index of lexeme list.
    self.lexemes = self.analyser.lexemes
    self.assembly = self.parse()
    # If debugging is enabled, write the assembly out for inspection.
    if debug:
        self.out = open(os.path.join(os.getcwd(), "assembler_output.txt"), "w")
        try:
            self.out.write(self.assembly)
        finally:
            # Close even if the write fails, so the handle never leaks.
            self.out.close()
def show(source):
    """Tokenise *source* and render it as marked-up text.

    Keywords are wrapped by key(), symbols by symbol(), other visible
    tokens by text(); whitespace tokens pass through unchanged.

    Args:
        source: The raw text to tokenise and render.

    Returns:
        The concatenated rendered output as a single string.
    """
    # `keys` and `syms` are module-level tables; read-only access needs
    # no `global` declaration.
    keywords = keys + syms
    # Two-character operator table: first char -> possible second chars.
    spectab = {
        "=": [">"],
        ":": [":"],
    }
    separators = []  # [" ","\n","\t"]
    lex = Lex(spectab, keywords, separators)
    tokens = lex.Tokenise(source)
    # Collect fragments and join once -- avoids quadratic `out += s`.
    parts = []
    for s in tokens:
        if s in keys:
            parts.append(key(s))
        elif s in syms:
            parts.append(symbol(s))
        elif s not in [" ", "\n", "\t"]:
            parts.append(text(s))
        else:
            parts.append(s)
    return "".join(parts)
def __init__(self, file):
    """Compile *file*: lex it, open the output stream, run the class
    compilation, then close the output."""
    self.lex = Lex.Lex(file)
    # Output must be open before compile_class() emits anything.
    self.openout(file)
    self.compile_class()
    self.closeout()
def __init__(self, file):
    """Create a lexer over *file* and set up the command-info tables."""
    self.lex = Lex.Lex(file)
    self._init_cmd_info()
def lex_and_parse(str_in):
    """Run the lexer over *str_in* and parse the resulting token stream.

    Args:
        str_in: The raw input text.

    Returns:
        The parse result produced by Parse.Parse.
    """
    lexed = Lex.Lex(str_in)
    return Parse.Parse(lexed)
from Gram_SLR import SLR  # Syntactic/semantic analysis; generates quadruples.
import Lex  # Lexical analyser module.

# Start symbol of the grammar.
start = 'A'
# Grammar productions for the SLR parser (simple arithmetic/assignment).
productions = {
    'A': ['V=E', ],
    'E': ['E+T', 'E-T', 'T'],
    'T': ['T*F', 'T/F', 'F'],
    'F': ['(E)', 'i'],
    'V': ['i', ],
}

if __name__ == '__main__':
    # Fix: `Lex` is the module (imported via `import Lex`), so calling
    # `Lex()` raised TypeError; instantiate the class `Lex.Lex` instead,
    # consistent with the rest of the codebase.
    lex = Lex.Lex()
    lex.analyse('Test.c')
def __init__(self, file):
    """Create a lexer over *file* and initialise parser state.

    Tracks the current line number and whether an error has been seen.
    """
    self.lexer = Lex.Lex(file)
    self._init_instruction_info()
    self.lineNumber = 0   # current source line being processed
    self.wasError = False # set True once any error is detected
def __init__(self, file):
    """Wrap a lexer over *file*; all further work happens elsewhere."""
    self.lex = Lex.Lex(file)
def l0opt(self,expr,toks):
    """Consume trailing level-0 infix operators after *expr*.

    Repeatedly (via tail recursion) folds `expr <op> operand` into a
    Node while the next token is a level-0 operator; returns the final
    (expression, remaining-tokens) pair.
    """
    if toks:
        t,ts = self.unpack(toks)
        # NOTE(review): looks like a debug leftover -- confirm before removing.
        print( t,0,self.infix_tab[0] )
        # infix_tab[0][0] appears to hold left-associative operators
        # (operand parsed with atom); infix_tab[0][1] right-associative
        # ones (operand parsed as a full expr) -- TODO confirm against
        # the infix_tab setup.
        if t in self.infix_tab[0][0]:
            e2,rest = self.atom(ts) # left
            return self.l0opt(Node(t,[expr,e2]),rest)
        elif t in self.infix_tab[0][1]:
            e2,rest = self.expr(ts) # right
            return self.l0opt(Node(t,[expr,e2]),rest)
    # No level-0 operator next: hand back the expression unchanged
    # together with the unconsumed tokens.
    return (expr,toks)

# Keywords and punctuation recognised by the lexer.
keywords = ["if","then","else",
            "let","=","in",
            "fn",
            "infix","infixr",
            "=>",":",
            "(",",",")","+","-","*","/"]
# Two-character operator table: first char -> possible second chars
# ("=" + ">" gives "=>", ":" + ":" gives "::").
spectab = {
    "=":[">"],
    ":":[":"],
}
# Characters that separate tokens without being tokens themselves.
separators = [" ","\n","\t"]
lex = Lex(spectab,keywords,separators)
p = parser(keywords)
parse = p.read
# Public API of this module.
__all__ = ["lex","parse","p"]
# "interpreter","Empty","Extend","enviroment"
# implement a user datatype define
def __init__(self, file):
    """Create a lexer over *file* and initialise parser state."""
    self.lexer = Lex.Lex(file)
    self._init_instruction_info()
    self._lineNumber = 0  # current source line being processed