def callback(res):
    # NOTE(review): this function reads `self` but only accepts `res` — it
    # appears to be the inner callback of a GUI method (cf. semantic_run in
    # MainWindow); it only works as a closure where `self` is in scope.
    lexical = Lexical()
    lexical.get_dfa('./help/dfa.json')  # load the DFA transition table
    lexical_res = lexical.lexical_run(str(res).replace(
        '\r\n', '\n'))  # token stream from lexical analysis (normalize Windows newlines)
    tokens, nums_attr = [], []
    # No tokens and no errors at all means the editor text was empty/invalid.
    if not lexical_res[0] and not lexical_res[1]:
        QMessageBox.warning(self, '输入无效', '请输入有效程序文本')
        return
    for idx in range(len(
            lexical_res[0])):  # item[1] = token kind code, item[3] = line number, item[2] = attribute value
        item = lexical_res[0][idx]
        if 'comment' not in item[1]:  # comments are dropped before parsing
            tokens.append(item[1])
            nums_attr.append((item[3], item[2]))
    if not self.syntax:
        # Lazily build and cache the semantic grammar on first use.
        self.syntax = Syntax()
        self.syntax.syntax_init('./help/semantic.json')
    semantic = Semantic()
    res_lst, res_err = semantic.semantic_run(tokens, nums_attr, self.syntax)
    self.semantic_win = QDialog()
    ui = semantic_res.Ui_Dialog()
    init_win(self.semantic_win, ui)
    set_semantic_win(ui, semantic)
def Run(self, event=0):
    """Lex, parse and report on the code currently in the editor.

    Bound both to the Run menu entry and the <F5> key; *event* is the Tk
    event object supplied when triggered via the key binding (unused).
    """
    self.objMain = Main()
    self.consoleT.delete(1.0, END)
    # Dump the editor text-area contents to the shared code file.  Using
    # `with` on the opened handle guarantees it is closed even if the write
    # (or the analysis below) raises — the original leaked the handle then.
    # The self.codeFile attribute is kept for backward compatibility.
    self.codeFile = open(Main._codeFile, 'w')
    with self.codeFile:
        self.codeFile.write(self.codeT.get('1.0', END))
    # Re-open the same file for reading and run the lexer over it.
    self.codeFile = open(Main._codeFile, 'rb+')
    with self.codeFile:
        self.objMain.mainMethod(self.codeFile)
    # Show the produced tokens in the console area...
    self.consoleT.insert(1.0, Main._fileData)
    # ...and persist them to the output/test file as well.
    self.outputFile = open(Main._outputFile, 'w')
    with self.outputFile:
        self.outputFile.write(Main._fileData)
    # Run the syntax analysis; a Semantic object is built too but its
    # PROG() is not invoked here (see the commented-out variant upstream).
    # self.objMain.PROG()
    self.objSyntax = Syntax(Main._tokens)
    self.objSemantic = Semantic(Main._tokens)
    if self.objSyntax.PROG():
        print("Code is parsed")
    else:
        print("there is error in code")
def semantic_info(self):
    """Open the dialog that displays the semantic grammar information."""
    if not self.syntax:
        # First use: build the Syntax object and load the semantic grammar.
        self.syntax = Syntax()
        self.syntax.syntax_init('./help/semantic.json')
    dialog = QDialog()
    self.semantic_info_win = dialog
    dialog_ui = semantic_info.Ui_Dialog()
    init_win(dialog, dialog_ui)
    _set_grammar_tbl(dialog_ui.grammar_tbl, self.syntax)
def __init__(self, input_phrase: str):
    """Wire up the full pipeline (syntax, parser, evaluator) for *input_phrase*."""
    self.output = ""
    self.input_phrase = input_phrase
    self.root = Node()
    # Configuration must be loaded before the components that consume it.
    self.load_config()
    self.syntax = Syntax(self.config)
    self.parser = Parser(input_phrase, self.config)
    self.evaluator = Evaluator(self.eval_config, self.config)
def parse(self, expression):
    """Parse *expression* into an AST, store it, and return its evaluation."""
    self.expression = expression
    self.ast = Syntax(self.expression).parse()
    return self.evaluate(self.ast)
def syntax_matching(message):
    """Try to interpret *message* as a Japanese reminder command.

    Returns a Syntax object built from the named capture groups (span, days,
    time, lab, member, item) when the message matches the reminder grammar,
    or False when it does not.
    """
    # Raw string (r"...") so regex escapes such as \s and \S are not treated
    # as (invalid) Python string escapes; the pattern value is unchanged.
    pattern = r"^(?P<span>次の?|毎(週|月)|来週の?|来月の?|来年の?)?(?P<days>([0-9]{4}年[0-9]{1,2}月[0-9]{1,2}日|[0-9]{1,2}月[0-9]{1,2}日|[0-9]{1,2}日|末|(第[1-5])?[日月火水木金土]曜日?)+)の?(?P<time>(午前|午後)?[0-9]{1,2}時([0-9]{1,2}分)?|(AM|PM|am|pm)?[0-9]{1,2}:[0-9]{1,2})に?(?P<lab>研究室にいる|研究室の)?(?P<member>B4|M1|M2|awareness|AWARENESS|novel\s?interface|NOVEL\s?INTERFACE|cmc|CMC|全員|人)に(?P<item>\S+)をリマインド$"
    match = re.search(pattern, message)
    if match:
        print("match")
        print(match.group("span"))
        s = Syntax(match.group("span"), match.group("days"),
                   match.group("time"), match.group("lab"),
                   match.group("member"), match.group("item"))
        s.show_all()
        return s
    else:
        print("unmatch")
        return False
def get_display_paste(self, data, tvars, uid):
    """Fill the template variables needed to render an existing paste."""
    import xml.sax.saxutils as saxutils
    from syntax import Syntax

    tvars = self.set_paste_data(tvars, data.title, data.code, data.comment,
                                data.type)
    tvars.update({
        "isPaste": True,
        "url": self.ctx.request.application_url + "/p/" + uid,
        "codeFormatted": Syntax.get_highlighted_code(data.code, data.type),
    })
    return tvars
def grammar(self):
    """Show the dialog listing the sets and tables computed for the parser."""
    syntax = Syntax()
    syntax.syntax_init('./help/syntax.json')
    syntax.get_follow()  # ensure FOLLOW sets exist before display
    window = QDialog()
    self.grammar_window = window
    dialog_ui = syntax_grammar.Ui_dialog()
    init_win(window, dialog_ui)
    set_grammar_tbl(dialog_ui, syntax)
def set_grammar_tbl(ui: syntax_grammar.Ui_dialog, syntax: Syntax):
    # Populate the grammar dialog: grammar rules, FIRST/FOLLOW sets,
    # the parse table, and the merged LR item collections.
    _set_grammar_tbl(ui.grammar_table, syntax)
    ui.lst_table.setRowCount(len(syntax.non_terminals))  # table showing FIRST and FOLLOW sets
    for idx, non_term in enumerate(syntax.non_terminals):
        ui.lst_table.setItem(idx, 0, QTableWidgetItem(non_term))
        ui.lst_table.setItem(
            idx, 1, QTableWidgetItem(' '.join(syntax.first[non_term])))
        ui.lst_table.setItem(
            idx, 2, QTableWidgetItem(' '.join(syntax.follow[non_term])))
    # Columns of the parse table: every symbol except the start symbol.
    symbols = syntax.terminals + syntax.non_terminals
    symbols.remove(syntax.start_symbol)
    ui.table.setColumnCount(len(symbols))  # the parse (action/goto) table
    ui.table.setHorizontalHeaderLabels(symbols)
    ui.table.setRowCount(len(syntax.table))
    ui.table.setVerticalHeaderLabels(map(str, range(len(syntax.table))))
    for idx, state in enumerate(syntax.table):
        for idy, symbol in enumerate(symbols):
            if symbol in syntax.table[state]:
                item = QTableWidgetItem(str(syntax.table[state][symbol]))
                if syntax.table[state][symbol] == 'acc':
                    # Highlight the accept action in blue.
                    item.setForeground(QBrush(QColor(0, 0, 255)))
                ui.table.setItem(idx, idy, item)
    count, item_num, merged_res = 0, 0, syntax.get_merged_table(
    )  # item_num accumulates the total number of items across all states
    for idx in range(len(merged_res)):
        item_num += len(merged_res[idx])
    ui.item_tbl.setRowCount(item_num)
    ui.item_tbl.setVerticalHeaderLabels(map(str, range(item_num)))
    for idx in range(len(merged_res)):
        item_collection = merged_res[idx]
        for idy, ((production, dot_pos),
                  look_ahead) in enumerate(item_collection.items()):
            non_term, symbols = syntax.rules[production]
            copy_symbols = symbols.copy()
            copy_symbols.insert(dot_pos, '·')  # render the item with its dot position
            ui.item_tbl.setItem(count, 0, QTableWidgetItem(str(idx)))
            ui.item_tbl.setItem(
                count, 1,
                QTableWidgetItem(non_term + ' -> ' + ' '.join(copy_symbols)))
            ui.item_tbl.setItem(count, 2,
                                QTableWidgetItem(' '.join(look_ahead)))
            count += 1
    # Auto-size all four tables to their contents.
    for table in [ui.grammar_table, ui.item_tbl, ui.lst_table, ui.table]:
        table.horizontalHeader().setSectionResizeMode(
            QHeaderView.ResizeToContents)
def get_content(self, **kwds): """Returns the content for the page""" #for debugging: #self.write = self.ctx.response.out.write tvars = {"user":self.user, "user_short":self.user.split("@")[0]} path_parts = self.ctx.request.path.split('/') uid = None if "uid" in kwds: uid = kwds["uid"] else: tvars["url"] = None if len(path_parts) > 2 and path_parts[1] == "p": uid = path_parts[2] if uid: try: data = self.ctx.db.paste.gql("WHERE uid = :1", uid)[0] tvars = self.get_display_paste(data, tvars, uid) except IndexError: tvars = self.get_empty_display(tvars) else: tvars = self.get_empty_display(tvars) if "issues" in kwds: tvars["issues"] = kwds["issues"] if "vars" in kwds: t = kwds["vars"] #t for temporary tvars = self.set_paste_data(tvars,t["title"],t["code"],t["comment"]) else: tvars["issues"] = None query = self.ctx.db.paste.all() try: query.filter('name =', self.ctx.curr_user.nickname()).order("-date") tvars["user_paste_count"] = query.count() tvars["user_pastes"] = query.fetch(10) except IndexError: tvars["user_paste_count"] = 0 tvars["user_pastes"] = None tvars["types"] = Syntax.get_type_list() tvars["types"].sort() tvars["application_url"] = self.ctx.request.application_url tvars.update(self.values) return template.render("templates/index.html", tvars)
def getJobs(self, var, index, sliced_indexes):
    # Given an assignment to `var` at source line `index`, decide which
    # right-hand-side variables become tainted and return TaintJob objects
    # for them.  Returns None when the line is not an assignment to `var`
    # (implicitly) or on the fatal inconsistent-state branch below.
    # NOTE: this is Python 2 code (print statements).
    line = self.l[index]
    codestr = line.codestr
    # Match "<var access>[subscripts] op= ..." — plain or compound
    # assignment; the (?!=) lookahead excludes the == comparison operator.
    str_pat = var.accessStr() + r"\s*(\[[^\[\]]+\])*\s*[\+\-\*/%&\|]?\s*=(?!=)"
    print "CHECKING CODE:", codestr
    m = re.search(str_pat, codestr)
    if m:
        span = m.span()
        left = m.group()
        # Scan the left side for the accessed name (identifier chars,
        # dots, ->, * and whitespace).
        i = 0
        while re.search(r"[A-Za-z0-9_\.\*\->\s]", left[i]):
            i += 1
        name = "".join(left[:i].split())  # accessed name with whitespace removed
        rfl, pp = var.matchAccessPattern(name)
        right = codestr[span[1]:]
        right = right.split(';')[0].strip()  #strip out ";"
        # Specialized handlers for the recognizable RHS shapes.
        va_arg_handler = Va_argHandler(right, pp, rfl, index, sliced_indexes,
                                       self.l, self.TG)
        fopen_handler = FopenHandler(right)
        type_conv = TypeConvertHandler(right, pp, rfl)
        cond_exp = ConditionalExpressionHandler(right, pp, rfl)
        if Syntax.isVariable(right):
            if pp is None:
                # NOTE(review): deliberate crash — 1/0 raises
                # ZeroDivisionError to halt immediately on this
                # inconsistent state; the return is unreachable.
                print "Fatal Error!!!", 1/0
                return None
            else:
                print pp
                return [TaintJob(index, TaintVar(right, pp, rfl))]
        elif va_arg_handler.match():
            return va_arg_handler.generate_jobs()
        elif fopen_handler.match():
            taintvars = fopen_handler.generate_vars()
            jobs = map(lambda x: TaintJob(index, x), taintvars)
            return jobs
        elif cond_exp.match():
            symbols = cond_exp.generate_candidate_vars()
        elif type_conv.match():
            symbols = type_conv.generate_candidate_vars()
        else:
            # Generic fallback: extract every variable in the expression.
            symbols = Filter.expression2vars(right)
        print symbols
        # Drop names that are actually function calls on this line.
        varstrs = Filter.filterOutFuncNames(symbols, line.codestr)
        print "Right variables in assignment:", varstrs
        taintvars = set(map(lambda x: TaintVar(x, []), varstrs))
        jobs = map(lambda x: TaintJob(index, x), taintvars)
        return jobs
from lexer import Lexer
from syntax import Syntax
import os, webbrowser, time, sys

# Run the lexer over code.for and hand the token stream to the syntax
# analyser; 'debug' on the command line enables the analyser's debug mode.
now = time.time()
Syntax(Lexer('code.for').run(), debug='debug' in sys.argv).start()
done = time.time()
#print(done - now)  # prints the time taken by the analysis
# Graphviz - convert the generated .dot file into a PNG image:
#os.system("dot syntax.dot -Tpng -o syntax.png")
# Open the rendered file in Chrome:
#webbrowser.get("C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s").open_new("file:///D:/Google%20Drive/Computa%C3%A7%C3%A3o/Atual/_Working/Compiler/syntax.png")
def __init__(self, code):
    """Build the symbol table for *code* and run the semantic checks."""
    self.tabelaSimbolos = {}
    self.escopo = "global"
    # Parse first; the walk below fills the symbol table per scope.
    ast_root = Syntax(code).ast
    self.tree = ast_root
    self.programa(ast_root)
    self.verificacoes(self.tabelaSimbolos)
class MainWindow(QMainWindow):
    """Main compiler GUI window: menus for file handling and for running the
    lexical, syntax and semantic analysis phases."""

    # noinspection PyArgumentList
    def __init__(self, editor):
        super().__init__()
        self.editor = editor
        self.setFixedSize(width, height)
        self.setWindowTitle('编译器@zjr')
        self.setWindowIcon(QIcon('help/system.ico'))
        self.setCentralWidget(editor)
        self.file_path = None  # path of the currently opened file
        self.menu_bar = QMenuBar(self)  # menu bar
        self.file_menu = QMenu('文件', self.menu_bar)
        self.open_file_action = QAction('打开',
                                        shortcut=QKeySequence.Open,
                                        triggered=self.open_file)
        self.save_file_action = QAction('保存',
                                        shortcut=QKeySequence.Save,
                                        triggered=self.save_file)
        self.save_as_action = QAction('另存为',
                                      shortcut='ctrl+shift+s',
                                      triggered=self.save_as_file)
        self.lexical_menu = QMenu('词法分析', self.menu_bar)
        self.lexical_run_action = QAction('运行',
                                          shortcut='ctrl+f1',
                                          triggered=self.lexical_run)
        self.dfa_action = QAction('DFA转换表', triggered=self.dfa)
        self.nfa_action = QAction('NFA转换表', triggered=self.nfa)
        self.syntax_menu = QMenu('语法分析', self.menu_bar)
        self.syntax_run_action = QAction('运行',
                                         shortcut='ctrl+f2',
                                         triggered=self.syntax_run)
        self.grammar_action = QAction('语法信息', triggered=self.grammar)
        self.semantic_menu = QMenu('语义分析', self.menu_bar)
        self.semantic_run_action = QAction('运行',
                                           shortcut='ctrl+f3',
                                           triggered=self.semantic_run)
        self.semantic_info_action = QAction('语义信息',
                                            triggered=self.semantic_info)
        self.about_menu = QMenu('关于', self.menu_bar)
        self.more_action = QAction('待实现', triggered=self.more)
        self.init_menu_bar()
        # Child windows, created lazily by the corresponding actions.
        self.lexical_window = None
        self.dfa_window = None
        self.nfa_window = None
        self.syntax_window = None
        self.grammar_window = None
        self.semantic_win = None  # semantic-analysis result window
        self.semantic_info_win = None  # semantic info window
        self.syntax = None  # cached Syntax object for the semantic grammar

    def init_menu_bar(self):
        # Attach all menus and their actions to the menu bar.
        self.menu_bar.setGeometry(0, 0, width, bar_height)
        for menu_bar in [
                self.file_menu, self.lexical_menu, self.syntax_menu,
                self.semantic_menu, self.about_menu
        ]:
            self.menu_bar.addMenu(menu_bar)
        self.file_menu.addAction(self.open_file_action)
        self.file_menu.addAction(self.save_file_action)
        self.file_menu.addAction(self.save_as_action)
        self.lexical_menu.addAction(self.lexical_run_action)
        self.lexical_menu.addAction(self.dfa_action)
        self.lexical_menu.addAction(self.nfa_action)
        self.syntax_menu.addAction(self.syntax_run_action)
        self.syntax_menu.addAction(self.grammar_action)
        self.semantic_menu.addAction(self.semantic_run_action)
        self.semantic_menu.addAction(self.semantic_info_action)
        self.about_menu.addAction(self.more_action)

    def open_file(self):  # open a file into the editor
        self.file_path = QFileDialog.getOpenFileName(self, '',
                                                     os.getcwd() + '/input',
                                                     'C(*.c);;Txt(*.txt)')[0]
        if self.file_path:
            with open(self.file_path, 'r', encoding='utf-8') as f:
                self.editor.set_text(f.read())

    def save_file(self):  # save the current file
        def callback(res):
            if self.file_path:
                with open(self.file_path, 'w', encoding='utf-8',
                          newline='') as f:
                    f.write(res)
            else:
                # No path yet — fall back to "save as".
                self.save_as_file()

        self.editor.get_text(callback)

    def save_as_file(self):  # save the file under a new name
        def callback(res):
            if self.file_path:
                with open(self.file_path, 'w', encoding='utf-8',
                          newline='') as f:
                    f.write(res)

        self.file_path = QFileDialog.getSaveFileName(self, '',
                                                     os.getcwd() + '/input',
                                                     'C(*.c);;Txt(*.txt)')[0]
        self.editor.get_text(callback)

    def lexical_run(self):  # run lexical analysis
        def callback(res):
            lexical = Lexical()
            lexical.get_dfa('./help/dfa.json')
            res = lexical.lexical_run(str(res).replace(
                '\r\n', '\n'))  # token stream; Windows '\r\n' newlines are normalized first
            self.lexical_window = LexicalWindow(res)
            self.lexical_window.show()

        self.editor.get_text(callback)

    def dfa(self):  # show the DFA transition table
        lexical = Lexical()
        lexical.get_dfa('./help/dfa.json')
        self.dfa_window = DFAWindow(lexical.get_dfa_table())
        self.dfa_window.show()

    def nfa(self):  # show an NFA transition table and its DFA conversion
        path = QFileDialog.getOpenFileName(self, '',
                                           os.getcwd() + '/help/nfa.json',
                                           'Json(*.json)')[0]
        if path:
            lexical = Lexical()
            lexical.get_nfa(path)
            self.nfa_window = NFAWindow(lexical.nfa, lexical.nfa2dfa())
            self.nfa_window.show()

    def syntax_run(self):  # run syntax analysis
        def callback(res):
            lexical = Lexical()
            lexical.get_dfa('./help/dfa.json')  # load the DFA transition table
            lexical_res = lexical.lexical_run(str(res).replace(
                '\r\n', '\n'))  # token stream from lexical analysis
            tokens, nums_attr = [], []
            if not lexical_res[0] and not lexical_res[1]:
                QMessageBox.warning(self, '输入无效', '请输入有效程序文本')
                return
            for idx in range(len(
                    lexical_res[0])):  # item[1] = token kind code, item[3] = line number, item[2] = attribute value
                item = lexical_res[0][idx]
                if 'comment' not in item[1]:  # comments are dropped before parsing
                    tokens.append(item[1])
                    nums_attr.append((item[3], item[2]))
            syntax = Syntax()
            syntax.syntax_init('./help/syntax.json')
            syntax_lst, syntax_err = syntax.syntax_run(tokens, nums_attr)
            self.syntax_window = QDialog()
            ui = syntax_res.Ui_Dialog()
            init_win(self.syntax_window, ui)
            set_syntax_win(ui, syntax, syntax_lst, lexical_res[1], syntax_err)

        self.editor.get_text(callback)

    def grammar(self):  # show the sets and tables computed for the parser
        syntax = Syntax()
        syntax.syntax_init('./help/syntax.json')
        syntax.get_follow()
        self.grammar_window = QDialog()
        ui = syntax_grammar.Ui_dialog()
        init_win(self.grammar_window, ui)
        set_grammar_tbl(ui, syntax)

    def semantic_run(self):  # run semantic analysis
        def callback(res):
            lexical = Lexical()
            lexical.get_dfa('./help/dfa.json')  # load the DFA transition table
            lexical_res = lexical.lexical_run(str(res).replace(
                '\r\n', '\n'))  # token stream from lexical analysis
            tokens, nums_attr = [], []
            if not lexical_res[0] and not lexical_res[1]:
                QMessageBox.warning(self, '输入无效', '请输入有效程序文本')
                return
            for idx in range(len(
                    lexical_res[0])):  # item[1] = token kind code, item[3] = line number, item[2] = attribute value
                item = lexical_res[0][idx]
                if 'comment' not in item[1]:
                    tokens.append(item[1])
                    nums_attr.append((item[3], item[2]))
            if not self.syntax:
                # Lazily build and cache the semantic grammar on first use.
                self.syntax = Syntax()
                self.syntax.syntax_init('./help/semantic.json')
            semantic = Semantic()
            res_lst, res_err = semantic.semantic_run(tokens, nums_attr,
                                                     self.syntax)
            self.semantic_win = QDialog()
            ui = semantic_res.Ui_Dialog()
            init_win(self.semantic_win, ui)
            set_semantic_win(ui, semantic)

        self.editor.get_text(callback)

    def semantic_info(self):  # semantic-related information
        if not self.syntax:
            self.syntax = Syntax()
            self.syntax.syntax_init('./help/semantic.json')
        self.semantic_info_win = QDialog()
        ui = semantic_info.Ui_Dialog()
        init_win(self.semantic_info_win, ui)
        _set_grammar_tbl(ui.grammar_tbl, self.syntax)

    def more(self):  # to be implemented
        pass
class Manager():
    """Drives the propositional-logic pipeline: parsing, syntax checking,
    evaluation over single and all interpretations, and SAT classification.
    All human-readable results are accumulated in self.output as well as
    printed."""

    config = {}       # grammar/operator configuration loaded from config.json
    eval_config = {}  # atom -> bool assignment used by the evaluator

    def __init__(self, input_phrase: str):
        self.input_phrase = input_phrase
        self.root = Node()
        self.load_config()
        self.syntax = Syntax(self.config)
        self.parser = Parser(input_phrase, self.config)
        self.evaluator = Evaluator(self.eval_config, self.config)
        self.output = ""

    def update_evaluator(self, config: dict):
        # Convert string flags ("True"/"False") to booleans in place and
        # install the result as the evaluator's assignment.
        for k in config:
            v = config[k]
            if (v == "True"):
                config[k] = True
            elif (v == "False"):
                config[k] = False
            else:
                # Any other value is a configuration error.
                from utils import fail
                fail("config", (k, v))
        self.evaluator.eval_config = config

    def load_config(self):
        # Load the operator configuration from the working directory.
        with open("config.json") as f:
            from json import load
            self.config = load(f)

    def parse(self):
        # Validate the symbols, build the parse tree and share its root
        # with the syntax checker.
        self.syntax.validate(self.input_phrase)
        self.parser.parse()
        self.syntax.set_root(self.parser.root)
        self.root = self.parser.root
        out = "Parser: ✔️"
        self.output += out + "\n"
        print(out)

    def validate(self):
        # Symbol-level validation; also initializes the evaluator's
        # assignment with every atom set to False.
        ret = self.syntax.validate(self.input_phrase)
        self.evaluator.eval_config = {i: False for i in self.syntax.atom_list}
        out = "Symbols: ✔️"
        self.output += out + "\n"
        print(out)
        return ret

    def check_syntax(self):
        if (self.syntax.root == None):
            self.parse()  # lazy parse on first use
        rez = self.syntax.check_syntax()
        out = "Syntax: ✔️"
        self.output += out + "\n"
        print(out)
        return rez

    def reconstruct(self, ):
        # Rebuild the phrase text from the parse tree.
        if (self.syntax.root == None):
            self.parse()
        return self.syntax.reconstruct()

    def evaluate(self, standalone=False):
        # Evaluate the tree under the current assignment.  With
        # standalone=True also runs the full check pipeline and prints a
        # one-row truth table.
        if (self.syntax.root == None):
            self.parse()
        if (self.evaluator.was_set == False):
            self.evaluator.setValues(self.syntax.root)
        rez = self.evaluator.evaluate(self.syntax.root)
        if standalone == True:
            self.validate()
            self.check_syntax()
            from prettytable import PrettyTable
            self.reconstruct()
            table = PrettyTable([k for k in self.evaluator.eval_config.keys()] +
                                self.syntax.operations)
            table.add_row([k for k in self.evaluator.eval_config.values()] +
                          self.evaluator.results)
            out = "Evaluation: ✔️" + "\n"
            out += str(table)
            self.output += out
            print(out)
        return rez

    def evaluate_all_interpretations(self, pr=True):
        # Build the full truth table: evaluate under every possible
        # assignment of the atoms (2^n rows, counting p down to 0).
        if (len(self.evaluator.eval_config) == 0):
            self.validate()
        p = 2**len(
            self.evaluator.eval_config.keys()) - 1  # number of bits we need
        lbits = len(bin(p).split("b")[1])
        from prettytable import PrettyTable
        self.reconstruct()
        self.check_syntax()
        rez = PrettyTable([k for k in self.evaluator.eval_config.keys()] +
                          self.syntax.operations)
        while p != -1:
            # Current assignment = binary digits of p, left-padded to lbits.
            bits = [int(c) for c in bin(p).split("b")[1]]
            while (len(bits) < lbits):
                bits.insert(0, 0)
            i = 0
            l = []
            for k in self.evaluator.eval_config.keys():
                self.evaluator.eval_config[k] = bool(bits[i])
                l.append(bool(bits[i]))
                i += 1
            self.evaluator.was_set = False  # force setValues on re-evaluation
            self.evaluate()
            rez.add_row(l + self.evaluator.results)
            self.evaluator.results = []
            p -= 1
        out = "Evaluation: ✔️" + "\n"
        if pr:
            out += str(rez)
        self.output += out
        print(out)
        return rez

    def sat_not_sat(self, pr=False):
        # Classify the formula: Valid (true in all rows), Satisfiable
        # (true in some row) or Not satisfiable.
        rez = self.evaluate_all_interpretations(pr)
        r = []
        for row in rez._rows:
            r.append(row[-1])  # get the last item
        ok = False
        out = "Is "
        if (all(r)):
            out += "Valid"
        else:
            for i in r:
                if i == True:
                    ok = True
            if ok:
                out += "Satisfiable"
            else:
                out += "Not satisfiable"
        print(out)
        self.output += out

    def print_tree(self):
        # Pretty-print the parse tree to stdout.
        if (self.syntax.root == None):
            self.parse()
        from pptree import print_tree
        print_tree(self.syntax.root, childattr='childs', nameattr='info')
def get(self):
    """Serve the syntax-highlighting stylesheet as text/css."""
    from syntax import Syntax
    response = self.response
    response.headers['Content-Type'] = 'text/css'
    response.out.write(Syntax.get_syntax_css())
def sintactic(file, outputType):
    """Run the syntax analyser on *file* and store the tree in the module
    global ``syntaxTree``."""
    global syntaxTree
    syntaxTree = Syntax(file, outputType).go(file)
from utils import load, gui_l
from lexer import Lexer
from sys import argv
from syntax import Syntax
import os
import webbrowser

# Lex code.for, parse with full debug output, and dump the syntax tree
# to a Graphviz .dot file.
Syntax(Lexer(load('code.for')).get_tokens(''),
       debug="all").export_to_file('syntax.dot')
# Graphviz: convert the generated .dot file into a PNG image.
os.system("dot syntax.dot -Tpng -o syntax.png")
# "Gambiarra Fedorenta" (Portuguese: "smelly hack") — open the rendered
# PNG in Chrome using hard-coded Windows paths.
#os.system('%SystemRoot%\\System32\\rundll32.exe "%ProgramFiles%\\Windows Photo Viewer\\PhotoViewer.dll", ImageView_Fullscreen D:\\Google Drive\\Computação\\Atual\\_Working\\Compiler\\syntax.png')
webbrowser.get(
    "C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s"
).open_new(
    "file:///D:/Google%20Drive/Computa%C3%A7%C3%A3o/Atual/_Working/Compiler/syntax.png"
)
# NOTE(review): the block below is a no-op string literal (disabled CLI
# entry point kept for reference).
'''
if __name__ == '__main__':
    if len(argv) > 1:
        if '-gui' in argv:
            argv.remove('-gui')
            gui_l(Lexer(load(argv[1])).get_tokens())
        else:
            Lexer(load(argv[1]),verbose=True).get_tokens()
    else:
        Lexer(load('code.for'),verbose=True).get_tokens()
'''
class IDE():
    """Minimal Tkinter IDE: a code text area, a console area, and a Run
    command that lexes/parses the code via the Main/Syntax/Semantic classes."""

    def Run(self, event=0):
        # *event* is the Tk event object when triggered via <F5>; unused.
        self.objMain = Main()
        self.consoleT.delete(1.0, END)
        #open file and write all the IDE code text area content
        self.codeFile = open(Main._codeFile, 'w')
        self.codeFile.write(self.codeT.get('1.0', END))
        self.codeFile.close()
        #open file for reading
        self.codeFile = open(Main._codeFile, 'rb+')
        self.objMain.mainMethod(self.codeFile)
        self.codeFile.close()
        #write tokens to console area
        self.consoleT.insert(1.0, Main._fileData)
        #write tokens to test file
        self.outputFile = open(Main._outputFile, 'w')
        self.outputFile.write(Main._fileData)
        self.outputFile.close()
        #call syntax
        # self.objMain.PROG()
        self.objSyntax = Syntax(Main._tokens)
        self.objSemantic = Semantic(Main._tokens)
        if self.objSyntax.PROG():
            print("Code is parsed")
        else:
            print("there is error in code")
        # Disabled variant that also ran the semantic phase:
        # if self.objSyntax.PROG():
        #     if self.objSemantic.PROG():
        #         print("Code is parsed")
        #     else:
        #         print("there is error in code")
        # else:
        #     print("there is error in code")

    def newFunc(self):
        # "New" menu entry: clear both text areas.
        self.codeT.delete(1.0, END)
        self.consoleT.delete(1.0, END)

    def falto(self):
        # Placeholder for not-yet-implemented menu entries.
        pass

    def yourFunction(self, event):
        print('left')

    def __init__(self, root):
        root.iconbitmap('icon/Cpy.ico')
        root.title("Cpy")
        self.width = root.winfo_screenwidth()
        self.height = root.winfo_screenheight()
        self.w2 = self.width - 650
        self.h2 = self.height - 300
        root.geometry("700x650")
        # Menu bar: File / Edit / Run.
        self.menu = Menu(root)
        root.config(menu=self.menu)
        self.filemenu = Menu(self.menu)
        self.editmenu = Menu(self.menu)
        self.menu.add_cascade(label="File", menu=self.filemenu)
        self.filemenu.add_command(label="New....", command=self.newFunc)
        self.filemenu.add_command(label="Open", command=self.falto)
        self.filemenu.add_separator()
        self.filemenu.add_command(label="Exit", command=lambda: root.quit())
        self.menu.add_cascade(label="Edit", menu=self.editmenu)
        self.editmenu.add_command(label="Copy", command=self.falto)
        self.editmenu.add_command(label="Cut", command=self.falto)
        self.editmenu.add_command(label="Paste", command=self.falto)
        self.menu.add_command(label="Run", command=self.Run)
        self.codeFrame = Frame(root, height=400, bg="red")
        #bind F5 key to Run method
        root.bind("<F5>", self.Run)
        self.codescroll = Scrollbar(self.codeFrame)
        #we able to change size of frame
        self.codeFrame.pack_propagate(0)
        self.codeT = Text(self.codeFrame,
                          yscrollcommand=self.codescroll.set,
                          padx=10,
                          pady=10)
        self.codescroll.config(command=self.codeT.yview)
        self.codescroll.pack(side="right", fill="y")
        self.codeT.pack(side="left", fill="both", expand=True)
        self.codeFrame.pack(fill=X)
        # Console area below the code area, with its own scrollbar.
        self.consoleFrame = Frame(root, bg="green")
        self.consolescroll = Scrollbar(self.consoleFrame)
        self.consoleT = Text(self.consoleFrame,
                             yscrollcommand=self.consolescroll.set,
                             padx=10,
                             pady=10)
        self.consolescroll.config(command=self.consoleT.yview)
        self.consolescroll.pack(side="right", fill="y")
        self.consoleT.pack(side="left", fill="both", expand=True)
        self.consoleFrame.pack(fill=X)
# Driver script: read a source file (or the bundled test program), run the
# lexer and then the syntax analyser over it.  Relies on `parser` (argparse)
# and TEST_PATH defined earlier in the file.
args = parser.parse_args()
file_path = TEST_PATH if args.test else args.f
if not os.path.isfile(file_path):
    raise Exception('File does not exists')


def get_source_text():
    """Read and return the entire source program from file_path."""
    with open(file_path, 'r') as source_file:
        return source_file.read()


program = get_source_text()
print(
    f'source program:\n--------------------\n{program}\n--------------------\n'
)
print('Starting Lexical Analysis')
lexer = Lexer()
token_list = lexer.tokenize(program)
print('Token list:')
pprint(token_list)
print('Starting Syntax Analyzer')
syntax = Syntax(token_list)
result = syntax.run()
if result:
    print('Syntax analyzer run successfully')
def __init__(self, code=""): self.syntax = Syntax() self.code = re.sub('\s+', '', code) self.position = 0
args = parser.parse_args() if args.source: abs_file_path = os.path.join(script_dir, args.source) with open(abs_file_path) as src: source_code = src.read() elif args.cmd: source_code = args.cmd with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') lexer = Lexer(source_code) syntax = Syntax(lexer) parse_tree = syntax.parse() print(1, parse_tree.declarator) print(2, parse_tree.declarator.identifier) print(3, parse_tree.next) print(parse_tree.next.next) print(parse_tree.next.next.next) # for node in parse_tree: # print(node) Node.proccess_traversal_semantics(parse_tree) Node.symtable.show() for error in w:
def main():
    # Entry point of the IPOL interpreter: tokenize, parse in three passes
    # (statements, expressions, whole program), report exceptions, then
    # generate and execute intermediate Python code.
    # NOTE(review): relies on module-level globals defined elsewhere in the
    # file (parsed_list, final_parsed_list, exceptions, ipol_code_verified,
    # recursive_parse, callback/callback1/callback2, reduce).
    print('Welcome to IPOL interpreter!')
    # returns lines of string containing the cleaned code
    file_reader = FileReader()
    # tabs removed, double spaces removed
    lines = file_reader.read_file()
    tokenizer = Tokenizer()
    # returns a 2d list containing the tokens per line of code
    tokens_list = tokenizer.tokenize(lines)
    tokens_list_copy = tokens_list.copy()
    # create instance of the parser with the syntax declared in Syntax class
    parser = Parser(syntax=Syntax().get_syntax())
    # iterate each line of the list containing the tokens
    for line in tokens_list:
        recursive_parse(parser, line, callback)
    # create a new instance of the parser now with the syntax for reducing
    # operations to expressions
    parser = Parser(syntax=Syntax().get_final_syntax())
    # Parse to an expression to see if it is valid
    for line in parsed_list:
        recursive_parse(parser, line, callback1)
    exception_checker = ExceptionCheker()
    for i in range(len(final_parsed_list)):
        # there must be a syntax error because it cannot be converted to a
        # single statement; check which kind of exception it is
        if len(final_parsed_list[i]) > 1:
            exception = exception_checker.check_exception(
                final_parsed_list[i], i)
            if isinstance(exception, IpolException):
                exceptions.append(exception)
    # now check if the overall structure of the code is valid
    # check if there are unused values
    # for index, token in enumerate(reduce(final_parsed_list)):
    #     if token.type == Type.NUMBER or token.type == Type.STR:
    #         exceptions.append(IpolException(
    #             ExceptionType.UNUSED_VALUE_ERROR, None, index))
    # print exceptions if there are any and halt the build process
    if len(exceptions) > 0:
        for exception in exceptions:
            exception.print()
        return
    else:
        # create a new instance of the parser now with the syntax of the
        # overall ipol code
        parser = Parser(syntax=Syntax().get_ipol_syntax())
        # finally, verify that the full code is valid
        reduced_final_parsed_list = reduce(final_parsed_list)
        # recursive_parse(parser, reduced_final_parsed_list, callback2)
        # Drop blank-line tokens before the final verification pass.
        reduced_final_parsed_list[:] = (token for token in reduced_final_parsed_list \
            if token.type != Type.EMPTY_LINE)
        recursive_parse(parser, reduced_final_parsed_list, callback2)
        for line in ipol_code_verified:
            for token in line:
                print(token.type)
        # check syntax in class Syntax
        # Type.E means accepted
        build_failed_message = 'Build Failed.'
        try:
            if ipol_code_verified[0][0].type == Type.E:
                print('Build Successful\n')
            else:
                print(build_failed_message)
                return
        except:
            # NOTE(review): bare except — any failure to inspect the
            # verification result is treated as a failed build.
            print(build_failed_message)
            return
        # there are no exceptions
        # continue with code generation
        tokens_list_copy.pop(0)
        tokens_list_copy.pop(len(tokens_list_copy) - 1)
        generated_code = CodeGenerator().generate(tokens_list_copy)
        # this may return a bool data type
        if isinstance(generated_code, list):
            runnable_code = '\n'.join(generated_code)
            runnable_code = runnable_code.replace('&n0', '')
            # run the generated python code
            with open('ic.py', '+w') as ic:
                ic.write(runnable_code)
            print(
                '\nBuild Complete.\nView logs on ipol_logs.txt\nView generated code on ic.py\n'
            )
            exec(runnable_code, globals())
            with open('ipol_logs.txt', '+w') as logs:
                text_to_write = 'PARSING LOGS\n\nGENERATED TOKENS\n'
                for line in tokens_list:
                    for token in line:
                        text_to_write = text_to_write + '{} -> {}'.format(
                            token.type, token.val) + ", "
                    text_to_write = text_to_write + '\n'
                text_to_write = text_to_write + '\PARSED AS...\n'
                for line in parsed_list:
                    for token in line:
                        text_to_write = text_to_write + str(token.type) + ', '
                    text_to_write = text_to_write + '\n'
                text_to_write = text_to_write + '\nGENERATED INTERMEDIATE CODE\n' + runnable_code
                logs.write(text_to_write)
        # if bool is returned, that means there was something wrong with the
        # ipol code
        else:
            print('Build failed')
def multipleTestCases():
    """Parse four recorded lexer outputs and check the evaluated results."""
    print("=" * 6, "multipleTestCases", "=" * 6)
    # (lexer output file, expected arithmetic result) — run in this order.
    cases = [
        ("testCases/target_test1/lex/out.lex", (5 - 3) * (8 / 2)),
        ("testCases/target_test2/lex/out.lex",
         5 + 1 * 3 - 4 / 5 * 6 - 1 * 2 - 3 * 4),
        ("testCases/target_test4/lex/out.lex", 24 + 4 - 1 * 3 / 2 + 34 - 1),
        ("testCases/target_test3/lex/out.lex",
         19 - 8 - 7 - 6 - 5 - 4 - 3 - 2 - 1 * (2 + 3 + 4 + 3 + 2)),
    ]
    for lex_path, expected in cases:
        s = Syntax(lex_path, "none")
        s.go()
        result = evaluator(s.root.getChild(0).getChild(1))
        assertThat(expected, result)