Example #1
0
 def callback(res):
     """Run lexical analysis on *res* and display the token-list window."""
     analyzer = Lexical()
     analyzer.get_dfa('./help/dfa.json')
     # Normalize Windows '\r\n' line endings before tokenizing.
     normalized = str(res).replace('\r\n', '\n')
     tokens = analyzer.lexical_run(normalized)
     self.lexical_window = LexicalWindow(tokens)
     self.lexical_window.show()
Example #2
0
 def callback(res):
     """Tokenize *res*, then run syntax + semantic analysis and show a dialog."""
     lexical = Lexical()
     lexical.get_dfa('./help/dfa.json')  # load the DFA transition table
     # Normalize Windows line endings, then produce the token sequence.
     lexical_res = lexical.lexical_run(str(res).replace('\r\n', '\n'))
     if not lexical_res[0] and not lexical_res[1]:
         QMessageBox.warning(self, '输入无效', '请输入有效程序文本')
         return
     # Token item layout: item[1] = kind code, item[2] = attribute value,
     # item[3] = line number.  Comment tokens are dropped.
     tokens, nums_attr = [], []
     for item in lexical_res[0]:
         if 'comment' in item[1]:
             continue
         tokens.append(item[1])
         nums_attr.append((item[3], item[2]))
     # Build the syntax tables lazily, once.
     if not self.syntax:
         self.syntax = Syntax()
         self.syntax.syntax_init('./help/semantic.json')
     semantic = Semantic()
     res_lst, res_err = semantic.semantic_run(tokens, nums_attr, self.syntax)
     # Show the semantic-analysis result dialog.
     self.semantic_win = QDialog()
     ui = semantic_res.Ui_Dialog()
     init_win(self.semantic_win, ui)
     set_semantic_win(ui, semantic)
Example #3
0
 def nfa(self):  # NFA transition-table viewer
     """Let the user pick an NFA JSON file and show its NFA/DFA windows."""
     default_path = os.getcwd() + '/help/nfa.json'
     path, _ = QFileDialog.getOpenFileName(self, '', default_path,
                                           'Json(*.json)')
     if not path:
         return
     lexical = Lexical()
     lexical.get_nfa(path)
     self.nfa_window = NFAWindow(lexical.nfa, lexical.nfa2dfa())
     self.nfa_window.show()
Example #4
0
 def validate(self):
     """Read the uploaded file, run lexical analysis, and return JSON."""
     source_path = self.upload_folder + self.filename
     with open(source_path, "r") as source:
         code = source.read()
     # Lexical analysis only; the syntactic result is intentionally empty.
     result = Lexical(code).run()
     return jsonify({
         'code': code,
         'lexical_table': result,
         'syntatic_result': []
     })
Example #5
0
def validate():
    """Handle POST /validate: run lexical then syntactic analysis on the code."""
    assert request.path == '/validate'
    assert request.method == 'POST'

    code = request.form['code']
    lexer = Lexical(code)
    lexical_result = lexer.run()

    # The syntactic pass reuses the tokens produced by the lexer.
    syntatic_result = Syntatic(code, lexer.getTokens()).run()

    return jsonify({
        'code': code,
        'lexical_table': lexical_result,
        'syntatic_result': syntatic_result
    })
Example #6
0
    def test(self):
        """Parse the sentence "the dog" as an NP with an unbound AGR variable."""
        NP = Lexical("NP", {"AGR": "?x"})
        # BUG FIX: ``-1L`` was a Python 2 long literal and is a SyntaxError in
        # Python 3; plain ``-1`` is equivalent (py3 ints are arbitrary precision).
        NP.set_variable_code("?x", -1)

        lexicon = self.make_lexicon()
        grammar = self.make_grammar()
        sentence = ["the", "dog"]

        self.parse(NP, lexicon, grammar, sentence)
Example #7
0
 def __init__(self):
     """Set up the main window: embed a code editor and wire menu actions."""
     QMainWindow.__init__(self)
     self.ui = Ui_MainWindow()
     self.ui.setupUi(self)
     # Replace the designer placeholder with a dedicated code-editor widget.
     self.ui.editor = QCodeEditor(self.ui.centralwidget)
     self.ui.editor.setGeometry(QtCore.QRect(10, 20, 661, 751))
     text = '''open a file \nor type something...'''
     self.ui.editor.setPlainText(text)
     self.ui.editor.setObjectName("editor")
     # Connect menu actions to their handlers.
     self.ui.open_btn.triggered.connect(self.open_file)
     self.ui.actionread_dfa.triggered.connect(self.read_dfa)
     self.ui.actionclear.triggered.connect(self.clear_dfa)
     self.lexical_window = Lexical()  # window for lexical-analysis results
     self.dfa_window = None
     self.grammer_show = None
     self.grammar_analyzer = GrammarAnalyze()
     self.semantic_analyzer_page = Semantic()
     self.semantic_analyzer = None
     self.SDTShow = None
     self.setWindowTitle("Analyzer")
     self.table = None  # DFA transition table, filled by read_dfa()
def lexical_analysis():
    """Run lexical analysis on the global ``content`` source text.

    Writes the token stream to ``lexical_result.txt`` and returns a list of
    display strings (``buffer``) describing the outcome.
    """
    global content
    # Create a new lexical analyzer
    lexical = Lexical()
    # Load the source code
    # lexical.load_source(open('code.txt', encoding="utf-8").read())
    lexical.load_source(content)
    # Run the lexical analysis
    lexical_success = lexical.execute()
    # Save the token results to the tokens file
    with open('lexical_result.txt', 'w+') as f:
        lexical_result = lexical.get_result()
        for i in lexical_result:
            f.write(i.type + '\t' + i.str + '\t' + str(i.line) + '\n')
    # Collect printable results -> everything accumulates in ``buffer``
    buffer = []
    print('词法分析是否成功:\t', lexical_success)
    buffer.append('词法分析是否成功: ' + str(lexical_success) + '\n')

    # NOTE(review): ``_flag`` is a private attribute of Lexical; it appears to
    # be truthy when an error occurred -- confirm against the Lexical class.
    if lexical._flag:
        with open('lexical_error.txt') as fp:
            for line in fp.readlines():
                buffer.append(line)
        #print('错误原因:\t', lexical.get_error().info,'\t错误数据为:\t',lexical.get_error().values,
        #      '\t错误行数为:\t',lexical.get_error().line)
    else:
        #lexical_result = lexical.get_result()
        print('词法分析结果:')
        buffer.append('词法分析结果: ')
        # print("type content line")
        for i in lexical_result:
            print(i.type, i.str, i.line)
            buffer.append('(%s, %s, %s)' % (i.type, i.str, i.line))


    return buffer
Example #9
0
def main():
    """Drive the full pipeline: lexical -> LL(1) grammar -> semantic analysis."""
    # Create a new lexical analyzer
    lexical = Lexical()

    # Load the source code
    lexical.load_source(open('3.txt', encoding="utf-8").read())

    # Run the lexical analysis
    lexical_success = lexical.execute()

    # Save the token results to the tokens file
    with open('lexical_result.txt', 'w+') as f:
        lexical_result = lexical.get_result()
        for i in lexical_result:
            f.write(i.type + '\t' + i.str + '\t' + str(i.line) + '\n')

    # Print the results
    print('词法分析是否成功:\t', lexical_success)
    if lexical_success:
        # lexical_result = lexical.get_result()
        print()
        print('词法分析结果:')
        for i in lexical_result:
            print(i.type, i.str, i.line)
        print()
    else:
        print('错误原因:\t', lexical.get_error().info)

    # Syntax analysis
    parser = Parser()

    parser.open('grammar1211.txt')  # file holding the grammar rules

    parser.make_first()
    parser.make_follow()
    parser.make_pretable()
    table = list()
    if not parser.iserror:
        # The first appended row is the header (column keys of the first
        # entry); subsequent rows hold each nonterminal's productions.
        _flag = True
        for key1 in parser.pretable.keys():
            if _flag:
                table.append(list(parser.pretable[key1]))

                _flag = False
            table.append(list(parser.pretable[key1].values()))

    else:
        print("\33[不是LL1文法")

    # Save the LL(1) prediction table
    if os.path.exists('LL(1).txt'):
        os.remove('LL(1).txt')
    with open("LL(1).txt", 'a+') as f:
        f.write("\t")
        for i in list(parser.pretable.keys()):
            f.write(str(i) + "\t")
        f.write("\n")
        for i in range(0, len(table[0])):
            for array in table:
                if str(array[i]) == "error":
                    f.write("\t")
                else:
                    f.write(str(array[i]) + "\t")
            f.write("\n")

    # Semantic analysis
    map_list = list()
    input_str = ''
    map_line = list()
    with open('lexical_result.txt') as f:
        for line in f.readlines():
            line = line[:-1].split('\t')
            if line[0]:
                input_str += line[0]
                map_list.append(line[1])
                map_line.append(line[2])

    p = Pro()

    # input_str: token-type string; map_list: lexemes; map_line: line numbers
    p.createSymbolList(input_str, map_list, map_line)
    # Semantic analysis yields chart (symbol table), temp_list (temporaries)
    # and seq_list (quadruples).
    p.analysis('LL(1).txt')  # uses the LL(1) prediction table generated above
Example #10
0
from lexical import Lexical
from sintactico import Sintactic
import sys
import re

if __name__ == "__main__":
    # Output streams for the symbol table, token list, and error report.
    # NOTE(review): these handles are never closed in the code visible here --
    # confirm they are closed later (or convert to ``with`` blocks).
    filetabla = open("output/tablafile.txt", "w+")
    tokensfilear = open("output/tokens.txt", "w+")
    errorsfile = open("output/errors.txt", "w+")

    # Read the entire source file named on the command line.
    f = open(sys.argv[1], 'r')
    datos = f.read()
    f.close()

    JL = Lexical()
    JP = Sintactic()

    # Tokenize, then compile; ``names`` maps identifier -> declaration info.
    tokensFile, simbolTable = JL.tokenizer(datos)
    errors, names = JP.compile(datos)
    for t in simbolTable:
        if re.match(r'ID', t['type']):
            try:
                t['vartype'] = names[t['value']]['vartype']

            except Exception as err:
                # Identifier not present in ``names``; leave its type unset.
                print(err)

    # Write the collected data into the output tables.
    filetabla.write("   TABLA DE SIMBOLOS \n \n")
    filetabla.write("  LEXEMA    TOKEN       TIPO \n \n")
Example #11
0
    def make_lexicon(self):
        """Build a two-word lexicon ("the", "dog") for the parser tests."""
        lexicon = Lexicon()

        # "the": article; agrees with both 3rd singular and 3rd plural.
        article = Lexical("ART", {"ROOT": "?the", "AGR": "?v"})
        article.set_variable("?the", ["the"])
        article.set_variable("?v", ["3s", "3p"])
        lexicon.add_word("the", [article])

        # "dog": noun; 3rd singular, root sense DOG1.
        noun = Lexical("N", {"ROOT": "?dog1", "AGR": "?3s"})
        noun.set_variable("?3s", ["3s"])
        noun.set_variable("?dog1", ["DOG1"])
        lexicon.add_word("dog", [noun])

        return lexicon
Example #12
0
 def dfa(self):  # DFA transition-table viewer
     """Load the DFA from disk and display its transition table."""
     analyzer = Lexical()
     analyzer.get_dfa('./help/dfa.json')
     self.dfa_window = DFAWindow(analyzer.get_dfa_table())
     self.dfa_window.show()
Example #13
0
class MyAnalyzer(QMainWindow):
    """Main window wiring a code editor to lexical/grammar/semantic analyzers."""

    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # Replace the designer placeholder with a dedicated code editor.
        self.ui.editor = QCodeEditor(self.ui.centralwidget)
        self.ui.editor.setGeometry(QtCore.QRect(10, 20, 661, 751))
        text = '''open a file \nor type something...'''
        self.ui.editor.setPlainText(text)
        self.ui.editor.setObjectName("editor")
        # Connect menu actions to their handlers.
        self.ui.open_btn.triggered.connect(self.open_file)
        self.ui.actionread_dfa.triggered.connect(self.read_dfa)
        self.ui.actionclear.triggered.connect(self.clear_dfa)
        self.lexical_window = Lexical()  # window for lexical results
        self.dfa_window = None
        self.grammer_show = None
        self.grammar_analyzer = GrammarAnalyze()
        self.semantic_analyzer_page = Semantic()
        self.semantic_analyzer = None
        self.SDTShow = None
        self.setWindowTitle("Analyzer")
        self.table = None  # DFA transition table, filled by read_dfa()

    def clear_editor(self):
        """Erase the editor contents."""
        self.ui.editor.clear()

    def clear_dfa(self):
        """Drop the currently loaded DFA transition table."""
        self.table = None

    def read_dfa(self):
        """Load a DFA transition table from a user-chosen text file.

        Each file line holds one state's transitions as whitespace-separated
        integers aligned with ``head``; -1 marks "no transition".  The parsed
        table is stored in ``self.table`` as {state: {symbol-class: state}}.
        """
        dfa_file = QFileDialog.getOpenFileName(self, 'choose DFA',
                                               'C:\\Users\\',
                                               'Txt files(*.txt)')[0]
        head = [
            'letter', '_', 'digit', '0', '1-9', 'option', '/', '"', 'bound',
            '.', 'E', '+', '-', '1-7', 'x', '0-7', 'a-f', '*', '=', 'other',
            'legal'
        ]
        if not dfa_file:
            return
        table = {}
        try:
            # BUG FIX: the old version closed the handle in a bare ``finally``
            # without checking it was ever opened, so a failed ``open`` raised
            # AttributeError on ``None.close()``.  ``with`` closes safely, and
            # ``except Exception`` no longer swallows KeyboardInterrupt.
            with open(dfa_file) as file:
                for i, line in enumerate(file.readlines()):
                    row = [int(x) for x in line.split()]
                    # Keep only defined transitions (-1 means "none").
                    table[i] = {
                        head[idx]: state
                        for idx, state in enumerate(row) if state != -1
                    }
            self.table = table
        except Exception:
            self.ui.editor.clear()
            self.ui.editor.setPlainText("get DFA transform table failed...")

    def open_file(self):
        """Load a user-chosen text file into the editor; show errors inline."""
        file_name = \
            QFileDialog.getOpenFileName(self, 'choose file',
                                        'C:\\Users\\omnitrix\\PycharmProjects\\CS_Lab1\\test files',
                                        'Txt files(*.txt)')[0]
        file = None
        try:
            file = open(file_name)
            texts = file.read()
            self.ui.editor.clear()
            self.ui.editor.setPlainText(texts)
        except Exception as e:
            # Surface the failure in the editor rather than crashing the GUI.
            self.ui.editor.clear()
            self.ui.editor.setPlainText(str(e))
        finally:
            if file:
                file.close()

    def lexical(self, all_res=True):
        """Tokenize the editor text line by line with the loaded DFA.

        Returns (results, res_lines) when ``all_res`` is False, otherwise
        (results, errors, res_lines, info_lines); the *_lines lists hold the
        1-based source line of each token / error.
        """
        dfa = Cf_Dfa(self.table)
        texts = self.ui.editor.toPlainText().split("\n")
        results = []
        errors = []
        res_lines = []
        info_lines = []
        for i, line in enumerate(texts, start=1):
            dfa.analyze(line)
            results.extend(dfa.results)
            res_lines.extend([i] * len(dfa.results))
            errors.extend(dfa.error)
            info_lines.extend([i] * len(dfa.error))
        if not all_res:
            return results, res_lines
        return results, errors, res_lines, info_lines

    def cf_analyze(self):
        """Run lexical analysis and fill the token/error tables of the window."""
        results, errors, res_lines, info_lines = self.lexical()
        res_num = len(results)
        info_num = len(errors)
        token_table = self.lexical_window.ui.token_table
        info_table = self.lexical_window.ui.infos_table
        token_table.horizontalHeader().setSectionResizeMode(
            QHeaderView.Stretch)
        info_table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
        token_table.setRowCount(res_num)
        info_table.setRowCount(info_num)
        # Columns 0-1 come from the token/error tuple; column 2 is the line.
        for i in range(res_num):
            for j in range(2):
                token_table.setItem(i, j, QTableWidgetItem(str(results[i][j])))
            token_table.setItem(i, 2, QTableWidgetItem(str(res_lines[i])))
        for i in range(info_num):
            for j in range(2):
                info_table.setItem(i, j, QTableWidgetItem(str(errors[i][j])))
            info_table.setItem(i, 2, QTableWidgetItem(str(info_lines[i])))
        self.lexical_window.show()
        return results

    def dfa_trans(self):
        """Show the built-in DFA's move function as a 21x21 table (lazy init)."""
        if not self.dfa_window:
            self.dfa_window = Dfa_TransTable()
            trans = Cf_Dfa().move_func
            table = self.dfa_window.ui.trans_table
            table.setRowCount(21)
            for i in range(20):
                for j in range(21):
                    if i in trans:
                        # Column headers name the input symbol classes.
                        idx = table.horizontalHeaderItem(j).text()
                        if idx in trans[i]:
                            table.setItem(i, j,
                                          QTableWidgetItem(str(trans[i][idx])))
        self.dfa_window.show()

    @staticmethod
    def grammar_init(analyzer):
        """Instantiate *analyzer*, load its grammar, and build its tables."""
        analyzer = analyzer()
        analyzer.set_grammar()
        analyzer.generate()
        return analyzer

    def grammar(self):
        """Show the forecast/FIRST-FOLLOW/SELECT tables (built lazily once)."""
        if not self.grammer_show:
            self.grammer_show = Grammar_Show()
            analyzer = self.grammar_init(GrammarAnalyzer)
            forcast_table = self.grammer_show.ui.Forcast_Table
            forcast_table.setColumnCount(len(analyzer.inputs))
            forcast_table.setRowCount(len(analyzer.Vn))
            forcast_table.setHorizontalHeaderLabels(analyzer.inputs)
            forcast_table.setVerticalHeaderLabels(analyzer.Vn)
            ff_table = self.grammer_show.ui.FF_Table
            select_table = self.grammer_show.ui.Select_Table
            ff_table.horizontalHeader().setSectionResizeMode(
                QHeaderView.Stretch)
            select_table.horizontalHeader().setSectionResizeMode(
                QHeaderView.Stretch)
            ff_table.setRowCount(len(analyzer.Vn))
            ff_table.setVerticalHeaderLabels(analyzer.Vn)
            for i in range(len(analyzer.Vn)):
                for j in range(len(analyzer.inputs)):
                    symbol = forcast_table.horizontalHeaderItem(j).text()
                    vn = forcast_table.verticalHeaderItem(i).text()
                    if symbol in analyzer.forcast_table[vn]:
                        item = analyzer.forcast_table[vn][symbol]
                        # "synch" entries are error-recovery markers, not
                        # productions, so they are shown verbatim.
                        production = str(vn + '->' +
                                         item) if item != "synch" else "synch"
                        forcast_table.setItem(i, j,
                                              QTableWidgetItem(production))
                vn = ff_table.verticalHeaderItem(i).text()
                ff_table.setItem(i, 0,
                                 QTableWidgetItem(str(analyzer.first[vn])))
                ff_table.setItem(i, 1,
                                 QTableWidgetItem(str(analyzer.follow[vn])))
            select_table.setRowCount(len(analyzer.select))
            for i, key in enumerate(analyzer.select):
                select_table.setItem(i, 0, QTableWidgetItem(key))
                select_table.setItem(
                    i, 1, QTableWidgetItem(str(analyzer.select[key])))
        self.grammer_show.show()

    def grammar_analyze(self):
        """Run grammar analysis on the current tokens and display the result."""
        analyzer = self.grammar_init(GrammarAnalyzer)
        tokens, lines = self.lexical(False)
        results, errors, props = analyzer.analyze(tokens, lines)
        self.show_grammar(results, errors, props, analyzer)

    def semantic_analyze(self):
        """Run semantic analysis (creating/resetting the analyzer) and show it."""
        if not self.semantic_analyzer:
            self.semantic_analyzer = self.grammar_init(SemanticAnalyzer)
        else:
            self.semantic_analyzer.semantic_init()
        tokens, lines = self.lexical(False)
        self.semantic_analyzer.analyze(tokens, lines)
        self.show_semantic()

    def show_semantic(self):
        """Fill the semantic-result page: 3-address code, symbols, errors."""
        self.semantic_analyzer_page = Semantic()
        info_table = self.semantic_analyzer_page.ui.infos_table
        symbol_table = self.semantic_analyzer_page.ui.symbol_table
        results_table = self.semantic_analyzer_page.ui.result_table
        results_table.horizontalHeader().setSectionResizeMode(
            QHeaderView.Stretch)
        results = self.semantic_analyzer.addr_code
        symbols = self.semantic_analyzer.symbol_table
        errors = self.semantic_analyzer.errors
        results_table.setRowCount(len(results))
        symbol_table.setRowCount(len(symbols))
        info_table.setRowCount(len(errors))
        for i in range(len(results)):
            results_table.setItem(i, 0, QTableWidgetItem(str(i)))
            results_table.setItem(i, 1, QTableWidgetItem(results[i]["code"]))
            results_table.setItem(i, 2, QTableWidgetItem(results[i]["quater"]))
        for i, name in enumerate(symbols):
            symbol_table.setItem(i, 0, QTableWidgetItem(str(name)))
            symbol_table.setItem(i, 1,
                                 QTableWidgetItem(str(symbols[name]['type'])))
            symbol_table.setItem(
                i, 2, QTableWidgetItem(str(symbols[name]['offset'])))
        for i, item in enumerate(errors):
            info_table.setItem(i, 0, QTableWidgetItem(str(item[0])))
            info_table.setItem(i, 1, QTableWidgetItem(str(item[1])))
            info_table.setItem(i, 2, QTableWidgetItem(str(item[2])))
        self.semantic_analyzer_page.show()

    def semantic(self):
        """Show the SDT rules read from the bundled 'SDT show.txt' (lazy init)."""
        if not self.SDTShow:
            self.SDTShow = SDTShow()
            # ``with`` guarantees the file is closed even if Qt calls raise.
            with open("./test files/SDT show.txt", "r",
                      encoding="UTF-8") as file:
                lines = file.readlines()
            table = self.SDTShow.ui.SDT_Table
            table.setRowCount(len(lines))
            table.setColumnCount(1)
            table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
            for i in range(len(lines)):
                table.setItem(i, 0, QTableWidgetItem(lines[i]))
        self.SDTShow.show()

    def show_grammar(self, results, errors, props, grammar):
        """Render the parse tree from *results* and the error table.

        *results* is the production sequence "Left->Right ..."; children are
        pushed onto a stack in reverse so the leftmost nonterminal expands next.
        """
        analyzer = self.grammar_analyzer
        tree = analyzer.ui.treeWidget
        tree.clear()
        tree.setColumnCount(1)
        root = QTreeWidgetItem(tree)
        root.setText(0, "Program")
        stack = [root]
        for production in results:
            left, right = production.split('->')
            # Pop until the stack top matches the production's left side.
            while left != stack[-1].text(0):
                stack.pop()
            father = stack.pop()
            right = right.split()
            temp = []
            for item in right:
                child = QTreeWidgetItem(father)
                child.setText(0, item)
                if item in grammar.Vn:
                    temp.append(child)
            # Reverse so the leftmost nonterminal ends up on top of the stack.
            temp.reverse()
            stack.extend(temp)
        # Attach the concrete lexeme (from ``props``) under id/digit leaves.
        item = QtWidgets.QTreeWidgetItemIterator(tree)
        while item.value():
            node = item.value()
            if node.text(0) == "id" or node.text(0) == "digit":
                leaf = QTreeWidgetItem(item.value())
                leaf.setText(0, props.pop(0))
            item = item.__iadd__(1)
        tree.expandAll()
        table = analyzer.ui.infos_table
        table.clear()
        table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
        table.verticalHeader().setVisible(False)
        table.setRowCount(len(errors))
        for i in range(len(errors)):
            for j in range(3):
                table.setItem(i, j, QTableWidgetItem(str(errors[i][j])))
        analyzer.show()
Example #14
0
from lexical import Lexical
from semantic import Pro
from syntax import Parser
from vm import VM
import os

if __name__ == '__main__':
    # 新建词法分析器
    lexical = Lexical()

    # 载入源代码
    lexical.load_source(open('fib.txt', encoding="utf-8").read())

    # 执行词法分析
    lexical_success = lexical.execute()

    # 将结果保存在tokens文件中
    with open('lexical_result.txt', 'w+') as f:
        lexical_result = lexical.get_result()
        for i in lexical_result:
            f.write(i.type + '\t' + i.str + '\t' + str(i.line) + '\n')  #
    # 打印结果
    print('词法分析是否成功:\t', lexical_success)
    if lexical_success:
        # lexical_result = lexical.get_result()
        print()
        print('词法分析结果:')
        for i in lexical_result:
            print(i.type, i.str, i.line)
        print()
    else: