示例#1
0
def main():
    """Lex the file named on the command line, collect its tokens, then
    run the table-driven parser over them.

    Prints 'All good' on success or the syntax error message otherwise.

    Returns:
        -1 on usage error or when the input file cannot be opened;
        None otherwise (results are reported via stdout).
    """
    commentsFound = 0

    if len(sys.argv) != 2:
        print('usage: python Main.py file')
        return -1

    try:
        lex = Lexer(sys.argv[1])
    except FileNotFoundError:
        print('File not found')
        return -1

    if debug:
        print('### PROGRAM WILL START ###\n')

    # Reported line numbers are shifted by the number of comment tokens
    # seen so far, to compensate for lines the lexer consumed as comments.
    line = lex.inputF.lineNumber + commentsFound
    column = lex.inputF.columnNumber
    token = lex.scan()

    tokens = []
    try:
        while True:
            if debug:
                print('' + token.toString() + '\t\t', end='')
            if token.tag != 292:  # tag 292 marks a comment token
                # Integer tags map to the parser's terminal names via
                # tag4Parser; string tags pass through unchanged.
                if isinstance(token.tag, int):
                    token2add = tag4Parser[token.tag]
                else:
                    token2add = token.tag
                tokens.append([token2add, line, column])
                if debug:
                    print(token2add, end='\t')
            else:
                commentsFound += 1
            if debug:
                print()

            line = lex.inputF.lineNumber + commentsFound
            column = lex.inputF.columnNumber
            token = lex.scan()
    except Exception:
        # The lexer appears to signal end of input by raising; swallowing
        # every exception also hides real lexer bugs.
        # TODO(review): narrow this to the specific end-of-input exception.
        pass

    # Sentinel end-of-input token handed to the parser.
    eofToken = ['$', lex.inputF.lineNumber, lex.inputF.columnNumber]

    if debug:
        print("### END ###")

    myParser = Parser('myParser/table.csv', 'myParser/grammar.csv', tokens,
                      eofToken)

    try:
        myParser.analyze()
        print('All good')
    except SyntaxError as error:
        print(error)
示例#2
0
 def __init__(self):
     """Build the compiler window: create the three result widgets and the
     DFA-table widget, instantiate the lexer/parser/semantic back-ends,
     then lay out the UI."""
     super(CompilerWindow, self).__init__()
     # Popup widgets that display the output of each analysis phase.
     self.tokens_widget = TokenWidget()
     self.parse_widget = ParseWidget()
     self.semantic_widget = SemanticWidget()
     self.dfa_table_widget = DFATable("DFA转换表")
     # Analysis back-ends: lexer, parser, semantic analyser.
     self.l = Lexer()
     self.p = Parser()
     self.s = Semantic()
     self.initUI()
示例#3
0
def main(source_path='test.f1'):
    """Read a source file and run the lexer over its contents.

    Args:
        source_path: path of the file to tokenize. Defaults to 'test.f1',
            preserving the original hard-coded behaviour.
    """
    # Read the current source code.
    with open(source_path, 'r') as file:
        content = file.read()

    # Call the lexer with the source code, then collect the tokens.
    lex = Lexer(content)
    tokens = lex.tokenize()
示例#4
0
    def test_analyse(self):
        """Drive Lexer.analyse over three sources: a dense expression,
        a second short expression, and a code snippet containing both
        line and block comment styles."""
        lexer = Lexer()

        source = "(a+b)^100.20==+100-21"
        tokens = lexer.analyse(i for i in source)
        expected = [
            ("(", TokenType.BRACKET),
            ("a", TokenType.VARIABLE),
            ("+", TokenType.OPERATOR),
            ("b", TokenType.VARIABLE),
            (")", TokenType.BRACKET),
            ("^", TokenType.OPERATOR),
            ("100.20", TokenType.FLOAT),
            ("==", TokenType.OPERATOR),
            ("+100", TokenType.INTEGER),
            ("-", TokenType.OPERATOR),
            ("21", TokenType.INTEGER),
        ]
        self.assertEqual(11, len(tokens))
        # Table-driven check: one (text, type) pair per expected token.
        for got, (text, kind) in zip(tokens, expected):
            self.assertToken(got, text, kind)

        source = "(a+b)*1.22"
        token = lexer.analyse(i for i in source)
        self.assertEqual(7, len(token))
        self.assertToken(token[0], "(", TokenType.BRACKET)

        source = """
            func foo(a,b){
                print(a+b) // two sum
                /*  multi lines comment
                    whatever written here will not be parsed
                */
            }
        """
        token = lexer.analyse(i for i in source)
        self.assertEqual(15, len(token))
        self.assertToken(token[0], "func", TokenType.KEYWORD)
        print(token)
示例#5
0
def main():
    """Prompt for a source file path, build its AST, and dump the tree as
    Graphviz dot text to stdout and to ./AST.dot."""
    filepath = input("Input code path:\n>")
    with open(filepath, 'r') as f:
        # join() is O(n) vs. the original quadratic '+=' concatenation.
        # NOTE(review): stripping each line and joining with no separator
        # fuses adjacent lines together — preserved from the original;
        # confirm the grammar tolerates this across line breaks.
        text = "".join(line.strip() for line in f)
        lexer = Lexer(text)
        parser = Parser(lexer)
        viz = ASTVisualizer(parser)
        content = viz.gendot()
        print(content)

    print(text)

    with open("./AST.dot", 'w') as f:
        f.write(content)
示例#6
0
 def __init__(self):
     """Build the main window: create the token-output widget and the
     DFA-table widget, instantiate the lexer back-end, then lay out
     the UI."""
     super(MainWindow, self).__init__()
     # Popup widgets for lexer output and the DFA transition table.
     self.tokens_widget = MyWidget()
     self.dfa_table_widget = MyTable("DFA转换表")
     # Lexer back-end used by the toolbar actions.
     self.l = Lexer()
     self.initUI()
示例#7
0
class MainWindow(QtGui.QMainWindow):
    """Main compiler window: a central text editor plus toolbar actions
    that run the lexer, show the DFA transition table, and invoke the
    (stub) parser on the editor contents."""

    def __init__(self):
        super(MainWindow, self).__init__()
        # Popup widgets for lexer output and the DFA transition table.
        self.tokens_widget = MyWidget()
        self.dfa_table_widget = MyTable("DFA转换表")
        # Lexer back-end used by the toolbar actions.
        self.l = Lexer()
        self.initUI()

    def initUI(self):
        """Create toolbar actions, toolbars, status bar, menu bar and the
        central text editor."""
        self.resize(750, 800)
        self.setWindowTitle("myCompiler")
        self.setWindowIcon(QtGui.QIcon("image/icon.jpg"))

        # Exit action.
        self.exit = QtGui.QAction('Exit', self)
        self.exit.setShortcut('Ctrl+Q')
        self.exit.setStatusTip("Exit application")
        self.connect(self.exit, QtCore.SIGNAL('triggered()'),
                     QtCore.SLOT("close()"))

        # Open-file action.
        self.openfile = QtGui.QAction('打开文件', self)
        self.openfile.setShortcut('Ctrl+O')
        self.openfile.setStatusTip("Open file")
        self.connect(self.openfile, QtCore.SIGNAL('triggered()'),
                     self.open_file)

        # Lexical-analysis action.
        self.lexer = QtGui.QAction('词法分析', self)
        self.lexer.setShortcut('Ctrl+L')
        self.lexer.setStatusTip("lexer")
        self.connect(self.lexer, QtCore.SIGNAL('triggered()'), self.lexer_ana)

        # DFA-table action.
        self.dfa_table = QtGui.QAction('词法规则', self)
        self.dfa_table.setShortcut('Ctrl+D')
        self.connect(self.dfa_table, QtCore.SIGNAL('triggered()'),
                     self.show_dfa_table)

        # Parser action (stub implementation below).
        self.paser = QtGui.QAction('语法分析', self)
        self.paser.setShortcut('Ctrl+P')
        self.paser.setStatusTip("paser")
        self.connect(self.paser, QtCore.SIGNAL('triggered()'), self.paser_ana)

        # One toolbar per action.
        self.toolbar1 = self.addToolBar("打开文件")
        self.toolbar2 = self.addToolBar("词法分析")
        self.toolbar3 = self.addToolBar("词法规则")
        self.toolbar4 = self.addToolBar("语法分析")
        self.toolbar1.addAction(self.openfile)
        self.toolbar2.addAction(self.lexer)
        self.toolbar3.addAction(self.dfa_table)
        self.toolbar4.addAction(self.paser)

        # Status bar.
        self.statusBar().showMessage('Ready')

        # Menu bar with a File > Exit entry.
        menubar = self.menuBar()
        file = menubar.addMenu("&File")
        file.addAction(self.exit)

        # Central text editor.
        self.textEdit = QtGui.QTextEdit()
        self.setCentralWidget(self.textEdit)
        self.setFocus()

    def open_file(self):
        """Let the user pick a file and load its contents into the editor."""
        filename = QtGui.QFileDialog.getOpenFileName(self, "Open file", '.')
        # getOpenFileName returns '' when the dialog is cancelled; the
        # original crashed on open('') here.
        if not filename:
            return
        print("open" + filename)
        # 'with' guarantees the handle is closed (original leaked it).
        with open(filename, 'r') as fname:
            data = fname.read()
        self.textEdit.setText(data)

    def lexer_ana(self):
        """Lex the editor contents and show one token per line in a popup."""
        data = self.textEdit.toPlainText()

        tokens = self.l.lex(data)
        # join() is O(n) vs. the original quadratic '+=' concatenation;
        # output is byte-identical.
        tokens_str = '行数\ttoken\n' + ''.join(t + "\n" for t in tokens)
        self.tokens_widget.textEdit.setText(tokens_str)
        self.tokens_widget.show()

    def show_dfa_table(self):
        """Display the lexer's DFA transition table in a popup."""
        states, chars, t_content = self.l.dfa.get_table()
        self.dfa_table_widget.table(chars, states, t_content)
        self.dfa_table_widget.show()

    def paser_ana(self):
        # Parser is not implemented in this window yet; keep the stub.
        print("paser")
示例#8
0
from lexer.Lexer import Lexer
from parserr.Yacc import Yacc

# Read the sample program, then run it through the lexer/Yacc pair.
# 'with' guarantees the handle is closed (original never closed it);
# the dead commented-out token-dump loop has been removed.
with open("samples/lexer/in1.txt", "r") as f:
    source = f.read()

y = Yacc()
lexer = Lexer()
# Third argument presumably enables the parser's debug output
# — TODO(review): confirm against Yacc.parse's signature.
y.build().parse(source, lexer.build(), True)
示例#9
0
class CompilerWindow(QtGui.QMainWindow):
    """Compiler front-end window: a central text editor with toolbar
    actions that run the lexer, parser and semantic analyser and display
    each result in its own popup widget."""

    def __init__(self):
        super(CompilerWindow, self).__init__()
        # Popup widgets that display the output of each analysis phase.
        self.tokens_widget = TokenWidget()
        self.parse_widget = ParseWidget()
        self.semantic_widget = SemanticWidget()
        self.dfa_table_widget = DFATable("DFA转换表")
        # Analysis back-ends: lexer, parser, semantic analyser.
        self.l = Lexer()
        self.p = Parser()
        self.s = Semantic()
        self.initUI()

    def initUI(self):
        """Create toolbar actions, toolbars, status bar, and the central
        text editor pre-loaded with the default sample file."""
        self.resize(750, 800)
        self.setWindowTitle("语义分析")

        # Exit action.
        self.exit = QtGui.QAction('Exit', self)
        self.exit.setShortcut('Ctrl+Q')
        self.exit.setStatusTip("Exit application")
        self.connect(self.exit, QtCore.SIGNAL('triggered()'), QtCore.SLOT("close()"))

        # Open-file action.
        self.openfile = QtGui.QAction('打开文件', self)
        self.openfile.setShortcut('Ctrl+O')
        self.openfile.setStatusTip("Open file")
        self.connect(self.openfile, QtCore.SIGNAL('triggered()'), self.open_file)

        # Lexical-analysis action.
        self.lexer = QtGui.QAction('词法分析', self)
        self.lexer.setShortcut('Ctrl+L')
        self.lexer.setStatusTip("lexer")
        self.connect(self.lexer, QtCore.SIGNAL('triggered()'), self.lexer_ana)

        # DFA-table action.
        self.dfa_table = QtGui.QAction('DFA转换表', self)
        self.dfa_table.setShortcut('Ctrl+D')
        self.connect(self.dfa_table, QtCore.SIGNAL('triggered()'), self.show_dfa_table)

        # Parser action.
        self.parser = QtGui.QAction('语法分析', self)
        self.parser.setShortcut('Ctrl+P')
        self.parser.setStatusTip("parser")
        self.connect(self.parser, QtCore.SIGNAL('triggered()'), self.parser_ana)

        # Semantic-analysis action.
        self.semantic = QtGui.QAction('语义分析', self)
        self.semantic.setShortcut('Ctrl+M')
        self.semantic.setStatusTip("semantic")
        self.connect(self.semantic, QtCore.SIGNAL('triggered()'), self.semantic_ana)

        # Clear-editor action.
        self.clear = QtGui.QAction('清空', self)
        self.clear.setShortcut('Ctrl+A')
        self.clear.setStatusTip("clear")
        self.connect(self.clear, QtCore.SIGNAL('triggered()'), self.clear_data)

        # One toolbar per action.
        self.toolbar1 = self.addToolBar("打开文件")
        self.toolbar3 = self.addToolBar("词法分析")
        self.toolbar4 = self.addToolBar("DFA转换表")
        self.toolbar5 = self.addToolBar("语法分析")
        self.toolbar6 = self.addToolBar("语义分析")
        self.toolbar7 = self.addToolBar("清空")
        self.toolbar1.addAction(self.openfile)
        self.toolbar3.addAction(self.lexer)
        self.toolbar4.addAction(self.dfa_table)
        self.toolbar5.addAction(self.parser)
        self.toolbar6.addAction(self.semantic)
        self.toolbar7.addAction(self.clear)

        # Status bar.
        self.statusBar().showMessage('Ready')

        # Central text editor, pre-loaded with the default sample file.
        self.textEdit = QtGui.QTextEdit()
        # 'with' guarantees the handle is closed (original leaked it).
        with open('声明语句.txt', 'r') as fname:
            data = fname.read()
        self.textEdit.setText(data)
        self.setCentralWidget(self.textEdit)
        self.setFocus()

    def open_file(self):
        """Let the user pick a file and load its contents into the editor."""
        filename = QtGui.QFileDialog.getOpenFileName(self, "Open file", '.')
        # getOpenFileName returns '' when the dialog is cancelled.
        if not filename == '':
            print("open" + filename)
            # 'with' guarantees the handle is closed (original leaked it).
            with open(filename, 'r') as fname:
                data = fname.read()
            self.textEdit.setText(data)

    def lexer_ana(self):
        """Lex the editor contents and show one token per line in a popup."""
        data = self.textEdit.toPlainText()

        tokens = self.l.lex(data)
        # join() is O(n) vs. the original quadratic '+=' concatenation;
        # output is byte-identical.
        tokens_str = '行数\ttoken\n' + ''.join(t + "\n" for t in tokens)
        self.tokens_widget.textEdit.setText(tokens_str)
        self.tokens_widget.show()

    def parser_ana(self):
        """Lex then parse the editor contents; show the parse tree."""
        data = self.textEdit.toPlainText()
        tokens = self.l.lex(data)
        parse_result = self.p.analyze(tokens)
        self.parse_widget.setWindowTitle("语法分析树")
        self.parse_widget.textEdit.setText(parse_result)
        self.parse_widget.show()

    def semantic_ana(self):
        """Run lexer, parser and semantic analyser over the editor
        contents; show the semantic-analysis result."""
        data = self.textEdit.toPlainText()
        tokens = self.l.lex(data)
        parse_result = self.p.analyze(tokens)
        semantic_result = self.s.analyze(parse_result)
        self.semantic_widget.setWindowTitle("语义分析")
        self.semantic_widget.textEdit.setText(semantic_result)
        self.semantic_widget.show()

    def show_dfa_table(self):
        """Display the lexer's DFA transition table in a popup."""
        states, chars, t_content = self.l.dfa.get_dfa_table()
        self.dfa_table_widget.table(chars, states, t_content)
        self.dfa_table_widget.show()

    def clear_data(self):
        """Empty the editor."""
        self.textEdit.setText("")
示例#10
0
                self.tokenConsumer.revertToCheckpoint()
                self.FuncCall()
                self.Expr2()
            else:
                self.tokenConsumer.revertToCheckpoint()
                self.Loc()
                self.Expr2()


if __name__ == "__main__":
    def _read_stripped(path):
        # One stripped line per list entry — the format Lexer expects here.
        with open(path, 'r') as f:
            return [line.strip() for line in f]

    # Both files are read up front (as the original did), then each is
    # lexed and syntactically analysed in turn; the copy-pasted duplicate
    # test sequence is replaced by a loop.
    for source in (_read_stripped('../sample1.decaf'),
                   _read_stripped('../sample2.decaf')):
        lexer = Lexer(source)
        lexer.evaluate()

        analyser = SyntacticAnalyser(lexer.tokens)
        analyser.analyse()
示例#11
0
文件: main.py 项目: Hbaf/Compilers
from lexer.Lexer import Lexer
from argparse import ArgumentParser

# Command line: -f <file> names the program to lex; the lexeme table is
# written to '<file>_out'.
parser = ArgumentParser(
    description='Simply returns in program lexeme usage table')
parser.add_argument('-f',
                    type=str,
                    dest='file',
                    required=True,
                    help='File to read program from')
args = parser.parse_args()
path = args.file  # renamed from 'file' to avoid shadowing the builtin
try:
    # 'with' guarantees both handles are closed even if lexing fails
    # (the original never closed either).
    with open(path, 'r') as fr:
        text = fr.read()
    lexer = Lexer(text)
    with open(path + '_out', 'w') as fw:
        for j in lexer.parse():
            fw.write('{}\n'.format(j.to_string()))
except FileNotFoundError:
    # format(path) was redundant inside %s formatting; output unchanged.
    print("There is no such file : %s" % path)