# Example #1
 def __init__(self, file):
     """
     Build the parser: store the source handle, create a lexical
     analyzer over it, and immediately run the parse.

     file -- source input handed to LexicalAnalyzer (exact type
             depends on that class; not visible here -- TODO confirm).
     """
     self.file = file
     self.mem = []  # accumulator for parse output -- verify usage in parse()
     self.lex = LexicalAnalyzer(file)
     # parse() is defined elsewhere in this class.
     self.parse(self.lex)
# Example #2
    def compile_handler(self, event):
        """Run lexical and syntax analysis over the top text area and
        render the resulting tables (or the error) in the bottom area.

        event -- Tk event object (required by the binding; unused here).
        """
        self.text_area_bottom.delete('1.0', END)
        source = self.text_area_top.get('1.0', END)
        lexer = LexicalAnalyzer(source)
        try:
            (t_lexemes, t_idns, t_constants) = lexer.run()
            sAn = SyntaxAnalyzer2(t_lexemes, t_idns, t_constants,
                                  transition_table.transition_table)
            state_table = sAn.run()

            # run() appears to return True when there is no state table
            # to display -- normalize to None for makeTables. The old
            # `== True` comparison is replaced by an identity check
            # (PEP 8 E712). TODO confirm run()'s contract.
            if state_table is True:
                state_table = None

            report = "".join(
                makeTables(t_lexemes, t_idns, t_constants, state_table))
            # String index '1.0' for consistency with the delete/get calls.
            self.text_area_bottom.insert('1.0', report)
        except TranslatorException as ex:
            # On a translator error, show the exception class name and
            # message instead of the tables.
            self.text_area_bottom.insert(
                '1.0', ex.__class__.__name__ + "\n" + str(ex))
    def __init__(self, filename):
        """Set up the parser: open the .p2 output file, create the
        lexical analyzer, and build the rule-number dispatch table.

        filename -- path to the source file; the code assumes it ends
                    in a 3-character suffix that is replaced by ".p2"
                    -- TODO confirm the expected extension.
        """
        self.filename = filename
        # NOTE(review): handle is opened in append mode and kept for the
        # object's lifetime; it is not closed in this method. The [:-3]
        # slice blindly strips the last 3 characters of the path.
        self.p2File = open(self.filename[:-3] + ".p2", "a")
        self.ct = ""  # presumably the current token text -- verify usage
        # The filename is passed as UTF-8 bytes; LA.LexicalAnalyzer
        # apparently expects bytes rather than str -- TODO confirm.
        self.lex = LA.LexicalAnalyzer(filename.encode("utf-8"))

        # Dispatch table: grammar rule number -> handler method (all
        # handlers are defined elsewhere in this class).
        self.rule_mappings = {
            1: self.program,
            2: self.define,
            3: self.more_defines,
            4: self.stmt_list,
            5: self.stmt,
            6: self.literal,
            7: self.quoted_lit,
            8: self.more_tokens,
            9: self.param_list,
            10: self.else_part,
            11: self.stmt_pair,
            12: self.stmt_pair_body,
            13: self.action,
            14: self.any_other_token
        }
# Example #4
                if self.lexeme(31):
                    self.i += 1
                    return True
                else:
                    raise SyntaxException("Відсутня закриваюча дужка )",
                                          self.line())
            else:
                raise SyntaxException("Відсутній вираз", self.line())
        return False

    def idn(self):
        """Try to match an identifier token.

        Returns True and advances the token cursor when the current
        lexeme has code 100 or 102; otherwise returns False without
        consuming anything.
        """
        matched = self.lexeme(100) or self.lexeme(102)
        if not matched:
            return False
        self.i += 1
        return True


if __name__ == "__main__":

    FILE_NAME = 'source.txt'
    # Read the whole source; the context manager guarantees the handle
    # is closed even if read() raises (the original left it to manual
    # close and shadowed the builtin name `file`).
    with open(FILE_NAME, 'r') as src:
        input_text = src.read()

    lexer = LexicalAnalyzer(input_text)
    # lexer.run() yields (lexemes, idns, constants); unpack them
    # straight into the syntax analyzer and print its verdict.
    sAn = SyntaxAnalyzer(*lexer.run())
    print(sAn.run())
def Main ():
	# Tokenize the input -- LexicalAnalyzer is called with no arguments,
	# so it presumably reads its source internally; TODO confirm.
	tokens = LA.LexicalAnalyzer ()
	#LA.PrintTokens (tokens)
	# Feed the token stream to the syntactic analyzer to build the
	# semantic table.
	SemanticTable = SA.SyntacticAnalyzer(tokens)
# Example #6
def main():
    """Entry point: run the lexical analyzer over the test source file."""
    file_name = "test1.jl"

    # Use the variable instead of repeating the path literal, so the
    # file name is defined in exactly one place.
    lex = LexicalAnalyzer.LexicalAnalyzer(file_name)
# Example #7
import LexicalRulesParser
import Thompson
import Subset
import LexicalAnalyzer

# Pipeline: parse the lexical rules, compile them into NFAs, determinize
# each NFA into a DFA (subset construction), then run the resulting
# lexical analyzer over the input program.
rules = LexicalRulesParser.start_parsing('inputs/lexical.txt')

nfas = Thompson.Nfa.compile(rules)

# One DFA per NFA; the manual append loop is replaced by a comprehension.
dfas = [Subset.Subset.nfa_to_dfa(nfa) for nfa in nfas]

# The analyzer resolves ambiguous matches using the rule priorities.
lexical_analyzer = LexicalAnalyzer.LexicalAnalyzer(dfas, rules['priorities'])

print(lexical_analyzer.parse_code('inputs/program.txt'))