Example #1
    def __init__(self, file_name):
        self.symbol_table = Symbol_table()
        self.scanner = Scanner(file_name, self.symbol_table)
        self.error_handler = ErrorHandler(self.scanner)
        self.semantic_stack = Stack()
        self.last_token = None
        self.memory_manager = MemoryManager(start=1000)
        self.semantic_analyzer = SemanticAnalyzer(semantic_stack=self.semantic_stack,
                                                  memory_manager=self.memory_manager,
                                                  symbol_table=self.symbol_table,
                                                  error_handler=self.error_handler)
        self.code_generator = Code_generator(symbol_table=self.symbol_table,
                                             semantic_stack=self.semantic_stack,
                                             memory_manager=self.memory_manager)

        self.stack = [0]
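        # Load the parse table from CSV; each row becomes a dict keyed by the column headers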
        with open('parse_table.csv', 'r') as f:
            self.parse_table = [dict(row) for row in csv.DictReader(f, skipinitialspace=True)]
        self.next_token = None
        self.token_history = []
Example #2
        lexer.input(cool_program_code)
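        # Drain the token stream so the lexer collects any lexical errors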
        for token in lexer:
            pass

        if lexer.errors:
            print(lexer.errors[0])
            exit(1)

        cool_ast = parser.parse(cool_program_code)

        if parser.errors:
            print(parser.errors[0])
            exit(1)

        semantic_analyzer = SemanticAnalyzer(cool_ast)
        context, scope = semantic_analyzer.analyze()

        if semantic_analyzer.errors:
            print(semantic_analyzer.errors[0])
            exit(1)

        cool_to_cil = COOLToCILVisitor(context)
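        # Visit the COOL AST with the root scope to produce the CIL AST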
        cil_ast = cool_to_cil.visit(cool_ast, scope)

        # formatter = CIL_AST.get_formatter()
        # cil_code = formatter(cil_ast)
        # with open(f'{sys.argv[1][:-3]}.cil', 'w') as f:
        #     f.write(f'{cil_code}')

        cil_to_mips = CILToMIPSVisitor()
Example #3
    def analyze(self, tweet_data):

        sman = SemanticAnalyzer()
        score = sman.analyze(tweet_data['text'])

        return score
Example #4
    def __init__(self, parser: Parser):
        self.parser = parser
        self.analyzer = SemanticAnalyzer()
        self.callstack = CallStack()
Example #5
            code = open(sys.argv[1]).read()
            lexer = Lexer(code)
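            # Print each token (position fields, type and value) until EOF is reached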
            token = lexer.get_next_token()
            while token.type != "EOF":
                print(f"<{token.y}><{token.x}>={token.type},{token.value}")
                token = lexer.get_next_token()
            print("Lexer[OK]")

        if sys.argv[2] == '-p':
            print("Lexing...")
            code = open(sys.argv[1]).read()
            lexer = Lexer(code)
            print("Lexer[OK]")
            parser = Parser(lexer)
            tree = parser.parse()
            print(tree.toJSON())
        if sys.argv[2] == '-s':
            print("Lexing...")
            code = open(sys.argv[1]).read()
            lexer = Lexer(code)
            print("Lexer[OK]")
            print("Parsing...")
            parser = Parser(lexer)
            tree = parser.parse()
            print("Parser[OK]")
            print("Semantic Analyzer...")
            semantic_analyzer = SemanticAnalyzer()
            semantic_analyzer.visit(tree)
            print("Semantic[OK]")
    else:
        print("шо по аргументам?")
def compile_sample_text():
    print("\nSample text is: \n")
    rule = "------------------\n"
    print(rule)
    print(sample_text)
    print(rule)

    print("Lexing ...")
    try:
        lexer = Lexer(sample_text)
        print("Lexing successful!")
    except Exception as exc:
        raise Exception('Error while lexing :(') from exc

    print("Creating parser ...")
    try:
        parser = Parser(lexer)
        print("Parser creation successful!")
    except Exception as exc:
        raise Exception('Error while creating parser :(') from exc

    print("Parsing ...")
    try:
        tree = parser.parse()
        print("Parsing successful!")
        print("Would you like to see the abstract syntax tree as JSON?")
        answer = input("y/n")
        if answer == "y":
            print(tree.toJSON())
    except Exception as exc:
        raise Exception('Error while generating abstract syntax tree :(') from exc

    print("Creating semantic analyzer ...")
    try:
        semantic_analyzer = SemanticAnalyzer()
        print("Creation of semantic analyzer successful!")
    except Exception as exc:
        raise Exception('Error while creating semantic analyzer :(') from exc

    print("Analyzing semantics ...")
    try:
        semantic_analyzer.visit(tree)
        print("Semantic analysis successful!")
    except Exception as exc:
        raise Exception('Error while analyzing semantics :(') from exc

    print("Creating instruction generator ...")
    try:
        instruction_generator = InstructionGenerator()
        print("Creation of instruction generator successful!")
    except Exception as exc:
        raise Exception('Error while creating instruction generator :(') from exc

    print("Generating instructions ...")
    try:
        instruction_generator.visit(tree)
        print("Instruction generation successful!")
        print("Would you like to see the instructions?")
        answer = input("y/n")
        if answer == "y":
            for instruction in instruction_generator.code.instructions[::-1]:
                print(instruction)
            print("numbers: ", instruction_generator.code.number_stack)
            print("names: ", instruction_generator.code.name_stack)
    except Exception as exc:
        raise Exception('Error while generating instructions') from exc

    print("Creating instruction interpreter...")
    try:
        instruction_interpreter = InstructionInterpreter()
        print("Creation of instruction interpreter successful!")
    except Exception as exc:
        raise Exception('Error while creating instruction interpreter') from exc

    print("Running instruction interpreter ...")
    try:
        print("Output is ...")
        instruction_interpreter.run_code(instruction_generator.code)
    except Exception as exc:
        raise Exception('Error while interpreting instructions') from exc
Example #7

def p_type(t):
    '''type : INT
            | BOOL
            | CHAR'''
    t[0] = t[1]


def p_error(t):
    print("Syntax error in input!")
    global prog
    prog = None


with open('input.txt') as file:
    data = file.read()

parser = yacc.yacc()

# parser.parse(data, debug=True)

parser.parse(data)

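# Semantic analysis over the tree built during parsing (mytree is assumed to come from the grammar actions)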
sm = SemanticAnalyzer(mytree)
# if sm.semantics_check():
mytree.print_tree()
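# Code generation: emit instructions from the analyzer's identifier table and the tree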
cg = CodeGenerator(sm.idents, mytree)
cg.set_instructions()
cg.write()
# input()
Example #8
                    gtokens = GenerateTokens(codigoFonte)
                    tokens = gtokens.initialState()
                    lexicalErrors = gtokens.getErrorTokens()

                    # Syntax analysis
                    sintaxParser = Parser(tokens)
                    sintaxResult = sintaxParser.sintaxParser()
                    sintaxErrors = 0

                    # Write to the output file
                    for item in sintaxResult:
                        out.write(str(item) + "\n")
                        if isinstance(item, SintaxError):
                            sintaxErrors += 1
                    out.write("\n")
                    semantic = SemanticAnalyzer(sintaxParser.getTokens())
                    semanticErrors = semantic.symbolTable()

                    for error in semanticErrors:
                        out.write(str(error) + "\n")

                    for error in lexicalErrors:
                        out.write(str(error) + "\n")

                    if len(lexicalErrors) + sintaxErrors + len(semanticErrors) > 0:
                        print(
                            f"ERROR: Found {len(lexicalErrors) + sintaxErrors + len(semanticErrors)} "
                            f"errors while reading file {file}"
                        )
                    else:
                        out.write(