Пример #1
0
def run_pipeline(text):
    """Run the full pipeline (tokenize, parse, build the AST, then collect,
    build and check types) over *text* and return a human-readable report.

    On a parsing failure the report ends with a "Parsing Error" line and all
    later stages are skipped.
    """
    # Accumulate report lines and join once at the end instead of repeated
    # `ret_text += ...` concatenation, which is quadratic in the report size.
    lines = [
        '',
        '=================== TEXT ======================',
        text,
        '================== TOKENS =====================',
    ]
    tokens = tokenize_text(text)
    lines.append(pprint_tokens(tokens))
    lines.append('=================== PARSE =====================')
    parser = Serializer.load(os.getcwd() + '/parser')
    #parser = LR1Parser(G)
    parse, operations = parser([t.token_type for t in tokens])
    if parse is None:
        # On failure `operations` carries the error description.
        # (Extra '\n' keeps the output identical to the original format.)
        return '\n'.join(lines) + '\n' + "\nParsing Error at " + operations
    lines.append('\n'.join(repr(x) for x in parse))
    lines.append('==================== AST ======================')
    ast = evaluate_reverse_parse(parse, operations, tokens)
    formatter = FormatVisitor()
    lines.append(formatter.visit(ast))

    lines.append('============== COLLECTING TYPES ===============')
    errors = []
    collector = TypeCollector(errors)
    collector.visit(ast)
    context = collector.context
    lines.append('Errors:')
    lines.extend(errors)
    lines.append('Context:')
    lines.append(str(context))
    lines.append('=============== BUILDING TYPES ================')
    builder = TypeBuilder(context, errors)
    builder.visit(ast)
    lines.append('Errors: [')
    for error in errors:
        lines.append('\t')
        lines.append(error)
    lines.append(']')
    lines.append('Context:')
    lines.append(str(context))

    lines.append('=============== CHECKING TYPES ================')
    checker = TypeChecker(context, errors)
    try:
        # NOTE(review): checker.visit rebinds `errors`, discarding the
        # collector/builder errors gathered above — behavior preserved as-is.
        scope, errors = checker.visit(ast)
        while checker.changed:
            scope, errors = checker.visit(ast)
    except SemanticError as e:
        errors = [e.text]
    lines.append('Errors: [')
    for error in errors:
        lines.append('\t')
        lines.append(error)
    lines.append(']')
    if len(errors) == 0:
        checker.printer(ast)

    return '\n'.join(lines)
Пример #2
0
def run_pipeline(G, text):
    """Run the compile pipeline over *text* with grammar *G*, printing every
    stage to stdout, and return the (type-annotated) AST."""

    def show_errors(error_list):
        # Shared formatting for the bracketed "Errors: [...]" listings.
        print('Errors: [')
        for err in error_list:
            print('\t', err)
        print(']')

    print('=================== TEXT ======================')
    print(text)
    print('================== TOKENS =====================')
    token_stream = list(tokenize_text(text))
    pprint_tokens(token_stream)
    print('=================== PARSE =====================')
    parse, operations = LR1Parser(G)(
        [t.token_type for t in token_stream], get_shift_reduce=True)
    print('\n'.join(repr(step) for step in parse))
    print('==================== AST ======================')
    ast = evaluate_reverse_parse(parse, operations, token_stream)
    print(FormatVisitor().visit(ast))
    print('============== COLLECTING TYPES ===============')
    errors = []
    collector = TypeCollector(errors)
    collector.visit(ast)
    context = collector.context
    print('Errors:', errors)
    print('Context:')
    print(context)
    print('=============== BUILDING TYPES ================')
    builder = TypeBuilder(context, errors)
    builder.visit(ast)
    manager = builder.manager
    show_errors(errors)
    print('Context:')
    print(context)
    print('=============== CHECKING TYPES ================')
    # The first check runs with a throwaway error list just to obtain a scope.
    scope = TypeChecker(context, manager, []).visit(ast)
    print('=============== INFERING TYPES ================')
    temp_errors = []
    TypeInferencer(context, manager, temp_errors).visit(ast, scope)
    show_errors(temp_errors)
    print('=============== LAST CHECK ================')
    errors.extend(temp_errors)
    TypeChecker(context, manager, errors).visit(ast)
    show_errors(errors)
    print(FormatVisitor().visit(ast))

    return ast
Пример #3
0
    def __call__(self, program):
        """Compile *program* into a typed AST and return a report string with
        the errors of every phase (or the parser's error output when the
        parse itself fails). Also stores the formatted AST in ``self.tree``.
        """
        tokens = lexer(program)
        parse, operations, result = self.parser(tokens)
        if not result:
            # On failure the parser returns its error report in `parse`.
            return parse

        ast = evaluate_reverse_parse(parse, operations, tokens)
        formatter = FormatVisitor()
        self.tree = formatter.visit(ast)

        collector = TypeCollector()
        collector.visit(ast)
        context = collector.context

        builder = TypeBuilder(context)
        builder.visit(ast)

        gatherer = InferenceGatherer(context)
        scope = gatherer.visit(ast)

        # Iterate inference to a fixed point.
        # NOTE(review): assumes inferencer.visit eventually reports no change;
        # sibling pipelines in this project also break out when errors appear
        # — confirm this one cannot loop forever on erroneous input.
        change = True
        inferencer = TypeInferencer(context)
        while change:
            change = inferencer.visit(ast, scope)

        linker = TypeLinker(context)
        linker.visit(ast, scope)

        s = "Type Collector Errors:\n"
        s = self.format_errors(collector.errors, s)
        s += "Type Builder Errors:\n"
        s = self.format_errors(builder.errors, s)
        s += "Type Linker Errors:\n"
        s = self.format_errors(linker.errors, s)
        # Bug fix: terminate the totals line with '\n' so the "Inferenced ..."
        # lines below no longer run into it on the same line.
        s += "Total Errors: " + str(len(collector.errors) + len(builder.errors) + len(linker.errors)) + "\n"

        for auto, typex in linker.inferenced:
            s += "Inferenced " + typex.name + " from " + auto + "\n"
        return s
Пример #4
0
    # start = True
    # NOTE(review): fragment of a larger function — `start`, `show_tokens`,
    # `show_parsing`, `input_program`, `parser` and `st` are defined outside
    # this excerpt (`st` is presumably a streamlit-like UI module — verify).
    if start:
        tokens = tokenize(input_program)

        if show_tokens:
            # Render one token per line in the UI.
            st.markdown('### Tokens')
            st.write([str(token) + '\n' for token in tokens])

        try:
            parse, operations = parser(tokens, get_shift_reduce=True)

            if show_parsing:
                # Productions come out in reverse application order,
                # hence reversed() for display.
                st.markdown('### Parsing')
                st.write([str(prod) + '\n' for prod in reversed(parse)])

            ast = evaluate_reverse_parse(parse, operations, tokens)
        except ParsingException as syntax_error:
            # A syntax error aborts the semantic analysis below.
            ast = None
            st.error(f"{syntax_error}")

        if ast is not None:
            formatter = FormatVisitor()
            errors = []
            context = Context()
            scope = Scope()

            # Two-pass semantic analysis: collect the declared type names,
            # then build their members; both report into the shared `errors`.
            collector = TypeCollector(context, errors)
            collector.visit(ast)
            builder = TypeBuilder(context, errors)
            builder.visit(ast)
Пример #5
0
def main(G):
    """Streamlit entry point: read a program from a text area, run the type
    inference pipeline with grammar *G*, and render errors plus the final
    formatted AST."""
    st.title('Type Inferencer')

    st.sidebar.markdown('''Produced by:  
    Carmen Irene Cabrera Rodríguez  
    Enrique Martínez González''')

    text = st.text_area('Input your code here:')

    if text:
        st.text(text)
        try:
            tokens = list(tokenize_text(text))
        except Exception:
            # Broad catch is deliberate: any lexer failure becomes a UI error.
            st.error('Lexer Error: You probably did something wrong :wink:')
        else:
            try:
                parser = LR1Parser(G)
                parse, operations = parser([t.token_type for t in tokens], get_shift_reduce=True)
            except Exception:
                st.error('Parser Error: You probably did something wrong :wink:')
            else:
                ast = evaluate_reverse_parse(parse, operations, tokens)

                st.title('Results:')

                # Collect declared type names.
                errors = []
                collector = TypeCollector(errors)
                collector.visit(ast)
                context = collector.context

                # Build type members (attributes/methods).
                builder = TypeBuilder(context, errors)
                builder.visit(ast)
                manager = builder.manager

                # First check runs with a throwaway error list just to
                # produce the scope the inferencer needs.
                checker = TypeChecker(context, manager, [])
                scope = checker.visit(ast)

                # Inference errors are kept separate so the final check can
                # report everything at once.
                temp_errors = []
                inferencer = TypeInferencer(context, manager, temp_errors)
                inferencer.visit(ast, scope)

                # Final check over the inferred AST with all errors combined.
                errors.extend(temp_errors)
                checker = TypeChecker(context, manager, errors)
                checker.visit(ast)
                for e in errors:
                    st.error(e)

                formatter = FormatVisitor()
                tree = formatter.visit(ast)
                st.text(tree)
Пример #6
0
def run_pipeline(G, program):
    """Tokenize, parse and type-infer *program* with grammar *G*, printing
    each stage and a final per-phase error summary to stdout."""
    print("Executing Program")
    token_stream = lexer(program)
    print("Tokens")
    pprint_tokens(token_stream)

    parse, operations, succeeded = LR1Parser(G)(token_stream)
    if not succeeded:
        # On failure `parse` holds the parser's error report.
        print(parse)
        return

    ast = evaluate_reverse_parse(parse, operations, token_stream)
    print(FormatVisitor().visit(ast))

    collector = TypeCollector()
    collector.visit(ast)
    context = collector.context
    print("Context\n", context)

    builder = TypeBuilder(context)
    builder.visit(ast)
    print("Context\n", context)

    gatherer = InferenceGatherer(context)
    scope = gatherer.visit(ast)
    print("Begining of Inferencer -----------------------------------")
    # Iterate inference until it reports no change, bailing out on errors.
    inferencer = TypeInferencer(context)
    keep_going = True
    while keep_going:
        keep_going = inferencer.visit(ast, scope)
        if inferencer.errors:
            break
        print(f"change: {keep_going} ---------------------------------------")

    linker = TypeLinker(context, gatherer.inference_graph)
    for _ in range(2):
        linker.visit(ast, scope)
        if linker.errors:
            break

    finisher = TypeFinisher(context)
    finisher.visit(ast, scope)

    logger = TypeLogger(context)
    print(logger.visit(ast, scope))

    # Assemble the per-phase error summary.
    sections = [
        ("Type Collector Errors:\n", collector.errors),
        ("Type Builder Errors:\n", builder.errors),
        ("Inference Gatherer Errors:\n", gatherer.errors),
        ("Type Inferencer Errors:\n", inferencer.errors),
        ("Type Linker Errors:\n", linker.errors),
    ]
    summary = ""
    for header, phase_errors in sections:
        summary += header
        summary = format_errors(phase_errors, summary)
    summary += "Total Errors: " + str(sum(len(e) for _, e in sections))
    print(summary)
def pipeline(input_file: Path, output_file: Path = None):
    """Compile the COOL program in *input_file* down to MIPS assembly,
    writing the result to *output_file* (defaults to the input path with a
    ``.mips`` suffix). Accumulated errors abort the run via report_and_exit.
    """
    errors = []

    # Bug fix: `is_file` is a method — without the call the bound-method
    # object is always truthy, so missing input files were never reported.
    if not input_file.is_file():
        errors.append(InvalidInputFileError(str(input_file)))

    if len(errors) > 0:
        report_and_exit(errors)

    text = input_file.read_text()

    # define grammar
    grammar, idx, type_id, string, num = define_cool_grammar()

    tokens = tokenize_cool_text(grammar, idx, type_id, string, num, text,
                                errors)

    if len(errors) > 0:
        report_and_exit(errors)
    parser = LR1Parser(grammar, errors)

    if len(errors) > 0:
        report_and_exit(errors)

    parse, operations = parser(tokens)

    if len(errors) > 0:
        report_and_exit(errors)

    # get parsing tree
    ast = evaluate_reverse_parse(parse, operations, tokens)

    # Semantic analysis: collect declared type names, then build members.
    visitors = [TypeCollector(errors), TypeBuilder(errors)]
    for visitor in visitors:
        ast = visitor.visit(ast)

    type_checker = TypeChecker(errors)
    scope, typed_ast = type_checker.visit(ast)

    if len(errors) > 0:
        report_and_exit(errors)

    # COOL AST -> CIL intermediate representation.
    cool_to_cil_visitor = CILBuilder()
    cil_ast = cool_to_cil_visitor.visit(typed_ast)

    formatter = PrintVisitor()
    tree = formatter.visit(cil_ast)
    print(tree)

    # CIL -> MIPS.
    cil_to_mips_visitor = MIPSBuilder()
    mips_ast = cil_to_mips_visitor.visit(cil_ast)

    # The writer accumulates lines in `.output` as a side effect of visit();
    # its return value was previously assigned and immediately overwritten.
    mips_writer = MIPSWriter()
    mips_writer.visit(mips_ast)
    output = '\n'.join(mips_writer.output)

    if output_file is None:
        output_file = input_file.with_suffix(".mips")

    with output_file.open("w") as file:
        print(output, file=file)
Пример #8
0
    def run_pipeline(program):
        """Run lexing, parsing and type inference over *program*, reporting
        the outcome of every stage through the streamlit UI widgets."""

        def record(stage, header, stage_errors, report):
            # Green/red badge for the stage; on failure append its formatted
            # errors (under `header`) to the running report.
            if stage_errors:
                st.error(stage)
                report += header
                report = format_errors(stage_errors, report)
            else:
                st.success(stage)
            return report

        if showProgram:
            st.text(program)

        token_stream = lexer(program)
        if showTokens:
            st.write("Tokenizing")
            st.text(pprint_tokens(token_stream))

        parse, operations, succeeded = LR1Parser(G)(token_stream)
        if not succeeded:
            # On failure `parse` holds the parser's error report.
            st.text(parse)
            return
        if showParsing:
            st.write("Parsing")
            st.text("\n".join(repr(step) for step in parse))

        ast = evaluate_reverse_parse(parse, operations, token_stream)
        tree = FormatVisitor().visit(ast)
        if showAST:
            st.write("Building AST")
            st.text(tree)

        collector = TypeCollector()
        collector.visit(ast)
        context = collector.context
        report = record("Collecting Types", "Type Collector Errors:\n",
                        collector.errors, "")
        if showTypesCollected:
            st.write("Context:")
            st.text(context)

        builder = TypeBuilder(context)
        builder.visit(ast)
        report = record("Building Types", "Type Builder Errors:\n",
                        builder.errors, report)
        if showTypesBuilded:
            st.write("Context")
            st.text(context)

        gatherer = InferenceGatherer(context)
        scope = gatherer.visit(ast)

        # Iterate inference until no change is reported, bailing on errors.
        inferencer = TypeInferencer(context)
        rounds = 0
        keep_going = True
        while keep_going:
            keep_going = inferencer.visit(ast, scope)
            rounds += 1
            st.write(f"Running Type Inferencer({rounds})")
            if inferencer.errors:
                break

        # The inference badge covers both the gatherer and the inferencer.
        if gatherer.errors or inferencer.errors:
            st.error("Inferencing Types")
            if gatherer.errors:
                report += "Inference Gatherer Errors:\n"
                report = format_errors(gatherer.errors, report)
            if inferencer.errors:
                report += "Type Inferencer Errors:\n"
                report = format_errors(inferencer.errors, report)
        else:
            st.success("Inferencing Types")

        linker = TypeLinker(context, gatherer.inference_graph)
        for _ in range(2):
            linker.visit(ast, scope)
            if linker.errors:
                break

        report = record("Linking Types", "Type Linker Errors:\n",
                        linker.errors, report)

        finisher = TypeFinisher(context)
        finisher.visit(ast, scope)

        if showTypesComputed:
            st.write("Computed Nodes Types")
            logger = TypeLogger(context)
            st.text(logger.visit(ast, scope))

        if showProgramResult:
            st.write("Result")
            total = (len(collector.errors) + len(builder.errors) +
                     len(gatherer.errors) + len(inferencer.errors) +
                     len(linker.errors))
            report += "Total Errors: " + str(total)
            st.text(report)