Example #1
0
def main():
    """Smoke test: parse a one-declaration SPL program and fix its expressions."""
    from io import StringIO

    testprog = StringIO('''
        Int a = 5 + -2;
    ''')

    # Tokenize and parse the test program into an AST.
    tokens = list(tokenize(testprog))
    ast = SPL.parse_strict(tokens, testprog)

    # Collect the module's symbols.
    ERROR_HANDLER.setSourceMapping(testprog)
    table = buildSymbolTable(ast)
    ERROR_HANDLER.checkpoint()

    # Normalize the types recorded in the table.
    normalizeAllTypes(table)
    ERROR_HANDLER.checkpoint()

    # Resolve identifiers used in expressions.
    resolveNames(table)
    ERROR_HANDLER.checkpoint()

    # Re-associate expressions according to operator precedence.
    operators = buildOperatorTable()
    fixExpression(ast, operators)
    print(ast)
    ERROR_HANDLER.checkpoint()
Example #2
0
def main():
    """Smoke test: custom prefix operators must survive parsing and typechecking."""
    from io import StringIO

    module_name = "test"
    testprog = StringIO('''
    
        Bool x = -5;
    
        prefix - (x) :: Int -> Bool {
            return True;
        }
        
        prefix ! (x) :: Int -> Bool {
            return False;
        }
    
        /*infixl 7 ++ (a, b) :: Int Int -> Int {
            return a + b + 2;
        }*/
    
        main() :: -> Int {
            Bool x = !(-(5));
            
            x = True;
        }

    ''')

    # Tokenize and parse the test program into an AST.
    tokens = list(tokenize(testprog))
    ast = SPL.parse_strict(tokens, testprog)

    # Collect the module's symbols.
    ERROR_HANDLER.setSourceMapping(testprog)
    table = buildSymbolTable(ast)
    ERROR_HANDLER.checkpoint()

    # Normalize the types recorded in the table.
    normalizeAllTypes(table)
    ERROR_HANDLER.checkpoint()

    # Resolve identifiers used in expressions.
    resolveNames(table)
    ERROR_HANDLER.checkpoint()

    # Operator table: builtins extended with the module's custom operators.
    operators = buildOperatorTable()
    mergeCustomOps(operators, table, module_name)
    builtin_functions = generateBuiltinFuncs()

    # Re-associate expressions according to operator precedence.
    fixExpression(ast, operators)
    ERROR_HANDLER.checkpoint()

    # Typecheck globals first, then function bodies.
    typecheck_globals(table, operators)
    typecheck_functions(table, operators, builtin_functions)
    ERROR_HANDLER.checkpoint()
Example #3
0
File: parser.py — Project: W-M-T/gsc
    }
}
'''
if __name__ == "__main__":
    # CLI entry point: tokenize an SPL source file, parse it strictly, and
    # pretty-print the resulting AST.
    from argparse import ArgumentParser
    from lib.parser.lexer import tokenize
    argparser = ArgumentParser(description="SPL Parser")
    argparser.add_argument("infile",
                           metavar="INPUT",
                           help="Input file",
                           nargs="?",
                           default="../../example programs/p1_example.spl")
    args = argparser.parse_args()

    with open(args.infile, "r") as infile:
        tokenstream = tokenize(infile)
        tokenlist = list(tokenstream)
        #print(tokenlist)
        x = SPL.parse_strict(tokenlist, infile)
        #print(x)
        #print(x.tree_string())
        print(printAST(x))
        exit()  # NOTE(review): everything below this exit() is unreachable

    # Unreachable: hand-built token sequences, presumably leftover parser
    # test fixtures kept around for manual experimentation.
    test1 = [
        Token(None, TOKEN.TYPESYN, None),
        Token(None, TOKEN.TYPE_IDENTIFIER, "String"),
        Token(None, TOKEN.OP_IDENTIFIER, "="),
    ]
    test2 = [
        Token(None, TOKEN.BRACK_OPEN, None),
Example #4
0
def main():
    """Test driver for code generation: compile an SPL module to a .splo object file.

    Reads the module either from the INPUT file argument or, when no file is
    given, from an inline test program.  Runs the full pipeline: parse, build
    symbol tables, resolve names, fix expressions, analyse control flow,
    typecheck, and finally emit an object file under 'example programs/demo/'.
    """
    from argparse import ArgumentParser
    from lib.parser.lexer import tokenize
    argparser = ArgumentParser(description="TESTCODEGEN")
    argparser.add_argument("infile",
                           metavar="INPUT",
                           help="Input file",
                           nargs="?",
                           default=None)
    argparser.add_argument("--lp",
                           metavar="PATH",
                           help="Directory to import libraries from",
                           nargs="?",
                           type=str)
    argparser.add_argument(
        "--im",
        metavar="LIBNAME:PATH,...",
        help=
        "Comma-separated library_name:path mapping list, to explicitly specify import paths",
        type=str)
    args = argparser.parse_args()

    if args.infile is not None:
        if not args.infile.endswith(SOURCE_EXT):
            print("Input file needs to be {}".format(SOURCE_EXT))
            exit()

        infile = open(args.infile, "r")
        tokenstream = tokenize(infile)
        ast = parseTokenStream(tokenstream, infile)

        ERROR_HANDLER.setSourceMapping(infile)

        # Parse the LIBNAME:PATH,... option into [[name, path], ...] pairs.
        import_mapping = list(
            map(lambda x: x.split(":"),
                args.im.split(","))) if args.im is not None else []
        # Every entry must be exactly name:path with both parts non-empty.
        if not (all(map(lambda x: len(x) == 2, import_mapping)) and all(
                map(lambda x: all(map(lambda y: len(y) > 0, x)),
                    import_mapping))):
            print("Invalid import mapping")
            exit()
        import_mapping = {
            a: os.path.splitext(b)[0]
            for (a, b) in import_mapping
        }

        main_mod_path = os.path.splitext(args.infile)[0]
        module_name = os.path.basename(main_mod_path)
        source_dir = os.path.dirname(args.infile)

    else:
        from io import StringIO

        # Inline test program path (no input file given).
        module_name = "testing"
        testprog = StringIO(''' 
            
        ''')

        # Tokenize / parse
        tokenstream = tokenize(testprog)
        tokenlist = list(tokenstream)
        ast = SPL.parse_strict(tokenlist, testprog)

        ERROR_HANDLER.setSourceMapping(testprog)

        # BUGFIX: import_mapping and the source directory were previously
        # undefined on this path, so the getHeaders() call below crashed with
        # NameError / TypeError (os.path.dirname(None)).
        import_mapping = {}
        source_dir = "."

    # Build external table from the module's header files.
    headerfiles, type_headers = getHeaders(
        ast,
        module_name,
        HEADER_EXT,
        source_dir,
        file_mapping_arg=import_mapping,
        lib_dir_path=args.lp,
        lib_dir_env=os.environ[IMPORT_DIR_ENV_VAR_NAME]
        if IMPORT_DIR_ENV_VAR_NAME in os.environ else None)
    # Get all external symbols
    external_table, dependencies = getExternalSymbols(ast, headerfiles,
                                                      type_headers)
    external_table = enrichExternalTable(external_table)
    print(external_table)
    ERROR_HANDLER.checkpoint()

    # Build symbol table
    symbol_table, external_table = buildSymbolTable(
        ast,
        module_name,
        just_for_headerfile=False,
        ext_symbol_table=external_table)
    ERROR_HANDLER.checkpoint()

    # Normalize table
    #normalizeAllTypes(symbol_table, external_table)
    #ERROR_HANDLER.checkpoint()

    #forbid_illegal_types(symbol_table)
    #ERROR_HANDLER.checkpoint()

    # Resolve Expr names
    resolveNames(symbol_table, external_table)
    ERROR_HANDLER.checkpoint()

    # Parse expressions
    fixExpression(ast, symbol_table, external_table)
    ERROR_HANDLER.checkpoint()

    # Function control flow analysis
    analyseFunc(symbol_table)
    ERROR_HANDLER.checkpoint()

    # Typechecking
    typecheck_globals(symbol_table, external_table)
    typecheck_functions(symbol_table, external_table)
    ERROR_HANDLER.checkpoint()

    # Code gen
    gen_code = generate_object_file(symbol_table, module_name, dependencies)
    #print(gen_code)

    with open('example programs/demo/' + module_name + '.splo', 'w+') as fh:
        fh.write(gen_code)
Example #5
0
File: parser.py — Project: W-M-T/gsc
                           metavar="INPUT",
                           help="Input file",
                           nargs="?",
                           default="./example programs/p1_example.spl")
    args = argparser.parse_args()
    '''
    for rule, content in RULES.items():
        print("{}:".format(rule))
        print(content)
        print()
    
    exit()
    '''

    print(expandRule(RULES[ROOT_RULE]))
    print("DONE")
    exit()

    with open(args.infile, "r") as infile:
        tokenstream = tokenize(infile)
        parseTokenStream(tokenstream)
        '''
        tokenlist = list(tokenstream)
        import random
        randtoken = random.choice(tokenlist)
        print(randtoken)
        print(pointToPosition(infile, randtoken.pos))
        '''

    print("DONE")
Example #6
0
def main():
    """Entry point of the SPL compiler driver.

    Parses command-line arguments, validates them, then produces a header
    file (-H), an object file (-C), and/or a linked executable (the default)
    for the given input module.
    """
    argparser = ArgumentParser(description="SPL Compiler")
    argparser.add_argument("infile", metavar="INPUT", help="Input file", nargs="?", default="./example programs/p1_example.spl")
    argparser.add_argument("--lp", metavar="PATH", help="Directory to import header files from", nargs="?", type=str)
    argparser.add_argument("--im", metavar="LIBNAME:PATH,...", help="Comma-separated object_file:path mapping list, to explicitly specify import header paths", type=str)
    argparser.add_argument("-o", metavar="OUTPUT", help="Output file")
    argparser.add_argument("-C", help="Produce an object file instead of an executable", action="store_true")
    argparser.add_argument("-H", help="Produce a header file instead of an executable", action="store_true")
    argparser.add_argument("--stdout", help="Output to stdout", action="store_true")
    args = argparser.parse_args()

    import_mapping = make_import_mapping(args.im)

    # Validate the input path.
    if not args.infile.endswith(SOURCE_EXT):
        ERROR_HANDLER.addError(ERR.CompInputFileExtension, [SOURCE_EXT])
    if not os.path.isfile(args.infile):
        ERROR_HANDLER.addError(ERR.CompInputFileNonExist, [args.infile])

    main_mod_path = os.path.splitext(args.infile)[0]
    main_mod_name = os.path.basename(main_mod_path)
    validate_modname(main_mod_name)

    # Output base name: an explicit -o wins, otherwise derive it from INPUT.
    if args.o:
        outfile_base = args.o
        validate_modname(os.path.basename(outfile_base))
    else:
        outfile_base = main_mod_path

    # Which artifacts to produce; binary is the default when neither -H nor -C.
    compiler_target = {
        'header' : args.H,
        'object' : args.C,
        'binary' : not (args.H or args.C)
    }

    # Reject argument combinations that cannot work together.
    if args.H and args.C and args.stdout:
        ERROR_HANDLER.addError(ERR.CompInvalidArguments, ["Cannot output to stdout when creating both a headerfile and an object file!\n(-H and -C and --stdout)"])
    if args.o and args.stdout:
        ERROR_HANDLER.addError(ERR.CompInvalidArguments, ["Cannot write to specified path when outputting to stdout!\n(-o and --stdout)"])

    ERROR_HANDLER.checkpoint()

    try:
        infile = open(args.infile, "r")
    except Exception as e:
        # fatal=True: the error handler aborts compilation here.
        ERROR_HANDLER.addError(ERR.CompInputFileException, [args.infile, "{} {}".format(e.__class__.__name__, str(e))], fatal=True)

    ERROR_HANDLER.setSourceMapping(infile)

    ast = parseTokenStream(tokenize(infile), infile)

    if compiler_target['header']:
        # Headerfile target: export the module's public symbols.
        symbol_table, ext_table = buildSymbolTable(ast, main_mod_name, just_for_headerfile=True)
        mod_dependencies = get_type_dependencies(ast)
        header_json = export_headers(symbol_table, main_mod_name, mod_dependencies, ext_table)

        if args.stdout:
            print(header_json)
        else:
            write_out(header_json, outfile_base + HEADER_EXT, "headerfile")

    if compiler_target['object']:
        # Objectfile target: compile this module in isolation.
        gen_code = generateObjectFile(ast, args, main_mod_name, import_mapping)

        if args.stdout:
            print(gen_code)
        else:
            write_out(gen_code, outfile_base + OBJECT_EXT, "objectfile")

    if compiler_target['binary']:
        # Binary target: compile, gather imported object files, then link.
        gen_code = generateObjectFile(ast, args, main_mod_name, import_mapping)
        from io import StringIO
        pseudo_file_code = StringIO(gen_code)

        env_lib_dir = os.environ[IMPORT_DIR_ENV_VAR_NAME] if IMPORT_DIR_ENV_VAR_NAME in os.environ else None
        mod_dicts = getObjectFiles(
            pseudo_file_code,
            args.infile,
            os.path.dirname(args.infile),
            file_mapping_arg=import_mapping,
            lib_dir_path=args.lp,
            lib_dir_env=env_lib_dir
        )

        result = linkObjectFiles(mod_dicts, main_mod_name)

        if args.stdout:
            print(result)
        else:
            write_out(result, outfile_base + TARGET_EXT, "executable")

    # Final cleanup
    infile.close()