def test_with_cpp(self):
    """Parse two sample files through the preprocessor and check that
    each yields a FileAST root node."""
    # assertTrue replaces failUnless, a deprecated unittest alias that
    # was removed in Python 3.12.
    ast = parse_file('c_files/memmgr.c', use_cpp=True,
                     cpp_path=CPPPATH,
                     cpp_args=r'-I../utils/fake_libc_include')
    self.assertTrue(isinstance(ast, c_ast.FileAST))
    ast2 = parse_file('c_files/year.c', use_cpp=True,
                      cpp_path=CPPPATH,
                      cpp_args=r'-I../utils/fake_libc_include')
    self.assertTrue(isinstance(ast2, c_ast.FileAST))
def test_with_cpp(self):
    """Parse two sample files through the preprocessor, resolving includes
    from the test c_files directory and the fake libc headers."""
    c_files_path = os.path.join('tests', 'c_files')
    # assertTrue replaces failUnless, a deprecated unittest alias that
    # was removed in Python 3.12.
    ast = parse_file(self._find_file('memmgr.c'), use_cpp=True,
                     cpp_path=CPPPATH,
                     cpp_args='-I%s' % c_files_path)
    self.assertTrue(isinstance(ast, c_ast.FileAST))
    ast2 = parse_file(self._find_file('year.c'), use_cpp=True,
                      cpp_path=CPPPATH,
                      cpp_args=r'-Iutils/fake_libc_include')
    self.assertTrue(isinstance(ast2, c_ast.FileAST))
def get_func_decls(filename, args):
    """Parse *filename* and return the function declarations collected by
    FuncDeclVisitor.  When args.cpp is "none" the file is parsed raw;
    otherwise it is run through the bundled fake preprocessor first."""
    preprocessor_args = s_cpp_args(args)
    if args.cpp.lower() == "none":
        tree = parse_file(filename)
    else:
        fake_cpp = os.path.join(os.path.dirname(__file__), "fake_cpp")
        tree = parse_file(filename, use_cpp=True, cpp_path=fake_cpp,
                          cpp_args=preprocessor_args)
    visitor = FuncDeclVisitor()
    for _, child in tree.children():
        visitor.visit(child)
    return visitor._ret
def test_with_cpp(self):
    """Parse two sample files with the preprocessor enabled, locating the
    include directories relative to the found memmgr.c fixture."""
    memmgr_path = self._find_file('memmgr.c')
    fixtures_dir = os.path.dirname(memmgr_path)
    tree = parse_file(memmgr_path, use_cpp=True, cpp_path=CPPPATH,
                      cpp_args='-I%s' % fixtures_dir)
    self.assertTrue(isinstance(tree, c_ast.FileAST))
    fake_libc = os.path.join(fixtures_dir, '..', '..',
                             'utils', 'fake_libc_include')
    tree2 = parse_file(self._find_file('year.c'), use_cpp=True,
                       cpp_path=CPPPATH,
                       cpp_args=[r'-I%s' % fake_libc])
    self.assertTrue(isinstance(tree2, c_ast.FileAST))
def translate_to_c(filename):
    """Parse *filename* (through cpp) and print it back out as C source."""
    tree = parse_file(filename, use_cpp=True)
    # Dump the raw AST first, then emit the regenerated C code.
    tree.show()
    emitter = c_generator.CGenerator()
    print(emitter.visit(tree))
def create_test_case(ppcg_input_file, ppcg_output_files): assert len(ppcg_output_files) == 1, "Currently support OpenCL only for VOBLA" # Remove PENCIL qualifiers from C code otherwise it will not parse remove_pencil_qualifiers_from_file(ppcg_input_file) # Pre-process and parse the file ast = pycparser.parse_file(ppcg_input_file, use_cpp=True) gen = c_generator.CGenerator() gen.visit(ast) # Find PENCIL function declarations pencil_info = FuncDeclVisitor() pencil_info.visit(ast) # Find struct definitions struct_info = StructDefintionVisitor() struct_info.visit(ast) # We need the struct and function prototypes to dump in the test file ast_new_file = copy_ast_declarations(pencil_info, struct_info) # Analyse the types of parameters in each PENCIL function for function_name in pencil_info.functions.keys(): for formal_param in pencil_info.get_formal_params(function_name): decorate_formal_params(formal_param, struct_info) # Create a main function that will call the PENCIL functions main_func = create_main(pencil_info) ast_new_file.ext.append(main_func) # Write the program to a file test_file = write_to_file(ast_new_file, ppcg_input_file) # Compile the code binary = compile_test_case(test_file, ppcg_output_files[0]) return binary
def translate(filename):
    """Parse a FreeBSD-flavoured C file and print it as JavaScript."""
    # Preprocess with gcc; the -D/-U flags neutralise compiler extensions
    # and libc internals that pycparser cannot digest.
    preprocessor_flags = [
        "-E",
        "-D__FBSDID(x)=",  # FreeBSD identifier
        "-D__attribute__(x)=",  # attribute extension
        "-D__builtin_va_list=void*",  # include/x86/_types.h:154 typedef __builtin_va_list __va_list;
        "-D__inline=",
        "-D__asm(x)=",
        "-D_RUNETYPE_H_=1",  # skip include/runtype.h
        "-D_RuneLocale=void*",  # but it defines this type
        "-D_Noreturn=",
        "-U__BLOCKS__",  # no (^) syntax: include/stdlib.h: int atexit_b(void (^)(void));
        "-U__nonnull",  # avoid __nonnull redefinition
        "-nostdinc",
        "-Ipycparser/utils/fake_libc_include",
        "-Iinclude",  # copy from /usr/include
    ]
    tree = parse_file(filename, use_cpp=True, cpp_path='gcc',
                      cpp_args=preprocessor_flags)
    emitter = js_generator.JavaScriptGenerator()
    print(emitter.visit(tree))
def parse_jamaica_output(filename, includepath=None):
    """
    Use pycparser to parse a Jamaica output file.
    Because Jamaica's output is pretty complex, cpp is required, and even
    still some features have to be removed because it uses GCC extensions
    unsupported by pycparser.
    Returns a pycparser.c_ast or throws ParseError

    If includepath is None then the project include files are used for
    parsing.  Else, an absolute path to alternate includes can be provided.
    """
    if '*' in filename:
        filename = deglob_file(filename)
    cppargs = [
        '-E',
        '-DNDEBUG',  # Disable Jamaica debug support
        '-U__GNUC__',  # Prevents use of __attribute__, will cause an
                       # "unsupported compiler" warning
        # '-W#warnings',  # And this hides that warning
        # These use volatile asm() which is not supported by pycparser:
        '-DJAMAICA_NATIVE_TIME_GET_HIGH_RESOLUTION_TIMESTAMP',
        '-DJAMAICA_NATIVE_THREAD_COMPARE_AND_SWAP',
        '-D__CAICOS__',  # So we can tell if we are being parsed by caicos
        # Override stdlib headers with blank versions (Jamaica builder
        # doesn't use them, but includes them):
        r'-I' + project_path("stdlibheaders"),
    ]
    # `is None` is the idiomatic (and identity-correct) None test;
    # the original used `== None`.
    if includepath is None:
        cppargs.append(r'-I' + project_path("projectfiles", "include"))
    else:
        cppargs.append(r'-I' + str(includepath))
    return pycparser.parse_file(filename, use_cpp=True, cpp_path="gcc",
                                cpp_args=cppargs)
def show_func_defs(filename):
    """Report every function definition found in *filename*."""
    # cpp must be runnable (either supply your own or have one on PATH);
    # the fake libc headers satisfy standard #includes.
    tree = parse_file(filename, use_cpp=True,
                      cpp_args=r"-Iutils/fake_libc_include")
    FuncDefVisitor().visit(tree)
def create_func_defs(filename):
    """Collect function declarations from *filename*; return the buffer."""
    visitor = FuncDeclVisitor()
    visitor.visit(parse_file(filename, use_cpp=True))
    return visitor.buffer
def process_file(filename, incpath):
    """Parse an SDL header and emit a vim syntax file of its symbols.

    Fixes applied: dropped the C-style trailing semicolons, replaced the
    manual ``s[:8] ==`` slice test with ``startswith`` and ``!= None``
    with ``is not None`` (identity comparison is the correct None test).
    """
    cppargs = [
        "-Iutils/fake_libc_include",
        "-I%s" % incpath,
        "-DDECLSPEC=",
        "-D_SDL_platform_h=1",
        "-D_SDL_endian_h=1",
        "-DSDL_FORCE_INLINE=",
        "-D__attribute__(x)=",
    ]
    ast = parse_file(filename, use_cpp=True, cpp_args=cppargs)
    v = visitor()
    v.visit(ast)
    del ast  # the tree can be large; release it before preprocessing again
    cppargs.append("-dM")  # ask cpp to dump macro definitions only
    defines_text = preprocess_file(filename, cpp_args=cppargs)
    for line in defines_text.split("\n"):
        if line.startswith("#define "):
            m = re.search("(SDL_[A-Za-z0-9_]+)", line[8:])
            if m is not None:
                d = m.group(0)
                # keep only all-uppercase SDL_* macro names of useful length
                if isinstance(d, str) and len(d) > 4 and d == d.upper():
                    v.defines.append(d)
    generate_output("sdl2.vim", v)
def translate_to_go(filename):
    """Translate the C file *filename* to Go and print the result."""
    firstname = filename
    if "." in filename:
        firstname = filename.rsplit(".", 1)[0]
    clearlog()
    # Use context managers so the file handles are closed even on error
    # (the original paired bare open()/close() calls).
    with open(filename) as f:
        data = f.read()
    data = cleanup(data)
    # NOTE(review): a fixed scratch path is collision-prone; tempfile
    # would be safer.  Kept as-is to preserve behavior.
    # filename = tempfile.mkstemp()[1]
    filename2 = "/tmp/jeje"
    with open(filename2, "w") as f:
        f.write(data)
    try:
        ast = parse_file(filename2, use_cpp=True)
    except plyparser.ParseError as e:
        print("Could not parse %s:" % (filename))
        print("line " + "".join(str(e).split(":", 1)[1:]))
        return
    generator = GoGenerator()
    s = generator.visit(ast)
    s = generator.fix_int_to_bool(s)
    s = generator.last_minute_replacements(s, firstname)
    s = generator.make_header() + s
    print(s)
def parse_header(filename):
    """Parse a C header, resolving includes from the header's own
    directory and the bundled fake libc.  _DOXYGEN_ONLY_ exposes
    declarations that are otherwise hidden from the compiler."""
    include_dirs = [os.path.dirname(filename), FAKE_LIBC_INCLUDE_DIR]
    flags = [r'-I{}'.format(d) for d in include_dirs]
    flags.append(r'-D_DOXYGEN_ONLY_')
    return parse_file(filename, use_cpp=True, cpp_args=flags)
def translate(filename, args, portion=None):
    """Translate one C enum from *filename* according to *args*, writing
    the result to args.output or stdout."""
    try:
        tree = pycparser.parse_file(filename, use_cpp=True)
        translator = EnumTranslator(tree, args.enum, args.output_class)
        params = {
            'import_line': args.importline,
            'header': args.header,
            'comment': args.comment,
            'hash_type': 'SHA-1',
            'hash_value': sha1sum(args.header),
            'portion': portion,
        }
        translator.translate(params, args.stop)
    except pycparser.plyparser.ParseError as exc:
        eprint('Translation error, try to use -p option with proper '
               'boundaries')
        eprint(exc)
        return
    except TypeError:
        eprint('Translation error, try to use -s option with a stop '
               'enumerator parameter')
        return
    if args.output:
        with open(args.output, 'w') as ftw:
            translator.write(ftw)
    else:
        translator.write()
def generate_xml(filename):
    """Print an XML listing of the BLAS function declarations found in
    *filename*."""
    ast = parse_file(filename, use_cpp=True)
    visitor = FuncDeclVisitor()
    # print() with a single argument behaves identically under Python 2
    # and 3; the bare print statement only parses under Python 2.
    print('<?xml version="1.0" encoding="UTF-8"?>')
    print("<blas_functions>")
    visitor.visit(ast)
    print("</blas_functions>")
def show_decl_file(filename):
    """Run the function- and type-declaration visitors over *filename*,
    then print the collected types."""
    tree = parse_file(filename, use_cpp=True)
    # Both visitors walk the same tree, in this order.
    for visitor_cls in (FuncDefVisitor, TypeDeclVisitor):
        visitor_cls().visit(tree)
    printTypes()
def generate_test_app():
    """Generate test.c by extracting library function calls from
    pycparser_main.c, preprocessed with the XMOS xcc toolchain."""
    c_file = os.path.join(test_app_dir, 'pycparser_main.c')
    # os.environ[...] raises a clear KeyError when XMOS_TOOL_PATH is not
    # set; os.environ.get() would return None and crash later inside
    # os.path.join with a cryptic TypeError.
    xcc_path = os.path.join(os.environ['XMOS_TOOL_PATH'], 'bin', 'xcc')
    include_dirs = [
        os.path.join('..', 'lib_xcore_c', 'api'),
        os.path.join('..', 'lib_xcore_c', 'src'),
        os.path.join('..', '..', 'lib_trycatch', 'lib_trycatch', 'api'),
        os.path.join('..', '..', 'lib_trycatch', 'lib_trycatch', 'src'),
        os.path.join('..', '..', 'lib_xassert', 'lib_xassert', 'api'),
    ]
    cpp_args = ['-E'] + ['-I' + d for d in include_dirs]
    ast = parse_file(c_file, use_cpp=True, cpp_path=xcc_path,
                     cpp_args=cpp_args)
    functions = LibraryFunctionExtractor()
    function_call_blocks = functions.get_function_call_blocks(ast)
    with open(os.path.join(test_app_dir, 'test.c'), "w") as test_c_file:
        test_c_file.writelines(file_top)
        test_c_file.writelines(function_call_blocks)
        test_c_file.writelines(file_bottom)
def show_func_defs(filename):
    """Visit every function definition in the preprocessed *filename*."""
    # A cpp executable must be discoverable on PATH for use_cpp=True.
    visitor = FuncDefVisitor()
    visitor.visit(parse_file(filename, use_cpp=True))
def generate(filename, origin_path, gen_path): ast = parse_file(os.path.join(origin_path, filename), use_cpp=True, cpp_path='gcc', cpp_args=['-E', r'-D' + CSMITH_MINIMAL_MACRO, r'-I' + CSMITH_RUNTIME, r'-I' + FAKE_SYS_LIB]) # parser = c_parser.CParser() # ast = parser.parse(code) # construct an error label checkpoint = c_ast.FuncCall(c_ast.ID('__VERIFIER_error'), None) error_label = c_ast.Label("ERROR", checkpoint) num_ext_children = len(ast.ext) print("## num of ext children: " + str(num_ext_children)) funcs = findAllFuncDef(ast) for picked in funcs: # picked = random.randint(0, len(funcs)-1) seed_fun = funcs.index(picked) print("## Function picked: " + str(seed_fun)) seed = ast.ext.index(picked) print("## Seed: " + str(seed)) numInsert = algo.countAllInsertion(ast.ext[seed].body, 0) print("$$$$$ numInsert is " + str(numInsert)) for i in range(1, numInsert+1): ast_new = copy.deepcopy(ast) ast_new.ext[seed].body = checkpoint_insert_if(ast_new.ext[seed].body, i, error_label) code = c_generator.CGenerator().visit(ast_new) code_new = removeNonSupport(code) basename = filename basename_gen, file_extension = os.path.splitext(basename) filename_gen = gen_path + basename_gen + postfix + str(seed_fun) + '-' + str(i) + file_extension # print "hhhhhhhhhhhhhhhhh" + basename_gen dumpToFile(filename_gen, code_new) print("## Generate: " + filename_gen)
def _get_imp_funcs(c_fpath):
    """Gets the functions that can be imported from a C file."""
    # gets the path of the fake system headers
    fsh_path = os.path.join(__path__[0], 'pycparser', 'fake_libc_include')
    # TODO: USE THE cpp.py MODULE TO PREPROCESS THE FILE
    # BUG FIX: use_cpp expects a boolean; the original passed the file
    # path (use_cpp=c_fpath), which only worked because a non-empty
    # string is truthy.
    ast = pycparser.parse_file(c_fpath, use_cpp=True,
                               cpp_args=['-I', fsh_path])

    # function definition info collector class
    class FuncDefVisitor(pycparser.c_ast.NodeVisitor):
        def __init__(self):
            pycparser.c_ast.NodeVisitor.__init__(self)
            self.func_names = []

        def visit_FuncDef(self, node):
            self.func_names.append(node.decl.name)

    # gets the function definition info
    v = FuncDefVisitor()
    v.visit(ast)
    # returns the function definition info
    return v.func_names
def parse_lrs_classes():
    """Locate librealsense's rs.h, extract the API version and the enum
    classes, and pickle the result to the 'lrs_parsed_classes' file."""
    ## construct path to librealsense/rs.h
    if 'PYRS_INCLUDES' in environ:
        rs_h_filename = path.join(environ['PYRS_INCLUDES'], 'rs.h')
    ## for docs, use locally stored
    elif os.environ.get('READTHEDOCS') == 'True':
        rs_h_filename = './rs.h'
    elif sys.platform == 'win32':
        raise Exception('PYRS_INCLUDES must be set to the location of the librealsense headers!')
    else:
        rs_h_filename = '/usr/local/include/librealsense/rs.h'
    ## if not found, exit
    if not path.exists(rs_h_filename):
        raise Exception('librealsense/rs.h header not found at {}'.format(rs_h_filename))
    # Dynamically extract API version
    api_version = 0
    lrs_globals = {}
    with io.open(rs_h_filename, encoding='latin') as rs_h_file:
        for l in rs_h_file.readlines():
            if 'RS_API' in l:
                key, val = l.split()[1:]
                lrs_globals[key] = val
                # Fold major/minor/patch into a single integer.
                api_version = api_version * 100 + int(val)
                if api_version >= 10000:
                    break
    lrs_globals['RS_API_VERSION'] = api_version
    # Dynamically generate classes
    ast = pycparser.parse_file(rs_h_filename, use_cpp=True)
    lrs_classes = {}
    for c in ast.ext:
        if c.name in ['rs_capabilities', 'rs_stream', 'rs_format',
                      'rs_distortion', 'rs_ivcam_preset', 'rs_option',
                      'rs_preset']:
            e = _get_enumlist(c)
            class_name = c.name
            # Map each enumerator name to its ordinal position...
            class_dict = {}
            for i, (_, child) in enumerate(e.children()):
                class_dict[child.name] = i
            # ...and build the reverse value -> name lookup as well.
            name_for_value = {}
            for key, val in class_dict.items():
                name_for_value[val] = key
            class_dict['name_for_value'] = name_for_value
            lrs_classes[class_name] = class_dict
    classes_list = [lrs_globals, lrs_classes]
    with io.open('lrs_parsed_classes', "wb") as ser_classes:
        pickle.dump(classes_list, ser_classes)
def loadSymbols(filename, cppargs=None):
    """Parse *filename* through the C preprocessor and return the AST.

    :param filename: path of the C file to parse
    :param cppargs: optional list of extra arguments for cpp
    :return: the pycparser FileAST
    """
    # A mutable default ([]) would be shared between calls; use the
    # None-sentinel idiom instead.
    if cppargs is None:
        cppargs = []
    return parse_file(filename, use_cpp=True, cpp_args=cppargs)
def show_decl_file(cFile, function, varFile, scopeVarFile):
    """Parse *cFile*, rewrite *function* using the imported variable
    lists, and print the regenerated C source."""
    in_scope_vars = importVars(varFile)
    out_of_scope_vars = importOutOfScopeVars(scopeVarFile)
    tree = parse_file(cFile, use_cpp=True)
    FuncDefVisitor(function, in_scope_vars, out_of_scope_vars).visit(tree)
    emitter = c_generator.CGenerator()
    print(emitter.visit(tree))
def show_func_defs(args):
    """Visit the function definitions of args[0]; any remaining entries
    in *args* are forwarded to cpp as extra flags."""
    # cpp must exist on PATH; the fake libc headers stand in for the
    # real system includes.
    flags = [r'-Ipycparser/utils/fake_libc_include']
    flags.extend(args[1:])
    tree = parse_file(args[0], use_cpp=True, cpp_args=flags)
    FuncDefVisitor().visit(tree)
def test_cpp_funkydir(self):
    """Exercise preprocessor include paths containing Windows-specific
    path escapes."""
    if sys.platform != 'win32':
        # The fixture relies on Windows path escaping; nothing to test
        # elsewhere.
        return
    include_dir = os.path.join('tests', 'c_files')
    tree = parse_file(self._find_file('simplemain.c'), use_cpp=True,
                      cpp_path=CPPPATH, cpp_args='-I%s' % include_dir)
    self.assertTrue(isinstance(tree, c_ast.FileAST))
def parsing_file(filename, ans, debug=None):
    """Parse *filename*, hash its functions into *ans*, run UncryptDecl,
    and return the AST.

    :param debug: when true, dump the parsed AST; defaults to False.
    """
    # `is None` is the idiomatic identity test (the original used
    # `debug == None`).
    if debug is None:
        debug = False
    node = parse_file(filename)
    hashes_func(node, ans)
    UncryptDecl(node)
    if debug:
        node.show()
    return node
def get_func_defs(filename):
    """Return the declarations of all top-level definitions in
    *filename*, one per line."""
    tree = parse_file(filename, use_cpp=True)
    emitter = c_generator.CGenerator()
    decls = [emitter.visit_Decl(node.decl) for node in tree.ext]
    return '\n'.join(decls)
def get_ast(path):
    """Parse *path* using the bundled fake preprocessor and fake libc."""
    from pycparser import parse_file
    here = os.path.split(os.path.realpath(__file__))[0]
    # The "preprocessor" is itself a Python script, so the interpreter
    # plays the role of cpp and the script becomes its first argument.
    return parse_file(path, use_cpp=True, cpp_path='python',
                      cpp_args=[os.path.join(here, 'fake_cpp.py'),
                                '-I' + os.path.join(here, 'fake_libc_include')])
def generate_xml(filename):
    """Print an XML listing of the level-1 BLAS function declarations
    found in *filename*."""
    ast = parse_file(filename, use_cpp=True)
    visitor = FuncDeclVisitor()
    # print(...) with a single argument is valid under both Python 2
    # and 3; the bare print statement is Python-2-only.
    print("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")
    print("<blas_functions>")
    print(" <level1_functions>")
    visitor.visit(ast)
    # BUG FIX: the closing tag must match the <level1_functions> opened
    # above; it previously read </level3_functions>, producing
    # malformed XML.
    print(" </level1_functions>")
    print("</blas_functions>")
def translateModule(fname):
    """Parse *fname*, dump its AST, and lower it to an IR module."""
    print("parsing file")
    tree = parse_file(fname, use_cpp=True)
    tree.show()
    print("generating IR")
    generator = irgen.IRGenerator()
    generator.visit(tree)
    return generator.getModule()
import sys
from pycparser import parse_file, c_generator

# Parse the C file named on the command line and dump its AST.
if len(sys.argv) < 2:
    # sys.exit with a message prints to stderr and returns a non-zero
    # status — the correct behavior for a usage error.  The bare exit()
    # builtin is an interactive helper injected by the site module and
    # exited with status 0.
    sys.exit(f"Usage: {sys.argv[0]} filename.c")

# ast = c_parser.CParser().parse(src)
ast = parse_file(sys.argv[1], use_cpp=True)
# ast.show()
# print(c_generator.CGenerator().visit(ast))
print(ast)
if __name__ == "__main__":
    # Parse the command line; unrecognised flags are forwarded to cpp.
    (args, unknowns) = getArgs()
    fname = args.file
    noComments = args.no_comments
    DEBUG = args.debug
    if not os.path.isfile(fname):
        sys.exit("No such file: '" + fname + "'")
    fakes = []
    ignores = set()
    # Unless disabled, pull in the fake headers and the ignore list.
    if args.no_fake_headers is not True:
        fakes = getFakeHeaders()
        ignores = findIgnores()
    try:
        ast = parse_file(fname, use_cpp=True, cpp_path="cc",
                         cpp_args=["-E"] + fakes + unknowns)
    except c_parser.ParseError as e:
        sys.exit("Unable to parse file: " + str(e))
    # Emit the Chapel translation: preamble, #defines, then the visited
    # declarations, and finally any typedefs.
    preamble(args, fakes)
    emit_defines(fname)
    v = ChapelVisitor()
    v.visit(ast)
    if args.no_typedefs is False:
        handleTypedefs(typeDefs, ignores)
def debugon():
    '''Give the user access to the debug options; if the file is not
    compiled yet, this calls the compile function first.'''
    global filename
    global compiled
    global funciones
    global iterexec
    global vardicts
    global estructuras
    global retornos
    global debugging
    try:
        if not compiled:
            consola.config(state=tkinter.NORMAL)
            consola.delete(1.0, tkinter.END)
            consola.insert(tkinter.END, 'Archivo no compilado')
            consola.insert(tkinter.END, '\n')
            consola.insert(tkinter.END, 'Compilando...')
            consola.insert(tkinter.END, '\n')
            consola.config(state=tkinter.DISABLED)
            compilar()
    except Exception:
        print("error")
    else:
        # Dictionary of struct definitions found in the source.
        estructuras = dict()
        # Dictionary of function definitions found in the source.
        funciones = dict()
        # Run the parser to build the tree the application will evaluate.
        parser = pycparser.parse_file(filename)
        iterar = iter(parser)
        try:
            while (True):
                # Walk the tree one top-level node at a time.
                siguiente = next(iterar)
                # print(siguiente.show())
                # A struct declaration is added to the structs dictionary.
                if isinstance(siguiente, pycparser.c_ast.Decl):
                    if isinstance(next(iter(siguiente)), pycparser.c_ast.Struct):
                        structs(next(iter(siguiente)))
                # A function definition is added to the functions dictionary.
                elif isinstance(siguiente, pycparser.c_ast.FuncDef):
                    nombre = getattr(next(iter(siguiente)), 'name')
                    funciones[nombre] = siguiente
        except StopIteration:
            # Mark that a debugging session is now active.
            debugging = True
            # The same button is reused to enter and leave debug mode.
            debugbutton.config(text="Salir del modo debug", command=debugoff,
                               image=iconstop, width=30, height=30)
            debugbutton_ttp.text = "Salir del modo debug"
            # Disable the buttons that are not usable while debugging.
            newbutton.config(state=tkinter.DISABLED)
            openbutton.config(state=tkinter.DISABLED)
            exebutton.config(state=tkinter.DISABLED)
            nextbutton.config(state=tkinter.NORMAL)
            stepbutton.config(state=tkinter.NORMAL)
            code.config(state=tkinter.DISABLED)
            # List of deques used to step through the functions.
            iterexec = list()
            # Start with the body of main.
            iterexec.append(
                copy.deepcopy(deque(
                    dict(funciones['main'].children())['body'])))
            # List of dictionaries holding each frame's variables.
            vardicts = list()
            # First dictionary: main's variables.
            vardicts.append(["main", dict()])
            # Deque for return values.
            retornos = deque()
def file_to_dict(filename):
    """Parse the C file *filename* (via cpp with the fake libc headers)
    and return its AST converted to a dict."""
    tree = parse_file(filename, use_cpp=True, cpp_args=fake_libc_arg)
    return to_dict(tree)
# the 'real' cpp if you're on Linux/Unix) and "fake" libc includes # to parse a file that includes standard C headers. # # Copyright (C) 2008-2011, Eli Bendersky # License: BSD #----------------------------------------------------------------- import sys # This is not required if you've installed pycparser into # your site-packages/ with setup.py # sys.path.extend(['.', '..']) # Portable cpp path for Windows and Linux/Unix CPPPATH = '../utils/cpp.exe' if sys.platform == 'win32' else 'cpp' from pycparser import parse_file if __name__ == "__main__": if len(sys.argv) > 1: filename = sys.argv[1] else: filename = 'c_files/year.c' ast = parse_file(filename, use_cpp=True, cpp_path=CPPPATH, cpp_args=r'-I../utils/fake_libc_include') ast.show()
def create_test_harness(path_old, path_new, client, lib):
    """Build a KLEE test harness comparing old and new versions of a
    library client.

    Both *path_old* and *path_new* are parsed; the *lib* and *client*
    functions in each are renamed with _old/_new suffixes, a CLEVER_main
    driver is synthesised that makes the parameters symbolic, assumes the
    two client calls differ, prints the counterexample and aborts, and
    the merged program is written to klee_unmerged.c.

    Returns (harness AST, output file name).
    """
    old_ast = parse_file(path_old, use_cpp=True, cpp_path='gcc',
                         cpp_args=['-E', r'-Iutils/fake_libc_include'])
    new_ast = parse_file(path_new, use_cpp=True, cpp_path='gcc',
                         cpp_args=['-E', r'-Iutils/fake_libc_include'])
    old_lib_visitor = FuncDefVisitor(lib)
    old_lib_visitor.visit(old_ast)
    old_lib_node = old_lib_visitor.container
    assert not old_lib_node is None, "old lib does not exist"
    if isinstance(old_lib_node, c_ast.FuncDef):
        old_lib_node.decl.name += "_old"
        old_lib_node.decl.type.type.declname += "_old"
    old_client_visitor = FuncDefVisitor(client)
    old_client_visitor.visit(old_ast)
    old_client_node = old_client_visitor.container
    assert not old_client_node is None, "old lib client not exist"
    if isinstance(old_lib_node, c_ast.FuncDef):
        old_client_node.decl.name += "_old"
        old_client_node.decl.type.type.declname += "_old"
    new_lib_visitor = FuncDefVisitor(lib)
    new_lib_visitor.visit(new_ast)
    new_lib_node = new_lib_visitor.container
    assert not new_lib_node is None, "new lib does not exist"
    if isinstance(new_lib_node, c_ast.FuncDef):
        new_lib_node.decl.name += "_new"
        new_lib_node.decl.type.type.declname += "_new"
    new_client_visitor = FuncDefVisitor(client)
    new_client_visitor.visit(new_ast)
    new_client_node = new_client_visitor.container
    assert not new_client_node is None, "new client does not exist"
    if isinstance(new_client_node, c_ast.FuncDef):
        new_client_node.decl.name += "_new"
        new_client_node.decl.type.type.declname += "_new"
    # Rewrite the library invocations inside each client to call the
    # suffixed library version.
    LHR_new = LibInvoHunter_Reanme(lib, "new")
    LHR_new.visit(new_client_node)
    LHR_old = LibInvoHunter_Reanme(lib, "old")
    LHR_old.visit(old_client_node)
    # create the main function
    main_func = copy.deepcopy(old_client_node)
    main_func.decl.name = "CLEVER_main"
    main_func.decl.type.type.declname = "CLEVER_main"
    main_func.decl.type.args = c_ast.ParamList(params=[])
    main_func.body.block_items = []
    calling_list = []
    # Declare each client parameter locally and make it KLEE-symbolic.
    if new_client_node.decl.type.args is not None:
        for decl in new_client_node.decl.type.args.params:
            main_func.body.block_items.append(copy.deepcopy(decl))
            if not decl.name is None:
                main_func.body.block_items.append(
                    make_klee_symbolic(decl.name, decl.name))
                calling_list.append(c_ast.ID(name=decl.name))
    old_client_invo = c_ast.FuncCall(name=c_ast.ID(old_client_node.decl.name),
                                     args=c_ast.ExprList(exprs=calling_list))
    new_client_invo = c_ast.FuncCall(name=c_ast.ID(new_client_node.decl.name),
                                     args=c_ast.ExprList(exprs=calling_list))
    # Build the printf format string and the matching value arguments.
    format_string = ""
    ID_list = []
    for arg in calling_list:
        arg_name = arg.name
        format_string += "CEX {name} : %d,".format(name=arg_name)
        ID_list.append(c_ast.FuncCall(
            name=c_ast.ID(name="klee_get_valued"),
            args=c_ast.ExprList(exprs=[c_ast.ID(name=arg_name)])))
    format_string = format_string.rstrip(",")
    # Assume the two client results differ, then report and abort.
    main_func.body.block_items.append(c_ast.FuncCall(
        name=c_ast.ID(name="klee_assume"),
        args=c_ast.BinaryOp(op="!=", left=old_client_invo,
                            right=new_client_invo)))
    main_func.body.block_items.append(c_ast.FuncCall(
        name=c_ast.ID(name="printf"),
        args=c_ast.ExprList(exprs=([c_ast.Constant(
            type='string', value="\" CLEVER NEQ \\n" + "\"")]))))
    main_func.body.block_items.append(c_ast.FuncCall(
        name=c_ast.ID(name="printf"),
        args=c_ast.ExprList(exprs=([c_ast.Constant(
            type='string',
            value="\"" + format_string + "\\n" + "\"")] + ID_list))))
    main_func.body.block_items.append(c_ast.FuncCall(
        name=c_ast.ID(name="abort"), args=c_ast.ExprList(exprs=[])))
    # Merge everything into the old AST and emit the harness file.
    harness_File = old_ast
    harness_File.ext = (harness_File.ext +
                        [new_lib_node, new_client_node, main_func])
    generator = c_generator.CGenerator()
    output_string = Klee_Header_String + generator.visit(harness_File)
    output_file_name = "klee_unmerged.c"
    with open(output_file_name, "w") as outfile:
        outfile.write(output_string)
    # print(output_string)
    return harness_File, output_file_name
api_version = api_version * 100 + int(val) if api_version >= 10000: break globals()['RS_API_VERSION'] = api_version def _get_enumlist(obj): for _, cc in obj.children(): if type(cc) is pycparser.c_ast.EnumeratorList: return cc else: return _get_enumlist(cc) # Dynamically generate classes ast = pycparser.parse_file(rs_h_filename, use_cpp=True) for c in ast.ext: if c.name in [ 'rs_capabilities', 'rs_stream', 'rs_format', 'rs_distortion', 'rs_ivcam_preset', 'rs_option' ]: e = _get_enumlist(c) class_name = c.name class_dict = {} for i, (_, child) in enumerate(e.children()): class_dict[child.name] = i name_for_value = {} for key, val in class_dict.items(): name_for_value[val] = key
def run(pop_size, ngen, cxpb, mutpb, enable_plot=False):
    """ Run the parameterized experiment.

    :param pop_size: The size of the population.
    :param ngen: The number of generations.
    :param cxpb: The probability of a cross-over event.
    :param mutpb: The probability of a mutation event.
    :param enable_plot: A flag to enable live plotting of results.
    """
    # Create a GL window/context for shader evaluation.
    window_cls = mglw.get_local_window_cls()
    window = window_cls(title="Genetic Programming for Shader Optimization",
                        gl_version=(4, 1), vsync=False)
    mglw.activate_context(ctx=window.ctx)
    if pop_size % 4 != 0:
        print("Population size must be multiple of 4")
        return
    # Parse shader
    ast = parse_file("resources/programs/fresnel.glsl", use_cpp=False)
    name, params, tree = shader.parse(ast.ext[0])[0]
    pset = sgp.generate_pset(name, params, tree)
    # Setup GP
    creator = sgp.setup_creator()
    toolbox = sgp.setup_toolbox(creator, pset, tree)
    sgp.setup_operators(toolbox, pset)
    # Setup evaluator; fitness is measured against the unmodified shader.
    evaluator = Evaluator(window, enable_plot=enable_plot)
    baseline = evaluator.determine_baseline()
    toolbox.register("evaluate", evaluator.eval, genesis=tree,
                     baseline=baseline)
    pop = toolbox.population(n=pop_size)
    hof = tools.ParetoFront()  # Child of tools.HallOfFame
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", np.mean, axis=0)
    stats.register("std", np.std, axis=0)
    stats.register("min", np.min, axis=0)
    stats.register("max", np.max, axis=0)
    pop, log = sgp.algorithm(pop, toolbox, cxpb=cxpb, mutpb=mutpb,
                             ngen=ngen, stats=stats, halloffame=hof,
                             verbose=True)
    # Report the Pareto-optimal individuals and their diff vs. the
    # original shader.
    print(f'Hall of Fame ({len(hof)} individuals):')
    for individual in hof:
        frame_time, error = individual.fitness.values
        print('Fitness:')
        print(f'Avg Frame Time [ns]: {frame_time}')
        print(f'Avg Frame Error: {error}')
        print('Variant:')
        diff = shader.diff(name, params, tree, individual)
        sys.stdout.writelines(diff)
        print('')  # Ensure that we write a newline
#!/usr/bin/python import sys, time from pycparser import c_parser, c_ast, parse_file from generate_traces import extract_globals, scan_statement, possible_error_traces from expand_trace import expand_trace, expanded_trace from solve_trace import solve_trace parser = c_parser.CParser() ast = parse_file('simple-flow.c', use_cpp=True) global_vars = [] for obj in ast.ext: if isinstance(obj, c_ast.Decl): global_decl = extract_globals(obj) global_vars.append(global_decl) elif isinstance(obj, c_ast.FuncDef): #print "\n\nscanning function '%s'" % obj.decl.name for stmt in obj.body.stmts: scan_statement(stmt, [obj] + [stmt]) print print len(possible_error_traces), "error trace(s)" for trace in possible_error_traces: #for t in trace: print t print expand_trace(trace) print #print expanded_trace print
def train_model(infile, embeddings, epochs=EPOCHS):
    """Train the TBCCD clone-detection network on POJ pairs from *infile*,
    pick a decision threshold on the dev split, then report precision,
    recall and F1 on the test split."""
    os.environ["CUDA_VISIBLE_DEVICES"] = "1"
    num_feats = len(getWordEmd('Decl'))
    # Build the twin-input network and its loss/optimizer graph.
    nodes_node1, children_node1, nodes_node2, children_node2, res = network.init_net_nofinetune(
        num_feats)
    labels_node, loss_node = network.loss_layer(res)
    optimizer = tf.train.GradientDescentOptimizer(LEARN_RATE)
    train_step = optimizer.minimize(loss_node)
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(
        config=config)  #config=tf.ConfigProto(device_count={'GPU':0}))
    sess.run(tf.global_variables_initializer())
    # Pre-parse every source file listed in flistPOJ.txt; files that do
    # not exist are remembered in listrec and skipped later.
    dictt = {}
    listrec = []
    f = open("flistPOJ.txt", 'r')
    line = f.readline().rstrip('\t')
    l = line.split('\t')
    print(len(l))
    for ll in l:
        if not os.path.exists(ll):
            listrec.append(ll)
            continue
        tree = pycparser.parse_file(ll)
        sample, size = _traverse_tree_withid(tree)
        dictt[ll] = sample
    f.close()
    # Training loop: one pass over *infile* per epoch, one pair per step.
    for epoch in range(1, epochs + 1):
        f = open(infile, 'r')
        line = "123"
        k = 0
        aaa = 0
        while line:
            line = f.readline().rstrip('\n')
            l = line.split('\t')
            if len(l) != 3:
                break
            k += 1
            if (l[0] in listrec) or (l[1] in listrec):
                continue
            batch_labels = []
            nodes1, children1, nodes2, children2, la = getData_nofinetune(
                l, dictt, embeddings)
            batch_labels.append(la)
            _, err, r = sess.run(
                [train_step, loss_node, res],
                feed_dict={
                    nodes_node1: nodes1,
                    children_node1: children1,
                    nodes_node2: nodes2,
                    children_node2: children2,
                    labels_node: batch_labels
                })
            maxnodes = max(len(nodes1[0]), len(nodes2[0]))
            if aaa % 1000 == 0:
                print('Epoch:', epoch, 'Step:', aaa, 'Loss:', err, 'R:', r,
                      'Max nodes:', maxnodes)
            aaa += 1
        f.close()
    # Dev pass: score every pair at 15 candidate thresholds
    # (-0.7, -0.6, ..., 0.7) and keep the predictions per threshold.
    correct_labels_dev = []
    predictions_dev = []
    for reci in range(0, 15):
        predictions_dev.append([])
    ff = open("./datasetForVariantsTBCCD/POJ/devdata.txt", 'r')
    line = "123"
    k = 0
    while line:
        line = ff.readline().rstrip('\n')
        l = line.split('\t')
        if len(l) != 3:
            break
        if (l[0] in listrec) or (l[1] in listrec):
            continue
        batch_labels = []
        nodes1, children1, nodes2, children2, la = getData_nofinetune(
            l, dictt, embeddings)
        batch_labels.append(la)
        k += 1
        output = sess.run(
            [res],
            feed_dict={
                nodes_node1: nodes1,
                children_node1: children1,
                nodes_node2: nodes2,
                children_node2: children2,
            })
        correct_labels_dev.append(int(batch_labels[0]))
        threaholder = -0.7
        for i in range(0, 15):
            if output[0] >= threaholder:
                predictions_dev[i].append(1)
            else:
                predictions_dev[i].append(-1)
            threaholder += 0.1
    # Pick the threshold index with the best dev F1.
    maxstep = 0
    maxf1value = 0
    for i in range(0, 15):
        f1score = f1_score(correct_labels_dev, predictions_dev[i],
                           average='binary')
        if f1score > maxf1value:
            maxf1value = f1score
            maxstep = i
    ff.close()
    # Test pass using the selected threshold.
    correct_labels_test = []
    predictions_test = []
    ff = open("./datasetForVariantsTBCCD/POJ/testdata.txt", 'r')
    line = "123"
    k = 0
    print("starttest:")
    while line:
        line = ff.readline().rstrip('\n')
        l = line.split('\t')
        if len(l) != 3:
            break
        k += 1
        if (l[0] in listrec) or (l[1] in listrec):
            continue
        batch_labels = []
        nodes1, children1, nodes2, children2, la = getData_nofinetune(
            l, dictt, embeddings)
        batch_labels.append(la)
        output = sess.run(
            [res],
            feed_dict={
                nodes_node1: nodes1,
                children_node1: children1,
                nodes_node2: nodes2,
                children_node2: children2,
            })
        k += 1
        correct_labels_test.append(int(batch_labels[0]))
        threaholderr = -0.7 + maxstep * 0.1
        if output[0] >= threaholderr:
            predictions_test.append(1)
        else:
            predictions_test.append(-1)
    ff.close()
    print("testdata\n")
    print("threaholder:")
    print(threaholderr)
    p = precision_score(correct_labels_test, predictions_test,
                        average='binary')
    r = recall_score(correct_labels_test, predictions_test,
                     average='binary')
    f1score = f1_score(correct_labels_test, predictions_test,
                       average='binary')
    print("recall_test:" + str(r))
    print("precision_test:" + str(p))
    print("f1score_test:" + str(f1score))
def show_func_defs(args):
    """Parse args[-1] (extra cpp flags in args[:-1]) and emit a cgreen
    mock skeleton for every function definition found.

    Tries to locate pycparser's fake_libc_include next to the current
    directory or next to this script (following symlinks)."""
    # Note that cpp is used. Provide a path to your own cpp or
    # make sure one exists in PATH.
    pycparser_path = None
    # Try to find a fake_libc
    # In current directory?
    if verbose:
        eprint("Called in {0}".format(
            os.path.abspath(os.path.dirname(sys.argv[0]))))
        eprint("Looking for fake_lib in current directory...")
    if os.path.isdir('pycparser'):
        pycparser_path = r'./pycparser'
    else:
        this_script = os.path.abspath(__file__)
        if verbose:
            eprint(
                "Looking for fake_lib in directory of script ({0})...".format(
                    this_script))
        # Look in the directory of this script
        while os.path.islink(this_script):
            # If the script is a symlink, resolve it first, recursively...
            # Note: can only handle absolute symlinks?
            this_script = os.readlink(this_script)
            if verbose:
                eprint("Script was a symlink, resolving it to '{0}'...".format(
                    this_script))
        if os.path.isdir(
                os.path.join(os.path.dirname(this_script), 'pycparser')):
            # Yes, there is a pycparser symlink here
            pycparser_path = os.path.join(os.path.dirname(this_script),
                                          'pycparser')
    if pycparser_path:
        pycparser_lib = reduce(os.path.join,
                               [pycparser_path, 'utils', 'fake_libc_include'])
        if verbose:
            print(
                "/* Generated with cgreen-mocker and pycparser's fake_libc from %s */"
                % (pycparser_path))
    elif verbose:
        eprint("Not found")
    try:
        options = ['-I' + pycparser_lib] if pycparser_path else []
        if add_gnuisms:
            # And add some common GNUisms
            options = options + [
                r'-D__gnuc_va_list(c)=', r'-D__attribute__(x)=',
                r'-D__extension__=', r'-D__restrict=', r'-D__inline='
            ]
        if verbose:
            eprint("Parsing with options = {0}".format(options))
        cpp_args = list(filter(None, options))
        ast = parse_file(args[-1], use_cpp=True,
                         cpp_args=cpp_args + args[0:-1])
    except ParseError as e:
        print("ERROR: {} - C99 parse error".format(e))
        return
    print('/* -*- c -*-*/')  # Suggest c-mode for Emacs
    print('#include "%s"' % args[len(args) - 1])
    print('#include <cgreen/mocks.h>')
    print()
    v = FuncDefVisitor(args[-1])
    v.visit(ast)
# Neutralise project macros so pycparser can digest the sources.
cpp_args.append("-DCXPORT(x)=cxport(__ ## x)")
cpp_args.append("-DTASKLEVEL(x)=tasklevel(__ ## x)")
cpp_args.append("-DPLENGTH=plength")
cpp_args.append("-DLIBCALL=libcall")
# ensure on x86 that processing of __attribute__ and friends is not problematic
cpp_args.append("-D__attribute__(...)=")
cpp_args.append("-D__extension__=")
cpp_args.append("-D__restrict=")
cpp_args.append("-D__inline=")
cpp_args.append("-D__asm__(...)=")
cpp_args.append("-D__builtin_va_list=int")
cpp_args.append("-D__builtin_offsetof(x,y)=0")
#cpp_args.append("-Dsizeof(x)=0")
# parse the provided file and collect its functions
ast = parse_file(filename, use_cpp=True, cpp_args=cpp_args)
v = FuncVisitor()
v.visit(ast)
"""
generate a .h for syscall function declarations
generate a .c for syscall stubs (to be compiled in the userland)
generate a .c containing the syscalls dispatcher toward syscalled os functions (kernelmode)
"""
defs_file = open(args.defs, 'w+')
stub_file = open(args.stub, 'w+')
dispatcher_file = open(args.disp, 'w+')
print(".h defs: " + args.defs)
print(".c stub: " + args.stub)
# print(fin.read()) # with open("C:/Users/fu3uk/Desktop/a/main_pp.c", 'r') as fin: # print(fin.read()) if __name__ == "__main__": '''print(len(sys.argv)) for i in range(0, len(sys.argv)): print(str(i) + " " + sys.argv[i]) print("")''' if len(sys.argv) < 3: print("Too few arguments - need >= 2") quit() try: ast1 = parse_file(sys.argv[1]) ast2 = parse_file(sys.argv[2]) except: print("Failed to parse file") quit() ast1_root = Node(";") ast2_root = Node(";") for i in range(0, len(ast1.ext)): if (not "Typedef" in str(ast1.ext[i])) and (not "Pragma" in str( ast1.ext[i])): compound = ast1.ext[i].body.block_items if not "None" in str(compound): astmscp.ast_compound(compound, ast1_root, ast1_root)
def split(self, rom_bytes, base_path):
    """Split this segment's files out of ``rom_bytes`` under ``base_path``.

    asm/hasm/c entries are disassembled with Capstone (MIPS64, big-endian)
    and written as .s files; "bin" entries are dumped raw. For "c" entries,
    only functions NOT already defined in the matching C source are emitted
    (one .s per function, under asm/nonmatchings).
    """
    md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS64 + CS_MODE_BIG_ENDIAN)
    md.detail = True
    # skipdata keeps disassembly going over embedded non-instruction bytes
    md.skipdata = True
    for split_file in self.files:
        if split_file["subtype"] in ["asm", "hasm", "c"]:
            if self.type not in self.options[
                    "modes"] and "all" not in self.options["modes"]:
                continue
            out_dir = self.create_split_dir(base_path, "asm")
            rom_addr = split_file["start"]
            insns = []
            for insn in md.disasm(
                    rom_bytes[split_file["start"]:split_file["end"]],
                    split_file["vram"]):
                insns.append(insn)
            funcs = self.process_insns(insns, rom_addr)
            funcs = self.determine_symbols(funcs)
            funcs_text = self.add_labels(funcs)
            # funcs_text = self.rename_duplicates(funcs_text)
            # TODO need a better solution
            if split_file["subtype"] == "c":
                print("Splitting " + split_file["name"])
                # Names of functions already implemented in the C file;
                # populated by the visitor below.
                defined_funcs = set()

                class FuncDefVisitor(c_ast.NodeVisitor):
                    def visit_FuncDef(self, node):
                        defined_funcs.add(node.decl.name)

                v = FuncDefVisitor()
                # parse_file resolves relative #includes against the cwd,
                # hence the temporary chdir.
                old_dir = os.getcwd()
                os.chdir(base_path)
                c_path = os.path.join(base_path, "src",
                                      split_file["name"] + ".c")
                cpp_args = self.get_pycparser_args()
                ast = parse_file(c_path, use_cpp=True, cpp_args=cpp_args)
                os.chdir(old_dir)
                v.visit(ast)
                out_dir = self.create_split_dir(
                    base_path, os.path.join("asm", "nonmatchings"))
                for func in funcs_text:
                    func_name = self.get_func_name(func)
                    # Only emit asm for functions the C file does not define.
                    if func_name not in defined_funcs:
                        # TODO make more graceful
                        if "compiler" in self.options and self.options[
                                "compiler"] == "GCC":
                            out_lines = self.get_gcc_inc_header()
                        else:
                            out_lines = []
                        out_lines.extend(funcs_text[func])
                        out_lines.append("")
                        outpath = Path(
                            os.path.join(out_dir, split_file["name"],
                                         func_name + ".s"))
                        outpath.parent.mkdir(parents=True, exist_ok=True)
                        with open(outpath, "w", newline="\n") as f:
                            f.write("\n".join(out_lines))
            else:
                # Pure asm/hasm: everything goes into a single .s file.
                out_lines = self.get_header()
                for func in funcs_text:
                    out_lines.extend(funcs_text[func])
                out_lines.append("")
                outpath = Path(
                    os.path.join(out_dir, split_file["name"] + ".s"))
                outpath.parent.mkdir(parents=True, exist_ok=True)
                with open(outpath, "w", newline="\n") as f:
                    f.write("\n".join(out_lines))
        elif split_file["subtype"] == "bin" and (
                "bin" in self.options["modes"]
                or "all" in self.options["modes"]):
            # Raw binary blob: copy the byte range verbatim.
            out_dir = self.create_split_dir(base_path, "bin")
            with open(os.path.join(out_dir, split_file["name"] + ".bin"),
                      "wb") as f:
                f.write(rom_bytes[split_file["start"]:split_file["end"]])
    # EAFP dict-of-lists insert: append if the key exists, otherwise
    # create the list (a defaultdict(list) would express this directly).
    try:
        the_dict[func.name.name].append(funcname)
    except Exception as e:
        the_dict[func.name.name] = [funcname]
        # raise e
        # print('funcDefs:',func.name.name,func.name.coord)


if __name__ == '__main__':
    # Build a call/reference map for every function defined in the file.
    filename = "./codes/notes.c"
    defList = []          # all FuncDef nodes
    the_dict = {}         # function name -> referencing functions
    invoke_dict = {}      # function name -> invoked functions
    ast = parse_file(filename, use_cpp=True)
    extract_funcDef(ast, defList)
    # print(len(defList))
    show_deflist(defList)
    nameList = [item.decl.name for item in defList]
    for name in nameList:
        show_func_defs(ast, name, the_dict, invoke_dict)
    # parser(filename)
    print('====Ref_dict====')
    for k, v in the_dict.items():
        print('{}:{}'.format(k, v))
    print('====Invoke_dict====')
    for k, v in invoke_dict.items():
        print('{}:{}'.format(k, v))
def translate_to_c(filename):
    """Parse *filename* (via cpp) and print the AST dump followed by
    regenerated C source."""
    tree = parse_file(filename, use_cpp=True)
    tree.show()
    emitter = c_generator.CGenerator()
    print(emitter.visit(tree))
#-----------------------------------------------------------------
# pycparser: using_cpp_libc.py
#
# Shows how to use the provided 'cpp' (on Windows, substitute for
# the 'real' cpp if you're on Linux/Unix) and "fake" libc includes
# to parse a file that includes standard C headers.
#
# Eli Bendersky [http://eli.thegreenplace.net]
# License: BSD
#-----------------------------------------------------------------
import sys

from pycparser import parse_file

if __name__ == "__main__":
    # Use the file given on the command line, or fall back to the sample.
    target = sys.argv[1] if len(sys.argv) > 1 else 'examples/c_files/year.c'
    tree = parse_file(target, use_cpp=True,
                      cpp_path='cpp',
                      cpp_args=r'-Iutils/fake_libc_include')
    tree.show()
import sys
import os
sys.path.extend(['.', '..'])
from pycparser import parse_file
from minic.c_ast_to_minic import transform
from transform_func import *
from func_utils import function_wrapper

if __name__ == "__main__":
    # Translate every non-generated C input in the directory, printing the
    # original source plus the functional translation with and without
    # simplification.
    directory_path = "./inputs/checkin5_inputs"
    directory = os.fsencode(directory_path)
    for entry in os.listdir(directory):
        filename = os.fsdecode(entry)
        if filename.endswith("_out.c"):
            continue  # skip files produced by a previous run
        output_c = function_wrapper(os.path.join(directory_path, filename))
        mast = transform(parse_file(output_c))
        print("File: {} \nInput:".format(filename))
        with open(os.path.join(directory_path, filename), 'r') as fin:
            print(fin.read())
        translator = FunctionalTranslator(mast, False)
        print("No Simplification Output:\n{}\n".format(translator))
        translator = FunctionalTranslator(mast, True)
        print("With Simplification Output:\n{}\n----------".format(
            translator))
def functionDeclarations(includeDir, includeFile):
    """Collect function declarations from a PS4 libc header.

    Returns a ``(decls, error)`` tuple: ``decls`` maps whatever the
    visitor collects, ``error`` is ``None`` on success or the caught
    exception when parsing failed. Headers known not to parse are
    skipped up front.
    """
    decls = {}
    if includeFile.endswith('in6.h'):
        return (decls, None)
    if 'ps4/internal/' in includeFile:
        return (decls, None)
    # Preprocess-only flags plus a pile of -D workarounds that neutralize
    # compiler extensions and BSD macros pycparser cannot handle.
    args = \
    [
        r'-E', r'-w',
        r'-I' + includeDir,
        r'-nostdinc',
        r'-std=c11',
        r'-D_POSIX_C_SOURCE=200809',
        r'-D__ISO_C_VISIBLE=1999',
        r'-D__POSIX_VISIBLE=200809',
        r'-D__XSI_VISIBLE=700',
        r'-D__BSD_VISIBLE=1',
        r'-D_DEFAULT_SOURCE=1',
        r'-D__ISO_C_VISIBLE=1999',
        # From here on manual fixes for parsing various include files ...
        r'-Dlint',
        r'-D__builtin_va_list=int',
        r'-D__attribute__(...)=',
        r'-D__extension__(...)=',
        r'-D__format__(...)=',
        r'-D__typeof__(...)=',
        r'-D__asm__(...)=',
        r'-D__inline=',
        r'-D__volatile=',
        r'-D__asm(...)=',
        r'-Dvolatile(...)=',
        r'-Duintfptr_t=int',
        r'-Dintrmask_t=int',
        r'-Dsa_family_t=int',
        r'-D_SA_FAMILY_T_DECLARED',
        r'-DLIST_HEAD(...)=',
        r'-DLIST_ENTRY(...)=int',
        r'-DTAILQ_HEAD(...)=',
        r'-DTAILQ_ENTRY(...)=int',
        r'-DSLIST_HEAD(...)=',
        r'-DSLIST_ENTRY(...)=int',
        # Very specific fixes for missing includes or defines in some files ...
        #r'-includestdlib.h',
        #r'-includestdio.h',
        r'-includesys/cdefs.h',
        r'-includemachine/_types.h',
        r'-includesys/types.h',
        r'-includesys/_stdint.h',
        r'-includesys/elf.h',
        r'-D__ELF_WORD_SIZE=64',
    ]
    try:
        ast = parse_file(includeFile, use_cpp=True, cpp_path='gcc',
                         cpp_args=args)
        v = LibPS4FunctionDeclarationsVisitor(includeDir, decls)
        v.visit(ast)
        #ast.show()
    except Exception as e:
        # Dump a reproducible gcc command line for debugging the failure.
        print('--->>> ' + includeFile)
        print(e)
        p = 'gcc ' + includeFile + ' ' + ' '.join(args).replace(
            ' -std=c11', ' "-std=c11').replace(' -i', '" "-i').replace(
                ' -D', '" -D"') + '"'
        print(p)
        print(os.popen(p).read())
        print('<<<--- ' + includeFile)
        # BUG FIX: this was a bare expression `(decls, e)` with no effect,
        # so parse failures were silently reported as success.
        return (decls, e)
    return (decls, None)
else: nn6, ne6 = funcComplexity(dictionary[str(item.name.name)]) nn, ne = (1 + nn6 + nn), (2 + ne6 + ne) # 接收节点Return 边记为1 节点不记录 elif str(type(item)) == "<class 'pycparser.c_ast.Return'>": nn7, ne7 = 1, 0 nn, ne = (nn7 + nn), (ne7 + ne) return (nn, ne) def funcComplexity(child): # 输入为函数体 if (child is None): return (0, 0) nn = 0 # 节点数 ne = 0 # 边数 try: for item in child.block_items: nn, ne = parser_one(item, nn, ne) except: nn, ne = parser_one(child, nn, ne) return (nn, ne) if __name__ == "__main__": ast = pycparser.parse_file("testp.c", use_cpp=True) no_of_vertices, no_of_edges = findComplexity(ast) print("Cyclomatic Complexity:", no_of_edges - no_of_vertices + 2)
def generate_imgui(source_file, output_file):
    """Generate the ``rizz_api_imgui`` C struct (function-pointer table)
    and its initializer from the cimgui header in *source_file*.

    Collected declarations accumulate in the module-level ``func_nodes``
    list, which the FuncCollector visitors (defined elsewhere) fill in.
    """
    # Note that cpp is used. Provide a path to your own cpp or
    # make sure one exists in PATH.
    ast = parse_file(source_file)
    # One collector pass per API prefix / owner type.
    v = FuncCollector('ig')
    v.visit(ast)
    v = FuncCollector('ImGuiIO_', False, 'ImGuiIO')
    v.visit(ast)
    v = FuncCollector('ImDrawList_', False, 'ImDrawList')
    v.visit(ast)
    # NOTE(review): 'ImDrawList' here looks like a copy-paste slip for
    # 'ImFont' — confirm against FuncCollector's semantics.
    v = FuncCollector('ImFont_', False, 'ImDrawList')
    v.visit(ast)
    v = FuncCollector('ImFontAtlas_', False, 'ImFontAtlas')
    v.visit(ast)
    v = FuncCollector('ImGuiPayload_', False, 'ImGuiPayload')
    v.visit(ast)
    v = FuncCollector('ImGuiListClipper_', False, 'ImGuiListClipper')
    v.visit(ast)
    v = FuncCollector('ImGuiTextFilter_', False, 'ImGuiTextFilter')
    v.visit(ast)
    v = FuncCollector('ImGuiTextBuffer_', False, 'ImGuiTextBuffer')
    v.visit(ast)
    generator = c_generator.CGenerator()
    # `file` shadows the builtin; left as-is to keep this a doc-only change.
    with open(output_file, 'wt') as file:
        file.write('typedef struct rizz_api_imgui\n{\n')
        for f in func_nodes:
            if ('comment' in f):
                file.write(' // ' + f['comment'] + '\n')
            node = f['node']
            src_name = f['src_name']
            # Regenerate the C declaration, then rewrite
            # "ret name(args)" into "ret (*name)(args)".
            line = generator.visit(node)
            name_idx = line.find(src_name)
            first_paran = line.find('(')
            # fix pointer star position
            first_star = line.find('*')
            if (first_star > 0 and line[first_star-1] == ' '
                    and first_star < first_paran):
                # "T *name" -> "T* name": drop the space before '*'
                line = line[:first_star-1] + line[first_star:]
            if (first_star > 0 and (first_star+1) == name_idx
                    and first_star < first_paran):
                # "T*name" -> "T* name": insert a space after '*'
                line = line[:first_star] + ' ' + line[first_star:]
            # Find the space immediately before the function name: it
            # separates the return type from the name.
            first_space = -1
            space_idx = line.find(' ')
            while space_idx != -1:
                if (space_idx == name_idx-1):
                    first_space = space_idx
                    break
                space_idx = line.find(' ', space_idx+1)
            if (first_space != -1 and first_paran != -1):
                final_line = ' %-14s %s%s)%s;\n' % (line[:first_space+1],
                                                    '(*', node.name,
                                                    line[first_paran:])
                file.write(final_line)
        file.write('} rizz_api_imgui;\n')
        # Emit the initializer mapping every pointer to its source symbol.
        file.write('\nrizz_api_imgui the__imgui = {\n')
        for i, f in enumerate(func_nodes):
            node = f['node']
            src_name = f['src_name']
            line = ' .%s = %s' % (node.name, src_name)
            if i < len(func_nodes)-1:
                line = line + ',\n'
            else:
                line = line + '\n'
            file.write(line)
        file.write('};\n')
    # Redundant: the `with` block above has already closed the file.
    file.close()
        # Python 2 code. Processes one corpus subdirectory per iteration:
        # parse each C file, build the node network, and order its layers.
        count = 0
        procount += 1
        print '!!!!!!!!!!!!!!!!!! procount = ', procount
        #if procount >3:
        #    break
        for onefile in os.listdir(datadir + subdir):
            #print 'oneoneoneoneone!!!!!!!!!! '
            filename = onefile
            onefile = datadir + subdir + onefile
            #print savefile
            #onefile = 'test.c'
            try:
                #print '-----------------', onefile, '-----------------'
                ast = parse_file(onefile, use_cpp=True)
            except:
                # Best-effort corpus processing: skip files that fail to parse.
                print 'ooooops, parsing error for', onefile
                continue
            nodes = []
            leafs = []
            #print 'constructing the network'
            ConstructNodes(ast, 'root', None, None, nodes, leafs)
            #print len(nodes)
            #print nodes[0].word
            nodes.extend(leafs)
            #print len(nodes)
            AdjustOrder(nodes)
            layers = InitByNodes(nodes)
            #fdump = open(targetdir + subdir + filename, 'w')
def file_to_json(filename, **kwargs):
    """Parse a C source file and return a JSON string of its AST.

    Extra keyword arguments are forwarded to ``to_json``.
    """
    parsed = parse_file(filename, use_cpp=True, cpp_args=fake_libc_arg)
    return to_json(parsed, **kwargs)
class LHSPrinter(NodeVisitor):
    """Collects variables that are written to (assignment LHS) and the
    full set of variables encountered in assignments."""

    def __init__(self):
        self.written_set = set()
        self.var_set = set()

    def visit_Assignment(self, assignment):
        target = assignment.lvalue
        if isinstance(target, ID):
            # Plain identifier on the left: it is both written and seen.
            self.written_set.add(target.name)
            self.var_set.add(target.name)
        source = assignment.rvalue
        if isinstance(source, BinaryOp):
            # Harvest every variable referenced inside the expression.
            collector = BinaryOpVistor()
            collector.visit(source)
            self.var_set.update(collector.var_set)
        if isinstance(source, ArrayRef):
            self.var_set.add(source.name.name)


if __name__ == "__main__":
    for i in range(1, 4):
        function_wrapper('./p3_input{}.txt'.format(i))
        mast = transform(parse_file('./p3_input{}_out.c'.format(i)))
        lhsp = LHSPrinter()
        print("p3_input{}.txt: ".format(i))
        lhsp.visit(mast)
        print("Written Variables: {} ".format(', '.join(lhsp.written_set)))
        print("All Variables: {} ".format(', '.join(lhsp.var_set)))
def parse_file_func():
    """Parse the hard-coded input and dump its AST to stdout
    ('log.c' was an earlier target)."""
    tree = parse_file(filename='fly_by_wire')
    tree.show()
def show_func_calls(filename, funcname):
    """Visit every call site of *funcname* in the C file *filename*."""
    tree = parse_file(filename, use_cpp=True)
    visitor = FuncCallVisitor(funcname)
    visitor.visit(tree)
def __init__(self, filename):
    """Parse *filename* (preprocessed with cpp) and gather declarations."""
    self.ast = pycparser.parse_file(filename, use_cpp=True)
    self.collect_declarations()
import sys

####################################################################################################

from pycparser import parse_file, c_ast, c_generator

####################################################################################################

class NodeVisitor(c_ast.NodeVisitor):
    """Dumps every function declaration it encounters (currently unused)."""

    ##############################################

    def visit_FuncDecl(self, node):
        print('\nFunction Declaration at %s' % (node.coord))
        node.show()

####################################################################################################

if __name__ == "__main__":
    # Parse the file named on the command line (no preprocessing) and
    # print it back as C.
    source = sys.argv[1]
    tree = parse_file(source, use_cpp=False)
    emitter = c_generator.CGenerator()
    print(emitter.visit(tree))
# pycparser: using_gcc_E_libc.py
#
# Similar to the using_cpp_libc.py example, but uses 'gcc -E' instead
# of 'cpp'. The same can be achieved with Clang instead of gcc. If you have
# Clang installed, simply replace 'gcc' with 'clang' here.
#
# Copyright (C) 2008-2014, Eli Bendersky
# License: BSD
#-------------------------------------------------------------------------------
import sys

from pycparser import parse_file

if __name__ == "__main__":
    # Use the file given on the command line, or fall back to the sample.
    target = sys.argv[1] if len(sys.argv) > 1 else 'c_files/year.c'
    tree = parse_file(target, use_cpp=True,
                      cpp_path='gcc',
                      cpp_args=['-E', r'-I../utils/fake_libc_include'])
    tree.show()
else: false_expr = z3.And(z3.Not(cond_exp), g.as_expr()) g = z3.Goal() g.add(z3.Or(true_expr, false_expr)) g = t(g) # g.simplify() else: return prev_g # print(g.as_expr(), "\n") return g if __name__ == "__main__": c_fname = sys.argv[1] ast = pycp.parse_file(c_fname, use_cpp=True, cpp_path='gcc', cpp_args=['-E', r'-Iutils/fake_libc_include']) # ast.show() vars = {} main_func = None for e in ast.ext: if isinstance(e, pycp.c_ast.FuncDef) and e.decl.name == "main": main_func = e break if main_func is None: raise("no main function")