def parseModule(self, module_name, file_name): importing = False if not self.parse_cache.has_key(file_name): importing = True if self.chain_plat: mod, _ov = self.chain_plat.parseModule(module_name, file_name) else: mod = compiler.parseFile(file_name) self.parse_cache[file_name] = mod else: mod = self.parse_cache[file_name] override = False platform_file_name = self.generatePlatformFilename(file_name) print "platform", platform_file_name if self.platform and os.path.isfile(platform_file_name): mod = copy.deepcopy(mod) mod_override = compiler.parseFile(platform_file_name) if self.verbose: print "Merging", module_name, self.platform merge(module_name, mod, mod_override) override = True if self.verbose: if override: print "Importing %s (Platform %s)" % (module_name, self.platform) elif importing: print "Importing %s" % (module_name) if override: return mod, platform_file_name return mod, file_name
def validate_syntax(directory_name):
    """Syntax-check every .py file in the given iterable of file paths.

    Exits the process with status 1 on the first file that fails to parse.
    """
    for file in directory_name:
        # endswith('.py') replaces the fragile substring test
        # ('.py' in file and '.pyc' not in file), which also matched
        # names like 'x.py.bak'.
        if file.endswith('.py'):
            try:
                compiler.parseFile(file)
            except Exception as e:
                # "occurred" typo fixed in the log message.
                logger.error('An error occurred while validating build syntax in file: {}, error: {}'.format(file, e))
                sys.exit(1)
def parseModule(self, module_name, file_name): if self.parse_cache.has_key(file_name): mod = self.parse_cache[file_name] else: print "Importing " + module_name mod = compiler.parseFile(file_name) self.parse_cache[file_name] = mod platform_file_name = self.generatePlatformFilename(file_name) if self.platform and os.path.isfile(platform_file_name): mod = copy.deepcopy(mod) mod_override = compiler.parseFile(platform_file_name) self.merge(mod, mod_override) return mod
def main(): measure = [] program = open("code.txt").read() program = program.replace("\n", "\\n") program = program.replace("\t", "\\t") for i in range(1000): start = time.time() compiler.parseFile("code_py.txt") end = time.time() measure.append(end - start) sqyt = float(sum(measure) / len(measure)) print "py time >>", sqyt
def measure_file_complexity(filename):
    """Returns a FlatStats object for the contents of the file at filename."""
    modulename = utils.splitpath_root_file_ext(filename)[1]
    tree = compiler.parseFile(filename)
    tree.name = modulename
    return FlatStats(CCVisitor(tree).stats)
def _get_all_imported_modulenames(self, filename):
    """Compiles an AST for filename and returns the module names for all
    modules imported by.

    If no file for filename exists, or it is an unparseable file (pyd,
    pyc), return an empty list.  If the file cannot be parsed, append to
    self.failed and return an empty list.
    """
    # Only plain .py source can be read right now.
    if filename.endswith('.pyd'):
        return []
    if filename.endswith(('.pyc', '.pyo')):
        filename = filename[:-1]
    elif not os.path.splitext(filename)[1]:
        # No extension whatsoever - assume a source file.
        filename = filename + '.py'
    if not os.path.exists(filename):
        return []
    try:
        astnode = compiler.parseFile(filename)
    except SyntaxError:
        self.failed.append(self._extless(filename))
        return []
    every_node = utils.flatten(astnode, lambda node: node.getChildNodes())
    candidate_names = itertools.imap(self._extract_modulename, every_node)
    return itertools.ifilter(None, candidate_names)
def load_file(klass, modname, path, modpath, level=0):
    """Load (or fetch from cache) the module object for the file at path.

    Counts lines and files, parses the source, tags every AST node with
    its owning module, and returns the module object.
    Raises ModuleNotFound when the file cannot be read.
    """
    path = os.path.normpath(path)
    if path in klass.PATH2MODULE:
        module = klass.PATH2MODULE[path]
    else:
        if klass.verbose:
            print >> sys.stderr, ' ' * level + 'loading: %r as %r' % (
                path, modname)
        module = PythonModuleObject(modname, klass.DEFAULT_NAMESPACE,
                                    path, modpath, level=level)
        klass.PATH2MODULE[path] = module
        try:
            # "with" closes the handle even if counting raises mid-file
            # (the old file() handle leaked on that path).
            with open(path) as fp:
                for _ in fp:
                    klass.lines += 1
            klass.files += 1
            tree = compiler.parseFile(path)
        except IOError:
            raise ModuleNotFound(modname, path)

        def rec(n):
            # Tag every node, recursively, with the module that owns it.
            n._module = module
            for c in n.getChildNodes():
                rec(c)
            return

        rec(tree)
        module.set(tree)
    return module
def get_tests_from_fs(parent_dir, control_pattern, add_noncompliant=False): """Find control jobs in location and create one big job Returns: dictionary of the form: tests[file_path] = parsed_object """ tests = {} profilers = False if 'client/profilers' in parent_dir: profilers = True for dir in [ parent_dir ]: files = recursive_walk(dir, control_pattern) for file in files: if '__init__.py' in file or '.svn' in file: continue if not profilers: if not add_noncompliant: try: found_test = control_data.parse_control(file, raise_warnings=True) tests[file] = found_test except control_data.ControlVariableException, e: print "Skipping %s\n%s" % (file, e) pass else: found_test = control_data.parse_control(file) tests[file] = found_test else: script = file.rstrip(".py") tests[file] = compiler.parseFile(file).doc
def splitdocfor(path):
    """split the docstring for a path

    valid paths are::

        ./path/to/module.py
        ./path/to/module.py:SomeClass.method

    returns (description, long_description) from the docstring for path
    or (None, None) if there isn't a docstring.

    Example::

        >>> splitdocfor("./wsgi_intercept/__init__.py")[0]
        'installs a WSGI application in place of a real URI for testing.'
        >>> splitdocfor("./wsgi_intercept/__init__.py:WSGI_HTTPConnection.get_app")[0]
        'Return the app object for the given (host, port).'
        >>>

    """
    filename, objpath = path.split(':') if ':' in path else (path, None)
    inspector = DocInspector(filename)
    visitor.walk(compiler.parseFile(filename), inspector)
    doc = inspector.top_level_doc if objpath is None else inspector[objpath]
    if doc is None:
        return None, None
    return pydoc.splitdoc(doc)
def routes_from_module(module_name, prepath=''):
    """ Parse a module that contains werkzeug rules and handlers.

    This will both import the module (so that symbols can be resolved)
    and parses the file itself (since I do not know how I can extract
    decorator arguments out of a compiled code object)

    :param module_name: the module name separated by dots
    :type module_name: ``str``
    :param prepath: the prepath to use
    :return: the routes contained in the module
    :rtype: ``list`` (see :class:`RouteFindingASTVisitor`)
    """
    module = import_module(module_name)
    # this seems fragile; r'...' makes the regex an explicit raw string
    # instead of relying on '\.' not being a recognized escape.
    filename = re.sub(r'\.pyc$', '.py', module.__file__)
    tree = parseFile(filename)
    routes = []
    route_visitor = RouteFindingASTVisitor(routes, vars(module), prepath)
    walk(tree, route_visitor, walker=route_visitor)
    return routes
def __init__(self, filename, locals=None, errhandler=None): self.filename = filename # If the open is going to fail, we want it to happen as early # as possible. self.fileobj = open(self.filename) _allLoaders[filename] = self code.InteractiveInterpreter.__init__(self, locals) # self.errhandler is set this way, and not via a default # argument, because excepthook.displayTraceBack may change # between class definition and instantiation. self.errhandler = errhandler or excepthook.displayTraceBack # Parse the file and find the python blocks. We need to find # the blocks first, so that we don't accidentally execute an # "if" and strand its "else". try: modj = compiler.parseFile(self.filename) except SyntaxError: self.showsyntaxerror(self.filename) raise # modj is an AST.Module, modj.node is an AST.Stmt blocknodes = modj.node.nodes # generic list of AST.Nodes # self.blocks is a list of line numbers where blocks start self.blocks = [_findLineNo(node) for node in blocknodes] self.error = None
def load_file(klass, modname, path, modpath, level=0):
    """Load (or fetch from cache) the module object for the file at path.

    Counts lines and files, parses the source, tags every AST node with
    its owning module, and returns the module object.
    Raises ModuleNotFound when the file cannot be read.
    """
    path = os.path.normpath(path)
    if path in klass.PATH2MODULE:
        module = klass.PATH2MODULE[path]
    else:
        if klass.verbose:
            print >>sys.stderr, " " * level + "loading: %r as %r" % (path, modname)
        module = PythonModuleObject(modname, klass.DEFAULT_NAMESPACE,
                                    path, modpath, level=level)
        klass.PATH2MODULE[path] = module
        try:
            # "with" closes the handle even if counting raises mid-file
            # (the old file() handle leaked on that path).
            with open(path) as fp:
                for _ in fp:
                    klass.lines += 1
            klass.files += 1
            tree = compiler.parseFile(path)
        except IOError:
            raise ModuleNotFound(modname, path)

        def rec(n):
            # Tag every node, recursively, with the module that owns it.
            n._module = module
            for c in n.getChildNodes():
                rec(c)
            return

        rec(tree)
        module.set(tree)
    return module
def from_filenames(cls, filenames, logger=None):
    """Build and return a visitor populated from a list of source files."""
    v = cls(logger)
    v.module_names = {}
    # Pass 0: map each short (last-component) name to its full module name.
    for filename in filenames:
        mod_name = get_module_name(filename)
        v.module_names[mod_name.rsplit('.', 1)[-1]] = mod_name
    # Walk the whole file set TWICE so forward references are picked up.
    for filename in filenames + filenames:
        tree = compiler.parseFile(filename)
        v.module_name = get_module_name(filename)
        sym_visitor = compiler.symbols.SymbolVisitor()
        compiler.walk(tree, sym_visitor)
        v.scopes = sym_visitor.scopes
        compiler.walk(tree, v)
    v.contract_nonexistents()
    v.expand_unknowns()
    v.cull_inherited()
    return v
def driver(self):
    """Flatten the AST, emit x86, then run liveness analysis and
    graph-coloring register allocation, and finally transform the code."""
    #Product flatten P0
    self.requestUniqueTmpName()
    self.flattenAST(self.ast)
    f = open("flat.py", "w")
    for i in range(0, len(self.flattenStatements)):
        f.write(str(self.flattenStatements[i]) + "\n")
    f.close()
    #Parse flat.py
    #Uncomment line below when using the standard parser again
    ast = compiler.parseFile("flat.py")
    #ast = parser.driver("flat.py")
    self.compile(ast)
    #Output asm file
    fileOutName = str(sys.argv[1])[0:-3] + ".s"
    f = open(fileOutName, "w")
    #Write out setup and tear down
    f.write(".globl main\nmain:\n pushl %ebp\n movl %esp, %ebp\n")
    for i in range(0, len(self.x86Statements)):
        f.write(" " + self.x86Statements[i] + "\n")
    f.write(" movl $0, %eax\n leave\n ret\n")
    f.close()
    #Begin live analysis: re-open the asm file we just wrote
    asmFile = open(fileOutName, "r")
    x86Statement = asmFile.readlines()  # :[x86 statements]
    asmFile.close()
    #Lex each line and turn it into sublists of tokens
    l = []
    for i in range(0, len(x86Statement)):
        regexStr = re.sub(',', '', x86Statement[i])
        l.append(regexStr.split())
    #Create live object
    liveObj = Live(l)
    listOfLiveSet = liveObj.driver()
    # Bug fix: list.reverse() reverses in place and returns None; the old
    # code assigned that None to a mis-cased variable (listOfLiveset).
    # Reverse so nodes are passed into the graph top-to-bottom of the file.
    listOfLiveSet.reverse()
    #Begin graph / color analysis
    for x in listOfLiveSet:
        addNode(x)
    #convert dictOfNodes from strings to nodes
    convertStingsToNodes(dictOfNodes)
    #Color the nodes -- add registers to each node
    newStackSize = colorNodes(dictOfNodes)  #Need to *4
    #Begin transformation
    self.transform(newStackSize, l)
    #Clean up
    remove("flat.py")
def check_file(self, filename): self.current_filename = filename try: ast = compiler.parseFile(filename) except SyntaxError, error: print "SyntaxError on file %s:%d" % (filename, error.lineno) return
def scan_file(options, filename, module_name): """ return module document object for the given file """ f = open(filename, 'rt') text = f.read() f.close() # buffer with dummy line to make line count accurate, code_lines = [ '(dummy)' ] + text.split('\n') # try parsing file att = 0 while att < 2: att += 1 try: # warnings.filterwarnings('ignore') ast = compiler.parseFile(filename) # warnings.resetwarnings() break except SyntaxError, exc: # for some reason, some modules need a CR added... if att < 2: text = text + '\n' else: sys.stderr.write("Syntax error %s:%d:%d: %s.\n" % (filename, exc.lineno, exc.offset, exc.text)) return None
def function_and_class_names_in_file(file_name):
    """ Find the names of top-level functions and classes within a file.

        fix me: nested classes/functions are ignored.

        Example::

            # foo.py
            class Foo:
                pass
            class Bar:
                pass

            >>> import traits.util.refresh
            >>> refresh.function_and_class_names_in_file('foo.py')
            [], ['Foo', 'Bar']
    """
    mod_ast = compiler.parseFile(file_name)
    class_names = []
    function_names = []
    for node in mod_ast.node.nodes:
        node_type = node.__class__
        if node_type is compiler.ast.Class:
            class_names.append(node.name)
        elif node_type is compiler.ast.Function:
            function_names.append(node.name)
    return function_names, class_names
def find_version(fn):
    """ Try to find a __version__ assignment in a source file """
    # NOTE(review): this early return short-circuits the whole function,
    # so every caller gets "0.0.0" and the AST search below is dead code.
    # Presumably a deliberate stub - confirm before removing it.
    return "0.0.0"
    import compiler
    from compiler.ast import Module, Stmt, Assign, AssName, Const
    ast = compiler.parseFile(fn)
    if not isinstance(ast, Module):
        raise ValueError("expecting Module")
    statements = ast.getChildNodes()
    if not (len(statements) == 1 and isinstance(statements[0], Stmt)):
        raise ValueError("expecting one Stmt")
    # Scan top-level statements for: __version__ = <constant>
    for node in statements[0].getChildNodes():
        if not isinstance(node, Assign):
            continue
        if not len(node.nodes) == 1:
            continue
        assName = node.nodes[0]
        if not (
            isinstance(assName, AssName)
            and isinstance(node.expr, Const)
            and assName.flags == 'OP_ASSIGN'
            and assName.name == '__version__'
        ):
            continue
        return node.expr.value
    else:
        # for/else: no matching assignment found anywhere in the module.
        raise ValueError("Version not found")
def testLineNo(self):
    """Every node except Module must carry a correct lineno attribute."""
    # Point at the .py source even when running from a .pyc/.pyo.
    filename = __file__
    for ext in (".pyc", ".pyo"):
        if filename.endswith(ext):
            filename = filename[:-1]
            break
    tree = compiler.parseFile(filename)
    self.check_lineno(tree)
def update_from_whitelist(whitelist_set, add_experimental, add_noncompliant, autotest_dir): """ Scans through all tests in the whitelist and add them to the database. This function invoked when -w supplied. :param whitelist_set: set of tests in full-path form from a whitelist. :param add_experimental: add tests with experimental attribute set. :param add_noncompliant: attempt adding test with invalid control files. :param autotest_dir: prepended to path strings (see global_config.ini, COMMON, autotest_top_path). """ tests = {} profilers = {} for file_path in whitelist_set: if file_path.find('client/profilers') == -1: try: found_test = control_data.parse_control(file_path, raise_warnings=True) tests[file_path] = found_test except control_data.ControlVariableException, e: logging.warn("Skipping %s\n%s", file, e) else: profilers[file_path] = compiler.parseFile(file_path).doc
def get_tests_from_fs(parent_dir, control_pattern, add_noncompliant=False): """ Find control files in file system and load a list with their info. :param parent_dir: directory to search recursively. :param control_pattern: name format of control file. :param add_noncompliant: ignore control file parse errors. :return: dictionary of the form: tests[file_path] = parsed_object """ tests = {} profilers = False if "client/profilers" in parent_dir: profilers = True for dir in [parent_dir]: files = recursive_walk(dir, control_pattern) for file in files: if "__init__.py" in file or ".svn" in file: continue if not profilers: if not add_noncompliant: try: found_test = control_data.parse_control(file, raise_warnings=True) tests[file] = found_test except control_data.ControlVariableException, e: logging.warn("Skipping %s\n%s", file, e) except Exception, e: logging.error("Bad %s\n%s", file, e) else: found_test = control_data.parse_control(file) tests[file] = found_test else: tests[file] = compiler.parseFile(file).doc
def testLineNo(self):
    """All nodes except Module must have a correct lineno attribute."""
    # endswith() accepts a tuple, replacing the chained `or` of two calls.
    filename = __file__
    if filename.endswith((".pyc", ".pyo")):
        filename = filename[:-1]
    tree = compiler.parseFile(filename)
    self.check_lineno(tree)
def main(): # try: # argv1_str = str(sys.argv[1]) # except: # print("Why you no give me argv 1 >:(") # print("Example call to pyyc:") # print("./pyyc tests/test1.py") # sys.exit() #New flow ast -> flat(ast) -> heapify(ast) -> -> explicate(ast) -> compile(ast) #Get standard AST from file #ast = compiler.parseFile(argv1_str) name = sys.argv[1] ast = compiler.parseFile(name) flat = flatten(ast) print ast ColorPrint(flat, MAGENTA) #heap = heapify(flat) #compile(flat, "test.py") heapAST = heap(flat) ColorPrint(heapAST, CYAN) (closed, closureMap) = close(heapAST) ColorPrint(closed, YELLOW) (explicit, stackFrame) = entry(closed) #print stackFrame ColorPrint(explicit, GREEN) compileAST = compile(explicit, name, stackFrame, closureMap)
def check_i18n(input_file, i18n_msg_predicates, msg_format_checkers, debug):
    """Walk *input_file*'s AST with the i18n Visitor and return its error."""
    tree = compiler.parseFile(input_file)
    checker = Visitor(input_file, i18n_msg_predicates,
                      msg_format_checkers, debug)
    return compiler.visitor.walk(tree, checker, ASTWalker()).error
def main():
    """Compile the .py file named on the command line into a .s file.

    Returns 0 on success, 1 on bad usage.
    """
    platform = sys.platform
    #print 'Running on a',platform
    if (len(sys.argv) != 2):
        # Bug fix: `argv` was an undefined name here - use sys.argv.
        sys.stderr.write(str(sys.argv[0]) + " requires two arguments\n")
        return 1
    inputFile = sys.argv[1]
    inputFilePath = str(sys.argv[1])
    if (inputFilePath[-3:] != ".py"):
        sys.stderr.write(str(sys.argv[0]) + " input file must be of type *.py\n")
        return 1
    # Output name: basename of the input with .py replaced by .s
    outputFilePath = inputFilePath.split('/')
    outputFileName = (outputFilePath[-1:])[0]
    outputFileName = outputFileName[:-3] + ".s"
    ast = compiler.parseFile(inputFile)
    fast = flatten(ast)
    assembly = instr_select(fast)
    write_to_file(map(str, assembly), outputFileName)
    return 0
def get_tests_from_fs(parent_dir, control_pattern, add_noncompliant=False): """ Find control files in file system and load a list with their info. @param parent_dir: directory to search recursively. @param control_pattern: name format of control file. @param add_noncompliant: ignore control file parse errors. @return dictionary of the form: tests[file_path] = parsed_object """ tests = {} profilers = False if 'client/profilers' in parent_dir: profilers = True for dir in [parent_dir]: files = recursive_walk(dir, control_pattern) for file in files: if '__init__.py' in file or '.svn' in file: continue if not profilers: if not add_noncompliant: try: found_test = control_data.parse_control( file, raise_warnings=True) tests[file] = found_test except control_data.ControlVariableException, e: logging.warn("Skipping %s\n%s", file, e) except Exception, e: logging.error("Bad %s\n%s", file, e) else: found_test = control_data.parse_control(file) tests[file] = found_test else: tests[file] = compiler.parseFile(file).doc
def update_from_whitelist(whitelist_set, add_experimental, add_noncompliant,
                          autotest_dir):
    """
    Scans through all tests in the whitelist and add them to the database.

    This function invoked when -w supplied.

    :param whitelist_set: set of tests in full-path form from a whitelist.
    :param add_experimental: add tests with experimental attribute set.
    :param add_noncompliant: attempt adding test with invalid control files.
    :param autotest_dir: prepended to path strings (see global_config.ini,
                         COMMON, autotest_top_path).
    """
    tests = {}
    profilers = {}
    for file_path in whitelist_set:
        if file_path.find('client/profilers') == -1:
            try:
                found_test = control_data.parse_control(file_path,
                                                        raise_warnings=True)
                tests[file_path] = found_test
            except control_data.ControlVariableException as e:
                # Bug fix: log the loop variable, not the builtin `file`.
                logging.warn("Skipping %s\n%s", file_path, e)
        else:
            profilers[file_path] = compiler.parseFile(file_path).doc
    if len(tests) > 0:
        update_tests_in_db(tests, add_experimental=add_experimental,
                           add_noncompliant=add_noncompliant,
                           autotest_dir=autotest_dir)
    if len(profilers) > 0:
        update_profilers_in_db(profilers, add_noncompliant=add_noncompliant,
                               description='NA')
def main():
    """Compile the .py file named on the command line into a .s file.

    Returns 0 on success, 1 on bad usage.
    """
    platform = sys.platform
    #print 'Running on a',platform
    if (len(sys.argv) != 2):
        # Bug fix: `argv` was an undefined name here - use sys.argv.
        # (Also dropped stray trailing semicolons.)
        sys.stderr.write(str(sys.argv[0]) + " requires two arguments\n")
        return 1
    inputFile = sys.argv[1]
    inputFilePath = str(sys.argv[1])
    if (inputFilePath[-3:] != ".py"):
        sys.stderr.write(str(sys.argv[0]) + " input file must be of type *.py\n")
        return 1
    # Output name: basename of the input with .py replaced by .s
    outputFilePath = inputFilePath.split('/')
    outputFileName = (outputFilePath[-1:])[0]
    outputFileName = outputFileName[:-3] + ".s"
    ast = compiler.parseFile(inputFile)
    fast = flatten(ast)
    assembly = instr_select(fast)
    write_to_file(map(str, assembly), outputFileName)
    return 0
def function_and_class_names_in_file(file_name):
    """ Find the names of top-level functions and classes within a file.

        fix me: nested classes/functions are ignored.

        Example::

            # foo.py
            class Foo:
                pass
            class Bar:
                pass

            >>> import enthought.util.refresh
            >>> refresh.function_and_class_names_in_file('foo.py')
            [], ['Foo', 'Bar']
    """
    mod_ast = compiler.parseFile(file_name)
    class_names = []
    function_names = []
    # Dispatch each top-level node by its concrete AST class.
    buckets = {compiler.ast.Class: class_names,
               compiler.ast.Function: function_names}
    for node in mod_ast.node.nodes:
        bucket = buckets.get(node.__class__)
        if bucket is not None:
            bucket.append(node.name)
    return function_names, class_names
def find_version(fn):
    """ Try to find a __version__ assignment in a source file """
    # NOTE(review): this early return short-circuits the whole function,
    # so every caller gets "0.0.0" and the AST search below is dead code.
    # Presumably a deliberate stub - confirm before removing it.
    return "0.0.0"
    import compiler
    from compiler.ast import Module, Stmt, Assign, AssName, Const
    ast = compiler.parseFile(fn)
    if not isinstance(ast, Module):
        raise ValueError("expecting Module")
    statements = ast.getChildNodes()
    if not (len(statements) == 1 and isinstance(statements[0], Stmt)):
        raise ValueError("expecting one Stmt")
    # Scan top-level statements for: __version__ = <constant>
    for node in statements[0].getChildNodes():
        if not isinstance(node, Assign):
            continue
        if not len(node.nodes) == 1:
            continue
        assName = node.nodes[0]
        if not (isinstance(assName, AssName) and
                isinstance(node.expr, Const) and
                assName.flags == 'OP_ASSIGN' and
                assName.name == '__version__'):
            continue
        return node.expr.value
    else:
        # for/else: no matching assignment found anywhere in the module.
        raise ValueError("Version not found")
def serialize_ast(file_name):
    """Parse a file from the tests folder and return its serialized AST."""
    import compiler
    buf = StringIO.StringIO()
    tree = compiler.parseFile(os.path.join(TESTS_FOLDER, file_name))
    serialize(tree, 0, '', buf.write)
    return buf.getvalue()
def process_file(self, filename):
    """Collect a ClassInfo for every class defined in *filename*.

    Files that fail to parse are silently skipped.
    """
    try:
        ast = compiler.parseFile(filename)
    except SyntaxError:
        return
    for node in self._all_class_nodes(ast):
        # Bug fix: the comprehension iterated `namenode` but resolved the
        # name from `node`, so every base was reported as the class
        # itself. Resolve each base node instead.
        ci = ClassInfo(filename, node.name,
                       [self._get_classname_from_node(namenode)
                        for namenode in node.bases])
        self._classinfos.append(ci)
def main(): s = sys.argv[1] ast = compiler.parseFile(s) if ast.doc: o = copy(ast.doc) print ";; " + o.replace("\n", "\n;; ") for i in ast.node: print dispatch(i)
def check(self, file, unused_checker): try: file.parseTree = parseFile(file.name) # link each node to it's parent _parent_link(file.parseTree) file.parseTree.parent = None except parser.ParserError, detail: file.warning(1, self.syntaxErrors, detail.args[0])
def translate(modulename, modulepath, outstream):
    """Parse *modulepath*, walk it with a SwanVisitor, and return the
    visitor's (deps, out) pair."""
    ast = parseFile(modulepath)
    v = SwanVisitor(modulename, outstream)
    # NOTE(review): `w` is created and configured but never used - the
    # walk() call below passes `v` as its own walker. Looks like `w` was
    # meant to be the walker argument; confirm before changing.
    w = ExampleASTVisitor()
    w.VERBOSE = 1
    walk(ast, v, v)
    #print v.out
    return (v.deps, v.out)
def find_imports(filename):
    """Find all imported names in a given file.

    Returns a list of ImportInfo objects.
    """
    finder = ImportFinder(filename)
    compiler.walk(compiler.parseFile(filename), finder)
    return finder.imports
def normalize_file(filename, *args): """ Import-normalize a file. If the file is not parseable, an empty filelike object will be returned. """ try: ast = compiler.parseFile(filename) except Exception, e: return StringIO('')
def check_i18n(input_file, i18n_msg_predicates, msg_format_checkers, debug):
    """Run the i18n Visitor over *input_file* and return its error result."""
    input_mod = compiler.parseFile(input_file)
    i18n_visitor = Visitor(input_file, i18n_msg_predicates,
                           msg_format_checkers, debug)
    result = compiler.visitor.walk(input_mod, i18n_visitor, ASTWalker())
    return result.error
def get_module_meta(modfile):
    """Return (version, short_doc, long_doc) extracted from a module file.

    Raises RuntimeError when the docstring or __version__ is missing.
    """
    mv = ModuleVisitor()
    visitor.walk(compiler.parseFile(modfile), mv)
    if mv.mod_doc is None:
        raise RuntimeError("could not parse doc string from %s" % modfile)
    if mv.mod_version is None:
        raise RuntimeError("could not parse __version__ from %s" % modfile)
    return (mv.mod_version, ) + pydoc.splitdoc(mv.mod_doc)
def __init__(self, codefile):
    # Parse and flatten the source, lower it to x86 IR, then build the
    # interference graph and run register allocation over that IR.
    flatAST = python_ast().flatten(compiler.parseFile(codefile))
    x86IR = x86Selector(flatAST)
    self.my_graph = interferenceGraph(x86IR.getIR())
    #self.my_graph.drawEdges()
    #self.my_graph.doColor()
    #print my_graph.printGraph()
    #x86IRObj.setIR(self.my_graph.getIR())
    self.my_graph.allocateRegisters()
def readConfigSafe(path):
    """Read a config file by parsing (never executing) it and return a
    dict mapping each assigned name to its constant value, or to a list
    of constants for sequence assignments."""
    tree = compiler.parseFile(path)
    result = dict()
    for assignment in tree.asList()[1].asList():
        parts = assignment.asList()
        target_name = parts[0].name
        rhs = parts[1]
        if hasattr(rhs, "value"):
            result[target_name] = rhs.value
        else:
            result[target_name] = [n.value for n in rhs.nodes]
    return result
def compile():
    """Flatten the input program's AST and emit LLVM IR on stdout."""
    #check that the user has given exactly one file as input
    assert(len(sys.argv) == 2);
    #the file to interpret is the first argument given
    filePath = sys.argv[1]
    #abstract syntax tree for the contents of the file
    ast = compiler.parseFile(filePath)
    #print ast
    #print "original ast: ", ast, "\n ********"
    #flatten the ast
    #(fill the flatStmts tree with assignment statements)
    flatten(ast)
    #print variables
    #print flatStmts
    # Emit the fixed runtime support: input() and print_int_nl() helpers.
    print '@.str = private unnamed_addr constant [3 x i8] c"%d\\00", align 1'
    print '@.str1 = private unnamed_addr constant [4 x i8] c"%d\\0A\\00", align 1'
    print 'define i32 @input() nounwind uwtable ssp {'
    print ' %n = alloca i32, align 4'
    print ' %1 = call i32 (i8*, ...)* @scanf(i8* getelementptr inbounds ([3 x i8]* @.str, i32 0, i32 0), i32* %n)'
    print ' %2 = load i32* %n, align 4'
    print ' ret i32 %2'
    print '}\n'
    print 'declare i32 @scanf(i8*, ...)\n'
    print 'define i32 @print_int_nl(i32 %x) nounwind uwtable ssp {'
    print ' %1 = alloca i32, align 4'
    print ' store i32 %x, i32* %1, align 4'
    print ' %2 = load i32* %1, align 4'
    print ' %3 = call i32 (i8*, ...)* @printf(i8* getelementptr inbounds ([4 x i8]* @.str1, i32 0, i32 0), i32 %2)'
    print ' ret i32 0'
    print '}\n'
    print 'declare i32 @printf(i8*, ...)'
    #output first line needed for the .ll file
    print "define i32 @main() nounwind uwtable ssp {"
    #print "statements list after flattening: ", flatStmts
    alloc()
    #TODO: before generating the llvm code from the statements,
    #iterate over the flatStmts list and generate
    #an alloca for each variable (so you don't have to worry about it later)
    #iterate through all of the statments generated by the
    #flattening (located in flatStmts list)
    #and generate LLVM code
    for s in flatStmts:
        astToLLVM(s, None)
    #output what you need for the end of the main function in the .ll file
    print " "+"ret i32 0"
    print "}"
    print "declare double @floor(double) nounwind readnone"
    print "declare double @llvm.pow.f64(double, double) nounwind readonly"
def run(filename, excluded=None):
    """Run every registered checker over *filename* and print warnings.

    :param filename: source file to check.
    :param excluded: iterable of warning ids to suppress (default: none).
    """
    # None-sentinel replaces the mutable default argument `excluded=[]`.
    if excluded is None:
        excluded = []
    exstatic.cspwarnings.reset_errors()
    for checker in checkers:
        lint = checker(filename)
        # File is re-parsed per checker (as before) so each checker gets
        # its own tree - presumably checkers may mutate it; confirm.
        compiler.walk(compiler.parseFile(filename), lint,
                      walker=lint, verbose=5)
    exstatic.cspwarnings.print_errors(excluded=excluded)
    return
def parse(self):
    """Run the base category parse, then parse each top-level class
    definition found in the file."""
    CategoryParser.parse(self)
    # Get the root node:
    root = compiler.parseFile(self.filepath).node
    # Parse any class objects:
    for child in root.getChildren():
        if isinstance(child, compiler.ast.Class):
            self.parseClass(child)
def main(): usage = """usage: %prog FILENAME... [--dot|--tgf]""" desc = """Analyse one or more Python source files and generate an approximate call graph of the modules, classes and functions within them.""" parser = OptionParser(usage=usage, description=desc) parser.add_option("--dot", action="store_true", default=False, help="output in GraphViz dot format") parser.add_option("--tgf", action="store_true", default=False, help="output in Trivial Graph Format") parser.add_option("-v", "--verbose", action="store_true", default=False, dest="verbose", help="verbose output") options, args = parser.parse_args() filenames = [fn2 for fn in args for fn2 in glob(fn)] if len(args) == 0: parser.error('Need one or more filenames to process') if not options.verbose: global verbose_output verbose_output = lambda msg: None v = CallGraphVisitor() v.module_names = {} # First find module full names for all files for filename in filenames: mod_name = get_module_name(filename) short_name = mod_name.rsplit('.', 1)[-1] v.module_names[short_name] = mod_name # Process the set of files, TWICE: so that forward references are picked up for filename in filenames + filenames: ast = compiler.parseFile(filename) module_name = get_module_name(filename) v.module_name = module_name s = compiler.symbols.SymbolVisitor() compiler.walk(ast, s) v.scopes = s.scopes compiler.walk(ast, v) v.contract_nonexistents() v.expand_unknowns() v.cull_inherited() if options.dot: print v.to_dot() if options.tgf: print v.to_tgf()
def extract_funcinfos(*filenames):
    """Return FuncInfo objects for every function found in the files
    that parse successfully; unparseable files are skipped."""
    result = []
    for fname in filenames:
        try:
            tree = compiler.parseFile(fname)
        except SyntaxError:
            continue
        result.extend(FuncInfo(fname, node) for node in all_func_nodes(tree))
    return result
def normalize_file(filename, *args):
    """
    Import-normalize a file. If the file is not parseable, an empty
    filelike object will be returned.
    """
    try:
        tree = compiler.parseFile(filename)
    except Exception:
        return StringIO('')
    puller = ImportPuller()
    walk(tree, puller)
    return StringIO(puller.as_string())
def pull_imports(f):
    """Pull a list of all the modules imported in a script."""
    tree = compiler.parseFile(f)
    imps = set()
    for n in tree.node.nodes:
        # Classify by the node's class name ("Import" / "From").
        node_type = str(n.__class__).split(".")[-1]
        if node_type == "Import":
            imps.update(m[0] for m in n.names)
        elif node_type == "From":
            imps.add(n.modname)
    # Keep only the top-level package name of each import.
    return [m.split(".")[0] for m in imps]
def compile_file(file_name, output_name):
    """Full pipeline: parse -> explicate -> flatten -> instruction
    selection -> register allocation -> destructure, then write the
    resulting assembly to output_name."""
    tree = compiler.parseFile(file_name)
    asm = instr_select(flatten(explicate(tree)))
    asm = regalloc.regalloc(asm)
    asm = destructure(asm)
    text = '.globl main\nmain:\n\t' + '\n\t'.join(map(str, asm)) + '\n'
    output_file = open(output_name, 'w+')
    output_file.write(text)
    output_file.close()