def test(modules=None):
    import os
    import shutil
    import ast as ast_mod
    if not modules:
        #d = 'C:\\Python25\\Lib\\'
        d = '/usr/lib/python2.7/'
        modules = [d + f for f in os.listdir(d) if f.endswith('.py')]
    for f in modules:
        shutil.copy(f, 'input.py')
        msg = '%s...' % os.path.basename(f)
        ast, comments = parse_with_comments(f)
        print_code_and_ast(f, ast, comments)
        print
        print
        print ast_mod.dump(ast_mod.parse(file(f).read()), 0)
        try:
            ast2py(ast, comments)
        except:
            # record the failure before re-raising so the message is not lost
            msg += 'failed converting AST back to python code'
            raise
        try:
            if f == 'parser_test_source.py':
                print; print
                print file('output.py').read()  # see the results
            parse(file('output.py').read())
            msg += 'OK'
        except:
            msg += 'failed to parse generated code'
            raise
        print msg
        break
def main(argv):
    args = argv[1:]
    for path in args:
        if path.endswith('.zip'):
            zf = zipfile.ZipFile(path)
            for name in zf.namelist():
                sys.stderr.write('parsing: %r\n' % name)
                sys.stderr.flush()
                data = zf.read(name)
                try:
                    tree = compiler.parse(data)
                    traverse(tree)
                except SyntaxError, e:
                    print 'error:', name, e
            zf.close()
        else:
            sys.stderr.write('parsing: %r\n' % path)
            sys.stderr.flush()
            fp = open(path, 'rb')
            data = fp.read()
            try:
                tree = compiler.parse(data)
                traverse(tree)
            except SyntaxError, e:
                # report the file path here; `name` is only defined in the zip branch
                print 'error:', path, e
            fp.close()
def run(self, grammar):
    self.message = self.name + ': '
    if self.code == '':
        self.message = self.message + 'No test set'
        return
    try:
        try:
            tree = str(compiler.parse(self.code))
        except SyntaxError:
            self.message = self.message + """Error in test.\n""" + self.code
            raise EscapeException()
        try:
            matcher = grammar(tree)
            generated = matcher.apply('any')
        except ParseError:
            self.message = self.message + """Error in grammar.\n""" + self.code + """\n\n""" + tree
            raise EscapeException()
        try:
            assert str(compiler.parse(generated)) == tree
        except AssertionError:
            self.message = self.message + """Error, generated code does not match original.\n""" + self.code + """\n\n""" + tree + """\n\n""" + generated
            raise EscapeException()
        except SyntaxError:
            self.message = self.message + """Error in generated code.\n""" + self.code + """\n\n""" + tree + """\n\n""" + generated
            raise EscapeException()
        self.message = self.message + "OK"
        self.result = True
    except EscapeException:
        pass
def _safeEval(self, s):
    """
    Evaluate strings that only contain the following structures:
        const, tuple, list, dict
    Taken from c.l.py newsgroup posting Nov 5, 2003 by Huaiyu Zhu at IBM Almaden
    ??? this may need to be expanded to support complex numbers in lists, etc.
    """
    #print "in _safeEval. input: '%s'" % s
    node1 = compiler.parse(s)
    # !!! special case of attempting to compile a lone string
    if node1.doc is not None and len(node1.node.nodes) == 0:
        #print "in _safeEval. string: '%s' found as docstring" % node1.doc
        return node1.doc
    #print "in _safeEval. nodes: '%s'" % (node1,)
    stmts = node1.node.nodes
    assert len(stmts) == 1
    node = compiler.parse(s).node.nodes[0]
    assert node.__class__ == compiler.ast.Discard
    nodes = node.getChildNodes()
    assert len(nodes) == 1
    result = self._safeAssemble(nodes[0])
    #print "in _safeEval result: '%s'" % (result,)
    return result
def get_ast():
    if options.filename:
        with open(options.filename, 'r') as f:
            return compiler.parse("".join(f.readlines()))
    else:
        import sys
        return compiler.parse("".join(sys.stdin))
def verify_source(source):
    tree = fix_tree(parse(source, 'exec'))
    code = pycodegen.ModuleCodeGenerator(tree).getCode()
    generator = ModuleSourceCodeGenerator(tree)
    source = generator.getSourceCode()
    tree = fix_tree(parse(source, 'exec'))
    if code.co_code != pycodegen.ModuleCodeGenerator(tree).getCode().co_code:
        return source
def get_ast():
    if options.filename:
        with open(options.filename, "r") as f:
            return compiler.parse("".join(f.readlines()))
    else:
        import sys
        return compiler.parse("".join(sys.stdin))
def errorPythonSource(source):
    """Check that the Python source code parses without errors."""
    try:
        source = source.replace('\r', '')
        compiler.parse(source)
    except:
        return "Syntax error"
    return ''
def check_syntax(self, cmd_string):
    "Checks syntax for syntax-errors"
    try:
        compiler.parse(cmd_string)
        return True
    except Exception, e:
        self.gui.show_error_dialog("Syntax Error In Experiment Script",
                                   "Experiment Script: " + str(e))
        self.gui.new_log_message("Syntax Error: " + str(e), "ES")
        return False
def has_changed(self, fpath):
    """ answer whether has_changed(fpath) since list mirror """
    before = open(fpath, 'r').read()
    after = open((self ^ fpath), 'r').read()
    file_changed = not (before == after)
    # compare the repr of the parse trees: they differ only when the code
    # itself has changed (compiler.ast nodes do not define equality)
    ast_changed = str(compiler.parse(before, 'exec')) != str(compiler.parse(after, 'exec'))
    out = dict(file_changed=file_changed, ast_changed=ast_changed)
    return out
def _analyzeGens(top, absnames):
    genlist = []
    for g in top:
        if isinstance(g, _UserCode):
            ast = g
        elif isinstance(g, (_AlwaysComb, _Always)):
            f = g.func
            s = inspect.getsource(f)
            # remove decorators
            s = re.sub(r"@.*", "", s)
            s = s.lstrip()
            ast = compiler.parse(s)
            # print ast
            ast.sourcefile = inspect.getsourcefile(f)
            ast.lineoffset = inspect.getsourcelines(f)[1] - 1
            ast.symdict = f.func_globals.copy()
            ast.callstack = []
            # handle free variables
            if f.func_code.co_freevars:
                for n, c in zip(f.func_code.co_freevars, f.func_closure):
                    obj = _cell_deref(c)
                    if isinstance(g, _AlwaysComb):
                        # print type(obj)
                        assert isinstance(obj, (int, long, Signal)) or \
                               _isMem(obj) or _isTupleOfInts(obj)
                    ast.symdict[n] = obj
            ast.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _NotSupportedVisitor(ast)
            compiler.walk(ast, v)
            if isinstance(g, _AlwaysComb):
                v = _AnalyzeAlwaysCombVisitor(ast, g.senslist)
            else:
                v = _AnalyzeAlwaysDecoVisitor(ast, g.senslist)
            compiler.walk(ast, v)
        else:  # @instance
            f = g.gen.gi_frame
            s = inspect.getsource(f)
            # remove decorators
            s = re.sub(r"@.*", "", s)
            s = s.lstrip()
            ast = compiler.parse(s)
            # print ast
            ast.sourcefile = inspect.getsourcefile(f)
            ast.lineoffset = inspect.getsourcelines(f)[1] - 1
            ast.symdict = f.f_globals.copy()
            ast.symdict.update(f.f_locals)
            ast.callstack = []
            ast.name = absnames.get(id(g), str(_Label("BLOCK"))).upper()
            v = _NotSupportedVisitor(ast)
            compiler.walk(ast, v)
            v = _AnalyzeBlockVisitor(ast)
            compiler.walk(ast, v)
        genlist.append(ast)
    return genlist
def pythonCodeContainsErrorOnParse(self, code):
    flag = False
    ls = []
    try:
        compiler.parse(code, mode="exec")
    except Exception as e:
        flag = True
        # traceback.print_exc()
        s = traceback.format_exc()
        ls = s.split("\n")
    return flag, ls
def fetch_git(repo_dir):
    # create new user object
    users = Users()
    os.chdir(repo_dir)
    os.system('git checkout master -q')
    proc = subprocess.Popen(['git', 'log', '--pretty=format:%H %ct'],
                            stdout=subprocess.PIPE)
    commit_log = proc.stdout.readlines()
    commit_log.reverse()
    for log in commit_log:
        log = log.strip()
        (commit, date) = log.split(' ')
        os.system('git checkout ' + commit + ' -q')
        # print(commit, date)
        proc = subprocess.Popen(['git', 'diff-tree', '--no-commit-id',
                                 '--name-only', '-r', commit],
                                stdout=subprocess.PIPE)
        changes = proc.stdout.readlines()
        # print(changes)
        for change in changes:
            path = change.strip()
            dir, filename = os.path.split(path)
            host, name = os.path.split(dir)
            user = users.get_user(host, name)
            if user is None:
                user = users.add_user(host, name, filename)
            try:
                with open(path) as fh:
                    data = fh.read()
                try:
                    compiler.parse(data)
                    syntax = 1
                except Exception as e:
                    # print("exception!")
                    # print(e)
                    syntax = 0
                lines = len(data.splitlines())
            except IOError:
                continue
            # times 1000 for javascript flot time
            time = int(date) * 1000
            history = History(commit, time, lines, syntax)
            user.add_version(filename, history)
    # reset to head
    os.system('git checkout master -q')
    os.chdir('..')
    return users
def verify_source(source):
    tree = fix_tree(parse(source, 'exec'))
    code = pycodegen.ModuleCodeGenerator(tree).getCode()
    generator = ModuleSourceCodeGenerator(tree)
    generated = generator.getSourceCode()
    try:
        new = fix_tree(parse(generated, 'exec'))
    except SyntaxError:
        return generated
    old = code.co_code
    new = pycodegen.ModuleCodeGenerator(new).getCode().co_code
    if old != new:
        return generated
def canonize(self, student_answer, state=None):
    if re.match('.*;[ \t]*$', student_answer):
        return (False, 'Do not put a <tt>;</tt> at the end of a line in Python')
    if '...' in student_answer:
        return (
            False,
            "Remove the '...' at the beginning of the line: they are not part of "
            "the language, they are the interactive interpreter's prompt"
        )
    try:
        compiler.parse(student_answer)
    except SyntaxError as e:
        return (False, 'Message from Python: <b>' + cgi.escape(str(e)) + '</b><br>')
    return P_clean(student_answer)
def searchImportedModulesForDefinition(scope, node):
    lines = scope.module.getSourceNode().getLines()
    for lineno in scope.getImportLineNumbers():
        logicalline = getLogicalLine(lines, lineno)
        logicalline = makeLineParseable(logicalline)
        ast = compiler.parse(logicalline)

        class ImportVisitor:
            def __init__(self, node):
                self.target = node
                self.match = None
                assert isinstance(self.target, Name), \
                    "Getattr not supported"

            def visitFrom(self, node):
                module = resolveImportedModuleOrPackage(scope, node.modname)
                if module is None:  # couldn't find module
                    return
                if node.names[0][0] == '*':  # e.g. from foo import *
                    match = findDefinitionFromASTNode(module, self.target)
                    if match is not None:
                        self.match = match
                    return
                for name, alias in node.names:
                    if alias is None and name == self.target.name:
                        match = findDefinitionFromASTNode(module, self.target)
                        if match is not None:
                            self.match = match
                            return

        match = visitor.walk(ast, ImportVisitor(node)).match
        if match:
            return match
def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the
    following Python literal structures: strings, numbers, tuples, lists,
    dicts, booleans, and None.
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, string_types):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, ast.Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        if isinstance(node, ast.Const) and isinstance(
                node.value,
                (text_type, binary_type, float, complex) + integer_types):
            return node.value
        elif isinstance(node, ast.Tuple):
            return tuple(map(_convert, node.nodes))
        elif isinstance(node, ast.List):
            return list(map(_convert, node.nodes))
        elif isinstance(node, ast.Dict):
            # compiler.ast.Dict stores its (key, value) pairs in the `items`
            # attribute; it is not a method
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, ast.Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, ast.UnarySub):
            return -_convert(node.expr)  # pylint: disable=invalid-unary-operand-type
        raise ValueError('malformed string')

    return _convert(node_or_string)
def test_visitLambda(self):
    from common import MatchFinder
    finder = MatchFinder()
    src = '''x = lambda a, b, c=None, d=None: (a + b) and c or d'''
    ast = compiler.parse(src)
    finder.reset(src)
    compiler.walk(ast, finder)
def split_commands(self, python):
    """ Split multiple lines of code into discrete commands that can be
    executed singly.

    Parameters
    ----------
    python : str
        Pure, exec'able Python code.

    Returns
    -------
    commands : list of str
        Separate commands that can be exec'ed independently.
    """
    # The compiler module will parse the code into an abstract syntax tree.
    ast = compiler.parse(python)

    # Each separate command is available by iterating over ast.node. The
    # lineno attribute is the line number (1-indexed) beginning the commands
    # suite.
    linenos = [x.lineno - 1 for x in ast.node]

    # When we finally get the slices, we will need to slice all the way to
    # the end even though we don't have a line number for it. Fortunately,
    # None does the job nicely.
    linenos.append(None)
    lines = python.split('\n')

    # Hooray for incomprehensible list comprehensions!
    commands = ['\n'.join(lines[i:j]) for i, j in zip(linenos[:-1], linenos[1:])]

    return commands
def makejson(config):
    # config = json.load('make_path')
    with open(config['source'], encoding='utf8') as f:
        python_code = f.read()
    tree = ast.parse(python_code)
    js_code_body = compiler.parse(tree)

    # import include code
    if 'include' in config:
        js_code_head_list = []
        include = config['include'] if type(config['include']) == list else [config['include']]
        for _include in include:
            with open(_include, encoding="utf8") as f:
                js_code_head_list.append(f.read())
        js_code_head = '\n'.join(js_code_head_list)
    else:
        js_code_head = ''

    # direct export
    if 'module_name' not in config:
        return output(config, js_code_head + '\n' + js_code_body)

    # module export
    module_name = config['module_name']
    exports = config['export']
    comment = config.get('comment', '')
    content = module_template.render(js_code_head=tab(js_code_head),
                                     js_code_body=tab(js_code_body),
                                     module_name=module_name,
                                     exports=exports,
                                     comment=comment)
    return output(config, content)
def updateModel(self, buf):
    """
    To call whenever the model has been updated in a way that could impact
    the outline.  We do not modify the current outline in case of a syntax
    error.
    """
    try:
        mod = compiler.parse(buf)
    except SyntaxError:
        log("Syntax error, no outline updated")
        return

    self.clear()
    self._items = []
    visitor = OutlineAstVisitor(self)
    try:
        visitor.preorder(mod, visitor, None)
        visitor.endCurrentVisitedItem()
        del mod, visitor
    except SyntaxError:
        log("Syntax error, outline not created")
    finally:
        gc.collect()
def getAssignments(lines):
    class AssignVisitor:
        def __init__(self):
            self.assigns = []

        def visitAssTuple(self, node):
            for a in node.nodes:
                if a.name not in self.assigns:
                    self.assigns.append(a.name)

        def visitAssName(self, node):
            if node.name not in self.assigns:
                self.assigns.append(node.name)

        def visitAugAssign(self, node):
            if isinstance(node.node, compiler.ast.Name):
                if node.node.name not in self.assigns:
                    self.assigns.append(node.node.name)

    assignfinder = AssignVisitor()
    for line in lines:
        doctoredline = makeLineParseable(line)
        try:
            ast = compiler.parse(doctoredline)
        except ParserError:
            raise ParserException("couldnt parse:" + doctoredline)
        visitor.walk(ast, assignfinder)
    return assignfinder.assigns
def check(code_string, filename):
    """check a code string"""
    try:
        tree = compiler.parse(code_string)
    except (SyntaxError, IndentationError):
        value = sys.exc_info()[1]
        try:
            (lineno, offset, line) = value[1][1:]
        except IndexError:
            print('could not compile %r' % (filename,), file=sys.stderr)
            return 1
        if line.endswith("\n"):
            line = line[:-1]
        print('%s:%d: could not compile' % (filename, lineno), file=sys.stderr)
        print(line, file=sys.stderr)
        print(" " * (offset - 2), "^", file=sys.stderr)
        return 1
    else:
        import locale
        try:
            locale.setlocale(locale.LC_ALL, 'C')
        except locale.Error as e:
            print('setlocale failed: %s' % (e,), file=sys.stderr)
        w = FlakeChecker(tree, filename)
        w.messages.sort(lambda a, b: cmp(a.lineno, b.lineno))
        for warning in w.messages:
            print(warning)
        return len(w.messages)
def get_pysmell_code_walk_to_text(self, text):
    code = compiler.parse(text)

    class GlobalCodeFinder(CodeFinder):
        def visitFunction(self, func):
            self.enterScope(func)
            if self.inClassFunction:
                if func.name != '__init__':
                    if func.decorators and 'property' in [getName(n) for n in func.decorators]:
                        self.modules.addProperty(self.currentClass, func.name)
                    else:
                        self.modules.addMethod(self.currentClass, func.name,
                                               getFuncArgs(func), func.doc or "")
                else:
                    self.modules.setConstructor(self.currentClass, getFuncArgs(func))
            elif len(self.scope) == 1:
                self.modules.addFunction(func.name,
                                         getFuncArgs(func, inClass=False),
                                         func.doc or "")
            #self.visit(func.code) Remove this line
            self.exitScope()

    if self.scope == SCOPE_GLOBAL:
        codefinder = GlobalCodeFinder()
    else:
        codefinder = CodeFinder()
    codefinder.modules = PyPleteModuleDict()
    return compiler.walk(code, codefinder)
def literal_eval(node_or_string):
    """Safely evaluate a node/string containing a Python expression.

    The string or node provided may only consist of the following Python
    literal structures: strings, numbers, tuples, lists, dicts, booleans,
    and None.  Essentially a backport of the literal_eval function in
    python 2.6 onwards.

    From: http://mail.python.org/pipermail/python-list/2009-September/1219992.html
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, str):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, ast.Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        '''Convert node/string to expression.'''
        if isinstance(node, ast.Const) and isinstance(
                node.value, (str, int, float, complex)):
            return node.value
        elif isinstance(node, ast.Tuple):
            return tuple(_convert(element) for element in node.nodes)
        elif isinstance(node, ast.List):
            return list(_convert(element) for element in node.nodes)
        elif isinstance(node, ast.Dict):
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, ast.Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, ast.UnarySub):
            return -_convert(node.expr)
        raise ValueError('malformed string')

    return _convert(node_or_string)
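# Illustrative usage sketch (an assumption, not part of the original sources):
# the literal_eval backport above expects `parse` and `ast` from the deprecated
# Python 2 `compiler` package to be available in its module's scope.
from compiler import ast, parse  # Python 2 only; the compiler package was removed in Python 3

print literal_eval("{'a': [1, 2, -3], 'b': (True, None)}")
# e.g. -> {'a': [1, 2, -3], 'b': (True, None)}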
def get_vars_in_expression(source):
    '''Get list of variable names in a python expression.'''

    import compiler
    from compiler.ast import Node

    ##
    # @brief Internal recursive function.
    # @param node An AST parse Node.
    # @param var_list Input list of variables.
    # @return An updated list of variables.
    def get_vars_body(node, var_list=[]):
        if isinstance(node, Node):
            if node.__class__.__name__ == 'Name':
                for child in node.getChildren():
                    if child not in var_list:
                        var_list.append(child)
            for child in node.getChildren():
                if isinstance(child, Node):
                    for child in node.getChildren():
                        var_list = get_vars_body(child, var_list)
                    break

        return var_list

    return get_vars_body(compiler.parse(source))
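# Illustrative usage sketch (an assumption, not part of the original sources):
# get_vars_in_expression collects the value of every Name node found in the
# `compiler` parse tree of the expression, in traversal order.
print get_vars_in_expression('x + y * z')
# -> ['x', 'y', 'z']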
def literal_eval(node_or_string):
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, compiler.ast.Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        if isinstance(node, compiler.ast.Const) and isinstance(
                node.value, (basestring, int, float, long, complex)):
            return node.value
        elif isinstance(node, compiler.ast.Tuple):
            return tuple(map(_convert, node.nodes))
        elif isinstance(node, compiler.ast.List):
            return list(map(_convert, node.nodes))
        elif isinstance(node, compiler.ast.Dict):
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, compiler.ast.Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, compiler.ast.UnarySub):
            return -_convert(node.expr)
        raise ValueError('malformed string')

    return _convert(node_or_string)
def get_code_complexity(code, min=7, filename='stdin'):
    complex = []
    try:
        ast = parse(code)
    except AttributeError:
        e = sys.exc_info()[1]
        sys.stderr.write("Unable to parse %s: %s\n" % (filename, e))
        return 0

    visitor = PathGraphingAstVisitor()
    visitor.preorder(ast, visitor)
    for graph in visitor.graphs.values():
        if graph is None:  # ?
            continue
        if graph.complexity() >= min:
            complex.append(dict(
                type='W',
                lnum=graph.lineno,
                text='%s %r is too complex (%d)' % (
                    WARNING_CODE, graph.entity, graph.complexity(),
                )))
    return complex
def main():
    filename = 'tests/implicit_main.py'
    ast = parse(open(filename).read())
    import pprint
    pprint.pprint(ast)
    s = ast2py(ast)
    return s
def literal_eval(node_or_string):
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, compiler.ast.Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        if isinstance(node, compiler.ast.Const) and isinstance(
                node.value, (basestring, int, float, long, complex)):
            return node.value
        elif isinstance(node, compiler.ast.Tuple):
            return tuple(map(_convert, node.nodes))
        elif isinstance(node, compiler.ast.List):
            return list(map(_convert, node.nodes))
        elif isinstance(node, compiler.ast.Dict):
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, compiler.ast.Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, compiler.ast.UnarySub):
            return -_convert(node.expr)
        raise ValueError('malformed string')

    return _convert(node_or_string)
def checkBeforeAndAfter(self):
    from RestrictedPython.RCompile import RModule
    from RestrictedPython.tests import before_and_after
    from compiler import parse

    defre = re.compile(r'def ([_A-Za-z0-9]+)_(after|before)\(')

    beforel = [name for name in before_and_after.__dict__
               if name.endswith("_before")]

    for name in beforel:
        before = getattr(before_and_after, name)
        before_src = get_source(before)
        before_src = re.sub(defre, r'def \1(', before_src)
        rm = RModule(before_src, '')
        tree_before = rm._get_tree()

        after = getattr(before_and_after, name[:-6] + 'after')
        after_src = get_source(after)
        after_src = re.sub(defre, r'def \1(', after_src)
        tree_after = parse(after_src)

        self.assertEqual(str(tree_before), str(tree_after))

        rm.compile()
        verify.verify(rm.getCode())
def find_executable_statements(self, text, exclude=None):
    # Find lines which match an exclusion pattern.
    excluded = {}
    suite_spots = {}
    if exclude:
        reExclude = re.compile(exclude)
        lines = text.split('\n')
        for i in range(len(lines)):
            if reExclude.search(lines[i]):
                excluded[i + 1] = 1

    # Parse the code and analyze the parse tree to find out which statements
    # are multiline, and where suites begin and end.
    import parser
    tree = parser.suite(text + '\n\n').totuple(1)
    self.get_suite_spots(tree, suite_spots)
    #print "Suite spots:", suite_spots

    # Use the compiler module to parse the text and find the executable
    # statements.  We add newlines to be impervious to final partial lines.
    statements = {}
    ast = compiler.parse(text + '\n\n')
    visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
    compiler.walk(ast, visitor, walker=visitor)

    lines = statements.keys()
    lines.sort()
    excluded_lines = excluded.keys()
    excluded_lines.sort()
    return lines, excluded_lines, suite_spots
def run_pyflakes_for_package(package_name, extra_ignore=None):
    """
    If pyflakes is installed, run it across the given package name
    returning any warnings found.
    """
    ignore_strings = PYFLAKES_IGNORE
    if extra_ignore:
        ignore_strings += extra_ignore
    try:
        from pyflakes.checker import Checker
    except ImportError:
        return []
    warnings = []
    for (root, dirs, files) in os.walk(path_for_import(package_name)):
        for f in files:
            # Ignore migrations.
            directory = root.split(os.sep)[-1]
            if not f.endswith(".py") or directory == "migrations":
                continue
            path = os.path.join(root, f)
            with open(path, "U") as source_file:
                source = source_file.read()
            try:
                compile(source, f, "exec")
            except (SyntaxError, IndentationError), value:
                info = (path, value.lineno, value.args[0])
                warnings.append("Invalid syntax in %s:%d: %s" % info)
            result = Checker(parse(source), path)
            for warning in result.messages:
                message = unicode(warning)
                for ignore in ignore_strings:
                    if ignore in message:
                        break
                else:
                    warnings.append(message)
def main(argv):
    opar = optparse.OptionParser()
    opar.add_option("-d", "--dot", dest="dot",
                    help="output a graphviz dot file", action="store_true")
    opar.add_option("-m", "--min", dest="min",
                    help="minimum complexity for output", type="int", default=2)
    options, args = opar.parse_args(argv)

    text = open(args[0], "rU").read() + '\n\n'
    ast = parse(text)
    visitor = PathGraphingAstVisitor()
    visitor.preorder(ast, visitor)

    if options.dot:
        print('graph {')
        for graph in visitor.graphs.values():
            if graph.complexity() >= options.min:
                graph.to_dot()
        print('}')
    else:
        for graph in visitor.graphs.values():
            if graph.complexity() >= options.min:
                print(graph.name, graph.complexity())
def parse_python_source(fn):
    """Parse the file 'fn' and return two things:

    1. The AST tree.
    2. A list of lines of the source line (typically used for verbose error
       messages).

    If the file has a syntax error in it, the first argument will be None.
    """
    # Read the file's contents to return it.
    # Note: we make sure to use universal newlines.
    try:
        contents = open(fn, 'rU').read()
        lines = contents.splitlines()
    except (IOError, OSError) as e:
        logging.error("Could not read file '%s'." % fn)
        return None, None

    # Convert the file to an AST.
    try:
        ast = compiler.parse(contents)
    except SyntaxError as e:
        err = '%s:%s: %s' % (fn, e.lineno or '--', e.msg)
        logging.error("Error processing file '%s':\n%s" % (fn, err))
        return None, lines
    except TypeError as e:
        # Note: this branch untested, applied from a user-submitted patch.
        err = '%s: %s' % (fn, str(e))
        logging.error("Error processing file '%s':\n%s" % (fn, err))
        return None, lines

    return ast, lines
def run(self):
    self.sleep(2)
    exts = settings.SYNTAX.get('python')['extension']
    file_ext = file_manager.get_file_extension(self._editor.ID)
    if file_ext in exts:
        try:
            self.reset()
            source = self._editor.get_text()
            if self._editor.encoding is not None:
                source = source.encode(self._editor.encoding)
            parseResult = compiler.parse(source)
            self.checker = checker.Checker(parseResult, self._editor.ID)
            for m in self.checker.messages:
                lineno = m.lineno - 1
                if lineno not in self.errorsSummary:
                    message = [m.message % m.message_args]
                else:
                    message = self.errorsSummary[lineno]
                    message += [m.message % m.message_args]
                self.errorsSummary[lineno] = message
        except Exception, reason:
            if hasattr(reason, 'lineno'):
                self.errorsSummary[reason.lineno - 1] = [reason.message]
            else:
                self.errorsSummary[0] = [reason.message]
def safeEval(s, namespace={'True': True, 'False': False, 'None': None}):
    """Evaluates s, safely.  Useful for turning strings into
    tuples/lists/etc. without unsafely using eval()."""
    try:
        node = compiler.parse(s)
    except SyntaxError, e:
        raise ValueError, 'Invalid string: %s.' % e
def check(codeString, filename):
    message = []
    try:
        codeString = codeString.replace('\r\n', '\n')
        tree = compiler.parse(codeString)
    except (SyntaxError, IndentationError):
        value = sys.exc_info()[1]
        if isinstance(value, Exception):
            lineno = value.lineno
            offset = value.offset
            line = value.text
            # if line and line.endswith("\n"):
            #     line = line[:-1]
            message.append((filename, lineno, value.msg))
    except:
        error.traceback()
        message.append((filename, -1,
                        tr('There are some unknown errors, please check error.txt')))
    else:
        w = checker.Checker(tree, filename)
        w.messages.sort(lambda a, b: cmp(a.lineno, b.lineno))
        for warning in w.messages:
            message.append((filename, warning.lineno,
                            warning.message % warning.message_args))
    return message
def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the
    following Python literal structures: strings, numbers, tuples, lists,
    dicts, booleans, and None.
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, string_types):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, ast.Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        if isinstance(node, ast.Const) and isinstance(
                node.value,
                (text_type, binary_type, float, complex) + integer_types):
            return node.value
        elif isinstance(node, ast.Tuple):
            return tuple(map(_convert, node.nodes))
        elif isinstance(node, ast.List):
            return list(map(_convert, node.nodes))
        elif isinstance(node, ast.Dict):
            # compiler.ast.Dict stores its (key, value) pairs in the `items`
            # attribute; it is not a method
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, ast.Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, ast.UnarySub):
            return -_convert(node.expr)
        raise ValueError('malformed string')

    return _convert(node_or_string)
def get_code_complexity(code, min=7, filename='stdin'):
    complex = []
    try:
        ast = parse(code)
    except (AttributeError, SyntaxError):
        e = sys.exc_info()[1]
        sys.stderr.write("Unable to parse %s: %s\n" % (filename, e))
        return 0

    visitor = PathGraphingAstVisitor()
    visitor.preorder(ast, visitor)
    for graph in visitor.graphs.values():
        if graph is None:  # ?
            continue
        if graph.complexity() >= min:
            graph.filename = filename
            if not skip_warning(graph):
                msg = '%s:%d:1: %s %r is too complex (%d)' % (
                    filename, graph.lineno, WARNING_CODE,
                    graph.entity, graph.complexity(),
                )
                complex.append(msg)

    if len(complex) == 0:
        return 0
    print('\n'.join(complex))
    return len(complex)
def parse_python_source(fn):
    contents = open(fn, 'rU').read()
    ast = compiler.parse(contents)
    vis = ImportVisitor()
    compiler.walk(ast, vis, ImportWalker(vis))
    return vis.finalize()
def _checkBeforeAndAfter(self, mod):
    from RestrictedPython.RCompile import RModule
    from compiler import parse

    defre = re.compile(r'def ([_A-Za-z0-9]+)_(after|before)\(')

    beforel = [name for name in mod.__dict__ if name.endswith("_before")]

    for name in beforel:
        before = getattr(mod, name)
        before_src = get_source(before)
        before_src = re.sub(defre, r'def \1(', before_src)
        rm = RModule(before_src, '')
        tree_before = rm._get_tree()

        after = getattr(mod, name[:-6] + 'after')
        after_src = get_source(after)
        after_src = re.sub(defre, r'def \1(', after_src)
        tree_after = parse(after_src)

        self.assertEqual(str(tree_before), str(tree_after))

        rm.compile()
        verify.verify(rm.getCode())
def parse_n3_term(src):
    """
    Parse a Notation3 value into a RDFTerm object (IRI or Literal).

    This parser understands IRIs and quoted strings; basic non-string types
    (integers, decimals, booleans, etc) are not supported yet.
    """
    src = unicode(src)

    if src.startswith('<'):
        # `src` is an IRI
        if not src.endswith('>'):
            raise ValueError
        value = src[1:-1]
        if '<' in value or '>' in value:
            raise ValueError
        return IRI(value)
    else:
        datatype_match = _n3parser_datatype.search(src)
        if datatype_match is not None:
            datatype = datatype_match.group('datatype')
            src = _n3parser_datatype.sub('', src)
        else:
            datatype = None

        lang_match = _n3parser_lang.search(src)
        if lang_match is not None:
            lang = lang_match.group('lang')
            src = _n3parser_lang.sub('', src)
        else:
            lang = None

        # Python literals syntax is mostly compatible with N3.
        # We don't execute the code, just turn it into an AST.
        try:
            ast = compiler.parse("value = u" + src)
        except:
            raise ValueError

        # Don't allow any extra tokens in the AST
        if len(ast.node.getChildNodes()) != 1:
            raise ValueError
        assign_node = ast.node.getChildNodes()[0]
        if len(assign_node.getChildNodes()) != 2:
            raise ValueError
        value_node = assign_node.getChildNodes()[1]
        if value_node.getChildNodes():
            raise ValueError
        if value_node.__class__ != compiler.ast.Const:
            raise ValueError

        value = value_node.value
        if type(value) is not unicode:
            raise ValueError

        return Literal(value, datatype, lang)
def compile(self, viewcode_str, filename):
    """Compile the view code and return a code object and dictionary of
    globals needed by the code object.
    """
    viewcode = compile(viewcode_str, filename, 'exec')
    # scan top-level code only for "import foo" and
    # "from foo import *" and "from foo import bar, baz"
    viewglobals = {'__builtins__': __builtins__}
    for stmt in compiler.parse(viewcode_str).node:
        if isinstance(stmt, Import):
            modname, asname = stmt.names[0]
            if asname is None:
                asname = modname
            viewglobals[asname] = __import__(modname)
        elif isinstance(stmt, From):
            fromlist = [x[0] for x in stmt.names]
            module = __import__(stmt.modname, {}, {}, fromlist)
            for name, asname in stmt.names:
                if name == '*':
                    for starname in getattr(module, '__all__', dir(module)):
                        viewglobals[starname] = getattr(module, starname)
                else:
                    if asname is None:
                        asname = name
                    viewglobals[asname] = getattr(module, name)
    return viewcode, viewglobals
def find_executable_statements(self, text, exclude=None):
    # Find lines which match an exclusion pattern.
    excluded = {}
    suite_spots = {}
    if exclude:
        reExclude = re.compile(exclude)
        lines = text.split('\n')
        for i in range(len(lines)):
            if reExclude.search(lines[i]):
                excluded[i + 1] = 1

    # Parse the code and analyze the parse tree to find out which statements
    # are multiline, and where suites begin and end.
    import parser
    tree = parser.suite(text + '\n\n').totuple(1)
    self.get_suite_spots(tree, suite_spots)
    #print "Suite spots:", suite_spots

    # Use the compiler module to parse the text and find the executable
    # statements.  We add newlines to be impervious to final partial lines.
    statements = {}
    ast = compiler.parse(text + '\n\n')
    visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
    compiler.walk(ast, visitor, walker=visitor)

    lines = statements.keys()
    lines.sort()
    excluded_lines = excluded.keys()
    excluded_lines.sort()
    return lines, excluded_lines, suite_spots
def scan_python_file(filename, calls):
    """Scan a python file for gettext calls."""
    def scan(nodelist):
        for node in nodelist:
            if isinstance(node, ast.CallFunc):
                handle = False
                for pos, n in enumerate(node):
                    if pos == 0:
                        if isinstance(n, ast.Name) and n.name in calls:
                            handle = True
                    elif pos == 1:
                        if handle:
                            if n.__class__ is ast.Const and \
                               isinstance(n.value, basestring):
                                yield n.lineno, n.value
                            break
                    else:
                        for line in scan([n]):
                            yield line
            elif hasattr(node, '__iter__'):
                for n in scan(node):
                    yield n

    fp = file(filename)
    try:
        try:
            return scan(parse(fp.read()))
        except:
            print >> sys.stderr, 'Syntax Error in file %r' % filename
    finally:
        fp.close()
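# Illustrative usage sketch (an assumption, not part of the original sources;
# 'some_module.py' is a hypothetical path): scan a source file for calls to
# gettext-style functions named `_` and `gettext`.
for lineno, msg in scan_python_file('some_module.py', calls=('_', 'gettext')):
    print '%d: %r' % (lineno, msg)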
def parse_python(self, lineno, gen, template):
    """
    Convert the passed generator into a flat string representing
    python sourcecode and return an ast node or raise a
    TemplateSyntaxError.
    """
    tokens = []
    for t_lineno, t_token, t_data in gen:
        if t_token == 'string':
            # because some libraries have problems with unicode
            # objects we do some lazy unicode processing here.
            # if a string is ASCII only we yield it as string
            # in other cases as unicode. This works around
            # problems with datetimeobj.strftime()
            # also escape newlines in strings
            t_data = t_data.replace('\n', '\\n')
            try:
                str(t_data)
            except UnicodeError:
                tokens.append('u' + t_data)
                continue
        tokens.append(t_data)
    source = '\xef\xbb\xbf' + (template % (u' '.join(tokens)).encode('utf-8'))
    try:
        ast = parse(source, 'exec')
    except SyntaxError, e:
        raise TemplateSyntaxError('invalid syntax in expression',
                                  lineno + (e.lineno or 0),
                                  self.filename)
def _literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the
    following Python literal structures: strings, numbers, tuples, lists,
    dicts, booleans, and None.
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.node

    def _convert(node):
        if isinstance(node, Const) and isinstance(
                node.value, (basestring, int, float, long, complex)):
            return node.value
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.nodes))
        elif isinstance(node, List):
            return list(map(_convert, node.nodes))
        elif isinstance(node, Dict):
            return dict((_convert(k), _convert(v)) for k, v in node.items)
        elif isinstance(node, Name):
            if node.name in _safe_names:
                return _safe_names[node.name]
        elif isinstance(node, UnarySub):
            return -_convert(node.expr)
        raise ValueError('malformed string')

    return _convert(node_or_string)
def test_with_pyflakes(path='.'):
    """ Test code with Pyflakes """
    import pyflakes.checker
    exclude = 'ws-meta-in.py', 'test_syntax.py', 'test_point.py', 'test_pml.py'
    cwd = os.getcwd()
    top = os.path.join(os.path.dirname(__file__), '..')
    os.chdir(top)
    messages = []
    for dirpath, dirnames, filenames in os.walk(path):
        for filename in filenames:
            if filename.endswith('.py') and filename not in exclude:
                filename = os.path.join(dirpath, filename)
                code = open(filename, 'U').read()
                compile(code, filename, 'exec')
                tree = compiler.parse(code)
                checker = pyflakes.checker.Checker(tree, filename)
                for m in checker.messages:
                    if (not m.filename.endswith('__init__.py')
                            or m.message != '%r imported but unused'):
                        messages += [m]
                        print m
    os.chdir(cwd)
    assert messages == []
    return
def get_code_complexity(code, min=7, filename='stdin'):
    complex = []
    try:
        ast = parse(code)
    except AttributeError:
        e = sys.exc_info()[1]
        sys.stderr.write("Unable to parse %s: %s\n" % (filename, e))
        return 0

    visitor = PathGraphingAstVisitor()
    visitor.preorder(ast, visitor)
    for graph in visitor.graphs.values():
        if graph is None:  # ?
            continue
        if graph.complexity() >= min:
            complex.append(dict(
                type='W',
                lnum=graph.lineno,
                text='%s %r is too complex (%d)' % (
                    WARNING_CODE, graph.entity, graph.complexity(),
                )))
    return complex
def parse_module(module_text, filename):
    """Return a module documentation tree from `module_text`."""
    ast = compiler.parse(module_text)
    token_parser = TokenParser(module_text)
    visitor = ModuleVisitor(filename, token_parser)
    compiler.walk(ast, visitor, walker=visitor)
    return visitor.module
def test_with_pyflakes(path="."): """ Test code with Pyflakes """ import pyflakes.checker exclude = "ws-meta-in.py", "test_syntax.py", "test_point.py", "test_pml.py" cwd = os.getcwd() top = os.path.join(os.path.dirname(__file__), "..") os.chdir(top) messages = [] for dirpath, dirnames, filenames in os.walk(path): for filename in filenames: if filename.endswith(".py") and filename not in exclude: filename = os.path.join(dirpath, filename) code = open(filename, "U").read() compile(code, filename, "exec") tree = compiler.parse(code) checker = pyflakes.checker.Checker(tree, filename) for m in checker.messages: if not m.filename.endswith("__init__.py") or m.message != "%r imported but unused": messages += [m] print m os.chdir(cwd) assert messages == [] return