def __init__(self, f):
    if target not in self.targets:
        raise NotImplementedError(
            "unknown target passed to '@Phylanx()' decorator: %s." % target)
    self.f = f
    self.cs = cs
    self.target = target

    # Get the source code
    actual_lineno = inspect.getsourcelines(f)[-1]
    src = inspect.getsource(f)
    # Before recompiling the code, take
    # off the decorator from the source.
    src = re.sub(r'^\s*@\w+.*\n', '', src)

    # Create the AST
    tree = ast.parse(src)
    ast.increment_lineno(tree, actual_lineno)
    assert len(tree.body) == 1

    self.transformation = self.targets[target](tree, kwargs)
    self.__src__ = self.transformation.__src__

    if target == "PhySL":
        et.compile(self.__src__, self.cs)
def do_compile(self, key, args):
    src = textwrap.dedent(oinspect.getsource(self.func))
    tree = ast.parse(src)

    func_body = tree.body[0]
    func_body.decorator_list = []

    visitor = ASTTransformerTotal(is_kernel=False, func=self)
    visitor.visit(tree)

    ast.increment_lineno(tree, oinspect.getsourcelines(self.func)[1] - 1)

    local_vars = {}
    global_vars = _get_global_vars(self.func)

    if impl.get_runtime().experimental_real_function:
        # inject template parameters into globals
        for i in self.template_slot_locations:
            template_var_name = self.argument_names[i]
            global_vars[template_var_name] = args[i]

    exec(
        compile(tree,
                filename=oinspect.getsourcefile(self.func),
                mode='exec'), global_vars, local_vars)

    if impl.get_runtime().experimental_real_function:
        self.compiled[key.instance_id] = local_vars[self.func.__name__]
        self.taichi_functions[key.instance_id] = _ti_core.create_function(key)
        self.taichi_functions[key.instance_id].set_function_body(
            self.compiled[key.instance_id])
    else:
        self.compiled = local_vars[self.func.__name__]
def _parse_escape_block(self, start_pos, initial_offset):
    assert self.template[start_pos.offset - initial_offset] == self.escape_block
    assert self.template[start_pos.offset - initial_offset + 1] == self.escape_block_open

    len_template = len(self.template)
    current_pos = start_pos.next_char().next_char()
    block_code = ""
    continue_parse = True
    while continue_parse:
        if current_pos.offset - initial_offset >= len_template:
            raise TemplateCompileError(
                "Unexpected end of template within block block (missing closing {}{})".format(
                    self.escape_block, self.escape_block_close),
                self.template, start_pos, current_pos)
        current_char = self.template[current_pos.offset - initial_offset]
        if current_char == self.escape_block \
                and (current_pos.offset - initial_offset + 1 < len_template
                     and self.template[current_pos.offset - initial_offset + 1] == self.escape_block_close):
            # end of block block
            current_pos = current_pos.next_char().next_char()
            continue_parse = False
        else:
            block_code += current_char
            current_pos = current_pos.next_char()
    # end of while

    compiled_block = block_code  # TODO: compile !
    parsed_block = ast.parse(block_code, self.filename, 'exec')
    ast.increment_lineno(parsed_block, start_pos.lpos)
    compiled_block = compile(parsed_block, self.filename, 'exec')
    self.ctemplate.append(Template.Block(self, compiled_block, start_pos, current_pos))
    return current_pos
def __init__(self, from_lex):
    source, filename, first_line = from_lex
    parsed_ast = ast.parse(source, filename)
    ast.increment_lineno(parsed_ast, first_line)
    code = compile(parsed_ast, filename=filename, mode='exec')
    self.source = source
    self.code = code
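# --- Added sketch (not from any project above): a minimal, self-contained
# demonstration of the pattern these helpers share. ast.increment_lineno is
# applied before compile() so tracebacks raised by exec() point back at the
# place the embedded source occupied in its surrounding file. The names
# compile_with_offset and '<embedded>' are illustrative only.
import ast
import traceback

def compile_with_offset(source, filename, first_line):
    tree = ast.parse(source, filename)
    ast.increment_lineno(tree, first_line - 1)   # shift every node's lineno
    return compile(tree, filename, 'exec')

if __name__ == '__main__':
    code = compile_with_offset("x = 1\nraise ValueError('boom')\n",
                               '<embedded>', first_line=10)
    try:
        exec(code, {})
    except ValueError:
        traceback.print_exc()   # frame reports '<embedded>', line 11 (not 2)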
def better_exec(script, globals_=None, locals_=None):
    parsed = import_expression.parse(script)
    base_function = "async def __evaluate_code(): pass"
    parsed_function = import_expression.parse(base_function)

    for node in parsed.body:
        ast.increment_lineno(node)

    def check_for_yield(payload):
        if isinstance(payload, (list, tuple)):
            for node_ in payload:
                if check_for_yield(node_):
                    return True
        if isinstance(payload, (ast.Yield, ast.YieldFrom)):
            return True
        if hasattr(payload, 'body'):
            for node_ in payload.body:
                if check_for_yield(node_):
                    return True
        if hasattr(payload, 'value'):
            if check_for_yield(payload.value):
                return True
        return False

    if not check_for_yield(parsed.body):
        insert_returns(parsed.body)

    parsed_function.body[0].body = parsed.body

    import_expression.exec(
        import_expression.compile(parsed_function, filename="<evaluator>", mode='exec'),
        globals_, locals_
    )
def _get_ast(func):
    if int(os.environ.get('NUMBA_FORCE_META_AST', 0)):
        func_def = decompile_func(func)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
    try:
        source = inspect.getsource(func)
    except IOError:
        return decompile_func(func)
    else:
        source = textwrap.dedent(source)
        # Split off decorators
        decorators = 0
        while not source.startswith('def'):  # decorator can have multiple lines
            decorator, sep, source = source.partition('\n')
            decorators += 1
        module_ast = ast.parse(source)

        # fix line numbering
        lineoffset = func.func_code.co_firstlineno + decorators
        ast.increment_lineno(module_ast, lineoffset)

        assert len(module_ast.body) == 1
        func_def = module_ast.body[0]
        _fix_ast(func_def)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
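# --- Added sketch of the decorator-stripping idea shared by the _get_ast
# variants in this file: drop leading '@decorator' lines before parsing, then
# compensate with ast.increment_lineno so the FunctionDef keeps its original
# location. Standalone illustration only; function_def_ast is not a name used
# by any snippet above.
import ast
import inspect
import textwrap

def function_def_ast(func):
    lines, start = inspect.getsourcelines(func)        # start is 1-based
    source = textwrap.dedent(''.join(lines))
    dropped = 0
    while not source.lstrip().startswith('def'):        # peel decorator lines
        _, _, source = source.partition('\n')
        dropped += 1
    module = ast.parse(source)
    # The `def` now sits at line 1 of `source`; in the original file it was
    # at line start + dropped.
    ast.increment_lineno(module, start + dropped - 1)
    return module.body[0]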
def function_to_ast(f):
    """ Obtain the source code of a Python function and create an AST.
        :param f: Python function.
        :return: A 4-tuple of (AST, function filename, function line-number,
                 source code as string).
    """
    try:
        src = inspect.getsource(f)
        src_file = inspect.getfile(f)
        _, src_line = inspect.findsource(f)
    # TypeError: X is not a module, class, method, function, traceback, frame,
    # or code object; OR OSError: could not get source code
    except (TypeError, OSError):
        # Try to import dill to obtain code from compiled functions
        try:
            import dill
            src = dill.source.getsource(f)
            src_file = '<interpreter>'
            src_line = 0
        except (ImportError, ModuleNotFoundError, TypeError, OSError):
            raise TypeError(
                'Cannot obtain source code for dace program. This may '
                'happen if you are using the "python" default '
                'interpreter. Please either use the "ipython" '
                'interpreter, a Jupyter or Colab notebook, or place '
                'the source code in a file and import it.')

    src_ast = ast.parse(_remove_outer_indentation(src))
    ast.increment_lineno(src_ast, src_line)

    return src_ast, src_file, src_line, src
def execute(self, source, lineno=None):
    oldstdout = sys.stdout
    oldstderr = sys.stderr
    sys.stdout = self.buffer
    sys.stderr = self.buffer
    try:
        tree = ast.parse(source, '<repl>', 'exec')
        if lineno:
            ast.increment_lineno(tree, lineno - 1)
        for node, etype in break_ast_to_exec_parts(tree):
            result = eval(compile(node, '<repl>', etype), {}, self.locals)
            if etype == 'eval':
                if result is not None:
                    self.write(repr(result) + '\n')
                self.locals['___'] = self.locals.get('__', None)
                self.locals['__'] = self.locals.get('_', None)
                self.locals['_'] = result
    except SystemExit:
        raise
    except SyntaxError:
        self.showsyntaxerror()
    except:
        self.showtraceback()
    finally:
        sys.stdout = oldstdout
        sys.stderr = oldstderr
def transformFunction(func: type_checking.Function, unitDescriptor):
    if 'nativeFunc' + str(id(unitDescriptor.nativeNode)) in func.__globals__ or \
            'nativeNode' + str(id(unitDescriptor.nativeNode)) in func.__globals__:
        raise ValueError(
            'virtual address is already being used, no duplications allowed')
    func.__globals__['nativeFunc' + str(id(unitDescriptor.nativeNode))] = \
        UnitNode.produceOutEdgeData
    func.__globals__['nativeNode' + str(id(unitDescriptor.nativeNode))] = \
        unitDescriptor.nativeNode
    astTree = ast.parse(inspect.getsource(func),
                        filename=func.__code__.co_filename,
                        mode='exec')
    FunctionTransformer(unitDescriptor).visit(astTree)
    ast.fix_missing_locations(astTree)
    ast.increment_lineno(astTree, func.__code__.co_firstlineno - 1)
    codeObject = compile(astTree,
                         filename=func.__code__.co_filename,
                         mode='exec')
    return types.FunctionType(codeObject.co_consts[0], func.__globals__,
                              func.__name__, func.__defaults__,
                              func.__closure__)
def try_subproc_toks(self, node, strip_expr=False):
    """Tries to parse the line of the node as a subprocess."""
    line = self.lines[node.lineno - 1]
    if self.mode == 'eval':
        mincol = len(line) - len(line.lstrip())
        maxcol = None
    else:
        mincol = min_col(node)
        maxcol = max_col(node)
        if mincol == maxcol:
            maxcol = find_next_break(line, mincol=mincol,
                                     lexer=self.parser.lexer)
        else:
            maxcol += 1
    spline = subproc_toks(line, mincol=mincol, maxcol=maxcol,
                          returnline=False, lexer=self.parser.lexer)
    if spline is None:
        return node
    try:
        newnode = self.parser.parse(spline, mode=self.mode)
        newnode = newnode.body
        if not isinstance(newnode, AST):
            # take the first (and only) Expr
            newnode = newnode[0]
        increment_lineno(newnode, n=node.lineno - 1)
        newnode.col_offset = node.col_offset
    except SyntaxError:
        newnode = node
    if strip_expr and isinstance(newnode, Expr):
        newnode = newnode.value
    return newnode
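# --- Added standalone sketch (hypothetical script contents) of the graft step
# in try_subproc_toks: a replacement statement parsed at line 1 is shifted with
# increment_lineno(n=...) and given the old node's col_offset so it reports the
# position of the statement it replaces.
import ast

module = ast.parse("a = 1\nb = undefined_name\n", "<script>")
old_node = module.body[1]                               # statement to replace
new_node = ast.parse("b = 42", "<script>").body[0]      # parsed at line 1
ast.increment_lineno(new_node, n=old_node.lineno - 1)
new_node.col_offset = old_node.col_offset
module.body[1] = new_node
exec(compile(module, "<script>", "exec"), {})           # no NameError now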
def infer(string, scope, lineno=None):
    tree = ast.parse(string, '<string>', 'eval')
    if lineno:
        ast.increment_lineno(tree, lineno - 1)
    return Evaluator().process(tree, scope)
def do_compile(self):
    from .impl import get_runtime
    src = remove_indent(inspect.getsource(self.func))
    tree = ast.parse(src)

    func_body = tree.body[0]
    func_body.decorator_list = []

    if get_runtime().print_preprocessed:
        import astor
        print('Before preprocessing:')
        print(astor.to_source(tree.body[0], indent_with=' '))

    visitor = ASTTransformer(is_kernel=False, is_classfunc=self.classfunc)
    visitor.visit(tree)
    ast.fix_missing_locations(tree)

    if get_runtime().print_preprocessed:
        import astor
        print('After preprocessing:')
        print(astor.to_source(tree.body[0], indent_with=' '))

    ast.increment_lineno(tree, inspect.getsourcelines(self.func)[1] - 1)

    local_vars = {}
    # frame = inspect.currentframe().f_back
    # global_vars = dict(frame.f_globals, **frame.f_locals)
    import copy
    global_vars = copy.copy(self.func.__globals__)
    exec(
        compile(tree,
                filename=inspect.getsourcefile(self.func),
                mode='exec'), global_vars, local_vars)
    self.compiled = local_vars[self.func.__name__]
def test_rst(path):
    with open(path) as f:
        rst = f.read()

    doctree = publish_doctree(rst)

    ast_parts = []
    for block in doctree.traverse(condition=is_code_block):
        raw_text = block.astext()
        num_lines = raw_text.count("\n") + 1
        node = ast.parse(raw_text, path)
        ast.increment_lineno(node, block.line - num_lines - 1)
        ast_parts.extend(node.body)

    if sys.version_info >= (3, 8):
        mod = ast.Module(body=ast_parts, type_ignores=[])
    else:
        mod = ast.Module(body=ast_parts)

    # Pytest 5 is Python 3 only and there are some API differences we need to
    # consider
    if get_pytest_version_info() < (5,):
        rewrite_asserts(mod, None)
    else:
        rewrite_asserts(mod, rst)

    exec(compile(mod, path, "exec"), {})
def _get_ast(func):
    if os.environ.get('NUMBA_FORCE_META_AST'):
        func_def = decompile_func(func)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
    try:
        source = inspect.getsource(func)
    except IOError:
        return decompile_func(func)
    else:
        source = textwrap.dedent(source)
        if source.startswith('@'):
            decorator, sep, source = source.partition('\n')
            while not source.startswith(
                    'def'):  # decorator can have multiple lines
                decorator, sep, source = source.partition('\n')
        module_ast = ast.parse(source)

        # fix line numbering
        lineoffset = func.func_code.co_firstlineno
        ast.increment_lineno(module_ast, lineoffset)

        assert len(module_ast.body) == 1
        func_def = module_ast.body[0]
        _fix_ast(func_def)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
def do_compile(self):
    src = remove_indent(oinspect.getsource(self.func))
    tree = ast.parse(src)

    func_body = tree.body[0]
    func_body.decorator_list = []

    if impl.get_runtime().print_preprocessed:
        import astor
        print('Before preprocessing:')
        print(astor.to_source(tree.body[0], indent_with=' '))

    visitor = ASTTransformer(is_kernel=False, func=self)
    visitor.visit(tree)
    ast.fix_missing_locations(tree)

    if impl.get_runtime().print_preprocessed:
        import astor
        print('After preprocessing:')
        print(astor.to_source(tree.body[0], indent_with=' '))

    ast.increment_lineno(tree, oinspect.getsourcelines(self.func)[1] - 1)

    local_vars = {}
    global_vars = _get_global_vars(self.func)

    exec(
        compile(tree,
                filename=oinspect.getsourcefile(self.func),
                mode='exec'), global_vars, local_vars)
    self.compiled = local_vars[self.func.__name__]
def _get_ast(func):
    if os.environ.get('NUMBA_FORCE_META_AST'):
        func_def = decompile_func(func)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
    try:
        source = inspect.getsource(func)
    except IOError:
        return decompile_func(func)
    else:
        if source.lstrip().startswith('@'):
            decorator, sep, source = source.partition('\n')
        source = textwrap.dedent(source)
        module_ast = ast.parse(source)

        # fix line numbering
        lineoffset = func.func_code.co_firstlineno + 1
        ast.increment_lineno(module_ast, lineoffset)

        assert len(module_ast.body) == 1
        func_def = module_ast.body[0]
        _fix_ast(func_def)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
def _parse_escape_inline(self, start_pos, initial_offset):
    assert self.template[start_pos.offset - initial_offset] == self.escape_inline

    len_template = len(self.template)
    current_pos = start_pos.next_char()
    inline_code = ""
    continue_parse = True
    while continue_parse:
        if current_pos.offset - initial_offset >= len_template:
            raise TemplateCompileError(
                "Unexpected end of template within inline block (missing closing {})".format(
                    self.escape_inline),
                self.template, start_pos, current_pos)
        current_char = self.template[current_pos.offset - initial_offset]
        if current_char == self.escape_inline and (current_pos.offset - initial_offset + 1 >= len_template
                                                   or self.template[current_pos.offset - initial_offset + 1] != self.escape_inline):
            # end of inline block
            current_pos = current_pos.next_char()
            continue_parse = False
        else:
            inline_code += current_char
            current_pos = current_pos.next_char()
    # end of while

    compiled_inline = inline_code  # TODO: compile !
    parsed_inline = ast.parse(inline_code, self.filename, 'eval')
    ast.increment_lineno(parsed_inline, start_pos.lpos)
    compiled_inline = compile(parsed_inline, self.filename, 'eval')
    self.ctemplate.append(Template.Inline(self, compiled_inline, start_pos, current_pos))
    return current_pos
def _compile(self, string):
    """Compile the input string"""
    # Call compile() directly to retain control over __future__ flags.
    tree = compile(string, self.filename, 'eval', ast.PyCF_ONLY_AST)
    ast.increment_lineno(tree, self.line_offset)
    return compile(tree, self.filename, 'eval')
def syncit(async_func):
    # Get source of async function as string
    source = inspect.getsource(async_func)
    # Remove indentation if it's defined as a method
    source = textwrap.dedent(source)
    # Get lineno where function is defined
    lineno = inspect.getsourcelines(async_func)[1]
    lineno_increment = lineno - 1
    func_name = async_func.__name__ + '__sync'
    transformer = Transformer(func_name)
    tree = ast.parse(source)
    # transform the tree
    try:
        tree = transformer.visit(tree)
    except NotEnclosedAwait as e:
        raise AssertionError(
            "Encountered await expression not enclosed in `if %s:` block in "
            "`%s` at lineno %s" % (
                IS_ASYNC_MODE,
                async_func.__name__,
                e.expr_lineno + lineno_increment)
        )
    ast.fix_missing_locations(tree)
    ast.increment_lineno(tree, lineno_increment)
    filename = inspect.getfile(async_func)
    module_globals = inspect.getmodule(async_func).__dict__
    exec(compile(tree, filename=filename, mode='exec'), module_globals)
    sync_func = eval(func_name, module_globals)
    sync_func.async_call = async_func
    del module_globals[func_name]
    return sync_func
async def eval_stmts(stmts, env=None):
    """
    Evaluates input.
    If the last statement is an expression, that is the return value.

    >>> from asyncio import run
    >>> run(eval_stmts("1+1"))
    2
    >>> ctx = {}
    >>> run(eval_stmts("ctx['foo'] = 1", {"ctx": ctx}))
    >>> ctx['foo']
    1
    >>> run(eval_stmts('''
    ... async def f():
    ...     return 42
    ...
    ... await f()'''))
    42
    """
    parsed_stmts = ast.parse(stmts)

    fn_name = "_eval_expr"
    fn = f"async def {fn_name}(): pass"
    parsed_fn = ast.parse(fn)

    for node in parsed_stmts.body:
        ast.increment_lineno(node)

    insert_returns(parsed_stmts.body)

    parsed_fn.body[0].body = parsed_stmts.body
    exec(compile(parsed_fn, filename="<ast>", mode="exec"), env)
    return await eval(f"{fn_name}()", env)
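# --- Added, self-contained sketch of the "wrap statements in an async def"
# trick used by eval_stmts and better_exec above. insert_returns here is a
# local stand-in for the helper those snippets import; the wrapper name
# _runner is illustrative.
import ast
import asyncio

def insert_returns(body):
    # Turn a trailing expression statement into `return <expr>`.
    if body and isinstance(body[-1], ast.Expr):
        body[-1] = ast.copy_location(ast.Return(value=body[-1].value), body[-1])

async def async_eval(source, env):
    wrapper = ast.parse("async def _runner(): pass")
    parsed = ast.parse(source)
    for node in parsed.body:
        ast.increment_lineno(node)      # account for the wrapper's first line
    insert_returns(parsed.body)
    wrapper.body[0].body = parsed.body
    exec(compile(wrapper, "<async_eval>", "exec"), env)
    return await env["_runner"]()

if __name__ == '__main__':
    print(asyncio.run(async_eval("x = 20\nx + 22", {})))   # -> 42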
def func(foo):
    from .impl import get_runtime
    src = remove_indent(inspect.getsource(foo))
    tree = ast.parse(src)

    func_body = tree.body[0]
    func_body.decorator_list = []

    visitor = ASTTransformer(is_kernel=False)
    visitor.visit(tree)
    ast.fix_missing_locations(tree)

    if get_runtime().print_preprocessed:
        import astor
        print('After preprocessing:')
        print(astor.to_source(tree.body[0], indent_with=' '))

    ast.increment_lineno(tree, inspect.getsourcelines(foo)[1] - 1)

    frame = inspect.currentframe().f_back
    exec(
        compile(tree, filename=inspect.getsourcefile(foo), mode='exec'),
        dict(frame.f_globals, **frame.f_locals), locals())
    compiled = locals()[foo.__name__]
    return compiled
def exec_python(self, source, filename='<unknown>', line_pos=None):
    code = ast.parse(source, filename, 'exec')
    if line_pos is not None:
        ast.increment_lineno(code, line_pos)
    ccode = compile(code, filename, 'exec')
    exec(ccode, self.globals)
def execute_code_block(src_file, code_block, lineno, example_globals,
                       block_vars, gallery_conf):
    """Executes the code block of the example file"""
    time_elapsed = 0

    # If example is not suitable to run, skip executing its blocks
    if not block_vars['execute_script']:
        return '', time_elapsed

    plt.close('all')
    cwd = os.getcwd()
    # Redirect output to stdout and
    orig_stdout = sys.stdout
    src_file = block_vars['src_file']

    # First cd in the original example dir, so that any file
    # created by the example get created in this directory
    my_stdout = MixedEncodingStringIO()
    os.chdir(os.path.dirname(src_file))
    sys.stdout = my_stdout
    try:
        code_ast = ast.parse(code_block, src_file)
        ast.increment_lineno(code_ast, lineno - 1)
        t_start = time()
        # don't use unicode_literals at the top of this file or you get
        # nasty errors here on Py2.7
        exec(compile(code_ast, src_file, 'exec'), example_globals)
        time_elapsed = time() - t_start
    except Exception:
        sys.stdout = orig_stdout
        except_rst = handle_exception(sys.exc_info(), src_file, block_vars,
                                      gallery_conf)
        code_output = u"\n{0}\n\n\n\n".format(except_rst)
    else:
        sys.stdout = orig_stdout
        os.chdir(cwd)
        my_stdout = my_stdout.getvalue().strip().expandtabs()
        if my_stdout:
            stdout = CODE_OUTPUT.format(indent(my_stdout, u' ' * 4))
            logger.verbose('Output from %s', src_file, color='brown')
            logger.verbose(my_stdout)
        else:
            stdout = ''
        images_rst, fig_num = save_figures(block_vars['image_path'],
                                           block_vars['fig_count'],
                                           gallery_conf)

        block_vars['fig_count'] += fig_num
        code_output = u"\n{0}\n\n{1}\n\n".format(images_rst, stdout)
    finally:
        os.chdir(cwd)
        sys.stdout = orig_stdout

    return code_output, time_elapsed
def import_global_function(self, f):
    """Imports a global function.

    This facility is not general and does not allow customization of the
    containing environment, method import, etc.

    Most errors are emitted via the MLIR context's diagnostic infrastructure,
    but errors related to extracting source, etc are raised directly.

    Args:
      f: The python callable.
    """
    ic = self._ic
    target = self._config.target_factory(ic)
    filename = inspect.getsourcefile(f)
    source_lines, start_lineno = inspect.getsourcelines(f)
    source = "".join(source_lines)
    source = textwrap.dedent(source)
    ast_root = ast.parse(source, filename=filename)
    ast.increment_lineno(ast_root, start_lineno - 1)
    ast_fd = ast_root.body[0]

    # Define the function.
    # TODO: Much more needs to be done here (arg/result mapping, etc)
    logging.debug(":::::::")
    logging.debug("::: Importing global function {}:\n{}", ast_fd.name,
                  ast.dump(ast_fd, include_attributes=True))

    # TODO: VERY BAD: Assumes all positional params.
    f_signature = inspect.signature(f)
    f_params = f_signature.parameters
    f_input_types = [
        self._resolve_signature_annotation(target, p.annotation)
        for p in f_params.values()
    ]
    f_return_type = self._resolve_signature_annotation(
        target, f_signature.return_annotation)
    ir_f_type = _ir.FunctionType.get(f_input_types, [f_return_type],
                                     context=ic.context)

    ic.set_file_line_col(filename, ast_fd.lineno, ast_fd.col_offset)
    ic.insert_before_terminator(ic.module.body)
    ir_f, entry_block = ic.FuncOp(ast_fd.name,
                                  ir_f_type,
                                  create_entry_block=True)
    ic.insert_end_of_block(entry_block)
    env = self._create_const_global_env(f,
                                        parameter_bindings=zip(
                                            f_params.keys(),
                                            entry_block.arguments),
                                        target=target)
    fctx = FunctionContext(ic=ic, ir_f=ir_f, filename=filename,
                           environment=env)
    fdimport = FunctionDefImporter(fctx, ast_fd)
    fdimport.import_body()
    return ir_f
def try_subproc_toks(self, node, strip_expr=False):
    """Tries to parse the line of the node as a subprocess."""
    line, nlogical, idx = get_logical_line(self.lines, node.lineno - 1)
    if self.mode == "eval":
        mincol = len(line) - len(line.lstrip())
        maxcol = None
    else:
        mincol = max(min_col(node) - 1, 0)
        maxcol = max_col(node)
        if mincol == maxcol:
            maxcol = find_next_break(line, mincol=mincol, lexer=self.parser.lexer)
        elif nlogical > 1:
            maxcol = None
        elif maxcol < len(line) and line[maxcol] == ";":
            pass
        else:
            maxcol += 1
    spline = subproc_toks(
        line,
        mincol=mincol,
        maxcol=maxcol,
        returnline=False,
        lexer=self.parser.lexer,
    )
    if spline is None or spline != "![{}]".format(line[mincol:maxcol].strip()):
        # failed to get something consistent, try greedy wrap
        spline = subproc_toks(
            line,
            mincol=mincol,
            maxcol=maxcol,
            returnline=False,
            lexer=self.parser.lexer,
            greedy=True,
        )
    if spline is None:
        return node
    try:
        newnode = self.parser.parse(
            spline,
            mode=self.mode,
            filename=self.filename,
            debug_level=(self.debug_level > 2),
        )
        newnode = newnode.body
        if not isinstance(newnode, AST):
            # take the first (and only) Expr
            newnode = newnode[0]
        increment_lineno(newnode, n=node.lineno - 1)
        newnode.col_offset = node.col_offset
        if self.debug_level > 1:
            msg = "{0}:{1}:{2}{3} - {4}\n" "{0}:{1}:{2}{3} + {5}"
            mstr = "" if maxcol is None else ":" + str(maxcol)
            msg = msg.format(self.filename, node.lineno, mincol, mstr, line, spline)
            print(msg, file=sys.stderr)
    except SyntaxError:
        newnode = node
    if strip_expr and isinstance(newnode, Expr):
        newnode = newnode.value
    return newnode
def materialize(self, key=None, args=None, extra_frame_backtrace=-1):
    if key is None:
        key = (self.func, 0)
    if not pytaichi.materialized:
        pytaichi.materialize()
    if key in self.compiled_functions:
        return
    grad_suffix = ""
    if self.is_grad:
        grad_suffix = "_grad"
    kernel_name = "{}_{}_{}".format(self.func.__name__, key[1], grad_suffix)
    print("Compiling kernel {}...".format(kernel_name))

    src = remove_indent(inspect.getsource(self.func))
    tree = ast.parse(src)
    if pytaichi.print_preprocessed:
        import astor
        print(astor.to_source(tree.body[0]))

    func_body = tree.body[0]
    func_body.decorator_list = []

    visitor = ASTTransformer(
        excluded_paremeters=self.template_slot_locations)

    visitor.visit(tree)
    ast.fix_missing_locations(tree)

    if pytaichi.print_preprocessed:
        import astor
        print(astor.to_source(tree.body[0], indent_with=' '))

    ast.increment_lineno(tree, inspect.getsourcelines(self.func)[1] - 1)

    pytaichi.inside_kernel = True
    frame = inspect.currentframe()
    for t in range(extra_frame_backtrace + 2):
        frame = frame.f_back
    globals = dict(frame.f_globals, **frame.f_locals)

    # inject template parameters into globals
    for i in self.template_slot_locations:
        template_var_name = self.argument_names[i]
        globals[template_var_name] = args[i]

    exec(
        compile(tree, filename=inspect.getsourcefile(self.func), mode='exec'),
        globals, locals())
    pytaichi.inside_kernel = False
    compiled = locals()[self.func.__name__]

    taichi_kernel = taichi_lang_core.create_kernel(kernel_name, self.is_grad)
    taichi_kernel = taichi_kernel.define(lambda: compiled())

    assert key not in self.compiled_functions
    self.compiled_functions[key] = self.get_function_body(taichi_kernel)
def get_python_ast(self, src, f):
    """Generates the Python AST."""
    tree = ast.parse(src)
    actual_lineno = inspect.getsourcelines(f)[-1]
    ast.increment_lineno(tree, actual_lineno)
    assert len(tree.body) == 1
    return tree
def _get_function_ast(f) -> Tuple[str, ast.AST]:
    filename = inspect.getsourcefile(f)
    source_lines, start_lineno = inspect.getsourcelines(f)
    source = "".join(source_lines)
    source = textwrap.dedent(source)
    ast_root = ast.parse(source, filename=filename)
    ast.increment_lineno(ast_root, start_lineno - 1)
    return filename, ast_root
def try_subproc_toks(self, node, strip_expr=False):
    """Tries to parse the line of the node as a subprocess."""
    line, nlogical, idx = get_logical_line(self.lines, node.lineno - 1)
    if self.mode == 'eval':
        mincol = len(line) - len(line.lstrip())
        maxcol = None
    else:
        mincol = max(min_col(node) - 1, 0)
        maxcol = max_col(node)
        if mincol == maxcol:
            maxcol = find_next_break(line, mincol=mincol,
                                     lexer=self.parser.lexer)
        elif nlogical > 1:
            maxcol = None
        elif maxcol < len(line) and line[maxcol] == ';':
            pass
        else:
            maxcol += 1
    spline = subproc_toks(line, mincol=mincol, maxcol=maxcol,
                          returnline=False, lexer=self.parser.lexer)
    if spline is None or len(spline) < len(line[mincol:maxcol]) + 2:
        # failed to get something consistent, try greedy wrap
        # The +2 comes from "![]" being length 3, minus 1 since maxcol
        # is one beyond the total length for slicing
        spline = subproc_toks(line, mincol=mincol, maxcol=maxcol,
                              returnline=False, lexer=self.parser.lexer,
                              greedy=True)
    if spline is None:
        return node
    try:
        newnode = self.parser.parse(spline, mode=self.mode,
                                    filename=self.filename,
                                    debug_level=(self.debug_level > 2))
        newnode = newnode.body
        if not isinstance(newnode, AST):
            # take the first (and only) Expr
            newnode = newnode[0]
        increment_lineno(newnode, n=node.lineno - 1)
        newnode.col_offset = node.col_offset
        if self.debug_level > 1:
            msg = ('{0}:{1}:{2}{3} - {4}\n'
                   '{0}:{1}:{2}{3} + {5}')
            mstr = '' if maxcol is None else ':' + str(maxcol)
            msg = msg.format(self.filename, node.lineno, mincol, mstr,
                             line, spline)
            print(msg, file=sys.stderr)
    except SyntaxError:
        newnode = node
    if strip_expr and isinstance(newnode, Expr):
        newnode = newnode.value
    return newnode
def visit_Return(self, node):
    assign = ast.Assign(targets=[ast.Name(id='y', ctx=ast.Store())],
                        value=ast.Num(8))
    ast.increment_lineno(node, 1)
    ast.copy_location(assign, node)
    ast.fix_missing_locations(assign)
    # assign.col_offset = 8
    # lists = list(ast.iter_child_nodes(assign))
    # print lists
    return assign
def eval_python_expr(self, expr, filename='<unknown>', line_pos=None):
    code = ast.parse(expr, filename, 'eval')
    if line_pos is not None:
        ast.increment_lineno(code, line_pos)
    ccode = compile(code, filename, 'eval')
    ret = eval(ccode, self.globals)
    return ret
def add_code(self, code: str, offset: int) -> None:
    """add a code section with a line offset"""
    try:
        module = ast.parse(code, "<unknown>", mode="exec")
    except SyntaxError as e:
        # set correct lineno and reraise
        if e.lineno is not None:
            e.lineno += offset
        raise
    ast.increment_lineno(module, offset)
    self.sections.append(module)
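# --- Added standalone illustration of the SyntaxError handling in add_code:
# on failure, the reported line is shifted by the same offset increment_lineno
# would have applied on success, so the error points into the larger document.
# parse_section and '<section>' are illustrative names.
import ast

def parse_section(code, offset):
    try:
        module = ast.parse(code, "<section>", mode="exec")
    except SyntaxError as e:
        if e.lineno is not None:
            e.lineno += offset
        raise
    ast.increment_lineno(module, offset)
    return module

try:
    parse_section("def broken(:\n    pass\n", offset=100)
except SyntaxError as e:
    print(e.lineno)   # 101: the line within the surrounding document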
def register(name, handler, mask=None, filename=None, lineno=None, data=None):
    """Register an Event handler"""

    if data is not None and data.getVar("BB_CURRENT_MC"):
        mc = data.getVar("BB_CURRENT_MC")
        name = '%s%s' % (mc.replace('-', '_'), name)

    # already registered
    if name in _handlers:
        if data is not None:
            bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
            bbhands_mc.add(name)
            data.setVar("__BBHANDLERS_MC", bbhands_mc)
        return AlreadyRegistered

    if handler is not None:
        # handle string containing python code
        if isinstance(handler, str):
            tmp = "def %s(e):\n%s" % (name, handler)
            try:
                code = bb.methodpool.compile_cache(tmp)
                if not code:
                    if filename is None:
                        filename = "%s(e)" % name
                    code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
                    if lineno is not None:
                        ast.increment_lineno(code, lineno - 1)
                    code = compile(code, filename, "exec")
                    bb.methodpool.compile_cache_add(tmp, code)
            except SyntaxError:
                logger.error("Unable to register event handler '%s':\n%s", name,
                             ''.join(traceback.format_exc(limit=0)))
                _handlers[name] = noop
                return
            env = {}
            bb.utils.better_exec(code, env)
            func = bb.utils.better_eval(name, env)
            _handlers[name] = func
        else:
            _handlers[name] = handler

        if not mask or '*' in mask:
            _catchall_handlers[name] = True
        else:
            for m in mask:
                if _event_handler_map.get(m, None) is None:
                    _event_handler_map[m] = {}
                _event_handler_map[m][name] = True

        if data is not None:
            bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
            bbhands_mc.add(name)
            data.setVar("__BBHANDLERS_MC", bbhands_mc)

        return Registered
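# --- Added minimal sketch (assumed names only) of the string-handler pattern
# in register(): a handler body taken from another file is wrapped in a
# synthetic `def`, and increment_lineno re-aligns the body with the line it
# occupied in that file before the wrapper was added.
import ast

def make_handler(name, body, filename, first_body_line):
    wrapped = "def %s(e):\n%s" % (name, body)
    tree = compile(wrapped, filename, "exec", ast.PyCF_ONLY_AST)
    # The body starts at line 2 of the wrapper; shift it to match the file.
    ast.increment_lineno(tree, first_body_line - 2)
    env = {}
    exec(compile(tree, filename, "exec"), env)
    return env[name]

handler = make_handler("on_event", "    print('got', e)\n", "<recipe>", 12)
handler("ping")   # prints: got ping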
def rewrite_assertion(test: Test) -> Test:
    # Get the old code and code object
    code_lines, line_no = inspect.getsourcelines(test.fn)

    code = "".join(code_lines)
    indents = textwrap._leading_whitespace_re.findall(code)
    col_offset = len(indents[0]) if len(indents) > 0 else 0
    code = textwrap.dedent(code)
    code_obj = test.fn.__code__

    # Rewrite the AST of the code
    tree = ast.parse(code)
    ast.increment_lineno(tree, line_no - 1)

    new_tree = RewriteAssert().visit(tree)

    # We dedented the code so that it was a valid tree, now re-apply the indent
    for child in ast.walk(new_tree):
        if hasattr(child, "col_offset"):
            child.col_offset = getattr(child, "col_offset", 0) + col_offset

    # Reconstruct the test function
    new_mod_code_obj = compile(new_tree, code_obj.co_filename, "exec")

    # TODO: This probably isn't correct for nested closures
    clo_glob = {}
    if test.fn.__closure__:
        clo_glob = test.fn.__closure__[0].cell_contents.__globals__

    # Look through the new module,
    # find the code object with the same name as the original code object,
    # and build a new function with the injected assert functions added to the global namespace.
    # Filtering on the code object name prevents finding other kinds of code objects,
    # like lambdas stored directly in test function arguments.
    for const in new_mod_code_obj.co_consts:
        if isinstance(const, types.CodeType) and const.co_name == code_obj.co_name:
            new_test_func = types.FunctionType(
                const,
                {**assert_func_namespace, **test.fn.__globals__, **clo_glob},
                test.fn.__name__,
                test.fn.__defaults__,
            )
            new_test_func.ward_meta = test.fn.ward_meta
            return Test(
                **{k: vars(test)[k] for k in vars(test) if k != "fn"},
                fn=new_test_func,
            )
    return test
def load_module(self, fullname: str) -> types.ModuleType:
    self.lines[fullname] = ''
    """import a notebook as a module"""
    path = find_notebook(fullname, self.path)
    if path is None:
        raise FileNotFoundError(f"Can't find {fullname}")

    # print ("importing Jupyter notebook from %s" % path)

    # load the notebook object
    with io.open(path, 'r', encoding='utf-8') as f:
        nb = read(f, 4)

    # create the module and add it to sys.modules
    # if name in sys.modules:
    #     return sys.modules[name]
    mod = types.ModuleType(fullname)
    mod.__file__ = path
    mod.__loader__ = self
    mod.__dict__['get_ipython'] = get_ipython
    sys.modules[fullname] = mod

    # extra work to ensure that magics that would affect the user_ns
    # actually affect the notebook module's ns
    save_user_ns = self.shell.user_ns
    self.shell.user_ns = mod.__dict__

    codecells = [
        self.shell.input_transformer_manager.transform_cell(cell.source)
        for cell in nb.cells if cell.cell_type == 'code'
    ]
    source = [code for code in codecells if do_import(code)]
    lno = 1
    try:
        for code in source:
            parsed = ast.parse(code, filename=path, mode='exec')
            ast.increment_lineno(parsed, n=lno - 1)
            exec(compile(parsed, path, 'exec'), mod.__dict__)
            lno += len(code.split('\n'))
        self.lines[fullname] = '\n'.join(source)
        p = len(self.lines[fullname].split('\n')) + 1
        assert lno == p
    finally:
        self.shell.user_ns = save_user_ns
    data = self.lines[fullname]
    linecache.cache[path] = (
        len(data), None,  # type: ignore
        [line + '\n' for line in data.splitlines()],
        fullname)
    return mod
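# --- Added sketch of the running-offset idea in load_module: each chunk is
# parsed on its own, then shifted by the number of lines already executed so
# all chunks appear to live in one continuous pseudo-file. Chunk contents and
# the '<cells>' filename are illustrative.
import ast

chunks = ["x = 1\ny = 2\n", "z = x + y\nprint(z)\n"]
namespace = {}
next_line = 1
for chunk in chunks:
    tree = ast.parse(chunk, "<cells>", mode="exec")
    ast.increment_lineno(tree, n=next_line - 1)
    exec(compile(tree, "<cells>", "exec"), namespace)
    next_line += chunk.count("\n")
print(next_line)   # 5: one past the last line of the combined source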
def compile(self, f, types):
    self.set_base_attr(f)
    baseline = f.func_code.co_firstlineno
    self.graphs = []
    src = sap.util.get_func_code(f)
    body = ast.parse(src).body[0]
    self.func_def = body
    ast.increment_lineno(body, baseline - body.lineno)
    ast.fix_missing_locations(body)
    return self.visit(body, f, types)
def compile_offset(source, filename='<string>', lineoffset=0):
    """Compile the python source and adjust its line numbers by lineoffset."""
    try:
        compiled = compile(source, filename, 'exec', ast.PyCF_ONLY_AST)
    except SyntaxError as exc:
        _syntaxerror_offset(exc, lineoffset)
        raise
    if lineoffset:
        ast.increment_lineno(compiled, lineoffset)
    return compile(compiled, filename, 'exec', dont_inherit=True)
def __init__(self, fn, **extravars):
    self.fn = fn
    self.extravars = extravars
    #
    if fn.__closure__:
        for freevar, cell in zip(fn.__code__.co_freevars, fn.__closure__):
            if freevar not in self.extravars:
                self.extravars[freevar] = cell.cell_contents
    #
    makesrc = self._create_src()
    self.tree = ast.parse(makesrc)
    ast.increment_lineno(self.tree, fn.__code__.co_firstlineno - 2)
def parse_snippet(source, filename, mode, flags, firstlineno):
    args = filename, mode, flags | ast.PyCF_ONLY_AST, True
    try:
        code = compile('\n' + source, *args)
    except IndentationError:
        code = compile('with 0:\n' + source, *args)
        code.body = code.body[0].body
    ast.increment_lineno(code, firstlineno - 2)
    return code
def transform(fn, interact):
    src = dedent(inspect.getsource(fn))
    comments = {}
    for tok in tokenize.tokenize(readline_mock(src)):
        if tok.type == tokenize.COMMENT:
            if tok.line.strip().startswith("#"):
                line = tok.end[0]
                comments[line + 1] = tok.string[1:].strip()
                if line in comments:
                    comments[line + 1] = (
                        comments[line] + "\n" + comments[line + 1]
                    )
                    del comments[line]
    filename = inspect.getsourcefile(fn)
    tree = ast.parse(src, filename)
    tree = tree.body[0]
    assert isinstance(tree, ast.FunctionDef)
    tree.decorator_list = []
    transformer = PteraTransformer(tree, comments)
    new_tree = transformer.result
    ast.fix_missing_locations(new_tree)
    _, lineno = inspect.getsourcelines(fn)
    ast.increment_lineno(new_tree, lineno - 1)
    new_fn = compile(
        ast.Module(body=[new_tree], type_ignores=[]), filename, "exec"
    )
    glb = fn.__globals__
    glb["__ptera_interact"] = interact
    glb["__ptera_ABSENT"] = ABSENT
    exec(new_fn, glb, glb)
    state = {
        k: eval(compile(ast.Expression(v), filename, "eval"), glb, glb)
        for k, v in transformer.defaults.items()
    }
    annotations = {
        k: eval(compile(ast.Expression(v), filename, "eval"), glb, glb)
        for k, v in transformer.annotated.items()
    }
    fname = fn.__name__
    actual_fn = glb[fname]
    all_vars = transformer.used | transformer.assigned
    state_obj = state_class(fname, all_vars, transformer.vardoc, annotations)(state)
    # The necessary globals may not yet be set, so we create a "PreState" that
    # will be filled in whenever we first need to fetch the state.
    state_obj = PreState(state=state_obj, names=transformer.external, glbls=glb)
    return actual_fn, state_obj
def parseSourceCodeToAst(source_code, filename, line_offset):
    # Workaround: ast.parse cannot cope with some situations where a file is not
    # terminated by a new line.
    if not source_code.endswith("\n"):
        source_code = source_code + "\n"

    body = ast.parse(source_code, filename)
    assert getKind(body) == "Module"

    if line_offset > 0:
        ast.increment_lineno(body, line_offset)

    return body
def optimize(fn):
    source, filename, lineno = get_source(fn)
    module = ast.parse(source, filename)
    assert isinstance(module, ast.Module)
    tree, = module.body
    assert isinstance(tree, ast.FunctionDef)
    strip_decorators(tree.decorator_list, optimize)
    ast.increment_lineno(tree, lineno - 1)
    module = Optimizer().visit(module)
    ast.fix_missing_locations(module)
    res = {}
    eval(compile(module, filename, 'exec'), globals(), res)
    v, = res.values()
    return v
def parse_snippet(source, filename, mode, flags, firstlineno, privateprefix_ignored=None):
    """ Like ast.parse, but accepts indented code snippet with a line number offset. """
    args = filename, mode, flags | ast.PyCF_ONLY_AST, True
    prefix = '\n'
    try:
        a = compile(prefix + source, *args)
    except IndentationError:
        # Already indented? Wrap with dummy compound statement
        prefix = 'with 0:\n'
        a = compile(prefix + source, *args)
        # peel wrapper
        a.body = a.body[0].body
    ast.increment_lineno(a, firstlineno - 2)
    return a
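# --- Added usage sketch for the parse_snippet pattern: an indented snippet is
# wrapped in a dummy `with 0:` block so it parses, the wrapper is peeled off,
# and increment_lineno maps the nodes back to where the snippet came from.
# The snippet text and the starting line 41 are made up for illustration.
import ast

indented = "    x = 1\n    y = x + 1\n"
try:
    tree = compile("\n" + indented, "<snippet>", "exec", ast.PyCF_ONLY_AST, True)
except IndentationError:
    tree = compile("with 0:\n" + indented, "<snippet>", "exec",
                   ast.PyCF_ONLY_AST, True)
    tree.body = tree.body[0].body           # peel the dummy `with` wrapper
ast.increment_lineno(tree, 41 - 2)          # snippet really started at line 41
print([(type(n).__name__, n.lineno) for n in tree.body])   # lines 41 and 42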
def __init__(self, func, sandbox):
    self.path = func.func_code.co_filename
    self.name = func.func_name

    code = func.func_code
    firstlineno = code.co_firstlineno
    lines = sandbox._current_source.splitlines(True)
    lines = inspect.getblock(lines[firstlineno - 1:])

    # The code lines we get out of inspect.getsourcelines look like
    #   @template
    #   def Template(*args, **kwargs):
    #       VAR = 'value'
    #       ...
    func_ast = ast.parse(''.join(lines), self.path)

    # Remove decorators
    func_ast.body[0].decorator_list = []

    # Adjust line numbers accordingly
    ast.increment_lineno(func_ast, firstlineno - 1)

    # When using a custom dictionary for function globals/locals, Cpython
    # actually never calls __getitem__ and __setitem__, so we need to
    # modify the AST so that accesses to globals are properly directed
    # to a dict.
    self._global_name = b'_data'  # AST wants str for this, not unicode
    # In case '_data' is a name used for a variable in the function code,
    # prepend more underscores until we find an unused name.
    while (self._global_name in code.co_names or
            self._global_name in code.co_varnames):
        self._global_name += '_'
    func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast)

    # Execute the rewritten code. That code now looks like:
    #   def Template(*args, **kwargs):
    #       _data['VAR'] = 'value'
    #       ...
    # The result of executing this code is the creation of a 'Template'
    # function object in the global namespace.
    glob = {'__builtins__': sandbox._builtins}
    func = types.FunctionType(
        compile(func_ast, self.path, 'exec'),
        glob,
        self.name,
        func.func_defaults,
        func.func_closure,
    )
    func()

    self._func = glob[self.name]
def test_increment_lineno(self):
    src = ast.parse('1 + 1', mode='eval')
    self.assertEqual(ast.increment_lineno(src, n=3), src)
    self.assertEqual(ast.dump(src, include_attributes=True),
        'Expression(body=BinOp(left=Num(n=1, lineno=4, col_offset=0), '
        'op=Add(), right=Num(n=1, lineno=4, col_offset=4), lineno=4, '
        'col_offset=0))'
    )
    # issue10869: do not increment lineno of root twice
    src = ast.parse('1 + 1', mode='eval')
    self.assertEqual(ast.increment_lineno(src.body, n=3), src.body)
    self.assertEqual(ast.dump(src, include_attributes=True),
        'Expression(body=BinOp(left=Num(n=1, lineno=4, col_offset=0), '
        'op=Add(), right=Num(n=1, lineno=4, col_offset=4), lineno=4, '
        'col_offset=0))'
    )
def translate(self, source):
    parsed_code = ast.parse(source)
    safe_loops = self.get_safe_loops_fn(parsed_code)
    if not safe_loops:
        print("No safe loops found")
    print(safe_loops)
    print(safe_loops[0].all_statements)
    print(ast.dump(parsed_code))
    transformer = self.Transformer(ast.increment_lineno(parsed_code, 100), safe_loops)
    transformed_tree = transformer.transform_tree()
    # transformed_tree.lineno = 1
    # transformed_tree.col_offset = 0
    print(type(transformed_tree))
    transformed_tree = ast.fix_missing_locations(transformed_tree)
    # print(ast.dump(transformed_tree))
    # for node in ast.walk(transformed_tree):
    #     if isinstance(node, ast.expr) or isinstance(node, ast.stmt):
    #         print(node.lineno)
    #         print(node)
    unparse.Unparser(transformed_tree, sys.stdout)
    output_code = compile(transformed_tree, self.filename, 'exec')
    pmod = ParallelModule()
    try:
        with open('output.py', 'w') as f:
            unparse.Unparser(transformed_tree, f)
            # f.write(imp.get_magic())
            # marshal.dump(output_code, f)
            # exec(output_code, {})
    except:
        print(sys.exc_info()[0])
        print(sys.exc_info()[1])
        print(sys.exc_info()[2])
def test_increment_lineno(self):
    src = ast.parse('a + 1', mode='eval')
    self.assertEqual(ast.increment_lineno(src, n=3), src)
    self.assertEqual(ast.dump(src, include_attributes=True),
        "Expression(body=BinOp(left=Name(id='a', ctx=Load(), lineno=4, col_offset=0), "
        'op=Add(), right=Const(c=1, constant=pure_const(), lineno=4, col_offset=4), '
        'lineno=4, col_offset=0))'
    )
def get_function_body_code(func):
    filename = inspect.getfile(func)
    func_body, line_offset = get_function_body(func)
    body_source = dedent_function_body(func_body)
    body_code = compile(body_source, filename, "exec", ast.PyCF_ONLY_AST)
    body_code = ast.increment_lineno(body_code, n=line_offset)
    body_code = compile(body_code, filename, "exec")
    return body_code
def test_increment_lineno(self):
    src = ast.parse("1 + 1", mode="eval")
    self.assertEqual(ast.increment_lineno(src, n=3), src)
    self.assertEqual(
        ast.dump(src, include_attributes=True),
        "Expression(body=BinOp(left=Num(n=1, lineno=4, col_offset=0), "
        "op=Add(), right=Num(n=1, lineno=4, col_offset=4), lineno=4, "
        "col_offset=0))",
    )
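# --- Added companion check (standalone, not part of the test suites above):
# the same increment_lineno behaviour expressed without relying on the exact
# ast.dump text, which changed when Num was folded into Constant in 3.8.
import ast

src = ast.parse("1 + 1", mode="eval")
assert ast.increment_lineno(src, n=3) is src       # returns the node it got
binop = src.body
assert (binop.lineno, binop.left.lineno, binop.right.lineno) == (4, 4, 4)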
def visit_FunctionDef(self, node):
    """Delete functions that are empty due to imports being moved"""
    self.in_class_define = False

    if self.in_setup:
        node.col_offset -= 4
        ast.increment_lineno(node, -1)

    if node.name == 'setup':
        self.in_setup = True

    self.generic_visit(node)

    if node.name == 'setup':
        self.in_setup = False

    if node.body:
        return node
def parseSourceCodeToAst(source_code, filename, line_offset):
    # Workaround: ast.parse cannot cope with some situations where a file is not
    # terminated by a new line.
    if not source_code.endswith('\n'):
        source_code = source_code + '\n'

    try:
        body = ast.parse(source_code, filename)
    except SyntaxError as e:
        _makeSyntaxErrorCompatible(e)

        raise e

    assert getKind(body) == "Module"

    if line_offset > 0:
        ast.increment_lineno(body, line_offset)

    return body
def register(name, handler, mask=None, filename=None, lineno=None):
    """Register an Event handler"""

    # already registered
    if name in _handlers:
        return AlreadyRegistered

    if handler is not None:
        # handle string containing python code
        if isinstance(handler, str):
            tmp = "def %s(e):\n%s" % (name, handler)
            try:
                code = bb.methodpool.compile_cache(tmp)
                if not code:
                    if filename is None:
                        filename = "%s(e)" % name
                    code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
                    if lineno is not None:
                        ast.increment_lineno(code, lineno - 1)
                    code = compile(code, filename, "exec")
                    bb.methodpool.compile_cache_add(tmp, code)
            except SyntaxError:
                logger.error("Unable to register event handler '%s':\n%s", name,
                             ''.join(traceback.format_exc(limit=0)))
                _handlers[name] = noop
                return
            env = {}
            bb.utils.better_exec(code, env)
            func = bb.utils.better_eval(name, env)
            _handlers[name] = func
        else:
            _handlers[name] = handler

        if not mask or '*' in mask:
            _catchall_handlers[name] = True
        else:
            for m in mask:
                if _event_handler_map.get(m, None) is None:
                    _event_handler_map[m] = {}
                _event_handler_map[m][name] = True

        return Registered
def component_list_from_file(filename):
    """
    Read in a file containing fit components and return them as a list
    """
    with open(filename) as f:
        model_tree = ast.parse(f.read())

    # Inject distribution and component imports. Put them at the beginning
    # so user imports may override them. level=1 means relative import, e.g.:
    # from .ModelComponents import *
    ast.increment_lineno(model_tree, n=3)
    comps = ast.ImportFrom(module='ModelComponents',
                           names=[ast.alias(name='*', asname=None)], level=1)
    dists = ast.ImportFrom(module='distributions',
                           names=[ast.alias(name='*', asname=None)], level=1)
    model_tree.body.insert(0, comps)
    model_tree.body.insert(1, dists)

    # Insert a statement creating an empty list called components
    comps_node = ast.Assign(targets=[ast.Name(id=_comps_name, ctx=ast.Store())],
                            value=ast.List(elts=[], ctx=ast.Load()))
    model_tree.body.insert(2, comps_node)

    # Transform bare components expressions into list append statements
    model_tree = ExprsToAssigns().visit(model_tree)
    ast.fix_missing_locations(model_tree)

    # Process file within its local dir, so file references within are relative
    # to its location instead of the script run location.
    prev_dir = os.getcwd()
    model_dir = os.path.dirname(filename)
    if model_dir == '':
        model_dir = '.'
    os.chdir(model_dir)
    exec(compile(model_tree, filename, mode='exec'))
    os.chdir(prev_dir)

    # Filter out only those object that are subclasses of ComponentBase
    return [comp for comp in locals()[_comps_name]
            if isinstance(comp, ComponentBase.ComponentBase)]
def _get_ast(func, flags=0):
    if int(os.environ.get('NUMBA_FORCE_META_AST', 0)):
        func_def = decompile_func(func)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
    try:
        source = inspect.getsource(func)
        source_module = inspect.getmodule(func)
    except IOError:
        return decompile_func(func)
    else:
        # Split off decorators
        # TODO: This is not quite correct, we can have comments or strings
        # starting at column 0 and an indented function !
        source = textwrap.dedent(source)
        decorators = 0
        while not source.lstrip().startswith('def'):  # decorator can have multiple lines
            assert source
            decorator, sep, source = source.partition('\n')
            decorators += 1

        if (hasattr(source_module, "print_function") and
                hasattr(source_module.print_function, "compiler_flag")):
            flags |= source_module.print_function.compiler_flag

        source_file = getattr(source_module, '__file__', '<unknown file>')
        module_ast = compile(source, source_file, "exec",
                             ast.PyCF_ONLY_AST | flags, True)

        # fix line numbering
        lineoffset = func.__code__.co_firstlineno + decorators
        ast.increment_lineno(module_ast, lineoffset)

        assert len(module_ast.body) == 1
        func_def = module_ast.body[0]
        _fix_ast(func_def)
        assert isinstance(func_def, ast.FunctionDef)
        return func_def
def get_function_body_code(func):
    filename = inspect.getfile(func)
    func_body, line_offset = get_function_body(func)
    body_source = dedent_function_body(func_body)
    try:
        body_code = compile(body_source, filename, "exec", ast.PyCF_ONLY_AST)
        body_code = ast.increment_lineno(body_code, n=line_offset)
        body_code = compile(body_code, filename, "exec")
    except SyntaxError as e:
        if e.args[0] == "'return' outside function":
            filename, lineno, _, statement = e.args[1]
            raise SyntaxError('No return statements allowed in ConfigScopes\n'
                              '(\'{}\' in File "{}", line {})'.format(
                                  statement.strip(), filename, lineno))
        elif e.args[0] == "'yield' outside function":
            filename, lineno, _, statement = e.args[1]
            raise SyntaxError('No yield statements allowed in ConfigScopes\n'
                              '(\'{}\' in File "{}", line {})'.format(
                                  statement.strip(), filename, lineno))
        else:
            raise
    return body_code