def _rebuildExpressionCache(self):
    self.expression_codes = dict()
    self.expression_order_all = []
    self.expression_orders = dict()
    dynamic_cols = self.searchFlag("dynamic")
    dynamic_names = [self.colInfo[col]["name"] for col in dynamic_cols]
    column_names = self.colInfo["name"].tolist()
    solver = DependencySolver(dynamic_names)
    transformValue = ExpressionValueTransform(column_names)
    transformFunc = ExpressionFuncTransform(column_names, array_name="data_masked")
    for col, col_name in zip(dynamic_cols, dynamic_names):
        try:
            # determine dependencies for this column
            tree = ast.parse(self.colInfo[col]["expression"], mode="eval")
            solver.add(col_name, tree)
            # transform and compile the expression
            transformFunc.visit(tree)
            transformValue.visit(tree)
            ast.fix_missing_locations(tree)
            code = compile(tree, "<user expression '%s'>" % col_name, mode="eval")
            self.expression_codes[col] = code
        except Exception as e:
            print("error compiling user expression for column '%s'" % col_name)
            self.expression_codes[col] = compile("None", "<compile error>", mode="eval")
    # try to solve dependencies
    try:
        self.expression_order_all = [column_names.index(name) for name in solver.solve()]
        for col, col_name in zip(dynamic_cols, dynamic_names):
            self.expression_orders[col] = [column_names.index(name)
                                           for name in solver.solve(col_name)]
    except Exception as e:
        print("error evaluating expressions,", e)
        self.expression_order_all = dynamic_cols
def ast_print_node(values):
    # Python 2 only: _ast.Print does not exist on Python 3, where print is a call.
    print_node = _ast.Print(values=values, nl=True)
    ast.fix_missing_locations(print_node)
    return print_node
def ast_wrap_in_assn(var_name, ast_expr):
    assn = _ast.Assign(
        targets=[_ast.Name(id=var_name, ctx=_ast.Store())],
        value=ast_expr.value
    )
    ast.fix_missing_locations(assn)
    return assn
def _convert(self):
    """Attempts to convert expression to distance function.

    Constraints are often expressed as inequalities, such as x < 5, meaning
    that a policy is feasible if the value of x is less than 5. It is
    sometimes useful to know how far a policy is from a feasibility
    threshold. For example, x = 7 is closer to the feasibility threshold
    than x = 15.

    This method attempts to convert a comparison expression to a distance
    expression by manipulating the AST. If successful, this method creates
    the _distance attribute. Even if this method is successful, the
    generated expression may not be valid.
    """
    root = ast.parse(self.expr, mode="eval")

    if isinstance(root.body, ast.Compare) and len(root.body.ops) == 1:
        left_expr = root.body.left
        right_expr = root.body.comparators[0]

        distance_expr = ast.Expression(ast.BinOp(left_expr, ast.Sub(), right_expr))
        ast.fix_missing_locations(distance_expr)

        self._distance = compile(distance_expr, "<AST>", "eval")
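# Hypothetical stand-alone sketch of the conversion above (names invented
# for illustration): the constraint "x < 5" becomes the distance expression
# "x - 5", which is negative when the constraint holds.
import ast

root = ast.parse("x < 5", mode="eval")
dist = ast.Expression(ast.BinOp(root.body.left, ast.Sub(), root.body.comparators[0]))
ast.fix_missing_locations(dist)
print(eval(compile(dist, "<AST>", "eval"), {"x": 7}))  # -> 2, i.e. 2 past the threshold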
def get_import_code(self, node, fname='<string>'):
    """Get compiled code of all top-level import statements found in the
    AST of node."""
    self._import_nodes = []
    self.visit(node)
    body = []
    for imp_node in self._import_nodes:
        if isinstance(imp_node, ast.ImportFrom) and \
                imp_node.module == '__future__':
            # 'SyntaxError: from __future__ imports must occur at the
            # beginning of the file' is raised if a 'from __future__ import'
            # is wrapped in try-except, so use only the import statement.
            body.append(imp_node)
        else:
            if sys.version_info[0] >= 3:
                # Python 3
                body.append(ast.Try(body=[imp_node],
                                    handlers=[ast.ExceptHandler(type=None, name=None,
                                                                body=[ast.Pass()])],
                                    orelse=[], finalbody=[]))
            else:
                body.append(ast.TryExcept(body=[imp_node],
                                          handlers=[ast.ExceptHandler(type=None, name=None,
                                                                      body=[ast.Pass()])],
                                          orelse=[]))
    node = ast.Module(body=body)
    ast.fix_missing_locations(node)
    code = compile(node, fname, 'exec')
    return code
def asterize(cmd, mode="eval"):
    tree = ast.parse(cmd, mode=mode)
    tree = AstWrapper().visit(tree)
    # Add lineno & col_offset to the nodes we created
    ast.fix_missing_locations(tree)
    co = compile(tree, "<ast>", mode)
    return co
# Python 2 code: uses the print statement and the file() builtin.
def instrumentModule(module_filename, out_dir, is_app=False, in_dir=""):
    mod_file = os.path.join(out_dir, module_filename)
    if os.path.exists(mod_file) and \
            os.stat(os.path.join(in_dir, module_filename)).st_mtime < os.stat(mod_file).st_mtime:
        return

    print "Instrumenting %s" % module_filename
    if "se_dict.py" in module_filename:
        import_se_dict = False
    else:
        import_se_dict = True
    module_contents = file(os.path.join(in_dir, module_filename), "U").read()
    if len(module_contents.strip()) == 0:
        file(mod_file, "w").close()
        return

    root_node = ast.parse(module_contents)
    SplitBoolOpPass1().visit(root_node)
    LiftComputationFromConditionalPass2().visit(root_node)
    BranchIdentifierPass3(import_se_dict).visit(root_node)
    ast.fix_missing_locations(root_node)
    compile(root_node, module_filename, 'exec')  # to make sure the new AST is ok
    unparse.Unparser(root_node, file(mod_file, "w"))
def visit_mutation_site(self, node, op, num_mutations):
    """Potentially mutate `node`, returning the mutated version.

    `Operator` calls this when AST iteration reaches a potential mutation
    site. If that site is scheduled for mutation, the subclass instance
    will be asked to perform the mutation.
    """
    # If the current operator will do at least that many mutations,
    # then let it make the mutation now.
    if self._count <= self._target < self._count + num_mutations:
        assert self._activation_record is None
        assert self._target - self._count < num_mutations

        self._activation_record = {
            'operator': _full_module_name(op),
            'occurrence': self._target,
            'line_number': cosmic_ray.util.get_line_number(node)
        }

        old_node = node
        node = op.mutate(old_node, self._target - self._count)

        # add lineno and col_offset for newly created nodes
        ast.fix_missing_locations(node)

    self._count += num_mutations
    return node
def gen_module(self, script):
    func_args = ast.arguments(args=[])
    func = [ast.FunctionDef(name='init', args=func_args,
                            body=self.gen_init_body(script))]
    m = ast.Module(self.gen_preamble() + func, lineno=0, col_offset=0)
    FuncFix().visit(m)
    ast.fix_missing_locations(m)
    return m
def visit_Module(self, node):
    """ Visit the whole module and add all imports at the top level.

    >> import math

    Becomes

    >> import math as pythonic::math

    And

    >> import numpy.linalg

    Becomes

    >> import numpy as pythonic::numpy
    """
    node.body = [k for k in (self.visit(n) for n in node.body) if k]
    imports = [ast.Import([ast.alias(i, namespace + "::" + i)])
               for i in self.imports]
    node.body = imports + node.body
    ast.fix_missing_locations(node)
    return node
def __init__(self, script=None, file=None, tree=None, globals=None, locals=None, **kwargs):
    if script is None and file is not None:
        # it's a pity that compile() does not accept a file as input
        # so we could avoid reading the whole file
        script = open(file).read()
    else:
        file = 'arg_to_main'
    self.environ = Environment(globals, locals, **kwargs)
    if tree is None:
        tree = ast.parse(script)
        # ImportFrom(module='bar', names=[alias(name='baz', asname=None)], level=0)
        node = ImportFrom(module='ayrton',
                          names=[alias(name='CommandWrapper', asname=None)],
                          level=0)
        node.lineno = 0
        node.col_offset = 0
        ast.fix_missing_locations(node)
        tree.body.insert(0, node)
        tree = CrazyASTTransformer(self.environ).visit(tree)
    self.options = {}
    self.source = compile(tree, file, 'exec')
def transform(src):
    """
    Transforms the given source to use pvectors, pmaps and psets to replace
    built-in structures.
    """
    tree = ast.parse(src)
    transformer = PyrsistentTransformer()
    new_tree = transformer.visit(tree)
    ast.fix_missing_locations(new_tree)
    return new_tree
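# PyrsistentTransformer is defined elsewhere in this codebase; the following
# is only a minimal sketch of what such a transformer could look like,
# assuming it rewrites list/dict/set literals into pvector/pmap/pset calls.
# Executing the transformed tree would additionally require
# "from pyrsistent import pvector, pmap, pset" in the target namespace.
import ast

class PyrsistentTransformer(ast.NodeTransformer):
    def _wrap(self, node, factory):
        # Rewrite the literal's children first, then wrap it in a call.
        self.generic_visit(node)
        return ast.copy_location(
            ast.Call(func=ast.Name(id=factory, ctx=ast.Load()),
                     args=[node], keywords=[]), node)

    def visit_List(self, node):
        return self._wrap(node, "pvector")

    def visit_Dict(self, node):
        return self._wrap(node, "pmap")

    def visit_Set(self, node):
        return self._wrap(node, "pset")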
def visit_ClassDef(self, node):
    self.transforms = {}
    self.in_class_define = True

    functions_to_promote = []
    setup_func = None

    for class_func in ast.iter_child_nodes(node):
        if isinstance(class_func, ast.FunctionDef):
            if class_func.name == 'setup':
                setup_func = class_func
                for anon_func in ast.iter_child_nodes(class_func):
                    if isinstance(anon_func, ast.FunctionDef):
                        functions_to_promote.append(anon_func)

    if setup_func:
        for func in functions_to_promote:
            setup_func.body.remove(func)
            func.args.args.insert(0, ast.Name(id='self', ctx=ast.Load()))
            node.body.append(func)
            self.transforms[func.name] = 'self.' + func.name
        ast.fix_missing_locations(node)

    self.generic_visit(node)
    return node
def __init__(self, engine, node):
    self._scopes = [set()]
    self._expression_cache = {}
    self._translations = []
    self._markers = set()
    self._engine = ExpressionCompiler(
        engine,
        self._expression_cache,
        self._markers
    )

    if isinstance(node_annotations, dict):
        self.lock.acquire()
        backup = node_annotations.copy()
    else:
        backup = None

    try:
        module = ast.Module([])
        module.body += self.visit(node)
        ast.fix_missing_locations(module)
        generator = TemplateCodeGenerator(module)
    finally:
        if backup is not None:
            node_annotations.clear()
            node_annotations.update(backup)
            self.lock.release()

    self.code = generator.code
def _update_widgets(self):
    """ Updates the tree and editor widgets.
    """
    self.setWindowTitle('{} - {}'.format(self._file_name, PROGRAM_NAME))
    self.editor.setPlainText(self._source_code)

    if not self._source_code:
        logger.debug("Empty source code, use empty tree.")
        self.ast_tree.clear()
        return

    try:
        syntax_tree = ast.parse(self._source_code, filename=self._file_name,
                                mode=self._mode)
        ast.fix_missing_locations(syntax_tree)  # Doesn't seem to do anything.
    except Exception as ex:
        if DEBUGGING:
            raise
        else:
            stack_trace = traceback.format_exc()
            msg = "Unable to parse file: {}\n\n{}\n\n{}" \
                .format(self._file_name, ex, stack_trace)
            logger.exception(ex)
            QtWidgets.QMessageBox.warning(self, 'error', msg)
    else:
        last_pos = self.editor.get_last_pos()
        root_item = self.ast_tree.populate(syntax_tree, last_pos,
                                           root_label=self._file_name)
        self.ast_tree.setCurrentItem(root_item)
        self.ast_tree.expand_reset()
def inject_print_collector(self, node, position=0):
    print_used = self.print_info.print_used
    printed_used = self.print_info.printed_used

    if print_used or printed_used:
        # Add '_print = _print_(_getattr_)' at the top of a
        # function/module.
        _print = ast.Assign(
            targets=[ast.Name('_print', ast.Store())],
            value=ast.Call(
                func=ast.Name("_print_", ast.Load()),
                args=[ast.Name("_getattr_", ast.Load())],
                keywords=[]))

        if isinstance(node, ast.Module):
            _print.lineno = position
            _print.col_offset = position
            ast.fix_missing_locations(_print)
        else:
            copy_locations(_print, node)

        node.body.insert(position, _print)

        if not printed_used:
            self.warn(node, "Prints, but never reads 'printed' variable.")
        elif not print_used:
            self.warn(node, "Doesn't print, but reads 'printed' variable.")
def generate(self):
    self.reset()
    module = ast.Module()
    module.body = []
    module.body = self.random_body()
    ast.fix_missing_locations(module)
    return module
def mod(self, mod, msg=None, mode="exec", *, exc=ValueError):
    mod.lineno = mod.col_offset = 0
    ast.fix_missing_locations(mod)
    with self.assertRaises(exc) as cm:
        compile(mod, "<test>", mode)
    if msg is not None:
        self.assertIn(msg, str(cm.exception))
def _astIt(self, someCode):
    theContext = ast.parse(someCode)
    ast.fix_missing_locations(theContext)
    co = compile(theContext, "<ast>", "exec")
    exec(co)
def test_invalid_identitifer(self):
    m = ast.Module([ast.Expr(ast.Name(42, ast.Load()))])
    ast.fix_missing_locations(m)
    with self.assertRaises(TypeError) as cm:
        compile(m, "<test>", "exec")
    if support.check_impl_detail():
        self.assertIn("identifier must be of type str", str(cm.exception))
def compile_func(arg_names, statements, name='_the_func', debug=False):
    """Compile a list of statements as the body of a function and return
    the resulting Python function. If `debug`, then print out the
    bytecode of the compiled function.
    """
    arguments = ast.arguments(
        args=[_to_arg(n) for n in arg_names],
        defaults=[ex_literal(None) for _ in arg_names],
        kwonlyargs=[],
        kw_defaults=[]
    )
    func_def = ast.FunctionDef(
        name=name,
        args=arguments,
        body=statements,
        decorator_list=[]
    )
    mod = ast.Module([func_def])
    ast.fix_missing_locations(mod)

    prog = compile(mod, '<generated>', 'exec')

    # Debug: show bytecode.
    if debug:
        dis.dis(prog)
        for const in prog.co_consts:
            if isinstance(const, types.CodeType):
                dis.dis(const)

    the_locals = {}
    exec(prog, {}, the_locals)
    return the_locals[name]
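# A self-contained sketch of the same pattern without the helpers used above
# (_to_arg, ex_literal): parse a stub function, splice in a statement body,
# and compile. The stub trick sidesteps version-specific ast.arguments fields.
import ast

mod = ast.parse("def add(a, b):\n    pass")
mod.body[0].body = (ast.parse("result = a + b").body
                    + [ast.Return(ast.Name("result", ast.Load()))])
ast.fix_missing_locations(mod)
ns = {}
exec(compile(mod, "<generated>", "exec"), ns)
print(ns["add"](2, 3))  # -> 5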
def test_invalid_string(self):
    m = ast.Module([ast.Expr(ast.Str(42))])
    ast.fix_missing_locations(m)
    with self.assertRaises(TypeError) as cm:
        compile(m, "<test>", "exec")
    if support.check_impl_detail():
        self.assertIn("string must be of type str or uni", str(cm.exception))
def evalValueAst(funcEnv, valueAst, srccode_name=None):
    if srccode_name is None:
        srccode_name = "<PyCParser_dynamic_eval>"
    valueExprAst = ast.Expression(valueAst)
    ast.fix_missing_locations(valueExprAst)
    valueCode = compile(valueExprAst,
                        "<PyCParser_globalvar_" + srccode_name + "_init>", "eval")
    v = eval(valueCode, funcEnv.interpreter.globalsDict)
    return v
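# Hypothetical stand-alone sketch of the wrapping step above: a bare
# expression node must be wrapped in ast.Expression before it can be
# compiled in 'eval' mode.
import ast

value_ast = ast.parse("1 + 2", mode="eval").body  # a bare BinOp node
wrapped = ast.Expression(value_ast)
ast.fix_missing_locations(wrapped)
print(eval(compile(wrapped, "<dynamic>", "eval")))  # -> 3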
def make_lambda(expression, args, values):
    def make_arg(name):
        if sys.version_info >= (3, 0):
            return ast.arg(arg=name, annotation=None)
        else:
            return ast.Name(id=name, ctx=ast.Param(), lineno=1, col_offset=0)

    lambda_ = ast.Lambda(
        args=ast.arguments(
            args=[make_arg(arg) for arg in args + values],
            varargs=None,
            varargannotation=None,
            kwonlyargs=[],
            kwarg=None,
            kwargannotation=None,
            defaults=[ast.Num(i) for i in range(len(values))],
            kw_defaults=[]),
        body=expression.body,
    )
    lambda_ = ast.copy_location(lambda_, expression.body)
    exp = ast.Expression(body=lambda_, lineno=1, col_offset=0)
    ast.dump(exp)
    ast.fix_missing_locations(exp)

    GLOBALS = __GLOBALS.copy()
    GLOBALS["__builtins__"] = {}
    return eval(compile(exp, "<lambda>", "eval"), GLOBALS)
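# Simplified Python 3.8+ sketch of the idea above (the original also supports
# Python 2 and injects default values): wrap a parsed expression in a Lambda
# and evaluate it with empty builtins.
import ast

expr = ast.parse("x + y", mode="eval")
lam = ast.Lambda(
    args=ast.arguments(posonlyargs=[],
                       args=[ast.arg(arg="x"), ast.arg(arg="y")],
                       kwonlyargs=[], kw_defaults=[], defaults=[]),
    body=expr.body)
tree = ast.Expression(body=lam)
ast.fix_missing_locations(tree)
f = eval(compile(tree, "<lambda>", "eval"), {"__builtins__": {}})
print(f(1, 2))  # -> 3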
def compile_func(arg_names, statements, name='_the_func', debug=False):
    """Compile a list of statements as the body of a function and return
    the resulting Python function. If `debug`, then print out the
    bytecode of the compiled function.
    """
    func_def = ast.FunctionDef(
        name,
        ast.arguments(
            [ast.Name(n, ast.Param()) for n in arg_names],
            None,
            None,
            [ex_literal(None) for _ in arg_names],
        ),
        statements,
        [],
    )
    mod = ast.Module([func_def])
    ast.fix_missing_locations(mod)

    prog = compile(mod, '<generated>', 'exec')

    # Debug: show bytecode.
    if debug:
        dis.dis(prog)
        for const in prog.co_consts:
            if isinstance(const, types.CodeType):
                dis.dis(const)

    the_locals = {}
    exec prog in {}, the_locals  # Python 2 exec statement; this is the Py2 variant of compile_func above
    return the_locals[name]
def function_from_source(source, globals_=None):
    """
    A helper function to construct a Function object from a source
    with custom __future__ imports.
    """
    module = ast.parse(unindent(source))
    ast.fix_missing_locations(module)

    for stmt in module.body:
        if type(stmt) == ast.FunctionDef:
            tree = stmt
            name = stmt.name
            break
    else:
        raise ValueError("No function definitions found in the provided source")

    code_object = compile(module, '<nofile>', 'exec', dont_inherit=True)
    locals_ = {}
    eval(code_object, globals_, locals_)

    function_obj = locals_[name]
    function_obj._peval_source = astunparse.unparse(tree)

    return Function.from_object(function_obj)
def visit(self, node):
    if self.has_notmutate(node) or (self.coverage_injector and
                                    not self.coverage_injector.is_covered(node)):
        return
    if self.only_mutation and self.only_mutation.node != node and \
            self.only_mutation.node not in node.children:
        return
    self.fix_lineno(node)
    visitors = self.find_visitors(node)
    if visitors:
        for visitor in visitors:
            try:
                if self.sampler and not self.sampler.is_mutation_time():
                    raise MutationResign
                if self.only_mutation and \
                        (self.only_mutation.node != node or
                         self.only_mutation.visitor != visitor.__name__):
                    raise MutationResign
                new_node = visitor(node)
                self.visitor = visitor.__name__
                self.current_node = node
                self.fix_node_internals(node, new_node)
                ast.fix_missing_locations(new_node)
                yield new_node
            except MutationResign:
                pass
            finally:
                for new_node in self.generic_visit(node):
                    yield new_node
    else:
        for new_node in self.generic_visit(node):
            yield new_node
def dafile_to_pyast(filename, args=None):
    """Translates DistAlgo source file into executable Python AST.

    'filename' is the filename of source file. Optional argument 'args' is a
    Namespace object containing the command line parameters for the compiler.
    Returns the generated Python AST.
    """
    if args is None:
        args = parse_compiler_args([])
    daast = daast_from_file(filename, args)
    if daast is not None:
        pyast = PythonGenerator(filename, args).visit(daast)
        if pyast is None:
            print("Error: unable to generate Python AST from DistAlgo AST"
                  " for file ", filename, file=stderr)
            return None
        assert isinstance(pyast, list) and len(pyast) == 1 and \
            isinstance(pyast[0], ast.Module)
        pyast = pyast[0]
        ast.fix_missing_locations(pyast)
        if args and hasattr(args, 'dump_ast') and args.dump_ast:
            print(ast.dump(pyast, include_attributes=True), file=stderr)
        return pyast
    else:
        return None
def transform_ast(self, node):
    """Apply the AST transformations from self.ast_transformers

    Parameters
    ----------
    node : ast.Node
        The root node to be transformed. Typically called with the ast.Module
        produced by parsing user input.

    Returns
    -------
    An ast.Node corresponding to the node it was called with. Note that it
    may also modify the passed object, so don't rely on references to the
    original AST.
    """
    for transformer in self.ast_transformers:
        try:
            node = transformer.visit(node)
        except InputRejected:
            # User-supplied AST transformers can reject an input by raising
            # an InputRejected. Short-circuit in this case so that we
            # don't unregister the transform.
            raise
        except Exception:
            warn("AST transformer %r threw an error. It will be unregistered."
                 % transformer)
            self.ast_transformers.remove(transformer)

    if self.ast_transformers:
        ast.fix_missing_locations(node)
    return node
def _parse_and_transform_ast(self, path):
    with open(path) as f:
        tree = ast.parse(f.read(), filename=path)
    tree = self._node_transformer.visit(tree)
    ast.fix_missing_locations(tree)
    return tree
def visit_FunctionDef(self, node):
    after_nodes = []

    if self.curprim is None:
        self.curprim = self.pdp
        self.curchild = -1
        if isinstance(node.decorator_list[0], ast.Call):
            self.module_name = node.decorator_list[0].func.value.id
        else:
            self.module_name = node.decorator_list[0].value.id
        # Strip decorator
        del node.decorator_list[0]

        oldchild = self.curchild
        oldprim = self.curprim
    else:
        if len(node.decorator_list) == 0:
            return self.generic_visit(node)

        dec = node.decorator_list[0]
        if isinstance(dec, ast.Call):
            decname = rname(dec.func.attr)
        else:
            decname = rname(dec.attr)

        if decname in ['map', 'reduce', 'consume', 'tasklet', 'iterate',
                       'loop', 'conditional']:
            self.curchild += 1

            oldchild = self.curchild
            oldprim = self.curprim
            self.curprim = self.curprim.children[self.curchild]
            self.curchild = -1

            if isinstance(self.curprim, astnodes._MapNode):
                newnode = _copy_location(
                    ast.For(target=ast.Tuple(ctx=ast.Store(),
                                             elts=[ast.Name(id=name, ctx=ast.Store())
                                                   for name in self.curprim.params]),
                            iter=ast.parse('%s.ndrange(%s)' %
                                           (self.module_name,
                                            self.curprim.range.pystr())).body[0].value,
                            body=node.body,
                            orelse=[]), node)
                node = newnode
            elif isinstance(self.curprim, astnodes._ConsumeNode):
                stream = self.curprim.stream
                if isinstance(self.curprim.stream, ast.AST):
                    stream = unparse(self.curprim.stream)
                if '[' not in stream:
                    stream += '[0]'

                newnode = _copy_location(
                    ast.While(test=ast.parse('len(%s) > 0' % stream).body[0].value,
                              body=node.body,
                              orelse=[]), node)
                node = newnode
                node.body.insert(
                    0,
                    _copy_location(
                        ast.parse('%s = %s.popleft()' %
                                  (str(self.curprim.params[0]), stream)).body[0],
                        node))
            elif isinstance(self.curprim, astnodes._TaskletNode):
                # Strip decorator
                del node.decorator_list[0]

                newnode = _copy_location(ast.parse('if True: pass').body[0], node)
                newnode.body = node.body
                newnode = ast.fix_missing_locations(newnode)
                node = newnode
            elif isinstance(self.curprim, astnodes._ReduceNode):
                in_memlet = self.curprim.inputs['input']
                out_memlet = self.curprim.outputs['output']
                # Create reduction call
                params = [unparse(p) for p in node.decorator_list[0].args]
                params.extend([unparse(kp) for kp in node.decorator_list[0].keywords])
                reduction = ast.parse(
                    '%s.simulator.simulate_reduce(%s, %s)' %
                    (self.module_name, node.name, ', '.join(params))).body[0]
                reduction = _copy_location(reduction, node)
                reduction = ast.increment_lineno(reduction, len(node.body) + 1)
                reduction = ast.fix_missing_locations(reduction)

                # Strip decorator
                del node.decorator_list[0]

                after_nodes.append(reduction)
            elif isinstance(self.curprim, astnodes._IterateNode):
                newnode = _copy_location(
                    ast.For(target=ast.Tuple(ctx=ast.Store(),
                                             elts=[ast.Name(id=name, ctx=ast.Store())
                                                   for name in self.curprim.params]),
                            iter=ast.parse('%s.ndrange(%s)' %
                                           (self.module_name,
                                            self.curprim.range.pystr())).body[0].value,
                            body=node.body,
                            orelse=[]), node)
                newnode = ast.fix_missing_locations(newnode)
                node = newnode
            elif isinstance(self.curprim, astnodes._LoopNode):
                newnode = _copy_location(
                    ast.While(test=node.decorator_list[0].args[0],
                              body=node.body,
                              orelse=[]), node)
                newnode = ast.fix_missing_locations(newnode)
                node = newnode
            else:
                raise RuntimeError('Unimplemented primitive %s' % decname)
        else:
            return self.generic_visit(node)

    newbody = []
    end_stmts = []
    substitute_stmts = []
    # Incrementally build new body from original body
    for stmt in node.body:
        if isinstance(stmt, ast.Expr):
            res, append, prepend = self.VisitTopLevelExpr(stmt)
            if res is not None:
                newbody.append(res)
            if append is not None:
                end_stmts.extend(append)
            if prepend is not None:
                substitute_stmts.extend(prepend)
        else:
            subnodes = self.visit(stmt)
            if subnodes is not None:
                if isinstance(subnodes, list):
                    newbody.extend(subnodes)
                else:
                    newbody.append(subnodes)
    node.body = newbody + end_stmts

    self.curchild = oldchild
    self.curprim = oldprim

    substitute_stmts.append(node)
    if len(after_nodes) > 0:
        return substitute_stmts + after_nodes
    return substitute_stmts
# AST Walkthrough
import ast


class ReplaceBinOp(ast.NodeTransformer):
    """Replace operation by addition in binary operation"""
    def visit_BinOp(self, node):
        # Note: the attribute is 'left', not 'lefty'.
        return ast.BinOp(left=node.left, op=ast.Add(), right=node.right)


tree = ast.parse("x = 1/3")
ast.fix_missing_locations(tree)
eval(compile(tree, '', 'exec'))
print(ast.dump(tree))
print(x)  # 0.333...

tree = ReplaceBinOp().visit(tree)
ast.fix_missing_locations(tree)
print(ast.dump(tree))
eval(compile(tree, '', 'exec'))
print(x)  # 4, since the division was replaced by an addition
def translate_to_tptv1(parsed_model, data_batch, hypers):
    parsed_model = u.replace_hypers(parsed_model, hypers)
    input_dependents = get_input_dependent_vars(parsed_model)
    idx_var_name = "input_idx"
    idx_var = ast.Name(idx_var_name, ast.Load())
    input_number = len(data_batch['instances'])
    range_expr = ast.Num(input_number)

    input_vars = set()
    output_vars = set()
    var_decls = []
    input_stmts = []
    general_stmts = []
    output_stmts = []
    for stmt in parsed_model.body:
        if isinstance(stmt, ast.Assign) and is_input_declaration(stmt.value):
            input_vars.add(get_var_name(stmt.targets[0]))
            var_decls.append(stmt)
        elif isinstance(stmt, ast.Assign) and is_output_declaration(stmt.value):
            output_vars.add(get_var_name(stmt.targets[0]))
            var_decls.append(stmt)
        elif isinstance(stmt, ast.Assign) and is_var_declaration(stmt.value):
            var_decls.append(stmt)
        elif ast_uses_varset(stmt, input_dependents):
            input_stmts.append(add_input_indices(stmt, input_dependents, idx_var))
        elif ast_uses_varset(stmt, output_vars):
            output_stmts.append(stmt)
        else:
            general_stmts.append(stmt)

    input_init = []
    output_observation = []
    for input_idx, instance in enumerate(data_batch['instances']):
        for var_name, val in instance.iteritems():
            if var_name in input_vars:
                input_init.extend(generate_io_stmt(input_idx, var_name, val,
                                                   "set_to_constant"))
            elif var_name in output_vars:
                output_observation.extend(generate_io_stmt(input_idx, var_name, val,
                                                           "observe_value"))

    extended_var_decls = []
    for var_decl in var_decls:
        # If input-dependent, extend dimension by one
        if get_var_name(var_decl.targets[0]) in input_dependents:
            new_decl = copy.deepcopy(var_decl)
            if isinstance(new_decl.value, ast.Subscript):
                new_decl.value = extend_subscript_for_input(new_decl.value,
                                                            ast.Num(input_number))
            else:
                new_decl.value = ast.Subscript(new_decl.value,
                                               ast.Index(ast.Num(input_number)),
                                               ast.Load())
            extended_var_decls.append(new_decl)
        else:
            extended_var_decls.append(var_decl)

    input_loop = ast.For(ast.Name(idx_var_name, ast.Store()),
                         ast.Call(ast.Name("range", ast.Load()),
                                  [range_expr], [], None, None),
                         input_stmts, [])
    parsed_model.body = (general_stmts + extended_var_decls + input_init +
                         [input_loop] + output_stmts + output_observation)
    ast.fix_missing_locations(parsed_model)
    return parsed_model
def as_module(self):
    module = ast.parse("")
    module.body = self._body
    return ast.fix_missing_locations(module)
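# Hypothetical usage sketch of the same pattern: ast.parse("") yields an
# empty, version-correct ast.Module into which collected statements can be
# stitched and executed.
import ast

body = ast.parse("a = 1\nb = a + 1").body
module = ast.parse("")
module.body = body
ast.fix_missing_locations(module)
ns = {}
exec(compile(module, "<as_module>", "exec"), ns)
print(ns["b"])  # -> 2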
def execute_notebook(nb, secret='secret', initial_env=None, ignore_errors=False,
                     cwd=None, test_dir=None, seed=None):
    """
    Executes a notebook and returns the global environment that results from execution

    Execute notebook & return the global environment that results from execution. If
    ``ignore_errors`` is ``True``, exceptions are swallowed. ``secret`` contains random digits so
    ``check_results`` and ``check`` are not easily modifiable. ``nb`` is passed in as a dictionary
    that's a parsed notebook

    Args:
        nb (``dict``): JSON representation of a notebook
        secret (``str``, optional): randomly generated integer used to rebind check function
        initial_env (``str``, optional): name of initial environment
        ignore_errors (``bool``, optional): whether exceptions should be ignored
        cwd (``str``, optional): working directory of execution to be appended to ``sys.path``
            in grading environment
        test_dir (``str``, optional): path to directory of tests in grading environment
        seed (``int``, optional): random seed for intercell seeding

    Results:
        ``dict``: global environment resulting from executing all code of the input notebook
    """
    with hide_outputs():
        if initial_env:
            global_env = initial_env.copy()
        else:
            global_env = {}

        # add display from IPython
        global_env["display"] = display

        source = ""
        # if gradescope:
        #     source = "import sys\nsys.path.append(\"/autograder/submission\")\n"
        # el
        if cwd:
            source = f"import sys\nsys.path.append(r\"{cwd}\")\n"
        exec(source, global_env)

        if seed is not None:
            # source += "import numpy as np\nimport random\n"
            import numpy as np
            import random
            global_env["np"] = np
            global_env["random"] = random

        # Before rewriting AST, find cells of code that generate errors.
        # One round of execution is done beforehand to mimic the Jupyter notebook style of
        # running (e.g. code runs up to the point of execution). This workaround is needed
        # because once the source code is parsed into an AST, there is no notion of
        # individual cells.
        for cell in nb['cells']:
            if cell['cell_type'] == 'code':
                # transform the input to executable Python
                # FIXME: use appropriate IPython functions here
                isp = IPythonInputSplitter(line_input_checker=False)
                try:
                    code_lines = []
                    cell_source_lines = cell['source']
                    source_is_str_bool = False
                    if isinstance(cell_source_lines, str):
                        source_is_str_bool = True
                        cell_source_lines = cell_source_lines.split('\n')

                    for line in cell_source_lines:
                        # Filter out ipython magic commands
                        # Filter out interact widget
                        if not line.startswith('%'):
                            if "interact(" not in line and \
                                    not re.search(r"otter\.Notebook\(.*?\)", line):
                                code_lines.append(line)
                                if source_is_str_bool:
                                    code_lines.append('\n')
                            elif re.search(r"otter\.Notebook\(.*?\)", line):
                                # TODO: move this check into CheckCallWrapper
                                # if gradescope:
                                #     line = re.sub(r"otter\.Notebook\(.*?\)",
                                #         "otter.Notebook(\"/autograder/submission/tests\")", line)
                                # el
                                if test_dir:
                                    line = re.sub(r"otter\.Notebook\(.*?\)",
                                                  f"otter.Notebook(\"{test_dir}\")", line)
                                else:
                                    line = re.sub(r"otter\.Notebook\(.*?\)",
                                                  "otter.Notebook(\"/home/tests\")", line)
                                code_lines.append(line)
                                if source_is_str_bool:
                                    code_lines.append('\n')

                    if seed is not None:
                        cell_source = "np.random.seed({})\nrandom.seed({})\n".format(seed, seed) \
                            + isp.transform_cell(''.join(code_lines))
                    else:
                        cell_source = isp.transform_cell(''.join(code_lines))

                    # patch otter.Notebook.export so that we don't create PDFs in notebooks
                    # TODO: move this patch into CheckCallWrapper
                    m = mock.mock_open()
                    with mock.patch('otter.Notebook.export', m), \
                            mock.patch("otter.Notebook._log_event", m):
                        exec(cell_source, global_env)
                    source += cell_source
                except:
                    if not ignore_errors:
                        raise

        tree = ast.parse(source)
        # # CODE BELOW COMMENTED OUT BECAUSE the only check function is within the Notebook class
        # if find_check_assignment(tree) or find_check_definition(tree):
        #     # an empty global_env will fail all the tests
        #     return global_env

        # wrap check(..) calls into a check_results_X.append(check(..))
        transformer = CheckCallWrapper(secret)
        tree = transformer.visit(tree)
        ast.fix_missing_locations(tree)

        cleaned_source = compile(tree, filename="nb-ast", mode="exec")
        try:
            with open(os.devnull, 'w') as f, redirect_stdout(f), redirect_stderr(f):
                # patch otter.Notebook.export so that we don't create PDFs in notebooks
                m = mock.mock_open()
                with mock.patch('otter.Notebook.export', m), \
                        mock.patch("otter.Notebook._log_event", m):
                    exec(cleaned_source, global_env)
        except:
            if not ignore_errors:
                raise

        return global_env
def compile(self, expr):
    tree = ast.parse(expr, mode="eval")
    tree = PredicateTransformer(self).visit(tree)
    ast.fix_missing_locations(tree)
    co = compile(tree, "<ast>", "eval")
    return co
def materialize(self, key=None, args=None, arg_features=None):
    if key is None:
        key = (self.func, 0)
    if not self.runtime.materialized:
        self.runtime.materialize()
    if key in self.compiled_functions:
        return
    grad_suffix = ""
    if self.is_grad:
        grad_suffix = "_grad"
    kernel_name = "{}_c{}_{}_{}".format(self.func.__name__, self.kernel_counter,
                                        key[1], grad_suffix)
    import taichi as ti
    ti.trace("Compiling kernel {}...".format(kernel_name))

    src = remove_indent(inspect.getsource(self.func))
    tree = ast.parse(src)
    if self.runtime.print_preprocessed:
        import astor
        print('Before preprocessing:')
        print(astor.to_source(tree.body[0]))

    func_body = tree.body[0]
    func_body.decorator_list = []

    local_vars = {}
    # Discussions: https://github.com/yuanming-hu/taichi/issues/282
    import copy
    global_vars = copy.copy(self.func.__globals__)

    for i, arg in enumerate(func_body.args.args):
        anno = arg.annotation
        if isinstance(anno, ast.Name):
            global_vars[anno.id] = self.arguments[i]

    visitor = ASTTransformer(
        excluded_paremeters=self.template_slot_locations,
        func=self,
        arg_features=arg_features)
    visitor.visit(tree)
    ast.fix_missing_locations(tree)

    if self.runtime.print_preprocessed:
        import astor
        print('After preprocessing:')
        print(astor.to_source(tree.body[0], indent_with='    '))

    ast.increment_lineno(tree, inspect.getsourcelines(self.func)[1] - 1)

    freevar_names = self.func.__code__.co_freevars
    closure = self.func.__closure__
    if closure:
        freevar_values = list(map(lambda x: x.cell_contents, closure))
        for name, value in zip(freevar_names, freevar_values):
            global_vars[name] = value

    # inject template parameters into globals
    for i in self.template_slot_locations:
        template_var_name = self.argument_names[i]
        global_vars[template_var_name] = args[i]

    exec(
        compile(tree, filename=inspect.getsourcefile(self.func), mode='exec'),
        global_vars, local_vars)
    compiled = local_vars[self.func.__name__]

    taichi_kernel = taichi_lang_core.create_kernel(kernel_name, self.is_grad)

    # Do not change the name of 'taichi_ast_generator'
    # The warning system needs this identifier to remove unnecessary messages
    def taichi_ast_generator():
        if self.runtime.inside_kernel:
            import taichi as ti
            raise ti.TaichiSyntaxError(
                "Kernels cannot call other kernels. I.e., nested kernels are "
                "not allowed. Please check if you have direct/indirect "
                "invocation of kernels within kernels. Note that some methods "
                "provided by the Taichi standard library may invoke kernels, "
                "and please move their invocations to Python-scope.")
        self.runtime.inside_kernel = True
        compiled()
        self.runtime.inside_kernel = False

    taichi_kernel = taichi_kernel.define(taichi_ast_generator)

    assert key not in self.compiled_functions
    self.compiled_functions[key] = self.get_function_body(taichi_kernel)
def __rewrite_tree(self, params):
    tree = copy.deepcopy(self.__tree)
    transformer = ParamTransformer(params)
    transformer.generic_visit(tree)
    ast.fix_missing_locations(tree)
    return tree
def eval_concretely(test, line_state):
    test = Module(body=[Assign(targets=[Name(id="__test_cond_result", ctx=Store())],
                               value=test)])
    fix_missing_locations(test)
    code = compile(test, "<string>", "exec")
    exec(code, line_state)  # was the Python 2 statement form: exec code in line_state
    return line_state["__test_cond_result"]
def parse_ast(root_node, **kwargs):
    return ast.fix_missing_locations(Transformer(**kwargs).visit(root_node))


def find_imports(root_node, **kwargs):
def test_simple_ast_validator(node, message):
    validator = ContextualASTValidator()
    with pytest.raises(SyntaxError) as cm:
        validator.validate(ast.fix_missing_locations(node))
    assert cm.match(message)
def get_comment_file_content(file_content, py_filename):
    print("get_comment_file_content...")
    # os.path.join(temp_dir, py_filename)
    # temp_py_path = NamedTemporaryFile(suffix='.py')
    temp_py = NamedTemporaryFile(mode="a+", suffix='.py', delete=False)
    # temp_py_path.replace('.py', '-ast.csv')
    # temp_ast_csv_path = NamedTemporaryFile(suffix='.csv')
    temp_ast_csv = NamedTemporaryFile(mode="a+", suffix='.csv', delete=False)
    # temp_py_path.replace('.py', '-ast.txt')
    # temp_ast_txt_path = NamedTemporaryFile(suffix='.txt')
    temp_ast_txt = NamedTemporaryFile(mode="a+", suffix='.txt', delete=False)

    # with open(temp_py_path, 'a+') as temp_py_file:
    #     temp_py_file.write(file_content)
    temp_py.write(file_content)
    temp_py.seek(0)

    # 1: py -> ast
    py_to_ast_converter = ASTFunctionsExtractor(temp_py, temp_ast_csv)
    py_to_ast_converter.visit(py_to_ast_converter.ast_object)

    # 2: ast -> docstring preds
    preds_map = OrderedDict()
    asts = []
    temp_ast_csv.seek(0)
    ast_reader = csv.DictReader(temp_ast_csv, fieldnames=AST_CSV_FIELDNAMES)
    i = 0
    for ast_row in ast_reader:
        asts.append(ast_row[AST_COLUMN])
        preds_map[ast_row[FUNCTION_NAME_COLUMN]] = {'row_num': i, 'docstring': ''}
        i += 1
    print("preds_map", preds_map)
    print("asts", asts)

    temp_ast_txt.writelines(asts)
    temp_ast_txt.seek(0)
    predictions = get_model_predictions(temp_ast_txt)

    # 3: insert preds into full python file ast
    temp_py.seek(0)
    original_py_ast_tree = ast.parse(temp_py.read())  # parse the text, not the file object
    docstring_inserter = DocstringInserter(preds_map)
    # was: transformer.visit(...), but 'transformer' is undefined here
    new_py_ast_tree = docstring_inserter.visit(original_py_ast_tree)
    ast.fix_missing_locations(new_py_ast_tree)
    # was: astunparse.unparse(ast.parse(inspect.getsource(new_py_ast_tree))), but
    # inspect.getsource cannot take an AST object
    new_py_code = astunparse.unparse(new_py_ast_tree)

    temp_py.close()
    temp_ast_csv.close()
    temp_ast_txt.close()
    print('new_py_code', new_py_code)
    return new_py_code
def run(*nodes):
    mod = ast.Module(body=list(nodes), lineno=1, col_offset=0)
    mod = ast.fix_missing_locations(mod)
    exec(compile(mod, "<ast>", "exec"))
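# Hypothetical usage sketch. Note that on Python 3.8+ ast.Module also
# requires a type_ignores field, so a version-proof variant builds the
# empty module via ast.parse instead of calling ast.Module directly.
import ast

mod = ast.parse("")
mod.body = ast.parse("x = 41\nprint(x + 1)").body
ast.fix_missing_locations(mod)
exec(compile(mod, "<ast>", "exec"))  # prints 42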
def test_invalid_identitifer(self):
    m = ast.Module([ast.Expr(ast.Name(42, ast.Load()))], None)
    ast.fix_missing_locations(m)
    with self.assertRaises(TypeError) as cm:
        compile(m, "<test>", "exec")
    self.assertIn("identifier must be of type str", str(cm.exception))
from ast import (
    AST,
    Module,
    unparse,
    Expr,
    Expression,
    fix_missing_locations,
    keyword,
    expr,
    dump,
)
from functools import partial

dump = partial(dump, indent=4)

AST.dump = lambda self: print(dump(self))  # type: ignore
AST.eval = lambda self, **kwargs: _eval(self, **kwargs)  # type: ignore
AST.exec = lambda self, **kwargs: _exec(self, **kwargs)
AST.compile = lambda self, **kwargs: _compile(self, **kwargs)  # type: ignore
AST.unparse = lambda self, **kwargs: unparse(fix_missing_locations(self),
                                             **kwargs)  # type: ignore


def _exec(e, globals={}, locals={}, **kwargs):
    if kwargs:
        e = call(lamb(*kwargs.keys())(e), **kwargs)
    if not isinstance(e, Module):
        e = m(e)
    return exec(compile(fix_missing_locations(e), "<string>", "exec"),
                globals, locals)


def _compile(e, **kwargs):
def _lower_array_expr(lowerer, expr):
    '''Lower an array expression built by RewriteArrayExprs.
    '''
    expr_name = "__numba_array_expr_%s" % (hex(hash(expr)).replace("-", "_"))
    expr_filename = expr.loc.filename
    expr_var_list = expr.list_vars()
    # The expression may use a given variable several times, but we
    # should only create one parameter for it.
    expr_var_unique = sorted(set(expr_var_list), key=lambda var: var.name)

    # Arguments are the names external to the new closure
    expr_args = [var.name for var in expr_var_unique]

    # 1. Create an AST tree from the array expression.
    with _legalize_parameter_names(expr_var_unique) as expr_params:
        if hasattr(ast, "arg"):
            # Should be Python 3.x
            ast_args = [ast.arg(param_name, None)
                        for param_name in expr_params]
        else:
            # Should be Python 2.x
            ast_args = [ast.Name(param_name, ast.Param())
                        for param_name in expr_params]
        # Parse a stub function to ensure the AST is populated with
        # reasonable defaults for the Python version.
        ast_module = ast.parse('def {0}(): return'.format(expr_name),
                               expr_filename, 'exec')
        assert hasattr(ast_module, 'body') and len(ast_module.body) == 1
        ast_fn = ast_module.body[0]
        ast_fn.args.args = ast_args
        ast_fn.body[0].value, namespace = _arr_expr_to_ast(expr.expr)
        ast.fix_missing_locations(ast_module)

    # 2. Compile the AST module and extract the Python function.
    code_obj = compile(ast_module, expr_filename, 'exec')
    exec(code_obj, namespace)
    impl = namespace[expr_name]

    # 3. Now compile a ufunc using the Python function as kernel.
    context = lowerer.context
    builder = lowerer.builder
    outer_sig = expr.ty(*(lowerer.typeof(name) for name in expr_args))
    inner_sig_args = []
    for argty in outer_sig.args:
        if isinstance(argty, types.Optional):
            argty = argty.type
        if isinstance(argty, types.Array):
            inner_sig_args.append(argty.dtype)
        else:
            inner_sig_args.append(argty)
    inner_sig = outer_sig.return_type.dtype(*inner_sig_args)

    # Follow the Numpy error model. Note this also allows e.g. vectorizing
    # division (issue #1223).
    flags = compiler.Flags()
    flags.set('error_model', 'numpy')
    cres = context.compile_subroutine(builder, impl, inner_sig, flags=flags,
                                      caching=False)

    # Create kernel subclass calling our native function
    from ..targets import npyimpl

    class ExprKernel(npyimpl._Kernel):
        def generate(self, *args):
            arg_zip = zip(args, self.outer_sig.args, inner_sig.args)
            cast_args = [self.cast(val, inty, outty)
                         for val, inty, outty in arg_zip]
            result = self.context.call_internal(
                builder, cres.fndesc, inner_sig, cast_args)
            return self.cast(result, inner_sig.return_type,
                             self.outer_sig.return_type)

    args = [lowerer.loadvar(name) for name in expr_args]
    return npyimpl.numpy_ufunc_kernel(context, builder, outer_sig, args,
                                      ExprKernel, explicit_output=False)
def m(*exprs):
    m = Module([Expr(e) for e in exprs], type_ignores=[])
    m.lineno = 1
    m.col_offset = 1
    m = fix_missing_locations(m)
    return m
def rewrite_return_as_assignments(func_node, interface):
    """Modify FunctionDef node to directly assign instead of return."""
    func_node = _RewriteReturn(interface).visit(func_node)
    ast.fix_missing_locations(func_node)
    return func_node
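# _RewriteReturn is defined elsewhere; a minimal sketch, assuming 'interface'
# simply names the variable to assign results to, might look like this.
import ast

class _RewriteReturn(ast.NodeTransformer):
    def __init__(self, interface):
        self._target = interface

    def visit_Return(self, node):
        # 'return expr' -> '<interface> = expr'
        return ast.copy_location(
            ast.Assign(targets=[ast.Name(id=self._target, ctx=ast.Store())],
                       value=node.value), node)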
def remove_annotations(node: ast.AST) -> ast.Module:
    return ast.fix_missing_locations(AnnotationRemover().visit(node))
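# AnnotationRemover is defined elsewhere; a minimal sketch, assuming it drops
# variable annotations and function signature annotations, could be:
import ast

class AnnotationRemover(ast.NodeTransformer):
    def visit_AnnAssign(self, node):
        # 'x: int = 1' -> 'x = 1'; bare annotations ('x: int') are removed.
        if node.value is None:
            return None
        return ast.copy_location(
            ast.Assign(targets=[node.target], value=node.value), node)

    def visit_FunctionDef(self, node):
        self.generic_visit(node)
        node.returns = None
        for arg in node.args.args:
            arg.annotation = None
        return node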
def visit_FunctionDef(self, node: ast.FunctionDef):
    if (len(node.body) > 0 and isinstance(node.body[0], Expr)
            and isinstance(node.body[0].value, Str)
            and node.body[0].value.s == 'dbg_ignore'):
        oldHHRC = self.hotHasReturnCheck
        oldBV = self.hot
        self.hotHasReturnCheck = False
        self.hot = None
        self.generic_visit(node)
        self.hotHasReturnCheck = oldHHRC
        self.hot = oldBV
        return node
    # print("visiting", node.name)
    frozone = len(self.funcNames)
    self.funcNames.append(node.name)  # +str(node.lineno)
    if hasattr(node, 'definedforclass'):
        self.classowners.append(node.definedforclass)
    else:
        self.classowners.append(None)
    self.scopes.append(0)
    self.funcparams.append([k.arg for k in node.args.args])
    hasEnter = False
    hasExit = False
    fpad = 2
    if len(node.body) > 0:
        if self.isEnterFunc(node.body[0]):
            hasEnter = True
        elif self.isExitFunc(node.body[0], node.args.args):
            hasExit = True
        else:
            fpad -= 1
    else:
        fpad -= 1
    if len(node.body) > 1:
        if self.isEnterFunc(node.body[1]):
            hasEnter = True
        elif self.isExitFunc(node.body[1], node.args.args):
            hasExit = True
        else:
            fpad -= 1
    else:
        fpad -= 1
    oldHHRC = self.hotHasReturnCheck
    oldBV = self.hot
    self.hotHasReturnCheck = hasExit
    self.hot = frozone
    self.generic_visit(node)
    if len(self.exitpatterns.get(node.name, [])) > len(node.args.args):
        print("Exit pattern for function ", node.name, " has too many parameters.")
        assert False
    shobb = []
    for z in range(len(node.args.args) if hasExit
                   else len(self.exitpatterns.get(node.name, []))):
        # print("Assign2: ", str(frozone))
        shobb.append(Assign(
            targets=[Name(id=node.args.args[z].arg + '_dbg_str_var_' + str(frozone),
                          ctx=Store())],
            value=Name(id=node.args.args[z].arg, ctx=Load())))
    if hasExit:
        expattern = [k.arg for k in node.args.args]
        # sin.insert(1, Expr(value=Call(func=Name(id='_dbgExit', ctx=Load()),
        #     args=[Name(id=pn+'_dbg_str_var_'+str(self.hot), ctx=Load())
        #           for pn in expattern] + [Name(id='_dbg_ret_var', ctx=Load())],
        #     keywords=[])))
        node.body.append(Expr(value=Call(
            func=Name(id='_dbgExit', ctx=Load()),
            args=[Name(id=pn + '_dbg_str_var_' + str(self.hot), ctx=Load())
                  for pn in expattern] + [NameConstant(value=None)],
            keywords=[])))
    if node.name in self.exitpatterns:
        expattern = self.exitpatterns[node.name]
        if len(node.args.args) < len(expattern) or \
                expattern != [k.arg for k in node.args.args][:len(expattern)]:
            print("You defined an exit pattern, " + node.name + ", and then you "
                  "define a function with different first N parameters from it.")
            assert False
        node.body.append(Expr(value=Call(
            func=Name(id='_dbgExit_' + node.name, ctx=Load()),
            args=[Name(id=pn + '_dbg_str_var_' + str(self.hot), ctx=Load())
                  for pn in expattern] + [NameConstant(value=None)],
            keywords=[])))
    track_frames = False
    freebody = node.body[fpad:] + []
    if track_frames:
        freebody = [With(
            items=[withitem(
                context_expr=Call(
                    func=Attribute(value=Name(id='madscience_debugger', ctx=Load()),
                                   attr='push_context', ctx=Load()),
                    args=[Num(n=frozone)],
                    keywords=[]),
                optional_vars=Name(id='madscience_debug_context', ctx=Store()))],
            body=node.body[fpad:] + [])]
    node.body = shobb + node.body[:fpad] + freebody
    if track_frames:
        self.visitblock(node.body[-1].body, "func")
    else:
        self.visitblock(node.body[-1], "func")
    # if node.name == "verify":
    #     print("verify mutated", node.lineno)
    # node.body.insert(0, Expr(value=Call(func=Name(id='print', ctx=Load()),
    #     args=[Str(s='function was knocked on ' + str(node.lineno))], keywords=[])))
    # node.body[-1].body.insert(0, Expr(value=Call(func=Name(id='print', ctx=Load()),
    #     args=[Str(s='function was visited ' + str(node.lineno))], keywords=[])))
    # print("mutated", node.name)
    # print(self.enterpatterns, frozone, node.name)
    if hasEnter:
        node.body.insert(fpad + len(shobb),
                         Expr(value=Call(func=Name(id='_dbgEnter', ctx=Load()),
                                         args=[], keywords=[])))
    if node.name in self.enterpatterns:
        # print("enter pattern added.")
        expattern = self.enterpatterns[node.name]
        if len(node.args.args) < len(expattern) or \
                expattern != [k.arg for k in node.args.args][:len(expattern)]:
            print("You defined an enter pattern, " + node.name + ", and then you "
                  "define a function with different first N parameters from it.")
            assert False
        node.body.insert(fpad + len(shobb),
                         Expr(value=Call(func=Name(id='_dbgEnter_' + node.name, ctx=Load()),
                                         args=[Name(id=pn, ctx=Load()) for pn in expattern],
                                         keywords=[])))
    ast.fix_missing_locations(node)
    if self.isTestFunc(node):
        # self.generic_visit(node)
        sin = [node,
               Expr(value=Call(func=Name(id='_dbgTest', ctx=Load()), args=[], keywords=[]))]
        ast.copy_location(sin[1], node)
        ast.fix_missing_locations(sin[1])
        return sin
    self.absorbEnterPattern(node)
    self.absorbExitPattern(node)
    # print()
    # print(ast.dump(node))
    self.hotHasReturnCheck = oldHHRC
    self.hot = oldBV
    return node
def _copy_location(newnode, node):
    return ast.fix_missing_locations(ast.copy_location(newnode, node))
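# Quick illustration of the helper above: ast.copy_location propagates the
# source position of an existing node onto a new one, and
# fix_missing_locations fills in any children that still lack positions.
# (ast.Constant needs Python 3.8+.)
import ast

old = ast.parse("x = 1").body[0]
new = ast.Assign(targets=[ast.Name(id="x", ctx=ast.Store())],
                 value=ast.Constant(2))
new = _copy_location(new, old)
print(new.lineno)  # -> 1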
def compile_function_ast(expressions, symbols, arg_names, output_names=None,
                         funname='anonymous', return_ast=False, print_code=False,
                         definitions=None, vectorize=True, use_file=False):
    '''
    expressions: list of equations as string
    '''
    from collections import OrderedDict
    table = OrderedDict()
    aa = arg_names
    if output_names is not None:
        aa = arg_names + [output_names]
    for a in aa:
        symbol_group = a[0]
        date = a[1]
        an = a[2]
        for b in symbols[symbol_group]:
            index = symbols[symbol_group].index(b)
            table[(b, date)] = (an, index)

    table_symbols = {k: (std_date_symbol(*k)) for k in table.keys()}

    # standard assignment: i.e. k = s[0]
    index = lambda x: Index(Num(x))

    # declare symbols
    aux_short_names = [e[2] for e in arg_names if e[0] == 'auxiliaries']

    preamble = []
    for k in table:  # order it
        # k : var, date
        arg, pos = table[k]
        if not (arg in aux_short_names):
            std_name = table_symbols[k]
            val = Subscript(value=Name(id=arg, ctx=Load()), slice=index(pos),
                            ctx=Load())
            line = Assign(targets=[Name(id=std_name, ctx=Store())], value=val)
            if arg != 'out':
                preamble.append(line)

    body = []
    std_dates = StandardizeDates(symbols, aa)

    if definitions is not None:
        for k, v in definitions.items():
            if isinstance(k, str):
                lhs = ast.parse(k).body[0].value
            if isinstance(v, str):
                rhs = ast.parse(v).body[0].value
            else:
                rhs = v
            lhs = std_dates.visit(lhs)
            rhs = std_dates.visit(rhs)
            vname = lhs.id
            line = Assign(targets=[Name(id=vname, ctx=Store())], value=rhs)
            preamble.append(line)

    outs = []
    for i, expr in enumerate(expressions):
        expr = ast.parse(expr).body[0].value
        # if definitions is not None:
        #     expr = ReplaceName(defs).visit(expr)
        rexpr = std_dates.visit(expr)
        rhs = rexpr
        if output_names is not None:
            varname = symbols[output_names[0]][i]
            date = output_names[1]
            out_name = table_symbols[(varname, date)]
        else:
            out_name = 'out_{}'.format(i)
        line = Assign(targets=[Name(id=out_name, ctx=Store())], value=rhs)
        body.append(line)
        line = Assign(targets=[Subscript(value=Name(id='out', ctx=Load()),
                                         slice=index(i), ctx=Store())],
                      value=Name(id=out_name, ctx=Load()))
        body.append(line)

    arg_names = [e for e in arg_names if e[0] != "auxiliaries"]
    args = [e[2] for e in arg_names] + ['out']

    if is_python_3:
        from ast import arg
        f = FunctionDef(name=funname,
                        args=arguments(args=[arg(arg=a) for a in args],
                                       vararg=None, kwarg=None, kwonlyargs=[],
                                       kw_defaults=[], defaults=[]),
                        body=preamble + body, decorator_list=[])
    else:
        f = FunctionDef(name=funname,
                        args=arguments(args=[Name(id=a, ctx=Param()) for a in args],
                                       vararg=None, kwarg=None, kwonlyargs=[],
                                       kw_defaults=[], defaults=[]),
                        body=preamble + body, decorator_list=[])

    mod = Module(body=[f])
    mod = ast.fix_missing_locations(mod)

    if print_code:
        s = "Function {}".format(mod.body[0].name)
        print("-" * len(s))
        print(s)
        print("-" * len(s))
        print(to_source(mod))

    if vectorize:
        from numba import float64, void
        coredims = [len(symbols[an[0]]) for an in arg_names]
        signature = str.join(',', ['(n_{})'.format(d) for d in coredims])
        n_out = len(expressions)
        if n_out in coredims:
            signature += '->(n_{})'.format(n_out)
            # ftylist = float64[:](*([float64[:]] * len(coredims)))
            fty = "void(*[float64[:]]*{})".format(len(coredims) + 1)
        else:
            signature += ',(n_{})'.format(n_out)
            fty = "void(*[float64[:]]*{})".format(len(coredims) + 1)
        ftylist = [fty]
    else:
        signature = None
        ftylist = None

    if use_file:
        fun = eval_ast_with_file(mod, print_code=True)
    else:
        fun = eval_ast(mod)

    jitted = njit(fun)
    if vectorize:
        gufun = guvectorize([fty], signature, target='parallel',
                            nopython=True)(fun)
        return jitted, gufun
    else:
        return jitted
def simulate(dace_program: DaceProgram, *args):
    """ Simulate a DaCe program using Python.

        :param dace_program: A program function annotated with `@dace.program`.
        :param *args: Program arguments to pass.
    """
    pdp, modules = dace_program.generate_pdp()

    # Transform the decorated AST into working python code (annotated so
    # that debugging works)
    simulated_ast = SimulatorTransformer(pdp).visit(pdp.ast)
    mod = ast.Module(body=simulated_ast, lineno=1)
    mod = ast.fix_missing_locations(mod)

    # Compile the transformed AST
    codeobj = compile(mod, pdp.filename, 'exec')

    fname = dace_program.name

    if Config.get_bool('debugprint'):
        print("Simulating DaCe program with name", fname)

    param_symbols = {}

    if len(pdp.params) != len(args):
        raise SyntaxError('Argument number mismatch in \'' + fname +
                          '\', expecting ' + str(len(args)))

    ##################################################################
    # Disallow external variables
    # EXCEPTIONS:
    #   * The dace module ('import dace')
    #   * The math module ('import math')
    #   * Constants (types int, float, dace.int*, dace.float*)
    #   * DaCe symbols that have been defined in @dace.program args
    ##################################################################

    f_globals = {}

    # WORKAROUND: Works around a bug in CPython 2.x where True and
    # False are undefined
    f_globals['True'] = True
    f_globals['False'] = False
    ######################

    # Allow certain namespaces/modules and constants
    f_globals.update(pdp.globals)

    # Resolve symbols
    symbols = {}
    symbols.update(symbolic.getsymbols(args))  # from parameter values
    # (externally defined as "dace.symbol")
    symbols.update(param_symbols)  # from parameter values (constant inputs)

    resolve = {}
    for gname, gval in f_globals.items():
        if isinstance(gval, symbolic.symbol):
            if gval.name in symbols:
                resolve[gname] = gval.get()  # Raise exception if undefined
            else:
                resolve[gname] = None  # Mark unrelated symbols for removal

    f_globals.update(resolve)

    # Remove unrelated symbols from globals
    for rk, rv in resolve.items():
        if rv is None:
            del f_globals[rk]

    # Resolve symbols in arguments as well
    newargs = tuple(symbolic.eval(a) for a in args)
    ##################################################################

    # Store parameter objects
    pdp.arrayobjs = {
        k: v
        for k, v in zip(pdp.params, newargs) if isinstance(v, numpy.ndarray)
    }

    # Simulate f
    ################################
    # Obtain function object
    gen_module = {}
    gen_module.update(f_globals)
    exec(codeobj, gen_module)
    cfunc = gen_module[fname]

    # Run function
    result = cfunc(*newargs)
    ################################

    return result
def canonicalize_code(code, ref_type):
    try:
        ast.parse(codegen.to_source(
            ast.fix_missing_locations(ast.parse(code.strip()))))
    except:
        return ""

    # replace all identifiers in the code by TYPES as guessed
    # parse code
    root = ast.parse(code)
    # walk the ast
    for node in ast.walk(root):
        # fix all identifiers
        try:
            # modify identifier with type
            if ref_type.get(node.id) is not None:
                node.id = ref_type[node.id]
        except:
            pass
        # fix all attributes
        try:
            if ref_type.get(node.attr) is not None:
                node.attr = ref_type[node.attr]
        except:
            pass
        # fix all strings
        try:
            if isinstance(node, ast.Str):
                if ref_type.get(node.s) is not None:
                    node.s = ref_type[node.s]
        except:
            pass
        # fix all numbers
        try:
            if isinstance(node, ast.Num):
                if ref_type.get(node.n) is not None:
                    node.n = ref_type[node.n]
        except:
            pass
        # fix all aliases
        try:
            if isinstance(node, ast.alias):
                if ref_type.get(node.name) is not None:
                    node.name = ref_type[node.name]
        except:
            pass
        # fix all function definitions
        try:
            if isinstance(node, ast.FunctionDef):
                if ref_type.get(node.name) is not None:
                    node.name = ref_type[node.name]
        except:
            pass
        # fix all class definitions
        try:
            if isinstance(node, ast.ClassDef):
                if ref_type.get(node.name) is not None:
                    node.name = ref_type[node.name]
        except:
            pass
        # fix all keyword definitions
        try:
            if isinstance(node, ast.keyword):
                if ref_type.get(node.arg) is not None:
                    node.arg = ref_type[node.arg]
        except:
            pass

    # looks like codegen is buggy!! hence the patchwork
    try:
        # can we parse and unparse the code?
        ast.parse(codegen.to_source(ast.fix_missing_locations(root)))
        code = codegen.to_source(ast.fix_missing_locations(root))
        # codegen does a pretty bad job at generating code :@
        # it generated - raiseFUNC('STR' % ANY)
        # while it should be - raise FUNC('STR' % ANY)
        # make a space in code when such things happen
        for t in TYPES.values():
            code = re.sub(r'([a-zA-Z]+)' + t, r'\1' + " " + t, code)
        # check if we can parse it
        ast.parse(codegen.to_source(ast.fix_missing_locations(root)))
        return code
    except:
        return ""
def eval(self, expr):
    new_expr = self._preparse(expr)
    node = ast.fix_missing_locations(ast.parse(new_expr))
    return self.visit(node)
def res_query(query):
    class MyTransformer(ast.NodeTransformer):
        def visit_Str(self, node):
            return ast.Set(words_dict[node.s])

    try:
        data = []
        hidden_files = list_hidden_files()
        operator = ['OR', 'AND', 'NOT']
        data = []
        query = query.replace("\'", "'")
        # check for more than one operator in a row
        splited_query = query.split()
        duplicate_op_counter = 0
        for first in operator:
            for second in operator:
                if not is_in_order(first, second, splited_query):
                    duplicate_op_counter += 1
        if duplicate_op_counter < 9:
            # bad query detect
            for op in operator:
                query = re.sub(r'\b' + op + r'\b', ' ', query)
        tmp_quote = ''
        quotes_string = re.findall(r'"([^"]*)"', query)
        if quotes_string:
            for text in quotes_string:
                if len(text.split()) > 1:
                    words_in_quotes = text.split()
                    for item in words_in_quotes:
                        tmp_quote += ' \"' + item + '\" '
                    query = query.replace('\"{}\"'.format(text), tmp_quote, 1)
                    tmp_quote = ''
        # check for terms only without operators
        tmp_query = re.sub(' +', ' ', query)
        if tmp_query.endswith(' AND') or tmp_query.endswith(' OR') or \
                tmp_query.endswith(' NOT'):
            tmp_query = query.replace('AND', '').replace('OR', '').replace('NOT', '')
        if not query.replace(')', '').replace('(', '').replace('AND', '').replace(
                'OR', '').replace('NOT', '').replace('"', '').strip():
            query = 'error'
        if not ((findWholeWord(operator[0])(query)) or
                (findWholeWord(operator[1])(query)) or
                (findWholeWord(operator[2])(query))):
            query = query.strip()
            query = ' OR '.join(query.split())
        tmp_query = re.split(r'(OR|AND|NOT)', query)
        # split to text OP text OP text
        new_query = ''
        for text in tmp_query:
            if text in operator:
                new_query += text + ' '
                continue
            if len(text.split()) > 1:
                # if len(text.replace('(', '').replace(')', '').split()) > 1:
                new_query += '('
                for word in text.split():
                    new_query += word + ' OR '
                new_query = new_query[:-3]
                new_query += ') '
            else:
                new_query += text
        # remove stop list terms
        query = new_query
        quotes_words_indexs = []
        for word in new_query.split():
            for term in conf.STOP_LIST:
                tmp_word = word.replace(')', '').replace('(', '').replace('"', '')
                if term == tmp_word:
                    if word[0] == '\"' and word[-1] == '\"':
                        quotes_words_indexs = [
                            (m.start(0), m.end(0))
                            for m in re.finditer(r'\b{}\b'.format(term), query)
                        ]
                        if quotes_words_indexs:
                            for tup in quotes_words_indexs:
                                if query[tup[0] - 1] == '\"' and query[tup[1]] == '\"':
                                    query = query[:tup[0] - 1] + '$' + query[tup[0]:]
                                    query = query[:tup[1]] + '$' + query[tup[1] + 1:]
                                    break
                                else:
                                    continue
                    else:
                        quotes_words_indexs = [
                            (m.start(0), m.end(0))
                            for m in re.finditer(r'\b{}\b'.format(tmp_word), query)
                        ]
                        # query = new_query.replace(tmp_word, 'STOPPED')
                        for tup in quotes_words_indexs:
                            if query[tup[0] - 1] == '$':
                                continue
                            else:
                                start = query[:tup[0]]
                                mid = ' stoppedword '
                                end = query[tup[1] + 1:]
                                query_helper = start + mid + end
                                query = query_helper
                                break
                    quotes_words_indexs = []
        query = re.sub(' +', ' ', query)
        query = query.replace('$', ' ')  # careful
        query = query.replace('\"', '')
        tmp_query = query.replace(')', '').replace('(', '').replace(
            'AND', '').replace('OR', '').replace('NOT', '')
        tmp_query = tmp_query.lower()
        words_list = tmp_query.split()
        words_list_in_quotes = ['\'' + re.sub("'", "\\'", w) + '\''
                                for w in words_list]
        words_dict = {}
        for i in range(len(words_list)):
            words_dict[words_list[i]] = words_list_in_quotes[i]
        processed_query = ''
        for item in query.split():
            if item.lower() in words_dict:
                processed_query += words_dict[item.lower()]
            elif item.replace(')', '').lower() in words_dict:
                b = item.count(')')
                processed_query += words_dict[item.replace(')', '').lower()]
                processed_query += b * ')'
            elif item.replace('(', '').lower() in words_dict:
                b = item.count('(')
                processed_query += b * '('
                processed_query += words_dict[item.replace('(', '').lower()]
            else:
                processed_query += item
            processed_query += ' '
        for k, v in words_dict.items():
            if k == 'stoppedword':
                ast_list = create_ast_list([])
            else:
                doc_list = get_doc_list_by_term(k, hidden_files, words_list)
                ast_list = create_ast_list(doc_list)
            words_dict[k] = ast_list
        words_dict = dict({k.replace('*', ''): v for k, v in words_dict.items()})
        words_list = list([word.replace('*', '') for word in words_list])
        processed_query = processed_query.replace('*', '')
        processed_query = processed_query.replace('AND', '&')
        processed_query = processed_query.replace('OR', '|')
        processed_query = processed_query.replace('NOT', '-')
        input_code = ast.parse(processed_query, mode='eval')
        MyTransformer().visit(input_code)
        fixed = ast.fix_missing_locations(input_code)
        code = compile(fixed, '<string>', 'eval')
        result = eval(code)
        result = list(result)
        ranked_doc = _rank(result, words_list)
        sorted_by_rank = sorted(ranked_doc, key=lambda tup: tup[1])
        sorted_docit = [tup[0] for tup in sorted_by_rank]
        sorted_docit = sorted_docit[::-1]
        result = sorted_docit
        for doc_id in result:
            data.append(get_data_by_docid(doc_id, words_list))
        return utils.create_res_obj(data)
    except Exception as e:
        return utils.create_res_obj(
            {
                'traceback': traceback.format_exc(),
                'msg': "{}".format(e.args)
            },
            success=False)
def fn(expr):
    n1 = parse(expr, mode='eval')
    n0 = Num2Decimal().visit(PowChange().visit(n1))
    fix_missing_locations(n0)
    return eval(compile(n0, 'q', 'eval'))
def __get_ast(self, col_names):
    parser = self.__QueryParser(col_names, self.__IN_TABLE_NAME)
    query = ast.fix_missing_locations(
        parser.visit(ast.parse(self.__query, mode='eval')))
    return query
def trace_code(self, source, load_as=None, module=False, dump=False,
               driver=None, filename=None):
    """ Trace a module of source code, possibly by running a driver script.

    :param str source: the source code to trace
    :param str load_as: the module name to load the source code as
    :param bool module: True if the driver is a module name instead of a
        file name
    :param bool dump: True if the source code should be included in the
        output
    :param str driver: the driver script's file name or module name
    :param str filename: the file name of the source code
    """
    builder = ReportBuilder(self.message_limit)
    builder.max_width = self.max_width
    self.return_code = 0

    try:
        tree = parse(source)

        new_tree = Tracer().visit(tree)
        fix_missing_locations(new_tree)
        LineNumberCleaner().visit(new_tree)
        # from ast import dump
        # print(dump(new_tree, include_attributes=False))
        code = compile(new_tree, PSEUDO_FILENAME, 'exec')

        self.environment[CONTEXT_NAME] = builder
        is_own_driver = module and driver and driver[0] == load_as
        self.run_instrumented_module(code, load_as, filename, is_own_driver)
        if driver and not is_own_driver:
            with self.swallow_output():
                if not module:
                    self.run_python_file(driver[0], driver)
                else:
                    module_name = driver[0]
                    try:
                        self.run_python_module(module_name, driver)
                    except SystemExit as ex:
                        if ex.code:
                            if module_name != 'unittest':
                                raise
                            self.return_code = ex.code
                            result = (sys.stderr.last_line or 'FAIL ')
                            result = 'unittest: ' + result
                            self.report_driver_result(builder, [result])
        for value in self.environment.values():
            if isinstance(value, types.GeneratorType):
                value.close()
    except SyntaxError:
        self.return_code = 1
        ex = sys.exc_info()[1]
        messages = traceback.format_exception_only(type(ex), ex)
        builder.add_message(messages[-1].strip() + ' ', ex.lineno)
    except BaseException as ex:
        self.return_code = getattr(ex, 'code', 1)
        etype, value, tb = sys.exc_info()
        is_reported = False
        messages = traceback.format_exception_only(etype, value)
        entries = traceback.extract_tb(tb)
        for filename, _, _, _ in entries:
            if filename == PSEUDO_FILENAME:
                is_reported = True
        while not is_reported and tb is not None:
            frame = tb.tb_frame
            code = frame.f_code
            filename = code.co_filename
            if __file__ not in (filename, filename + 'c'):
                break
            tb = tb.tb_next
        if not is_reported:
            if tb:
                messages = traceback.format_exception(etype, value, tb)
            else:
                messages = traceback.format_exception_only(etype, value)
            self.report_driver_result(builder, messages)

    report = builder.report()
    if dump:
        source_lines = source.splitlines()
        report_lines = report.splitlines()
        dump_lines = []
        source_width = max(map(len, source_lines))
        indent = 4
        for source_line, report_line in izip_longest(source_lines,
                                                     report_lines,
                                                     fillvalue=''):
            line = (indent * ' ' + source_line +
                    (source_width - len(source_line)) * ' ' +
                    ' | ' + report_line)
            dump_lines.append(line)
        report = '\n'.join(dump_lines)

    return report