def visit_AugAssign(self, node):
    """Instrument an augmented assignment (e.g. ``x += 1``) for tracing.

    Emits a ``start_assignment`` context call, then wraps the original
    statement in a try/finally that records the assigned value
    (``set_assignment_value``), reports it (``report_assignment``) and
    always closes with ``end_assignment``. Returns the list of
    replacement statements.
    """
    # Keep an unvisited copy of the target so we can read its value
    # back *after* the assignment has executed.
    read_target = deepcopy(node.target)
    existing_node = self.generic_visit(node)
    line_numbers = set()
    self._find_line_numbers(existing_node, line_numbers)
    first_line_number = min(line_numbers)
    last_line_number = max(line_numbers)
    new_nodes = []
    # e.g. "x = {!r}" -- filled at runtime with the observed value.
    format_string = (self._wrap_assignment_target(existing_node.target) +
                     ' = {!r}')
    new_nodes.append(self._create_context_call('start_assignment'))
    self._wrap_assignment_target(read_target, index_to_get=-1)
    # The copied target is *read* after the assignment, so switch its
    # expression context from Store to Load.
    read_target.ctx = Load()
    set_assignment_value = self._create_context_call(
        'set_assignment_value', [read_target])
    report_assignment = self._create_context_call(
        'report_assignment',
        [Str(s=format_string), Num(n=existing_node.lineno)])
    end_assignment = self._create_context_call('end_assignment')
    try_body = [existing_node, set_assignment_value, report_assignment]
    finally_body = [end_assignment]
    # NOTE(review): TryFinally/Str/Num are pre-3.8 AST names; this module
    # presumably targets an older Python grammar -- confirm.
    new_nodes.append(TryFinally(body=try_body,
                                finalbody=finally_body,
                                handlers=[],
                                orelse=[],
                                lineno=first_line_number))
    self._set_statement_line_numbers(try_body, first_line_number)
    self._set_statement_line_numbers(finally_body, last_line_number)
    return new_nodes
def rewrite_with_to_binds(body, monad): new_body = [] # Construct a transformer for this specific monad's mreturn rdb = RewriteDoBody(monad) # This is the body of the lambda we're about to construct last_part = body[-1].value # Rewrite mreturn rdb.visit(last_part) # Iterate in reverse, making each line the into a lambda whose body is the # rest of the lines (which are each lambdas), and whose names are the # bind assignments. for b in reversed(body[:-1]): rdb.visit(b) if isinstance(b, Assign): name = b.targets[0].id value = b.value else: # If there was no assignment to the bind, just use a random name, eek name = '__DO_NOT_NAME_A_VARIABLE_THIS_STRING__' value = b.value # last part = value.bind(lambda name: last_part) last_part = Call(func=Attribute(value=value, attr='bind', ctx=Load()), args=[ Lambda(args=arguments(args=[ Name(id=name, ctx=Param()), ], vararg=None, kwarg=None, defaults=[]), body=last_part), ], keywords=[], starargs=None, kwargs=None) return last_part
def visit_AugAssign(self, node):
    """Instrument an augmented assignment for tracing/reporting.

    If the target can be formatted, either the incoming value is captured
    in-place (targets whose format contains ``:``) or the target is
    re-read after execution; a ``report_assignment`` call is appended.
    ``_create_end_assignment`` wraps everything with start/end context
    calls. Returns the list of replacement statements.
    """
    # Unvisited copy of the target for the read-back fallback path.
    read_target = deepcopy(node.target)
    existing_node = self.generic_visit(node)
    line_numbers = set()
    find_line_numbers(existing_node, line_numbers)
    first_line_number = min(line_numbers)
    last_line_number = max(line_numbers)
    new_nodes = []
    try_body = [existing_node]
    new_nodes.append(self._create_context_call('start_assignment'))
    format_string = self._wrap_assignment_target(existing_node.target)
    if format_string is not None:
        if ':' in format_string:
            # Presumably a slice-style target: the value is captured as
            # it is computed, not read back afterwards -- confirm.
            existing_node.value = self._create_bare_context_call(
                'set_assignment_value', [existing_node.value])
            operator_char = OPERATOR_CHARS.get(type(existing_node.op), '?')
            format_string += ' {}= {{}} '.format(operator_char)
        else:
            # Plain target: re-read it (Load context) after the
            # statement has run.
            self._wrap_assignment_target(read_target, index_to_get=-1)
            read_target.ctx = Load()
            set_assignment_value = self._create_context_call(
                'set_assignment_value', [read_target])
            try_body.append(set_assignment_value)
            format_string += ' = {}'
        try_body.append(
            self._create_context_call(
                'report_assignment',
                [Str(s=format_string), Num(n=existing_node.lineno)]))
    self._create_end_assignment(new_nodes, try_body,
                                first_line_number, last_line_number)
    return new_nodes
def __init__(self, *, elts: tp.List[expr], ctx: expr_context = Load()):
    """Initialize the node with its element list and expression context.

    Forwards both keyword-only arguments to the base class (equivalent
    to the locals()-filtering idiom, spelled out explicitly).
    """
    super().__init__(elts=elts, ctx=ctx)
def visit_Name(self, node):
    """Replace known variable names with their date-0 timed symbol.

    Names not registered in ``self.variables`` pass through untouched.
    """
    if node.id not in self.variables:
        return node
    return Name(id=std_tsymbol((node.id, 0)), ctx=Load())
def visit_Call(self, node):
    """Rewrite timed-symbol calls like ``s(1)``, ``s(+1)`` or ``s(-1)``.

    When the called name is a known variable, its single argument is
    interpreted as a date offset and the call is replaced by the plain
    ``Name`` produced by ``std_tsymbol``. Other calls are rebuilt with
    their arguments visited recursively.
    """
    name = node.func.id
    args = node.args[0]
    if name in self.variables:
        if isinstance(args, UnaryOp):
            # we have s(+1)
            if (isinstance(args.op, UAdd)):
                args = args.operand
                date = args.n
            elif (isinstance(args.op, USub)):
                args = args.operand
                date = -args.n
            else:
                raise Exception("Unrecognized subscript.")
        else:
            # Bare numeric literal, e.g. s(1).
            date = args.n
        newname = std_tsymbol((name, date))
        # NOTE(review): if std_tsymbol returns None this method falls
        # through and returns None (dropping the node) -- confirm intended.
        if newname is not None:
            return Name(newname, Load())
    else:
        # , keywords=node.keywords, starargs=node.starargs, kwargs=node.kwargs)
        return Call(func=node.func,
                    args=[self.visit(e) for e in node.args],
                    keywords=[])
def getTracer(file_name, arg_name, id):
    """Build an ``Expr`` node calling ``Tracer.trace(file, id, arg, value)``.

    The first three call arguments are passed as string constants; the
    fourth is a ``Name`` load of the traced argument itself.
    """
    trace_func = Attribute(
        value=Name(id='Tracer', ctx=Load()),
        attr='trace',
        ctx=Load(),
    )
    trace_args = [
        Constant(value=str(file_name), kind=None),
        Constant(value=str(id), kind=None),
        Constant(value=str(arg_name), kind=None),
        Name(id=arg_name, ctx=Load()),
    ]
    return Expr(value=Call(func=trace_func, args=trace_args, keywords=[]))
def AugAssignToAssign(node):
    """Desugar ``target op= value`` into ``target = target op value``.

    The original target node is reused as the assignment target; a
    fresh ``Name`` with a Load context re-reads it inside the BinOp.
    """
    target_name = node.target
    read_back = Name(id=target_name.id, ctx=Load())
    replacement = BinOp(left=read_back, op=node.op, right=node.value)
    return Assign(targets=[target_name], value=replacement)
def visit_Name(self, node):
    """Substitute a name with its registered replacement, if any.

    Unknown names -- or a missing ``replacements`` mapping entirely --
    leave the node unchanged (EAFP lookup).
    """
    try:
        replacement = self.replacements[node.id]
    except (KeyError, AttributeError):
        return node
    return Name(id=replacement.name, ctx=Load())
def visit_UnaryOp(self, node):
    """Rewrite ``not x`` into a ``logical_not(x)`` call.

    Any other unary operator raises NotImplementedError.
    """
    self.generic_visit(node)
    if not isinstance(node.op, Not):
        raise NotImplementedError('%s not implemented' % (node.op, ))
    # The two trailing None arguments are the legacy (pre-3.5) Call
    # fields starargs/kwargs, so this targets the old ast signature.
    return Call(Name('logical_not', Load()), [node.operand], [], None, None)
def test_param2argparse_param_default_ast_expr_with_list(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
      whence said default is an ast.List inside an ast.Expr
    """
    # Default is an empty list literal wrapped in an Expr; the generated
    # argparse call is compared against the prebuilt gold AST.
    run_ast_test(
        gen_ast=param2argparse_param(
            (
                "byo",
                {
                    "default": Expr(
                        List(
                            elts=[],
                            ctx=Load(),
                            expr=None,
                        ),
                        expr_value=None,
                    ),
                    "typ": "str",
                },
            ),
        ),
        gold=argparse_add_argument_expr,
        test_case_instance=self,
    )
def visit_let(self, letexp: Expr):
    # To properly account for scoping and ensure that the entire node produces an expression,
    # we translate the let binding as a function that we call with the value we intend to bind.
    # Yes, this is somewhat ugly.
    """
    let var = value in body
    =======================
    def let_thunk(var):
        return body
    let_thunk(value)
    """
    bind_body, bind_defs = self.visit(letexp.body)

    func_name = self.generate_function_name("_let_func")
    binding_func = self.create_def(func_name,
                                   [self.get_var_name(letexp.var)],
                                   bind_defs + [Return(bind_body)])

    # we call the binding func with the intended value for the bound variable

    # special case: if the value is a function literal, we must ensure it can be
    # recursive by naming it after the var
    if isinstance(letexp.value, Function):
        value_def, value_name = self.convert_func_node(
            letexp.value, letexp.var)
        return (
            self.create_call(func_name, [Name(value_name, Load())]),
            [value_def, binding_func],
        )

    # General case: evaluate the bound value, then call the binding
    # function with it; the function def travels in the defs list.
    value_body, value_defs = self.visit(letexp.value)
    value_defs.append(binding_func)
    binding_call = self.create_call(func_name, [value_body])
    return (binding_call, value_defs)
def test_replace_in_ast_with_val_on_non_function(self) -> None:
    """
    Tests that `RewriteAtQuery` can actually replace a node at given location
    """
    parsed_ast = ast_parse(class_str)
    rewrite_at_query = RewriteAtQuery(
        search="ConfigClass.dataset_name".split("."),
        replacement_node=AnnAssign(
            annotation=Name("int", Load()),
            simple=1,
            target=Name("dataset_name", Store()),
            value=set_value(15),
            expr=None,
            expr_target=None,
            expr_annotation=None,
        ),
    )
    gen_ast = rewrite_at_query.visit(parsed_ast)
    # Fixed: the original `assertTrue(x, True)` passed `True` as the
    # *msg* argument, which never checked anything beyond truthiness.
    self.assertTrue(rewrite_at_query.replaced)
    run_ast_test(
        self,
        gen_ast,
        ast.parse(
            class_str.replace(
                'dataset_name: str = "mnist"', "dataset_name: int = 15"
            )
        ),
    )
def test_annotate_ancestry(self) -> None:
    """Tests that `annotate_ancestry` properly decorates"""
    # Build a two-statement module: an annotated and a plain assignment.
    # NOTE(review): the extra kwargs (annotation/simple/expr*) on Assign
    # are not standard ast fields -- presumably project conventions.
    node = Module(
        body=[
            AnnAssign(
                annotation=Name(
                    "str",
                    Load(),
                ),
                simple=1,
                target=Name("dataset_name", Store()),
                value=set_value("~/tensorflow_datasets"),
                expr=None,
                expr_target=None,
                expr_annotation=None,
            ),
            Assign(
                annotation=None,
                simple=1,
                targets=[Name("epochs", Store())],
                value=set_value("333"),
                expr=None,
                expr_target=None,
                expr_annotation=None,
                **maybe_type_comment
            ),
        ],
        stmt=None,
    )
    # `_location` must be absent before the call and present afterwards.
    self.assertFalse(hasattr(node.body[0], "_location"))
    self.assertFalse(hasattr(node.body[1], "_location"))
    annotate_ancestry(node)
    self.assertEqual(node.body[0]._location, ["dataset_name"])
    self.assertEqual(node.body[1]._location, ["epochs"])
def test_emit_ann_assign(self) -> None:
    """Tests that AnnAssign is emitted from `emit_ann_assign`"""
    # Sanity: the source node is an AnnAssign, and emitting preserves that.
    # (The original repeated the second assertion twice; the duplicate
    # added nothing and has been removed.)
    self.assertIsInstance(class_ast.body[1], AnnAssign)
    self.assertIsInstance(emit_ann_assign(class_ast.body[1]), AnnAssign)
    gen_ast = emit_ann_assign(
        find_in_ast(
            "C.function_name.dataset_name".split("."),
            class_with_method_and_body_types_ast,
        )
    )
    self.assertIsInstance(gen_ast, AnnAssign)
    run_ast_test(
        self,
        gen_ast,
        AnnAssign(
            annotation=Name(
                "str",
                Load(),
            ),
            simple=1,
            target=Name("dataset_name", Store()),
            value=set_value("~/tensorflow_datasets"),
            expr=None,
            expr_target=None,
            expr_annotation=None,
        ),
    )
def test_parse_to_scalar(self) -> None:
    """Test various inputs and outputs for `parse_to_scalar`"""
    # Scalars and simple AST wrappers map straight through.
    for fst, snd in (
        (5, 5),
        ("5", "5"),
        (set_value(5), 5),
        (ast.Expr(None), NoneStr),
    ):
        self.assertEqual(parse_to_scalar(fst), snd)

    # A parsed `[5]` statement yields a List node whose element is 5.
    self.assertEqual(
        get_value(parse_to_scalar(ast.parse("[5]").body[0]).elts[0]), 5
    )
    self.assertTrue(
        cmp_ast(
            parse_to_scalar(ast.parse("[5]").body[0]),
            List([set_value(5)], Load()),
        )
    )
    # A whole Module is stringified rather than unwrapped.
    self.assertEqual(parse_to_scalar(ast.parse("[5]")), "[5]")
    # Unsupported input types raise.
    # (The original contained a stray no-op `parse_to_scalar(...)` call
    # and a duplicated assertRaises; both removed.)
    self.assertRaises(NotImplementedError, parse_to_scalar, memoryview(b""))
def visit_BoolOp(self, node: BoolOp) -> Union[BoolOp, Call]:
    """Rewrite ``and``/``or`` into lazy runtime delegate calls.

    ``a and b`` becomes ``__lazybooland__(a, lambda: b)`` (and
    ``__lazyboolor__`` for ``or``); the right operand is wrapped in a
    zero-argument lambda so it is only evaluated on demand. Operators
    other than And/Or return the node unchanged.

    Fixes two defects in the original:
    * the return annotation said ``UnaryOp`` although the method returns
      either the untouched ``BoolOp`` or a ``Call``;
    * chains of more than two operands (``a and b and c`` produces a
      single BoolOp with three ``values``) crashed on the two-way
      unpacking -- they are now folded right-to-left, which is identical
      to the original for exactly two operands.
    """
    self.generic_visit(node)
    if isinstance(node.op, And):
        runtime = '__lazybooland__'
    elif isinstance(node.op, Or):
        runtime = '__lazyboolor__'
    else:
        return node
    # Fold right-to-left: a OP b OP c -> OP'(a, lambda: OP'(b, lambda: c))
    delegate = node.values[-1]
    for lhs in reversed(node.values[:-1]):
        delegate = Call(
            func=Name(id=runtime, ctx=Load()),
            args=[
                lhs,
                # Make the rhs a deferred computation by wrapping with a lambda
                Lambda(args=arguments(args=[], kwonlyargs=[],
                                      kw_defaults=[], defaults=[]),
                       body=delegate)
            ],
            keywords=[])
    copy_location(delegate, node)
    fix_missing_locations(delegate)
    return delegate
def visit_call(self, call: Expr):
    """For calls, we must distinguish between ordinary functions,
    operators, and constructor calls."""
    func = call.op
    # Convert all argument expressions first; their defs accumulate.
    fields, field_defs = self.convert_fields(call.args)
    if isinstance(func, tvm.ir.Op):
        raise Exception(
            'Operators should have been lowered and eliminated')

    if isinstance(func, relay.Constructor):
        # produce a constructor value
        return (self.create_call('ConstructorValue', [
            ast.Num(func.tag),
            ast.List(fields, Load()),
            NameConstant(None)
        ]), field_defs)

    # lowered operator: generate a call to a function that gets the PackedFunc
    # from TVM's registry
    if isinstance(
            func, Function) and func.attrs and func.attrs.Primitive.value == 1:
        op_call_def, op_call = self.create_op_call(func, call.args, fields)
        return (op_call, field_defs + [op_call_def])

    # ordinary function
    converted_func, defs = self.visit(func)
    defs += field_defs
    return (ast.Call(converted_func, fields, []), defs)
def convert_input(py_input, arg_type):
    """Use the types of the function arguments to determine whether we
    expect a tensor or tuple (returns list of inputs to the lowered op
    call)."""
    # Tensor arguments pass straight through as a singleton list.
    if isinstance(arg_type, relay.TensorType):
        return [py_input]
    assert isinstance(arg_type, relay.TupleType)
    # Tuples flatten recursively; member i is accessed as
    # py_input.fields[i].
    flattened = []
    for i in range(len(arg_type.fields)):
        member = ast.Subscript(
            ast.Attribute(py_input, 'fields', Load()),
            ast.Index(Num(i)), Load())
        flattened.extend(convert_input(member, arg_type.fields[i]))
    return flattened
def _getblockattr(name, lineno, col):
    """calls getattr(name, '__xonsh_block__', False)."""
    # Every generated node carries the caller's source position.
    attr_args = [
        Name(id=name, ctx=Load(), lineno=lineno, col_offset=col),
        Str(s='__xonsh_block__', lineno=lineno, col_offset=col),
        NameConstant(value=False, lineno=lineno, col_offset=col),
    ]
    return xonsh_call('getattr', args=attr_args, lineno=lineno, col=col)
def _split_set_elts(elts): # pylint: disable=C0111 collapsed = [] for element in elts: if isinstance(element, BinOp) and isinstance(element.op, BitOr): collapsed.extend(NotationASTTransformer._collapse(element)) else: collapsed.append(element) last_choice = [] choices = [last_choice] for element in collapsed: if isinstance(element, BitOr): last_choice = [] choices.append(last_choice) else: last_choice.append(element) splitted = [] for choice in choices: if len(choice) > 1: splitted.append(Tuple(choice, Load())) else: splitted.append(choice[0]) return splitted
def test_ast_equal() -> None:
    """Structurally identical AST trees must compare deep-equal."""
    name_a = Name(id="print", ctx=Load())
    name_b = Name(id="print", ctx=Load())
    assert ast_deep_equal(name_a, name_b)

    def hello_stmt():
        # Fresh, structurally identical print("hello, world") statement.
        return Expr(value=Call(
            func=Name(id="print", ctx=Load()),
            args=[Constant(value="hello, world")],
            keywords=[],
        ))

    assert ast_deep_equal(hello_stmt(), hello_stmt())
def visit_Tuple(self, node):
    """Translate a two-element tuple literal into a range-constructor call.

    Anything other than exactly two endpoints is a parse error.
    """
    if len(node.elts) != 2:
        raise self.parseError(node, 'interval must have exactly two endpoints')
    endpoints = list(map(self.visit, node.elts))
    interval_call = Call(Name(rangeConstructor, Load()), endpoints, [])
    return copy_location(interval_call, node)
def boolOpBreaker(self, values, func):
    """Fold ``values`` into nested binary ``func(...)`` calls.

    ``[a, b, c]`` becomes ``func(a, func(b, c))``; a single value is
    returned unchanged. (Iterative form of the original recursion.)
    """
    result = values[-1]
    for operand in reversed(values[:-1]):
        # Legacy 5-argument Call signature (keywords, starargs, kwargs).
        result = Call(Name(func, Load()), [operand, result], [], None, None)
    return result
def compile(self, im_root):
    """
    Compile a :class:`piglet.intermediate.RootNode` to an
    :class:`ast.Module` object
    """
    # Guard against reuse: compilation mutates self.module in place.
    assert self.src_root is None, \
        "{!r}.compile called more than once".format(self)
    try:
        self.src_root = im_root
        fn = self._compile_function(im_root, self.module, '__piglet_root__')
        add_arg_default(fn,
                        make_arg('__piglet_bases'),
                        List(elts=[], ctx=Load()))
        fn.args.kwarg = make_kwarg('__piglet_extra_blocks')
        module = self.module
        # Post-processing passes over the generated module AST.
        module = _hoist_variables_to_piglet_context(module)
        module = _ensure_all_functions_yield(module)
        module = _deduplicate_exception_annotations(module)
        module = add_locations(module)
    except PigletParseError as e:
        # Attach the template filename before re-raising for diagnostics.
        e.filename = self.filename
        raise
    return module
def visit_Call(self, node):
    """Rewrite subscripted-symbol calls like ``s(1)`` or ``s(+1)``.

    Known variables called with a date offset are replaced by the
    pre-registered name from ``self.table_symbols``; unknown dates are
    an error. Other calls are rebuilt with visited arguments.
    """
    name = node.func.id
    args = node.args[0]
    if name in self.variables:
        if isinstance(args, UnaryOp):
            # we have s(+1)
            # NOTE(review): only UAdd is handled, so s(-1) fails this
            # assert (stripped under -O) -- confirm negative offsets are
            # rejected upstream.
            assert (isinstance(args.op, UAdd))
            args = args.operand
        date = args.n
        key = (name, date)
        newname = self.table_symbols.get(key)
        if newname is not None:
            return Name(newname, Load())
        else:
            raise Exception(
                "Symbol {} incorrectly subscripted with date {}.".format(
                    name, date))
    else:
        # NOTE(review): starargs/kwargs are legacy (pre-3.5) Call
        # fields -- this targets an old ast grammar.
        return Call(func=node.func,
                    args=[self.visit(e) for e in node.args],
                    keywords=node.keywords,
                    starargs=node.starargs,
                    kwargs=node.kwargs)
def test_param2argparse_param_default_torch(self) -> None:
    """
    Tests that param2argparse_param works to change the type based on the default
      whence said default is a proxy for an internal PyTorch type
    """

    class FakeTorch(object):
        """Not a real torch"""

        def __str__(self):
            """But a real str

            :return: An actual str
            :rtype: ```Literal['<required parameter>']```
            """
            return "<required parameter>"

    # type("FakeTorch", tuple(), {"__str__": lambda _: "<required parameter>"})

    # The "<required parameter>" sentinel should make the parameter
    # required, with its type taken from the proxy's class name.
    run_ast_test(
        gen_ast=param2argparse_param(
            (
                "byo",
                {
                    "default": FakeTorch(),
                },
            ),
        ),
        gold=Expr(
            Call(
                args=[set_value("--byo")],
                func=Attribute(
                    Name("argument_parser", Load()),
                    "add_argument",
                    Load(),
                ),
                keywords=[
                    keyword(
                        arg="type",
                        value=Name(FakeTorch.__name__, Load()),
                        identifier=None,
                    ),
                    keyword(arg="required",
                            value=set_value(True),
                            identifier=None),
                ],
                expr=None,
                expr_func=None,
            )
        ),
        test_case_instance=self,
    )
def compile_translationnode(self, srcnode, parent):
    """Compile an i18n translation node into yield statements.

    The message string is passed through ``_(...)``. Named child nodes
    are rendered first into ``__piglet_places`` and then substituted for
    their ``${name}`` placeholders via chained ``.replace`` calls.
    """
    translated = Call(func=LoadName('_'),
                      args=[Str(srcnode.get_msgstr())],
                      starargs=None,
                      kwargs=None,
                      keywords=[])

    named_children = [(name, node)
                      for name, node in srcnode.named_children()
                      if name is not None]

    if not named_children:
        # Simple case - no dynamic children for placeholder replacement
        parent.body.append(Expr(value=Yield(translated)))
        return

    # Mapping of placeholder name -> rendered output string.
    parent.body.append(
        Assign(targets=[StoreName('__piglet_places')], value=Dict([], []))
    )

    for name, node in named_children:
        with self.collect_output(parent) as ACC:
            self._compile(node, parent)
            # __piglet_places[name] = ''.join(ACC)
            parent.body.append(
                Assign(targets=[Subscript(value=LoadName('__piglet_places'),
                                          slice=Index(value=Str(name)),
                                          ctx=Store())],
                       value=Call(func=Attribute(value=Str(s=''),
                                                 attr='join',
                                                 ctx=Load()),
                                  args=[LoadName(ACC)],
                                  starargs=None,
                                  kwargs=None,
                                  keywords=[]))
            )

    for name, node in named_children:
        # translated = translated.replace('${name}', __piglet_places[name])
        translated = Call(
            func=Attribute(value=translated, attr='replace', ctx=Load()),
            args=[Str('${{{}}}'.format(name)),
                  Subscript(value=LoadName('__piglet_places'),
                            slice=Index(value=Str(name)),
                            ctx=Load())],
            starargs=None,
            kwargs=None,
            keywords=[])
    set_pos(translated, srcnode)
    parent.body.append(Expr(value=Yield(translated)))
def visit_ClassDef(self, node):
    """Process property defaults for Scenic classes."""
    if node.name in self.constructors:  # Scenic class definition
        newBody = []
        for child in node.body:
            child = self.visit(child)
            if isinstance(child, AnnAssign):  # default value for property
                origValue = child.annotation
                target = child.target
                # extract any attributes for this property
                metaAttrs = []
                if isinstance(target, Subscript):
                    sl = target.slice
                    if not isinstance(sl, Index):
                        self.parseError(
                            sl, 'malformed attributes for property default')
                    sl = sl.value
                    if isinstance(sl, Name):
                        metaAttrs.append(sl.id)
                    elif isinstance(sl, Tuple):
                        for elt in sl.elts:
                            if not isinstance(elt, Name):
                                self.parseError(
                                    elt,
                                    'malformed attributes for property default')
                            metaAttrs.append(elt.id)
                    else:
                        self.parseError(
                            sl, 'malformed attributes for property default')
                    # Strip the subscript: the property keeps its bare name.
                    newTarget = Name(target.value.id, Store())
                    copy_location(newTarget, target)
                    target = newTarget
                # find dependencies of the default value
                properties = AttributeFinder.find('self', origValue)
                # create default value object
                args = [
                    Set([Str(prop) for prop in properties]),
                    Set([Str(attr) for attr in metaAttrs]),
                    Lambda(self.self_args, origValue)
                ]
                value = Call(Name(createDefault, Load()), args, [])
                copy_location(value, origValue)
                newChild = AnnAssign(
                    target=target, annotation=value, value=None, simple=True)
                child = copy_location(newChild, child)
            newBody.append(child)
        node.body = newBody
        return node
    else:  # ordinary Python class
        # it's impossible at the moment to define a Python class in a Scenic file,
        # but we'll leave this check here for future-proofing
        for base in node.bases:
            name = None
            if isinstance(base, Call):
                name = base.func.id
            elif isinstance(base, Name):
                name = base.id
            if name is not None and name in self.constructors:
                self.parseError(
                    node,
                    f'Python class {node.name} derives from PRS class {name}')
        return self.generic_visit(node)
def visit_Yield(self, node):
    """Wrap a yield's value in a ``yield_value`` tracing call.

    A bare ``yield`` is reported as yielding ``None`` by substituting a
    ``Name`` node that loads ``None``.
    """
    visited = self.generic_visit(node)
    value = visited.value
    if value is None:
        value = Name(id='None', ctx=Load())
    wrapped = self._create_bare_context_call(
        'yield_value', [value, Num(n=visited.lineno)])
    return Yield(value=wrapped)