def duplicate_name(self, node: NameExpr) -> NameExpr:
    """Return a fresh NameExpr copy of ``node``, preserving its symbol info.

    This method is used when the transform result must be a NameExpr;
    visit_name_expr() is used when there is no such restriction.
    """
    duplicate = NameExpr(node.name)
    duplicate.info = node.info
    self.copy_ref(duplicate, node)
    return duplicate
def name_expr(self, name):
    """Build a NameExpr referring to the local variable ``name`` in this scope."""
    var = self.names[name]
    expr = NameExpr(name)
    expr.kind = nodes.LDEF
    expr.node = var
    # Record the expression's type so later phases can look it up.
    self.type_map[expr] = var.type
    return expr
def convert_arg(index: int, arg: ast27.expr) -> Var:
    """Turn a Python 2 argument node into a Var.

    Tuple arguments get a synthetic name plus a decomposing assignment
    appended to the enclosing scope's ``decompose_stmts``.
    """
    if isinstance(arg, ast27.Name):
        arg_name = arg.id
    elif isinstance(arg, ast27.Tuple):
        arg_name = '__tuple_arg_{}'.format(index + 1)
        # Unpack the synthetic argument into the original tuple pattern.
        source = NameExpr(arg_name)
        source.set_line(line)
        unpack = AssignmentStmt([self.visit(arg)], source)
        unpack.set_line(line)
        decompose_stmts.append(unpack)
    else:
        raise RuntimeError("'{}' is not a valid argument.".format(ast27.dump(arg)))
    return Var(arg_name)
def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext',
                                 auto_attribs: bool,
                                 lhs: NameExpr,
                                 rvalue: CallExpr,
                                 stmt: AssignmentStmt) -> Optional[Attribute]:
    """Return an Attribute from the assignment or None if you can't make one."""
    if auto_attribs and not stmt.new_syntax:
        # auto_attribs requires an annotation on *every* attr.ib.
        assert lhs.node is not None
        ctx.api.msg.need_annotation_for_var(lhs.node, stmt)
        return None

    if len(stmt.lvalues) > 1:
        ctx.api.fail("Too many names for one attribute", stmt)
        return None

    # This is the type that belongs in the __init__ method for this attrib.
    init_type = stmt.type

    # Read all the arguments from the call.
    init = _get_bool_argument(ctx, rvalue, 'init', True)
    # TODO: Check for attr.NOTHING
    attr_has_default = bool(_get_argument(rvalue, 'default'))

    # If the type isn't set through annotation but is passed through `type=` use that.
    type_arg = _get_argument(rvalue, 'type')
    if type_arg and not init_type:
        try:
            unanalyzed = expr_to_unanalyzed_type(type_arg)
        except TypeTranslationError:
            ctx.api.fail('Invalid argument to type', type_arg)
        else:
            init_type = ctx.api.anal_type(unanalyzed)

    if init_type and isinstance(lhs.node, Var) and not lhs.node.type:
        # If there is no annotation, add one.
        lhs.node.type = init_type
        lhs.is_inferred_def = False

    # Note: convert is deprecated but works the same as converter.
    converter = _get_argument(rvalue, 'converter')
    convert = _get_argument(rvalue, 'convert')
    if convert and converter:
        ctx.api.fail("Can't pass both `convert` and `converter`.", rvalue)
    elif convert:
        ctx.api.fail("convert is deprecated, use converter", rvalue)
        converter = convert
    converter_name = _get_converter_name(ctx, converter)

    return Attribute(lhs.name, ctx.cls.info, attr_has_default, init,
                     converter_name, stmt)
def add_method(
        ctx: ClassDefContext,
        name: str,
        args: List[Argument],
        return_type: Type,
        self_type: Optional[Type] = None,
        tvar_def: Optional[TypeVarDef] = None,
        is_classmethod: bool = False,
        is_new: bool = False,
        # is_staticmethod: bool = False,
) -> None:
    """
    Adds a new method to a class.

    This can be dropped if/when https://github.com/python/mypy/issues/7301
    is merged.

    Args:
        ctx: plugin class-definition context whose class gets the method.
        name: name of the method to add.
        args: fully typed arguments (excluding self/cls).
        return_type: declared return type of the new method.
        self_type: explicit type for self/cls; defaults to the class itself
            (with its type variables filled in).
        tvar_def: optional type variable to attach to the signature.
        is_classmethod: generate a classmethod (decorated, `_cls` first arg).
        is_new: treat like __new__ (also takes the class as first argument).
    """
    info = ctx.cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            ctx.cls.defs.body.remove(sym.node)

    # Fix: this fallback was previously computed a second time inside the
    # `else` branch below — redundant, since it is assigned unconditionally here.
    self_type = self_type or fill_typevars(info)
    if is_classmethod or is_new:
        first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)]
    # elif is_staticmethod:
    #     first = []
    else:
        first = [Argument(Var('self'), self_type, None, ARG_POS)]
    args = first + args

    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(get_name(arg.variable))
        arg_kinds.append(arg.kind)

    function_type = ctx.api.named_type('__builtins__.function')
    signature = CallableType(arg_types, arg_kinds, arg_names, return_type,
                             function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    # func.is_static = is_staticmethod
    func._fullname = get_fullname(info) + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    if is_classmethod:  # or is_staticmethod:
        # Classmethods are stored as a Decorator wrapping the FuncDef.
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        # if is_classmethod:
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        # else:
        #     v.is_staticmethod = True
        #     dec = Decorator(func, [NameExpr('staticmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True

    info.names[name] = sym
    info.defn.defs.body.append(func)
def visit_Name(self, n: Name) -> NameExpr:
    """Convert an AST name node into a NameExpr, carrying over line info."""
    expr = NameExpr(n.id)
    return self.set_line(expr, n)
def visit_Name(self, n: ast3.Name) -> NameExpr:
    """Convert an ast3 Name node into a NameExpr."""
    name_expr = NameExpr(n.id)
    return name_expr
def parse_key_typeddict_fields(attrs_expr: DictExpr) -> Tuple[List[str], List[Type], Set[str]]:
    """Extract field names, unanalyzed types, and the set of required fields
    from a TypedDict described as a dict literal.

    Keys may be plain string literals (always required) or call expressions
    whose first argument is the field name and whose optional second argument
    is the required flag (defaulting to required).

    Raises:
        UnsupportedKeyTypeError: for any other kind of key expression.
    """
    fields = []
    types = []
    required_fields = set()
    for field_name_expr, field_type_expr in attrs_expr.items:
        if isinstance(field_name_expr, StrExpr):
            fields.append(field_name_expr.value)
            required_fields.add(field_name_expr.value)
        elif isinstance(field_name_expr, CallExpr):
            fields.append(field_name_expr.args[0].value)
            if len(field_name_expr.args) == 2:
                required_expr = field_name_expr.args[1]
            else:
                # Fields are required by default.  Fix: NameExpr() populates
                # only .name, not .fullname, so the comparison below used to be
                # false for the default case and such fields were silently
                # treated as optional.  Set fullname explicitly.
                required_expr = NameExpr('builtins.True')
                required_expr.fullname = 'builtins.True'
            if required_expr.fullname == 'builtins.True':
                required_fields.add(field_name_expr.args[0].value)
        else:
            raise UnsupportedKeyTypeError(str(type(field_name_expr)))
        types.append(expr_to_unanalyzed_type(field_type_expr))
    return fields, types, required_fields
fdef.type = wrapper_sig return fdef Instance self_type(self): return self_type(self.tf.type_context()) Scope make_scope(self): return Scope(self.tf.type_map) class Scope: """Maintain a temporary local scope during transformation.""" void __init__(self, dict<Node, Type> type_map): self.names = <str, Var> {} self.type_map = type_map Var add(self, str name, Type type): v = Var(name) v.type = type self.names[name] = v return v NameExpr name_expr(self, str name): nexpr = NameExpr(name) nexpr.kind = nodes.LDEF node = self.names[name] nexpr.node = node self.type_map[nexpr] = node.type return nexpr
def visit_Name(self, n):
    # Translate an AST name node into a mypy NameExpr.
    name_node = NameExpr(n.id)
    return name_node
def visit_NameConstant(self, n):
    # True/False/None constants are represented by their textual name.
    constant_name = str(n.value)
    return NameExpr(constant_name)
def visit_Name(self, n: ast35.Name) -> Node:
    """Convert an ast35 Name node into a NameExpr."""
    result = NameExpr(n.id)
    return result
def test_multiple_groups_coalescing(self) -> None:
    """Two '==' groups sharing operand x0 merge iff x0 is assignable."""
    x0, x1, x2, x3, x4 = (NameExpr(n) for n in ('x0', 'x1', 'x2', 'x3', 'x4'))

    nothing_combined = [('==', [0, 1, 2]), ('<', [2, 3]), ('==', [3, 4, 5])]
    everything_combined = [('==', [0, 1, 2, 3, 4, 5]), ('<', [2, 3])]

    # Note: We do 'x4 == x0' at the very end!
    two_groups = [
        ('==', x0, x1),
        ('==', x1, x2),
        ('<', x2, x3),
        ('==', x3, x4),
        ('==', x4, x0),
    ]

    # Each case: (keymap entries, expected grouping, explanation).
    cases = [
        ({0: x0, 1: x1, 2: x2, 3: x3, 4: x4, 5: x0},
         everything_combined,
         "All vars are assignable, everything is combined"),
        ({1: x1, 2: x2, 3: x3, 4: x4},
         nothing_combined,
         "x0 is unassignable, so no combining"),
        ({0: x0, 1: x1, 3: x3, 5: x0},
         everything_combined,
         "Some vars are unassignable but x0 is, so we combine"),
        ({0: x0, 5: x0},
         everything_combined,
         "All vars are unassignable but x0 is, so we combine"),
    ]
    for keymap, expected, message in cases:
        self.assertEqual(
            group_comparison_operands(
                two_groups,
                self.literal_keymap(keymap),
                {'=='},
            ),
            expected,
            message,
        )
def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext',
                                 auto_attribs: bool,
                                 kw_only: bool,
                                 lhs: NameExpr,
                                 rvalue: CallExpr,
                                 stmt: AssignmentStmt) -> Optional[Attribute]:
    """Return an Attribute from the assignment or None if you can't make one."""
    if auto_attribs and not stmt.new_syntax:
        # auto_attribs requires an annotation on *every* attr.ib.
        assert lhs.node is not None
        ctx.api.msg.need_annotation_for_var(lhs.node, stmt)
        return None

    if len(stmt.lvalues) > 1:
        ctx.api.fail("Too many names for one attribute", stmt)
        return None

    # This is the type that belongs in the __init__ method for this attrib.
    init_type = stmt.type

    # Read all the arguments from the call.
    init = _get_bool_argument(ctx, rvalue, 'init', True)

    # Note: If the class decorator says kw_only=True the attribute is ignored.
    # See https://github.com/python-attrs/attrs/issues/481 for explanation.
    kw_only |= _get_bool_argument(ctx, rvalue, 'kw_only', False)
    if kw_only and ctx.api.options.python_version[0] < 3:
        ctx.api.fail(KW_ONLY_PYTHON_2_UNSUPPORTED, stmt)
        return None

    # TODO: Check for attr.NOTHING
    attr_has_default = bool(_get_argument(rvalue, 'default'))
    attr_has_factory = bool(_get_argument(rvalue, 'factory'))
    if attr_has_default and attr_has_factory:
        ctx.api.fail("Can't pass both `default` and `factory`.", rvalue)
    elif attr_has_factory:
        # A factory counts as having a default.
        attr_has_default = True

    # If the type isn't set through annotation but is passed through `type=` use that.
    type_arg = _get_argument(rvalue, 'type')
    if type_arg and not init_type:
        try:
            unanalyzed = expr_to_unanalyzed_type(type_arg)
        except TypeTranslationError:
            ctx.api.fail('Invalid argument to type', type_arg)
        else:
            init_type = ctx.api.anal_type(unanalyzed)

    if init_type and isinstance(lhs.node, Var) and not lhs.node.type:
        # If there is no annotation, add one.
        lhs.node.type = init_type
        lhs.is_inferred_def = False

    # Note: convert is deprecated but works the same as converter.
    converter = _get_argument(rvalue, 'converter')
    convert = _get_argument(rvalue, 'convert')
    if convert and converter:
        ctx.api.fail("Can't pass both `convert` and `converter`.", rvalue)
    elif convert:
        ctx.api.fail("convert is deprecated, use converter", rvalue)
        converter = convert
    converter_info = _parse_converter(ctx, converter)

    name = unmangle(lhs.name)
    return Attribute(name, ctx.cls.info, attr_has_default, init,
                     kw_only, converter_info, stmt)
def add_method_to_class(
    api: SemanticAnalyzerPluginInterface,
    cls: ClassDef,
    name: str,
    args: List[Argument],
    return_type: Type,
    self_type: Optional[Type] = None,
    tvar_def: Optional[TypeVarDef] = None,
    is_classmethod: bool = False,
) -> None:
    """
    Adds a new method to a class definition.

    NOTE: Copied from mypy/plugins/common.py and extended with support for
    adding classmethods based on https://github.com/python/mypy/pull/7796
    """
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        existing = info.names[name]
        if existing.plugin_generated and isinstance(existing.node, FuncDef):
            cls.defs.body.remove(existing.node)

    self_type = self_type or fill_typevars(info)

    # Add either self or cls as the first argument
    if is_classmethod:
        first = Argument(Var("cls"), TypeType.make_normalized(self_type), None, ARG_POS)
    else:
        first = Argument(Var("self"), self_type, None, ARG_POS)
    args = [first] + args

    arg_types, arg_names, arg_kinds = [], [], []
    for argument in args:
        assert argument.type_annotation, "All arguments must be fully typed."
        arg_types.append(argument.type_annotation)
        arg_names.append(argument.variable.name)
        arg_kinds.append(argument.kind)

    function_type = api.named_type("__builtins__.function")
    signature = CallableType(arg_types, arg_kinds, arg_names, return_type,
                             function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func._fullname = info.fullname + "." + name  # pylint: disable=protected-access
    func.line = info.line
    func.is_class = is_classmethod

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    if is_classmethod:
        # Classmethods are represented as a Decorator wrapping the FuncDef.
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname  # pylint: disable=protected-access
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr("classmethod")], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True

    info.names[name] = sym
    info.defn.defs.body.append(func)
def visit_type_var(self, t: TypeVar) -> Type:
    """Translate a type variable into a runtime reference to its value slot."""
    # FIX function type variables
    slot_ref = NameExpr(tvar_arg_name(t.id))
    return RuntimeTypeVar(slot_ref)
def duplicate_name(self, node: NameExpr) -> NameExpr:
    """Return a fresh NameExpr referring to the same target as ``node``.

    This method is used when the transform result must be a NameExpr;
    visit_name_expr() is used when there is no such restriction.
    """
    duplicate = NameExpr(node.name)
    self.copy_ref(duplicate, node)
    return duplicate
def _scan_declarative_decorator_stmt(
    cls: ClassDef,
    api: SemanticAnalyzerPluginInterface,
    stmt: Decorator,
    cls_metadata: util.DeclClassApplied,
) -> None:
    """Extract mapping information from a @declared_attr in a declarative
    class.

    E.g.::

        @reg.mapped
        class MyClass:
            # ...

            @declared_attr
            def updated_at(cls) -> Column[DateTime]:
                return Column(DateTime)

    Will resolve in mypy as::

        @reg.mapped
        class MyClass:
            # ...

            updated_at: Mapped[Optional[datetime.datetime]]

    """
    for dec in stmt.decorators:
        if (isinstance(dec, (NameExpr, MemberExpr, SymbolNode))
                and names._type_id_for_named_node(dec) is names.DECLARED_ATTR):
            break
    else:
        # Not decorated with @declared_attr; nothing to do.
        return

    stmt_index = cls.defs.body.index(stmt)

    left_hand_explicit_type: Optional[ProperType] = None

    if isinstance(stmt.func.type, CallableType):
        declared_ret_type = stmt.func.type.ret_type
        if isinstance(declared_ret_type, UnboundType):
            type_id = names._type_id_for_unbound_type(declared_ret_type, cls, api)
        else:
            # this does not seem to occur unless the type argument is
            # incorrect
            return

        if (type_id in {
                names.MAPPED,
                names.RELATIONSHIP,
                names.COMPOSITE_PROPERTY,
                names.MAPPER_PROPERTY,
                names.SYNONYM_PROPERTY,
                names.COLUMN_PROPERTY,
        } and declared_ret_type.args):
            left_hand_explicit_type = get_proper_type(declared_ret_type.args[0])
        elif type_id is names.COLUMN and declared_ret_type.args:
            typeengine_arg = declared_ret_type.args[0]
            if isinstance(typeengine_arg, UnboundType):
                sym = api.lookup_qualified(typeengine_arg.name, typeengine_arg)
                if sym is not None and isinstance(sym.node, TypeInfo):
                    if names._has_base_type_id(sym.node, names.TYPEENGINE):
                        # Column values are Optional unless stated otherwise.
                        left_hand_explicit_type = UnionType([
                            infer._extract_python_type_from_typeengine(
                                api, sym.node, []),
                            NoneType(),
                        ])
                    else:
                        util.fail(
                            api,
                            "Column type should be a TypeEngine "
                            "subclass not '{}'".format(sym.node.fullname),
                            declared_ret_type,
                        )

    if left_hand_explicit_type is None:
        # no type on the decorated function.  our option here is to
        # dig into the function body and get the return type, but they
        # should just have an annotation.
        msg = ("Can't infer type from @declared_attr on function '{}'; "
               "please specify a return type from this function that is "
               "one of: Mapped[<python type>], relationship[<target class>], "
               "Column[<TypeEngine>], MapperProperty[<python type>]")
        util.fail(api, msg.format(stmt.var.name), stmt)
        left_hand_explicit_type = AnyType(TypeOfAny.special_form)

    left_node = NameExpr(stmt.var.name)
    left_node.node = stmt.var

    # totally feeling around in the dark here as I don't totally understand
    # the significance of UnboundType.  It seems to be something that is
    # not going to do what's expected when it is applied as the type of
    # an AssignmentStatement.  So do a feeling-around-in-the-dark version
    # of converting it to the regular Instance/TypeInfo/UnionType structures
    # we see everywhere else.
    if isinstance(left_hand_explicit_type, UnboundType):
        left_hand_explicit_type = get_proper_type(
            util._unbound_to_instance(api, left_hand_explicit_type))

    left_node.node.type = api.named_type(
        "__sa_Mapped", [left_hand_explicit_type])

    # this will ignore the rvalue entirely
    # rvalue = TempNode(AnyType(TypeOfAny.special_form))

    # rewrite the node as:
    # <attr> : Mapped[<typ>] =
    # _sa_Mapped._empty_constructor(lambda: <function body>)
    # the function body is maintained so it gets type checked internally
    column_descriptor = nodes.NameExpr("__sa_Mapped")
    column_descriptor.fullname = "sqlalchemy.orm.attributes.Mapped"

    constructor = nodes.MemberExpr(column_descriptor, "_empty_constructor")
    body_lambda = nodes.LambdaExpr(stmt.func.arguments, stmt.func.body)
    rvalue = CallExpr(
        constructor,
        [body_lambda],
        [nodes.ARG_POS],
        ["arg1"],
    )

    new_stmt = AssignmentStmt([left_node], rvalue)
    new_stmt.type = left_node.node.type

    cls_metadata.mapped_attr_names.append(
        (left_node.name, left_hand_explicit_type))

    cls.defs.body[stmt_index] = new_stmt
def self_expr() -> NameExpr:
    """Return a NameExpr referencing the local variable 'self'."""
    expr = NameExpr('self')
    expr.kind = LDEF
    return expr
def self_expr():
    # Build a NameExpr that refers to the local variable 'self'.
    expr = NameExpr('self')
    expr.kind = LDEF
    return expr
def visit_NameConstant(self, n: ast3.NameConstant) -> NameExpr:
    """Represent True/False/None constants by their textual names."""
    constant_name = str(n.value)
    return NameExpr(constant_name)
if is_alt != BOUND_VAR: if n > 0: # Equivalent to slot name. return tvar_slot_name(n - 1) elif n == -1: return '__ftv' else: return '__ftv{}'.format(-n) else: if n > 0: # Equivalent to slot name. return tvar_slot_name(n - 1, BOUND_VAR) elif n == -1: return '__bftv' # FIX do we need this? else: return '__bftv{}'.format(-n) # FIX do we need this? str dynamic_suffix(bool is_pretty): """Return the suffix of the dynamic wrapper of a method or class.""" if is_pretty: return '*' else: return '___dyn' NameExpr self_expr(): n = NameExpr('self') n.kind = LDEF return n
def self_expr() -> NameExpr:
    """Create a local-variable reference to 'self'."""
    expr = NameExpr("self")
    expr.kind = LDEF
    return expr