def process_type_info(self, info: TypeInfo) -> None:
    """Register fine-grained dependencies for a class definition.

    Adds triggers so that changes in base classes, tuple/TypedDict base
    types, the declared metaclass, or inherited members re-process the
    appropriate targets of this class.
    """
    target = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        # A Tuple[...] base type re-triggers the class itself.
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        # Likewise for a TypedDict base type.
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    # TODO: Add dependencies based on remaining TypeInfo attributes.
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(
                    make_trigger(base_info.fullname() + '.' + name),
                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        # A change to any inherited member propagates to the same name on
        # this class; __init__/__new__ are added unconditionally.
        for name, node in base_info.names.items():
            self.add_dependency(
                make_trigger(base_info.fullname() + '.' + name),
                target=make_trigger(info.fullname() + '.' + name))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__init__'),
            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__new__'),
            target=make_trigger(info.fullname() + '.__new__'))
def process_type_info(self, info: TypeInfo) -> None:
    """Record fine-grained dependencies arising from a class definition."""
    current = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=current)
    # A tuple/TypedDict/metaclass base re-triggers the class itself.
    for special in (info.tuple_type, info.typeddict_type, info.declared_metaclass):
        if special:
            self.add_type_dependencies(special, target=make_trigger(current))
    self.add_type_alias_deps(self.scope.current_target())
    info_prefix = info.fullname() + '.'
    for member, sym in info.names.items():
        if isinstance(sym.node, Var):
            # If the type of an attribute changes in a base class, make
            # references to the attribute in the subclass stale.
            for ancestor in non_trivial_bases(info):
                self.add_dependency(
                    make_trigger(ancestor.fullname() + '.' + member),
                    target=make_trigger(info_prefix + member))
    for ancestor in non_trivial_bases(info):
        ancestor_prefix = ancestor.fullname() + '.'
        # Changes to inherited members propagate to the same name here.
        for member in ancestor.names:
            self.add_dependency(make_trigger(ancestor_prefix + member),
                                target=make_trigger(info_prefix + member))
        # Constructors are always linked, member or not.
        for ctor in ('__init__', '__new__'):
            self.add_dependency(make_trigger(ancestor_prefix + ctor),
                                target=make_trigger(info_prefix + ctor))
def record_protocol_subtype_check(cls, left_type: TypeInfo, right_type: TypeInfo) -> None:
    """Remember that left_type was checked for compatibility against the
    protocol right_type, recording the protocol name and its members."""
    assert right_type.is_protocol
    left_name = left_type.fullname()
    cls._rechecked_types.add(left_type)
    attempted = cls._attempted_protocols.setdefault(left_name, set())
    attempted.add(right_type.fullname())
    checked_members = cls._checked_against_members.setdefault(left_name, set())
    checked_members.update(right_type.protocol_members)
def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None:
    """Record on TypeState that left_type was checked against the protocol
    right_type, including which protocol members were involved."""
    assert right_type.is_protocol
    left_name = left_type.fullname()
    TypeState._rechecked_types.add(left_type)
    protocols = TypeState._attempted_protocols.setdefault(left_name, set())
    protocols.add(right_type.fullname())
    members = TypeState._checked_against_members.setdefault(left_name, set())
    members.update(right_type.protocol_members)
def get_related_field_type(rvalue: CallExpr, related_model_typ: TypeInfo) -> Dict[str, Any]:
    """Return manager metadata for a related-field definition.

    For to-many fields (ForeignKey, ManyToManyField) the attribute is
    accessed through a related manager parametrized by the related model;
    for any other field kind the related model itself is used directly.

    Args:
        rvalue: the field constructor call (e.g. ``ForeignKey(...)``).
        related_model_typ: TypeInfo of the model the field points at.

    Returns:
        A dict with keys ``'manager'`` (fullname of the manager/model class)
        and ``'of'`` (type-argument fullnames, possibly empty).
    """
    # NOTE: assumes rvalue.callee has a `name` attribute (NameExpr/MemberExpr).
    if rvalue.callee.name in {'ForeignKey', 'ManyToManyField'}:
        return {
            'manager': helpers.RELATED_MANAGER_CLASS_FULLNAME,
            'of': [related_model_typ.fullname()],
        }
    # To-one side: no manager wrapper, no type arguments.
    return {'manager': related_model_typ.fullname(), 'of': []}
def analyze_type_with_type_info(self, info: TypeInfo, args: List[Type], ctx: Context) -> Type:
    """Bind an unbound type when we were able to find the target TypeInfo.

    This handles simple cases like 'int', 'modname.UserClass[str]', etc.
    Depending on the TypeInfo, the result may be a TupleType, a
    TypedDictType, an Instance, or AnyType (on errors).
    """
    if len(args) > 0 and info.fullname() == 'builtins.tuple':
        # Variadic tuple special case: Tuple[x, ...] style fallback.
        fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line)
        return TupleType(self.anal_array(args), fallback, ctx.line)
    # Analyze arguments and (usually) construct Instance type. The
    # number of type arguments and their values are
    # checked only later, since we do not always know the
    # valid count at this point. Thus we may construct an
    # Instance with an invalid number of type arguments.
    instance = Instance(info, self.anal_array(args), ctx.line, ctx.column)
    # Check type argument count.
    if len(instance.args) != len(info.type_vars) and not self.defining_alias:
        fix_instance(instance, self.fail)
    if not args and self.options.disallow_any_generics and not self.defining_alias:
        # We report/patch invalid built-in instances already during second pass.
        # This is done to avoid storing additional state on instances.
        # All other (including user defined) generics will be patched/reported
        # in the third pass.
        if not self.is_typeshed_stub and info.fullname() in nongen_builtins:
            alternative = nongen_builtins[info.fullname()]
            self.fail(message_registry.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), ctx)
            any_type = AnyType(TypeOfAny.from_error, line=ctx.line)
        else:
            any_type = AnyType(TypeOfAny.from_omitted_generics, line=ctx.line)
        instance.args = [any_type] * len(info.type_vars)
    tup = info.tuple_type
    if tup is not None:
        # The class has a Tuple[...] base class so it will be
        # represented as a tuple type.
        if args:
            self.fail('Generic tuple types not supported', ctx)
            return AnyType(TypeOfAny.from_error)
        return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance)
    td = info.typeddict_type
    if td is not None:
        # The class has a TypedDict[...] base class so it will be
        # represented as a typeddict type.
        if args:
            self.fail('Generic TypedDict types not supported', ctx)
            return AnyType(TypeOfAny.from_error)
        # Create a named TypedDictType
        return td.copy_modified(item_types=self.anal_array(list(td.items.values())),
                                fallback=instance)
    return instance
def dump_typeinfo(self, info: TypeInfo) -> List[str]:
    """Return a line-by-line textual dump of ``info``.

    enum.Enum is skipped entirely to avoid noise in the output.
    """
    if info.fullname() == 'enum.Enum':
        return []
    dumped = info.dump(str_conv=self.str_conv, type_str_conv=self.type_str_conv)
    return dumped.splitlines()
def add_new_class_for_module(module: MypyFile, name: str, bases: List[Instance],
                             fields: 'OrderedDict[str, MypyType]') -> TypeInfo:
    """Create a plugin-generated class inside ``module`` and return its TypeInfo.

    The class receives a module-unique name, the given ``bases``, and one
    Var member per entry in ``fields``.
    """
    unique_name = checker.gen_unique_name(name, module.names)
    # Build the class definition node.
    class_def = ClassDef(unique_name, Block([]))
    class_def.fullname = module.fullname() + '.' + unique_name
    # Build the TypeInfo and resolve its MRO and metaclass.
    type_info = TypeInfo(SymbolTable(), class_def, module.fullname())
    type_info.bases = bases
    calculate_mro(type_info)
    type_info.calculate_metaclass_type()
    # Attach one variable per requested field.
    for field_name, field_type in fields.items():
        attr = Var(field_name, type=field_type)
        attr.info = type_info
        attr._fullname = type_info.fullname() + '.' + field_name
        type_info.names[field_name] = SymbolTableNode(MDEF, attr, plugin_generated=True)
    class_def.info = type_info
    module.names[unique_name] = SymbolTableNode(GDEF, type_info, plugin_generated=True)
    return type_info
def type_object_type_from_function(signature: FunctionLike, info: TypeInfo,
                                   def_info: TypeInfo, fallback: Instance) -> FunctionLike:
    """Convert an __init__/__new__ signature into the callable type of the class object.

    The method may come from a generic superclass ``def_info`` whose type
    variables do not map identically to those of the class ``info`` being
    constructed. For example::

        class A(Generic[T]):
            def __init__(self, x: T) -> None: pass
        class B(A[List[T]], Generic[T]): pass

    Here B's __init__ must first be mapped to the type (List[T]) -> None.
    """
    bound = bind_self(signature)
    mapped = cast(FunctionLike, map_type_from_supertype(bound, info, def_info))
    # 'dict' gets a special constructor signature downstream.
    special_sig = 'dict' if def_info.fullname() == 'builtins.dict' else None  # type: Optional[str]
    if isinstance(mapped, CallableType):
        return class_callable(mapped, info, fallback, special_sig)
    # Overloaded __init__/__new__: convert each overload item separately.
    assert isinstance(mapped, Overloaded)
    converted = [class_callable(item, info, fallback, special_sig)
                 for item in mapped.items()]  # type: List[CallableType]
    return Overloaded(converted)
def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None:
    """Calculate abstract status of a class.

    Set is_abstract of the type to True if the type has an unimplemented
    abstract attribute.  Also compute a list of abstract attributes.
    Report an error if a required ABCMeta metaclass is missing.
    """
    concrete = set()  # type: Set[str]
    abstract = []  # type: List[str]
    abstract_in_this_class = []  # type: List[str]
    # Walk the MRO from the class itself upwards; names seen in an earlier
    # (more derived) class are recorded in `concrete` and shadow later ones.
    for base in typ.mro:
        for name, symnode in base.names.items():
            node = symnode.node
            if isinstance(node, OverloadedFuncDef):
                # Unwrap an overloaded function definition. We can just
                # check arbitrarily the first overload item. If the
                # different items have a different abstract status, there
                # should be an error reported elsewhere.
                func = node.items[0]  # type: Optional[Node]
            else:
                func = node
            if isinstance(func, Decorator):
                fdef = func.func
                if fdef.is_abstract and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            elif isinstance(node, Var):
                if node.is_abstract_var and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            # Any definition (abstract or not) shadows bases further up.
            concrete.add(name)
    # In stubs, abstract classes need to be explicitly marked because it is too
    # easy to accidentally leave a concrete class abstract by forgetting to
    # implement some methods.
    typ.abstract_attributes = sorted(abstract)
    if is_stub_file:
        if typ.declared_metaclass and typ.declared_metaclass.type.fullname() == 'abc.ABCMeta':
            return
        if typ.is_protocol:
            return
        # Only complain when all abstract attributes are inherited: the
        # class looks concrete but is actually abstract.
        if abstract and not abstract_in_this_class:
            def report(message: str, severity: str) -> None:
                # Attach the report to the class definition location.
                errors.report(typ.line, typ.column, message, severity=severity)
            attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
            report("Class {} has abstract attributes {}".format(typ.fullname(), attrs), 'error')
            report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass",
                   'note')
def linearize_hierarchy(info: TypeInfo,
                        obj_type: Optional[Callable[[], Instance]] = None) -> List[TypeInfo]:
    """Compute the MRO linearization for ``info`` (cached in ``info.mro``)."""
    if info.mro:
        return info.mro
    parents = info.direct_base_classes()
    if not parents and obj_type is not None and info.fullname() != 'builtins.object':
        # Second pass in import cycle: add a dummy `object` base class,
        # otherwise MRO calculation may spuriously fail. MRO will be
        # re-calculated for real in the third pass.
        parents = [obj_type().type]
    sequences = []
    for parent in parents:
        assert parent is not None, "Cannot linearize bases for %s %s" % (info.fullname(), parents)
        sequences.append(linearize_hierarchy(parent, obj_type))
    sequences.append(parents)
    return [info] + merge(sequences)
def add_var_to_class(name: str, typ: Type, info: TypeInfo) -> None:
    """Register a variable named ``name`` of type ``typ`` on class ``info``.

    Sets the attributes the symbol table expects on the Var node.
    """
    node = Var(name)
    node.type = typ
    node.info = info
    node._fullname = info.fullname() + '.' + name
    info.names[name] = SymbolTableNode(MDEF, node)
def add_new_sym_for_info(info: TypeInfo, *, name: str, sym_type: MypyType) -> None:
    """Create a plugin-generated class attribute ``name: sym_type`` on ``info``."""
    variable = Var(name=name, type=sym_type)
    # Bind the variable to the class it lives on.
    variable.info = info
    variable._fullname = info.fullname() + '.' + name
    variable.is_initialized_in_class = True
    variable.is_inferred = True
    info.names[name] = SymbolTableNode(MDEF, variable, plugin_generated=True)
def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None:
    """Calculate abstract status of a class.

    Set is_abstract of the type to True if the type has an unimplemented
    abstract attribute.  Also compute a list of abstract attributes.
    Report an error if a required ABCMeta metaclass is missing.
    """
    concrete = set()  # type: Set[str]
    abstract = []  # type: List[str]
    abstract_in_this_class = []  # type: List[str]
    # Walk the MRO from the class itself upwards; names already seen in a
    # more derived class are recorded in `concrete` and shadow later ones.
    for base in typ.mro:
        for name, symnode in base.names.items():
            node = symnode.node
            if isinstance(node, OverloadedFuncDef):
                # Unwrap an overloaded function definition. We can just
                # check arbitrarily the first overload item. If the
                # different items have a different abstract status, there
                # should be an error reported elsewhere.
                func = node.items[0]  # type: Optional[Node]
            else:
                func = node
            if isinstance(func, Decorator):
                fdef = func.func
                if fdef.is_abstract and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            elif isinstance(node, Var):
                if node.is_abstract_var and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            # Any definition (abstract or not) shadows bases further up.
            concrete.add(name)
    # In stubs, abstract classes need to be explicitly marked because it is too
    # easy to accidentally leave a concrete class abstract by forgetting to
    # implement some methods.
    typ.abstract_attributes = sorted(abstract)
    if is_stub_file:
        if typ.declared_metaclass and typ.declared_metaclass.type.fullname() == 'abc.ABCMeta':
            return
        if typ.is_protocol:
            return
        # Only complain when every abstract attribute is inherited: the
        # stub class looks concrete but is actually abstract.
        if abstract and not abstract_in_this_class:
            def report(message: str, severity: str) -> None:
                # Attach the report to the class definition location.
                errors.report(typ.line, typ.column, message, severity=severity)
            attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
            report("Class {} has abstract attributes {}".format(typ.fullname(), attrs), 'error')
            report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass",
                   'note')
def type_object_type_from_function(signature: FunctionLike,
                                   info: TypeInfo,
                                   def_info: TypeInfo,
                                   fallback: Instance,
                                   is_new: bool) -> FunctionLike:
    """Convert an __init__/__new__ signature into the callable type of the
    class object, binding self/cls and mapping superclass type variables.

    ``is_new`` selects __new__ semantics (bound as a classmethod).
    """
    # We first need to record all non-trivial (explicit) self types in __init__,
    # since they will not be available after we bind them. Note, we use explicit
    # self-types only in the defining class, similar to __new__ (but not exactly the same,
    # see comment in class_callable below). This is mostly useful for annotating library
    # classes such as subprocess.Popen.
    default_self = fill_typevars(info)
    if not is_new and def_info == info and not info.is_newtype:
        orig_self_types = [(it.arg_types[0] if it.arg_types and it.arg_types[0] != default_self
                            and it.arg_kinds[0] == ARG_POS else None)
                           for it in signature.items()]
    else:
        orig_self_types = [None] * len(signature.items())
    # The __init__ method might come from a generic superclass 'def_info'
    # with type variables that do not map identically to the type variables of
    # the class 'info' being constructed. For example:
    #
    #     class A(Generic[T]):
    #         def __init__(self, x: T) -> None: ...
    #     class B(A[List[T]]):
    #         ...
    #
    # We need to map B's __init__ to the type (List[T]) -> None.
    signature = bind_self(signature, original_type=default_self, is_classmethod=is_new)
    signature = cast(FunctionLike, map_type_from_supertype(signature, info, def_info))
    special_sig = None  # type: Optional[str]
    if def_info.fullname() == 'builtins.dict':
        # Special signature!
        special_sig = 'dict'
    if isinstance(signature, CallableType):
        return class_callable(signature, info, fallback, special_sig, is_new, orig_self_types[0])
    else:
        # Overloaded __init__/__new__.
        assert isinstance(signature, Overloaded)
        items = []  # type: List[CallableType]
        # Pair each overload item with the explicit self type recorded above.
        for item, orig_self in zip(signature.items(), orig_self_types):
            items.append(class_callable(item, info, fallback, special_sig, is_new, orig_self))
        return Overloaded(items)
def __init__(self, type: TypeInfo, base: 'Optional[ClassRepresentation]') -> None:
    """Build the low-level representation of a class.

    Inherits slot/vtable layout from ``base`` when given, then registers
    the class's own members: FuncBase nodes become methods; every other
    member gets a slot plus generated getter/setter methods.
    """
    self.cname = 'MR_%s' % type.name()  # name used for the emitted type repr
    self.fullname = type.fullname()
    self.slotmap = {}  # member name -> slot index (insertion order)
    self.vtable_index = {}
    self.defining_class = {}
    self.vtable_methods = []
    if base:
        # Copy the base class layout first so our indices extend it.
        self.inherit_from_base(base)
    # Sorted for a deterministic slot/vtable layout.
    for m in sorted(type.names):
        if isinstance(type.names[m].node, FuncBase):
            self.add_method(m, type)
        else:
            self.slotmap[m] = len(self.slotmap)
            self.add_method('_' + m, type)  # Getter TODO refactor
            self.add_method('set_' + m, type)  # Setter # TODO refactor
def process_type_info(self, info: TypeInfo) -> None:
    """Register fine-grained dependencies for a class definition.

    Covers base classes, tuple/TypedDict bases, the declared metaclass,
    inherited members, constructors, and abstract-status triggers.
    """
    target = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        # A Tuple[...] base type re-triggers the class itself.
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            # Recheck Liskov if needed, self definitions are checked in the defining method
            if node.node.is_initialized_in_class and has_user_bases(info):
                self.add_dependency(make_trigger(info.fullname() + '.' + name))
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(
                    make_trigger(base_info.fullname() + '.' + name),
                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        # Changes to inherited members propagate to the same name here.
        for name, node in base_info.names.items():
            self.add_dependency(
                make_trigger(base_info.fullname() + '.' + name),
                target=make_trigger(info.fullname() + '.' + name))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__init__'),
            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__new__'),
            target=make_trigger(info.fullname() + '.__new__'))
        # If the set of abstract attributes change, this may invalidate class
        # instantiation, or change the generated error message, since Python checks
        # class abstract status when creating an instance.
        #
        # TODO: We should probably add this dependency only from the __init__ of the
        # current class, and independent of bases (to trigger changes in message
        # wording, as errors may enumerate all abstract attributes).
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'),
            target=make_trigger(info.fullname() + '.__init__'))
        # If the base class abstract attributes change, subclass abstract
        # attributes need to be recalculated.
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'))
def generate_class(self, cls: TypeInfo) -> 'ClassRepresentation':
    """Emit the vtable and runtime type info for ``cls``; return its representation."""
    base_rep = self.get_class_representation(cls.bases[0].type) if cls.bases else None
    representation = ClassRepresentation(cls, base_rep)
    self.classes[cls] = representation
    # Emit vtable.
    vtable = 'MVT_%s' % cls.name()
    self.emit_types('MFunction %s[] = {' % vtable)
    for method in representation.vtable_methods:
        owner = representation.defining_class[method]
        self.emit_types(' M%s_%s,' % (owner, method))
    self.emit_types('}; /* %s */' % vtable)
    # Emit type runtime info.
    self.emit_types('MTypeRepr %s = {' % representation.cname)
    self.emit_types(' %s,' % vtable)
    self.emit_types(' 0,')
    self.emit_types(' "%s"' % cls.fullname())
    self.emit_types('};\n')
    return representation
def process_type_info(self, info: TypeInfo) -> None:
    """Register fine-grained dependencies for a class definition.

    Covers base classes, tuple/TypedDict bases, the declared metaclass,
    protocol supertypes, inherited members, constructors, and
    abstract-status triggers.
    """
    target = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        # A Tuple[...] base type re-triggers the class itself.
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    if info.is_protocol:
        for base_info in info.mro[:-1]:
            # We add dependencies from whole MRO to cover explicit subprotocols.
            # For example:
            #
            #     class Super(Protocol):
            #         x: int
            #     class Sub(Super, Protocol):
            #         y: int
            #
            # In this example we add <Super[wildcard]> -> <Sub>, to invalidate Sub if
            # a new member is added to Super.
            self.add_dependency(make_wildcard_trigger(base_info.fullname()),
                                target=make_trigger(target))
            # More protocol dependencies are collected in TypeState._snapshot_protocol_deps
            # after a full run or update is finished.
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            # Recheck Liskov if needed, self definitions are checked in the defining method
            if node.node.is_initialized_in_class and has_user_bases(info):
                self.add_dependency(
                    make_trigger(info.fullname() + '.' + name))
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(
                    make_trigger(base_info.fullname() + '.' + name),
                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        for name, node in base_info.names.items():
            if self.options and self.options.logical_deps:
                # Skip logical dependency if an attribute is not overridden. For example,
                # in case of:
                #     class Base:
                #         x = 1
                #         y = 2
                #     class Sub(Base):
                #         x = 3
                # we skip <Base.y> -> <Child.y>, because even if `y` is unannotated it
                # doesn't affect precision of Liskov checking.
                if name not in info.names:
                    continue
            self.add_dependency(
                make_trigger(base_info.fullname() + '.' + name),
                target=make_trigger(info.fullname() + '.' + name))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__init__'),
            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__new__'),
            target=make_trigger(info.fullname() + '.__new__'))
        # If the set of abstract attributes change, this may invalidate class
        # instantiation, or change the generated error message, since Python checks
        # class abstract status when creating an instance.
        #
        # TODO: We should probably add this dependency only from the __init__ of the
        # current class, and independent of bases (to trigger changes in message
        # wording, as errors may enumerate all abstract attributes).
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'),
            target=make_trigger(info.fullname() + '.__init__'))
        # If the base class abstract attributes change, subclass abstract
        # attributes need to be recalculated.
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'))
def process_type_info(self, info: TypeInfo) -> None:
    """Register fine-grained dependencies for a class definition.

    Covers base classes, tuple/TypedDict bases, the declared metaclass,
    protocol supertypes, inherited members, constructors, and
    abstract-status triggers. Logical-deps mode suppresses triggers that
    are not logically required (non-overridden members, constructors).
    """
    target = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        # A Tuple[...] base type re-triggers the class itself.
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    if info.is_protocol:
        for base_info in info.mro[:-1]:
            # We add dependencies from whole MRO to cover explicit subprotocols.
            # For example:
            #
            #     class Super(Protocol):
            #         x: int
            #     class Sub(Super, Protocol):
            #         y: int
            #
            # In this example we add <Super[wildcard]> -> <Sub>, to invalidate Sub if
            # a new member is added to Super.
            self.add_dependency(make_wildcard_trigger(base_info.fullname()),
                                target=make_trigger(target))
            # More protocol dependencies are collected in TypeState._snapshot_protocol_deps
            # after a full run or update is finished.
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            # Recheck Liskov if needed, self definitions are checked in the defining method
            if node.node.is_initialized_in_class and has_user_bases(info):
                self.add_dependency(make_trigger(info.fullname() + '.' + name))
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        for name, node in base_info.names.items():
            if self.use_logical_deps():
                # Skip logical dependency if an attribute is not overridden. For example,
                # in case of:
                #     class Base:
                #         x = 1
                #         y = 2
                #     class Sub(Base):
                #         x = 3
                # we skip <Base.y> -> <Child.y>, because even if `y` is unannotated it
                # doesn't affect precision of Liskov checking.
                if name not in info.names:
                    continue
                # __init__ and __new__ can be overridden with different signatures, so no
                # logical dependency.
                if name in ('__init__', '__new__'):
                    continue
            self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                target=make_trigger(info.fullname() + '.' + name))
        if not self.use_logical_deps():
            # These dependencies are only useful for propagating changes --
            # they aren't logical dependencies since __init__ and __new__ can be
            # overridden with a different signature.
            self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                                target=make_trigger(info.fullname() + '.__init__'))
            self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                                target=make_trigger(info.fullname() + '.__new__'))
            # If the set of abstract attributes change, this may invalidate class
            # instantiation, or change the generated error message, since Python checks
            # class abstract status when creating an instance.
            #
            # TODO: We should probably add this dependency only from the __init__ of the
            # current class, and independent of bases (to trigger changes in message
            # wording, as errors may enumerate all abstract attributes).
            self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'),
                                target=make_trigger(info.fullname() + '.__init__'))
            # If the base class abstract attributes change, subclass abstract
            # attributes need to be recalculated.
            self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'))
def is_model_subclass_info(info: TypeInfo, django_context: 'DjangoContext') -> bool:
    """Return True if ``info`` is a registered model class or subclasses Model."""
    if info.fullname() in django_context.all_registered_model_class_fullnames:
        return True
    return info.has_base(fullnames.MODEL_CLASS_FULLNAME)