def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> Type:
    """Return the type of a type object.

    For a generic type G with type variables T and S the type is generally of form

      Callable[..., G[T, S]]

    where ... are argument types for the __init__/__new__ method (without the self
    argument). Also, the fallback type will be 'type' instead of 'function'.
    """
    init_method = info.get_method('__init__')
    if not init_method:
        # Must be an invalid class definition; __init__ should always be
        # reachable via builtins.object.
        return AnyType()
    else:
        fallback = builtin_type('builtins.type')
        if init_method.info.fullname() == 'builtins.object':
            # No non-default __init__ -> look at __new__ instead.
            new_method = info.get_method('__new__')
            if new_method and new_method.info.fullname() != 'builtins.object':
                # Found one! Get signature from __new__.
                return type_object_type_from_function(new_method, info, fallback)
            # Both are defined by object. But if we've got a bogus
            # base class, we can't know for sure, so check for that.
            if info.fallback_to_any:
                # Construct a universal callable (*args, **kwargs -> Any) as
                # the prototype, since any call could be valid.
                sig = CallableType(arg_types=[AnyType(), AnyType()],
                                   arg_kinds=[ARG_STAR, ARG_STAR2],
                                   arg_names=["_args", "_kwds"],
                                   ret_type=AnyType(),
                                   fallback=builtin_type('builtins.function'))
                return class_callable(sig, info, fallback, None)
        # Construct callable type based on signature of __init__. Adjust
        # return type and insert type arguments.
        return type_object_type_from_function(init_method, info, fallback)
def dump_typeinfo(self, info: TypeInfo) -> List[str]:
    """Render a TypeInfo as a list of text lines.

    The enum.Enum stub is suppressed entirely to keep dumps free of noise.
    """
    if info.fullname() != 'enum.Enum':
        dumped = info.dump(str_conv=self.str_conv,
                           type_str_conv=self.type_str_conv)
        return dumped.splitlines()
    return []
def make_type_info(name: str,
                   is_abstract: bool = False,
                   mro: List[TypeInfo] = None,
                   bases: List[Instance] = None,
                   typevars: List[str] = None) -> TypeInfo:
    """Make a TypeInfo suitable for use in unit tests.

    By default the MRO is just the new type itself, and if an MRO is given
    but no bases, the first MRO entry is used as the single non-generic base.
    (NOTE(review): mro/bases/typevars use implicit Optional defaults; consider
    Optional[...] annotations if Optional is imported at module level.)
    """
    class_def = ClassDef(name, Block([]), None, [])
    class_def.fullname = name

    if typevars:
        v = []  # type: List[TypeVarDef]
        # Type variable ids are 1-based; use enumerate rather than a manual
        # counter named 'id', which shadowed the builtin.
        for tvar_id, tvar_name in enumerate(typevars, 1):
            v.append(TypeVarDef(tvar_name, tvar_id, None))
        class_def.type_vars = v

    info = TypeInfo(SymbolTable(), class_def)
    if mro is None:
        mro = []
    info.mro = [info] + mro
    if bases is None:
        if mro:
            # By default, assume that there is a single non-generic base.
            bases = [Instance(mro[0], [])]
        else:
            bases = []
    info.bases = bases
    return info
def process_type_info(self, info: TypeInfo) -> None:
    """Record fine-grained dependencies implied by a class definition.

    Adds triggers so that changes to base classes, tuple/TypedDict forms,
    metaclasses and inherited attributes invalidate the right targets.
    """
    target = self.scope.current_full_target()
    # Any change to a base type affects this class's target directly.
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        # Every inherited name is re-triggered through the subclass, so that
        # accesses via the subclass become stale when the base changes.
        for name, node in base_info.names.items():
            self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                target=make_trigger(info.fullname() + '.' + name))
        # Constructors are linked even if not present in base_info.names.
        self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                            target=make_trigger(info.fullname() + '.__new__'))
def visit_type_info(self, info: TypeInfo) -> None:
    """Fix up all cross references reachable from a TypeInfo.

    Sets self.current_info for the duration so nested visits know the
    enclosing class; always restored via try/finally.
    """
    save_info = self.current_info
    try:
        self.current_info = info
        if info.defn:
            info.defn.accept(self)
        if info.names:
            self.visit_symbol_table(info.names)
        if info.bases:
            for base in info.bases:
                base.accept(self.type_fixer)
        if info._promote:
            info._promote.accept(self.type_fixer)
        if info.tuple_type:
            info.tuple_type.accept(self.type_fixer)
        if info.typeddict_type:
            info.typeddict_type.accept(self.type_fixer)
        if info.declared_metaclass:
            info.declared_metaclass.accept(self.type_fixer)
        if info.metaclass_type:
            info.metaclass_type.accept(self.type_fixer)
        if info._mro_refs:
            # The serialized form stores the MRO as names; resolve them back
            # into TypeInfo objects now that modules are available.
            info.mro = [lookup_qualified_typeinfo(self.modules, name,
                                                  self.quick_and_dirty)
                        for name in info._mro_refs]
            info._mro_refs = None
    finally:
        self.current_info = save_info
def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None:
    """Record that left_type was structurally checked against protocol right_type."""
    assert right_type.is_protocol
    left_name = left_type.fullname()
    TypeState._rechecked_types.add(left_type)
    attempted = TypeState._attempted_protocols.setdefault(left_name, set())
    attempted.add(right_type.fullname())
    checked_members = TypeState._checked_against_members.setdefault(left_name, set())
    checked_members.update(right_type.protocol_members)
def base_class_definitions_incompatible(
    self, name: str, base1: TypeInfo, base2: TypeInfo, context: Context
) -> None:
    """Report that two base classes define `name` incompatibly."""
    template = ('Definition of "{}" in base class "{}" is incompatible '
                'with definition in base class "{}"')
    self.fail(template.format(name, base1.name(), base2.name()), context)
def anal_type_def(self, d):
    """Semantically analyze a type definition and register it at module scope."""
    # Reject duplicate definitions at module level.
    self.check_no_global(d.name, d)
    d.full_name = self.qualified_name(d.name)
    info = TypeInfo({}, {}, d)
    info.set_line(d.line)
    # Index the new TypeInfo by its fully qualified name.
    self.types[d.full_name] = info
    d.info = info
    # Bind the name in the module's global symbol table.
    self.globals[d.name] = SymbolTableNode(GDEF, info, self.cur_mod_id)
def stale_info() -> TypeInfo:
    """Return a placeholder TypeInfo standing in for a stale cache entry."""
    suggestion = "<stale cache: consider running mypy without --quick>"
    placeholder_def = ClassDef(suggestion, Block([]))
    placeholder_def.fullname = suggestion

    info = TypeInfo(SymbolTable(), placeholder_def, "<stale>")
    info.bases = []
    info.mro = [info]
    return info
def calculate_mro(info: TypeInfo, obj_type: Optional[Callable[[], Instance]] = None) -> None:
    """Calculate and set mro (method resolution order).

    Raise MroError if cannot determine mro.
    """
    linearized = linearize_hierarchy(info, obj_type)
    assert linearized, "Could not produce a MRO at all for %s" % (info,)
    info.mro = linearized
    # The property of falling back to Any is inherited from any ancestor.
    inherits_any = False
    for ancestor in info.mro:
        if ancestor.fallback_to_any:
            inherits_any = True
            break
    info.fallback_to_any = inherits_any
    TypeState.reset_all_subtype_caches_for(info)
def missing_info(modules: Dict[str, MypyFile]) -> TypeInfo:
    """Return a dummy TypeInfo for a node that should have been replaced."""
    suggestion = "<missing info: *should* have gone away during fine-grained update>"
    placeholder_def = ClassDef(suggestion, Block([]))
    placeholder_def.fullname = suggestion

    info = TypeInfo(SymbolTable(), placeholder_def, "<missing>")
    obj = lookup_qualified(modules, 'builtins.object', False)
    assert isinstance(obj, TypeInfo)
    info.bases = [Instance(obj, [])]
    info.mro = [info, obj]
    return info
def check_type_var_values(self, type: TypeInfo, actuals: List[Type], valids: List[Type],
                          arg_number: int, context: Context) -> None:
    """Report an error for each actual type that is not a valid value."""
    for actual in actuals:
        if isinstance(actual, AnyType):
            # Any is compatible with all values.
            continue
        if any(is_same_type(actual, valid) for valid in valids):
            continue
        if isinstance(actual, Instance) and len(actuals) <= 1:
            # A single concrete instance gets a detailed message.
            self.fail('Type argument {} of "{}" has incompatible value "{}"'.format(
                arg_number, type.name(), actual.type.name()), context)
        else:
            self.fail('Invalid type argument value for "{}"'.format(
                type.name()), context)
def stale_info(modules: Dict[str, MypyFile]) -> TypeInfo:
    """Return a dummy TypeInfo standing in for a stale cache entry."""
    suggestion = "<stale cache: consider running mypy without --quick>"
    placeholder_def = ClassDef(suggestion, Block([]))
    placeholder_def.fullname = suggestion

    info = TypeInfo(SymbolTable(), placeholder_def, "<stale>")
    obj = lookup_qualified(modules, 'builtins.object', False)
    assert isinstance(obj, TypeInfo)
    info.bases = [Instance(obj, [])]
    info.mro = [info, obj]
    return info
def add_info_hook(ctx) -> None:
    """Plugin hook: synthesize a TypeInfo for a dynamically created class."""
    defn = ClassDef(ctx.name, Block([]))
    defn.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), defn, ctx.api.cur_mod_id)
    defn.info = info
    object_instance = ctx.api.builtin_type('builtins.object')
    info.mro = [info, object_instance.type]
    info.bases = [object_instance]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    # Mark the generated class so later plugin phases can recognize it.
    info.metadata['magic'] = True
def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None:
    """Calculate abstract status of a class.

    Set is_abstract of the type to True if the type has an unimplemented
    abstract attribute.  Also compute a list of abstract attributes.

    Report an error if a required ABCMeta metaclass is missing (stubs only).
    """
    concrete = set()  # type: Set[str]
    abstract = []  # type: List[str]
    abstract_in_this_class = []  # type: List[str]
    # Walk the MRO from the class itself upwards; a name seen earlier in the
    # MRO as concrete masks abstract definitions further up.
    for base in typ.mro:
        for name, symnode in base.names.items():
            node = symnode.node
            if isinstance(node, OverloadedFuncDef):
                # Unwrap an overloaded function definition. We can just
                # check arbitrarily the first overload item. If the
                # different items have a different abstract status, there
                # should be an error reported elsewhere.
                func = node.items[0]  # type: Optional[Node]
            else:
                func = node
            if isinstance(func, Decorator):
                fdef = func.func
                if fdef.is_abstract and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            elif isinstance(node, Var):
                if node.is_abstract_var and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            concrete.add(name)
    # In stubs, abstract classes need to be explicitly marked because it is too
    # easy to accidentally leave a concrete class abstract by forgetting to
    # implement some methods.
    typ.abstract_attributes = sorted(abstract)
    if is_stub_file:
        if typ.declared_metaclass and typ.declared_metaclass.type.fullname() == 'abc.ABCMeta':
            return
        if typ.is_protocol:
            return
        if abstract and not abstract_in_this_class:
            def report(message: str, severity: str) -> None:
                errors.report(typ.line, typ.column, message, severity=severity)
            attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
            report("Class {} has abstract attributes {}".format(typ.fullname(), attrs),
                   'error')
            report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass",
                   'note')
def visit_class_def(self, cdef: ClassDef) -> None:
    """First-pass analysis of a class definition: create and bind its TypeInfo."""
    kind = self.kind_by_scope()
    if kind == LDEF:
        # Local classes are handled elsewhere.
        return
    is_global = kind == GDEF
    if is_global:
        self.sem.check_no_global(cdef.name, cdef)
        cdef.fullname = self.sem.qualified_name(cdef.name)
    else:
        cdef.fullname = self.sem.qualified_name(cdef.name)
    info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
    info.set_line(cdef.line, cdef.column)
    cdef.info = info
    if is_global:
        self.sem.globals[cdef.name] = SymbolTableNode(kind, info)
    self.process_nested_classes(cdef)
def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> Type: """Return the type of a type object. For a generic type G with type variables T and S the type is generally of form Callable[..., G[T, S]] where ... are argument types for the __init__/__new__ method (without the self argument). Also, the fallback type will be 'type' instead of 'function'. """ # We take the type from whichever of __init__ and __new__ is first # in the MRO, preferring __init__ if there is a tie. init_method = info.get_method('__init__') new_method = info.get_method('__new__') if not init_method: # Must be an invalid class definition. return AnyType(TypeOfAny.from_error) # There *should* always be a __new__ method except the test stubs # lack it, so just copy init_method in that situation new_method = new_method or init_method init_index = info.mro.index(init_method.info) new_index = info.mro.index(new_method.info) fallback = info.metaclass_type or builtin_type('builtins.type') if init_index < new_index: method = init_method elif init_index > new_index: method = new_method else: if init_method.info.fullname() == 'builtins.object': # Both are defined by object. But if we've got a bogus # base class, we can't know for sure, so check for that. if info.fallback_to_any: # Construct a universal callable as the prototype. any_type = AnyType(TypeOfAny.special_form) sig = CallableType(arg_types=[any_type, any_type], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=["_args", "_kwds"], ret_type=any_type, fallback=builtin_type('builtins.function')) return class_callable(sig, info, fallback, None) # Otherwise prefer __init__ in a tie. It isn't clear that this # is the right thing, but __new__ caused problems with # typeshed (#5647). method = init_method # Construct callable type based on signature of __init__. Adjust # return type and insert type arguments. return type_object_type_from_function(method, info, fallback)
def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
                          valids: List[Type], arg_number: int, context: Context) -> None:
    """Report an error for each actual type that is not among the valid values."""
    for actual in actuals:
        if isinstance(actual, AnyType):
            # Any is compatible with all values.
            continue
        if any(is_same_type(actual, valid) for valid in valids):
            continue
        if isinstance(actual, Instance) and len(actuals) <= 1:
            # A single concrete instance gets a detailed message.
            class_name = '"{}"'.format(type.name())
            actual_type_name = '"{}"'.format(actual.type.name())
            self.fail(messages.INCOMPATIBLE_TYPEVAR_VALUE.format(
                arg_name, class_name, actual_type_name), context)
        else:
            self.fail('Invalid type argument value for "{}"'.format(
                type.name()), context)
def check_type_var_values(self, type: TypeInfo, actuals: List[Type], valids: List[Type],
                          context: Context) -> None:
    """Report an error for each actual type that is not a valid value."""
    for actual in actuals:
        if isinstance(actual, AnyType):
            # Any is compatible with all values.
            continue
        if not any(is_same_type(actual, valid) for valid in valids):
            self.fail('Invalid type argument value for "{}"'.format(
                type.name()), context)
def process_type_info(self, info: TypeInfo) -> None:
    """Replace stale nodes reachable from a TypeInfo with their new versions."""
    # TODO additional things like the MRO
    replace_nodes_in_symbol_table(info.names, self.replacements)
    for index in range(len(info.mro)):
        info.mro[index] = self.fixup(info.mro[index])
    for base in info.bases:
        self.fixup_type(base)
def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> Type:
    """Return the type of a type object.

    For a generic type G with type variables T and S the type is of form

      def [T, S](...) -> G[T, S],

    where ... are argument types for the __init__ method (without the self
    argument).
    """
    init_method = info.get_method('__init__')
    if not init_method:
        # Must be an invalid class definition.
        return AnyType()
    else:
        # Construct callable type based on signature of __init__. Adjust
        # return type and insert type arguments.
        init_type = method_type_with_fallback(init_method,
                                              builtin_type('builtins.function'))
        if isinstance(init_type, CallableType):
            return class_callable(init_type, info, builtin_type('builtins.type'))
        else:
            # Overloaded __init__: build a constructor variant per overload item.
            items = []  # type: List[CallableType]
            for it in cast(Overloaded, init_type).items():
                items.append(class_callable(it, info, builtin_type('builtins.type')))
            return Overloaded(items)
def nearest_builtin_ancestor(type: TypeInfo) -> TypeInfo:
    """Return the first class in the MRO that is marked as a builtin class.

    Returns None if no class in the MRO is a builtin class (the declared
    return type predates this; callers should be prepared for None --
    TODO tighten to Optional[TypeInfo] once Optional is available here).
    """
    for base in type.mro:
        if base.defn.is_builtinclass:
            return base
    # The original code had a for/else returning None followed by an
    # unreachable 'assert False'; the dead assertion has been removed and
    # the None result is kept and documented.
    return None
def __init__(self, type: TypeInfo, base: 'ClassRepresentation') -> None:
    """Build the low-level representation of a class.

    Inherits slot and vtable layout from `base` (if any), then lays out the
    class's own methods and data attributes.
    """
    # C-level name of the type representation struct.
    self.cname = 'MR_%s' % type.name()
    self.fullname = type.fullname()
    # Maps attribute name -> slot index.
    self.slotmap = {}
    self.vtable_index = {}
    self.defining_class = {}
    self.vtable_methods = []
    if base:
        # Copy the base class layout first so indexes stay compatible.
        self.inherit_from_base(base)
    # Sorted iteration keeps the generated layout deterministic.
    for m in sorted(type.names):
        if isinstance(type.names[m].node, FuncBase):
            self.add_method(m, type)
        else:
            # Data attribute: allocate a slot and synthesize accessors.
            self.slotmap[m] = len(self.slotmap)
            self.add_method('_' + m, type)  # Getter TODO refactor
            self.add_method('set_' + m, type)  # Setter # TODO refactor
def save_namedtuple_body(self, named_tuple_info: TypeInfo) -> Iterator[None]:
    """Preserve the generated body of class-based named tuple and then restore it.

    Temporarily clear the names dict so we don't get errors about duplicate names
    that were already set in build_namedtuple_typeinfo (we already added the tuple
    field names while generating the TypeInfo, and actual duplicates are
    already reported).
    """
    nt_names = named_tuple_info.names
    named_tuple_info.names = SymbolTable()

    # Body of the with-statement runs here; the class body is analyzed into
    # the fresh (empty) symbol table.
    yield

    # Make sure we didn't use illegal names, then reset the names in the typeinfo.
    for prohibited in NAMEDTUPLE_PROHIBITED_NAMES:
        if prohibited in named_tuple_info.names:
            # Identical nodes mean the class simply re-exports the generated
            # member; only a genuine override is an error.
            if nt_names.get(prohibited) is named_tuple_info.names[prohibited]:
                continue
            ctx = named_tuple_info.names[prohibited].node
            assert ctx is not None
            self.fail('Cannot overwrite NamedTuple attribute "{}"'.format(prohibited),
                      ctx)

    # Restore the names in the original symbol table. This ensures that the symbol
    # table contains the field objects created by build_namedtuple_typeinfo. Exclude
    # __doc__, which can legally be overwritten by the class.
    named_tuple_info.names.update({
        key: value for key, value in nt_names.items()
        if key not in named_tuple_info.names or key != '__doc__'
    })
def add_protocol_members(typ: TypeInfo) -> None:
    """Compute and store the sorted list of member names of a protocol type.

    Collects names from every protocol base in the MRO (skipping 'object').
    """
    members = set()  # type: Set[str]
    if typ.mro:
        for base in typ.mro[:-1]:  # we skip "object" since everyone implements it
            if base.is_protocol:
                # Iterating a symbol table yields its member names.
                members.update(base.names)
    # sorted() accepts any iterable; the intermediate list() was redundant.
    typ.protocol_members = sorted(members)
def type_suffix(self, fdef: FuncDef, info: TypeInfo = None) -> str:
    """Return the suffix for a mangled name.

    This includes an optional type suffix for a function or method.

    NOTE(review): 'info' uses an implicit-Optional default (= None);
    consider Optional[TypeInfo] if Optional is imported at module level.
    """
    if not info:
        info = fdef.info
    # If info is None, we have a global function => no suffix. Also if the
    # method is not an override, we need no suffix.
    if not info or (not info.bases or not info.bases[0].type.has_method(fdef.name())):
        return ""
    elif is_simple_override(fdef, info):
        # A simple override reuses the suffix of the base class definition.
        return self.type_suffix(fdef, info.bases[0].type)
    elif self.is_pretty:
        return "`" + info.name()
    else:
        return "__" + info.name()
def lookup_member_var_or_accessor(info: TypeInfo, name: str,
                                  is_lvalue: bool) -> SymbolNode:
    """Find the attribute/accessor node that refers to a member of a type."""
    # TODO handle lvalues
    symbol = info.get(name)
    return symbol.node if symbol else None
def linearize_hierarchy(info: TypeInfo,
                        obj_type: Optional[Callable[[], Instance]] = None) -> List[TypeInfo]:
    """Linearize the class hierarchy rooted at info (input lists for C3 merge)."""
    # TODO describe
    if info.mro:
        # Reuse a previously computed MRO.
        return info.mro
    bases = info.direct_base_classes()
    if not bases and obj_type is not None and info.fullname() != 'builtins.object':
        # Second pass in import cycle, add a dummy `object` base class,
        # otherwise MRO calculation may spuriously fail.
        # MRO will be re-calculated for real in the third pass.
        bases = [obj_type().type]
    linearizations = []
    for base in bases:
        assert base is not None, "Cannot linearize bases for %s %s" % (info.fullname(), bases)
        linearizations.append(linearize_hierarchy(base, obj_type))
    linearizations.append(bases)
    return [info] + merge(linearizations)
def enter_class_scope(self, info: TypeInfo) -> str:
    """Enter a class target scope and return the new full target name."""
    # Duplicate the previous top non-class target (it can't be a class but since the
    # depths of all stacks must agree we need something).
    self.target_stack.append(self.target_stack[-1])
    enclosing = self.full_target_stack[-1]
    full_target = '%s.%s' % (enclosing, info.name())
    self.full_target_stack.append(full_target)
    self.scope_stack.append(info)
    return full_target
def make_type_info(self, name: str,
                   module_name: Optional[str] = None,
                   is_abstract: bool = False,
                   mro: Optional[List[TypeInfo]] = None,
                   bases: Optional[List[Instance]] = None,
                   typevars: Optional[List[str]] = None,
                   variances: Optional[List[int]] = None) -> TypeInfo:
    """Make a TypeInfo suitable for use in unit tests.

    If mro is omitted, defaults to [info, builtins.object]; if bases is
    omitted and an MRO is given, the first MRO entry becomes the single base.
    """
    class_def = ClassDef(name, Block([]), None, [])
    class_def.fullname = name
    if module_name is None:
        # Derive the module from a dotted name, else assume __main__.
        if '.' in name:
            module_name = name.rsplit('.', 1)[0]
        else:
            module_name = '__main__'
    if typevars:
        v = []  # type: List[TypeVarDef]
        # Type variable ids are 1-based; variance defaults to COVARIANT.
        for id, n in enumerate(typevars, 1):
            if variances:
                variance = variances[id - 1]
            else:
                variance = COVARIANT
            v.append(TypeVarDef(n, n, id, [], self.o, variance=variance))
        class_def.type_vars = v
    info = TypeInfo(SymbolTable(), class_def, module_name)
    if mro is None:
        mro = []
        if name != 'builtins.object':
            mro.append(self.oi)
    info.mro = [info] + mro
    if bases is None:
        if mro:
            # By default, assume that there is a single non-generic base.
            bases = [Instance(mro[0], [])]
        else:
            bases = []
    info.bases = bases
    return info
def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
                   special_sig: Optional[str]) -> CallableType:
    """Create a type object type based on the signature of __init__."""
    # Class-level type variables come first, then any from __init__ itself.
    variables = list(info.defn.type_vars) + list(init_type.variables)  # type: List[TypeVarDef]
    constructor = init_type.copy_modified(
        ret_type=fill_typevars(info),
        fallback=type_type,
        name=None,
        variables=variables,
        special_sig=special_sig,
    )
    return constructor.with_name(info.name())
def _add_member_to_class(self, member_name: str, member_type: types.Type,
                         clazz: nodes.TypeInfo) -> None:
    """Add a new member to the class.

    Add a variable with given name and type to the symbol table of a class.
    This also takes care about setting necessary attributes on the variable node.
    """
    member_var = nodes.Var(member_name)
    member_var.type = member_type
    member_var.info = clazz
    member_var._fullname = clazz.fullname + "." + member_name
    clazz.names[member_name] = nodes.SymbolTableNode(nodes.MDEF, member_var)
    self.log("Defined o.vo field: %s.%s as %s" % (clazz.fullname, member_name, member_type))
def class_callable(init_type: Callable, info: TypeInfo, type_type: Instance) -> Callable:
    """Create a type object type based on the signature of __init__."""
    variables = []  # type: List[TypeVarDef]
    # Re-number the class's type variables starting from 1.
    for i, tvar in enumerate(info.defn.type_vars):
        variables.append(
            TypeVarDef(tvar.name, i + 1, tvar.values, tvar.upper_bound))
    initvars = init_type.variables
    variables.extend(initvars)
    # The constructor keeps __init__'s argument list but returns the class's
    # self type, with 'type' (not 'function') as the fallback.
    c = Callable(init_type.arg_types,
                 init_type.arg_kinds,
                 init_type.arg_names,
                 self_type(info),
                 type_type,
                 None,
                 variables).with_name('"{}"'.format(info.name()))
    return convert_class_tvars_to_func_tvars(c, len(initvars))
def generate_class(self, cls: TypeInfo) -> 'ClassRepresentation':
    """Emit C-level vtable and runtime type info for a class.

    Returns (and caches) the ClassRepresentation for cls.
    """
    # Representation of the first base class, if any (single inheritance).
    if cls.bases:
        baserep = self.get_class_representation(cls.bases[0].type)
    else:
        baserep = None
    rep = ClassRepresentation(cls, baserep)
    self.classes[cls] = rep

    # Emit vtable.
    vtable = 'MVT_%s' % cls.name()
    self.emit_types('MFunction %s[] = {' % vtable)
    for m in rep.vtable_methods:
        # Each entry names the class that actually defines the method.
        defining_class = rep.defining_class[m]
        self.emit_types(' M%s_%s,' % (defining_class, m))
    self.emit_types('}; /* %s */' % vtable)

    # Emit type runtime info.
    self.emit_types('MTypeRepr %s = {' % rep.cname)
    self.emit_types(' %s,' % vtable)
    self.emit_types(' 0,')
    self.emit_types(' "%s"' % cls.fullname())
    self.emit_types('};\n')

    return rep
def build_class_with_annotated_fields(api: TypeChecker, base: Type, fields: 'OrderedDict[str, Type]', name: str) -> Instance: """Build an Instance with `name` that contains the specified `fields` as attributes and extends `base`.""" # Credit: This code is largely copied/modified from TypeChecker.intersect_instance_callable and # NamedTupleAnalyzer.build_namedtuple_typeinfo cur_module = cast(MypyFile, api.scope.stack[0]) gen_name = gen_unique_name(name, cur_module.names) cdef = ClassDef(name, Block([])) cdef.fullname = cur_module.fullname() + '.' + gen_name info = TypeInfo(SymbolTable(), cdef, cur_module.fullname()) cdef.info = info info.bases = [base] def add_field(var: Var, is_initialized_in_class: bool = False, is_property: bool = False) -> None: var.info = info var.is_initialized_in_class = is_initialized_in_class var.is_property = is_property var._fullname = '%s.%s' % (info.fullname(), var.name()) info.names[var.name()] = SymbolTableNode(MDEF, var) vars = [Var(item, typ) for item, typ in fields.items()] for var in vars: add_field(var, is_property=True) calculate_mro(info) info.calculate_metaclass_type() cur_module.names[gen_name] = SymbolTableNode(GDEF, info, plugin_generated=True) return Instance(info, [])
def get_private_descriptor_type(type_info: TypeInfo, private_field_name: str,
                                is_nullable: bool) -> MypyType:
    """ Return declared type of type_info's private_field_name (used for private Field attributes)"""
    sym = type_info.get(private_field_name)
    if sym is None or not isinstance(sym.node, Var) or sym.node.type is None:
        # Unknown member, non-variable member, or variable with no declared
        # type: give up and return Any.
        return AnyType(TypeOfAny.explicit)
    declared = sym.node.type
    if is_nullable:
        declared = make_optional(declared)
    return declared
def process_nested_classes(self, outer_def: ClassDef) -> None:
    """Create TypeInfos for classes nested inside outer_def, recursively."""
    self.sem.enter_class(outer_def.info)
    for node in outer_def.defs.body:
        if isinstance(node, ClassDef):
            node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
            # Qualify the nested class with the outer class name, if known.
            if outer_def.fullname:
                node.info._fullname = outer_def.fullname + '.' + node.info.name()
            else:
                node.info._fullname = node.info.name()
            node.fullname = node.info._fullname
            # Nested classes are members (MDEF) of the outer class.
            symbol = SymbolTableNode(MDEF, node.info)
            outer_def.info.names[node.name] = symbol
            # Recurse for deeper levels of nesting.
            self.process_nested_classes(node)
        elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)):
            node.accept(self)
    self.sem.leave_class()
def add_new_sym_for_info(info: TypeInfo, *, name: str, sym_type: MypyType,
                         no_serialize: bool = False) -> None:
    """Bind a plugin-generated class attribute named `name` of type `sym_type`."""
    # `type` is the type of the variable itself; `info` below is the class
    # the variable is bound to.
    new_var = Var(name=name, type=sym_type)
    new_var.info = info
    new_var.is_inferred = True
    new_var.is_initialized_in_class = True
    new_var._fullname = info.fullname + "." + name
    info.names[name] = SymbolTableNode(
        MDEF, new_var, plugin_generated=True, no_serialize=no_serialize)
def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo, node: Context, is_lvalue: bool, is_super: bool, builtin_type: Callable[[str], Instance], msg: MessageBuilder, report_type: Type = None) -> Type: """Analyse attribute access that does not target a method. This is logically part of analyze_member_access and the arguments are similar. """ # It was not a method. Try looking up a variable. v = lookup_member_var_or_accessor(info, name, is_lvalue) vv = v if isinstance(vv, Decorator): # The associated Var node of a decorator contains the type. v = vv.var if isinstance(v, Var): return analyze_var(name, v, itype, info, node, is_lvalue, msg) elif isinstance(v, FuncDef): assert False, "Did not expect a function" elif not v and name not in ['__getattr__', '__setattr__']: if not is_lvalue: method = info.get_method('__getattr__') if method: typ = map_instance_to_supertype(itype, method.info) getattr_type = expand_type_by_instance( method_type_with_fallback( method, builtin_type('builtins.function')), typ) if isinstance(getattr_type, CallableType): return getattr_type.ret_type if itype.type.fallback_to_any: return AnyType() # Could not find the member. if is_super: msg.undefined_in_superclass(name, node) return AnyType() else: return msg.has_no_attr(report_type or itype, name, node)
def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance) -> CallableType:
    """Create a type object type based on the signature of __init__."""
    variables = []  # type: List[TypeVarDef]
    # Re-number the class's type variables (1-based), preserving variance.
    for i, tvar in enumerate(info.defn.type_vars):
        variables.append(
            TypeVarDef(tvar.name, i + 1, tvar.values, tvar.upper_bound,
                       tvar.variance))
    initvars = init_type.variables
    variables.extend(initvars)
    # The constructor returns the class's self type, with 'type' as fallback.
    callable_type = init_type.copy_modified(
        ret_type=self_type(info), fallback=type_type, name=None,
        variables=variables)
    c = callable_type.with_name('"{}"'.format(info.name()))
    return convert_class_tvars_to_func_tvars(c, len(initvars))
def linearize_hierarchy(info: TypeInfo,
                        obj_type: Optional[Callable[[], Instance]] = None) -> List[TypeInfo]:
    """Linearize the class hierarchy rooted at info (input lists for C3 merge)."""
    # TODO describe
    if info.mro:
        # Reuse a previously computed MRO.
        return info.mro
    bases = info.direct_base_classes()
    if not bases and obj_type is not None and info.fullname != 'builtins.object':
        # Second pass in import cycle, add a dummy `object` base class,
        # otherwise MRO calculation may spuriously fail.
        # MRO will be re-calculated for real in the third pass.
        bases = [obj_type().type]
    linearizations = []
    for base in bases:
        assert base is not None, "Cannot linearize bases for %s %s" % (info.fullname, bases)
        linearizations.append(linearize_hierarchy(base, obj_type))
    linearizations.append(bases)
    return [info] + merge(linearizations)
def process_type_info(self, info: TypeInfo) -> None:
    """Record fine-grained dependencies implied by a class definition.

    Wires triggers for bases, tuple/TypedDict forms, metaclass, inherited
    names, constructors, and abstract-status changes.
    """
    target = self.scope.current_full_target()
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            # Recheck Liskov if needed, self definitions are checked in the defining method
            if node.node.is_initialized_in_class and has_user_bases(info):
                self.add_dependency(
                    make_trigger(info.fullname() + '.' + name))
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make references
                # to the attribute in the subclass stale.
                self.add_dependency(
                    make_trigger(base_info.fullname() + '.' + name),
                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        # Re-trigger every inherited name through the subclass.
        for name, node in base_info.names.items():
            self.add_dependency(
                make_trigger(base_info.fullname() + '.' + name),
                target=make_trigger(info.fullname() + '.' + name))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__init__'),
            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(
            make_trigger(base_info.fullname() + '.__new__'),
            target=make_trigger(info.fullname() + '.__new__'))
        # If the set of abstract attributes change, this may invalidate class
        # instantiation, or change the generated error message, since Python checks
        # class abstract status when creating an instance.
        #
        # TODO: We should probably add this dependency only from the __init__ of the
        # current class, and independent of bases (to trigger changes in message
        # wording, as errors may enumerate all abstract attributes).
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'),
            target=make_trigger(info.fullname() + '.__init__'))
        # If the base class abstract attributes change, subclass abstract
        # attributes need to be recalculated.
        self.add_dependency(
            make_trigger(base_info.fullname() + '.(abstract)'))
def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance,
                   special_sig: Optional[str], is_new: bool) -> CallableType:
    """Create a type object type based on the signature of __init__."""
    # Class-level type variables come first, then any from __init__/__new__.
    variables = list(info.defn.type_vars) + list(init_type.variables)  # type: List[TypeVarDef]

    init_ret_type = get_proper_type(init_type.ret_type)
    trust_new_return = is_new and isinstance(init_ret_type, (Instance, TupleType))
    if trust_new_return:
        # An explicit Instance/tuple return from __new__ is kept as-is.
        ret_type = init_type.ret_type  # type: Type
    else:
        ret_type = fill_typevars(info)

    constructor = init_type.copy_modified(ret_type=ret_type,
                                          fallback=type_type,
                                          name=None,
                                          variables=variables,
                                          special_sig=special_sig)
    return constructor.with_name(info.name())
def _extract_python_type_from_typeengine(api: SemanticAnalyzerPluginInterface,
                                         node: TypeInfo, type_args) -> Instance:
    """Map a SQLAlchemy TypeEngine subclass to the Python type it carries.

    Special-cases Enum columns; otherwise reads the type argument of the
    TypeEngine generic base.
    """
    if node.fullname == "sqlalchemy.sql.sqltypes.Enum" and type_args:
        first_arg = type_args[0]
        if isinstance(first_arg, NameExpr) and isinstance(
                first_arg.node, TypeInfo):
            # Enum(SomePEP435Enum) -> that enum class itself.
            for base_ in first_arg.node.mro:
                if base_.fullname == "enum.Enum":
                    return Instance(first_arg.node, [])
            # TODO: support other pep-435 types here
        else:
            # Enum('a', 'b', ...) with string values -> str.
            n = api.lookup_fully_qualified("builtins.str")
            return Instance(n.node, [])

    assert node.has_base("sqlalchemy.sql.type_api.TypeEngine"), (
        "could not extract Python type from node: %s" % node)

    # Read the type argument out of the TypeEngine[...] generic base.
    type_engine = map_instance_to_supertype(
        Instance(node, []),
        api.modules["sqlalchemy.sql.type_api"].names["TypeEngine"].node,
    )
    return type_engine.args[-1]
def add_slots(self, info: TypeInfo, attributes: List[DataclassAttribute], *,
              correct_version: bool) -> None:
    """Compute __slots__ for a dataclass(slots=True), reporting conflicts."""
    if not correct_version:
        # `slots=True` only exists since Python 3.10; on earlier versions it
        # is simply an unknown argument to the `dataclass` function.
        self._ctx.api.fail(
            'Keyword argument "slots" for "dataclass" '
            'is only valid in Python 3.10 and higher',
            self._ctx.reason,
        )
        return

    explicit_slots = info.slots is not None or info.names.get('__slots__')
    if explicit_slots:
        # The class body already specifies `__slots__` while
        # `@dataclass(slots=True)` is also used; at runtime this raises
        # a TypeError.
        self._ctx.api.fail(
            '"{}" both defines "__slots__" and is used with "slots=True"'.format(
                self._ctx.cls.name,
            ),
            self._ctx.cls,
        )
        return

    info.slots = {attr.name for attr in attributes}
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base() function
    is encountered.

    Builds a synthetic TypeInfo whose metaclass is DeclarativeMeta, bases it
    on the `cls=` argument (if given) or on `object`, and registers it in the
    enclosing symbol table.
    """
    cls = ClassDef(ctx.name, Block([]))
    cls.fullname = ctx.api.qualified_name(ctx.name)

    declarative_meta_sym: SymbolTableNode = ctx.api.modules[
        "sqlalchemy.orm.decl_api"].names["DeclarativeMeta"]
    declarative_meta_typeinfo: TypeInfo = declarative_meta_sym.node

    # Synthesize a NameExpr so the ClassDef records its metaclass.
    declarative_meta_name: NameExpr = NameExpr("DeclarativeMeta")
    declarative_meta_name.kind = GDEF
    declarative_meta_name.fullname = "sqlalchemy.orm.decl_api.DeclarativeMeta"
    declarative_meta_name.node = declarative_meta_typeinfo
    cls.metaclass = declarative_meta_name

    declarative_meta_instance = Instance(declarative_meta_typeinfo, [])

    info = TypeInfo(SymbolTable(), cls, ctx.api.cur_mod_id)
    info.declared_metaclass = info.metaclass_type = declarative_meta_instance
    cls.info = info

    # Fix: `obj` was previously bound only in the `else` branch, so hitting
    # MroError with a custom `cls=` argument raised NameError. Bind it
    # unconditionally since the except handler needs it as a fallback base.
    obj = ctx.api.builtin_type("builtins.object")

    cls_arg = util._get_callexpr_kwarg(ctx.call, "cls")
    if cls_arg is not None:
        # declarative_base(cls=SomeMixin): scan the mixin and use it as base.
        decl_class._scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [obj]

    try:
        calculate_mro(info)
    except MroError:
        util.fail(ctx.api,
                  "Not able to calculate MRO for declarative base",
                  ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def create_dynamic_class(
    ctx: DynamicClassDefContext,
    bases: List[Instance],
    *,
    name: Optional[str] = None,
    metaclass: Optional[str] = None,
    symbol_table: Optional[SymbolTable] = None,
) -> TypeInfo:
    """Build, register, and return a synthetic TypeInfo for a dynamic class.

    Falls back to `object` as the sole base (with fallback_to_any set) when
    the MRO cannot be computed.
    """
    if name is None:
        name = ctx.name

    cls_def = ClassDef(name, Block([]))
    cls_def.fullname = ctx.api.qualified_name(ctx.name)

    type_info = TypeInfo(SymbolTable(), cls_def, ctx.api.cur_mod_id)

    if metaclass is not None:
        meta_info = lookup_type_info(ctx.api, metaclass)
        if meta_info is not None:
            type_info.declared_metaclass = Instance(meta_info, [])

    cls_def.info = type_info

    object_base = ctx.api.builtin_type("builtins.object")
    type_info.bases = bases if bases else [object_base]
    try:
        calculate_mro(type_info)
    except MroError:
        ctx.api.fail("Not able to calculate MRO for dynamic class", ctx.call)
        type_info.bases = [object_base]
        type_info.fallback_to_any = True

    # Register either in the module's global namespace or the caller-provided
    # symbol table.
    node = SymbolTableNode(GDEF, type_info)
    if symbol_table is None:
        ctx.api.add_symbol_table_node(name, node)
    else:
        symbol_table[name] = node

    add_metadata_var(ctx.api, type_info)
    add_query_cls_var(ctx.api, type_info)

    return type_info
def is_model_subclass_info(info: TypeInfo,
                           django_context: "DjangoContext") -> bool:
    """Return True if *info* is a registered model class or derives from Model."""
    if info.fullname in django_context.all_registered_model_class_fullnames:
        return True
    return info.has_base(fullnames.MODEL_CLASS_FULLNAME)
def process_type_info(self, info: TypeInfo) -> None:
    """Record fine-grained dependencies generated by a class definition.

    Adds triggers so that changes to base classes, tuple/TypedDict/metaclass
    types, protocol members, and inherited attributes invalidate this class
    (and its __init__/__new__) during incremental re-checking.
    """
    target = self.scope.current_full_target()
    # Any change to a base type affects this class.
    for base in info.bases:
        self.add_type_dependencies(base, target=target)
    if info.tuple_type:
        self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
    if info.typeddict_type:
        self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
    if info.declared_metaclass:
        self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
    if info.is_protocol:
        for base_info in info.mro[:-1]:
            # We add dependencies from whole MRO to cover explicit subprotocols.
            # For example:
            #
            #     class Super(Protocol):
            #         x: int
            #     class Sub(Super, Protocol):
            #         y: int
            #
            # In this example we add <Super[wildcard]> -> <Sub>, to invalidate
            # Sub if a new member is added to Super.
            self.add_dependency(make_wildcard_trigger(base_info.fullname()),
                                target=make_trigger(target))
            # More protocol dependencies are collected in
            # TypeState._snapshot_protocol_deps after a full run or update is
            # finished.
    self.add_type_alias_deps(self.scope.current_target())
    for name, node in info.names.items():
        if isinstance(node.node, Var):
            # Recheck Liskov if needed, self definitions are checked in the
            # defining method.
            if node.node.is_initialized_in_class and has_user_bases(info):
                self.add_dependency(make_trigger(info.fullname() + '.' + name))
            for base_info in non_trivial_bases(info):
                # If the type of an attribute changes in a base class, we make
                # references to the attribute in the subclass stale.
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
    for base_info in non_trivial_bases(info):
        for name, node in base_info.names.items():
            if self.options and self.options.logical_deps:
                # Skip logical dependency if an attribute is not overridden.
                # For example, in case of:
                #     class Base:
                #         x = 1
                #         y = 2
                #     class Sub(Base):
                #         x = 3
                # we skip <Base.y> -> <Child.y>, because even if `y` is
                # unannotated it doesn't affect precision of Liskov checking.
                if name not in info.names:
                    continue
            self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                target=make_trigger(info.fullname() + '.' + name))
        self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                            target=make_trigger(info.fullname() + '.__init__'))
        self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                            target=make_trigger(info.fullname() + '.__new__'))
        # If the set of abstract attributes change, this may invalidate class
        # instantiation, or change the generated error message, since Python
        # checks class abstract status when creating an instance.
        #
        # TODO: We should probably add this dependency only from the __init__
        # of the current class, and independent of bases (to trigger changes
        # in message wording, as errors may enumerate all abstract attributes).
        self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'),
                            target=make_trigger(info.fullname() + '.__init__'))
        # If the base class abstract attributes change, subclass abstract
        # attributes need to be recalculated.
        self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'))
def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                              mx: MemberContext) -> Type:
    """Analyse attribute access that does not target a method.

    This is logically part of analyze_member_access and the arguments are
    similar.

    original_type is the type of E in the expression E.var
    """
    # It was not a method. Try looking up a variable.
    v = lookup_member_var_or_accessor(info, name, mx.is_lvalue)

    vv = v
    if isinstance(vv, Decorator):
        # The associated Var node of a decorator contains the type.
        v = vv.var

    if isinstance(vv, TypeInfo):
        # If the associated variable is a TypeInfo synthesize a Var node for
        # the purposes of type checking.  This enables us to type check things
        # like accessing class attributes on an inner class.
        v = Var(name, type=type_object_type(vv, mx.builtin_type))
        v.info = info

    if isinstance(vv, TypeAlias) and isinstance(vv.target, Instance):
        # Similar to the above TypeInfo case, we allow using
        # qualified type aliases in runtime context if it refers to an
        # instance type. For example:
        #     class C:
        #         A = List[int]
        #     x = C.A() <- this is OK
        typ = instance_alias_type(vv, mx.builtin_type)
        v = Var(name, type=typ)
        v.info = info

    if isinstance(v, Var):
        implicit = info[name].implicit

        # An assignment to final attribute is always an error,
        # independently of types.
        if mx.is_lvalue and not mx.chk.get_final_context():
            check_final_member(name, info, mx.msg, mx.context)

        return analyze_var(name, v, itype, info, mx, implicit=implicit)
    elif isinstance(v, FuncDef):
        assert False, "Did not expect a function"
    elif not v and name not in ['__getattr__', '__setattr__', '__getattribute__']:
        # No variable found; fall back to the class's custom attribute hooks.
        if not mx.is_lvalue:
            for method_name in ('__getattribute__', '__getattr__'):
                method = info.get_method(method_name)
                # __getattribute__ is defined on builtins.object and returns
                # Any, so without the guard this search will always find
                # object.__getattribute__ and conclude that the attribute
                # exists.
                if method and method.info.fullname() != 'builtins.object':
                    function = function_type(
                        method, mx.builtin_type('builtins.function'))
                    # Bind and specialize the hook's signature to this
                    # instance before reading its return type.
                    bound_method = bind_self(function, mx.original_type)
                    typ = map_instance_to_supertype(itype, method.info)
                    getattr_type = expand_type_by_instance(bound_method, typ)
                    if isinstance(getattr_type, CallableType):
                        result = getattr_type.ret_type

                        # Call the attribute hook before returning.
                        fullname = '{}.{}'.format(method.info.fullname(), name)
                        hook = mx.chk.plugin.get_attribute_hook(fullname)
                        if hook:
                            result = hook(
                                AttributeContext(mx.original_type, result,
                                                 mx.context, mx.chk))
                        return result
        else:
            # Assignment target: a user-defined __setattr__ determines the
            # accepted value type (its last parameter).
            setattr_meth = info.get_method('__setattr__')
            if setattr_meth and setattr_meth.info.fullname() != 'builtins.object':
                setattr_func = function_type(
                    setattr_meth, mx.builtin_type('builtins.function'))
                bound_type = bind_self(setattr_func, mx.original_type)
                typ = map_instance_to_supertype(itype, setattr_meth.info)
                setattr_type = expand_type_by_instance(bound_type, typ)
                if isinstance(setattr_type,
                              CallableType) and len(setattr_type.arg_types) > 0:
                    return setattr_type.arg_types[-1]

    if itype.type.fallback_to_any:
        # Unknown base class: give up and treat the attribute as Any.
        return AnyType(TypeOfAny.special_form)

    # Could not find the member.
    if mx.is_super:
        mx.msg.undefined_in_superclass(name, mx.context)
        return AnyType(TypeOfAny.from_error)
    else:
        if mx.chk and mx.chk.should_suppress_optional_error([itype]):
            return AnyType(TypeOfAny.from_error)
        return mx.msg.has_no_attr(mx.original_type, itype, name, mx.context)
def has_any_of_bases(info: TypeInfo, bases: Iterable[str]) -> bool:
    """Return True if *info* has at least one of the given fully-qualified bases."""
    return any(info.has_base(fullname) for fullname in bases)
def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> ProperType:
    """Return the type of a type object.

    For a generic type G with type variables T and S the type is generally of
    form

      Callable[..., G[T, S]]

    where ... are argument types for the __init__/__new__ method (without the
    self argument). Also, the fallback type will be 'type' instead of
    'function'.
    """
    # We take the type from whichever of __init__ and __new__ is first
    # in the MRO, preferring __init__ if there is a tie.
    init_method = info.get('__init__')
    new_method = info.get('__new__')
    if not init_method or not is_valid_constructor(init_method.node):
        # Must be an invalid class definition.
        return AnyType(TypeOfAny.from_error)
    # There *should* always be a __new__ method except the test stubs
    # lack it, so just copy init_method in that situation.
    new_method = new_method or init_method
    if not is_valid_constructor(new_method.node):
        # Must be an invalid class definition.
        return AnyType(TypeOfAny.from_error)

    # The two is_valid_constructor() checks ensure this.
    assert isinstance(new_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator))
    assert isinstance(init_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator))

    # Lower MRO index = defined closer to this class.
    init_index = info.mro.index(init_method.node.info)
    new_index = info.mro.index(new_method.node.info)

    # Prefer the metaclass as the fallback when one is declared.
    fallback = info.metaclass_type or builtin_type('builtins.type')
    if init_index < new_index:
        method = init_method.node  # type: Union[FuncBase, Decorator]
        is_new = False
    elif init_index > new_index:
        method = new_method.node
        is_new = True
    else:
        if init_method.node.info.fullname == 'builtins.object':
            # Both are defined by object.  But if we've got a bogus
            # base class, we can't know for sure, so check for that.
            if info.fallback_to_any:
                # Construct a universal callable as the prototype:
                # (*Any, **Any) -> Any.
                any_type = AnyType(TypeOfAny.special_form)
                sig = CallableType(arg_types=[any_type, any_type],
                                   arg_kinds=[ARG_STAR, ARG_STAR2],
                                   arg_names=["_args", "_kwds"],
                                   ret_type=any_type,
                                   fallback=builtin_type('builtins.function'))
                return class_callable(sig, info, fallback, None, is_new=False)

        # Otherwise prefer __init__ in a tie. It isn't clear that this
        # is the right thing, but __new__ caused problems with
        # typeshed (#5647).
        method = init_method.node
        is_new = False

    # Construct callable type based on signature of __init__. Adjust
    # return type and insert type arguments.
    if isinstance(method, FuncBase):
        t = function_type(method, fallback)
    else:
        # Decorated constructor: use the decorator's computed type.
        assert isinstance(method.type, ProperType)
        assert isinstance(method.type, FunctionLike)  # is_valid_constructor() ensures this
        t = method.type
    return type_object_type_from_function(t, info, method.info, fallback, is_new)
def read_only_property(self, name: str, type: TypeInfo,
                       context: Context) -> None:
    """Report an attempt to assign to a read-only property."""
    message = 'Property "{}" defined in "{}" is read-only'.format(
        name, type.name())
    self.fail(message, context)
def _define_method(context: Union[mypy.plugin.AnalyzeTypeContext,
                                  mypy.plugin.ClassDefContext],
                   cls_info: nodes.TypeInfo, namespace: str, name: str,
                   arguments: List[nodes.Argument],
                   return_type: types.Type) -> None:
    '''
    Helper function to define class level or instance level method.

    If an instance-level method will be created, the user of this method is
    responsible for specifying :code:`self` as the first argument.

    This is basically a ripoff of
    https://github.com/python/mypy/blob/master/mypy/plugins/common.py#L80
    That implementation can't be directly used as it can create only an
    instance-level method (always adding :code:`self`). It is also not
    supported when defining new types in :code:`get_type_analyze_hook` hook
    (see available hooks:
    https://mypy.readthedocs.io/en/latest/extending_mypy.html#current-list-of-plugin-hooks)

    :param context: mypy plugin context used to interact with mypy API
    :param cls_info: :code:`TypeInfo` of class where this method should be bound
    :param namespace: used to build fullname of this method
    :param name: name of the method being defined
    :param arguments: fully typed argument nodes for the method
    :param return_type: mypy type the method returns
    '''
    # The fallback "function" type has to be looked up differently depending
    # on which plugin-hook context we were called from.
    function_type: types.Instance
    if isinstance(context, mypy.plugin.ClassDefContext):
        function_type = context.api.named_type('__builtins__.function')
    elif isinstance(context, mypy.plugin.AnalyzeTypeContext):
        function_type = context.api.named_type('builtins.function')
    else:
        raise ValueError('Not supported context type = {}.'.format(
            type(context)))

    arg_types: List[Optional[types.Type]] = []
    arg_names: List[str] = []
    # Kinds are kind of arguments (positional, keyword, ...); see
    # :code:`nodes.ARG_POS` for example.
    arg_kinds: List[int] = []
    for arg in arguments:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name())
        arg_kinds.append(arg.kind)

    # Creating type of a callable, this is equivalent to writing
    #     Callable[[arg_types, ...], return_type]
    # in mypy typing system, except you have to specify argument position
    # kinds and argument names (as when you write a real function).
    # The last argument is the fallback type :code:`function_type` used as
    # the nominal supertype of the callable.
    signature = types.CallableType(arg_types, arg_kinds, arg_names,
                                   return_type, function_type)

    # Once we have our function type defined we also have to create an AST
    # node.  This is needed so mypy knows that the given function is bound to
    # some class or module, so calls to it can find its type back.
    # The following line is equivalent to:
    #     def <name>(<arguments>): pass
    # You can see it is without types - it's just an AST node.
    func = nodes.FuncDef(name, arguments, nodes.Block([nodes.PassStmt()]))

    # Link method -> class (the class -> method link is added a few lines
    # later); presumably needed so it can be seen as a bound method.
    func.info = cls_info
    # Specify method type (return type, argument types, ...) taken from the
    # :code:`signature` callable type defined above.
    func.type = signature.with_name(name)
    # Fullname is required by mypy as the unique identifier for any object;
    # it is normally filled in by mypy's AST parser.
    func._fullname = f'{namespace}.{name}'  # pylint: disable=protected-access
    # Not strictly required, but lets mypy report where errors happen.
    func.line = cls_info.line

    # Register our method on the class (defined by the :code:`cls_info`
    # object).  Every class has a :code:`names` attribute, a
    # :code:`SymbolTable` instance that defines all attributes and methods.
    # Entries in this table are :code:`SymbolTableNode`, whose first argument
    # is the kind:
    #   LDEF: local definition
    #   GDEF: global (module-level) definition
    #   MDEF: class member definition
    #   UNBOUND_IMPORTED: temporary kind for imported names
    # and whose second argument is the AST node defining the name.
    # This registers the name on the class but does not add the node to the
    # class's AST.
    cls_info.names[name] = nodes.SymbolTableNode(nodes.MDEF,
                                                 func,
                                                 plugin_generated=True)

    # To register our method in the class' AST we append it to the class
    # body.  Mypy can work even without this, but then cannot perform some
    # checks.
    cls_info.defn.defs.body.append(func)
def has_any_of_bases(info: TypeInfo, bases: typing.Sequence[str]) -> bool:
    """Return whether *info* derives from any of the given fully-qualified names."""
    return any(info.has_base(candidate) for candidate in bases)
def base_class_definitions_incompatible(self, name: str, base1: TypeInfo,
                                        base2: TypeInfo,
                                        context: Context) -> None:
    """Report that two base classes define *name* incompatibly."""
    template = ('Definition of "{}" in base class "{}" is incompatible '
                'with definition in base class "{}"')
    self.fail(template.format(name, base1.name(), base2.name()), context)
def strip_type_info(self, info: TypeInfo) -> None:
    """Reset the semantic-analysis state of *info* so it can be re-analyzed.

    Clears type variables, bases, MRO, abstractness, tuple/TypedDict
    specializations and metaclass info, and invalidates cached subtype
    relationships for the class.
    """
    info.type_vars = []
    info.bases = []
    info.is_abstract = False
    info.abstract_attributes = []
    info.mro = []
    info.add_type_vars()
    # Fix: `info.tuple_type = None` was assigned twice in the original;
    # the redundant duplicate has been removed.
    info.tuple_type = None
    info.typeddict_type = None
    TypeState.reset_subtype_caches_for(info)
    info.declared_metaclass = None
    info.metaclass_type = None
def disjointness_violation(self, cls: TypeInfo, disjoint: TypeInfo,
                           context: Context) -> None:
    """Report that a disjointclass constraint forbids this base class."""
    message = ('disjointclass constraint of class {} disallows {} as a '
               'base class').format(cls.name(), disjoint.name())
    self.fail(message, context)
def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                              node: Context, is_lvalue: bool, is_super: bool,
                              builtin_type: Callable[[str], Instance],
                              not_ready_callback: Callable[[str, Context], None],
                              msg: MessageBuilder,
                              original_type: Type,
                              chk: 'mypy.checker.TypeChecker') -> Type:
    """Analyse attribute access that does not target a method.

    This is logically part of analyze_member_access and the arguments are
    similar.

    original_type is the type of E in the expression E.var
    """
    # It was not a method. Try looking up a variable.
    v = lookup_member_var_or_accessor(info, name, is_lvalue)

    vv = v
    if isinstance(vv, Decorator):
        # The associated Var node of a decorator contains the type.
        v = vv.var

    if isinstance(vv, TypeInfo):
        # If the associated variable is a TypeInfo synthesize a Var node for
        # the purposes of type checking.  This enables us to type check things
        # like accessing class attributes on an inner class.
        v = Var(name, type=type_object_type(vv, builtin_type))
        v.info = info

    if isinstance(v, Var):
        return analyze_var(name, v, itype, info, node, is_lvalue, msg,
                           original_type, not_ready_callback, chk=chk)
    elif isinstance(v, FuncDef):
        assert False, "Did not expect a function"
    elif not v and name not in ['__getattr__', '__setattr__', '__getattribute__']:
        # No variable found; fall back to the class's custom attribute hooks.
        if not is_lvalue:
            for method_name in ('__getattribute__', '__getattr__'):
                method = info.get_method(method_name)
                # __getattribute__ is defined on builtins.object and returns
                # Any, so without the guard this search will always find
                # object.__getattribute__ and conclude that the attribute
                # exists.
                if method and method.info.fullname() != 'builtins.object':
                    function = function_type(method,
                                             builtin_type('builtins.function'))
                    # Bind and specialize the hook's signature to this
                    # instance before reading its return type.
                    bound_method = bind_self(function, original_type)
                    typ = map_instance_to_supertype(itype, method.info)
                    getattr_type = expand_type_by_instance(bound_method, typ)
                    if isinstance(getattr_type, CallableType):
                        return getattr_type.ret_type
        else:
            # Assignment target: a user-defined __setattr__ determines the
            # accepted value type (its last parameter).
            setattr_meth = info.get_method('__setattr__')
            if setattr_meth and setattr_meth.info.fullname() != 'builtins.object':
                setattr_func = function_type(setattr_meth,
                                             builtin_type('builtins.function'))
                bound_type = bind_self(setattr_func, original_type)
                typ = map_instance_to_supertype(itype, setattr_meth.info)
                setattr_type = expand_type_by_instance(bound_type, typ)
                if isinstance(setattr_type,
                              CallableType) and len(setattr_type.arg_types) > 0:
                    return setattr_type.arg_types[-1]

    if itype.type.fallback_to_any:
        # Unknown base class: give up and treat the attribute as Any.
        return AnyType(TypeOfAny.special_form)

    # Could not find the member.
    if is_super:
        msg.undefined_in_superclass(name, node)
        return AnyType(TypeOfAny.from_error)
    else:
        if chk and chk.should_suppress_optional_error([itype]):
            return AnyType(TypeOfAny.from_error)
        return msg.has_no_attr(original_type, itype, name, node)
def dump_typeinfo(self, info: TypeInfo) -> List[str]:
    """Return the textual dump of *info* as a list of lines."""
    dumped = info.dump(str_conv=self.str_conv,
                       type_str_conv=self.type_str_conv)
    return dumped.splitlines()