def make_fake_register_class_instance(api: CheckerPluginInterface,
                                      type_args: Sequence[Type]) -> Instance:
    """Build a synthetic ``functools.<REGISTER_RETURN_CLASS>`` instance.

    The fake class derives from ``builtins.object`` and exposes a
    ``__call__(name) -> None`` method, standing in for the register
    callable that ``functools.singledispatch`` returns at runtime.
    """
    class_def = ClassDef(REGISTER_RETURN_CLASS, Block([]))
    class_def.fullname = f'functools.{REGISTER_RETURN_CLASS}'

    type_info = TypeInfo(SymbolTable(), class_def, "functools")
    object_info = api.named_generic_type('builtins.object', []).type
    type_info.bases = [Instance(object_info, [])]
    type_info.mro = [type_info, object_info]
    class_def.info = type_info

    # The register callable accepts a single positional argument of
    # unspecified type and returns None.
    call_arg = Argument(
        Var('name'),
        AnyType(TypeOfAny.implementation_artifact),
        None,
        ARG_POS,
    )
    add_method_to_class(api, class_def, '__call__', [call_arg], NoneType())
    return Instance(type_info, type_args)
def apply_interface(
    iface_arg: Expression,
    class_info: TypeInfo,
    api: SemanticAnalyzerPluginInterface,
    context: Context,
) -> None:
    """Record that ``class_info`` implements the interface named by ``iface_arg``.

    Validates that the argument is a resolvable reference to an interface
    TypeInfo, stores the implementation in plugin metadata, and appends a
    fake promotion entry to the class's MRO so the implementation is
    treated as a subtype of the interface.

    NOTE(review): this function references ``self`` (``self._is_interface``,
    ``self._get_metadata``, ``self.log``) but takes no ``self`` parameter —
    as written it raises NameError once it reaches those calls. It appears
    to have been extracted from a plugin-class method; confirm and restore
    the enclosing class or thread the plugin object in explicitly.

    NOTE(review): unlike the guarded variant of this logic elsewhere in
    this file, nothing prevents appending a duplicate promotion entry to
    ``class_info.mro`` when the same interface is applied twice — verify
    whether a containment check on existing ``_promote`` entries is needed.
    """
    if not isinstance(iface_arg, RefExpr):
        # Only direct references (names/attributes) can be resolved here.
        api.fail("Argument to implementer should be a ref expression", iface_arg)
        return
    iface_name = iface_arg.fullname
    if iface_name is None:
        # unknown interface, probably from stubless package
        return

    iface_type = iface_arg.node
    if iface_type is None:
        return
    if not isinstance(iface_type, TypeInfo):
        # Possibly an interface from unimported package, ignore
        return

    if not self._is_interface(iface_type):
        api.fail(
            f"zope.interface.implementer accepts interface, "
            f"not {iface_name}.",
            iface_arg,
        )
        api.fail(
            f"Make sure you have stubs for all packages that "
            f"provide interfaces for {iface_name} class hierarchy.",
            iface_arg,
        )
        return

    # Record the implementation in per-class plugin metadata.
    md = self._get_metadata(class_info)
    if "implements" not in md:
        md["implements"] = []
    md["implements"].append(iface_type.fullname)
    self.log(f"Found implementation of "
             f"{iface_type.fullname}: {class_info.fullname}")

    # Make sure implementation is treated as a subtype of an interface. Pretend
    # there is a decorator for the class that will create a "type promotion".
    faketi = TypeInfo(SymbolTable(), iface_type.defn, iface_type.module_name)
    faketi._promote = Instance(iface_type, [])
    class_info.mro.append(faketi)
def process_nested_classes(self, outer_def: ClassDef) -> None:
    """Recursively register class definitions nested inside ``outer_def``.

    Every nested ``ClassDef`` receives a fresh ``TypeInfo`` whose fullname
    is qualified by the outer class, and is entered into the outer class's
    symbol table.  Import statements and ``if`` blocks are re-visited so
    that class definitions hidden behind them are processed too.
    """
    self.sem.enter_class(outer_def.info)
    for stmt in outer_def.defs.body:
        if isinstance(stmt, ClassDef):
            stmt.info = TypeInfo(SymbolTable(), stmt, self.sem.cur_mod_id)
            # Qualify the nested class's fullname with the outer class's,
            # when the outer class has one.
            if outer_def.fullname:
                stmt.info._fullname = outer_def.fullname + '.' + stmt.info.name()
            else:
                stmt.info._fullname = stmt.info.name()
            stmt.fullname = stmt.info._fullname
            outer_def.info.names[stmt.name] = SymbolTableNode(MDEF, stmt.info)
            self.process_nested_classes(stmt)
        elif isinstance(stmt, (ImportFrom, Import, ImportAll, IfStmt)):
            stmt.accept(self)
    self.sem.leave_class()
def decl_info_hook(ctx: DynamicClassDefContext) -> None:
    """Support dynamically defining declarative bases.

    For example:
        from sqlalchemy.ext.declarative import declarative_base

        Base = declarative_base()
    """
    cls_bases = []  # type: List[Instance]

    # Passing base classes as positional arguments is currently not handled.
    if 'cls' in ctx.call.arg_names:
        cls_arg = ctx.call.args[ctx.call.arg_names.index("cls")]
        candidates = list(cls_arg.items) if isinstance(cls_arg, TupleExpr) else [cls_arg]
        for candidate in candidates:
            if not (isinstance(candidate, RefExpr)
                    and isinstance(candidate.node, TypeInfo)):
                continue
            filled = fill_typevars_with_any(candidate.node)
            # TODO: Support tuple types?
            if isinstance(filled, Instance):
                cls_bases.append(filled)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type('builtins.object')
    info.bases = cls_bases or [obj]
    try:
        calculate_mro(info)
    except MroError:
        # Fall back to a plain object base so analysis can continue.
        ctx.api.fail("Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    set_declarative(info)

    # TODO: check what else is added.
    add_metadata_var(ctx.api, info)
def _apply_interface(self, impl: TypeInfo, iface: TypeInfo) -> None:
    """Record that ``impl`` implements ``iface`` and promote it accordingly.

    The implementation is stored in plugin metadata, and a fake MRO entry
    carrying a type promotion to the interface is appended — at most once
    per interface.
    """
    metadata = self._get_metadata(impl)
    if "implements" not in metadata:
        metadata["implements"] = []
    metadata["implements"].append(iface.fullname)
    self.log(f"Found implementation of "
             f"{iface.fullname}: {impl.fullname}")

    # Treat the implementation as a subtype of the interface by pretending
    # a decorator created a "type promotion"; guard against applying the
    # same promotion more than once.
    promotion = Instance(iface, [])
    already_promoted = any(entry._promote == promotion for entry in impl.mro)
    if not already_promoted:
        fake_info = TypeInfo(SymbolTable(), iface.defn, iface.module_name)
        fake_info._promote = promotion
        impl.mro.append(fake_info)
def make_type_info(self, name: str,
                   module_name: Optional[str] = None,
                   is_abstract: bool = False,
                   mro: Optional[List[TypeInfo]] = None,
                   bases: Optional[List[Instance]] = None,
                   typevars: Optional[List[str]] = None,
                   variances: Optional[List[int]] = None) -> TypeInfo:
    """Make a TypeInfo suitable for use in unit tests."""
    class_def = ClassDef(name, Block([]), None, [])
    class_def.fullname = name

    if module_name is None:
        # Derive the module from a dotted name; plain names go to __main__.
        module_name = name.rsplit('.', 1)[0] if '.' in name else '__main__'

    if typevars:
        # Type variable ids are 1-based; variances default to COVARIANT.
        tvs: List[TypeVarLikeType] = []
        for index, tv_name in enumerate(typevars, 1):
            variance = variances[index - 1] if variances else COVARIANT
            tvs.append(
                TypeVarType(tv_name, tv_name, index, [], self.o,
                            variance=variance))
        class_def.type_vars = tvs

    info = TypeInfo(SymbolTable(), class_def, module_name)
    if mro is None:
        mro = []
        if name != 'builtins.object':
            mro.append(self.oi)
    info.mro = [info] + mro
    if bases is None:
        # By default, assume that there is a single non-generic base.
        bases = [Instance(mro[0], [])] if mro else []
    info.bases = bases
    return info
def _basic_new_typeinfo(self, ctx: AnalyzeTypeContext, name: str,
                        basetype_or_fallback: Instance) -> TypeInfo:
    """
    Build a basic :class:`.TypeInfo`.

    This was basically lifted from ``mypy.semanal``.
    """
    class_def = ClassDef(name, Block([]))
    class_def.fullname = name

    info = TypeInfo(SymbolTable(), class_def, '')
    class_def.info = info

    base_mro = basetype_or_fallback.type.mro
    if not base_mro:
        # The base has no computed MRO yet; fall back to [base, object].
        base_mro = [basetype_or_fallback.type,
                    named_builtin_type(ctx, 'object').type]
    info.mro = [info] + base_mro
    info.bases = [basetype_or_fallback]
    return info
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base()
    function is encountered.

    Builds a class named after the assignment target, gives it the
    ``DeclarativeMeta`` metaclass, uses the ``cls=`` keyword argument (if
    present) as its sole base after scanning it as a mixin, and registers
    the result in the current symbol table.
    """
    cls = ClassDef(ctx.name, Block([]))
    cls.fullname = ctx.api.qualified_name(ctx.name)

    declarative_meta_sym: SymbolTableNode = ctx.api.modules[
        "sqlalchemy.orm.decl_api"].names["DeclarativeMeta"]
    declarative_meta_typeinfo: TypeInfo = declarative_meta_sym.node

    # Attach DeclarativeMeta as both the syntactic and the analyzed metaclass.
    declarative_meta_name: NameExpr = NameExpr("DeclarativeMeta")
    declarative_meta_name.kind = GDEF
    declarative_meta_name.fullname = "sqlalchemy.orm.decl_api.DeclarativeMeta"
    declarative_meta_name.node = declarative_meta_typeinfo
    cls.metaclass = declarative_meta_name

    declarative_meta_instance = Instance(declarative_meta_typeinfo, [])

    info = TypeInfo(SymbolTable(), cls, ctx.api.cur_mod_id)
    info.declared_metaclass = info.metaclass_type = declarative_meta_instance
    cls.info = info

    # BUGFIX: bind ``obj`` unconditionally. It was previously assigned only
    # in the ``else`` branch below, so an MRO failure for a base given via
    # ``cls=`` raised NameError inside the except handler.
    obj = ctx.api.builtin_type("builtins.object")

    cls_arg = util._get_callexpr_kwarg(ctx.call, "cls")
    if cls_arg is not None:
        # Scan the supplied mixin's assignments and use it as the base.
        decl_class._scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [obj]

    try:
        calculate_mro(info)
    except MroError:
        # Fall back to a plain object base so analysis can continue.
        util.fail(
            ctx.api, "Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def create_dynamic_class(
    ctx: DynamicClassDefContext,
    bases: List[Instance],
    *,
    name: Optional[str] = None,
    metaclass: Optional[str] = None,
    symbol_table: Optional[SymbolTable] = None,
) -> TypeInfo:
    """Create a dynamically defined class and register it.

    The class is named ``name`` (defaulting to the assignment target in
    ``ctx``), derives from ``bases`` (or plain ``object``), optionally gets
    a declared metaclass looked up by fullname, and is entered either into
    the current module's symbol table or into ``symbol_table`` if given.
    """
    if name is None:
        name = ctx.name

    class_def = ClassDef(name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    if metaclass is not None:
        meta_info = lookup_type_info(ctx.api, metaclass)
        if meta_info is not None:
            info.declared_metaclass = Instance(meta_info, [])
    class_def.info = info

    obj = ctx.api.builtin_type("builtins.object")
    info.bases = bases if bases else [obj]
    try:
        calculate_mro(info)
    except MroError:
        # Fall back to a plain object base so analysis can continue.
        ctx.api.fail("Not able to calculate MRO for dynamic class", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    node = SymbolTableNode(GDEF, info)
    if symbol_table is None:
        ctx.api.add_symbol_table_node(name, node)
    else:
        symbol_table[name] = node

    add_metadata_var(ctx.api, info)
    add_query_cls_var(ctx.api, info)
    return info
def create_ortho_diff_class(base1: TypeInfo, base2: TypeInfo,
                            api: SemanticAnalyzerPluginInterface,
                            call_ctx: Context) -> Tuple[str, SymbolTableNode]:
    """Synthesize a class inheriting from both ``base1`` and ``base2``.

    Returns the generated class name together with a module-level symbol
    table node for it.  On MRO failure the class degrades to a plain
    ``object`` subclass with ``fallback_to_any`` set.
    """
    # https://github.com/dropbox/sqlalchemy-stubs/blob/55470ceab8149db983411d5c094c9fe16343c58b/sqlmypy.py#L173-L216
    cls_name = get_ortho_diff_name(base1.defn, base2.defn)
    class_def = ClassDef(cls_name, Block([]))
    class_def.fullname = api.qualified_name(cls_name)

    info = TypeInfo(SymbolTable(), class_def, api.cur_mod_id)
    class_def.info = info
    obj = api.builtin_type('builtins.object')
    info.bases = [cast(Instance, fill_typevars(base)) for base in (base1, base2)]
    try:
        calculate_mro(info)
    except MroError:
        api.fail('Unable to calculate MRO for dynamic class', call_ctx)
        info.bases = [obj]
        info.fallback_to_any = True
    return cls_name, SymbolTableNode(GDEF, info)
def decl_info_hook(ctx):
    """Support dynamically defining declarative bases.

    For example:
        from sqlalchemy.ext.declarative import declarative_base

        Base = declarative_base()
    """
    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type('builtins.object')
    # The only base is plain ``object``, so the MRO is written by hand.
    info.mro = [info, obj.type]
    info.bases = [obj]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    set_declarative(info)

    # TODO: check what else is added.
    add_metadata_var(ctx, info)
def dyn_class_hook(self, ctx: DynamicClassDefContext) -> None:
    """Generate annotations from a JSON Schema.

    Loads the schema at the (literal) path passed as the call's single
    argument, builds a TypedDict type from it, and registers a dict-backed
    class carrying that TypedDict type under the assignment target's name.

    BUGFIX: the return annotation was ``-> TypedDictType`` although the
    function never returns a value; dynamic-class hooks return None.
    """
    schema_path_expr, = ctx.call.args
    schema_path = os.path.abspath(schema_path_expr.value)
    schema = self._load_schema(schema_path)
    make_type = TypeMaker(schema_path, schema)
    td_type = make_type(ctx)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    info.typeddict_type = td_type

    dict_type = named_builtin_type(ctx, 'dict')
    mro = dict_type.type.mro
    if not mro:
        # dict has no computed MRO yet; fall back to [dict, object].
        mro = [dict_type.type, named_builtin_type(ctx, 'object').type]
    class_def.info = info
    # BUGFIX: a TypeInfo's MRO must start with the type itself (mypy
    # invariant, cf. the other TypeInfo builders in this file); previously
    # the new type was missing from its own MRO.
    info.mro = [info] + mro
    info.bases = [dict_type]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def analyze_formset_factory(ctx: DynamicClassDefContext) -> None:
    """Create a stub formset class for (model)formset_factory() calls.

    The generated class derives from the appropriate reactivated stub base
    and is registered once per fullname; repeat analyses of the same name
    return early to avoid runaway iteration counts.
    """
    is_model_formset = ctx.call.callee.name == "modelformset_factory"  # type: ignore[attr-defined]
    class_lookup = ("reactivated.stubs.BaseModelFormSet"
                    if is_model_formset
                    else "reactivated.stubs.BaseFormSet")
    form_set_class = ctx.api.lookup_fully_qualified_or_none(class_lookup)
    assert form_set_class is not None

    formset_instance = Instance(
        form_set_class.node, []  # type: ignore[arg-type]
    )

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    if class_def.fullname in already_analyzed:
        # Fixes an issue with max iteration counts. In theory
        # add_symbol_table_node should already guard against this but it
        # doesn't.
        return

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type("builtins.object")
    info.bases = [formset_instance]
    try:
        calculate_mro(info)
    except MroError:
        # Fall back to a plain object base so analysis can continue.
        ctx.api.fail("Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    already_analyzed[class_def.fullname] = True
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def make_type_info(self, name: str,
                   is_abstract: bool = False,
                   mro: List[TypeInfo] = None,
                   bases: List[Instance] = None,
                   typevars: List[str] = None,
                   variances: List[int] = None) -> TypeInfo:
    """Make a TypeInfo suitable for use in unit tests."""
    class_def = ClassDef(name, Block([]), None, [])
    class_def.fullname = name

    if typevars:
        # Type variable ids are 1-based; variances default to COVARIANT.
        defs = []  # type: List[TypeVarDef]
        for index, tv_name in enumerate(typevars, 1):
            variance = variances[index - 1] if variances else COVARIANT
            defs.append(TypeVarDef(tv_name, index, None, self.o,
                                   variance=variance))
        class_def.type_vars = defs

    info = TypeInfo(SymbolTable(), class_def)
    if mro is None:
        mro = []
        if name != 'builtins.object':
            mro.append(self.oi)
    info.mro = [info] + mro
    if bases is None:
        # By default, assume that there is a single non-generic base.
        bases = [Instance(mro[0], [])] if mro else []
    info.bases = bases
    return info
def build_class_with_annotated_fields(api: TypeChecker, base: Type,
                                      fields: 'OrderedDict[str, Type]',
                                      name: str) -> Instance:
    """Build an Instance with `name` that contains the specified `fields`
    as attributes and extends `base`."""
    # Credit: largely copied/modified from TypeChecker.intersect_instance_callable
    # and NamedTupleAnalyzer.build_namedtuple_typeinfo.
    cur_module = cast(MypyFile, api.scope.stack[0])
    gen_name = gen_unique_name(name, cur_module.names)

    cdef = ClassDef(name, Block([]))
    cdef.fullname = cur_module.fullname() + '.' + gen_name
    info = TypeInfo(SymbolTable(), cdef, cur_module.fullname())
    cdef.info = info
    info.bases = [base]

    # Attach each field as a property-style member of the new class.
    for field_name, field_type in fields.items():
        member = Var(field_name, field_type)
        member.info = info
        member.is_initialized_in_class = False
        member.is_property = True
        member._fullname = '%s.%s' % (info.fullname(), member.name())
        info.names[member.name()] = SymbolTableNode(MDEF, member)

    calculate_mro(info)
    info.calculate_metaclass_type()
    cur_module.names[gen_name] = SymbolTableNode(GDEF, info,
                                                 plugin_generated=True)
    return Instance(info, [])
def visit_instance(self, t: Instance) -> Type:
    """Resolve ``pfun.Intersection[...]`` instances to a concrete type.

    Non-Intersection instances are delegated to the superclass visitor.
    For Intersection: Any-absorbing, object-collapsing, and validation
    rules are applied first; if exactly one distinct base remains it is
    returned directly, otherwise a synthetic protocol TypeInfo combining
    all bases is built.
    """
    if 'pfun.Intersection' == t.type.fullname:
        args = [get_proper_type(arg) for arg in t.args]
        # Any() absorbs the whole intersection.
        if any(isinstance(arg, AnyType) for arg in args):
            return AnyType(TypeOfAny.special_form)
        # Intersection of only object types collapses to object.
        if all(
                hasattr(arg, 'type') and arg.type.fullname == 'builtins.object'
                for arg in args):
            return args[0]
        # Predicates for validating arguments; hasattr-guarded because
        # some proper types (e.g. type variables) carry no ``type`` attr.
        is_typevar = lambda arg: isinstance(arg, TypeVarType)
        has_type_attr = lambda arg: hasattr(arg, 'type')
        is_protocol = lambda arg: arg.type.is_protocol
        is_object = lambda arg: arg.type.fullname == 'builtins.object'
        if not all(
                is_typevar(arg) or has_type_attr(arg) and
                (is_protocol(arg) or is_object(arg)) for arg in args):
            s = str(t)
            # Wording differs for inferred vs. explicitly written types.
            if self.inferred:
                msg = (f'All arguments to "Intersection" '
                       f'must be protocols but inferred "{s}"')
            else:
                msg = (f'All arguments to "Intersection" '
                       f'must be protocols, but got "{s}"')
            self.api.msg.fail(msg, self.context)
            return AnyType(TypeOfAny.special_form)
        # Leave generic intersections untouched until type vars are solved.
        if not has_no_typevars(t):
            return t
        # Collect the deduplicated, flattened bases of all arguments.
        bases = []
        for arg in args:
            if arg in bases:
                continue
            bases.extend(self.get_bases(arg, []))
        if len(bases) == 1:
            return bases[0]
        bases_repr = ', '.join([repr(base) for base in bases])
        name = f'Intersection[{bases_repr}]'
        defn = ClassDef(name, Block([]), [], [
            NameExpr(arg.name) if isinstance(arg, TypeVarType) else NameExpr(
                arg.type.fullname) for arg in args
        ], None, [])
        defn.fullname = f'pfun.{name}'
        info = TypeInfo({}, defn, '')
        info.is_protocol = True
        info.is_abstract = True
        info.bases = bases
        # The combined protocol is abstract in every attribute any base
        # declares abstract.
        attrs = []
        for base in bases:
            if isinstance(base, TypeVarType):
                continue
            attrs.extend(base.type.abstract_attributes)
        info.abstract_attributes = attrs
        try:
            calculate_mro(info)
        except MroError:
            self.api.msg.fail(
                'Cannot determine consistent method resolution '
                'order (MRO) for "%s"' % defn.fullname, self.context)
            return AnyType(TypeOfAny.special_form)
        return Instance(info, [])
    return super().visit_instance(t)