def analyze_namedtuple_classdef(self, defn: ClassDef) -> Tuple[bool, Optional[TypeInfo]]:
    """Analyze if given class definition can be a named tuple definition.

    Return a tuple where first item indicates whether this can possibly be a
    named tuple, and the second item is the corresponding TypeInfo (may be
    None if not ready and should be deferred).
    """
    for base in defn.base_type_exprs:
        if not isinstance(base, RefExpr):
            continue
        self.api.accept(base)
        if base.fullname != 'typing.NamedTuple':
            continue
        result = self.check_namedtuple_classdef(defn)
        if result is None:
            # A valid named tuple, but some types are still incomplete.
            return True, None
        items, types, default_items = result
        info = self.build_namedtuple_typeinfo(
            defn.name, items, types, default_items, defn.line)
        defn.info = info
        analyzed = NamedTupleExpr(info, is_typed=True)
        analyzed.line = defn.line
        analyzed.column = defn.column
        defn.analyzed = analyzed
        # All done: this is a valid named tuple with all types known.
        return True, info
    # This can't be a valid named tuple.
    return False, None
def add_new_class_for_module(
    module: MypyFile,
    name: str,
    bases: List[Instance],
    fields: Optional[Dict[str, MypyType]] = None,
    no_serialize: bool = False,
) -> TypeInfo:
    """Create a fresh class with the given bases/fields and register it in *module*.

    The class gets a module-unique name derived from *name*; the new TypeInfo
    is returned.
    """
    unique_name = checker.gen_unique_name(name, module.names)

    # Build the ClassDef node for the synthetic class.
    classdef = ClassDef(unique_name, Block([]))
    classdef.fullname = module.fullname + "." + unique_name

    # Build the TypeInfo and compute its MRO/metaclass.
    typeinfo = TypeInfo(SymbolTable(), classdef, module.fullname)
    typeinfo.bases = bases
    calculate_mro(typeinfo)
    typeinfo.calculate_metaclass_type()

    # Attach each requested field as a member Var.
    for field_name, field_type in (fields or {}).items():
        var = Var(field_name, type=field_type)
        var.info = typeinfo
        var._fullname = typeinfo.fullname + "." + field_name
        typeinfo.names[field_name] = SymbolTableNode(
            MDEF, var, plugin_generated=True, no_serialize=no_serialize)

    classdef.info = typeinfo
    module.names[unique_name] = SymbolTableNode(
        GDEF, typeinfo, plugin_generated=True, no_serialize=no_serialize)
    return typeinfo
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base() function
    is encountered."""
    classdef = ClassDef(ctx.name, Block([]))
    classdef.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), classdef, ctx.api.cur_mod_id)
    classdef.info = info
    _make_declarative_meta(ctx.api, classdef)

    cls_arg = util._get_callexpr_kwarg(ctx.call, "cls")
    if cls_arg is not None:
        # declarative_base(cls=...): scan the mixin and use it as the base.
        decl_class._scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [ctx.api.builtin_type("builtins.object")]

    try:
        calculate_mro(info)
    except MroError:
        util.fail(ctx.api,
                  "Not able to calculate MRO for declarative base", ctx.call)
        # Fall back to a plain object base so analysis can continue.
        fallback = ctx.api.builtin_type("builtins.object")
        info.bases = [fallback]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def build_class_with_annotated_fields(api: 'TypeChecker', base: Type,
                                      fields: 'OrderedDict[str, Type]',
                                      name: str) -> Instance:
    """Build an Instance with `name` that contains the specified `fields` as
    attributes and extends `base`."""
    # Credit: This code is largely copied/modified from
    # TypeChecker.intersect_instance_callable and
    # NamedTupleAnalyzer.build_namedtuple_typeinfo
    from mypy.checker import gen_unique_name

    cur_module = cast(MypyFile, api.scope.stack[0])
    gen_name = gen_unique_name(name, cur_module.names)

    cdef = ClassDef(name, Block([]))
    cdef.fullname = cur_module.fullname() + '.' + gen_name
    info = TypeInfo(SymbolTable(), cdef, cur_module.fullname())
    cdef.info = info
    info.bases = [base]

    # Attach every field as a property-style member Var.
    for item, typ in fields.items():
        var = Var(item, typ)
        var.info = info
        var.is_initialized_in_class = False
        var.is_property = True
        var._fullname = '%s.%s' % (info.fullname(), var.name())
        info.names[var.name()] = SymbolTableNode(MDEF, var)

    calculate_mro(info)
    info.calculate_metaclass_type()
    cur_module.names[gen_name] = SymbolTableNode(GDEF, info, plugin_generated=True)
    return Instance(info, [])
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base() function
    is encountered."""
    _add_globals(ctx)

    classdef = ClassDef(ctx.name, Block([]))
    classdef.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), classdef, ctx.api.cur_mod_id)
    classdef.info = info
    _set_declarative_metaclass(ctx.api, classdef)

    cls_arg = util.get_callexpr_kwarg(ctx.call, "cls", expr_types=(NameExpr,))
    if cls_arg is not None and isinstance(cls_arg.node, TypeInfo):
        # declarative_base(cls=...): treat the given class as a mixin base.
        util.set_is_base(cls_arg.node)
        decl_class.scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [ctx.api.named_type(names.NAMED_TYPE_BUILTINS_OBJECT)]

    try:
        calculate_mro(info)
    except MroError:
        util.fail(ctx.api,
                  "Not able to calculate MRO for declarative base", ctx.call)
        # Fall back to a plain object base so analysis can continue.
        fallback = ctx.api.named_type(names.NAMED_TYPE_BUILTINS_OBJECT)
        info.bases = [fallback]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    util.set_is_base(info)
def analyze_namedtuple_classdef(self, defn: ClassDef, is_stub_file: bool
                                ) -> Tuple[bool, Optional[TypeInfo]]:
    """Analyze if given class definition can be a named tuple definition.

    Return a tuple where first item indicates whether this can possibly be a
    named tuple, and the second item is the corresponding TypeInfo (may be None
    if not ready and should be deferred).
    """
    for base_expr in defn.base_type_exprs:
        if isinstance(base_expr, RefExpr):
            # Resolve the base expression before inspecting its fullname.
            self.api.accept(base_expr)
            if base_expr.fullname == 'typing.NamedTuple':
                result = self.check_namedtuple_classdef(defn, is_stub_file)
                if result is None:
                    # This is a valid named tuple, but some types are incomplete.
                    return True, None
                items, types, default_items = result
                info = self.build_namedtuple_typeinfo(
                    defn.name, items, types, default_items, defn.line)
                defn.info = info
                # Record the synthesized NamedTuple expression on the ClassDef,
                # preserving the original source position.
                defn.analyzed = NamedTupleExpr(info, is_typed=True)
                defn.analyzed.line = defn.line
                defn.analyzed.column = defn.column
                # All done: this is a valid named tuple with all types known.
                return True, info
    # This can't be a valid named tuple.
    return False, None
def add_new_class_for_module(module: MypyFile, name: str,
                             bases: List[Instance],
                             fields: 'OrderedDict[str, MypyType]') -> TypeInfo:
    """Create a fresh class in *module* with the given bases and fields.

    The class gets a module-unique name derived from *name*; its TypeInfo is
    registered in the module symbol table and returned.
    """
    unique_name = checker.gen_unique_name(name, module.names)

    # make new class expression
    classdef = ClassDef(unique_name, Block([]))
    classdef.fullname = module.fullname() + '.' + unique_name

    # make new TypeInfo
    new_typeinfo = TypeInfo(SymbolTable(), classdef, module.fullname())
    new_typeinfo.bases = bases
    calculate_mro(new_typeinfo)
    new_typeinfo.calculate_metaclass_type()

    # add fields as property-style member Vars
    for item, typ in fields.items():
        var = Var(item, typ)
        var.info = new_typeinfo
        var.is_initialized_in_class = False
        var.is_property = True
        var._fullname = new_typeinfo.fullname() + '.' + var.name()
        new_typeinfo.names[var.name()] = SymbolTableNode(MDEF, var)

    classdef.info = new_typeinfo
    module.names[unique_name] = SymbolTableNode(
        GDEF, new_typeinfo, plugin_generated=True)
    return new_typeinfo
def dyn_class_hook(self, ctx: DynamicClassDefContext) -> None:
    """Generate annotations from a JSON Schema.

    Registers a synthetic class for the dynamically created type under
    ``ctx.name``. The declared return type was ``TypedDictType``, but the
    function never returns a value: mypy dynamic-class hooks communicate by
    mutating the symbol table, so the annotation is corrected to ``None``.
    """
    schema_path = ctx.call.args[0]
    if len(ctx.call.args) > 1:
        schema_ref = ctx.call.args[1].value
    else:
        # Default to the schema root when no reference is given.
        schema_ref = '#/'
    try:
        schema = self.resolve_name(schema_path.value)
    except (ImportError, AttributeError):
        # Not an importable name: treat the argument as a filesystem path.
        # NOTE(review): this rebinds schema_path from an expression node to a
        # str, so TypeMaker receives different types on the two paths —
        # presumably TypeMaker only uses it as an identifier; confirm.
        schema_path = os.path.abspath(schema_path.value)
        schema = self._load_schema(schema_path)
    make_type = TypeMaker(schema_path, schema)
    td_type = make_type(ctx, schema_ref)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    if isinstance(td_type, TypedDictType):
        info.typeddict_type = td_type
        base_type = named_builtin_type(ctx, 'dict')
    else:
        base_type = td_type
    mro = base_type.type.mro
    if not mro:
        # Incomplete base: synthesize a minimal MRO ending in object.
        mro = [base_type.type, named_builtin_type(ctx, 'object').type]
    class_def.info = info
    info.mro = mro
    info.bases = [base_type]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def visit_class_def(self, node: ClassDef) -> None:
    """Fix up cross-references inside a class definition in place.

    Rewrites the class body statements, remaps the TypeInfo, and processes
    type variables before recursing into the rest of the node.
    """
    # TODO additional things?
    node.defs.body = self.replace_statements(node.defs.body)
    # Remap the TypeInfo to its current identity.
    node.info = self.fixup(node.info)
    for tv in node.type_vars:
        self.process_type_var_def(tv)
    self.process_type_info(node.info)
    super().visit_class_def(node)
def visit_class_def(self, node: ClassDef) -> ClassDef:
    """Return a copy of a class definition node.

    The TypeInfo and fullname are shared with the original; the body, base
    expressions, and decorators are transformed copies.
    """
    copied = ClassDef(node.name,
                      self.block(node.defs),
                      node.type_vars,
                      self.expressions(node.base_type_exprs),
                      node.metaclass)
    copied.fullname = node.fullname
    copied.info = node.info
    copied.decorators = [self.expr(dec) for dec in node.decorators]
    return copied
def visit_class_def(self, node: ClassDef) -> Node:
    """Return a copy of a class definition node.

    The TypeInfo, fullname, and builtin-class flag are carried over; body,
    base types, and decorators are transformed copies.
    """
    copied = ClassDef(node.name,
                      self.block(node.defs),
                      node.type_vars,
                      self.types(node.base_types),
                      node.metaclass)
    copied.fullname = node.fullname
    copied.info = node.info
    copied.decorators = [dec.accept(self) for dec in node.decorators]
    copied.is_builtinclass = node.is_builtinclass
    return copied
def visit_class_def(self, node: ClassDef) -> Node:
    """Return a copy of a class definition node.

    The TypeInfo, fullname, and builtin-class flag are carried over; body,
    base type expressions, and decorators are transformed copies.
    """
    copied = ClassDef(node.name,
                      self.block(node.defs),
                      node.type_vars,
                      self.nodes(node.base_type_exprs),
                      node.metaclass)
    copied.fullname = node.fullname
    copied.info = node.info
    copied.decorators = [dec.accept(self) for dec in node.decorators]
    copied.is_builtinclass = node.is_builtinclass
    return copied
def add_info_hook(ctx) -> None:
    """Register a synthetic class (deriving from object) under ctx.name."""
    # Create an empty ClassDef/TypeInfo pair for the dynamic class.
    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type('builtins.object')
    # Minimal MRO: the new class followed by object.
    info.mro = [info, obj.type]
    info.bases = [obj]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    # Remember the generated class so later hooks can recognize it as a base.
    DECL_BASES.add(class_def.fullname)
def add_info_hook(ctx) -> None:
    """Register a synthetic class (deriving from object) under ctx.name,
    tagged with the 'magic' metadata flag."""
    defn = ClassDef(ctx.name, Block([]))
    defn.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), defn, ctx.api.cur_mod_id)
    defn.info = info
    obj = ctx.api.named_type('builtins.object')
    info.bases = [obj]
    # Minimal MRO: the new class followed by object.
    info.mro = [info, obj.type]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    info.metadata['magic'] = True
def add_info_hook(ctx) -> None:
    """Register a synthetic class (deriving from object) under ctx.name,
    tagged with the 'magic' metadata flag."""
    defn = ClassDef(ctx.name, Block([]))
    defn.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), defn, ctx.api.cur_mod_id)
    defn.info = info
    obj = ctx.api.builtin_type('builtins.object')
    info.bases = [obj]
    # Minimal MRO: the new class followed by object.
    info.mro = [info, obj.type]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    info.metadata['magic'] = True
def visit_class_def(self, node: ClassDef) -> ClassDef:
    """Return a copy of a class definition node.

    TypeInfo and fullname are shared with the original; body, base
    expressions, and decorators are transformed copies.
    """
    copied = ClassDef(node.name,
                      self.block(node.defs),
                      node.type_vars,
                      self.expressions(node.base_type_exprs),
                      node.metaclass)
    copied.fullname = node.fullname
    copied.info = node.info
    copied.decorators = [self.expr(dec) for dec in node.decorators]
    return copied
def add_info_hook(ctx: DynamicClassDefContext) -> None:
    """Register a synthetic class deriving from the queryset class named by
    the call's first argument.

    Bug fix: ``lookup_fully_qualified_or_none`` can return None (symbol not
    yet analyzed or missing), and the original code dereferenced ``.node``
    unconditionally, raising AttributeError. We now bail out gracefully in
    that case so analysis can continue.
    """
    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info

    # Assumes the first call argument is a reference expression with a
    # fullname (e.g. a class reference) — TODO confirm against callers.
    queryset_type_fullname = ctx.call.args[0].fullname
    queryset_sym = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname)
    if queryset_sym is None or not isinstance(queryset_sym.node, TypeInfo):
        # Symbol unavailable: skip generating the class instead of crashing.
        return
    queryset_info = queryset_sym.node

    obj = ctx.api.named_type('builtins.object')
    info.mro = [info, queryset_info, obj.type]
    info.bases = [Instance(queryset_info, [])]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def visit_class_def(self, cdef: ClassDef) -> None:
    """First-pass processing of a class definition.

    Creates the TypeInfo for the class and, for module-level classes, binds
    it in the global symbol table. Function-local classes are skipped here.
    """
    kind = self.kind_by_scope()
    if kind == LDEF:
        # Classes defined inside functions are not handled in this pass.
        return
    elif kind == GDEF:
        self.sem.check_no_global(cdef.name, cdef)
    cdef.fullname = self.sem.qualified_name(cdef.name)
    info = TypeInfo(SymbolTable(), cdef, self.sem.cur_mod_id)
    info.set_line(cdef.line, cdef.column)
    cdef.info = info
    if kind == GDEF:
        self.sem.globals[cdef.name] = SymbolTableNode(kind, info)
    # Recurse into classes nested within this class body.
    self.process_nested_classes(cdef)
def visit_class_def(self, node: ClassDef) -> Node:
    """Return a copy of a class definition node.

    TypeInfo, fullname, and the builtin-class flag are carried over; the
    body, base type expressions, base types, and decorators are transformed
    copies.
    """
    copied = ClassDef(node.name,
                      self.block(node.defs),
                      node.type_vars,
                      self.nodes(node.base_type_exprs),
                      node.metaclass)
    copied.fullname = node.fullname
    copied.info = node.info
    copied.base_types = [cast(Instance, self.type(base))
                         for base in node.base_types]
    copied.decorators = [dec.accept(self) for dec in node.decorators]
    copied.is_builtinclass = node.is_builtinclass
    return copied
def visit_class_def(self, node: ClassDef) -> None:
    """Fix up cross-references inside a class definition in place.

    Remaps the TypeInfo to its new identity (resetting derived state so it
    can be recomputed), rewrites the body, and processes type variables and
    the TypeInfo's contents.
    """
    # TODO additional things?
    node.info = self.fixup_and_reset_typeinfo(node.info)
    node.defs.body = self.replace_statements(node.defs.body)
    info = node.info
    for tv in node.type_vars:
        self.process_type_var_def(tv)
    if info:
        if info.is_named_tuple:
            # Named tuples carry synthetic (generated) members that need
            # special processing.
            self.process_synthetic_type_info(info)
        else:
            self.process_type_info(info)
    super().visit_class_def(node)
def make_fake_register_class_instance(api: CheckerPluginInterface,
                                      type_args: Sequence[Type]) -> Instance:
    """Build an instance of a synthetic callable class living in the
    ``functools`` module (named REGISTER_RETURN_CLASS), parameterized with
    *type_args*."""
    defn = ClassDef(REGISTER_RETURN_CLASS, Block([]))
    defn.fullname = f'functools.{REGISTER_RETURN_CLASS}'

    info = TypeInfo(SymbolTable(), defn, "functools")
    defn.info = info
    object_info = api.named_generic_type('builtins.object', []).type
    info.bases = [Instance(object_info, [])]
    info.mro = [info, object_info]

    # Make the fake class callable: __call__(name) -> None.
    name_arg = Argument(Var('name'),
                        AnyType(TypeOfAny.implementation_artifact),
                        None, ARG_POS)
    add_method_to_class(api, defn, '__call__', [name_arg], NoneType())
    return Instance(info, type_args)
def decl_info_hook(ctx: DynamicClassDefContext) -> None:
    """Support dynamically defining declarative bases.

    For example:
        from sqlalchemy.ext.declarative import declarative_base

        Base = declarative_base()
    """
    cls_bases = []  # type: List[Instance]

    # Passing base classes as positional arguments is currently not handled.
    if 'cls' in ctx.call.arg_names:
        cls_expr = ctx.call.args[ctx.call.arg_names.index("cls")]
        exprs = list(cls_expr.items) if isinstance(cls_expr, TupleExpr) else [cls_expr]
        for expr in exprs:
            if isinstance(expr, RefExpr) and isinstance(expr.node, TypeInfo):
                base = fill_typevars_with_any(expr.node)
                # TODO: Support tuple types?
                if isinstance(base, Instance):
                    cls_bases.append(base)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type('builtins.object')
    info.bases = cls_bases or [obj]
    try:
        calculate_mro(info)
    except MroError:
        ctx.api.fail("Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    set_declarative(info)

    # TODO: check what else is added.
    add_metadata_var(ctx.api, info)
def _basic_new_typeinfo(self, ctx: AnalyzeTypeContext, name: str,
                        basetype_or_fallback: Instance) -> TypeInfo:
    """
    Build a basic :class:`.TypeInfo`.

    This was basically lifted from ``mypy.semanal``.
    """
    classdef = ClassDef(name, Block([]))
    classdef.fullname = name

    result = TypeInfo(SymbolTable(), classdef, '')
    classdef.info = result
    base_mro = basetype_or_fallback.type.mro
    if not base_mro:
        # Incomplete base: fall back to a minimal MRO ending in object.
        base_mro = [basetype_or_fallback.type,
                    named_builtin_type(ctx, 'object').type]
    result.mro = [result] + base_mro
    result.bases = [basetype_or_fallback]
    return result
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base() function
    is encountered.

    The generated class gets DeclarativeMeta as its declared metaclass.
    """
    cls = ClassDef(ctx.name, Block([]))
    cls.fullname = ctx.api.qualified_name(ctx.name)

    # Wire up sqlalchemy.orm.decl_api.DeclarativeMeta as the metaclass of
    # the generated Base class.
    declarative_meta_sym: SymbolTableNode = ctx.api.modules[
        "sqlalchemy.orm.decl_api"].names["DeclarativeMeta"]
    declarative_meta_typeinfo: TypeInfo = declarative_meta_sym.node
    declarative_meta_name: NameExpr = NameExpr("DeclarativeMeta")
    declarative_meta_name.kind = GDEF
    declarative_meta_name.fullname = "sqlalchemy.orm.decl_api.DeclarativeMeta"
    declarative_meta_name.node = declarative_meta_typeinfo
    cls.metaclass = declarative_meta_name

    declarative_meta_instance = Instance(declarative_meta_typeinfo, [])

    info = TypeInfo(SymbolTable(), cls, ctx.api.cur_mod_id)
    info.declared_metaclass = info.metaclass_type = declarative_meta_instance
    cls.info = info

    cls_arg = util._get_callexpr_kwarg(ctx.call, "cls")
    if cls_arg is not None:
        # declarative_base(cls=...): scan the mixin and use it as the base.
        decl_class._scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [ctx.api.builtin_type("builtins.object")]

    try:
        calculate_mro(info)
    except MroError:
        util.fail(ctx.api,
                  "Not able to calculate MRO for declarative base", ctx.call)
        # Bug fix: `obj` was previously bound only on the no-cls branch, so
        # an MRO failure after declarative_base(cls=...) raised NameError
        # here. Look up the fallback base explicitly.
        obj = ctx.api.builtin_type("builtins.object")
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def create_dynamic_class(
    ctx: DynamicClassDefContext,
    bases: List[Instance],
    *,
    name: Optional[str] = None,
    metaclass: Optional[str] = None,
    symbol_table: Optional[SymbolTable] = None,
) -> TypeInfo:
    """Create and register a synthetic class with the given bases.

    The class is registered under *name* (defaults to ctx.name) either in the
    current scope or in *symbol_table* when one is provided; the new TypeInfo
    is returned.
    """
    cls_name = ctx.name if name is None else name

    class_def = ClassDef(cls_name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    if metaclass is not None:
        metaclass_info = lookup_type_info(ctx.api, metaclass)
        if metaclass_info is not None:
            info.declared_metaclass = Instance(metaclass_info, [])
    class_def.info = info

    fallback = ctx.api.builtin_type("builtins.object")
    info.bases = bases or [fallback]
    try:
        calculate_mro(info)
    except MroError:
        ctx.api.fail("Not able to calculate MRO for dynamic class", ctx.call)
        info.bases = [fallback]
        info.fallback_to_any = True

    node = SymbolTableNode(GDEF, info)
    if symbol_table is None:
        ctx.api.add_symbol_table_node(cls_name, node)
    else:
        symbol_table[cls_name] = node

    add_metadata_var(ctx.api, info)
    add_query_cls_var(ctx.api, info)
    return info
def analyze_namedtuple_classdef(self, defn: ClassDef) -> Optional[TypeInfo]:
    """Analyze a class definition that may be a typing.NamedTuple subclass.

    Returns the newly built TypeInfo when the class is a named tuple
    definition, otherwise None.
    """
    # special case for NamedTuple
    for base_expr in defn.base_type_exprs:
        if isinstance(base_expr, RefExpr):
            # Resolve the base expression before inspecting its fullname.
            self.api.accept(base_expr)
            if base_expr.fullname == 'typing.NamedTuple':
                node = self.api.lookup(defn.name, defn)
                if node is not None:
                    node.kind = GDEF  # TODO in process_namedtuple_definition also applies here
                    items, types, default_items = self.check_namedtuple_classdef(defn)
                    info = self.build_namedtuple_typeinfo(
                        defn.name, items, types, default_items)
                    node.node = info
                    # Point the stale TypeInfo at its replacement and swap it
                    # on the ClassDef.
                    defn.info.replaced = info
                    defn.info = info
                    # Record the synthesized NamedTuple expression, preserving
                    # the original source position.
                    defn.analyzed = NamedTupleExpr(info, is_typed=True)
                    defn.analyzed.line = defn.line
                    defn.analyzed.column = defn.column
                    return info
    return None
def create_ortho_diff_class(base1: TypeInfo, base2: TypeInfo,
                            api: SemanticAnalyzerPluginInterface,
                            call_ctx: Context) -> Tuple[str, SymbolTableNode]:
    """Build a synthetic class deriving from both *base1* and *base2*.

    Returns the generated class name together with a symbol-table node for
    the new TypeInfo.
    """
    # https://github.com/dropbox/sqlalchemy-stubs/blob/55470ceab8149db983411d5c094c9fe16343c58b/sqlmypy.py#L173-L216
    cls_name = get_ortho_diff_name(base1.defn, base2.defn)
    class_def = ClassDef(cls_name, Block([]))
    class_def.fullname = api.qualified_name(cls_name)

    info = TypeInfo(SymbolTable(), class_def, api.cur_mod_id)
    class_def.info = info
    fallback = api.builtin_type('builtins.object')
    info.bases = [cast(Instance, fill_typevars(base)) for base in (base1, base2)]
    try:
        calculate_mro(info)
    except MroError:
        api.fail('Unable to calculate MRO for dynamic class', call_ctx)
        info.bases = [fallback]
        info.fallback_to_any = True
    return cls_name, SymbolTableNode(GDEF, info)
def decl_info_hook(ctx):
    """Support dynamically defining declarative bases.

    For example:
        from sqlalchemy.ext.declarative import declarative_base

        Base = declarative_base()
    """
    defn = ClassDef(ctx.name, Block([]))
    defn.fullname = ctx.api.qualified_name(ctx.name)

    info = TypeInfo(SymbolTable(), defn, ctx.api.cur_mod_id)
    defn.info = info
    obj = ctx.api.builtin_type('builtins.object')
    info.bases = [obj]
    # Minimal MRO: the new class followed by object.
    info.mro = [info, obj.type]

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    set_declarative(info)

    # TODO: check what else is added.
    add_metadata_var(ctx, info)
def visit_class_def(self, node: ClassDef) -> None: """Strip class body and type info, but don't strip methods.""" # We need to save the implicitly defined instance variables, # i.e. those defined as attributes on self. Otherwise, they would # be lost if we only reprocess top-levels (this kills TypeInfos) # but not the methods that defined those variables. if not self.recurse_into_functions: self.save_implicit_attributes(node) # We need to delete any entries that were generated by plugins, # since they will get regenerated. to_delete = {v.node for v in node.info.names.values() if v.plugin_generated} node.type_vars = [] node.base_type_exprs.extend(node.removed_base_type_exprs) node.removed_base_type_exprs = [] node.defs.body = [s for s in node.defs.body if s not in to_delete] with self.enter_class(node.info): super().visit_class_def(node) TypeState.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. node.info = CLASSDEF_NO_INFO
def visit_class_def(self, node: ClassDef) -> None: """Strip class body and type info, but don't strip methods.""" # We need to save the implicitly defined instance variables, # i.e. those defined as attributes on self. Otherwise, they would # be lost if we only reprocess top-levels (this kills TypeInfos) # but not the methods that defined those variables. if not self.recurse_into_functions: self.prepare_implicit_var_patches(node) # We need to delete any entries that were generated by plugins, # since they will get regenerated. to_delete = {v.node for v in node.info.names.values() if v.plugin_generated} node.type_vars = [] node.base_type_exprs.extend(node.removed_base_type_exprs) node.removed_base_type_exprs = [] node.defs.body = [s for s in node.defs.body if s not in to_delete] with self.enter_class(node.info): super().visit_class_def(node) TypeState.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. node.info = CLASSDEF_NO_INFO
def dyn_class_hook(self, ctx: DynamicClassDefContext) -> None:
    """Generate annotations from a JSON Schema.

    Registers a synthetic TypedDict-backed class under ``ctx.name``. The
    declared return type was ``TypedDictType``, but the function never
    returns a value: mypy dynamic-class hooks communicate by mutating the
    symbol table, so the annotation is corrected to ``None``.
    """
    # Expect exactly one argument: the schema path expression.
    schema_path, = ctx.call.args
    schema_path = os.path.abspath(schema_path.value)
    schema = self._load_schema(schema_path)
    make_type = TypeMaker(schema_path, schema)
    td_type = make_type(ctx)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    info.typeddict_type = td_type

    dict_type = named_builtin_type(ctx, 'dict')
    mro = dict_type.type.mro
    if not mro:
        # Incomplete dict TypeInfo: synthesize a minimal MRO ending in object.
        mro = [dict_type.type, named_builtin_type(ctx, 'object').type]
    class_def.info = info
    info.mro = mro
    info.bases = [dict_type]
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def analyze_formset_factory(ctx: DynamicClassDefContext) -> None:
    """Synthesize a class for a formset_factory()/modelformset_factory() call,
    based on the matching reactivated stub formset class."""
    is_model_formset = (
        ctx.call.callee.name == "modelformset_factory"  # type: ignore[attr-defined]
    )
    class_lookup = ("reactivated.stubs.BaseModelFormSet"
                    if is_model_formset
                    else "reactivated.stubs.BaseFormSet")
    form_set_class = ctx.api.lookup_fully_qualified_or_none(class_lookup)
    assert form_set_class is not None
    form_set_class_instance = Instance(
        form_set_class.node, []  # type: ignore[arg-type]
    )
    cls_bases: List[Instance] = [form_set_class_instance]

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    if class_def.fullname in already_analyzed:
        # Fixes an issue with max iteration counts.
        # In theory add_symbol_table_node should already guard against this but
        # it doesn't.
        return

    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    fallback = ctx.api.builtin_type("builtins.object")
    info.bases = cls_bases or [fallback]
    try:
        calculate_mro(info)
    except MroError:
        ctx.api.fail("Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [fallback]
        info.fallback_to_any = True

    already_analyzed[class_def.fullname] = True
    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def analyze_typeddict_classdef(self, defn: ClassDef) -> bool:
    """Analyze a class definition that may be a TypedDict definition.

    Handles both a fresh TypedDict (single mypy_extensions.TypedDict base)
    and extending/merging one or more existing TypedDicts. Returns True when
    the class was processed as a TypedDict, False otherwise.
    """
    # special case for TypedDict
    possible = False
    for base_expr in defn.base_type_exprs:
        if isinstance(base_expr, RefExpr):
            # Resolve the base expression before inspecting it.
            self.api.accept(base_expr)
            if (base_expr.fullname == 'mypy_extensions.TypedDict' or
                    self.is_typeddict(base_expr)):
                possible = True
    if possible:
        node = self.api.lookup(defn.name, defn)
        if node is not None:
            node.kind = GDEF  # TODO in process_namedtuple_definition also applies here
            if (len(defn.base_type_exprs) == 1 and
                    isinstance(defn.base_type_exprs[0], RefExpr) and
                    defn.base_type_exprs[0].fullname == 'mypy_extensions.TypedDict'):
                # Building a new TypedDict
                fields, types, required_keys = self.check_typeddict_classdef(defn)
                info = self.build_typeddict_typeinfo(defn.name, fields, types,
                                                     required_keys)
                # Point the stale TypeInfo at its replacement and rebind.
                defn.info.replaced = info
                defn.info = info
                node.node = info
                defn.analyzed = TypedDictExpr(info)
                defn.analyzed.line = defn.line
                defn.analyzed.column = defn.column
                return True
            # Extending/merging existing TypedDicts
            if any(not isinstance(expr, RefExpr) or
                   expr.fullname != 'mypy_extensions.TypedDict' and
                   not self.is_typeddict(expr) for expr in defn.base_type_exprs):
                self.fail("All bases of a new TypedDict must be TypedDict types",
                          defn)
            typeddict_bases = list(filter(self.is_typeddict, defn.base_type_exprs))
            # Accumulate fields from every TypedDict base, in base order.
            keys = []  # type: List[str]
            types = []
            required_keys = set()
            for base in typeddict_bases:
                assert isinstance(base, RefExpr)
                assert isinstance(base.node, TypeInfo)
                assert isinstance(base.node.typeddict_type, TypedDictType)
                base_typed_dict = base.node.typeddict_type
                base_items = base_typed_dict.items
                valid_items = base_items.copy()
                for key in base_items:
                    if key in keys:
                        # Duplicate field across bases: report and drop it.
                        self.fail('Cannot overwrite TypedDict field "{}" while merging'
                                  .format(key), defn)
                        valid_items.pop(key)
                keys.extend(valid_items.keys())
                types.extend(valid_items.values())
                required_keys.update(base_typed_dict.required_keys)
            # Add the fields declared in this class body on top of the bases.
            new_keys, new_types, new_required_keys = self.check_typeddict_classdef(
                defn, keys)
            keys.extend(new_keys)
            types.extend(new_types)
            required_keys.update(new_required_keys)
            info = self.build_typeddict_typeinfo(defn.name, keys, types,
                                                 required_keys)
            # Point the stale TypeInfo at its replacement and rebind.
            defn.info.replaced = info
            defn.info = info
            node.node = info
            defn.analyzed = TypedDictExpr(info)
            defn.analyzed.line = defn.line
            defn.analyzed.column = defn.column
            return True
    return False