def add_method(funcname: str,
               ret: Type,
               args: List[Argument],
               name: Optional[str] = None,
               is_classmethod: bool = False,
               ) -> None:
    """Synthesize a method and register it in the class's symbol table.

    NOTE(review): this reads 'selftype', 'function_type', 'tvd' and 'info'
    from an enclosing scope -- it appears to be a nested closure; confirm at
    the definition site. The 'name' parameter is currently unused.

    Args:
        funcname: name the method is stored under in the class.
        ret: declared return type of the method.
        args: explicit arguments (excluding self/cls, which is prepended).
        name: unused (kept for interface compatibility).
        is_classmethod: if true, emit a classmethod wrapped in a Decorator.
    """
    # Prepend the implicit first argument: 'cls: Type[Self]' for
    # classmethods, 'self: Self' otherwise.
    if is_classmethod:
        first = [Argument(Var('cls'), TypeType.make_normalized(selftype), None, ARG_POS)]
    else:
        first = [Argument(Var('self'), selftype, None, ARG_POS)]
    args = first + args
    types = [arg.type_annotation for arg in args]
    items = [arg.variable.name() for arg in args]
    arg_kinds = [arg.kind for arg in args]
    # All arguments must carry explicit annotations.
    assert None not in types
    signature = CallableType(cast(List[Type], types), arg_kinds, items, ret, function_type)
    signature.variables = [tvd]
    # Build the FuncDef node with an empty body and link it to the class.
    func = FuncDef(funcname, args, Block([]))
    func.info = info
    func.is_class = is_classmethod
    func.type = set_callable_name(signature, func)
    func._fullname = info.fullname() + '.' + funcname
    if is_classmethod:
        # Classmethods are stored as a Decorator node wrapping the FuncDef,
        # mirroring how '@classmethod' is represented after analysis.
        v = Var(funcname, func.type)
        v.is_classmethod = True
        v.info = info
        v._fullname = func._fullname
        dec = Decorator(func, [NameExpr('classmethod')], v)
        info.names[funcname] = SymbolTableNode(MDEF, dec)
    else:
        info.names[funcname] = SymbolTableNode(MDEF, func)
def check_namedtuple(self,
                     node: Expression,
                     var_name: Optional[str],
                     is_func_scope: bool) -> Optional[TypeInfo]:
    """Check if a call defines a namedtuple.

    The optional var_name argument is the name of the variable to which
    this is assigned, if any.

    If it does, return the corresponding TypeInfo. Return None otherwise.

    If the definition is invalid but looks like a namedtuple,
    report errors but return (some) TypeInfo.
    """
    if not isinstance(node, CallExpr):
        return None
    call = node
    callee = call.callee
    if not isinstance(callee, RefExpr):
        return None
    fullname = callee.fullname
    # Only collections.namedtuple(...) and typing.NamedTuple(...) calls
    # qualify; the latter carries item types.
    if fullname == 'collections.namedtuple':
        is_typed = False
    elif fullname == 'typing.NamedTuple':
        is_typed = True
    else:
        return None
    items, types, defaults, ok = self.parse_namedtuple_args(call, fullname)
    if not ok:
        # Error. Construct dummy return value.
        return self.build_namedtuple_typeinfo('namedtuple', [], [], {})
    name = cast(StrExpr, call.args[0]).value
    if name != var_name or is_func_scope:
        # Give it a unique name derived from the line number.
        name += '@' + str(call.line)
    if len(defaults) > 0:
        # Defaults apply to the trailing fields, so pair them with the
        # last len(defaults) items.
        default_items = {
            arg_name: default
            for arg_name, default in zip(items[-len(defaults):], defaults)
        }
    else:
        default_items = {}
    info = self.build_namedtuple_typeinfo(name, items, types, default_items)
    # Store it as a global just in case it would remain anonymous.
    # (Or in the nearest class if there is one.)
    stnode = SymbolTableNode(GDEF, info)
    self.api.add_symbol_table_node(name, stnode)
    # Record the analyzed form on the call so later passes can reuse it.
    call.analyzed = NamedTupleExpr(info, is_typed=is_typed)
    call.analyzed.set_line(call.line, call.column)
    return info
def process(self) -> None:
    """Parse the file, store global names and advance to the next state.

    Side effects: registers the parsed tree in the shared module table,
    triggers imports of 'builtins' and all directly imported modules, and
    replaces this state object with a ParsedFile state.
    """
    tree = self.parse(self.program_text, self.path)

    # Store the parsed module in the shared module symbol table.
    self.manager.semantic_analyzer.modules[self.id] = tree

    if '.' in self.id:
        # Include module in the symbol table of the enclosing package.
        c = self.id.split('.')
        p = '.'.join(c[:-1])
        sem_anal = self.manager.semantic_analyzer
        sem_anal.modules[p].names[c[-1]] = SymbolTableNode(MODULE_REF, tree, p)

    if self.id != 'builtins':
        # The builtins module is imported implicitly in every program (it
        # contains definitions of int, print etc.).
        trace('import builtins')
        if not self.import_module('builtins'):
            self.fail(self.path, 1, 'Could not find builtins')

    # Do the first pass of semantic analysis: add top-level definitions in
    # the file to the symbol table. We must do this before processing imports,
    # since this may mark some import statements as unreachable.
    first = FirstPass(self.semantic_analyzer())
    first.analyze(tree, self.path, self.id)

    # Add all directly imported modules to be processed (however they are
    # not processed yet, just waiting to be processed).
    for id, line in self.manager.all_imported_modules_in_file(tree):
        self.errors().push_import_context(self.path, line)
        try:
            res = self.import_module(id)
        finally:
            self.errors().pop_import_context()
        if not res:
            # Report as non-blocking so analysis of this file continues.
            self.fail(self.path, line, "No module named '{}'".format(id), blocker=False)
            self.manager.missing_modules.add(id)

    # Initialize module symbol table, which was populated by the semantic
    # analyzer.
    tree.names = self.semantic_analyzer().globals

    # Replace this state object with a parsed state in BuildManager.
    self.switch_state(ParsedFile(self.info(), tree))
def add_method(
        ctx: ClassDefContext,
        name: str,
        args: List[Argument],
        return_type: Type,
        self_type: Optional[Type] = None,
        tvar_def: Optional[TypeVarDef] = None,
) -> None:
    """Adds a new method to a class.

    Args:
        ctx: class-definition plugin context supplying the target class.
        name: method name; an existing plugin-generated def is replaced,
            a user-written one is kept under a redefinition alias.
        args: explicit arguments (a 'self' argument is prepended here).
        return_type: declared return type of the new method.
        self_type: type for 'self'; defaults to the class filled with
            its own type variables.
        tvar_def: optional type variable to attach to the signature.
    """
    info = ctx.cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            ctx.cls.defs.body.remove(sym.node)

    self_type = self_type or fill_typevars(info)
    function_type = ctx.api.named_type('__builtins__.function')

    args = [Argument(Var('self'), self_type, None, ARG_POS)] + args
    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    # Body is a single 'pass'; the checker only needs the signature.
    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    info.names[name] = SymbolTableNode(MDEF, func, plugin_generated=True)
    info.defn.defs.body.append(func)
def enum_hook(ctx: DynamicClassDefContext) -> None:
    """Register a type alias for a dynamically created enum-like class.

    The first call argument names (or is) the aliased type; unresolved
    names cause the analysis to be deferred to a later pass.
    """
    target = ctx.call.args[0]

    aliased = None
    if isinstance(target, NameExpr):
        if not target.node:
            # Referenced name has not been bound yet -- retry later.
            ctx.api.defer()
            return
        if isinstance(target.node, Var):
            # Use the variable's type, falling back to Any when unknown.
            aliased = target.node.type or AnyType(TypeOfAny.implementation_artifact)

    if aliased is None:
        # Any other expression form: resolve it to a type directly.
        aliased = _get_type_for_expr(target, ctx.api)

    alias = TypeAlias(
        aliased,
        fullname=ctx.api.qualified_name(ctx.name),
        line=ctx.call.line,
        column=ctx.call.column,
    )
    ctx.api.add_symbol_table_node(
        ctx.name, SymbolTableNode(GDEF, alias, plugin_generated=False))
def create_indirect_imported_name(file_node: MypyFile,
                                  module: str,
                                  relative: int,
                                  imported_name: str) -> Optional[SymbolTableNode]:
    """Create symbol table entry for a name imported from another module.

    These entries act as indirect references.

    Returns None when the relative import cannot be resolved against the
    importing file's package position.
    """
    target_module, ok = correct_relative_import(
        file_node.fullname, relative, module, file_node.is_package_init_file())
    if not ok:
        return None
    link = ImportedName('{}.{}'.format(target_module, imported_name))
    # Use GDEF since this refers to a module-level definition.
    return SymbolTableNode(GDEF, link)
def _add_attrs_magic_attribute(
        ctx: 'mypy.plugin.ClassDefContext',
        attrs: 'List[Tuple[str, Optional[Type]]]') -> None:
    """Add an '__attrs_attrs__' class variable describing the attrs fields.

    Builds a synthetic named-tuple-like TypeInfo whose members mirror the
    attrs fields (each typed attr.Attribute[T] when resolvable, Any
    otherwise) and exposes the tuple of attributes as a ClassVar.

    Args:
        ctx: class-definition plugin context for the attrs class.
        attrs: (name, type) pairs for the collected attrs attributes;
            a None type falls back to Any.
    """
    any_type = AnyType(TypeOfAny.explicit)
    attributes_types: 'List[Type]' = [
        ctx.api.named_type_or_none('attr.Attribute', [attr_type or any_type]) or any_type
        for _, attr_type in attrs
    ]
    fallback_type = ctx.api.named_type('builtins.tuple', [
        ctx.api.named_type_or_none('attr.Attribute', [any_type]) or any_type,
    ])

    # Synthetic tuple subclass holding one property per attrs field.
    ti = ctx.api.basic_new_typeinfo(MAGIC_ATTR_CLS_NAME, fallback_type, 0)
    ti.is_named_tuple = True
    for (name, _), attr_type in zip(attrs, attributes_types):
        var = Var(name, attr_type)
        var.is_property = True
        proper_type = get_proper_type(attr_type)
        if isinstance(proper_type, Instance):
            var.info = proper_type.type
        ti.names[name] = SymbolTableNode(MDEF, var, plugin_generated=True)
    attributes_type = Instance(ti, [])

    # TODO: refactor using `add_attribute_to_class`
    var = Var(name=MAGIC_ATTR_NAME, type=TupleType(attributes_types, fallback=attributes_type))
    var.info = ctx.cls.info
    var.is_classvar = True
    # Bug fix: _fullname must be built from the symbol's own name (the key
    # it is stored under below), not from the synthetic tuple class's name.
    # This matches how every other plugin-generated Var in this file builds
    # its fullname.
    var._fullname = f"{ctx.cls.fullname}.{MAGIC_ATTR_NAME}"
    var.allow_incompatible_override = True
    ctx.cls.info.names[MAGIC_ATTR_NAME] = SymbolTableNode(
        kind=MDEF,
        node=var,
        plugin_generated=True,
        no_serialize=True,
    )
def visit_import(self, node: Import) -> None:
    """Reset symbol-table entries that a reachable 'import' re-binds.

    Without this, the semantic analyzer would treat the import as an
    assignment to an existing name rather than a fresh definition.
    """
    assert not node.assignments

    # If the node is unreachable, don't reset entries: they point to
    # something else!
    if node.is_unreachable:
        return
    if not self.names:
        return

    for name, as_name in node.ids:
        bound = as_name or name
        # Only the top-level package name is bound by 'import a.b.c'.
        top_level = bound.split('.')[0]
        if top_level in self.names:
            self.names[top_level] = SymbolTableNode(UNBOUND_IMPORTED, None)
def visit_func_def(self, func: FuncDef, decorated: bool = False) -> None:
    """Process a func def.

    decorated is true if we are processing a func def in a
    Decorator that needs a _fullname and to have its body analyzed but
    does not need to be added to the symbol table.
    """
    sem = self.sem
    if sem.type is not None:
        # Don't process methods during pass 1.
        return
    # Functions defined inside a conditional block may or may not exist.
    func.is_conditional = sem.block_depth[-1] > 0
    func._fullname = sem.qualified_name(func.name())
    at_module = sem.is_module_scope() and not decorated
    # Special case: a stub package __init__ with a module-level __getattr__
    # marks the package as "partial" (incomplete stubs are allowed).
    if (at_module and func.name() == '__getattr__' and
            self.sem.cur_mod_node.is_package_init_file() and
            self.sem.cur_mod_node.is_stub):
        if isinstance(func.type, CallableType):
            ret = func.type.ret_type
            if isinstance(ret, UnboundType) and not ret.args:
                sym = self.sem.lookup_qualified(ret.name, func, suppress_errors=True)
                # We only interpret a package as partial if the __getattr__ return type
                # is either types.ModuleType of Any.
                if sym and sym.node and sym.node.fullname() in ('types.ModuleType',
                                                                'typing.Any'):
                    self.sem.cur_mod_node.is_partial_stub_package = True
    if at_module and func.name() in sem.globals:
        # Already defined in this module.
        original_sym = sem.globals[func.name()]
        if (original_sym.kind == UNBOUND_IMPORTED or
                isinstance(original_sym.node, ImportedName)):
            # Ah this is an imported name. We can't resolve them now, so we'll postpone
            # this until the main phase of semantic analysis.
            return
        if not sem.set_original_def(original_sym.node, func):
            # Report error.
            sem.check_no_global(func.name(), func)
    else:
        if at_module:
            sem.globals[func.name()] = SymbolTableNode(GDEF, func)
        # Also analyze the function body (needed in case there are unreachable
        # conditional imports).
        sem.function_stack.append(func)
        sem.scope.enter_function(func)
        sem.enter()
        func.body.accept(self)
        sem.leave()
        sem.scope.leave()
        sem.function_stack.pop()
def add_new_sym_for_info(info: TypeInfo, *, name: str, sym_type: MypyType,
                         no_serialize: bool = False) -> None:
    """Attach a plugin-generated member variable to a class's symbol table.

    Args:
        info: class the new member belongs to.
        name: attribute name to register.
        sym_type: type of the variable itself.
        no_serialize: when true, skip caching this symbol.
    """
    member = Var(name=name, type=sym_type)
    # The owning class (the type the variable is bound to).
    member.info = info
    member._fullname = '{}.{}'.format(info.fullname, name)
    member.is_initialized_in_class = True
    member.is_inferred = True
    info.names[name] = SymbolTableNode(
        MDEF, member, plugin_generated=True, no_serialize=no_serialize)
def _apply_placeholder_attr_to_class(
    api: SemanticAnalyzerPluginInterface,
    cls: ClassDef,
    qualified_name: str,
    attrname: str,
):
    """Add an attribute to *cls* typed as the named class, or Any.

    Looks up *qualified_name*; when it resolves to a class, the new
    attribute gets that instance type, otherwise a special-form Any.
    """
    sym = api.lookup_fully_qualified_or_none(qualified_name)
    if sym:
        assert isinstance(sym.node, TypeInfo)
        placeholder_type = Instance(sym.node, [])
    else:
        # Target class not available; degrade gracefully to Any.
        placeholder_type = AnyType(TypeOfAny.special_form)
    attr_var = Var(attrname)
    attr_var.info = cls.info
    attr_var.type = placeholder_type
    cls.info.names[attrname] = SymbolTableNode(MDEF, attr_var)
def process_nested_classes(self, outer_def: ClassDef) -> None:
    """Create TypeInfos for classes nested inside outer_def (pass 1).

    Also visits import statements and if-statements in the class body so
    their names are recorded; other statements are left for later passes.
    """
    self.sem.enter_class(outer_def.info)
    for node in outer_def.defs.body:
        if isinstance(node, ClassDef):
            # Fresh TypeInfo for the nested class, qualified by the
            # enclosing class's fullname when it has one.
            node.info = TypeInfo(SymbolTable(), node, self.sem.cur_mod_id)
            if outer_def.fullname:
                node.info._fullname = outer_def.fullname + '.' + node.info.name()
            else:
                node.info._fullname = node.info.name()
            node.fullname = node.info._fullname
            symbol = SymbolTableNode(MDEF, node.info)
            outer_def.info.names[node.name] = symbol
            # Recurse to handle arbitrarily deep nesting.
            self.process_nested_classes(node)
        elif isinstance(node, (ImportFrom, Import, ImportAll, IfStmt)):
            node.accept(self)
    self.sem.leave_class()
def _add_dataclass_fields_magic_attribute(self) -> None:
    """Expose '__dataclass_fields__' typed as Dict[str, Field[Any]].

    Falls back to Dict[str, Any] values when dataclasses.Field is not
    resolvable in the current build.
    """
    api = self._ctx.api
    cls_info = self._ctx.cls.info
    attr_name = '__dataclass_fields__'

    any_type = AnyType(TypeOfAny.explicit)
    field_type = api.named_type_or_none('dataclasses.Field', [any_type]) or any_type
    attr_type = api.named_type('builtins.dict',
                               [api.named_type('builtins.str'), field_type])

    var = Var(name=attr_name, type=attr_type)
    var.info = cls_info
    var._fullname = cls_info.fullname + '.' + attr_name
    cls_info.names[attr_name] = SymbolTableNode(
        kind=MDEF,
        node=var,
        plugin_generated=True,
    )
def union_hook(ctx: DynamicClassDefContext) -> None:
    """Register a type alias for a dynamically declared union type.

    The second call argument must be a literal tuple of member type
    expressions; any other form is silently ignored.
    """
    members_arg = ctx.call.args[1]
    if not isinstance(members_arg, TupleExpr):
        return

    member_types = tuple(_get_type_for_expr(item, ctx.api)
                         for item in members_arg.items)
    alias = TypeAlias(
        UnionType(member_types),
        fullname=ctx.api.qualified_name(ctx.name),
        line=ctx.call.line,
        column=ctx.call.column,
    )
    ctx.api.add_symbol_table_node(
        ctx.name, SymbolTableNode(GDEF, alias, plugin_generated=False))
def _make_frozen(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute]) -> None:
    """Turn all the attributes into properties to simulate frozen classes."""
    for attribute in attributes:
        if attribute.name in ctx.cls.info.names:
            # This variable belongs to this class so we can modify it.
            node = ctx.cls.info.names[attribute.name].node
            assert isinstance(node, Var)
            node.is_property = True
            continue
        # Inherited attribute: shadow it with a fresh Var of the same type
        # in this class's symbol table so it can be marked as a property.
        var = Var(attribute.name, ctx.cls.info[attribute.name].type)
        var.info = ctx.cls.info
        var._fullname = '{}.{}'.format(ctx.cls.info.fullname, var.name)
        ctx.cls.info.names[var.name] = SymbolTableNode(MDEF, var)
        var.is_property = True
def _propertize_callables(self, attributes: List[DataclassAttribute]) -> None:
    """Converts all attributes with callable types to @property methods.

    This avoids the typechecker getting confused and thinking that
    `my_dataclass_instance.callable_attr(foo)` is going to receive a
    `self` argument (it is not).
    """
    info = self._ctx.cls.info
    for attr in attributes:
        if not isinstance(get_proper_type(attr.type), CallableType):
            continue
        prop = attr.to_var()
        prop.info = info
        prop.is_property = True
        prop.is_settable_property = True
        prop._fullname = info.fullname + '.' + prop.name
        info.names[prop.name] = SymbolTableNode(MDEF, prop)
def create_type_hook(ctx: DynamicClassDefContext) -> None:
    """Register a placeholder Any alias for a dynamically created type.

    Returning classes/type aliases is not supported yet by mypy;
    see https://github.com/python/mypy/issues/5865
    """
    placeholder = TypeAlias(
        AnyType(TypeOfAny.from_error),
        fullname=ctx.api.qualified_name(ctx.name),
        line=ctx.call.line,
        column=ctx.call.column,
    )
    ctx.api.add_symbol_table_node(
        ctx.name,
        SymbolTableNode(GDEF, placeholder, plugin_generated=True),
    )
def _add_order(ctx: 'mypy.plugin.ClassDefContext', adder: 'MethodAdder') -> None:
    """Generate all the ordering methods for this class.

    The comparison methods are typed as
        AT = TypeVar('AT')
        def __lt__(self: AT, other: AT) -> bool
    so that comparisons with subclasses work correctly.
    """
    bool_type = ctx.api.named_type('__builtins__.bool')
    object_type = ctx.api.named_type('__builtins__.object')
    tvar_fullname = ctx.cls.info.fullname + '.' + SELF_TVAR_NAME

    tvd = TypeVarType(SELF_TVAR_NAME, tvar_fullname, -1, [], object_type)
    self_tvar_expr = TypeVarExpr(SELF_TVAR_NAME, tvar_fullname, [], object_type)
    ctx.cls.info.names[SELF_TVAR_NAME] = SymbolTableNode(MDEF, self_tvar_expr)

    args = [Argument(Var('other', tvd), tvd, None, ARG_POS)]
    for method in ('__lt__', '__le__', '__gt__', '__ge__'):
        adder.add_method(method, args, bool_type, self_type=tvd, tvd=tvd)
def _freeze(self, attributes: List[DataclassAttribute]) -> None:
    """Converts all attributes to @property methods in order to
    emulate frozen classes.
    """
    info = self._ctx.cls.info
    for attr in attributes:
        existing = info.names.get(attr.name)
        if existing is None:
            # Attribute comes from a superclass: add a local Var so the
            # property flag applies to this class.
            var = attr.to_var()
            var.info = info
            var.is_property = True
            var._fullname = info.fullname + '.' + var.name
            info.names[var.name] = SymbolTableNode(MDEF, var)
        else:
            var = existing.node
            assert isinstance(var, Var)
            var.is_property = True
def decl_info_hook(ctx: DynamicClassDefContext) -> None:
    """Support dynamically defining declarative bases.

    For example:
        from sqlalchemy.ext.declarative import declarative_base

        Base = declarative_base()
    """
    # Collect explicit base classes passed via the 'cls' keyword argument.
    # Passing base classes as positional arguments is currently not handled.
    cls_bases = []  # type: List[Instance]
    if 'cls' in ctx.call.arg_names:
        cls_arg = ctx.call.args[ctx.call.arg_names.index("cls")]
        candidates = cls_arg.items if isinstance(cls_arg, TupleExpr) else [cls_arg]
        for candidate in candidates:
            if isinstance(candidate, RefExpr) and isinstance(candidate.node, TypeInfo):
                base = fill_typevars_with_any(candidate.node)
                # TODO: Support tuple types?
                if isinstance(base, Instance):
                    cls_bases.append(base)

    class_def = ClassDef(ctx.name, Block([]))
    class_def.fullname = ctx.api.qualified_name(ctx.name)
    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
    class_def.info = info
    obj = ctx.api.builtin_type('builtins.object')
    info.bases = cls_bases or [obj]
    try:
        calculate_mro(info)
    except MroError:
        # Degrade to object + Any fallback so analysis can continue.
        ctx.api.fail("Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
    set_declarative(info)

    # TODO: check what else is added.
    add_metadata_var(ctx.api, info)
def check_typeddict(self,
                    node: Expression,
                    var_name: Optional[str],
                    is_func_scope: bool) -> Optional[TypeInfo]:
    """Check if a call defines a TypedDict.

    The optional var_name argument is the name of the variable to which
    this is assigned, if any.

    If it does, return the corresponding TypeInfo. Return None otherwise.

    If the definition is invalid but looks like a TypedDict,
    report errors but return (some) TypeInfo.
    """
    if not isinstance(node, CallExpr):
        return None
    call = node
    callee = call.callee
    if not isinstance(callee, RefExpr):
        return None
    fullname = callee.fullname
    if fullname != 'mypy_extensions.TypedDict':
        return None
    items, types, total, ok = self.parse_typeddict_args(call)
    if not ok:
        # Error. Construct dummy return value.
        info = self.build_typeddict_typeinfo('TypedDict', [], [], set())
    else:
        name = cast(StrExpr, call.args[0]).value
        if var_name is not None and name != var_name:
            self.fail(
                "First argument '{}' to TypedDict() does not match variable name '{}'".format(
                    name, var_name), node)
        if name != var_name or is_func_scope:
            # Give it a unique name derived from the line number.
            name += '@' + str(call.line)
        # With total=True (the default) every key is required.
        required_keys = set(items) if total else set()
        info = self.build_typeddict_typeinfo(name, items, types, required_keys)
        # Store it as a global just in case it would remain anonymous.
        # (Or in the nearest class if there is one.)
        stnode = SymbolTableNode(GDEF, info)
        self.api.add_symbol_table_node(name, stnode)
    # Record the analyzed form on the call so later passes can reuse it.
    call.analyzed = TypedDictExpr(info)
    call.analyzed.set_line(call.line, call.column)
    return info
def add_key_typeddict_to_global_symboltable(ctx: DynamicClassDefContext) -> None:
    """Build a TypedDict from a key-typeddict call and register it globally.

    Parses the field specification from the second call argument, builds
    the TypedDict TypeInfo, applies the optional third ``allow_extra``
    flag and adds the result to the global symbol table under ctx.name.
    Reports an error and returns early on unsupported key types.
    """
    api = cast(SemanticAnalyzerPass2, ctx.api)
    typeddict_analyzer = TypedDictAnalyzer(options=api.options, api=api, msg=api.msg)
    try:
        fields, types, required_fields = parse_key_typeddict_fields(ctx.call.args[1])
    except UnsupportedKeyTypeError as error:
        api.fail(f'Unsupported key type {error.args[0]}', ctx=ctx.call)
        # Bug fix: without this return, execution continued past the error
        # and used the unbound names fields/types/required_fields, crashing
        # with NameError right after the diagnostic was reported.
        return
    info = typeddict_analyzer.build_typeddict_typeinfo(
        ctx.name, items=fields, types=types, required_keys=required_fields)
    # Optional third argument toggles whether extra keys are allowed.
    allow_extra = api.parse_bool(ctx.call.args[2]) if len(ctx.call.args) > 2 else False
    info.typeddict_type.allow_extra = allow_extra
    api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def _dynamic_class_hook(ctx: DynamicClassDefContext) -> None:
    """Generate a declarative Base class when the declarative_base() function
    is encountered.

    Builds a synthetic class with DeclarativeMeta as its metaclass, using
    the optional ``cls=`` keyword argument as its base (scanning that base
    as a mixin), and registers it in the symbol table under ctx.name.
    """
    cls = ClassDef(ctx.name, Block([]))
    cls.fullname = ctx.api.qualified_name(ctx.name)

    # Wire up DeclarativeMeta as both the syntactic and declared metaclass.
    declarative_meta_sym: SymbolTableNode = ctx.api.modules[
        "sqlalchemy.orm.decl_api"].names["DeclarativeMeta"]
    declarative_meta_typeinfo: TypeInfo = declarative_meta_sym.node
    declarative_meta_name: NameExpr = NameExpr("DeclarativeMeta")
    declarative_meta_name.kind = GDEF
    declarative_meta_name.fullname = "sqlalchemy.orm.decl_api.DeclarativeMeta"
    declarative_meta_name.node = declarative_meta_typeinfo
    cls.metaclass = declarative_meta_name

    declarative_meta_instance = Instance(declarative_meta_typeinfo, [])
    info = TypeInfo(SymbolTable(), cls, ctx.api.cur_mod_id)
    info.declared_metaclass = info.metaclass_type = declarative_meta_instance
    cls.info = info

    # Bug fix: 'obj' is needed in the MroError fallback below even when an
    # explicit cls= base is supplied, so compute it unconditionally; the
    # original defined it only in the else branch, producing a NameError
    # when MRO calculation failed for a cls= base.
    obj = ctx.api.builtin_type("builtins.object")

    cls_arg = util._get_callexpr_kwarg(ctx.call, "cls")
    if cls_arg is not None:
        # assumes cls_arg.node is a TypeInfo (a class reference) -- TODO confirm
        decl_class._scan_declarative_assignments_and_apply_types(
            cls_arg.node.defn, ctx.api, is_mixin_scan=True)
        info.bases = [Instance(cls_arg.node, [])]
    else:
        info.bases = [obj]

    try:
        calculate_mro(info)
    except MroError:
        # Degrade to object + Any fallback so analysis can continue.
        util.fail(ctx.api, "Not able to calculate MRO for declarative base", ctx.call)
        info.bases = [obj]
        info.fallback_to_any = True

    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None:
    """
    Marks all fields as properties so that attempts to set them trigger mypy errors.

    This is the same approach used by the attrs and dataclasses plugins.
    """
    info = self._ctx.cls.info
    for field in fields:
        sym_node = info.names.get(field.name)
        if sym_node is None:
            # Field lives in a superclass: shadow it with a local Var
            # carrying the property flag.
            var = field.to_var(info, use_alias=False)
            var.info = info
            var.is_property = frozen
            var._fullname = info.fullname() + '.' + var.name()
            info.names[var.name()] = SymbolTableNode(MDEF, var)
        else:
            var = sym_node.node
            assert isinstance(var, Var)
            var.is_property = frozen
def add_construct_method(self, fields: List['PydanticModelField']) -> None:
    """
    Adds a fully typed `construct` classmethod to the class.

    Similar to the fields-aware __init__ method, but always uses the field
    names (not aliases), and does not treat settings fields as optional.
    """
    ctx = self._ctx
    # _fields_set: Optional[Set[str]] -- the leading optional argument.
    set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set',
                                 [ctx.api.named_type(f'{BUILTINS_NAME}.str')])
    optional_set_str = UnionType([set_str, NoneType()])
    fields_set_argument = Argument(Var('_fields_set', optional_set_str),
                                   optional_set_str, None, ARG_OPT)
    construct_arguments = self.get_field_arguments(fields, typed=True,
                                                   force_all_optional=False,
                                                   use_alias=False)
    construct_arguments = [fields_set_argument] + construct_arguments

    obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object')
    # Make sure it does not conflict with other names in the class.
    self_tvar_name = '_PydanticBaseModel'
    tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name
    tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type)
    self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type)
    ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr)

    # Backward-compatible with TypeVarDef from Mypy 0.910.
    # (On newer mypy, TypeVarDef is TypeVarType, so tvd can be used
    # directly; on 0.910 it must be wrapped.)
    if isinstance(tvd, TypeVarType):
        self_type = tvd
    else:
        self_type = TypeVarType(tvd)  # type: ignore[call-arg]

    add_method(
        ctx,
        'construct',
        construct_arguments,
        return_type=self_type,
        self_type=self_type,
        tvar_def=tvd,
        is_classmethod=True,
    )
def _add_attrs_magic_attribute(ctx: 'mypy.plugin.ClassDefContext',
                               raw_attr_types: 'List[Optional[Type]]') -> None:
    """Add the '__attrs_attrs__' tuple attribute describing attrs fields.

    Each element is typed attr.Attribute[T] when resolvable, Any otherwise;
    the tuple falls back to tuple[attr.Attribute[Any], ...].
    """
    attr_name = '__attrs_attrs__'
    any_type = AnyType(TypeOfAny.explicit)

    per_attr_types = []  # type: 'List[Type]'
    for raw in raw_attr_types:
        wrapped = ctx.api.named_type_or_none('attr.Attribute', [raw or any_type])
        per_attr_types.append(wrapped or any_type)

    fallback_type = ctx.api.named_type('builtins.tuple', [
        ctx.api.named_type_or_none('attr.Attribute', [any_type]) or any_type,
    ])

    var = Var(name=attr_name,
              type=TupleType(per_attr_types, fallback=fallback_type))
    var.info = ctx.cls.info
    var._fullname = '{}.{}'.format(ctx.cls.info.fullname, attr_name)
    ctx.cls.info.names[attr_name] = SymbolTableNode(
        kind=MDEF,
        node=var,
        plugin_generated=True,
    )
def load_settings_from_names(settings_classdef: ClassDef,
                             modules: Iterable[MypyFile],
                             api: SemanticAnalyzerPass2) -> None:
    """Copy UPPER_CASE module-level variables into the settings class.

    Annotated settings are copied via make_sym_copy_of_setting; settings
    without a type fall back to an Any-typed Var. The source module of
    each setting is recorded in the settings metadata.
    """
    settings_metadata = get_settings_metadata(settings_classdef.info)
    target_names = settings_classdef.info.names
    for module in modules:
        for name, sym in module.names.items():
            if not (name.isupper() and isinstance(sym.node, Var)):
                continue
            if sym.type is None:
                # Unannotated setting: register an Any-typed placeholder.
                var = Var(name, AnyType(TypeOfAny.unannotated))
                var.info = api.named_type('__builtins__.object').type
                target_names[name] = SymbolTableNode(sym.kind, var)
            else:
                copied = make_sym_copy_of_setting(sym)
                if copied is None:
                    continue
                target_names[name] = copied
            settings_metadata[name] = module.fullname()
def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo:
    """Construct the TypeInfo for a NewType, including its __init__.

    The generated initializer is  def __init__(self, item: old_type) -> None.
    """
    info = self.api.basic_new_typeinfo(name, base_type)
    info.is_newtype = True

    # Add __init__ method
    init_args = [
        Argument(Var('self'), NoneType(), None, ARG_POS),
        self.make_argument('item', old_type),
    ]
    signature = CallableType(
        arg_types=[Instance(info, []), old_type],
        arg_kinds=[a.kind for a in init_args],
        arg_names=['self', 'item'],
        ret_type=NoneType(),
        fallback=self.api.named_type('__builtins__.function'),
        name=name)
    init = FuncDef('__init__', init_args, Block([]), typ=signature)
    init.info = info
    init._fullname = info.fullname + '.__init__'
    info.names['__init__'] = SymbolTableNode(MDEF, init)

    return info
def create_ortho_diff_class(base1: TypeInfo, base2: TypeInfo,
                            api: SemanticAnalyzerPluginInterface,
                            call_ctx: Context) -> Tuple[str, SymbolTableNode]:
    """Synthesize a class inheriting from both bases; return (name, symbol).

    Based on:
    https://github.com/dropbox/sqlalchemy-stubs/blob/55470ceab8149db983411d5c094c9fe16343c58b/sqlmypy.py#L173-L216
    """
    synth_name = get_ortho_diff_name(base1.defn, base2.defn)
    synth_def = ClassDef(synth_name, Block([]))
    synth_def.fullname = api.qualified_name(synth_name)
    info = TypeInfo(SymbolTable(), synth_def, api.cur_mod_id)
    synth_def.info = info

    obj = api.builtin_type('builtins.object')
    info.bases = [cast(Instance, fill_typevars(base)) for base in (base1, base2)]
    try:
        calculate_mro(info)
    except MroError:
        # Degrade to object + Any fallback so analysis can continue.
        api.fail('Unable to calculate MRO for dynamic class', call_ctx)
        info.bases = [obj]
        info.fallback_to_any = True

    return synth_name, SymbolTableNode(GDEF, info)
def check_enum_call(self,
                    node: Expression,
                    var_name: str,
                    is_func_scope: bool) -> Optional[TypeInfo]:
    """Check if a call defines an Enum.

    Example:

        A = enum.Enum('A', 'foo bar')

    is equivalent to:

        class A(enum.Enum):
            foo = 1
            bar = 2
    """
    if not isinstance(node, CallExpr):
        return None
    call = node
    callee = call.callee
    if not isinstance(callee, RefExpr):
        return None
    fullname = callee.fullname
    if fullname not in ('enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'):
        return None
    items, values, ok = self.parse_enum_call_args(call, fullname.split('.')[-1])
    if not ok:
        # Error. Construct dummy return value.
        return self.build_enum_call_typeinfo(var_name, [], fullname)
    name = cast(Union[StrExpr, UnicodeExpr], call.args[0]).value
    if name != var_name or is_func_scope:
        # Give it a unique name derived from the line number.
        name += '@' + str(call.line)
    info = self.build_enum_call_typeinfo(name, items, fullname)
    # Store it as a global just in case it would remain anonymous.
    # (Or in the nearest class if there is one.)
    stnode = SymbolTableNode(GDEF, info)
    self.api.add_symbol_table_node(name, stnode)
    # Record the analyzed form on the call so later passes can reuse it.
    call.analyzed = EnumCallExpr(info, items, values)
    call.analyzed.set_line(call.line, call.column)
    return info