def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None:
    """Set up error reporting, configuration, and traversal state."""
    # Tracks the current module/class/function nesting during traversal.
    self.scope = Scope()
    self.errors = errors
    self.options = options
    self.is_typeshed_file = is_typeshed_file
    # Should function bodies be traversed too, or only the module top level?
    self.recurse_into_functions = True
def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None:
    """Set up error reporting, configuration, and traversal state."""
    # Tracks the current module/class/function nesting during traversal.
    self.scope = Scope()
    self.errors = errors
    self.options = options
    self.is_typeshed_file = is_typeshed_file
    # Should function bodies be traversed too, or only the module top level?
    self.recurse_into_functions = True
    # Type aliases already visited; prevents infinite recursion on recursive
    # aliases such as A = Union[int, List[A]].
    self.seen_aliases: Set[TypeAliasType] = set()
def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
             sem: NewSemanticAnalyzer) -> None:
    """Store the module map, error reporter, and semantic analyzer."""
    # Tracks the current module/class/function nesting during traversal.
    self.scope = Scope()
    self.modules = modules
    self.errors = errors
    self.sem = sem
    # If True, process function definitions; if False, skip them. Used for
    # processing module top levels in fine-grained incremental mode.
    self.recurse_into_functions = True
def __init__(self,
             type_map: Dict[Expression, Type],
             python_version: Tuple[int, int],
             alias_deps: 'DefaultDict[str, Set[str]]') -> None:
    """Initialize dependency-collection state."""
    # Tracks the current module/class/function nesting during traversal.
    self.scope = Scope()
    self.type_map = type_map
    # True when analyzing Python 2 code (affects which operator methods
    # are depended on, e.g. __cmp__).
    self.python2 = python_version[0] == 2
    # Mapping from target to the names of type aliases it depends on. Aliases
    # need special handling because they are only present in *expanded* form
    # in symbol tables. For example, after:
    #     A = List[int]
    #     x: A
    # the module symbol table just has a Var `x` with type `List[int]`, and
    # the dependency of `x` on `A` is lost. Therefore alias dependencies are
    # preserved at alias expansion points in `semanal.py`, stored as an
    # attribute on MypyFile, and then passed here.
    self.alias_deps = alias_deps
    # The computed result: trigger -> set of targets to reprocess.
    self.map = {}  # type: Dict[str, Set[str]]
    # Whether we are currently inside a class body.
    self.is_class = False
    self.is_package_init_file = False
class DependencyVisitor(TraverserVisitor):
    """AST visitor that collects fine-grained dependencies.

    The result is accumulated in ``self.map``, a mapping from trigger
    strings to the sets of targets that must be reprocessed when the
    trigger fires (see ``add_dependency``).
    """

    def __init__(self,
                 type_map: Dict[Expression, Type],
                 python_version: Tuple[int, int],
                 alias_deps: 'DefaultDict[str, Set[str]]') -> None:
        self.scope = Scope()
        self.type_map = type_map
        self.python2 = python_version[0] == 2
        # This attribute holds a mapping from target to names of type aliases
        # it depends on. These need to be processed specially, since they are
        # only present in expanded form in symbol tables. For example, after:
        #     A = List[int]
        #     x: A
        # The module symbol table will just have a Var `x` with type `List[int]`,
        # and the dependency of `x` on `A` is lost. Therefore the alias dependencies
        # are preserved at alias expansion points in `semanal.py`, stored as an attribute
        # on MypyFile, and then passed here.
        self.alias_deps = alias_deps
        # The computed result: trigger -> set of targets.
        self.map = {}  # type: Dict[str, Set[str]]
        self.is_class = False
        self.is_package_init_file = False

    # TODO (incomplete):
    #   await
    #   protocols

    def visit_mypy_file(self, o: MypyFile) -> None:
        self.scope.enter_file(o.fullname())
        self.is_package_init_file = o.is_package_init_file()
        self.add_type_alias_deps(self.scope.current_target())
        super().visit_mypy_file(o)
        self.scope.leave()

    def visit_func_def(self, o: FuncDef) -> None:
        self.scope.enter_function(o)
        target = self.scope.current_target()
        if o.type:
            if self.is_class and isinstance(o.type, FunctionLike):
                # Methods depend on their bound signature (self stripped).
                signature = bind_self(o.type)  # type: Type
            else:
                signature = o.type
            for trigger in get_type_triggers(signature):
                self.add_dependency(trigger)
                self.add_dependency(trigger, target=make_trigger(target))
        if o.info:
            # A method depends on same-named methods in non-trivial base classes.
            for base in non_trivial_bases(o.info):
                self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
        self.add_type_alias_deps(self.scope.current_target())
        super().visit_func_def(o)
        # Also visit bodies of expanded variants (e.g. overload items),
        # excluding the function itself which was already visited above.
        variants = set(o.expanded) - {o}
        for ex in variants:
            if isinstance(ex, FuncDef):
                super().visit_func_def(ex)
        self.scope.leave()

    def visit_decorator(self, o: Decorator) -> None:
        # We don't need to recheck outer scope for an overload, only overload itself.
        # Also if any decorator is nested, it is not externally visible, so we don't need to
        # generate dependency.
        if not o.func.is_overload and self.scope.current_function_name() is None:
            self.add_dependency(make_trigger(o.func.fullname()))
        super().visit_decorator(o)

    def visit_class_def(self, o: ClassDef) -> None:
        self.scope.enter_class(o.info)
        target = self.scope.current_full_target()
        # A class depends on itself: any change to it requires reprocessing.
        self.add_dependency(make_trigger(target), target)
        old_is_class = self.is_class
        self.is_class = True
        # Add dependencies to type variables of a generic class.
        for tv in o.type_vars:
            self.add_dependency(make_trigger(tv.fullname), target)
        self.process_type_info(o.info)
        super().visit_class_def(o)
        self.is_class = old_is_class
        self.scope.leave()

    def visit_newtype_expr(self, o: NewTypeExpr) -> None:
        if o.info:
            self.scope.enter_class(o.info)
            self.process_type_info(o.info)
            self.scope.leave()

    def process_type_info(self, info: TypeInfo) -> None:
        """Add dependencies implied by a class's TypeInfo (bases, metaclass, members)."""
        target = self.scope.current_full_target()
        for base in info.bases:
            self.add_type_dependencies(base, target=target)
        if info.tuple_type:
            self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
        if info.typeddict_type:
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
        if info.declared_metaclass:
            self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
        self.add_type_alias_deps(self.scope.current_target())
        for name, node in info.names.items():
            if isinstance(node.node, Var):
                for base_info in non_trivial_bases(info):
                    # If the type of an attribute changes in a base class, we make references
                    # to the attribute in the subclass stale.
                    self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                        target=make_trigger(info.fullname() + '.' + name))
        for base_info in non_trivial_bases(info):
            # Inherited members: a change in the base definition invalidates
            # references through the subclass.
            for name, node in base_info.names.items():
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
            self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                                target=make_trigger(info.fullname() + '.__init__'))
            self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                                target=make_trigger(info.fullname() + '.__new__'))

    def visit_import(self, o: Import) -> None:
        for id, as_id in o.ids:
            self.add_dependency(make_trigger(id), self.scope.current_target())

    def visit_import_from(self, o: ImportFrom) -> None:
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        for name, as_name in o.names:
            self.add_dependency(make_trigger(module_id + '.' + name))

    def visit_import_all(self, o: ImportAll) -> None:
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        # The current target needs to be rechecked if anything "significant" changes in the
        # target module namespace (as the imported definitions will need to be updated).
        self.add_dependency(make_wildcard_trigger(module_id))

    def visit_block(self, o: Block) -> None:
        # Skip unreachable blocks: they were not type checked.
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
        rvalue = o.rvalue
        if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
            # TypeVar definition: depend on the bound and value types.
            analyzed = rvalue.analyzed
            self.add_type_dependencies(analyzed.upper_bound,
                                       target=make_trigger(analyzed.fullname()))
            for val in analyzed.values:
                self.add_type_dependencies(val, target=make_trigger(analyzed.fullname()))
            # We need to re-analyze the definition if bound or value is deleted.
            super().visit_call_expr(rvalue)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
            # Depend on types of named tuple items.
            info = rvalue.analyzed.info
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            for name, symnode in info.names.items():
                if not name.startswith('_') and isinstance(symnode.node, Var):
                    typ = symnode.node.type
                    if typ:
                        self.add_type_dependencies(typ)
                        self.add_type_dependencies(typ, target=make_trigger(prefix))
                        attr_target = make_trigger('%s.%s' % (prefix, name))
                        self.add_type_dependencies(typ, target=attr_target)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr):
            # Depend on the underlying typeddict type
            info = rvalue.analyzed.info
            assert info.typeddict_type is not None
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix))
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr):
            # Enum values are currently not checked, but for future we add the deps on them
            for name, symnode in rvalue.analyzed.info.names.items():
                if isinstance(symnode.node, Var) and symnode.node.type:
                    self.add_type_dependencies(symnode.node.type)
        elif o.is_alias_def:
            assert len(o.lvalues) == 1
            lvalue = o.lvalues[0]
            assert isinstance(lvalue, NameExpr)
            # TODO: get rid of this extra dependency from __init__ to alias definition scope
            typ = self.type_map.get(lvalue)
            if isinstance(typ, FunctionLike) and typ.is_type_obj():
                class_name = typ.type_object().fullname()
                self.add_dependency(make_trigger(class_name + '.__init__'))
                self.add_dependency(make_trigger(class_name + '.__new__'))
            if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
                self.add_type_dependencies(rvalue.analyzed.type)
        else:
            # Normal assignment
            super().visit_assignment_stmt(o)
            for lvalue in o.lvalues:
                self.process_lvalue(lvalue)
            # For chained assignments (a = b = rvalue), each tuple lvalue
            # iterates the value assigned to it (the next item in the chain).
            items = o.lvalues + [rvalue]
            for i in range(len(items) - 1):
                lvalue = items[i]
                rvalue = items[i + 1]
                if isinstance(lvalue, TupleExpr):
                    self.add_attribute_dependency_for_expr(rvalue, '__iter__')
            if o.type:
                for trigger in get_type_triggers(o.type):
                    self.add_dependency(trigger)

    def process_lvalue(self, lvalue: Expression) -> None:
        """Generate additional dependencies for an lvalue."""
        if isinstance(lvalue, IndexExpr):
            self.add_operator_method_dependency(lvalue.base, '__setitem__')
        elif isinstance(lvalue, NameExpr):
            if lvalue.kind in (MDEF, GDEF):
                # Assignment to an attribute in the class body, or direct assignment to a
                # global variable.
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = get_type_triggers(lvalue_type)
                attr_trigger = make_trigger('%s.%s' % (self.scope.current_full_target(),
                                                       lvalue.name))
                for type_trigger in type_triggers:
                    self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, MemberExpr):
            if lvalue.kind is None:
                # Reference to a non-module attribute
                if lvalue.expr not in self.type_map:
                    # Unreachable assignment -> not checked so no dependencies to generate.
                    return
                object_type = self.type_map[lvalue.expr]
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = get_type_triggers(lvalue_type)
                for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
                    for type_trigger in type_triggers:
                        self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.process_lvalue(item)
        # TODO: star lvalue

    def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type:
        """Return the lvalue's type, resolving PartialType where possible."""
        if lvalue not in self.type_map:
            # Likely a block considered unreachable during type checking.
            return UninhabitedType()
        lvalue_type = self.type_map[lvalue]
        if isinstance(lvalue_type, PartialType):
            if isinstance(lvalue.node, Var) and lvalue.node.type:
                lvalue_type = lvalue.node.type
            else:
                # Probably a secondary, non-definition assignment that doesn't
                # result in a non-partial type. We won't be able to infer any
                # dependencies from this so just return something. (The first,
                # definition assignment with a partial type is handled
                # differently, in the semantic analyzer.)
                assert not lvalue.is_new_def
                return UninhabitedType()
        return lvalue_type

    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
        super().visit_operator_assignment_stmt(o)
        self.process_lvalue(o.lvalue)
        method = op_methods[o.op]
        self.add_attribute_dependency_for_expr(o.lvalue, method)
        if o.op in ops_with_inplace_method:
            # Derive e.g. '__iadd__' from '__add__'.
            inplace_method = '__i' + method[2:]
            self.add_attribute_dependency_for_expr(o.lvalue, inplace_method)

    def visit_for_stmt(self, o: ForStmt) -> None:
        super().visit_for_stmt(o)
        # __getitem__ is only used if __iter__ is missing but for simplicity we
        # just always depend on both.
        self.add_attribute_dependency_for_expr(o.expr, '__iter__')
        self.add_attribute_dependency_for_expr(o.expr, '__getitem__')
        self.process_lvalue(o.index)
        if isinstance(o.index, TupleExpr):
            # Process multiple assignment to index variables.
            item_type = o.inferred_item_type
            if item_type:
                # This is similar to above.
                self.add_attribute_dependency(item_type, '__iter__')
                self.add_attribute_dependency(item_type, '__getitem__')
        if o.index_type:
            self.add_type_dependencies(o.index_type)

    def visit_with_stmt(self, o: WithStmt) -> None:
        super().visit_with_stmt(o)
        for e in o.expr:
            self.add_attribute_dependency_for_expr(e, '__enter__')
            self.add_attribute_dependency_for_expr(e, '__exit__')
        if o.target_type:
            self.add_type_dependencies(o.target_type)

    def visit_print_stmt(self, o: PrintStmt) -> None:
        # Python 2 `print >>target, ...` calls target.write().
        super().visit_print_stmt(o)
        if o.target:
            self.add_attribute_dependency_for_expr(o.target, 'write')

    def visit_del_stmt(self, o: DelStmt) -> None:
        super().visit_del_stmt(o)
        if isinstance(o.expr, IndexExpr):
            self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__')

    # Expressions

    def process_global_ref_expr(self, o: RefExpr) -> None:
        if o.fullname is not None:
            self.add_dependency(make_trigger(o.fullname))

        # If this is a reference to a type, generate a dependency to its
        # constructor.
        # TODO: avoid generating spurious dependencies for isinstance checks,
        #       except statements, class attribute reference, etc, if perf problem.
        typ = self.type_map.get(o)
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            class_name = typ.type_object().fullname()
            self.add_dependency(make_trigger(class_name + '.__init__'))
            self.add_dependency(make_trigger(class_name + '.__new__'))

    def visit_name_expr(self, o: NameExpr) -> None:
        if o.kind == LDEF:
            # We don't track dependencies to local variables, since they
            # aren't externally visible.
            return
        if o.kind == MDEF:
            # Direct reference to member is only possible in the scope that
            # defined the name, so no dependency is required.
            return
        self.process_global_ref_expr(o)

    def visit_member_expr(self, e: MemberExpr) -> None:
        super().visit_member_expr(e)
        if e.kind is not None:
            # Reference to a module attribute
            self.process_global_ref_expr(e)
        else:
            # Reference to a non-module attribute
            if e.expr not in self.type_map:
                # No type available -- this happens for unreachable code. Since
                # it's unreachable, it wasn't type checked and we don't need to
                # generate dependencies.
                return
            typ = self.type_map[e.expr]
            self.add_attribute_dependency(typ, e.name)

    def visit_super_expr(self, e: SuperExpr) -> None:
        super().visit_super_expr(e)
        if e.info is not None:
            self.add_dependency(make_trigger(e.info.fullname() + '.' + e.name))

    def visit_call_expr(self, e: CallExpr) -> None:
        super().visit_call_expr(e)

    def visit_cast_expr(self, e: CastExpr) -> None:
        super().visit_cast_expr(e)
        self.add_type_dependencies(e.type)

    def visit_type_application(self, e: TypeApplication) -> None:
        super().visit_type_application(e)
        for typ in e.types:
            self.add_type_dependencies(typ)

    def visit_index_expr(self, e: IndexExpr) -> None:
        super().visit_index_expr(e)
        self.add_operator_method_dependency(e.base, '__getitem__')

    def visit_unary_expr(self, e: UnaryExpr) -> None:
        super().visit_unary_expr(e)
        if e.op not in unary_op_methods:
            return
        method = unary_op_methods[e.op]
        self.add_operator_method_dependency(e.expr, method)

    def visit_op_expr(self, e: OpExpr) -> None:
        super().visit_op_expr(e)
        self.process_binary_op(e.op, e.left, e.right)

    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
        super().visit_comparison_expr(e)
        for i, op in enumerate(e.operators):
            left = e.operands[i]
            right = e.operands[i + 1]
            self.process_binary_op(op, left, right)
            if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='):
                # Python 2 comparisons may fall back to __cmp__.
                self.add_operator_method_dependency(left, '__cmp__')
                self.add_operator_method_dependency(right, '__cmp__')

    def process_binary_op(self, op: str, left: Expression, right: Expression) -> None:
        method = op_methods.get(op)
        if method:
            if op == 'in':
                # 'x in y' dispatches on the right operand (__contains__).
                self.add_operator_method_dependency(right, method)
            else:
                self.add_operator_method_dependency(left, method)
                rev_method = reverse_op_methods.get(method)
                if rev_method:
                    self.add_operator_method_dependency(right, rev_method)

    def add_operator_method_dependency(self, e: Expression, method: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_operator_method_dependency_for_type(typ, method)

    def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
        # Note that operator methods can't be (non-metaclass) methods of type objects
        # (that is, TypeType objects or Callables representing a type).
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.fallback
        if isinstance(typ, Instance):
            trigger = make_trigger(typ.type.fullname() + '.' + method)
            self.add_dependency(trigger)
        elif isinstance(typ, UnionType):
            for item in typ.items:
                self.add_operator_method_dependency_for_type(item, method)
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            self.add_operator_method_dependency_for_type(typ.fallback, method)
        elif isinstance(typ, TypeType):
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type,
                                                             method)

    def visit_generator_expr(self, e: GeneratorExpr) -> None:
        super().visit_generator_expr(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
        super().visit_dictionary_comprehension(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_star_expr(self, e: StarExpr) -> None:
        super().visit_star_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        super().visit_yield_from_expr(e)
        self.add_iter_dependency(e.expr)

    # Helpers

    def add_type_alias_deps(self, target: str) -> None:
        # Type aliases are special, because some of the dependencies are calculated
        # in semanal.py, before they are expanded.
        if target in self.alias_deps:
            for alias in self.alias_deps[target]:
                self.add_dependency(make_trigger(alias))

    def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
        """Add dependency from trigger to a target.

        If the target is not given explicitly, use the current target.
        """
        if trigger.startswith(('<builtins.', '<typing.')):
            # Don't track dependencies to certain builtins to keep the size of
            # the dependencies manageable. These dependencies should only
            # change on mypy version updates, which will require a full rebuild
            # anyway.
            return
        if target is None:
            target = self.scope.current_target()
        self.map.setdefault(trigger, set()).add(target)

    def add_type_dependencies(self, typ: Type, target: Optional[str] = None) -> None:
        """Add dependencies to all components of a type.

        Args:
            target: If not None, override the default (current) target of the
                generated dependency.
        """
        # TODO: Use this method in more places where get_type_triggers() + add_dependency()
        #       are called together.
        for trigger in get_type_triggers(typ):
            self.add_dependency(trigger, target)

    def add_attribute_dependency(self, typ: Type, name: str) -> None:
        """Add dependencies for accessing a named attribute of a type."""
        targets = self.attribute_triggers(typ, name)
        for target in targets:
            self.add_dependency(target)

    def attribute_triggers(self, typ: Type, name: str) -> List[str]:
        """Return all triggers associated with the attribute of a type."""
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.fallback
        if isinstance(typ, Instance):
            member = '%s.%s' % (typ.type.fullname(), name)
            return [make_trigger(member)]
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            member = '%s.%s' % (typ.type_object().fullname(), name)
            triggers = [make_trigger(member)]
            triggers.extend(self.attribute_triggers(typ.fallback, name))
            return triggers
        elif isinstance(typ, UnionType):
            targets = []
            for item in typ.items:
                targets.extend(self.attribute_triggers(item, name))
            return targets
        elif isinstance(typ, TypeType):
            triggers = self.attribute_triggers(typ.item, name)
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                triggers.append(make_trigger('%s.%s' %
                                             (typ.item.type.metaclass_type.type.fullname(),
                                              name)))
            return triggers
        else:
            return []

    def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_attribute_dependency(typ, name)

    def add_iter_dependency(self, node: Expression) -> None:
        typ = self.type_map.get(node)
        if typ:
            self.add_attribute_dependency(typ, '__iter__')
class SemanticAnalyzerPass3(TraverserVisitor, SemanticAnalyzerCoreInterface):
    """The third and final pass of semantic analysis.

    Check type argument counts and values of generic types, and perform some
    straightforward type inference.
    """

    def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
                 sem: NewSemanticAnalyzer) -> None:
        self.modules = modules
        self.errors = errors
        self.sem = sem
        self.scope = Scope()
        # If True, process function definitions. If False, don't. This is used
        # for processing module top levels in fine-grained incremental mode.
        self.recurse_into_functions = True

    def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
                   patches: List[Tuple[int, Callable[[], None]]]) -> None:
        # NOTE(review): this only initializes per-file state; traversal of the
        # file appears to be driven by the caller -- confirm against call sites.
        self.recurse_into_functions = True
        self.options = options
        self.sem.options = options
        self.patches = patches
        self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
        self.sem.cur_mod_id = file_node.fullname()
        self.cur_mod_node = file_node
        self.sem.globals = file_node.names

    def visit_class_def(self, tdef: ClassDef) -> None:
        # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
        # check them again here.
        self.scope.enter_class(tdef.info)
        if not tdef.info.is_named_tuple:
            # Collect base classes, type variable bounds/values, and the tuple
            # fallback type for analysis.
            types = list(tdef.info.bases)  # type: List[Type]
            for tvar in tdef.type_vars:
                if tvar.upper_bound:
                    types.append(tvar.upper_bound)
                if tvar.values:
                    types.extend(tvar.values)
            if tdef.info.tuple_type:
                types.append(tdef.info.tuple_type)
            self.analyze_types(types, tdef.info)
            for type in tdef.info.bases:
                if tdef.info.is_protocol:
                    if not isinstance(type, Instance) or not type.type.is_protocol:
                        if type.type.fullname() != 'builtins.object':
                            self.fail('All bases of a protocol must be protocols', tdef)
        # Recompute MRO now that we have analyzed all modules, to pick
        # up superclasses of bases imported from other modules in an
        # import loop. (Only do so if we succeeded the first time.)
        if tdef.info.mro:
            tdef.info.mro = []  # Force recomputation
            self.sem.calculate_class_mro(tdef)
        super().visit_class_def(tdef)
        self.analyze_symbol_table(tdef.info.names)
        self.scope.leave()

    def visit_decorator(self, dec: Decorator) -> None:
        """Try to infer the type of the decorated function.

        This lets us resolve references to decorated functions during
        type checking when there are cyclic imports, as otherwise the
        type might not be available when we need it.

        This basically uses a simple special-purpose type inference
        engine just for decorators.
        """
        # Don't just call the super method since we don't unconditionally traverse the decorated
        # function.
        dec.var.accept(self)
        for decorator in dec.decorators:
            decorator.accept(self)
        if self.recurse_into_functions:
            dec.func.accept(self)
        self.analyze(dec.var.type, dec.var)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        """Traverse the assignment statement.

        This includes the actual assignment and synthetic types resulted from
        this assignment (if any). Currently this includes NewType, TypedDict,
        NamedTuple, and TypeVar.
        """
        self.analyze(s.type, s)
        if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
            # Type alias definition: warn on recursive aliases (warn=True).
            self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
        if isinstance(s.rvalue, CallExpr):
            analyzed = s.rvalue.analyzed
            if isinstance(analyzed, TypeVarExpr):
                types = []
                if analyzed.upper_bound:
                    types.append(analyzed.upper_bound)
                if analyzed.values:
                    types.extend(analyzed.values)
                self.analyze_types(types, analyzed)
        if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
            self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
        super().visit_assignment_stmt(s)

    # Helpers

    def analyze(self, type: Optional[Type], node: Node,
                warn: bool = False) -> None:
        # Recursive type warnings are only emitted on type definition 'node's, marked by 'warn'
        # Flags appeared during analysis of 'type' are collected in this dict.
        indicator = {}  # type: Dict[str, bool]
        if type:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            if not (isinstance(node, TypeAlias) and node.no_args):
                # We skip bare type aliases like `A = List`, these
                # are still valid. In contrast, use/expansion points
                # like `x: A` will be flagged.
                self.check_for_omitted_generics(type)
            if analyzer.aliases_used:
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)

    def analyze_types(self, types: List[Type], node: Node) -> None:
        # Similar to above but for nodes with multiple types.
        indicator = {}  # type: Dict[str, bool]
        for type in types:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            self.check_for_omitted_generics(type)
            if analyzer.aliases_used:
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)

    def analyze_symbol_table(self, names: SymbolTable) -> None:
        """Analyze types in symbol table nodes only (shallow)."""
        for node in names.values():
            if isinstance(node.node, TypeAlias):
                self.analyze(node.node.target, node.node)

    def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
        """Construct a pass-3 type analyzer sharing this pass's state."""
        return TypeAnalyserPass3(self,
                                 self.sem.plugin,
                                 self.options,
                                 self.is_typeshed_file,
                                 indicator,
                                 self.patches)

    def check_for_omitted_generics(self, typ: Type) -> None:
        # Only relevant with --disallow-any-generics, and never in typeshed.
        if not self.options.disallow_any_generics or self.is_typeshed_file:
            return
        for t in collect_any_types(typ):
            if t.type_of_any == TypeOfAny.from_omitted_generics:
                self.fail(message_registry.BARE_GENERIC, t)

    def lookup_qualified(self, name: str, ctx: Context,
                         suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        # Delegate to the wrapped semantic analyzer.
        return self.sem.lookup_qualified(name, ctx, suppress_errors=suppress_errors)

    def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode:
        return self.sem.lookup_fully_qualified(fullname)

    def fail(self, msg: str, ctx: Context, serious: bool = False, *,
             blocker: bool = False) -> None:
        self.sem.fail(msg, ctx, serious, blocker=blocker)

    def fail_blocker(self, msg: str, ctx: Context) -> None:
        self.fail(msg, ctx, blocker=True)

    def note(self, msg: str, ctx: Context) -> None:
        self.sem.note(msg, ctx)

    def builtin_type(self, name: str, args: Optional[List[Type]] = None) -> Instance:
        """Return an Instance of a builtin type, filling omitted args with Any."""
        names = self.modules['builtins']
        sym = names.names[name]
        node = sym.node
        assert isinstance(node, TypeInfo)
        if args:
            # TODO: assert len(args) == len(node.defn.type_vars)
            return Instance(node, args)
        any_type = AnyType(TypeOfAny.special_form)
        return Instance(node, [any_type] * len(node.defn.type_vars))
class SemanticAnalyzerPass3(TraverserVisitor, SemanticAnalyzerInterface):
    """The third and final pass of semantic analysis.

    Check type argument counts and values of generic types, and perform some
    straightforward type inference.
    """

    def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
                 sem: 'mypy.semanal.SemanticAnalyzerPass2') -> None:
        self.modules = modules
        self.errors = errors
        self.sem = sem
        self.scope = Scope()
        # If True, process function definitions. If False, don't. This is used
        # for processing module top levels in fine-grained incremental mode.
        self.recurse_into_functions = True

    def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
                   patches: List[Tuple[int, Callable[[], None]]]) -> None:
        """Run the pass over one file, accumulating deferred fixups in `patches`."""
        self.recurse_into_functions = True
        self.errors.set_file(fnam, file_node.fullname(), scope=self.scope)
        self.options = options
        self.sem.options = options
        self.patches = patches
        self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
        self.sem.cur_mod_id = file_node.fullname()
        self.cur_mod_node = file_node
        self.sem.globals = file_node.names
        with experiments.strict_optional_set(options.strict_optional):
            self.scope.enter_file(file_node.fullname())
            self.accept(file_node)
            self.analyze_symbol_table(file_node.names)
            self.scope.leave()
        del self.cur_mod_node
        self.patches = []

    def refresh_partial(self, node: Union[MypyFile, FuncItem, OverloadedFuncDef],
                        patches: List[Tuple[int, Callable[[], None]]]) -> None:
        """Refresh a stale target in fine-grained incremental mode."""
        self.patches = patches
        if isinstance(node, MypyFile):
            # Module top level only -- function bodies are separate targets.
            self.recurse_into_functions = False
            self.refresh_top_level(node)
        else:
            self.recurse_into_functions = True
            self.accept(node)
        self.patches = []

    def refresh_top_level(self, file_node: MypyFile) -> None:
        """Reanalyze a stale module top-level in fine-grained incremental mode."""
        for d in file_node.defs:
            self.accept(d)

    def accept(self, node: Node) -> None:
        """Visit a node, reporting internal errors instead of crashing."""
        try:
            node.accept(self)
        except Exception as err:
            report_internal_error(err, self.errors.file, node.line, self.errors,
                                  self.options)

    def visit_block(self, b: Block) -> None:
        # Unreachable blocks were not analyzed in earlier passes; skip them.
        if b.is_unreachable:
            return
        super().visit_block(b)

    def visit_func_def(self, fdef: FuncDef) -> None:
        if not self.recurse_into_functions:
            return
        with self.scope.function_scope(fdef):
            self.analyze(fdef.type, fdef)
            super().visit_func_def(fdef)

    def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None:
        if not self.recurse_into_functions:
            return
        with self.scope.function_scope(fdef):
            self.analyze(fdef.type, fdef)
            super().visit_overloaded_func_def(fdef)

    def visit_class_def(self, tdef: ClassDef) -> None:
        # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
        # check them again here.
        self.scope.enter_class(tdef.info)
        if not tdef.info.is_named_tuple:
            # Analyze base classes plus type-variable bounds/values together.
            types = list(tdef.info.bases)  # type: List[Type]
            for tvar in tdef.type_vars:
                if tvar.upper_bound:
                    types.append(tvar.upper_bound)
                if tvar.values:
                    types.extend(tvar.values)
            self.analyze_types(types, tdef.info)
            for type in tdef.info.bases:
                if tdef.info.is_protocol:
                    if not isinstance(type, Instance) or not type.type.is_protocol:
                        if type.type.fullname() != 'builtins.object':
                            self.fail('All bases of a protocol must be protocols', tdef)
        # Recompute MRO now that we have analyzed all modules, to pick
        # up superclasses of bases imported from other modules in an
        # import loop. (Only do so if we succeeded the first time.)
        if tdef.info.mro:
            tdef.info.mro = []  # Force recomputation
            mypy.semanal.calculate_class_mro(tdef, self.fail_blocker)
            if tdef.info.is_protocol:
                add_protocol_members(tdef.info)
        if tdef.analyzed is not None:
            # Also check synthetic types associated with this ClassDef.
            # Currently these are TypedDict, and NamedTuple.
            if isinstance(tdef.analyzed, TypedDictExpr):
                self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
            elif isinstance(tdef.analyzed, NamedTupleExpr):
                self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
                self.analyze_info(tdef.analyzed.info)
        super().visit_class_def(tdef)
        self.analyze_symbol_table(tdef.info.names)
        self.scope.leave()

    def visit_decorator(self, dec: Decorator) -> None:
        """Try to infer the type of the decorated function.

        This lets us resolve references to decorated functions during
        type checking when there are cyclic imports, as otherwise the
        type might not be available when we need it.

        This basically uses a simple special-purpose type inference
        engine just for decorators.
        """
        # Don't just call the super method since we don't unconditionally traverse the decorated
        # function.
        dec.var.accept(self)
        for decorator in dec.decorators:
            decorator.accept(self)
        if self.recurse_into_functions:
            dec.func.accept(self)
        if dec.var.is_property:
            # Decorators are expected to have a callable type (it's a little odd).
            if dec.func.type is None:
                dec.var.type = CallableType(
                    [AnyType(TypeOfAny.special_form)],
                    [ARG_POS],
                    [None],
                    AnyType(TypeOfAny.special_form),
                    self.builtin_type('function'),
                    name=dec.var.name())
            elif isinstance(dec.func.type, CallableType):
                dec.var.type = dec.func.type
            self.analyze(dec.var.type, dec.var)
            return
        decorator_preserves_type = True
        for expr in dec.decorators:
            preserve_type = False
            if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
                if expr.node.type and is_identity_signature(expr.node.type):
                    preserve_type = True
            if not preserve_type:
                decorator_preserves_type = False
                break
        if decorator_preserves_type:
            # No non-identity decorators left. We can trivially infer the type
            # of the function here.
            dec.var.type = function_type(dec.func, self.builtin_type('function'))
        if dec.decorators:
            return_type = calculate_return_type(dec.decorators[0])
            if return_type and isinstance(return_type, AnyType):
                # The outermost decorator will return Any so we know the type of the
                # decorated function.
                dec.var.type = AnyType(TypeOfAny.from_another_any, source_any=return_type)
            sig = find_fixed_callable_return(dec.decorators[0])
            if sig:
                # The outermost decorator always returns the same kind of function,
                # so we know that this is the type of the decorated function.
                orig_sig = function_type(dec.func, self.builtin_type('function'))
                sig.name = orig_sig.items()[0].name
                dec.var.type = sig
        self.analyze(dec.var.type, dec.var)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        """Traverse the assignment statement.

        This includes the actual assignment and synthetic types resulted from this
        assignment (if any). Currently this includes NewType, TypedDict, NamedTuple,
        and TypeVar.
        """
        self.analyze(s.type, s)
        if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed, TypeAliasExpr):
            self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
        if isinstance(s.rvalue, CallExpr):
            analyzed = s.rvalue.analyzed
            if isinstance(analyzed, NewTypeExpr):
                self.analyze(analyzed.old_type, analyzed)
                if analyzed.info:
                    # Currently NewTypes only have __init__, but to be future proof,
                    # we analyze all symbols.
                    self.analyze_info(analyzed.info)
                if analyzed.info and analyzed.info.mro:
                    analyzed.info.mro = []  # Force recomputation
                    mypy.semanal.calculate_class_mro(analyzed.info.defn, self.fail_blocker)
            if isinstance(analyzed, TypeVarExpr):
                types = []
                if analyzed.upper_bound:
                    types.append(analyzed.upper_bound)
                if analyzed.values:
                    types.extend(analyzed.values)
                self.analyze_types(types, analyzed)
            if isinstance(analyzed, TypedDictExpr):
                self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
            if isinstance(analyzed, NamedTupleExpr):
                self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
                self.analyze_info(analyzed.info)
        # We need to pay additional attention to assignments that define a type alias.
        # The resulting type is also stored in the 'type_override' attribute of
        # the corresponding SymbolTableNode.
        if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
            self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
        if isinstance(s.lvalues[0], NameExpr):
            node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True)
            if node:
                self.analyze(node.type_override, node)
        super().visit_assignment_stmt(s)

    def visit_for_stmt(self, s: ForStmt) -> None:
        self.analyze(s.index_type, s)
        super().visit_for_stmt(s)

    def visit_with_stmt(self, s: WithStmt) -> None:
        self.analyze(s.target_type, s)
        super().visit_with_stmt(s)

    def visit_cast_expr(self, e: CastExpr) -> None:
        self.analyze(e.type, e)
        super().visit_cast_expr(e)

    def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
        super().visit_reveal_type_expr(e)

    def visit_type_application(self, e: TypeApplication) -> None:
        for type in e.types:
            self.analyze(type, e)
        super().visit_type_application(e)

    # Helpers

    def perform_transform(self, node: Union[Node, SymbolTableNode],
                          transform: Callable[[Type], Type]) -> None:
        """Apply transform to all types associated with node."""
        if isinstance(node, ForStmt):
            if node.index_type:
                node.index_type = transform(node.index_type)
            self.transform_types_in_lvalue(node.index, transform)
        if isinstance(node, WithStmt):
            if node.target_type:
                node.target_type = transform(node.target_type)
            for n in node.target:
                if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
                    n.node.type = transform(n.node.type)
        if isinstance(node, (FuncDef, OverloadedFuncDef, CastExpr, AssignmentStmt,
                             TypeAliasExpr, Var)):
            assert node.type, "Scheduled patch for non-existent type"
            node.type = transform(node.type)
        if isinstance(node, NewTypeExpr):
            assert node.old_type, "Scheduled patch for non-existent type"
            node.old_type = transform(node.old_type)
            if node.info:
                new_bases = []  # type: List[Instance]
                for b in node.info.bases:
                    new_b = transform(b)
                    # TODO: this code can be combined with code in second pass.
                    if isinstance(new_b, Instance):
                        new_bases.append(new_b)
                    elif isinstance(new_b, TupleType):
                        new_bases.append(new_b.fallback)
                    else:
                        self.fail("Argument 2 to NewType(...) must be subclassable"
                                  " (got {})".format(new_b), node)
                        new_bases.append(self.builtin_type('object'))
                node.info.bases = new_bases
        if isinstance(node, TypeVarExpr):
            if node.upper_bound:
                node.upper_bound = transform(node.upper_bound)
            if node.values:
                node.values = [transform(v) for v in node.values]
        if isinstance(node, TypedDictExpr):
            assert node.info.typeddict_type, "Scheduled patch for non-existent type"
            node.info.typeddict_type = cast(TypedDictType,
                                            transform(node.info.typeddict_type))
        if isinstance(node, NamedTupleExpr):
            assert node.info.tuple_type, "Scheduled patch for non-existent type"
            node.info.tuple_type = cast(TupleType,
                                        transform(node.info.tuple_type))
        if isinstance(node, TypeApplication):
            node.types = [transform(t) for t in node.types]
        if isinstance(node, SymbolTableNode):
            assert node.type_override, "Scheduled patch for non-existent type"
            node.type_override = transform(node.type_override)
        if isinstance(node, TypeInfo):
            for tvar in node.defn.type_vars:
                if tvar.upper_bound:
                    tvar.upper_bound = transform(tvar.upper_bound)
                if tvar.values:
                    tvar.values = [transform(v) for v in tvar.values]
            new_bases = []
            for base in node.bases:
                new_base = transform(base)
                if isinstance(new_base, Instance):
                    new_bases.append(new_base)
                else:
                    # Don't fix the NamedTuple bases, they are Instance's intentionally.
                    # Patch the 'args' just in case, although generic tuple types are
                    # not supported yet.
                    alt_base = Instance(base.type, [transform(a) for a in base.args])
                    new_bases.append(alt_base)
            node.bases = new_bases

    def transform_types_in_lvalue(self, lvalue: Lvalue,
                                  transform: Callable[[Type], Type]) -> None:
        """Apply transform to variable types reachable from an lvalue expression."""
        if isinstance(lvalue, RefExpr):
            if isinstance(lvalue.node, Var):
                var = lvalue.node
                if var.type:
                    var.type = transform(var.type)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.transform_types_in_lvalue(item, transform)

    def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode],
                warn: bool = False) -> None:
        # Recursive type warnings are only emitted on type definition 'node's, marked by 'warn'
        # Flags appeared during analysis of 'type' are collected in this dict.
        indicator = {}  # type: Dict[str, bool]
        if type:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            self.check_for_omitted_generics(type)
            self.generate_type_patches(node, indicator, warn)
            if analyzer.aliases_used:
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)

    def analyze_types(self, types: List[Type], node: Node) -> None:
        # Similar to above but for nodes with multiple types.
        indicator = {}  # type: Dict[str, bool]
        for type in types:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            self.check_for_omitted_generics(type)
            if analyzer.aliases_used:
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)
        # A single patch is generated for the whole batch of types.
        self.generate_type_patches(node, indicator, warn=False)

    def analyze_symbol_table(self, names: SymbolTable) -> None:
        """Analyze types in symbol table nodes only (shallow)."""
        for node in names.values():
            if node.type_override:
                self.analyze(node.type_override, node)

    def make_scoped_patch(self, fn: Callable[[], None]) -> Callable[[], None]:
        """Wrap fn so that it runs with the current scope restored."""
        saved_scope = self.scope.save()

        def patch() -> None:
            with self.scope.saved_scope(saved_scope):
                fn()
        return patch

    def generate_type_patches(self,
                              node: Union[Node, SymbolTableNode],
                              indicator: Dict[str, bool],
                              warn: bool) -> None:
        """Schedule deferred fixups based on flags the type analyzer recorded."""
        if indicator.get('forward') or indicator.get('synthetic'):
            def patch() -> None:
                self.perform_transform(node,
                    lambda tp: tp.accept(ForwardReferenceResolver(self.fail, node, warn)))
            self.patches.append((PRIORITY_FORWARD_REF, self.make_scoped_patch(patch)))
        if indicator.get('typevar'):
            def patch() -> None:
                self.perform_transform(node,
                    lambda tp: tp.accept(TypeVariableChecker(self.fail)))
            self.patches.append((PRIORITY_TYPEVAR_VALUES, self.make_scoped_patch(patch)))

    def analyze_info(self, info: TypeInfo) -> None:
        # Similar to above but for nodes with synthetic TypeInfos (NamedTuple and NewType).
        for name in info.names:
            sym = info.names[name]
            if isinstance(sym.node, (FuncDef, Decorator)):
                self.accept(sym.node)
            if isinstance(sym.node, Var):
                self.analyze(sym.node.type, sym.node)

    def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
        """Construct a pass-3 type analyzer that records flags into `indicator`."""
        return TypeAnalyserPass3(self,
                                 self.sem.plugin,
                                 self.options,
                                 self.is_typeshed_file,
                                 indicator,
                                 self.patches)

    def check_for_omitted_generics(self, typ: Type) -> None:
        """Report bare generics if --disallow-any-generics is set (typeshed exempt)."""
        if not self.options.disallow_any_generics or self.is_typeshed_file:
            return
        for t in collect_any_types(typ):
            if t.type_of_any == TypeOfAny.from_omitted_generics:
                self.fail(messages.BARE_GENERIC, t)

    # The methods below delegate to the second-pass analyzer (self.sem),
    # which owns symbol tables and error reporting.

    def lookup_qualified(self, name: str, ctx: Context,
                         suppress_errors: bool = False) -> Optional[SymbolTableNode]:
        return self.sem.lookup_qualified(name, ctx, suppress_errors=suppress_errors)

    def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode:
        return self.sem.lookup_fully_qualified(fullname)

    def dereference_module_cross_ref(
            self, node: Optional[SymbolTableNode]) -> Optional[SymbolTableNode]:
        return self.sem.dereference_module_cross_ref(node)

    def fail(self, msg: str, ctx: Context, serious: bool = False, *,
             blocker: bool = False) -> None:
        self.sem.fail(msg, ctx, serious, blocker=blocker)

    def fail_blocker(self, msg: str, ctx: Context) -> None:
        self.fail(msg, ctx, blocker=True)

    def note(self, msg: str, ctx: Context) -> None:
        self.sem.note(msg, ctx)

    def builtin_type(self, name: str, args: Optional[List[Type]] = None) -> Instance:
        """Return an Instance of builtins.<name>; Any fills omitted type args."""
        names = self.modules['builtins']
        sym = names.names[name]
        node = sym.node
        assert isinstance(node, TypeInfo)
        if args:
            # TODO: assert len(args) == len(node.defn.type_vars)
            return Instance(node, args)
        any_type = AnyType(TypeOfAny.special_form)
        return Instance(node, [any_type] * len(node.defn.type_vars))
class DependencyVisitor(TraverserVisitor):
    """AST visitor that collects fine-grained dependencies (trigger -> targets)."""

    def __init__(self,
                 type_map: Dict[Expression, Type],
                 python_version: Tuple[int, int],
                 alias_deps: 'DefaultDict[str, Set[str]]') -> None:
        self.scope = Scope()
        self.type_map = type_map
        self.python2 = python_version[0] == 2
        # This attribute holds a mapping from target to names of type aliases
        # it depends on. These need to be processed specially, since they are
        # only present in expanded form in symbol tables. For example, after:
        #     A = List[int]
        #     x: A
        # The module symbol table will just have a Var `x` with type `List[int]`,
        # and the dependency of `x` on `A` is lost. Therefore the alias dependencies
        # are preserved at alias expansion points in `semanal.py`, stored as an attribute
        # on MypyFile, and then passed here.
        self.alias_deps = alias_deps
        # Accumulated result: trigger name -> set of affected targets.
        self.map = {}  # type: Dict[str, Set[str]]
        # True while visiting a class body (affects method signature binding).
        self.is_class = False
        self.is_package_init_file = False

    # TODO (incomplete):
    #   from m import *
    #   await
    #   protocols
    #   metaclasses
    #   functional enum
    #   type variable with value restriction

    def visit_mypy_file(self, o: MypyFile) -> None:
        self.scope.enter_file(o.fullname())
        self.is_package_init_file = o.is_package_init_file()
        self.add_type_alias_deps(self.scope.current_target())
        super().visit_mypy_file(o)
        self.scope.leave()

    def visit_func_def(self, o: FuncDef) -> None:
        self.scope.enter_function(o)
        target = self.scope.current_target()
        if o.type:
            if self.is_class and isinstance(o.type, FunctionLike):
                # Method signatures are bound (drop `self`) before extracting triggers.
                signature = bind_self(o.type)  # type: Type
            else:
                signature = o.type
            for trigger in get_type_triggers(signature):
                self.add_dependency(trigger)
                self.add_dependency(trigger, target=make_trigger(target))
        if o.info:
            # An override depends on the definitions in all non-trivial bases.
            for base in non_trivial_bases(o.info):
                self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
        self.add_type_alias_deps(self.scope.current_target())
        super().visit_func_def(o)
        self.scope.leave()

    def visit_decorator(self, o: Decorator) -> None:
        self.add_dependency(make_trigger(o.func.fullname()))
        super().visit_decorator(o)

    def visit_class_def(self, o: ClassDef) -> None:
        self.scope.enter_class(o.info)
        target = self.scope.current_full_target()
        self.add_dependency(make_trigger(target), target)
        old_is_class = self.is_class
        self.is_class = True
        # Add dependencies to type variables of a generic class.
        for tv in o.type_vars:
            self.add_dependency(make_trigger(tv.fullname), target)
        # Add dependencies to base types.
        for base in o.info.bases:
            self.add_type_dependencies(base, target=target)
        if o.info.tuple_type:
            self.add_type_dependencies(o.info.tuple_type, target=make_trigger(target))
        if o.info.typeddict_type:
            self.add_type_dependencies(o.info.typeddict_type, target=make_trigger(target))
        # TODO: Add dependencies based on remaining TypeInfo attributes.
        super().visit_class_def(o)
        self.add_type_alias_deps(self.scope.current_target())
        self.is_class = old_is_class
        info = o.info
        for name, node in info.names.items():
            if isinstance(node.node, Var):
                for base_info in non_trivial_bases(info):
                    # If the type of an attribute changes in a base class, we make references
                    # to the attribute in the subclass stale.
                    self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                        target=make_trigger(info.fullname() + '.' + name))
        for base_info in non_trivial_bases(info):
            # Inherited members: a change in the base member invalidates the
            # corresponding member of the subclass.
            for name, node in base_info.names.items():
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
            self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                                target=make_trigger(info.fullname() + '.__init__'))
        self.scope.leave()

    def visit_import(self, o: Import) -> None:
        for id, as_id in o.ids:
            # TODO: as_id
            self.add_dependency(make_trigger(id), self.scope.current_target())

    def visit_import_from(self, o: ImportFrom) -> None:
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        for name, as_name in o.names:
            self.add_dependency(make_trigger(module_id + '.' + name))

    def visit_block(self, o: Block) -> None:
        # Unreachable code was not type checked; generate no dependencies for it.
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
        # TODO: Implement all assignment special forms, including these:
        #   Enum
        rvalue = o.rvalue
        if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
            # TODO: Support type variable value restriction
            analyzed = rvalue.analyzed
            self.add_type_dependencies(analyzed.upper_bound,
                                       target=make_trigger(analyzed.fullname()))
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
            # Depend on types of named tuple items.
            info = rvalue.analyzed.info
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            for name, symnode in info.names.items():
                if not name.startswith('_') and isinstance(symnode.node, Var):
                    typ = symnode.node.type
                    if typ:
                        self.add_type_dependencies(typ)
                        self.add_type_dependencies(typ, target=make_trigger(prefix))
                        attr_target = make_trigger('%s.%s' % (prefix, name))
                        self.add_type_dependencies(typ, target=attr_target)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr):
            # Depend on the underlying typeddict type
            info = rvalue.analyzed.info
            assert info.typeddict_type is not None
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix))
        elif o.is_alias_def:
            assert len(o.lvalues) == 1
            lvalue = o.lvalues[0]
            assert isinstance(lvalue, NameExpr)
            # TODO: get rid of this extra dependency from __init__ to alias definition scope
            typ = self.type_map.get(lvalue)
            if isinstance(typ, FunctionLike) and typ.is_type_obj():
                class_name = typ.type_object().fullname()
                self.add_dependency(make_trigger(class_name + '.__init__'))
            if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
                self.add_type_dependencies(rvalue.analyzed.type)
        else:
            # Normal assignment
            super().visit_assignment_stmt(o)
            for lvalue in o.lvalues:
                self.process_lvalue(lvalue)
            # For chained assignments (a = b = rv), each tuple lvalue iterates the
            # value to its right.
            items = o.lvalues + [rvalue]
            for i in range(len(items) - 1):
                lvalue = items[i]
                rvalue = items[i + 1]
                if isinstance(lvalue, TupleExpr):
                    self.add_attribute_dependency_for_expr(rvalue, '__iter__')
            if o.type:
                for trigger in get_type_triggers(o.type):
                    self.add_dependency(trigger)

    def process_lvalue(self, lvalue: Expression) -> None:
        """Generate additional dependencies for an lvalue."""
        if isinstance(lvalue, IndexExpr):
            self.add_operator_method_dependency(lvalue.base, '__setitem__')
        elif isinstance(lvalue, NameExpr):
            if lvalue.kind in (MDEF, GDEF):
                # Assignment to an attribute in the class body, or direct assignment to a
                # global variable.
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = get_type_triggers(lvalue_type)
                attr_trigger = make_trigger('%s.%s' % (self.scope.current_full_target(),
                                                       lvalue.name))
                for type_trigger in type_triggers:
                    self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, MemberExpr):
            if lvalue.kind is None:
                # Reference to a non-module attribute
                if lvalue.expr not in self.type_map:
                    # Unreachable assignment -> not checked so no dependencies to generate.
                    return
                object_type = self.type_map[lvalue.expr]
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = get_type_triggers(lvalue_type)
                for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
                    for type_trigger in type_triggers:
                        self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.process_lvalue(item)
        # TODO: star lvalue

    def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type:
        """Return the lvalue's type, resolving partial types to something usable."""
        if lvalue not in self.type_map:
            # Likely a block considered unreachable during type checking.
            return UninhabitedType()
        lvalue_type = self.type_map[lvalue]
        if isinstance(lvalue_type, PartialType):
            if isinstance(lvalue.node, Var) and lvalue.node.type:
                lvalue_type = lvalue.node.type
            else:
                # Probably a secondary, non-definition assignment that doesn't
                # result in a non-partial type. We won't be able to infer any
                # dependencies from this so just return something. (The first,
                # definition assignment with a partial type is handled
                # differently, in the semantic analyzer.)
                assert not lvalue.is_new_def
                return UninhabitedType()
        return lvalue_type

    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
        super().visit_operator_assignment_stmt(o)
        self.process_lvalue(o.lvalue)
        method = op_methods[o.op]
        self.add_attribute_dependency_for_expr(o.lvalue, method)
        if o.op in ops_with_inplace_method:
            # E.g. '__add__' -> '__iadd__'.
            inplace_method = '__i' + method[2:]
            self.add_attribute_dependency_for_expr(o.lvalue, inplace_method)

    def visit_for_stmt(self, o: ForStmt) -> None:
        super().visit_for_stmt(o)
        # __getitem__ is only used if __iter__ is missing but for simplicity we
        # just always depend on both.
        self.add_attribute_dependency_for_expr(o.expr, '__iter__')
        self.add_attribute_dependency_for_expr(o.expr, '__getitem__')
        self.process_lvalue(o.index)
        if isinstance(o.index, TupleExpr):
            # Process multiple assignment to index variables.
            item_type = o.inferred_item_type
            if item_type:
                # This is similar to above.
                self.add_attribute_dependency(item_type, '__iter__')
                self.add_attribute_dependency(item_type, '__getitem__')
        if o.index_type:
            self.add_type_dependencies(o.index_type)

    def visit_with_stmt(self, o: WithStmt) -> None:
        super().visit_with_stmt(o)
        for e in o.expr:
            self.add_attribute_dependency_for_expr(e, '__enter__')
            self.add_attribute_dependency_for_expr(e, '__exit__')
        if o.target_type:
            self.add_type_dependencies(o.target_type)

    def visit_print_stmt(self, o: PrintStmt) -> None:
        # Python 2 only.
        super().visit_print_stmt(o)
        if o.target:
            self.add_attribute_dependency_for_expr(o.target, 'write')

    def visit_del_stmt(self, o: DelStmt) -> None:
        super().visit_del_stmt(o)
        if isinstance(o.expr, IndexExpr):
            self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__')

    # Expressions

    def process_global_ref_expr(self, o: RefExpr) -> None:
        if o.fullname is not None:
            self.add_dependency(make_trigger(o.fullname))
        # If this is a reference to a type, generate a dependency to its
        # constructor.
        # TODO: avoid generating spurious dependencies for isinstance checks,
        #       except statements, class attribute reference, etc, if perf problem.
        typ = self.type_map.get(o)
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            class_name = typ.type_object().fullname()
            self.add_dependency(make_trigger(class_name + '.__init__'))

    def visit_name_expr(self, o: NameExpr) -> None:
        if o.kind == LDEF:
            # We don't track dependencies to local variables, since they
            # aren't externally visible.
            return
        if o.kind == MDEF:
            # Direct reference to member is only possible in the scope that
            # defined the name, so no dependency is required.
            return
        self.process_global_ref_expr(o)

    def visit_member_expr(self, e: MemberExpr) -> None:
        super().visit_member_expr(e)
        if e.kind is not None:
            # Reference to a module attribute
            self.process_global_ref_expr(e)
        else:
            # Reference to a non-module attribute
            if e.expr not in self.type_map:
                # No type available -- this happens for unreachable code. Since it's unreachable,
                # it wasn't type checked and we don't need to generate dependencies.
                return
            typ = self.type_map[e.expr]
            self.add_attribute_dependency(typ, e.name)

    def visit_super_expr(self, e: SuperExpr) -> None:
        super().visit_super_expr(e)
        if e.info is not None:
            self.add_dependency(make_trigger(e.info.fullname() + '.' + e.name))

    def visit_call_expr(self, e: CallExpr) -> None:
        super().visit_call_expr(e)

    def visit_cast_expr(self, e: CastExpr) -> None:
        super().visit_cast_expr(e)
        self.add_type_dependencies(e.type)

    def visit_type_application(self, e: TypeApplication) -> None:
        super().visit_type_application(e)
        for typ in e.types:
            self.add_type_dependencies(typ)

    def visit_index_expr(self, e: IndexExpr) -> None:
        super().visit_index_expr(e)
        self.add_operator_method_dependency(e.base, '__getitem__')

    def visit_unary_expr(self, e: UnaryExpr) -> None:
        super().visit_unary_expr(e)
        if e.op not in unary_op_methods:
            return
        method = unary_op_methods[e.op]
        self.add_operator_method_dependency(e.expr, method)

    def visit_op_expr(self, e: OpExpr) -> None:
        super().visit_op_expr(e)
        self.process_binary_op(e.op, e.left, e.right)

    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
        super().visit_comparison_expr(e)
        for i, op in enumerate(e.operators):
            left = e.operands[i]
            right = e.operands[i + 1]
            self.process_binary_op(op, left, right)
            if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='):
                # Python 2 comparisons may fall back to __cmp__.
                self.add_operator_method_dependency(left, '__cmp__')
                self.add_operator_method_dependency(right, '__cmp__')

    def process_binary_op(self, op: str, left: Expression, right: Expression) -> None:
        method = op_methods.get(op)
        if method:
            if op == 'in':
                # `x in y` dispatches on the right operand (__contains__).
                self.add_operator_method_dependency(right, method)
            else:
                self.add_operator_method_dependency(left, method)
                rev_method = reverse_op_methods.get(method)
                if rev_method:
                    self.add_operator_method_dependency(right, rev_method)

    def add_operator_method_dependency(self, e: Expression, method: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_operator_method_dependency_for_type(typ, method)

    def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
        # Note that operator methods can't be (non-metaclass) methods of type objects
        # (that is, TypeType objects or Callables representing a type).
        # TODO: TypedDict
        # TODO: metaclasses
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.fallback
        if isinstance(typ, Instance):
            trigger = make_trigger(typ.type.fullname() + '.' + method)
            self.add_dependency(trigger)
        elif isinstance(typ, UnionType):
            for item in typ.items:
                self.add_operator_method_dependency_for_type(item, method)

    def visit_generator_expr(self, e: GeneratorExpr) -> None:
        super().visit_generator_expr(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
        super().visit_dictionary_comprehension(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_star_expr(self, e: StarExpr) -> None:
        super().visit_star_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        super().visit_yield_from_expr(e)
        self.add_iter_dependency(e.expr)

    # Helpers

    def add_type_alias_deps(self, target: str) -> None:
        # Type aliases are special, because some of the dependencies are calculated
        # in semanal.py, before they are expanded.
        if target in self.alias_deps:
            for alias in self.alias_deps[target]:
                self.add_dependency(make_trigger(alias))

    def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
        """Add dependency from trigger to a target.

        If the target is not given explicitly, use the current target.
        """
        if trigger.startswith(('<builtins.', '<typing.')):
            # Don't track dependencies to certain builtins to keep the size of
            # the dependencies manageable. These dependencies should only
            # change on mypy version updates, which will require a full rebuild
            # anyway.
            return
        if target is None:
            target = self.scope.current_target()
        self.map.setdefault(trigger, set()).add(target)

    def add_type_dependencies(self, typ: Type, target: Optional[str] = None) -> None:
        """Add dependencies to all components of a type.

        Args:
            target: If not None, override the default (current) target of the
                generated dependency.
        """
        # TODO: Use this method in more places where get_type_triggers() + add_dependency()
        #       are called together.
        for trigger in get_type_triggers(typ):
            self.add_dependency(trigger, target)

    def add_attribute_dependency(self, typ: Type, name: str) -> None:
        """Add dependencies for accessing a named attribute of a type."""
        targets = self.attribute_triggers(typ, name)
        for target in targets:
            self.add_dependency(target)

    def attribute_triggers(self, typ: Type, name: str) -> List[str]:
        """Return all triggers associated with the attribute of a type."""
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.fallback
        if isinstance(typ, Instance):
            member = '%s.%s' % (typ.type.fullname(), name)
            return [make_trigger(member)]
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            member = '%s.%s' % (typ.type_object().fullname(), name)
            return [make_trigger(member)]
        elif isinstance(typ, UnionType):
            targets = []
            for item in typ.items:
                targets.extend(self.attribute_triggers(item, name))
            return targets
        elif isinstance(typ, TypeType):
            # TODO: Metaclass attribute lookup
            return self.attribute_triggers(typ.item, name)
        else:
            return []

    def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_attribute_dependency(typ, name)

    def add_iter_dependency(self, node: Expression) -> None:
        typ = self.type_map.get(node)
        if typ:
            self.add_attribute_dependency(typ, '__iter__')
class TypeArgumentAnalyzer(MixedTraverserVisitor):
    """Verify that type arguments satisfy TypeVar value restrictions and upper bounds.

    Runs after the main semantic analyzer pass (which is assumed to have
    already checked type argument counts).
    """

    def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None:
        self.errors = errors
        self.options = options
        self.is_typeshed_file = is_typeshed_file
        self.scope = Scope()
        # Should we also analyze function definitions, or only module top-levels?
        self.recurse_into_functions = True
        # Keep track of the type aliases already visited. This is needed to avoid
        # infinite recursion on types like A = Union[int, List[A]].
        self.seen_aliases: Set[TypeAliasType] = set()

    def visit_mypy_file(self, o: MypyFile) -> None:
        self.errors.set_file(o.path, o.fullname, scope=self.scope)
        with self.scope.module_scope(o.fullname):
            super().visit_mypy_file(o)

    def visit_func(self, defn: FuncItem) -> None:
        if not self.recurse_into_functions:
            return
        with self.scope.function_scope(defn):
            super().visit_func(defn)

    def visit_class_def(self, defn: ClassDef) -> None:
        with self.scope.class_scope(defn.info):
            super().visit_class_def(defn)

    def visit_block(self, o: Block) -> None:
        # Unreachable blocks were not type checked, so skip them.
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_type_alias_type(self, t: TypeAliasType) -> None:
        super().visit_type_alias_type(t)
        if t in self.seen_aliases:
            # Avoid infinite recursion on recursive type aliases.
            # Note: it is fine to skip the aliases we have already seen in non-recursive
            # types, since errors there have already been reported.
            return
        self.seen_aliases.add(t)
        get_proper_type(t).accept(self)

    def visit_instance(self, t: Instance) -> None:
        # Type argument counts were checked in the main semantic analyzer pass. We assume
        # that the counts are correct here.
        info = t.type
        if isinstance(info, FakeInfo):
            return  # https://github.com/python/mypy/issues/11079
        for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
            if isinstance(tvar, TypeVarType):
                if isinstance(arg, ParamSpecType):
                    # TODO: Better message
                    self.fail(f'Invalid location for ParamSpec "{arg.name}"', t)
                    continue
                if tvar.values:
                    if isinstance(arg, TypeVarType):
                        # A TypeVar used as an argument must itself be restricted,
                        # otherwise it can't satisfy the value restriction.
                        arg_values = arg.values
                        if not arg_values:
                            self.fail(
                                message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(
                                    arg.name, info.name),
                                t, code=codes.TYPE_VAR)
                            continue
                    else:
                        arg_values = [arg]
                    self.check_type_var_values(info, arg_values, tvar.name, tvar.values,
                                               i + 1, t)
                if not is_subtype(arg, tvar.upper_bound):
                    self.fail(
                        message_registry.INVALID_TYPEVAR_ARG_BOUND.format(
                            format_type(arg), info.name, format_type(tvar.upper_bound)),
                        t, code=codes.TYPE_VAR)
        super().visit_instance(t)

    def visit_unpack_type(self, typ: UnpackType) -> None:
        # Only tuples (fixed-length or builtins.tuple) may be unpacked.
        proper_type = get_proper_type(typ.type)
        if isinstance(proper_type, TupleType):
            return
        if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple":
            return
        self.fail(message_registry.INVALID_UNPACK.format(proper_type), typ)

    def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
                              valids: List[Type], arg_number: int, context: Context) -> None:
        """Report an error for each actual type not matching a value restriction."""
        for actual in get_proper_types(actuals):
            if (not isinstance(actual, AnyType) and
                    not any(is_same_type(actual, value) for value in valids)):
                if len(actuals) > 1 or not isinstance(actual, Instance):
                    self.fail(
                        message_registry.INVALID_TYPEVAR_ARG_VALUE.format(type.name),
                        context, code=codes.TYPE_VAR)
                else:
                    class_name = '"{}"'.format(type.name)
                    actual_type_name = '"{}"'.format(actual.type.name)
                    self.fail(
                        message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format(
                            arg_name, class_name, actual_type_name),
                        context, code=codes.TYPE_VAR)

    def fail(self, msg: str, context: Context, *, code: Optional[ErrorCode] = None) -> None:
        """Report an error at the location of the given context."""
        self.errors.report(context.get_line(), context.get_column(), msg, code=code)
class DependencyVisitor(TraverserVisitor):
    """Visitor that collects fine-grained dependencies.

    The result is available as a mapping from triggers to sets of targets in
    the ``map`` attribute after visiting a file.
    """

    def __init__(self,
                 type_map: Dict[Expression, Type],
                 python_version: Tuple[int, int],
                 alias_deps: 'DefaultDict[str, Set[str]]',
                 options: Optional[Options] = None) -> None:
        self.scope = Scope()
        self.type_map = type_map
        self.python2 = python_version[0] == 2
        # This attribute holds a mapping from target to names of type aliases
        # it depends on. These need to be processed specially, since they are
        # only present in expanded form in symbol tables. For example, after:
        #     A = List[int]
        #     x: A
        # The module symbol table will just have a Var `x` with type `List[int]`,
        # and the dependency of `x` on `A` is lost. Therefore the alias dependencies
        # are preserved at alias expansion points in `semanal.py`, stored as an attribute
        # on MypyFile, and then passed here.
        self.alias_deps = alias_deps
        # Map from trigger to the set of targets it should invalidate.
        self.map = {}  # type: Dict[str, Set[str]]
        self.is_class = False
        self.is_package_init_file = False
        self.options = options

    def visit_mypy_file(self, o: MypyFile) -> None:
        self.scope.enter_file(o.fullname())
        self.is_package_init_file = o.is_package_init_file()
        self.add_type_alias_deps(self.scope.current_target())
        # Merge dependencies contributed by plugins.
        for trigger, targets in o.plugin_deps.items():
            self.map.setdefault(trigger, set()).update(targets)
        super().visit_mypy_file(o)
        self.scope.leave()

    def visit_func_def(self, o: FuncDef) -> None:
        self.scope.enter_function(o)
        target = self.scope.current_target()
        if o.type:
            if self.is_class and isinstance(o.type, FunctionLike):
                signature = bind_self(o.type)  # type: Type
            else:
                signature = o.type
            for trigger in self.get_type_triggers(signature):
                self.add_dependency(trigger)
                self.add_dependency(trigger, target=make_trigger(target))
        if o.info:
            for base in non_trivial_bases(o.info):
                # Base class __init__/__new__ doesn't generate a logical
                # dependency since the override can be incompatible.
                if not self.use_logical_deps() or o.name() not in ('__init__', '__new__'):
                    self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
        self.add_type_alias_deps(self.scope.current_target())
        super().visit_func_def(o)
        # Also visit decorator-generated variants (e.g. overloads) of this function.
        variants = set(o.expanded) - {o}
        for ex in variants:
            if isinstance(ex, FuncDef):
                super().visit_func_def(ex)
        self.scope.leave()

    def visit_decorator(self, o: Decorator) -> None:
        if not self.use_logical_deps():
            # We don't need to recheck outer scope for an overload, only overload itself.
            # Also if any decorator is nested, it is not externally visible, so we don't need
            # to generate dependency.
            if not o.func.is_overload and self.scope.current_function_name() is None:
                self.add_dependency(make_trigger(o.func.fullname()))
        else:
            # Add logical dependencies from decorators to the function. For example,
            # if we have
            #     @dec
            #     def func(): ...
            # then if `dec` is unannotated, then it will "spoil" `func` and consequently
            # all call sites, making them all `Any`.
            for d in o.decorators:
                tname = None  # type: Optional[str]
                if isinstance(d, RefExpr) and d.fullname is not None:
                    tname = d.fullname
                if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and
                        d.callee.fullname is not None):
                    tname = d.callee.fullname
                if tname is not None:
                    self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname()))
        super().visit_decorator(o)

    def visit_class_def(self, o: ClassDef) -> None:
        self.scope.enter_class(o.info)
        target = self.scope.current_full_target()
        self.add_dependency(make_trigger(target), target)
        old_is_class = self.is_class
        self.is_class = True
        # Add dependencies to type variables of a generic class.
        for tv in o.type_vars:
            self.add_dependency(make_trigger(tv.fullname), target)
        self.process_type_info(o.info)
        super().visit_class_def(o)
        self.is_class = old_is_class
        self.scope.leave()

    def visit_newtype_expr(self, o: NewTypeExpr) -> None:
        if o.info:
            self.scope.enter_class(o.info)
            self.process_type_info(o.info)
            self.scope.leave()

    def process_type_info(self, info: TypeInfo) -> None:
        """Add dependencies for the definition of a class (or NewType)."""
        target = self.scope.current_full_target()
        for base in info.bases:
            self.add_type_dependencies(base, target=target)
        if info.tuple_type:
            self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
        if info.typeddict_type:
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
        if info.declared_metaclass:
            self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
        if info.is_protocol:
            for base_info in info.mro[:-1]:
                # We add dependencies from whole MRO to cover explicit subprotocols.
                # For example:
                #     class Super(Protocol):
                #         x: int
                #     class Sub(Super, Protocol):
                #         y: int
                #
                # In this example we add <Super[wildcard]> -> <Sub>, to invalidate Sub if
                # a new member is added to Super.
                self.add_dependency(make_wildcard_trigger(base_info.fullname()),
                                    target=make_trigger(target))
                # More protocol dependencies are collected in TypeState._snapshot_protocol_deps
                # after a full run or update is finished.
        self.add_type_alias_deps(self.scope.current_target())
        for name, node in info.names.items():
            if isinstance(node.node, Var):
                # Recheck Liskov if needed, self definitions are checked in the defining method
                if node.node.is_initialized_in_class and has_user_bases(info):
                    self.add_dependency(make_trigger(info.fullname() + '.' + name))
                for base_info in non_trivial_bases(info):
                    # If the type of an attribute changes in a base class, we make references
                    # to the attribute in the subclass stale.
                    self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                        target=make_trigger(info.fullname() + '.' + name))
        for base_info in non_trivial_bases(info):
            for name, node in base_info.names.items():
                if self.use_logical_deps():
                    # Skip logical dependency if an attribute is not overridden. For example,
                    # in case of:
                    #     class Base:
                    #         x = 1
                    #         y = 2
                    #     class Sub(Base):
                    #         x = 3
                    # we skip <Base.y> -> <Child.y>, because even if `y` is unannotated it
                    # doesn't affect precision of Liskov checking.
                    if name not in info.names:
                        continue
                    # __init__ and __new__ can be overridden with different signatures, so no
                    # logical dependency.
                    if name in ('__init__', '__new__'):
                        continue
                self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                    target=make_trigger(info.fullname() + '.' + name))
            if not self.use_logical_deps():
                # These dependencies are only useful for propagating changes --
                # they aren't logical dependencies since __init__ and __new__ can be
                # overridden with a different signature.
                self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                                    target=make_trigger(info.fullname() + '.__init__'))
                self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                                    target=make_trigger(info.fullname() + '.__new__'))
                # If the set of abstract attributes change, this may invalidate class
                # instantiation, or change the generated error message, since Python checks
                # class abstract status when creating an instance.
                #
                # TODO: We should probably add this dependency only from the __init__ of the
                #     current class, and independent of bases (to trigger changes in message
                #     wording, as errors may enumerate all abstract attributes).
                self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'),
                                    target=make_trigger(info.fullname() + '.__init__'))
                # If the base class abstract attributes change, subclass abstract
                # attributes need to be recalculated.
                self.add_dependency(make_trigger(base_info.fullname() + '.(abstract)'))

    def visit_import(self, o: Import) -> None:
        for id, as_id in o.ids:
            self.add_dependency(make_trigger(id), self.scope.current_target())

    def visit_import_from(self, o: ImportFrom) -> None:
        if self.use_logical_deps():
            # Just importing a name doesn't create a logical dependency.
            return
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        for name, as_name in o.names:
            self.add_dependency(make_trigger(module_id + '.' + name))

    def visit_import_all(self, o: ImportAll) -> None:
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        # The current target needs to be rechecked if anything "significant" changes in the
        # target module namespace (as the imported definitions will need to be updated).
        self.add_dependency(make_wildcard_trigger(module_id))

    def visit_block(self, o: Block) -> None:
        # Unreachable blocks were not type checked; generate no dependencies.
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
        rvalue = o.rvalue
        if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
            analyzed = rvalue.analyzed
            self.add_type_dependencies(analyzed.upper_bound,
                                       target=make_trigger(analyzed.fullname()))
            for val in analyzed.values:
                self.add_type_dependencies(val, target=make_trigger(analyzed.fullname()))
            # We need to re-analyze the definition if bound or value is deleted.
            super().visit_call_expr(rvalue)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
            # Depend on types of named tuple items.
            info = rvalue.analyzed.info
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            for name, symnode in info.names.items():
                if not name.startswith('_') and isinstance(symnode.node, Var):
                    typ = symnode.node.type
                    if typ:
                        self.add_type_dependencies(typ)
                        self.add_type_dependencies(typ, target=make_trigger(prefix))
                        attr_target = make_trigger('%s.%s' % (prefix, name))
                        self.add_type_dependencies(typ, target=attr_target)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr):
            # Depend on the underlying typeddict type
            info = rvalue.analyzed.info
            assert info.typeddict_type is not None
            prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix))
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr):
            # Enum values are currently not checked, but for future we add the deps on them
            for name, symnode in rvalue.analyzed.info.names.items():
                if isinstance(symnode.node, Var) and symnode.node.type:
                    self.add_type_dependencies(symnode.node.type)
        elif o.is_alias_def:
            assert len(o.lvalues) == 1
            lvalue = o.lvalues[0]
            assert isinstance(lvalue, NameExpr)
            # TODO: get rid of this extra dependency from __init__ to alias definition scope
            typ = self.type_map.get(lvalue)
            if isinstance(typ, FunctionLike) and typ.is_type_obj():
                class_name = typ.type_object().fullname()
                self.add_dependency(make_trigger(class_name + '.__init__'))
                self.add_dependency(make_trigger(class_name + '.__new__'))
            if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
                self.add_type_dependencies(rvalue.analyzed.type)
            elif typ:
                self.add_type_dependencies(typ)
        else:
            # Normal assignment
            super().visit_assignment_stmt(o)
            for lvalue in o.lvalues:
                self.process_lvalue(lvalue)
            items = o.lvalues + [rvalue]
            for i in range(len(items) - 1):
                lvalue = items[i]
                rvalue = items[i + 1]
                if isinstance(lvalue, TupleExpr):
                    self.add_attribute_dependency_for_expr(rvalue, '__iter__')
            if o.type:
                for trigger in self.get_type_triggers(o.type):
                    self.add_dependency(trigger)
        if self.use_logical_deps() and o.unanalyzed_type is None:
            # Special case: for definitions without an explicit type like this:
            #     x = func(...)
            # we add a logical dependency <func> -> <x>, because if `func` is not annotated,
            # then it will make all points of use of `x` unchecked.
            if (isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr)
                    and rvalue.callee.fullname is not None):
                fname = None  # type: Optional[str]
                if isinstance(rvalue.callee.node, TypeInfo):
                    # use actual __init__ as a dependency source
                    init = rvalue.callee.node.get('__init__')
                    if init and isinstance(init.node, FuncBase):
                        fname = init.node.fullname()
                else:
                    fname = rvalue.callee.fullname
                if fname is None:
                    return
                for lv in o.lvalues:
                    if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def:
                        if lv.kind == LDEF:
                            return  # local definitions don't generate logical deps
                        self.add_dependency(make_trigger(fname), make_trigger(lv.fullname))

    def process_lvalue(self, lvalue: Expression) -> None:
        """Generate additional dependencies for an lvalue."""
        if isinstance(lvalue, IndexExpr):
            self.add_operator_method_dependency(lvalue.base, '__setitem__')
        elif isinstance(lvalue, NameExpr):
            if lvalue.kind in (MDEF, GDEF):
                # Assignment to an attribute in the class body, or direct assignment to a
                # global variable.
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = self.get_type_triggers(lvalue_type)
                attr_trigger = make_trigger('%s.%s' % (self.scope.current_full_target(),
                                                       lvalue.name))
                for type_trigger in type_triggers:
                    self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, MemberExpr):
            if self.is_self_member_ref(lvalue) and lvalue.is_new_def:
                node = lvalue.node
                if isinstance(node, Var):
                    info = node.info
                    if info and has_user_bases(info):
                        # Recheck Liskov for self definitions
                        self.add_dependency(make_trigger(info.fullname() + '.' + lvalue.name))
            if lvalue.kind is None:
                # Reference to a non-module attribute
                if lvalue.expr not in self.type_map:
                    # Unreachable assignment -> not checked so no dependencies to generate.
                    return
                object_type = self.type_map[lvalue.expr]
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = self.get_type_triggers(lvalue_type)
                for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
                    for type_trigger in type_triggers:
                        self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.process_lvalue(item)
        elif isinstance(lvalue, StarExpr):
            self.process_lvalue(lvalue.expr)

    def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
        """Does memberexpr refer to an attribute of self?"""
        if not isinstance(memberexpr.expr, NameExpr):
            return False
        node = memberexpr.expr.node
        return isinstance(node, Var) and node.is_self

    def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type:
        """Return the lvalue's type, resolving partial types where possible."""
        if lvalue not in self.type_map:
            # Likely a block considered unreachable during type checking.
            return UninhabitedType()
        lvalue_type = self.type_map[lvalue]
        if isinstance(lvalue_type, PartialType):
            if isinstance(lvalue.node, Var) and lvalue.node.type:
                lvalue_type = lvalue.node.type
            else:
                # Probably a secondary, non-definition assignment that doesn't
                # result in a non-partial type. We won't be able to infer any
                # dependencies from this so just return something. (The first,
                # definition assignment with a partial type is handled
                # differently, in the semantic analyzer.)
                assert not lvalue.is_new_def
                return UninhabitedType()
        return lvalue_type

    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
        super().visit_operator_assignment_stmt(o)
        self.process_lvalue(o.lvalue)
        method = op_methods[o.op]
        self.add_attribute_dependency_for_expr(o.lvalue, method)
        if o.op in ops_with_inplace_method:
            # Also depend on the in-place variant (e.g. __iadd__ for +=).
            inplace_method = '__i' + method[2:]
            self.add_attribute_dependency_for_expr(o.lvalue, inplace_method)

    def visit_for_stmt(self, o: ForStmt) -> None:
        super().visit_for_stmt(o)
        if not o.is_async:
            # __getitem__ is only used if __iter__ is missing but for simplicity we
            # just always depend on both.
            self.add_attribute_dependency_for_expr(o.expr, '__iter__')
            self.add_attribute_dependency_for_expr(o.expr, '__getitem__')
            if o.inferred_iterator_type:
                if self.python2:
                    method = 'next'
                else:
                    method = '__next__'
                self.add_attribute_dependency(o.inferred_iterator_type, method)
        else:
            self.add_attribute_dependency_for_expr(o.expr, '__aiter__')
            if o.inferred_iterator_type:
                self.add_attribute_dependency(o.inferred_iterator_type, '__anext__')
        self.process_lvalue(o.index)
        if isinstance(o.index, TupleExpr):
            # Process multiple assignment to index variables.
            item_type = o.inferred_item_type
            if item_type:
                # This is similar to above.
                self.add_attribute_dependency(item_type, '__iter__')
                self.add_attribute_dependency(item_type, '__getitem__')
        if o.index_type:
            self.add_type_dependencies(o.index_type)

    def visit_with_stmt(self, o: WithStmt) -> None:
        super().visit_with_stmt(o)
        for e in o.expr:
            if not o.is_async:
                self.add_attribute_dependency_for_expr(e, '__enter__')
                self.add_attribute_dependency_for_expr(e, '__exit__')
            else:
                self.add_attribute_dependency_for_expr(e, '__aenter__')
                self.add_attribute_dependency_for_expr(e, '__aexit__')
        for typ in o.analyzed_types:
            self.add_type_dependencies(typ)

    def visit_print_stmt(self, o: PrintStmt) -> None:
        # Python 2 print with a target file depends on its write method.
        super().visit_print_stmt(o)
        if o.target:
            self.add_attribute_dependency_for_expr(o.target, 'write')

    def visit_del_stmt(self, o: DelStmt) -> None:
        super().visit_del_stmt(o)
        if isinstance(o.expr, IndexExpr):
            self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__')

    # Expressions

    def process_global_ref_expr(self, o: RefExpr) -> None:
        if o.fullname is not None:
            self.add_dependency(make_trigger(o.fullname))
        # If this is a reference to a type, generate a dependency to its
        # constructor.
        # IDEA: Avoid generating spurious dependencies for except statements,
        #     class attribute references, etc., if performance is a problem.
        typ = self.type_map.get(o)
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            class_name = typ.type_object().fullname()
            self.add_dependency(make_trigger(class_name + '.__init__'))
            self.add_dependency(make_trigger(class_name + '.__new__'))

    def visit_name_expr(self, o: NameExpr) -> None:
        if o.kind == LDEF:
            # We don't track dependencies to local variables, since they
            # aren't externally visible.
            return
        if o.kind == MDEF:
            # Direct reference to member is only possible in the scope that
            # defined the name, so no dependency is required.
            return
        self.process_global_ref_expr(o)

    def visit_member_expr(self, e: MemberExpr) -> None:
        if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, TypeInfo):
            # Special case class attribute so that we don't depend on "__init__".
            self.add_dependency(make_trigger(e.expr.node.fullname()))
        else:
            super().visit_member_expr(e)
        if e.kind is not None:
            # Reference to a module attribute
            self.process_global_ref_expr(e)
        else:
            # Reference to a non-module (or missing) attribute
            if e.expr not in self.type_map:
                # No type available -- this happens for unreachable code. Since it's
                # unreachable, it wasn't type checked and we don't need to generate
                # dependencies.
                return
            if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, MypyFile):
                # Special case: reference to a missing module attribute.
                self.add_dependency(make_trigger(e.expr.node.fullname() + '.' + e.name))
                return
            typ = self.type_map[e.expr]
            self.add_attribute_dependency(typ, e.name)
            if self.use_logical_deps() and isinstance(typ, AnyType):
                name = self.get_unimported_fullname(e, typ)
                if name is not None:
                    # Generate a logical dependency from an unimported
                    # definition (which comes from a missing module).
                    # Example:
                    #     import missing  # "missing" not in build
                    #
                    #     def g() -> None:
                    #         missing.f()  # Generate dependency from "missing.f"
                    self.add_dependency(make_trigger(name))

    def get_unimported_fullname(self, e: MemberExpr, typ: AnyType) -> Optional[str]:
        """If e refers to an unimported definition, infer the fullname of this.

        Return None if e doesn't refer to an unimported definition or if we can't
        determine the name.
        """
        suffix = ''
        # Unwrap nested member expression to handle cases like "a.b.c.d" where
        # "a.b" is a known reference to an unimported module. Find the base
        # reference to an unimported module (such as "a.b") and the name suffix
        # (such as "c.d") needed to build a full name.
        while typ.type_of_any == TypeOfAny.from_another_any and isinstance(e.expr, MemberExpr):
            suffix = '.' + e.name + suffix
            e = e.expr
            if e.expr not in self.type_map:
                return None
            obj_type = self.type_map[e.expr]
            if not isinstance(obj_type, AnyType):
                # Can't find the base reference to the unimported module.
                return None
            typ = obj_type
        if typ.type_of_any == TypeOfAny.from_unimported_type and typ.missing_import_name:
            # Infer the full name of the unimported definition.
            return typ.missing_import_name + '.' + e.name + suffix
        return None

    def visit_super_expr(self, e: SuperExpr) -> None:
        # Arguments in "super(C, self)" won't generate useful logical deps.
        if not self.use_logical_deps():
            super().visit_super_expr(e)
        if e.info is not None:
            name = e.name
            for base in non_trivial_bases(e.info):
                self.add_dependency(make_trigger(base.fullname() + '.' + name))
                if name in base.names:
                    # No need to depend on further base classes, since we found
                    # the target. This is safe since if the target gets
                    # deleted or modified, we'll trigger it.
                    break

    def visit_call_expr(self, e: CallExpr) -> None:
        if isinstance(e.callee, RefExpr) and e.callee.fullname == 'builtins.isinstance':
            self.process_isinstance_call(e)
        else:
            super().visit_call_expr(e)

    def process_isinstance_call(self, e: CallExpr) -> None:
        """Process "isinstance(...)" in a way to avoid some extra dependencies."""
        if len(e.args) == 2:
            arg = e.args[1]
            if (isinstance(arg, RefExpr)
                    and arg.kind == GDEF
                    and isinstance(arg.node, TypeInfo)
                    and arg.fullname):
                # Special case to avoid redundant dependencies from "__init__".
                self.add_dependency(make_trigger(arg.fullname))
                return
        # In uncommon cases generate normal dependencies. These will include
        # spurious dependencies, but the performance impact is small.
        super().visit_call_expr(e)

    def visit_cast_expr(self, e: CastExpr) -> None:
        super().visit_cast_expr(e)
        self.add_type_dependencies(e.type)

    def visit_type_application(self, e: TypeApplication) -> None:
        super().visit_type_application(e)
        for typ in e.types:
            self.add_type_dependencies(typ)

    def visit_index_expr(self, e: IndexExpr) -> None:
        super().visit_index_expr(e)
        self.add_operator_method_dependency(e.base, '__getitem__')

    def visit_unary_expr(self, e: UnaryExpr) -> None:
        super().visit_unary_expr(e)
        if e.op not in unary_op_methods:
            return
        method = unary_op_methods[e.op]
        self.add_operator_method_dependency(e.expr, method)

    def visit_op_expr(self, e: OpExpr) -> None:
        super().visit_op_expr(e)
        self.process_binary_op(e.op, e.left, e.right)

    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
        super().visit_comparison_expr(e)
        for i, op in enumerate(e.operators):
            left = e.operands[i]
            right = e.operands[i + 1]
            self.process_binary_op(op, left, right)
            if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='):
                # Python 2 comparisons may also dispatch through __cmp__.
                self.add_operator_method_dependency(left, '__cmp__')
                self.add_operator_method_dependency(right, '__cmp__')

    def process_binary_op(self, op: str, left: Expression, right: Expression) -> None:
        """Add dependencies on the forward (and reverse) operator methods of a binary op."""
        method = op_methods.get(op)
        if method:
            if op == 'in':
                # "in" dispatches on the right operand (__contains__).
                self.add_operator_method_dependency(right, method)
            else:
                self.add_operator_method_dependency(left, method)
                rev_method = reverse_op_methods.get(method)
                if rev_method:
                    self.add_operator_method_dependency(right, rev_method)

    def add_operator_method_dependency(self, e: Expression, method: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_operator_method_dependency_for_type(typ, method)

    def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
        # Note that operator methods can't be (non-metaclass) methods of type objects
        # (that is, TypeType objects or Callables representing a type).
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.partial_fallback
        if isinstance(typ, Instance):
            trigger = make_trigger(typ.type.fullname() + '.' + method)
            self.add_dependency(trigger)
        elif isinstance(typ, UnionType):
            for item in typ.items:
                self.add_operator_method_dependency_for_type(item, method)
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            self.add_operator_method_dependency_for_type(typ.fallback, method)
        elif isinstance(typ, TypeType):
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type,
                                                             method)

    def visit_generator_expr(self, e: GeneratorExpr) -> None:
        super().visit_generator_expr(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
        super().visit_dictionary_comprehension(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_star_expr(self, e: StarExpr) -> None:
        super().visit_star_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        super().visit_yield_from_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_await_expr(self, e: AwaitExpr) -> None:
        super().visit_await_expr(e)
        self.add_attribute_dependency_for_expr(e.expr, '__await__')

    # Helpers

    def add_type_alias_deps(self, target: str) -> None:
        """Add dependencies from type aliases expanded within the given target."""
        # Type aliases are special, because some of the dependencies are calculated
        # in semanal.py, before they are expanded.
        if target in self.alias_deps:
            for alias in self.alias_deps[target]:
                self.add_dependency(make_trigger(alias))

    def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
        """Add dependency from trigger to a target.

        If the target is not given explicitly, use the current target.
        """
        if trigger.startswith(('<builtins.', '<typing.', '<mypy_extensions.')):
            # Don't track dependencies to certain library modules to keep the size of
            # the dependencies manageable. These dependencies should only
            # change on mypy version updates, which will require a full rebuild
            # anyway.
            return
        if target is None:
            target = self.scope.current_target()
        self.map.setdefault(trigger, set()).add(target)

    def add_type_dependencies(self, typ: Type, target: Optional[str] = None) -> None:
        """Add dependencies to all components of a type.

        Args:
            target: If not None, override the default (current) target of the
                generated dependency.
        """
        # TODO: Use this method in more places where get_type_triggers() + add_dependency()
        #       are called together.
        for trigger in self.get_type_triggers(typ):
            self.add_dependency(trigger, target)

    def add_attribute_dependency(self, typ: Type, name: str) -> None:
        """Add dependencies for accessing a named attribute of a type."""
        targets = self.attribute_triggers(typ, name)
        for target in targets:
            self.add_dependency(target)

    def attribute_triggers(self, typ: Type, name: str) -> List[str]:
        """Return all triggers associated with the attribute of a type."""
        if isinstance(typ, TypeVarType):
            typ = typ.upper_bound
        if isinstance(typ, TupleType):
            typ = typ.partial_fallback
        if isinstance(typ, Instance):
            member = '%s.%s' % (typ.type.fullname(), name)
            return [make_trigger(member)]
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            member = '%s.%s' % (typ.type_object().fullname(), name)
            triggers = [make_trigger(member)]
            # Also include triggers from the fallback (e.g. builtins.type attributes).
            triggers.extend(self.attribute_triggers(typ.fallback, name))
            return triggers
        elif isinstance(typ, UnionType):
            targets = []
            for item in typ.items:
                targets.extend(self.attribute_triggers(item, name))
            return targets
        elif isinstance(typ, TypeType):
            triggers = self.attribute_triggers(typ.item, name)
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                # The attribute may be provided by the metaclass.
                triggers.append(make_trigger('%s.%s' %
                                             (typ.item.type.metaclass_type.type.fullname(),
                                              name)))
            return triggers
        else:
            return []

    def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None:
        """Add attribute dependencies for an expression, if it has an inferred type."""
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_attribute_dependency(typ, name)

    def add_iter_dependency(self, node: Expression) -> None:
        """Depend on ``__iter__`` of the type of an iterated expression."""
        typ = self.type_map.get(node)
        if typ:
            self.add_attribute_dependency(typ, '__iter__')

    def use_logical_deps(self) -> bool:
        """Whether to generate "logical" dependencies (enabled via options)."""
        return self.options is not None and self.options.logical_deps

    def get_type_triggers(self, typ: Type) -> List[str]:
        """Return all triggers for the components of a type, per the current mode."""
        return get_type_triggers(typ, self.use_logical_deps())
def __init__(self, errors: Errors) -> None: self.errors = errors self.scope = Scope()
class SemanticAnalyzerPass3(TraverserVisitor):
    """The third and final pass of semantic analysis.

    Check type argument counts and values of generic types, and perform some
    straightforward type inference.
    """

    def __init__(self, modules: Dict[str, MypyFile], errors: Errors,
                 sem: 'mypy.semanal.SemanticAnalyzerPass2') -> None:
        self.modules = modules
        self.errors = errors
        # Reference back to the pass-2 analyzer: lookups, options, and the
        # current module state are shared with it.
        self.sem = sem
        self.scope = Scope()
        # If True, process function definitions. If False, don't. This is used
        # for processing module top levels in fine-grained incremental mode.
        self.recurse_into_functions = True

    def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
                   patches: List[Tuple[int, Callable[[], None]]]) -> None:
        """Run the pass over a single file, collecting deferred patches."""
        self.recurse_into_functions = True
        self.errors.set_file(fnam, file_node.fullname())
        self.options = options
        self.sem.options = options
        self.patches = patches
        self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
        self.sem.cur_mod_id = file_node.fullname()
        self.cur_mod_node = file_node
        self.sem.globals = file_node.names
        with experiments.strict_optional_set(options.strict_optional):
            self.scope.enter_file(file_node.fullname())
            self.accept(file_node)
            self.scope.leave()

    def refresh_partial(self,
                        node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None:
        """Refresh a stale target in fine-grained incremental mode."""
        self.scope.enter_file(self.sem.cur_mod_id)
        if isinstance(node, MypyFile):
            # Module top level only; skip function bodies.
            self.recurse_into_functions = False
            self.refresh_top_level(node)
        else:
            self.recurse_into_functions = True
            self.accept(node)
        self.scope.leave()

    def refresh_top_level(self, file_node: MypyFile) -> None:
        """Reanalyze a stale module top-level in fine-grained incremental mode."""
        for d in file_node.defs:
            self.accept(d)

    def accept(self, node: Node) -> None:
        # Wrap the visit so that an unexpected exception is reported as an
        # internal error at the node's location instead of crashing mypy.
        try:
            node.accept(self)
        except Exception as err:
            report_internal_error(err, self.errors.file, node.line, self.errors,
                                  self.options)

    def visit_block(self, b: Block) -> None:
        # Unreachable code is not analyzed.
        if b.is_unreachable:
            return
        super().visit_block(b)

    def visit_func_def(self, fdef: FuncDef) -> None:
        if not self.recurse_into_functions:
            return
        self.scope.enter_function(fdef)
        self.errors.push_function(fdef.name())
        self.analyze(fdef.type, fdef)
        super().visit_func_def(fdef)
        self.errors.pop_function()
        self.scope.leave()

    def visit_overloaded_func_def(self, fdef: OverloadedFuncDef) -> None:
        if not self.recurse_into_functions:
            return
        self.analyze(fdef.type, fdef)
        super().visit_overloaded_func_def(fdef)

    def visit_class_def(self, tdef: ClassDef) -> None:
        # NamedTuple base classes are validated in check_namedtuple_classdef; we don't have to
        # check them again here.
        self.scope.enter_class(tdef.info)
        if not tdef.info.is_named_tuple:
            # Analyze base classes plus type variable bounds/values.
            types = list(tdef.info.bases)  # type: List[Type]
            for tvar in tdef.type_vars:
                if tvar.upper_bound:
                    types.append(tvar.upper_bound)
                if tvar.values:
                    types.extend(tvar.values)
            self.analyze_types(types, tdef.info)
            for type in tdef.info.bases:
                if tdef.info.is_protocol:
                    if not isinstance(type, Instance) or not type.type.is_protocol:
                        if type.type.fullname() != 'builtins.object':
                            self.fail('All bases of a protocol must be protocols', tdef)
        # Recompute MRO now that we have analyzed all modules, to pick
        # up superclasses of bases imported from other modules in an
        # import loop. (Only do so if we succeeded the first time.)
        if tdef.info.mro:
            tdef.info.mro = []  # Force recomputation
            mypy.semanal.calculate_class_mro(tdef, self.fail_blocker)
            if tdef.info.is_protocol:
                add_protocol_members(tdef.info)
        if tdef.analyzed is not None:
            # Also check synthetic types associated with this ClassDef.
            # Currently these are TypedDict, and NamedTuple.
            if isinstance(tdef.analyzed, TypedDictExpr):
                self.analyze(tdef.analyzed.info.typeddict_type, tdef.analyzed, warn=True)
            elif isinstance(tdef.analyzed, NamedTupleExpr):
                self.analyze(tdef.analyzed.info.tuple_type, tdef.analyzed, warn=True)
                self.analyze_info(tdef.analyzed.info)
        super().visit_class_def(tdef)
        self.scope.leave()

    def visit_decorator(self, dec: Decorator) -> None:
        """Try to infer the type of the decorated function.

        This lets us resolve references to decorated functions during
        type checking when there are cyclic imports, as otherwise the type
        might not be available when we need it.

        This basically uses a simple special-purpose type inference
        engine just for decorators.
        """
        # Don't just call the super method since we don't unconditionally traverse the decorated
        # function.
        dec.var.accept(self)
        for decorator in dec.decorators:
            decorator.accept(self)
        if self.recurse_into_functions:
            dec.func.accept(self)
        if dec.var.is_property:
            # Decorators are expected to have a callable type (it's a little odd).
            if dec.func.type is None:
                dec.var.type = CallableType(
                    [AnyType(TypeOfAny.special_form)],
                    [ARG_POS],
                    [None],
                    AnyType(TypeOfAny.special_form),
                    self.builtin_type('function'),
                    name=dec.var.name())
            elif isinstance(dec.func.type, CallableType):
                dec.var.type = dec.func.type
            self.analyze(dec.var.type, dec.var)
            return
        # A decorator "preserves type" when it is an identity function on the
        # signature, so the decorated function keeps its declared type.
        decorator_preserves_type = True
        for expr in dec.decorators:
            preserve_type = False
            if isinstance(expr, RefExpr) and isinstance(expr.node, FuncDef):
                if expr.node.type and is_identity_signature(expr.node.type):
                    preserve_type = True
            if not preserve_type:
                decorator_preserves_type = False
                break
        if decorator_preserves_type:
            # No non-identity decorators left. We can trivially infer the type
            # of the function here.
            dec.var.type = function_type(dec.func, self.builtin_type('function'))
        if dec.decorators:
            return_type = calculate_return_type(dec.decorators[0])
            if return_type and isinstance(return_type, AnyType):
                # The outermost decorator will return Any so we know the type of the
                # decorated function.
                dec.var.type = AnyType(TypeOfAny.from_another_any,
                                       source_any=return_type)
            sig = find_fixed_callable_return(dec.decorators[0])
            if sig:
                # The outermost decorator always returns the same kind of function,
                # so we know that this is the type of the decorated function.
                orig_sig = function_type(dec.func, self.builtin_type('function'))
                sig.name = orig_sig.items()[0].name
                dec.var.type = sig
        self.analyze(dec.var.type, dec.var)

    def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
        """Traverse the assignment statement.

        This includes the actual assignment and synthetic types resulted from this
        assignment (if any). Currently this includes NewType, TypedDict, NamedTuple,
        and TypeVar.
        """
        self.analyze(s.type, s)
        if isinstance(s.rvalue, IndexExpr) and isinstance(s.rvalue.analyzed,
                                                          TypeAliasExpr):
            self.analyze(s.rvalue.analyzed.type, s.rvalue.analyzed, warn=True)
        if isinstance(s.rvalue, CallExpr):
            analyzed = s.rvalue.analyzed
            if isinstance(analyzed, NewTypeExpr):
                self.analyze(analyzed.old_type, analyzed)
                if analyzed.info:
                    # Currently NewTypes only have __init__, but to be future proof,
                    # we analyze all symbols.
                    self.analyze_info(analyzed.info)
                if analyzed.info and analyzed.info.mro:
                    analyzed.info.mro = []  # Force recomputation
                    mypy.semanal.calculate_class_mro(analyzed.info.defn,
                                                     self.fail_blocker)
            if isinstance(analyzed, TypeVarExpr):
                types = []
                if analyzed.upper_bound:
                    types.append(analyzed.upper_bound)
                if analyzed.values:
                    types.extend(analyzed.values)
                self.analyze_types(types, analyzed)
            if isinstance(analyzed, TypedDictExpr):
                self.analyze(analyzed.info.typeddict_type, analyzed, warn=True)
            if isinstance(analyzed, NamedTupleExpr):
                self.analyze(analyzed.info.tuple_type, analyzed, warn=True)
                self.analyze_info(analyzed.info)
        # We need to pay additional attention to assignments that define a type alias.
        # The resulting type is also stored in the 'type_override' attribute of
        # the corresponding SymbolTableNode.
        if isinstance(s.lvalues[0], RefExpr) and isinstance(s.lvalues[0].node, Var):
            self.analyze(s.lvalues[0].node.type, s.lvalues[0].node)
        if isinstance(s.lvalues[0], NameExpr):
            node = self.sem.lookup(s.lvalues[0].name, s, suppress_errors=True)
            if node:
                self.analyze(node.type_override, node)
        super().visit_assignment_stmt(s)

    def visit_for_stmt(self, s: ForStmt) -> None:
        self.analyze(s.index_type, s)
        super().visit_for_stmt(s)

    def visit_with_stmt(self, s: WithStmt) -> None:
        self.analyze(s.target_type, s)
        super().visit_with_stmt(s)

    def visit_cast_expr(self, e: CastExpr) -> None:
        self.analyze(e.type, e)
        super().visit_cast_expr(e)

    def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
        super().visit_reveal_type_expr(e)

    def visit_type_application(self, e: TypeApplication) -> None:
        for type in e.types:
            self.analyze(type, e)
        super().visit_type_application(e)

    # Helpers

    def perform_transform(self, node: Union[Node, SymbolTableNode],
                          transform: Callable[[Type], Type]) -> None:
        """Apply transform to all types associated with node."""
        if isinstance(node, ForStmt):
            if node.index_type:
                node.index_type = transform(node.index_type)
            self.transform_types_in_lvalue(node.index, transform)
        if isinstance(node, WithStmt):
            if node.target_type:
                node.target_type = transform(node.target_type)
            for n in node.target:
                if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
                    n.node.type = transform(n.node.type)
        if isinstance(node, (FuncDef, OverloadedFuncDef, CastExpr, AssignmentStmt,
                             TypeAliasExpr, Var)):
            assert node.type, "Scheduled patch for non-existent type"
            node.type = transform(node.type)
        if isinstance(node, NewTypeExpr):
            assert node.old_type, "Scheduled patch for non-existent type"
            node.old_type = transform(node.old_type)
            if node.info:
                new_bases = []  # type: List[Instance]
                for b in node.info.bases:
                    new_b = transform(b)
                    # TODO: this code can be combined with code in second pass.
                    if isinstance(new_b, Instance):
                        new_bases.append(new_b)
                    elif isinstance(new_b, TupleType):
                        new_bases.append(new_b.fallback)
                    else:
                        self.fail("Argument 2 to NewType(...) must be subclassable"
                                  " (got {})".format(new_b), node)
                        new_bases.append(self.builtin_type('object'))
                node.info.bases = new_bases
        if isinstance(node, TypeVarExpr):
            if node.upper_bound:
                node.upper_bound = transform(node.upper_bound)
            if node.values:
                node.values = [transform(v) for v in node.values]
        if isinstance(node, TypedDictExpr):
            assert node.info.typeddict_type, "Scheduled patch for non-existent type"
            node.info.typeddict_type = cast(TypedDictType,
                                            transform(node.info.typeddict_type))
        if isinstance(node, NamedTupleExpr):
            assert node.info.tuple_type, "Scheduled patch for non-existent type"
            node.info.tuple_type = cast(TupleType, transform(node.info.tuple_type))
        if isinstance(node, TypeApplication):
            node.types = [transform(t) for t in node.types]
        if isinstance(node, SymbolTableNode):
            assert node.type_override, "Scheduled patch for non-existent type"
            node.type_override = transform(node.type_override)
        if isinstance(node, TypeInfo):
            for tvar in node.defn.type_vars:
                if tvar.upper_bound:
                    tvar.upper_bound = transform(tvar.upper_bound)
                if tvar.values:
                    tvar.values = [transform(v) for v in tvar.values]
            new_bases = []
            for base in node.bases:
                new_base = transform(base)
                if isinstance(new_base, Instance):
                    new_bases.append(new_base)
                else:
                    # Don't fix the NamedTuple bases, they are Instance's intentionally.
                    # Patch the 'args' just in case, although generic tuple types are
                    # not supported yet.
                    alt_base = Instance(base.type, [transform(a) for a in base.args])
                    new_bases.append(alt_base)
            node.bases = new_bases

    def transform_types_in_lvalue(self, lvalue: Lvalue,
                                  transform: Callable[[Type], Type]) -> None:
        """Apply transform to variable types reachable from an lvalue."""
        if isinstance(lvalue, RefExpr):
            if isinstance(lvalue.node, Var):
                var = lvalue.node
                if var.type:
                    var.type = transform(var.type)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.transform_types_in_lvalue(item, transform)

    def analyze(self, type: Optional[Type], node: Union[Node, SymbolTableNode],
                warn: bool = False) -> None:
        """Run pass-3 type analysis on 'type' and schedule any needed patches.

        Recursive type warnings are only emitted on type definition 'node's,
        marked by 'warn'. Flags that appear during analysis of 'type' are
        collected in the 'indicator' dict.
        """
        indicator = {}  # type: Dict[str, bool]
        if type:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            self.check_for_omitted_generics(type)
            self.generate_type_patches(node, indicator, warn)
            if analyzer.aliases_used:
                # Preserve alias dependencies for fine-grained mode; aliases are
                # only present in expanded form in symbol tables.
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)

    def analyze_types(self, types: List[Type], node: Node) -> None:
        # Similar to above but for nodes with multiple types.
        indicator = {}  # type: Dict[str, bool]
        for type in types:
            analyzer = self.make_type_analyzer(indicator)
            type.accept(analyzer)
            self.check_for_omitted_generics(type)
            if analyzer.aliases_used:
                target = self.scope.current_target()
                self.cur_mod_node.alias_deps[target].update(analyzer.aliases_used)
        # A single patch covers all the types analyzed above.
        self.generate_type_patches(node, indicator, warn=False)

    def generate_type_patches(self, node: Union[Node, SymbolTableNode],
                              indicator: Dict[str, bool],
                              warn: bool) -> None:
        """Schedule deferred fix-ups based on flags collected in 'indicator'."""
        if indicator.get('forward') or indicator.get('synthetic'):
            def patch() -> None:
                self.perform_transform(node,
                    lambda tp: tp.accept(ForwardReferenceResolver(self.fail,
                                                                  node, warn)))
            self.patches.append((PRIORITY_FORWARD_REF, patch))
        if indicator.get('typevar'):
            def patch() -> None:
                self.perform_transform(node,
                    lambda tp: tp.accept(TypeVariableChecker(self.fail)))
            self.patches.append((PRIORITY_TYPEVAR_VALUES, patch))

    def analyze_info(self, info: TypeInfo) -> None:
        # Similar to above but for nodes with synthetic TypeInfos (NamedTuple and NewType).
        for name in info.names:
            sym = info.names[name]
            if isinstance(sym.node, (FuncDef, Decorator)):
                self.accept(sym.node)
            if isinstance(sym.node, Var):
                self.analyze(sym.node.type, sym.node)

    def make_type_analyzer(self, indicator: Dict[str, bool]) -> TypeAnalyserPass3:
        """Construct a fresh pass-3 type analyzer sharing this pass's state."""
        return TypeAnalyserPass3(self.sem.lookup_qualified,
                                 self.sem.lookup_fully_qualified,
                                 self.fail,
                                 self.sem.note,
                                 self.sem.plugin,
                                 self.options,
                                 self.is_typeshed_file,
                                 indicator,
                                 self.patches)

    def check_for_omitted_generics(self, typ: Type) -> None:
        # Report bare generics when --disallow-any-generics is on (but never
        # inside typeshed stubs).
        if not self.options.disallow_any_generics or self.is_typeshed_file:
            return
        for t in collect_any_types(typ):
            if t.type_of_any == TypeOfAny.from_omitted_generics:
                self.fail(messages.BARE_GENERIC, t)

    def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
        # NOTE(review): 'blocker' is accepted but not forwarded to
        # errors.report — presumably deliberate (blockers are not raised in
        # this pass); confirm before changing.
        self.errors.report(ctx.get_line(), ctx.get_column(), msg)

    def fail_blocker(self, msg: str, ctx: Context) -> None:
        self.fail(msg, ctx, blocker=True)

    def builtin_type(self, name: str, args: Optional[List[Type]] = None) -> Instance:
        """Return an Instance of a builtin type, filling type args with Any."""
        names = self.modules['builtins']
        sym = names.names[name]
        node = sym.node
        assert isinstance(node, TypeInfo)
        if args:
            # TODO: assert len(args) == len(node.defn.type_vars)
            return Instance(node, args)
        any_type = AnyType(TypeOfAny.special_form)
        return Instance(node, [any_type] * len(node.defn.type_vars))
def __init__(self, errors: Errors) -> None:
    # Visit function bodies as well by default; callers flip this off when
    # only the module top level should be processed.
    self.recurse_into_functions = True
    self.scope = Scope()
    self.errors = errors
class TypeArgumentAnalyzer(MixedTraverserVisitor):
    """Validate the type arguments of every Instance encountered.

    For each type argument, verify that it satisfies the corresponding type
    variable's upper bound and, for value-restricted type variables, that it
    is one of the allowed values.
    """

    def __init__(self, errors: Errors) -> None:
        # Function bodies are visited by default; fine-grained incremental
        # mode turns this off to reprocess only module top levels.
        self.recurse_into_functions = True
        self.scope = Scope()
        self.errors = errors

    def visit_mypy_file(self, o: MypyFile) -> None:
        self.errors.set_file(o.path, o.fullname(), scope=self.scope)
        self.scope.enter_file(o.fullname())
        super().visit_mypy_file(o)
        self.scope.leave()

    def visit_func(self, defn: FuncItem) -> None:
        if self.recurse_into_functions:
            with self.scope.function_scope(defn):
                super().visit_func(defn)

    def visit_class_def(self, defn: ClassDef) -> None:
        with self.scope.class_scope(defn.info):
            super().visit_class_def(defn)

    def visit_block(self, o: Block) -> None:
        if o.is_unreachable:
            return
        super().visit_block(o)

    def visit_instance(self, t: Instance) -> None:
        # Type argument counts were checked in the main semantic analyzer
        # pass, so the argument and type variable lists are assumed to match.
        info = t.type
        for index, (actual_arg, formal_tvar) in enumerate(
                zip(t.args, info.defn.type_vars)):
            if formal_tvar.values:
                if isinstance(actual_arg, TypeVarType):
                    # A type variable used as an argument must itself be
                    # value-restricted; otherwise it can't be valid here.
                    candidates = actual_arg.values
                    if not candidates:
                        self.fail('Type variable "{}" not valid as type '
                                  'argument value for "{}"'.format(
                                      actual_arg.name, info.name()), t)
                        continue
                else:
                    candidates = [actual_arg]
                self.check_type_var_values(info, candidates, formal_tvar.name,
                                           formal_tvar.values, index + 1, t)
            if not is_subtype(actual_arg, formal_tvar.upper_bound):
                self.fail('Type argument "{}" of "{}" must be '
                          'a subtype of "{}"'.format(
                              actual_arg, info.name(), formal_tvar.upper_bound), t)
        super().visit_instance(t)

    def check_type_var_values(self, type: TypeInfo, actuals: List[Type], arg_name: str,
                              valids: List[Type], arg_number: int,
                              context: Context) -> None:
        """Report arguments that are not among a type variable's allowed values."""
        for actual in actuals:
            if isinstance(actual, AnyType):
                # Any is always acceptable.
                continue
            if any(is_same_type(actual, value) for value in valids):
                continue
            if len(actuals) > 1 or not isinstance(actual, Instance):
                self.fail('Invalid type argument value for "{}"'.format(
                    type.name()), context)
            else:
                class_name = '"{}"'.format(type.name())
                actual_type_name = '"{}"'.format(actual.type.name())
                self.fail(message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format(
                    arg_name, class_name, actual_type_name), context)

    def fail(self, msg: str, context: Context) -> None:
        line, column = context.get_line(), context.get_column()
        self.errors.report(line, column, msg)