Example 1
File: deps.py Project: sixolet/mypy
 def attribute_triggers(self, typ: Type, name: str) -> List[str]:
     """Return all triggers associated with the attribute of a type."""
     if isinstance(typ, TypeVarType):
         typ = typ.upper_bound
     if isinstance(typ, TupleType):
         typ = typ.fallback
     if isinstance(typ, Instance):
         member = '%s.%s' % (typ.type.fullname(), name)
         return [make_trigger(member)]
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         member = '%s.%s' % (typ.type_object().fullname(), name)
         triggers = [make_trigger(member)]
         triggers.extend(self.attribute_triggers(typ.fallback, name))
         return triggers
     elif isinstance(typ, UnionType):
         targets = []
         for item in typ.items:
             targets.extend(self.attribute_triggers(item, name))
         return targets
     elif isinstance(typ, TypeType):
         triggers = self.attribute_triggers(typ.item, name)
         if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
             triggers.append(make_trigger('%s.%s' %
                                          (typ.item.type.metaclass_type.type.fullname(),
                                           name)))
         return triggers
     else:
         return []
Example 2
 def visit_member_expr(self, e: MemberExpr) -> None:
     if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, TypeInfo):
         # Special case class attribute so that we don't depend on "__init__".
         self.add_dependency(make_trigger(e.expr.node.fullname()))
     else:
         super().visit_member_expr(e)
     if e.kind is not None:
         # Reference to a module attribute
         self.process_global_ref_expr(e)
     else:
         # Reference to a non-module (or missing) attribute
         if e.expr not in self.type_map:
             # No type available -- this happens for unreachable code. Since it's unreachable,
             # it wasn't type checked and we don't need to generate dependencies.
             return
         if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, MypyFile):
             # Special case: reference to a missing module attribute.
             self.add_dependency(make_trigger(e.expr.node.fullname() + '.' + e.name))
             return
         typ = self.type_map[e.expr]
         self.add_attribute_dependency(typ, e.name)
         if self.use_logical_deps() and isinstance(typ, AnyType):
             name = self.get_unimported_fullname(e, typ)
             if name is not None:
                 # Generate a logical dependency from an unimported
                 # definition (which comes from a missing module).
                 # Example:
                 #     import missing  # "missing" not in build
                 #
                 #     def g() -> None:
                 #         missing.f()  # Generate dependency from "missing.f"
                 self.add_dependency(make_trigger(name))
Example 3
    def _snapshot_protocol_deps() -> Dict[str, Set[str]]:
        """Collect protocol attribute dependencies found so far from registered subtype checks.

        There are three kinds of protocol dependencies. For example, after a subtype check:

            x: Proto = C()

        the following dependencies will be generated:
            1. ..., <SuperProto[wildcard]>, <Proto[wildcard]> -> <Proto>
            2. ..., <B.attr>, <C.attr> -> <C> [for every attr in Proto members]
            3. <C> -> Proto  # this one to invalidate the subtype cache

        The first kind is generated immediately per-module in deps.py (see also an example there
        for motivation why it is needed). While two other kinds are generated here after all
        modules are type checked and we have recorded all the subtype checks. To understand these
        two kinds, consider a simple example:

            class A:
                def __iter__(self) -> Iterator[int]:
                    ...

            it: Iterable[int] = A()

        We add <a.A.__iter__> -> <a.A> to invalidate the assignment (module target in this case),
        whenever the signature of a.A.__iter__ changes. We also add <a.A> -> typing.Iterable,
        to invalidate the subtype caches of the latter. (Note that the same logic applies to
        proper subtype checks, and calculating meets and joins, if this involves calling
        'subtypes.is_protocol_implementation').
        """
        deps = {}  # type: Dict[str, Set[str]]
        for info in TypeState._rechecked_types:
            for attr in TypeState._checked_against_members[info.fullname()]:
                # The need for full MRO here is subtle, during an update, base classes of
                # a concrete class may not be reprocessed, so not all <B.x> -> <C.x> deps
                # are added.
                for base_info in info.mro[:-1]:
                    trigger = make_trigger('%s.%s' % (base_info.fullname(), attr))
                    if 'typing' in trigger or 'builtins' in trigger:
                        # TODO: avoid everything from typeshed
                        continue
                    deps.setdefault(trigger, set()).add(make_trigger(info.fullname()))
            for proto in TypeState._attempted_protocols[info.fullname()]:
                trigger = make_trigger(info.fullname())
                if 'typing' in trigger or 'builtins' in trigger:
                    continue
                # If any class that was checked against a protocol changes,
                # we need to reset the subtype cache for the protocol.
                #
                # Note: strictly speaking, the protocol doesn't need to be
                # re-checked, we only need to reset the cache, and its uses
                # elsewhere are still valid (unless invalidated by other deps).
                deps.setdefault(trigger, set()).add(proto)
        return deps
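To make the docstring's example concrete, the map returned for `x: Proto = C()` might look as follows. This is only a sketch: the module name `mod`, the member name `attr`, and the base class `B` are hypothetical, and the trigger strings follow the `<fullname>` convention used throughout these examples.

    # Sketch of the Dict[str, Set[str]] produced for the docstring's example,
    # assuming Proto has a single member "attr" and C has one non-trivial base B.
    expected_deps = {
        '<mod.B.attr>': {'<mod.C>'},   # kind 2: base-class attribute -> concrete class
        '<mod.C.attr>': {'<mod.C>'},   # kind 2: own attribute -> concrete class
        '<mod.C>': {'mod.Proto'},      # kind 3: class -> protocol (resets its subtype cache)
    }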
Example 4
File: deps.py Project: sixolet/mypy
    def process_global_ref_expr(self, o: RefExpr) -> None:
        if o.fullname is not None:
            self.add_dependency(make_trigger(o.fullname))

        # If this is a reference to a type, generate a dependency to its
        # constructor.
        # TODO: avoid generating spurious dependencies for isinstance checks,
        # except statements, class attribute references, etc., if performance is a problem.
        typ = self.type_map.get(o)
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            class_name = typ.type_object().fullname()
            self.add_dependency(make_trigger(class_name + '.__init__'))
            self.add_dependency(make_trigger(class_name + '.__new__'))
Example 5
File: deps.py Project: sixolet/mypy
 def visit_class_def(self, o: ClassDef) -> None:
     self.scope.enter_class(o.info)
     target = self.scope.current_full_target()
     self.add_dependency(make_trigger(target), target)
     old_is_class = self.is_class
     self.is_class = True
     # Add dependencies to type variables of a generic class.
     for tv in o.type_vars:
         self.add_dependency(make_trigger(tv.fullname), target)
     self.process_type_info(o.info)
     super().visit_class_def(o)
     self.is_class = old_is_class
     self.scope.leave()
Example 6
 def visit_class_def(self, o: ClassDef) -> None:
     target = self.enter_class_scope(o.info)
     self.add_dependency(make_trigger(target), target)
     old_is_class = self.is_class
     self.is_class = True
     # Add dependencies to type variables of a generic class.
     for tv in o.type_vars:
         self.add_dependency(make_trigger(tv.fullname), target)
     # Add dependencies to base types.
     for base in o.info.bases:
         self.add_type_dependencies(base, target=target)
     # TODO: Add dependencies based on remaining TypeInfo attributes.
     super().visit_class_def(o)
     self.is_class = old_is_class
     info = o.info
     for name, node in info.names.items():
         if isinstance(node.node, Var):
             for base_info in non_trivial_bases(info):
                 # If the type of an attribute changes in a base class, we make references
                 # to the attribute in the subclass stale.
                 self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                     target=make_trigger(info.fullname() + '.' + name))
     for base_info in non_trivial_bases(info):
         for name, node in base_info.names.items():
             self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                 target=make_trigger(info.fullname() + '.' + name))
         self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                             target=make_trigger(info.fullname() + '.__init__'))
     self.leave_scope()
Example 7
File: deps.py Project: sixolet/mypy
 def process_lvalue(self, lvalue: Expression) -> None:
     """Generate additional dependencies for an lvalue."""
     if isinstance(lvalue, IndexExpr):
         self.add_operator_method_dependency(lvalue.base, '__setitem__')
     elif isinstance(lvalue, NameExpr):
         if lvalue.kind in (MDEF, GDEF):
             # Assignment to an attribute in the class body, or direct assignment to a
             # global variable.
             lvalue_type = self.get_non_partial_lvalue_type(lvalue)
             type_triggers = get_type_triggers(lvalue_type)
             attr_trigger = make_trigger('%s.%s' % (self.scope.current_full_target(),
                                                    lvalue.name))
             for type_trigger in type_triggers:
                 self.add_dependency(type_trigger, attr_trigger)
     elif isinstance(lvalue, MemberExpr):
         if lvalue.kind is None:
             # Reference to a non-module attribute
             if lvalue.expr not in self.type_map:
                 # Unreachable assignment -> not checked so no dependencies to generate.
                 return
             object_type = self.type_map[lvalue.expr]
             lvalue_type = self.get_non_partial_lvalue_type(lvalue)
             type_triggers = get_type_triggers(lvalue_type)
             for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
                 for type_trigger in type_triggers:
                     self.add_dependency(type_trigger, attr_trigger)
     elif isinstance(lvalue, TupleExpr):
         for item in lvalue.items:
             self.process_lvalue(item)
Example 8
File: deps.py Project: sixolet/mypy
 def visit_import_from(self, o: ImportFrom) -> None:
     module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                            o.relative,
                                            o.id,
                                            self.is_package_init_file)
     for name, as_name in o.names:
         self.add_dependency(make_trigger(module_id + '.' + name))
Example 9
File: deps.py Project: sixolet/mypy
 def visit_decorator(self, o: Decorator) -> None:
     # We don't need to recheck outer scope for an overload, only overload itself.
     # Also if any decorator is nested, it is not externally visible, so we don't need to
     # generate dependency.
     if not o.func.is_overload and self.scope.current_function_name() is None:
         self.add_dependency(make_trigger(o.func.fullname()))
     super().visit_decorator(o)
Example 10
 def visit_instance(self, typ: Instance) -> List[str]:
     trigger = make_trigger(typ.type.fullname())
     triggers = [trigger]
     for arg in typ.args:
         triggers.extend(self.get_type_triggers(arg))
     if typ.final_value:
         triggers.extend(self.get_type_triggers(typ.final_value))
     return triggers
Example 11
File: deps.py Project: sixolet/mypy
 def visit_type_var(self, typ: TypeVarType) -> List[str]:
     triggers = []
     if typ.fullname:
         triggers.append(make_trigger(typ.fullname))
     if typ.upper_bound:
         triggers.extend(get_type_triggers(typ.upper_bound))
     for val in typ.values:
         triggers.extend(get_type_triggers(val))
     return triggers
Example 12
File: deps.py Project: sixolet/mypy
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     rvalue = o.rvalue
     if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
         analyzed = rvalue.analyzed
         self.add_type_dependencies(analyzed.upper_bound,
                                    target=make_trigger(analyzed.fullname()))
         for val in analyzed.values:
             self.add_type_dependencies(val, target=make_trigger(analyzed.fullname()))
         # We need to re-analyze the definition if bound or value is deleted.
         super().visit_call_expr(rvalue)
     elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
         # Depend on types of named tuple items.
         info = rvalue.analyzed.info
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         for name, symnode in info.names.items():
             if not name.startswith('_') and isinstance(symnode.node, Var):
                 typ = symnode.node.type
                 if typ:
                     self.add_type_dependencies(typ)
                     self.add_type_dependencies(typ, target=make_trigger(prefix))
                     attr_target = make_trigger('%s.%s' % (prefix, name))
                     self.add_type_dependencies(typ, target=attr_target)
     elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr):
         # Depend on the underlying typeddict type
         info = rvalue.analyzed.info
         assert info.typeddict_type is not None
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix))
     elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr):
         # Enum values are currently not checked, but for future we add the deps on them
         for name, symnode in rvalue.analyzed.info.names.items():
             if isinstance(symnode.node, Var) and symnode.node.type:
                 self.add_type_dependencies(symnode.node.type)
     elif o.is_alias_def:
         assert len(o.lvalues) == 1
         lvalue = o.lvalues[0]
         assert isinstance(lvalue, NameExpr)
         # TODO: get rid of this extra dependency from __init__ to alias definition scope
         typ = self.type_map.get(lvalue)
         if isinstance(typ, FunctionLike) and typ.is_type_obj():
             class_name = typ.type_object().fullname()
             self.add_dependency(make_trigger(class_name + '.__init__'))
             self.add_dependency(make_trigger(class_name + '.__new__'))
         if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
             self.add_type_dependencies(rvalue.analyzed.type)
     else:
         # Normal assignment
         super().visit_assignment_stmt(o)
         for lvalue in o.lvalues:
             self.process_lvalue(lvalue)
         items = o.lvalues + [rvalue]
         for i in range(len(items) - 1):
             lvalue = items[i]
             rvalue = items[i + 1]
             if isinstance(lvalue, TupleExpr):
                 self.add_attribute_dependency_for_expr(rvalue, '__iter__')
         if o.type:
             for trigger in get_type_triggers(o.type):
                 self.add_dependency(trigger)
Example 13
 def visit_import_from(self, o: ImportFrom) -> None:
     if self.use_logical_deps():
         # Just importing a name doesn't create a logical dependency.
         return
     module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                            o.relative,
                                            o.id,
                                            self.is_package_init_file)
     for name, as_name in o.names:
         self.add_dependency(make_trigger(module_id + '.' + name))
Example 14
File: deps.py Project: sixolet/mypy
 def visit_func_def(self, o: FuncDef) -> None:
     self.scope.enter_function(o)
     target = self.scope.current_target()
     if o.type:
         if self.is_class and isinstance(o.type, FunctionLike):
             signature = bind_self(o.type)  # type: Type
         else:
             signature = o.type
         for trigger in get_type_triggers(signature):
             self.add_dependency(trigger)
             self.add_dependency(trigger, target=make_trigger(target))
     if o.info:
         for base in non_trivial_bases(o.info):
             self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
     self.add_type_alias_deps(self.scope.current_target())
     super().visit_func_def(o)
     variants = set(o.expanded) - {o}
     for ex in variants:
         if isinstance(ex, FuncDef):
             super().visit_func_def(ex)
     self.scope.leave()
Example 15
 def visit_name_expr(self, o: NameExpr) -> None:
     if o.kind == LDEF:
         # We don't track dependencies to local variables, since they
         # aren't externally visible.
         return
     if o.kind == MDEF:
         # Direct reference to member is only possible in the scope that
         # defined the name, so no dependency is required.
         return
     if o.fullname is not None:
         trigger = make_trigger(o.fullname)
         self.add_dependency(trigger)
Example 16
 def visit_super_expr(self, e: SuperExpr) -> None:
     # Arguments in "super(C, self)" won't generate useful logical deps.
     if not self.use_logical_deps():
         super().visit_super_expr(e)
     if e.info is not None:
         name = e.name
         for base in non_trivial_bases(e.info):
             self.add_dependency(make_trigger(base.fullname() + '.' + name))
             if name in base.names:
                 # No need to depend on further base classes, since we found
                 # the target.  This is safe since if the target gets
                 # deleted or modified, we'll trigger it.
                 break
Example 17
 def visit_func_def(self, o: FuncDef) -> None:
     if not isinstance(self.current_scope(), FuncDef):
         # Not a nested function, so create a new target.
         new_scope = True
         target = self.enter_function_scope(o)
     else:
         # Treat nested functions as components of the parent function target.
         new_scope = False
         target = self.current_target()
     if o.type:
         if self.is_class and isinstance(o.type, FunctionLike):
             signature = bind_self(o.type)  # type: Type
         else:
             signature = o.type
         for trigger in get_type_triggers(signature):
             self.add_dependency(trigger)
             self.add_dependency(trigger, target=make_trigger(target))
     if o.info:
         for base in non_trivial_bases(o.info):
             self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
     super().visit_func_def(o)
     if new_scope:
         self.leave_scope()
Example 18
 def visit_func_def(self, o: FuncDef) -> None:
     if not isinstance(self.current_scope(), FuncDef):
         # Not a nested function, so create a new target.
         new_scope = True
         target = self.enter_function_scope(o)
     else:
         # Treat nested functions as components of the parent function target.
         new_scope = False
         target = self.current_target()
     if o.type:
         if self.is_class and isinstance(o.type, FunctionLike):
             signature = bind_self(o.type)  # type: Type
         else:
             signature = o.type
         for trigger in get_type_triggers(signature):
             self.add_dependency(trigger)
             self.add_dependency(trigger, target=make_trigger(target))
     if o.info:
         for base in non_trivial_bases(o.info):
             self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
     super().visit_func_def(o)
     if new_scope:
         self.leave_scope()
Example 19
 def process_isinstance_call(self, e: CallExpr) -> None:
     """Process "isinstance(...)" in a way to avoid some extra dependencies."""
     if len(e.args) == 2:
         arg = e.args[1]
         if (isinstance(arg, RefExpr)
                 and arg.kind == GDEF
                 and isinstance(arg.node, TypeInfo)
                 and arg.fullname):
             # Special case to avoid redundant dependencies from "__init__".
             self.add_dependency(make_trigger(arg.fullname))
             return
     # In uncommon cases generate normal dependencies. These will include
     # spurious dependencies, but the performance impact is small.
     super().visit_call_expr(e)
Example 20
 def process_isinstance_call(self, e: CallExpr) -> None:
     """Process "isinstance(...)" in a way to avoid some extra dependencies."""
     if len(e.args) == 2:
         arg = e.args[1]
         if (isinstance(arg, RefExpr)
                 and arg.kind == GDEF
                 and isinstance(arg.node, TypeInfo)
                 and arg.fullname):
             # Special case to avoid redundant dependencies from "__init__".
             self.add_dependency(make_trigger(arg.fullname))
             return
     # In uncommon cases generate normal dependencies. These will include
     # spurious dependencies, but the performance impact is small.
     super().visit_call_expr(e)
Example 21
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     # TODO: Implement all assignment special forms, including these:
     #   TypedDict
     #   NamedTuple
     #   Enum
     #   type aliases
     rvalue = o.rvalue
     if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
         # TODO: Support type variable value restriction
         analyzed = rvalue.analyzed
         self.add_type_dependencies(analyzed.upper_bound,
                                    target=make_trigger(analyzed.fullname()))
     elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
         # Depend on types of named tuple items.
         info = rvalue.analyzed.info
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         for name, symnode in info.names.items():
             if not name.startswith('_') and isinstance(symnode.node, Var):
                 typ = symnode.node.type
                 if typ:
                     self.add_type_dependencies(typ)
                     attr_target = make_trigger('%s.%s' % (prefix, name))
                     self.add_type_dependencies(typ, target=attr_target)
     else:
         # Normal assignment
         super().visit_assignment_stmt(o)
         for lvalue in o.lvalues:
             self.process_lvalue(lvalue)
         items = o.lvalues + [rvalue]
         for i in range(len(items) - 1):
             lvalue = items[i]
             rvalue = items[i + 1]
             if isinstance(lvalue, (TupleExpr, ListExpr)):
                 self.add_attribute_dependency_for_expr(rvalue, '__iter__')
         if o.type:
             for trigger in get_type_triggers(o.type):
                 self.add_dependency(trigger)
Example 22
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     # TODO: Implement all assignment special forms, including these:
     #   TypedDict
     #   NamedTuple
     #   Enum
     #   type aliases
     rvalue = o.rvalue
     if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
         # TODO: Support type variable value restriction
         analyzed = rvalue.analyzed
         self.add_type_dependencies(analyzed.upper_bound,
                                    target=make_trigger(analyzed.fullname()))
     elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
         # Depend on types of named tuple items.
         info = rvalue.analyzed.info
         prefix = '%s.%s' % (self.current_full_target(), info.name())
         for name, symnode in info.names.items():
             if not name.startswith('_') and isinstance(symnode.node, Var):
                 typ = symnode.node.type
                 if typ:
                     self.add_type_dependencies(typ)
                     attr_target = make_trigger('%s.%s' % (prefix, name))
                     self.add_type_dependencies(typ, target=attr_target)
     else:
         # Normal assignment
         super().visit_assignment_stmt(o)
         for lvalue in o.lvalues:
             self.process_lvalue(lvalue)
         items = o.lvalues + [rvalue]
         for i in range(len(items) - 1):
             lvalue = items[i]
             rvalue = items[i + 1]
             if isinstance(lvalue, (TupleExpr, ListExpr)):
                 self.add_attribute_dependency_for_expr(rvalue, '__iter__')
         if o.type:
             for trigger in get_type_triggers(o.type):
                 self.add_dependency(trigger)
Example 23
 def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
     # Note that operator methods can't be (non-metaclass) methods of type objects
     # (that is, TypeType objects or Callables representing a type).
     # TODO: TypedDict
     # TODO: metaclasses
     if isinstance(typ, TypeVarType):
         typ = typ.upper_bound
     if isinstance(typ, TupleType):
         typ = typ.fallback
     if isinstance(typ, Instance):
         trigger = make_trigger(typ.type.fullname() + '.' + method)
         self.add_dependency(trigger)
     elif isinstance(typ, UnionType):
         for item in typ.items:
             self.add_operator_method_dependency_for_type(item, method)
Example 24
 def visit_member_expr(self, e: MemberExpr) -> None:
     super().visit_member_expr(e)
     if e.kind is not None:
         # Reference to a module attribute
         if e.fullname is not None:
             trigger = make_trigger(e.fullname)
             self.add_dependency(trigger)
     else:
         # Reference to a non-module attribute
         if e.expr not in self.type_map:
             # No type available -- this happens for unreachable code. Since it's unreachable,
             # it wasn't type checked and we don't need to generate dependencies.
             return
         typ = self.type_map[e.expr]
         self.add_attribute_dependency(typ, e.name)
Example 25
 def visit_member_expr(self, e: MemberExpr) -> None:
     super().visit_member_expr(e)
     if e.kind is not None:
         # Reference to a module attribute
         if e.fullname is not None:
             trigger = make_trigger(e.fullname)
             self.add_dependency(trigger)
     else:
         # Reference to a non-module attribute
         if e.expr not in self.type_map:
             # No type available -- this happens for unreachable code. Since it's unreachable,
             # it wasn't type checked and we don't need to generate dependencies.
             return
         typ = self.type_map[e.expr]
         self.add_attribute_dependency(typ, e.name)
Example 26
 def visit_func_def(self, o: FuncDef) -> None:
     self.scope.enter_function(o)
     target = self.scope.current_target()
     if o.type:
         if self.is_class and isinstance(o.type, FunctionLike):
             signature = bind_self(o.type)  # type: Type
         else:
             signature = o.type
         for trigger in self.get_type_triggers(signature):
             self.add_dependency(trigger)
             self.add_dependency(trigger, target=make_trigger(target))
     if o.info:
         for base in non_trivial_bases(o.info):
             # Base class __init__/__new__ doesn't generate a logical
             # dependency since the override can be incompatible.
             if not self.use_logical_deps() or o.name() not in ('__init__', '__new__'):
                 self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
     self.add_type_alias_deps(self.scope.current_target())
     super().visit_func_def(o)
     variants = set(o.expanded) - {o}
     for ex in variants:
         if isinstance(ex, FuncDef):
             super().visit_func_def(ex)
     self.scope.leave()
Example 27
 def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
     # Note that operator methods can't be (non-metaclass) methods of type objects
     # (that is, TypeType objects or Callables representing a type).
     # TODO: TypedDict
     # TODO: metaclasses
     if isinstance(typ, TypeVarType):
         typ = typ.upper_bound
     if isinstance(typ, TupleType):
         typ = typ.fallback
     if isinstance(typ, Instance):
         trigger = make_trigger(typ.type.fullname() + '.' + method)
         self.add_dependency(trigger)
     elif isinstance(typ, UnionType):
         for item in typ.items:
             self.add_operator_method_dependency_for_type(item, method)
Example 28
 def visit_func_def(self, o: FuncDef) -> None:
     self.scope.enter_function(o)
     target = self.scope.current_target()
     if o.type:
         if self.is_class and isinstance(o.type, FunctionLike):
             signature = bind_self(o.type)  # type: Type
         else:
             signature = o.type
         for trigger in self.get_type_triggers(signature):
             self.add_dependency(trigger)
             self.add_dependency(trigger, target=make_trigger(target))
     if o.info:
         for base in non_trivial_bases(o.info):
             # Base class __init__/__new__ doesn't generate a logical
             # dependency since the override can be incompatible.
             if not self.use_logical_deps() or o.name() not in ('__init__', '__new__'):
                 self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
     self.add_type_alias_deps(self.scope.current_target())
     super().visit_func_def(o)
     variants = set(o.expanded) - {o}
     for ex in variants:
         if isinstance(ex, FuncDef):
             super().visit_func_def(ex)
     self.scope.leave()
Example 29
 def visit_decorator(self, o: Decorator) -> None:
     if not self.use_logical_deps():
         # We don't need to recheck outer scope for an overload, only overload itself.
         # Also if any decorator is nested, it is not externally visible, so we don't need to
         # generate dependency.
         if not o.func.is_overload and self.scope.current_function_name() is None:
             self.add_dependency(make_trigger(o.func.fullname()))
     else:
         # Add logical dependencies from decorators to the function. For example,
         # if we have
         #     @dec
         #     def func(): ...
         # then if `dec` is unannotated, then it will "spoil" `func` and consequently
         # all call sites, making them all `Any`.
         for d in o.decorators:
             tname = None  # type: Optional[str]
             if isinstance(d, RefExpr) and d.fullname is not None:
                 tname = d.fullname
             if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and
                     d.callee.fullname is not None):
                 tname = d.callee.fullname
             if tname is not None:
                 self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname()))
     super().visit_decorator(o)
Example 30
def calculate_active_triggers(manager: BuildManager,
                              old_snapshots: Dict[str, Dict[str,
                                                            SnapshotItem]],
                              new_modules: Dict[str, MypyFile]) -> Set[str]:
    """Determine activated triggers by comparing old and new symbol tables.

    For example, if only the signature of function m.f is different in the new
    symbol table, return {'<m.f>'}.
    """
    names = set()  # type: Set[str]
    for id in new_modules:
        snapshot1 = old_snapshots[id]
        snapshot2 = snapshot_symbol_table(id, new_modules[id].names)
        names |= compare_symbol_table_snapshots(id, snapshot1, snapshot2)
    return {make_trigger(name) for name in names}
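The '<m.f>' notation used in this docstring is simply the fully qualified name wrapped in angle brackets; modules also get a wildcard form used for "from m import *" dependencies (see the later examples that use WILDCARD_TAG and make_wildcard_trigger). A minimal sketch of that convention, assuming the helpers live in mypy.server.trigger as in the mypy versions these examples come from:

    # Trigger naming convention used by all the deps.py examples on this page.
    from mypy.server.trigger import make_trigger, make_wildcard_trigger

    assert make_trigger('m.f') == '<m.f>'                  # per-name trigger
    assert make_wildcard_trigger('m') == '<m[wildcard]>'   # module wildcard trigger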
Example 31
 def visit_decorator(self, o: Decorator) -> None:
     if not self.use_logical_deps():
         # We don't need to recheck outer scope for an overload, only overload itself.
         # Also if any decorator is nested, it is not externally visible, so we don't need to
         # generate dependency.
         if not o.func.is_overload and self.scope.current_function_name() is None:
             self.add_dependency(make_trigger(o.func.fullname))
     else:
         # Add logical dependencies from decorators to the function. For example,
         # if we have
         #     @dec
         #     def func(): ...
         # then if `dec` is unannotated, then it will "spoil" `func` and consequently
         # all call sites, making them all `Any`.
         for d in o.decorators:
             tname: Optional[str] = None
             if isinstance(d, RefExpr) and d.fullname is not None:
                 tname = d.fullname
             if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and
                     d.callee.fullname is not None):
                 tname = d.callee.fullname
             if tname is not None:
                 self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname))
     super().visit_decorator(o)
Example 32
def refresh_suppressed_submodules(module: str, path: Optional[str],
                                  deps: Dict[str, Set[str]], graph: Graph,
                                  fscache: FileSystemCache) -> None:
    """Look for submodules that are now suppressed in target package.

    If a submodule a.b gets added, we need to mark it as suppressed
    in modules that contain "from a import b". Previously we assumed
    that 'a.b' is not a module but a regular name.

    This is only relevant when following imports normally.

    Args:
        module: target package in which to look for submodules
        path: path of the module
    """
    if path is None or not path.endswith(INIT_SUFFIXES):
        # Only packages have submodules.
        return
    # Find any submodules present in the directory.
    pkgdir = os.path.dirname(path)
    for fnam in fscache.listdir(pkgdir):
        if (not fnam.endswith(('.py', '.pyi')) or fnam.startswith("__init__.")
                or fnam.count('.') != 1):
            continue
        shortname = fnam.split('.')[0]
        submodule = module + '.' + shortname
        trigger = make_trigger(submodule)
        if trigger in deps:
            for dep in deps[trigger]:
                # TODO: <...> deps, etc.
                state = graph.get(dep)
                if not state:
                    # Maybe it's a non-top-level target. We only care about the module.
                    dep_module = module_prefix(graph, dep)
                    if dep_module is not None:
                        state = graph.get(dep_module)
                if state:
                    tree = state.tree
                    assert tree  # TODO: What if doesn't exist?
                    for imp in tree.imports:
                        if isinstance(imp, ImportFrom):
                            if (imp.id == module
                                    and any(name == shortname
                                            for name, _ in imp.names)):
                                # TODO: Only if does not exist already
                                state.suppressed.append(submodule)
                                state.suppressed_set.add(submodule)
Example 33
 def add_operator_method_dependency_for_type(self, typ: ProperType, method: str) -> None:
     # Note that operator methods can't be (non-metaclass) methods of type objects
     # (that is, TypeType objects or Callables representing a type).
     if isinstance(typ, TypeVarType):
         typ = get_proper_type(typ.upper_bound)
     if isinstance(typ, TupleType):
         typ = typ.partial_fallback
     if isinstance(typ, Instance):
         trigger = make_trigger(typ.type.fullname + '.' + method)
         self.add_dependency(trigger)
     elif isinstance(typ, UnionType):
         for item in typ.items:
             self.add_operator_method_dependency_for_type(get_proper_type(item), method)
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         self.add_operator_method_dependency_for_type(typ.fallback, method)
     elif isinstance(typ, TypeType):
         if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
             self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type, method)
Example 34
 def add_operator_method_dependency_for_type(self, typ: Type, method: str) -> None:
     # Note that operator methods can't be (non-metaclass) methods of type objects
     # (that is, TypeType objects or Callables representing a type).
     if isinstance(typ, TypeVarType):
         typ = typ.upper_bound
     if isinstance(typ, TupleType):
         typ = typ.partial_fallback
     if isinstance(typ, Instance):
         trigger = make_trigger(typ.type.fullname() + '.' + method)
         self.add_dependency(trigger)
     elif isinstance(typ, UnionType):
         for item in typ.items:
             self.add_operator_method_dependency_for_type(item, method)
     elif isinstance(typ, FunctionLike) and typ.is_type_obj():
         self.add_operator_method_dependency_for_type(typ.fallback, method)
     elif isinstance(typ, TypeType):
         if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
             self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type, method)
Example 35
def calculate_active_triggers(manager: BuildManager,
                              old_snapshots: Dict[str, Dict[str, SnapshotItem]],
                              new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]:
    """Determine activated triggers by comparing old and new symbol tables.

    For example, if only the signature of function m.f is different in the new
    symbol table, return {'<m.f>'}.
    """
    names = set()  # type: Set[str]
    for id in new_modules:
        snapshot1 = old_snapshots.get(id)
        if snapshot1 is None:
            names.add(id)
            snapshot1 = {}
        new = new_modules[id]
        if new is None:
            snapshot2 = snapshot_symbol_table(id, SymbolTable())
            names.add(id)
        else:
            snapshot2 = snapshot_symbol_table(id, new.names)
        diff = compare_symbol_table_snapshots(id, snapshot1, snapshot2)
        package_nesting_level = id.count('.')
        for item in diff.copy():
            if (item.count('.') <= package_nesting_level + 1
                    and item.split('.')[-1] not in ('__builtins__',
                                                    '__file__',
                                                    '__name__',
                                                    '__package__',
                                                    '__doc__')):
                # Activate catch-all wildcard trigger for top-level module changes (used for
                # "from m import *"). This also gets triggered by changes to module-private
                # entries, but as these unneeded dependencies only result in extra processing,
                # it's a minor problem.
                #
                # TODO: Some __* names cause mistriggers. Fix the underlying issue instead of
                #     special casing them here.
                diff.add(id + WILDCARD_TAG)
            if item.count('.') > package_nesting_level + 1:
                # These are for changes within classes, used by protocols.
                diff.add(item.rsplit('.', 1)[0] + WILDCARD_TAG)

        names |= diff
    return {make_trigger(name) for name in names}
Example 36
def calculate_active_triggers(manager: BuildManager,
                              old_snapshots: Dict[str, Dict[str, SnapshotItem]],
                              new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]:
    """Determine activated triggers by comparing old and new symbol tables.

    For example, if only the signature of function m.f is different in the new
    symbol table, return {'<m.f>'}.
    """
    names = set()  # type: Set[str]
    for id in new_modules:
        snapshot1 = old_snapshots.get(id)
        if snapshot1 is None:
            names.add(id)
            snapshot1 = {}
        new = new_modules[id]
        if new is None:
            snapshot2 = snapshot_symbol_table(id, SymbolTable())
            names.add(id)
        else:
            snapshot2 = snapshot_symbol_table(id, new.names)
        diff = compare_symbol_table_snapshots(id, snapshot1, snapshot2)
        package_nesting_level = id.count('.')
        for item in diff.copy():
            if (item.count('.') <= package_nesting_level + 1
                    and item.split('.')[-1] not in ('__builtins__',
                                                    '__file__',
                                                    '__name__',
                                                    '__package__',
                                                    '__doc__')):
                # Activate catch-all wildcard trigger for top-level module changes (used for
                # "from m import *"). This also gets triggered by changes to module-private
                # entries, but as these unneeded dependencies only result in extra processing,
                # it's a minor problem.
                #
                # TODO: Some __* names cause mistriggers. Fix the underlying issue instead of
                #     special casing them here.
                diff.add(id + WILDCARD_TAG)
            if item.count('.') > package_nesting_level + 1:
                # These are for changes within classes, used by protocols.
                diff.add(item.rsplit('.', 1)[0] + WILDCARD_TAG)

        names |= diff
    return {make_trigger(name) for name in names}
Example 37
 def visit_member_expr(self, e: MemberExpr) -> None:
     super().visit_member_expr(e)
     if e.kind is not None:
         # Reference to a module attribute
         self.process_global_ref_expr(e)
     else:
         # Reference to a non-module attribute
         if e.expr not in self.type_map:
             # No type available -- this happens for unreachable code. Since it's unreachable,
             # it wasn't type checked and we don't need to generate dependencies.
             return
         typ = self.type_map[e.expr]
         self.add_attribute_dependency(typ, e.name)
         if self.use_logical_deps() and isinstance(typ, AnyType):
             name = self.get_unimported_fullname(e, typ)
             if name is not None:
                 # Generate a logical dependency from an unimported
                 # definition (which comes from a missing module).
                 # Example:
                 #     import missing  # "missing" not in build
                 #
                 #     def g() -> None:
                 #         missing.f()  # Generate dependency from "missing.f"
                 self.add_dependency(make_trigger(name))
Example 38
 def visit_class_def(self, o: ClassDef) -> None:
     target = self.push(o.name)
     self.add_dependency(make_trigger(target))
     old_is_class = self.is_class
     self.is_class = True
     # TODO: Add dependencies based on MRO and other attributes.
     super().visit_class_def(o)
     self.is_class = old_is_class
     info = o.info
     for name, node in info.names.items():
         if isinstance(node.node, Var):
             for base in non_trivial_bases(info):
                 # If the type of an attribute changes in a base class, we make references
                 # to the attribute in the subclass stale.
                 self.add_dependency(make_trigger(base.fullname() + '.' + name),
                                     target=make_trigger(info.fullname() + '.' + name))
     for base in non_trivial_bases(info):
         for name, node in base.names.items():
             self.add_dependency(make_trigger(base.fullname() + '.' + name),
                                 target=make_trigger(info.fullname() + '.' + name))
         self.add_dependency(make_trigger(base.fullname() + '.__init__'),
                             target=make_trigger(info.fullname() + '.__init__'))
     self.pop()
Example 39
File: deps.py Project: sixolet/mypy
 def visit_import(self, o: Import) -> None:
     for id, as_id in o.ids:
         self.add_dependency(make_trigger(id), self.scope.current_target())
Example 40
File: deps.py Project: sixolet/mypy
 def process_type_info(self, info: TypeInfo) -> None:
     target = self.scope.current_full_target()
     for base in info.bases:
         self.add_type_dependencies(base, target=target)
     if info.tuple_type:
         self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
     if info.typeddict_type:
         self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
     if info.declared_metaclass:
         self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
     self.add_type_alias_deps(self.scope.current_target())
     for name, node in info.names.items():
         if isinstance(node.node, Var):
             for base_info in non_trivial_bases(info):
                 # If the type of an attribute changes in a base class, we make references
                 # to the attribute in the subclass stale.
                 self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                     target=make_trigger(info.fullname() + '.' + name))
     for base_info in non_trivial_bases(info):
         for name, node in base_info.names.items():
             self.add_dependency(make_trigger(base_info.fullname() + '.' + name),
                                 target=make_trigger(info.fullname() + '.' + name))
         self.add_dependency(make_trigger(base_info.fullname() + '.__init__'),
                             target=make_trigger(info.fullname() + '.__init__'))
         self.add_dependency(make_trigger(base_info.fullname() + '.__new__'),
                             target=make_trigger(info.fullname() + '.__new__'))
Example 41
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     rvalue = o.rvalue
     if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed,
                                                    TypeVarExpr):
         analyzed = rvalue.analyzed
         self.add_type_dependencies(analyzed.upper_bound,
                                    target=make_trigger(
                                        analyzed.fullname()))
         for val in analyzed.values:
             self.add_type_dependencies(val,
                                        target=make_trigger(
                                            analyzed.fullname()))
         # We need to re-analyze the definition if bound or value is deleted.
         super().visit_call_expr(rvalue)
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, NamedTupleExpr):
         # Depend on types of named tuple items.
         info = rvalue.analyzed.info
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         for name, symnode in info.names.items():
             if not name.startswith('_') and isinstance(symnode.node, Var):
                 typ = symnode.node.type
                 if typ:
                     self.add_type_dependencies(typ)
                     self.add_type_dependencies(typ,
                                                target=make_trigger(prefix))
                     attr_target = make_trigger('%s.%s' % (prefix, name))
                     self.add_type_dependencies(typ, target=attr_target)
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, TypedDictExpr):
         # Depend on the underlying typeddict type
         info = rvalue.analyzed.info
         assert info.typeddict_type is not None
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         self.add_type_dependencies(info.typeddict_type,
                                    target=make_trigger(prefix))
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, EnumCallExpr):
         # Enum values are currently not checked, but for future we add the deps on them
         for name, symnode in rvalue.analyzed.info.names.items():
             if isinstance(symnode.node, Var) and symnode.node.type:
                 self.add_type_dependencies(symnode.node.type)
     elif o.is_alias_def:
         assert len(o.lvalues) == 1
         lvalue = o.lvalues[0]
         assert isinstance(lvalue, NameExpr)
         # TODO: get rid of this extra dependency from __init__ to alias definition scope
         typ = self.type_map.get(lvalue)
         if isinstance(typ, FunctionLike) and typ.is_type_obj():
             class_name = typ.type_object().fullname()
             self.add_dependency(make_trigger(class_name + '.__init__'))
             self.add_dependency(make_trigger(class_name + '.__new__'))
         if isinstance(rvalue, IndexExpr) and isinstance(
                 rvalue.analyzed, TypeAliasExpr):
             self.add_type_dependencies(rvalue.analyzed.type)
     else:
         # Normal assignment
         super().visit_assignment_stmt(o)
         for lvalue in o.lvalues:
             self.process_lvalue(lvalue)
         items = o.lvalues + [rvalue]
         for i in range(len(items) - 1):
             lvalue = items[i]
             rvalue = items[i + 1]
             if isinstance(lvalue, TupleExpr):
                 self.add_attribute_dependency_for_expr(rvalue, '__iter__')
         if o.type:
             for trigger in get_type_triggers(o.type):
                 self.add_dependency(trigger)
Example 42
    def process_type_info(self, info: TypeInfo) -> None:
        target = self.scope.current_full_target()
        for base in info.bases:
            self.add_type_dependencies(base, target=target)
        if info.tuple_type:
            self.add_type_dependencies(info.tuple_type,
                                       target=make_trigger(target))
        if info.typeddict_type:
            self.add_type_dependencies(info.typeddict_type,
                                       target=make_trigger(target))
        if info.declared_metaclass:
            self.add_type_dependencies(info.declared_metaclass,
                                       target=make_trigger(target))
        if info.is_protocol:
            for base_info in info.mro[:-1]:
                # We add dependencies from whole MRO to cover explicit subprotocols.
                # For example:
                #
                #     class Super(Protocol):
                #         x: int
                #     class Sub(Super, Protocol):
                #         y: int
                #
                # In this example we add <Super[wildcard]> -> <Sub>, to invalidate Sub if
                # a new member is added to Super.
                self.add_dependency(make_wildcard_trigger(
                    base_info.fullname()),
                                    target=make_trigger(target))
                # More protocol dependencies are collected in TypeState._snapshot_protocol_deps
                # after a full run or update is finished.

        self.add_type_alias_deps(self.scope.current_target())
        for name, node in info.names.items():
            if isinstance(node.node, Var):
                # Recheck Liskov if needed, self definitions are checked in the defining method
                if node.node.is_initialized_in_class and has_user_bases(info):
                    self.add_dependency(
                        make_trigger(info.fullname() + '.' + name))
                for base_info in non_trivial_bases(info):
                    # If the type of an attribute changes in a base class, we make references
                    # to the attribute in the subclass stale.
                    self.add_dependency(
                        make_trigger(base_info.fullname() + '.' + name),
                        target=make_trigger(info.fullname() + '.' + name))
        for base_info in non_trivial_bases(info):
            for name, node in base_info.names.items():
                if self.options and self.options.logical_deps:
                    # Skip logical dependency if an attribute is not overridden. For example,
                    # in case of:
                    #     class Base:
                    #         x = 1
                    #         y = 2
                    #     class Sub(Base):
                    #         x = 3
                    # we skip <Base.y> -> <Child.y>, because even if `y` is unannotated it
                    # doesn't affect precision of Liskov checking.
                    if name not in info.names:
                        continue
                self.add_dependency(
                    make_trigger(base_info.fullname() + '.' + name),
                    target=make_trigger(info.fullname() + '.' + name))
            self.add_dependency(
                make_trigger(base_info.fullname() + '.__init__'),
                target=make_trigger(info.fullname() + '.__init__'))
            self.add_dependency(
                make_trigger(base_info.fullname() + '.__new__'),
                target=make_trigger(info.fullname() + '.__new__'))
            # If the set of abstract attributes change, this may invalidate class
            # instantiation, or change the generated error message, since Python checks
            # class abstract status when creating an instance.
            #
            # TODO: We should probably add this dependency only from the __init__ of the
            #     current class, and independent of bases (to trigger changes in message
            #     wording, as errors may enumerate all abstract attributes).
            self.add_dependency(
                make_trigger(base_info.fullname() + '.(abstract)'),
                target=make_trigger(info.fullname() + '.__init__'))
            # If the base class abstract attributes change, subclass abstract
            # attributes need to be recalculated.
            self.add_dependency(
                make_trigger(base_info.fullname() + '.(abstract)'))
Example 43
File: deps.py Project: sixolet/mypy
 def add_type_alias_deps(self, target: str) -> None:
     # Type aliases are special, because some of the dependencies are calculated
     # in semanal.py, before they are expanded.
     if target in self.alias_deps:
         for alias in self.alias_deps[target]:
             self.add_dependency(make_trigger(alias))
Example 44
 def visit_instance(self, typ: Instance) -> List[str]:
     trigger = make_trigger(typ.type.fullname())
     triggers = [trigger]
     for arg in typ.args:
         triggers.extend(get_type_triggers(arg))
     return triggers
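For a generic instance the triggers simply compose: the trigger for the instance's own class plus the triggers of each type argument. A hedged illustration (the class x.C is hypothetical; builtins fullnames are as in typeshed):

    # Illustrative result of get_type_triggers() for the type Dict[str, x.C]:
    expected = ['<builtins.dict>', '<builtins.str>', '<x.C>']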
Example 45
File: deps.py Project: sixolet/mypy
 def visit_any(self, typ: AnyType) -> List[str]:
     if typ.missing_import_name is not None:
         return [make_trigger(typ.missing_import_name)]
     return []
Example 46
 def visit_super_expr(self, e: SuperExpr) -> None:
     super().visit_super_expr(e)
     if e.info is not None:
         self.add_dependency(make_trigger(e.info.fullname() + '.' + e.name))
Example 47
 def add_type_alias_deps(self, target: str) -> None:
     # Type aliases are special, because some of the dependencies are calculated
     # in semanal.py, before they are expanded.
     if target in self.alias_deps:
         for alias in self.alias_deps[target]:
             self.add_dependency(make_trigger(alias))
Example 48
 def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
     rvalue = o.rvalue
     if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed,
                                                    TypeVarExpr):
         analyzed = rvalue.analyzed
         self.add_type_dependencies(analyzed.upper_bound,
                                    target=make_trigger(
                                        analyzed.fullname()))
         for val in analyzed.values:
             self.add_type_dependencies(val,
                                        target=make_trigger(
                                            analyzed.fullname()))
         # We need to re-analyze the definition if bound or value is deleted.
         super().visit_call_expr(rvalue)
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, NamedTupleExpr):
         # Depend on types of named tuple items.
         info = rvalue.analyzed.info
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         for name, symnode in info.names.items():
             if not name.startswith('_') and isinstance(symnode.node, Var):
                 typ = symnode.node.type
                 if typ:
                     self.add_type_dependencies(typ)
                     self.add_type_dependencies(typ,
                                                target=make_trigger(prefix))
                     attr_target = make_trigger('%s.%s' % (prefix, name))
                     self.add_type_dependencies(typ, target=attr_target)
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, TypedDictExpr):
         # Depend on the underlying typeddict type
         info = rvalue.analyzed.info
         assert info.typeddict_type is not None
         prefix = '%s.%s' % (self.scope.current_full_target(), info.name())
         self.add_type_dependencies(info.typeddict_type,
                                    target=make_trigger(prefix))
     elif isinstance(rvalue, CallExpr) and isinstance(
             rvalue.analyzed, EnumCallExpr):
          # Enum values are currently not checked, but we add the deps on them for the future
         for name, symnode in rvalue.analyzed.info.names.items():
             if isinstance(symnode.node, Var) and symnode.node.type:
                 self.add_type_dependencies(symnode.node.type)
     elif o.is_alias_def:
         assert len(o.lvalues) == 1
         lvalue = o.lvalues[0]
         assert isinstance(lvalue, NameExpr)
         # TODO: get rid of this extra dependency from __init__ to alias definition scope
         typ = self.type_map.get(lvalue)
         if isinstance(typ, FunctionLike) and typ.is_type_obj():
             class_name = typ.type_object().fullname()
             self.add_dependency(make_trigger(class_name + '.__init__'))
             self.add_dependency(make_trigger(class_name + '.__new__'))
         if isinstance(rvalue, IndexExpr) and isinstance(
                 rvalue.analyzed, TypeAliasExpr):
             self.add_type_dependencies(rvalue.analyzed.type)
         elif typ:
             self.add_type_dependencies(typ)
     else:
         # Normal assignment
         super().visit_assignment_stmt(o)
         for lvalue in o.lvalues:
             self.process_lvalue(lvalue)
         items = o.lvalues + [rvalue]
         for i in range(len(items) - 1):
             lvalue = items[i]
             rvalue = items[i + 1]
             if isinstance(lvalue, TupleExpr):
                 self.add_attribute_dependency_for_expr(rvalue, '__iter__')
         if o.type:
             for trigger in get_type_triggers(o.type):
                 self.add_dependency(trigger)
     if self.options and self.options.logical_deps and o.unanalyzed_type is None:
         # Special case: for definitions without an explicit type like this:
         #     x = func(...)
         # we add a logical dependency <func> -> <x>, because if `func` is not annotated,
         # then it will make all points of use of `x` unchecked.
         if (isinstance(rvalue, CallExpr)
                 and isinstance(rvalue.callee, RefExpr)
                 and rvalue.callee.fullname is not None):
             fname = None  # type: Optional[str]
             if isinstance(rvalue.callee.node, TypeInfo):
                 # use actual __init__ as a dependency source
                 init = rvalue.callee.node.get('__init__')
                 if init and isinstance(init.node, FuncBase):
                     fname = init.node.fullname()
             else:
                 fname = rvalue.callee.fullname
             if fname is None:
                 return
             for lv in o.lvalues:
                 if isinstance(lv,
                               RefExpr) and lv.fullname and lv.is_new_def:
                     if lv.kind == LDEF:
                         return  # local definitions don't generate logical deps
                     self.add_dependency(make_trigger(fname),
                                         make_trigger(lv.fullname))
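To make the logical-dependency branch at the end concrete, here is a hedged sketch of what gets recorded for an unannotated assignment (module and names are hypothetical):

# Source (hypothetical module m):
#     def func() -> int: ...
#     x = func()               # no explicit annotation on x
# With logical deps enabled, the branch above records roughly:
logical_deps = {'<m.func>': {'<m.x>'}}
# so if func's signature changes, every point of use of x is re-checked.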
Example no. 49
0
 def visit_import_from(self, o: ImportFrom) -> None:
     module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                            o.relative, o.id,
                                            self.is_package_init_file)
     for name, as_name in o.names:
         self.add_dependency(make_trigger(module_id + '.' + name))
Example no. 50
0
 def visit_import(self, o: Import) -> None:
     for id, as_id in o.ids:
         self.add_dependency(make_trigger(id), self.scope.current_target())
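Taken together, the two import visitors above reduce to triggers on module and module-attribute names. A rough sketch of the entries they add for a hypothetical module m:

# Hypothetical module m:
#     import pkg.sub
#     from pkg.mod import f
# Dependencies recorded (trigger -> targets to reprocess), roughly:
deps = {
    '<pkg.sub>': {'m'},       # visit_import, targeted at the current module
    '<pkg.mod.f>': {'m'},     # visit_import_from
}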
Example no. 51
0
File: deps.py Project: nimin98/mypy
 def visit_import(self, o: Import) -> None:
     for id, as_id in o.ids:
         # TODO: as_id
         self.add_dependency(make_trigger(id), self.current())
Example no. 52
0
File: deps.py Project: nimin98/mypy
 def visit_import_from(self, o: ImportFrom) -> None:
     assert o.relative == 0  # Relative imports not supported
     for name, as_name in o.names:
         assert as_name is None or as_name == name
         self.add_dependency(make_trigger(o.id + '.' + name))
Example no. 53
0
File: deps.py Project: nimin98/mypy
 def visit_call_expr(self, e: CallExpr) -> None:
     super().visit_call_expr(e)
     callee_type = self.type_map.get(e.callee)
     if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj():
         class_name = callee_type.type_object().fullname()
         self.add_dependency(make_trigger(class_name + '.__init__'))
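Constructing an instance depends on the class's __init__ (newer versions also add __new__, as in the assignment-statement example above). A rough illustration with hypothetical names:

# Hypothetical module m:
#     class C: ...
#     def f() -> None:
#         C()                  # callee type is the type object of C
# The visitor records roughly:
deps = {'<m.C.__init__>': {'m.f'}}
# so changing C.__init__'s signature re-checks f.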
Example no. 54
0
 def visit_type_var(self, typ: TypeVarType) -> List[str]:
     # TODO: bound (values?)
     triggers = []
     if typ.fullname:
         triggers.append(make_trigger(typ.fullname))
     return triggers
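For a type variable, the only trigger in this version is the variable's own fully qualified name (the bound is still a TODO here). For example, with a hypothetical module m:

#     T = TypeVar('T')
# A type that mentions T contributes the trigger:
triggers = ['<m.T>']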
Example no. 55
0
File: deps.py Project: sixolet/mypy
 def visit_super_expr(self, e: SuperExpr) -> None:
     super().visit_super_expr(e)
     if e.info is not None:
         self.add_dependency(make_trigger(e.info.fullname() + '.' + e.name))
Example no. 56
0
 def visit_decorator(self, o: Decorator) -> None:
     self.add_dependency(make_trigger(o.func.fullname()))
     super().visit_decorator(o)
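The decorator visitor adds a dependency on the decorated function's own fully qualified name, so the decorator application is re-checked when the underlying function changes. A rough sketch with hypothetical names:

# Hypothetical module m:
#     @some_decorator
#     def f(x: int) -> int: ...
# visit_decorator registers a dependency on the trigger:
trigger = '<m.f>'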
Example no. 57
0
 def visit_any(self, typ: AnyType) -> List[str]:
     if typ.missing_import_name is not None:
         return [make_trigger(typ.missing_import_name)]
     return []
Example no. 58
0
 def visit_param_spec(self, typ: ParamSpecType) -> List[str]:
     triggers = []
     if typ.fullname:
         triggers.append(make_trigger(typ.fullname))
     triggers.extend(self.get_type_triggers(typ.upper_bound))
     return triggers
Example no. 59
0
def reprocess_nodes(manager: BuildManager,
                    graph: Dict[str, State],
                    module_id: str,
                    nodeset: Set[FineGrainedDeferredNode],
                    deps: Dict[str, Set[str]],
                    processed_targets: List[str]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                    module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)

    def key(node: FineGrainedDeferredNode) -> int:
        # Unlike modules which are sorted by name within SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    options = graph[module_id].options
    manager.errors.set_file_ignored_lines(
        file_node.path, file_node.ignored_lines, options.ignore_errors)

    targets = set()
    for node in nodes:
        target = target_from_node(module_id, node.node)
        if target is not None:
            targets.add(target)
    manager.errors.clear_errors_in_targets(file_node.path, targets)

    # If one of the nodes is the module itself, emit any errors that
    # happened before semantic analysis.
    for target in targets:
        if target == module_id:
            for info in graph[module_id].early_errors:
                manager.errors.add_error_info(info)

    # Strip semantic analysis information.
    saved_attrs = {}  # type: SavedAttributes
    for deferred in nodes:
        processed_targets.append(deferred.node.fullname)
        strip_target(deferred.node, saved_attrs)
    semantic_analysis_for_targets(graph[module_id], nodes, graph, saved_attrs)
    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    if manager.options.export_types:
        manager.all_types.update(graph[module_id].type_map())

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname,
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, options)

    # Report missing imports.
    graph[module_id].verify_dependencies()

    graph[module_id].free_state()

    return new_triggered
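To show what the final steps produce, here is a hedged sketch of the snapshot comparison: if re-checking changed the inferred type of a single attribute, the returned set contains just that name wrapped as a trigger (module and attribute names are hypothetical):

# Hypothetical outcome for module m after re-checking:
changed = {'m.C.attr'}                         # symbol whose snapshot differs
new_triggered = {'<%s>' % name for name in changed}
assert new_triggered == {'<m.C.attr>'}
# The caller uses these triggers to find dependent targets elsewhere in the
# program and schedules them for reprocessing.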
Example no. 60
0
File: update.py Project: rkday/mypy
def reprocess_nodes(manager: BuildManager, graph: Dict[str, State],
                    module_id: str, nodeset: Set[DeferredNode],
                    deps: Dict[str, Set[str]]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained(
            '%s not in graph (blocking errors or deleted?)' % module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname(),
                                               file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname(),
                                                 file_node.names)

    def key(node: DeferredNode) -> int:
        # Unlike modules which are sorted by name within SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    # TODO: ignore_all argument to set_file_ignored_lines
    manager.errors.set_file_ignored_lines(file_node.path,
                                          file_node.ignored_lines)

    # Strip semantic analysis information.
    for deferred in nodes:
        strip_target(deferred.node)
    semantic_analyzer = manager.semantic_analyzer

    patches = []  # type: List[Tuple[int, Callable[[], None]]]

    # Second pass of semantic analysis. We don't redo the first pass, because it only
    # does local things that won't go stale.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo):
            manager.semantic_analyzer.refresh_partial(deferred.node, patches)

    # Third pass of semantic analysis.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo):
            manager.semantic_analyzer_pass3.refresh_partial(
                deferred.node, patches)

    apply_semantic_analyzer_patches(patches)

    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname(),
                                               file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node,
                       new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname(),
                                                 file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname(),
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, manager.options)

    # Report missing imports.
    verify_dependencies(graph[module_id], manager)

    return new_triggered