def visit_instance(self, t: Instance) -> None:
    info = t.type
    if info.replaced or info.tuple_type:
        self.indicator['synthetic'] = True
    # Check type argument count.
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            from_builtins = t.type.fullname() in nongen_builtins and not t.from_generic_builtin
            if (self.options.disallow_any_generics and
                    not self.is_typeshed_stub and
                    from_builtins):
                alternative = nongen_builtins[t.type.fullname()]
                self.fail(messages.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), t)
            # Insert implicit 'Any' type arguments.
            if from_builtins:
                # this 'Any' was already reported elsewhere
                any_type = AnyType(TypeOfAny.special_form,
                                   line=t.line, column=t.column)
            else:
                any_type = AnyType(TypeOfAny.from_omitted_generics,
                                   line=t.line, column=t.column)
            t.args = [any_type] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = '{} type arguments'.format(n)
        if n == 0:
            s = 'no type arguments'
        elif n == 1:
            s = '1 type argument'
        act = str(len(t.args))
        if act == '0':
            act = 'none'
        self.fail('"{}" expects {}, but {} given'.format(
            info.name(), s, act), t)
        # Construct the correct number of type arguments, as
        # otherwise the type checker may crash as it expects
        # things to be right.
        t.args = [AnyType(TypeOfAny.from_error) for _ in info.type_vars]
        t.invalid = True
    elif info.defn.type_vars:
        # Check type argument values. This is postponed to the end of semantic analysis
        # since we need full MROs and resolved forward references.
        for tvar in info.defn.type_vars:
            if (tvar.values or not isinstance(tvar.upper_bound, Instance) or
                    tvar.upper_bound.type.fullname() != 'builtins.object'):
                # Some restrictions on type variable. These can only be checked later
                # after we have final MROs and forward references have been resolved.
                self.indicator['typevar'] = True
    for arg in t.args:
        arg.accept(self)
    if info.is_newtype:
        for base in info.bases:
            base.accept(self)
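Roughly, the two diagnostics above surface to users as in the illustrative module below. This is input for mypy rather than mypy code, and --disallow-any-generics is the command-line spelling of the options.disallow_any_generics flag referenced above; the function names are invented for the example.

from __future__ import annotations

from typing import Dict, List


def f(x: Dict[int]) -> None:       # error: "dict" expects 2 type arguments, but 1 given
    ...


def g(items: list) -> None:        # bare builtin generic: silently treated as list[Any],
    ...                            # or reported when --disallow-any-generics is enabled


def h(names: List[str]) -> None:   # OK: argument count matches the class's type variables
    ...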
def visit_instance(self, inst: Instance) -> None:
    # TODO: Combine Instances that are exactly the same?
    type_ref = inst.type_ref
    if type_ref is None:
        return  # We've already been here.
    inst.type_ref = None
    inst.type = lookup_qualified_typeinfo(self.modules, type_ref, self.quick_and_dirty)
    # TODO: Is this needed or redundant?
    # Also fix up the bases, just in case.
    for base in inst.type.bases:
        if base.type is NOT_READY:
            base.accept(self)
    for a in inst.args:
        a.accept(self)
    if inst.final_value is not None:
        inst.final_value.accept(self)
def visit_instance(self, t: Instance) -> None:
    info = t.type
    # Check type argument count.
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            # Insert implicit 'Any' type arguments.
            t.args = [AnyType()] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = "{} type arguments".format(n)
        if n == 0:
            s = "no type arguments"
        elif n == 1:
            s = "1 type argument"
        act = str(len(t.args))
        if act == "0":
            act = "none"
        self.fail('"{}" expects {}, but {} given'.format(info.name(), s, act), t)
        # Construct the correct number of type arguments, as
        # otherwise the type checker may crash as it expects
        # things to be right.
        t.args = [AnyType() for _ in info.type_vars]
    elif info.defn.type_vars:
        # Check type argument values.
        for arg, tvar in zip(t.args, info.defn.type_vars):
            if tvar.values:
                if isinstance(arg, TypeVarType):
                    arg_values = arg.values
                    if not arg_values:
                        self.fail(
                            'Type variable "{}" not valid as type '
                            'argument value for "{}"'.format(arg.name, info.name()),
                            t,
                        )
                        continue
                else:
                    arg_values = [arg]
                self.check_type_var_values(info, arg_values, tvar.values, t)
            if not satisfies_upper_bound(arg, tvar.upper_bound):
                self.fail(
                    'Type argument "{}" of "{}" must be '
                    'a subtype of "{}"'.format(arg, info.name(), tvar.upper_bound),
                    t,
                )
    for arg in t.args:
        arg.accept(self)
def visit_instance(self, inst: Instance) -> None:
    # TODO: Combine Instances that are exactly the same?
    type_ref = inst.type_ref
    if type_ref is None:
        return  # We've already been here.
    del inst.type_ref
    node = lookup_qualified(self.modules, type_ref, self.quick_and_dirty)
    if isinstance(node, TypeInfo):
        inst.type = node
        # TODO: Is this needed or redundant?
        # Also fix up the bases, just in case.
        for base in inst.type.bases:
            if base.type is NOT_READY:
                base.accept(self)
    else:
        # Looks like a missing TypeInfo in quick mode, put something there
        assert self.quick_and_dirty, "Should never get here in normal mode"
        inst.type = stale_info(self.modules)
    for a in inst.args:
        a.accept(self)
def visit_instance(self, inst: Instance) -> None:
    # TODO: Combine Instances that are exactly the same?
    type_ref = inst.type_ref
    if type_ref is None:
        return  # We've already been here.
    del inst.type_ref
    node = lookup_qualified(self.modules, type_ref)
    if isinstance(node, TypeInfo):
        inst.type = node
        # TODO: Is this needed or redundant?
        # Also fix up the bases, just in case.
        for base in inst.type.bases:
            if base.type is None:
                base.accept(self)
    for a in inst.args:
        a.accept(self)
def visit_instance(self, t: Instance) -> None:
    info = t.type
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            # Implicit 'Any' type arguments.
            # TODO remove <Type> below
            t.args = [AnyType()] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = '{} type arguments'.format(n)
        if n == 0:
            s = 'no type arguments'
        elif n == 1:
            s = '1 type argument'
        act = str(len(t.args))
        if act == '0':
            act = 'none'
        self.fail('"{}" expects {}, but {} given'.format(
            info.name(), s, act), t)
    for arg in t.args:
        arg.accept(self)
def visit_instance(self, t: Instance) -> None:
    info = t.type
    # Check type argument count.
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            # Insert implicit 'Any' type arguments.
            t.args = [AnyType()] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = '{} type arguments'.format(n)
        if n == 0:
            s = 'no type arguments'
        elif n == 1:
            s = '1 type argument'
        act = str(len(t.args))
        if act == '0':
            act = 'none'
        self.fail('"{}" expects {}, but {} given'.format(
            info.name(), s, act), t)
    elif info.defn.type_vars:
        # Check type argument values.
        for arg, tvar in zip(t.args, info.defn.type_vars):
            if tvar.values:
                if isinstance(arg, TypeVarType):
                    arg_values = arg.values
                    if not arg_values:
                        self.fail('Type variable "{}" not valid as type '
                                  'argument value for "{}"'.format(
                                      arg.name, info.name()), t)
                        continue
                else:
                    arg_values = [arg]
                self.check_type_var_values(info, arg_values, tvar.values, t)
    for arg in t.args:
        arg.accept(self)
def visit_unbound_type(self, t: UnboundType) -> Type:
    if t.optional:
        t.optional = False
        # We don't need to worry about double-wrapping Optionals or
        # wrapping Anys: Union simplification will take care of that.
        return make_optional_type(self.visit_unbound_type(t))
    sym = self.lookup(t.name, t)
    if sym is not None:
        if sym.node is None:
            # UNBOUND_IMPORTED can happen if an unknown name was imported.
            if sym.kind != UNBOUND_IMPORTED:
                self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
            return AnyType()
        fullname = sym.node.fullname()
        hook = self.plugin.get_type_analyze_hook(fullname)
        if hook:
            return hook(AnalyzeTypeContext(t, t, self))
        if (fullname in nongen_builtins and t.args and
                not sym.normalized and not self.allow_unnormalized):
            self.fail(no_subscript_builtin_alias(fullname), t)
        tvar_def = self.tvar_scope.get_binding(sym)
        if sym.kind == TVAR and tvar_def is not None:
            if len(t.args) > 0:
                self.fail('Type variable "{}" used with arguments'.format(
                    t.name), t)
            return TypeVarType(tvar_def, t.line)
        elif fullname == 'builtins.None':
            return NoneTyp()
        elif fullname == 'typing.Any' or fullname == 'builtins.Any':
            return AnyType(explicit=True)
        elif fullname == 'typing.Tuple':
            if len(t.args) == 0 and not t.empty_tuple_index:
                # Bare 'Tuple' is same as 'tuple'
                if 'generics' in self.options.disallow_any and not self.is_typeshed_stub:
                    self.fail(messages.BARE_GENERIC, t)
                typ = self.named_type('builtins.tuple', line=t.line, column=t.column)
                typ.from_generic_builtin = True
                return typ
            if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                # Tuple[T, ...] (uniform, variable-length tuple)
                instance = self.named_type('builtins.tuple', [self.anal_type(t.args[0])])
                instance.line = t.line
                return instance
            return self.tuple_type(self.anal_array(t.args))
        elif fullname == 'typing.Union':
            items = self.anal_array(t.args)
            return UnionType.make_union(items)
        elif fullname == 'typing.Optional':
            if len(t.args) != 1:
                self.fail('Optional[...] must have exactly one type argument', t)
                return AnyType()
            item = self.anal_type(t.args[0])
            return make_optional_type(item)
        elif fullname == 'typing.Callable':
            return self.analyze_callable_type(t)
        elif fullname == 'typing.Type':
            if len(t.args) == 0:
                any_type = AnyType(from_omitted_generics=True,
                                   line=t.line, column=t.column)
                return TypeType(any_type, line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail('Type[...] must have exactly one type argument', t)
            item = self.anal_type(t.args[0])
            return TypeType.make_normalized(item, line=t.line)
        elif fullname == 'typing.ClassVar':
            if self.nesting_level > 0:
                self.fail('Invalid type: ClassVar nested inside other type', t)
            if len(t.args) == 0:
                return AnyType(line=t.line)
            if len(t.args) != 1:
                self.fail('ClassVar[...] must have at most one type argument', t)
                return AnyType()
            item = self.anal_type(t.args[0])
            if isinstance(item, TypeVarType) or get_type_vars(item):
                self.fail('Invalid type: ClassVar cannot be generic', t)
                return AnyType()
            return item
        elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'):
            return UninhabitedType(is_noreturn=True)
        elif sym.kind == TYPE_ALIAS:
            override = sym.type_override
            all_vars = sym.alias_tvars
            assert override is not None
            an_args = self.anal_array(t.args)
            if all_vars is not None:
                exp_len = len(all_vars)
            else:
                exp_len = 0
            act_len = len(an_args)
            if exp_len > 0 and act_len == 0:
                # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...]
                assert all_vars is not None
                return set_any_tvars(override, all_vars, t.line, t.column)
            if exp_len == 0 and act_len == 0:
                return override
            if act_len != exp_len:
                self.fail('Bad number of arguments for type alias, expected: %s, given: %s'
                          % (exp_len, act_len), t)
                return set_any_tvars(override, all_vars or [],
                                     t.line, t.column, implicit=False)
            assert all_vars is not None
            return replace_alias_tvars(override, all_vars, an_args, t.line, t.column)
        elif not isinstance(sym.node, TypeInfo):
            name = sym.fullname
            if name is None:
                name = sym.node.name()
            if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
                # Something with an Any type -- make it an alias for Any in a type
                # context. This is slightly problematic as it allows using the type 'Any'
                # as a base class -- however, this will fail soon at runtime so the problem
                # is pretty minor.
                return AnyType(from_unimported_type=True)
            # Allow unbound type variables when defining an alias
            if not (self.aliasing and sym.kind == TVAR and
                    self.tvar_scope.get_binding(sym) is None):
                self.fail('Invalid type "{}"'.format(name), t)
            return t
        info = sym.node  # type: TypeInfo
        if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
            return TupleType(self.anal_array(t.args),
                             Instance(info, [AnyType()], t.line),
                             t.line)
        else:
            # Analyze arguments and construct Instance type. The
            # number of type arguments and their values are
            # checked only later, since we do not always know the
            # valid count at this point. Thus we may construct an
            # Instance with an invalid number of type arguments.
            instance = Instance(info, self.anal_array(t.args), t.line, t.column)
            instance.from_generic_builtin = sym.normalized
            tup = info.tuple_type
            if tup is not None:
                # The class has a Tuple[...] base class so it will be
                # represented as a tuple type.
                if t.args:
                    self.fail('Generic tuple types not supported', t)
                    return AnyType()
                return tup.copy_modified(items=self.anal_array(tup.items),
                                         fallback=instance)
            td = info.typeddict_type
            if td is not None:
                # The class has a TypedDict[...] base class so it will be
                # represented as a typeddict type.
                if t.args:
                    self.fail('Generic TypedDict types not supported', t)
                    return AnyType()
                # Create a named TypedDictType
                return td.copy_modified(item_types=self.anal_array(list(td.items.values())),
                                        fallback=instance)
            return instance
    else:
        return AnyType()
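The special forms handled above are ordinary user annotations. A small illustrative module (checker input rather than checker code) exercising the Tuple, Optional, Type and ClassVar branches; from __future__ import annotations keeps the deliberately malformed last annotation from being evaluated at runtime.

from __future__ import annotations

from typing import ClassVar, Optional, Tuple, Type


class Config:
    # ClassVar[...] takes at most one argument and may not be generic.
    debug: ClassVar[bool] = False


def load(path: Optional[str],            # analyzed as Union[str, None]
         versions: Tuple[int, ...],      # uniform, variable-length tuple
         factory: Type[Config]) -> Tuple[str, int]:
    return (path or "default", len(versions))


bad: Optional[int, str]   # error: Optional[...] must have exactly one type argument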
def visit_instance(self, t: Instance) -> None:
    info = t.type
    if info.replaced or info.tuple_type:
        self.indicator['synthetic'] = True
    # Check type argument count.
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            from_builtins = t.type.fullname() in nongen_builtins and not t.from_generic_builtin
            if (self.options.disallow_any_generics and
                    not self.is_typeshed_stub and
                    from_builtins):
                alternative = nongen_builtins[t.type.fullname()]
                self.fail(messages.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), t)
            # Insert implicit 'Any' type arguments.
            if from_builtins:
                # this 'Any' was already reported elsewhere
                any_type = AnyType(TypeOfAny.special_form,
                                   line=t.line, column=t.column)
            else:
                any_type = AnyType(TypeOfAny.from_omitted_generics,
                                   line=t.line, column=t.column)
            t.args = [any_type] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = '{} type arguments'.format(n)
        if n == 0:
            s = 'no type arguments'
        elif n == 1:
            s = '1 type argument'
        act = str(len(t.args))
        if act == '0':
            act = 'none'
        self.fail('"{}" expects {}, but {} given'.format(info.name(), s, act), t)
        # Construct the correct number of type arguments, as
        # otherwise the type checker may crash as it expects
        # things to be right.
        t.args = [AnyType(TypeOfAny.from_error) for _ in info.type_vars]
        t.invalid = True
    elif info.defn.type_vars:
        # Check type argument values.
        # TODO: Calling is_subtype and is_same_types in semantic analysis is a bad idea
        for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
            if tvar.values:
                if isinstance(arg, TypeVarType):
                    arg_values = arg.values
                    if not arg_values:
                        self.fail('Type variable "{}" not valid as type '
                                  'argument value for "{}"'.format(
                                      arg.name, info.name()), t)
                        continue
                else:
                    arg_values = [arg]
                self.check_type_var_values(info, arg_values, tvar.name,
                                           tvar.values, i + 1, t)
            # TODO: These hacks will be not necessary when this will be moved to later stage.
            arg = self.resolve_type(arg)
            bound = self.resolve_type(tvar.upper_bound)
            if not is_subtype(arg, bound):
                self.fail('Type argument "{}" of "{}" must be '
                          'a subtype of "{}"'.format(arg, info.name(), bound), t)
    for arg in t.args:
        arg.accept(self)
    if info.is_newtype:
        for base in info.bases:
            base.accept(self)
def visit_unbound_type(self, t: UnboundType) -> Type:
    sym = self.lookup(t.name, t)
    if sym is not None:
        fullname = sym.node.fullname()
        if sym.kind == BOUND_TVAR:
            if len(t.args) > 0:
                self.fail('Type variable "{}" used with arguments'.format(
                    t.name), t)
            values = cast(TypeVarExpr, sym.node).values
            return TypeVarType(t.name, sym.tvar_id, values,
                               self.builtin_type('builtins.object'), t.line)
        elif fullname == 'builtins.None':
            return Void()
        elif fullname == 'typing.Any':
            return AnyType()
        elif fullname == 'typing.Tuple':
            return TupleType(self.anal_array(t.args),
                             self.builtin_type('builtins.tuple'))
        elif fullname == 'typing.Union':
            items = self.anal_array(t.args)
            items = [item for item in items if not isinstance(item, Void)]
            return UnionType.make_union(items)
        elif fullname == 'typing.Optional':
            if len(t.args) != 1:
                self.fail('Optional[...] must have exactly one type argument', t)
            items = self.anal_array(t.args)
            # Currently Optional[t] is just an alias for t.
            return items[0]
        elif fullname == 'typing.Callable':
            return self.analyze_callable_type(t)
        elif sym.kind == TYPE_ALIAS:
            # TODO: Generic type aliases.
            return sym.type_override
        elif not isinstance(sym.node, TypeInfo):
            name = sym.fullname
            if name is None:
                name = sym.node.name()
            self.fail('Invalid type "{}"'.format(name), t)
            return t
        info = cast(TypeInfo, sym.node)
        if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
            return TupleType(self.anal_array(t.args),
                             Instance(info, [], t.line),
                             t.line)
        else:
            # Analyze arguments and construct Instance type. The
            # number of type arguments and their values are
            # checked only later, since we do not always know the
            # valid count at this point. Thus we may construct an
            # Instance with an invalid number of type arguments.
            instance = Instance(info, self.anal_array(t.args), t.line)
            if info.tuple_type is None:
                return instance
            else:
                # The class has a Tuple[...] base class so it will be
                # represented as a tuple type.
                return TupleType(self.anal_array(info.tuple_type.items),
                                 fallback=instance,
                                 line=t.line)
    else:
        return t
def object_from_instance(instance: Instance) -> Instance:
    """Construct the type 'builtins.object' from an instance type."""
    # Use the fact that 'object' is always the last class in the mro.
    res = Instance(instance.type.mro[-1], [])
    return res
def reparametrize_instance(instance: Instance, new_args: List[MypyType]) -> Instance:
    return Instance(instance.type, args=new_args,
                    line=instance.line, column=instance.column)
def fill_typevars(tp: Instance, type_to_fill: Instance) -> Instance:
    typevar_values: typing.List[Type] = []
    for typevar_arg in type_to_fill.args:
        if isinstance(typevar_arg, TypeVarType):
            typevar_values.append(extract_typevar_value(tp, typevar_arg.name))
    return Instance(type_to_fill.type, typevar_values)
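Helpers like the three above are usually invoked from a plugin hook, where an Instance is already available on the hook context. Below is a minimal sketch of such a plugin, assuming only mypy's documented plugin entry points; the plugin class, the 'examplelib.make_container' target fullname, and the choice to substitute the first argument's inferred type are hypothetical illustrations, not taken from any real project.

from typing import Callable, List, Optional

from mypy.plugin import FunctionContext, Plugin
from mypy.types import Instance, Type


def reparametrize_instance(instance: Instance, new_args: List[Type]) -> Instance:
    # Same helper as above, repeated so this sketch is self-contained.
    return Instance(instance.type, args=new_args,
                    line=instance.line, column=instance.column)


class ExamplePlugin(Plugin):
    def get_function_hook(self, fullname: str
                          ) -> Optional[Callable[[FunctionContext], Type]]:
        if fullname == 'examplelib.make_container':  # hypothetical target function
            return _make_container_hook
        return None


def _make_container_hook(ctx: FunctionContext) -> Type:
    default = ctx.default_return_type
    # Only rewrite generic Instances, and only when an argument type was inferred.
    if isinstance(default, Instance) and default.args and ctx.arg_types and ctx.arg_types[0]:
        return reparametrize_instance(default, [ctx.arg_types[0][0]])
    return default


def plugin(version: str):
    # Standard mypy plugin entry point.
    return ExamplePlugin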
def visit_class_pattern(self, o: ClassPattern) -> PatternType:
    current_type = get_proper_type(self.type_context[-1])

    #
    # Check class type
    #
    type_info = o.class_ref.node
    assert type_info is not None
    if isinstance(type_info, TypeAlias) and not type_info.no_args:
        self.msg.fail(message_registry.CLASS_PATTERN_GENERIC_TYPE_ALIAS, o)
        return self.early_non_match()
    if isinstance(type_info, TypeInfo):
        any_type = AnyType(TypeOfAny.implementation_artifact)
        typ: Type = Instance(type_info, [any_type] * len(type_info.defn.type_vars))
    elif isinstance(type_info, TypeAlias):
        typ = type_info.target
    else:
        if isinstance(type_info, Var):
            name = str(type_info.type)
        else:
            name = type_info.name
        self.msg.fail(message_registry.CLASS_PATTERN_TYPE_REQUIRED.format(name), o.class_ref)
        return self.early_non_match()

    new_type, rest_type = self.chk.conditional_types_with_intersection(
        current_type, [get_type_range(typ)], o, default=current_type)
    if is_uninhabited(new_type):
        return self.early_non_match()
    # TODO: Do I need this?
    narrowed_type = narrow_declared_type(current_type, new_type)

    #
    # Convert positional to keyword patterns
    #
    keyword_pairs: List[Tuple[Optional[str], Pattern]] = []
    match_arg_set: Set[str] = set()

    captures: Dict[Expression, Type] = {}

    if len(o.positionals) != 0:
        if self.should_self_match(typ):
            if len(o.positionals) > 1:
                self.msg.fail(message_registry.CLASS_PATTERN_TOO_MANY_POSITIONAL_ARGS, o)
            pattern_type = self.accept(o.positionals[0], narrowed_type)
            if not is_uninhabited(pattern_type.type):
                return PatternType(pattern_type.type,
                                   join_types(rest_type, pattern_type.rest_type),
                                   pattern_type.captures)
            captures = pattern_type.captures
        else:
            local_errors = self.msg.clean_copy()
            match_args_type = analyze_member_access("__match_args__", typ, o,
                                                    False, False, False,
                                                    local_errors,
                                                    original_type=typ,
                                                    chk=self.chk)
            if local_errors.is_errors():
                self.msg.fail(message_registry.MISSING_MATCH_ARGS.format(typ), o)
                return self.early_non_match()

            proper_match_args_type = get_proper_type(match_args_type)
            if isinstance(proper_match_args_type, TupleType):
                match_arg_names = get_match_arg_names(proper_match_args_type)

                if len(o.positionals) > len(match_arg_names):
                    self.msg.fail(message_registry.CLASS_PATTERN_TOO_MANY_POSITIONAL_ARGS, o)
                    return self.early_non_match()
            else:
                match_arg_names = [None] * len(o.positionals)

            for arg_name, pos in zip(match_arg_names, o.positionals):
                keyword_pairs.append((arg_name, pos))
                if arg_name is not None:
                    match_arg_set.add(arg_name)

    #
    # Check for duplicate patterns
    #
    keyword_arg_set = set()
    has_duplicates = False
    for key, value in zip(o.keyword_keys, o.keyword_values):
        keyword_pairs.append((key, value))
        if key in match_arg_set:
            self.msg.fail(
                message_registry.CLASS_PATTERN_KEYWORD_MATCHES_POSITIONAL.format(key),
                value)
            has_duplicates = True
        elif key in keyword_arg_set:
            self.msg.fail(
                message_registry.CLASS_PATTERN_DUPLICATE_KEYWORD_PATTERN.format(key),
                value)
            has_duplicates = True
        keyword_arg_set.add(key)

    if has_duplicates:
        return self.early_non_match()

    #
    # Check keyword patterns
    #
    can_match = True
    for keyword, pattern in keyword_pairs:
        key_type: Optional[Type] = None
        local_errors = self.msg.clean_copy()
        if keyword is not None:
            key_type = analyze_member_access(keyword,
                                             narrowed_type,
                                             pattern,
                                             False, False, False,
                                             local_errors,
                                             original_type=new_type,
                                             chk=self.chk)
        else:
            key_type = AnyType(TypeOfAny.from_error)
        if local_errors.is_errors() or key_type is None:
            key_type = AnyType(TypeOfAny.from_error)
            self.msg.fail(
                message_registry.CLASS_PATTERN_UNKNOWN_KEYWORD.format(typ, keyword),
                value)

        inner_type, inner_rest_type, inner_captures = self.accept(pattern, key_type)
        if is_uninhabited(inner_type):
            can_match = False
        else:
            self.update_type_map(captures, inner_captures)
            if not is_uninhabited(inner_rest_type):
                rest_type = current_type

    if not can_match:
        new_type = UninhabitedType()
    return PatternType(new_type, rest_type, captures)
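For orientation, a self-contained example (Python 3.10+) of the patterns this visitor analyzes: positional sub-patterns resolved through the generated __match_args__, keyword sub-patterns, and the overlapping-keyword case that is rejected. This is illustrative user code, not part of mypy.

from dataclasses import dataclass


@dataclass
class Point:
    # dataclasses generate __match_args__ = ('x', 'y'), which maps the
    # positional sub-patterns below onto attribute names.
    x: int
    y: int


def describe(p: Point) -> str:
    match p:
        case Point(0, 0):
            return "origin"
        case Point(x=0, y=y):
            return f"on the y-axis at {y}"
        case Point(x, y):
            return f"at ({x}, {y})"
    return "unreachable"


print(describe(Point(0, 3)))   # on the y-axis at 3

# Rejected by the duplicate check above:
#   case Point(1, x=2):        # keyword "x" also matched positionally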
def fill_descriptor_types_for_related_field(ctx: FunctionContext,
                                            django_context: DjangoContext) -> MypyType:
    current_field = _get_current_field_from_assignment(ctx, django_context)
    if current_field is None:
        return AnyType(TypeOfAny.from_error)

    assert isinstance(current_field, RelatedField)

    related_model_cls = django_context.get_field_related_model_cls(current_field)
    if related_model_cls is None:
        return AnyType(TypeOfAny.from_error)

    default_related_field_type = set_descriptor_types_for_field(ctx)

    # self reference with abstract=True on the model where ForeignKey is defined
    current_model_cls = current_field.model
    if (current_model_cls._meta.abstract
            and current_model_cls == related_model_cls):
        # for all derived non-abstract classes, set variable with this name to
        # __get__/__set__ of ForeignKey of derived model
        for model_cls in django_context.all_registered_model_classes:
            if issubclass(model_cls, current_model_cls) and not model_cls._meta.abstract:
                derived_model_info = helpers.lookup_class_typeinfo(
                    helpers.get_typechecker_api(ctx), model_cls)
                if derived_model_info is not None:
                    fk_ref_type = Instance(derived_model_info, [])
                    derived_fk_type = reparametrize_related_field_type(
                        default_related_field_type,
                        set_type=fk_ref_type, get_type=fk_ref_type)
                    helpers.add_new_sym_for_info(derived_model_info,
                                                 name=current_field.name,
                                                 sym_type=derived_fk_type)

    related_model = related_model_cls
    related_model_to_set = related_model_cls
    if related_model_to_set._meta.proxy_for_model is not None:
        related_model_to_set = related_model_to_set._meta.proxy_for_model

    typechecker_api = helpers.get_typechecker_api(ctx)

    related_model_info = helpers.lookup_class_typeinfo(typechecker_api, related_model)
    if related_model_info is None:
        # maybe no type stub
        related_model_type = AnyType(TypeOfAny.unannotated)
    else:
        related_model_type = Instance(related_model_info, [])  # type: ignore

    related_model_to_set_info = helpers.lookup_class_typeinfo(typechecker_api,
                                                              related_model_to_set)
    if related_model_to_set_info is None:
        # maybe no type stub
        related_model_to_set_type = AnyType(TypeOfAny.unannotated)
    else:
        related_model_to_set_type = Instance(related_model_to_set_info, [])  # type: ignore

    # replace Any with referred_to_type
    return reparametrize_related_field_type(default_related_field_type,
                                            set_type=related_model_to_set_type,
                                            get_type=related_model_type)
def make_oneoff_named_tuple(api: TypeChecker, name: str,
                            fields: "OrderedDict[str, MypyType]") -> TupleType:
    current_module = get_current_module(api)
    namedtuple_info = add_new_class_for_module(
        current_module, name,
        bases=[api.named_generic_type("typing.NamedTuple", [])],
        fields=fields)
    return TupleType(list(fields.values()), fallback=Instance(namedtuple_info, []))
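At the user level, the synthesized fallback behaves like an ordinary typing.NamedTuple. A plain-Python sketch of the shape the helper models; the QueryRow name and fields are invented for illustration.

from typing import NamedTuple


class QueryRow(NamedTuple):
    # One field per entry in `fields`, in declaration order; the result is a
    # subtype of tuple, just like the TupleType with a class fallback above.
    id: int
    name: str


row = QueryRow(id=1, name="first")
assert isinstance(row, tuple)
assert row[1] == row.name == "first"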
def test_erase_with_generic_type(self):
    self.assert_erase(self.fx.ga, self.fx.gdyn)
    self.assert_erase(self.fx.hab,
                      Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt]))
def visit_unbound_type_nonoptional(self, t: UnboundType) -> Type:
    sym = self.lookup(t.name, t, suppress_errors=self.third_pass)
    if '.' in t.name:
        # Handle indirect references to imported names.
        #
        # TODO: Do this for module-local references as well and remove ImportedName
        # type check below.
        sym = self.api.dereference_module_cross_ref(sym)
    if sym is not None:
        if isinstance(sym.node, ImportedName):
            # Forward reference to an imported name that hasn't been processed yet.
            # To maintain backward compatibility, these get translated to Any.
            #
            # TODO: Remove this special case.
            return AnyType(TypeOfAny.implementation_artifact)
        if sym.fullname in type_aliases:
            # Resolve forward reference to type alias like 'typing.List'.
            # TODO: Unify how type aliases are handled; currently we resolve them in two
            # places (the other is in the semantic analyzer pass 2).
            resolved = type_aliases[sym.fullname]
            new = self.api.lookup_qualified(resolved, t)
            if new:
                sym = new.copy()
                sym.normalized = True
        if sym.node is None:
            # UNBOUND_IMPORTED can happen if an unknown name was imported.
            if sym.kind != UNBOUND_IMPORTED:
                self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
            return AnyType(TypeOfAny.special_form)
        fullname = sym.node.fullname()
        hook = self.plugin.get_type_analyze_hook(fullname)
        if hook:
            return hook(AnalyzeTypeContext(t, t, self))
        if (fullname in nongen_builtins and t.args and
                not sym.normalized and not self.allow_unnormalized):
            self.fail(no_subscript_builtin_alias(fullname), t)
        if self.tvar_scope:
            tvar_def = self.tvar_scope.get_binding(sym)
        else:
            tvar_def = None
        if self.warn_bound_tvar and sym.kind == TVAR and tvar_def is not None:
            self.fail('Can\'t use bound type variable "{}"'
                      ' to define generic alias'.format(t.name), t)
            return AnyType(TypeOfAny.from_error)
        elif sym.kind == TVAR and tvar_def is not None:
            if len(t.args) > 0:
                self.fail('Type variable "{}" used with arguments'.format(
                    t.name), t)
            return TypeVarType(tvar_def, t.line)
        elif fullname == 'builtins.None':
            return NoneTyp()
        elif fullname == 'typing.Any' or fullname == 'builtins.Any':
            return AnyType(TypeOfAny.explicit)
        elif fullname == 'typing.Tuple':
            if len(t.args) == 0 and not t.empty_tuple_index:
                # Bare 'Tuple' is same as 'tuple'
                if self.options.disallow_any_generics and not self.is_typeshed_stub:
                    self.fail(messages.BARE_GENERIC, t)
                typ = self.named_type('builtins.tuple', line=t.line, column=t.column)
                typ.from_generic_builtin = True
                return typ
            if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                # Tuple[T, ...] (uniform, variable-length tuple)
                instance = self.named_type('builtins.tuple', [self.anal_type(t.args[0])])
                instance.line = t.line
                return instance
            return self.tuple_type(self.anal_array(t.args))
        elif fullname == 'typing.Union':
            items = self.anal_array(t.args)
            return UnionType.make_union(items)
        elif fullname == 'typing.Optional':
            if len(t.args) != 1:
                self.fail('Optional[...] must have exactly one type argument', t)
                return AnyType(TypeOfAny.from_error)
            item = self.anal_type(t.args[0])
            return make_optional_type(item)
        elif fullname == 'typing.Callable':
            return self.analyze_callable_type(t)
        elif fullname == 'typing.Type':
            if len(t.args) == 0:
                any_type = AnyType(TypeOfAny.from_omitted_generics,
                                   line=t.line, column=t.column)
                return TypeType(any_type, line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail('Type[...] must have exactly one type argument', t)
            item = self.anal_type(t.args[0])
            return TypeType.make_normalized(item, line=t.line)
        elif fullname == 'typing.ClassVar':
            if self.nesting_level > 0:
                self.fail('Invalid type: ClassVar nested inside other type', t)
            if len(t.args) == 0:
                return AnyType(TypeOfAny.from_omitted_generics,
                               line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail('ClassVar[...] must have at most one type argument', t)
                return AnyType(TypeOfAny.from_error)
            item = self.anal_type(t.args[0])
            if isinstance(item, TypeVarType) or get_type_vars(item):
                self.fail('Invalid type: ClassVar cannot be generic', t)
                return AnyType(TypeOfAny.from_error)
            return item
        elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'):
            return UninhabitedType(is_noreturn=True)
        elif sym.kind == TYPE_ALIAS:
            if sym.alias_name is not None:
                self.aliases_used.add(sym.alias_name)
            override = sym.type_override
            all_vars = sym.alias_tvars
            assert override is not None
            an_args = self.anal_array(t.args)
            if all_vars is not None:
                exp_len = len(all_vars)
            else:
                exp_len = 0
            act_len = len(an_args)
            if exp_len > 0 and act_len == 0:
                # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...]
                assert all_vars is not None
                return set_any_tvars(override, all_vars, t.line, t.column)
            if exp_len == 0 and act_len == 0:
                return override
            if act_len != exp_len:
                self.fail('Bad number of arguments for type alias, expected: %s, given: %s'
                          % (exp_len, act_len), t)
                return set_any_tvars(override, all_vars or [],
                                     t.line, t.column, implicit=False)
            assert all_vars is not None
            return replace_alias_tvars(override, all_vars, an_args, t.line, t.column)
        elif not isinstance(sym.node, TypeInfo):
            name = sym.fullname
            if name is None:
                name = sym.node.name()
            if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
                # Something with an Any type -- make it an alias for Any in a type
                # context. This is slightly problematic as it allows using the type 'Any'
                # as a base class -- however, this will fail soon at runtime so the problem
                # is pretty minor.
                return AnyType(TypeOfAny.from_unimported_type,
                               missing_import_name=sym.node.type.missing_import_name)
            # Allow unbound type variables when defining an alias
            if not (self.aliasing and sym.kind == TVAR and
                    (not self.tvar_scope or self.tvar_scope.get_binding(sym) is None)):
                if (not self.third_pass and not self.in_dynamic_func and
                        not (isinstance(sym.node, (FuncDef, Decorator)) or
                             isinstance(sym.node, Var) and sym.node.is_ready) and
                        not (sym.kind == TVAR and tvar_def is None)):
                    if t.args and not self.global_scope:
                        self.fail('Unsupported forward reference to "{}"'.format(t.name), t)
                        return AnyType(TypeOfAny.from_error)
                    return ForwardRef(t)
                self.fail('Invalid type "{}"'.format(name), t)
                if self.third_pass and sym.kind == TVAR:
                    self.note_func("Forward references to type variables are prohibited", t)
            return t
        info = sym.node  # type: TypeInfo
        if sym.is_aliasing:
            if sym.alias_name is not None:
                self.aliases_used.add(sym.alias_name)
        if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
            fallback = Instance(info, [AnyType(TypeOfAny.special_form)], t.line)
            return TupleType(self.anal_array(t.args), fallback, t.line)
        else:
            # Analyze arguments and construct Instance type. The
            # number of type arguments and their values are
            # checked only later, since we do not always know the
            # valid count at this point. Thus we may construct an
            # Instance with an invalid number of type arguments.
            instance = Instance(info, self.anal_array(t.args), t.line, t.column)
            instance.from_generic_builtin = sym.normalized
            tup = info.tuple_type
            if tup is not None:
                # The class has a Tuple[...] base class so it will be
                # represented as a tuple type.
                if t.args:
                    self.fail('Generic tuple types not supported', t)
                    return AnyType(TypeOfAny.from_error)
                return tup.copy_modified(items=self.anal_array(tup.items),
                                         fallback=instance)
            td = info.typeddict_type
            if td is not None:
                # The class has a TypedDict[...] base class so it will be
                # represented as a typeddict type.
                if t.args:
                    self.fail('Generic TypedDict types not supported', t)
                    return AnyType(TypeOfAny.from_error)
                # Create a named TypedDictType
                return td.copy_modified(item_types=self.anal_array(list(td.items.values())),
                                        fallback=instance)
            return instance
    else:
        if self.third_pass:
            self.fail('Invalid type "{}"'.format(t.name), t)
            return AnyType(TypeOfAny.from_error)
        return AnyType(TypeOfAny.special_form)
          'You can update to the latest version with '
          '`python3 -m pip install -U typed-ast`.',
          file=sys.stderr)
else:
    print('Mypy requires the typed_ast package, which is only compatible with\n'
          'Python 3.3 and greater.', file=sys.stderr)
sys.exit(1)

T = TypeVar('T', bound=Union[ast27.expr, ast27.stmt])
U = TypeVar('U', bound=Node)
V = TypeVar('V')

# There is no way to create reasonable fallbacks at this stage,
# they must be patched later.
MISSING_FALLBACK = FakeInfo("fallback can't be filled out until semanal")  # type: Final
_dummy_fallback = Instance(MISSING_FALLBACK, [], -1)  # type: Final

TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment'  # type: Final
TYPE_COMMENT_AST_ERROR = 'invalid type comment'  # type: Final


def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors] = None,
          options: Optional[Options] = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.
    """
def run_with_model_cls(self, model_cls: Type[Model]) -> None:
    for manager_name, manager in model_cls._meta.managers_map.items():
        manager_fullname = helpers.get_class_fullname(manager.__class__)
        manager_info = self.lookup_typeinfo_or_incomplete_defn_error(manager_fullname)

        if manager_name not in self.model_classdef.info.names:
            manager_type = Instance(manager_info, [Instance(self.model_classdef.info, [])])
            self.add_new_node_to_model_class(manager_name, manager_type)
        else:
            # create new MODELNAME_MANAGERCLASSNAME class that represents manager
            # parametrized with current model
            has_manager_any_base = any(self._is_manager_any(base)
                                       for base in manager_info.bases)
            if has_manager_any_base:
                custom_model_manager_name = (manager.model.__name__
                                             + '_' + manager.__class__.__name__)

                bases = []
                for original_base in manager_info.bases:
                    if self._is_manager_any(original_base):
                        if original_base.type is None:
                            if not self.api.final_iteration:
                                self.api.defer()
                        original_base = helpers.reparametrize_instance(
                            original_base, [Instance(self.model_classdef.info, [])])
                    bases.append(original_base)

                current_module = self.api.modules[self.model_classdef.info.module_name]
                custom_manager_info = helpers.add_new_class_for_module(
                    current_module, custom_model_manager_name,
                    bases=bases, fields=OrderedDict())

                custom_manager_type = Instance(custom_manager_info,
                                               [Instance(self.model_classdef.info, [])])
                self.add_new_node_to_model_class(manager_name, custom_manager_type)

    # add _default_manager
    if '_default_manager' not in self.model_classdef.info.names:
        default_manager_fullname = helpers.get_class_fullname(
            model_cls._meta.default_manager.__class__)
        default_manager_info = self.lookup_typeinfo_or_incomplete_defn_error(
            default_manager_fullname)
        default_manager = Instance(default_manager_info,
                                   [Instance(self.model_classdef.info, [])])
        self.add_new_node_to_model_class('_default_manager', default_manager)

    # add related managers
    for relation in self.django_context.get_model_relations(model_cls):
        attname = relation.get_accessor_name()
        if attname is None:
            # no reverse accessor
            continue

        related_model_cls = self.django_context.fields_context.get_related_model_cls(relation)
        related_model_info = self.lookup_class_typeinfo_or_incomplete_defn_error(
            related_model_cls)

        if isinstance(relation, OneToOneRel):
            self.add_new_node_to_model_class(attname, Instance(related_model_info, []))
            continue

        if isinstance(relation, (ManyToOneRel, ManyToManyRel)):
            manager_info = self.lookup_typeinfo_or_incomplete_defn_error(
                fullnames.RELATED_MANAGER_CLASS_FULLNAME)
            self.add_new_node_to_model_class(
                attname,
                Instance(manager_info, [Instance(related_model_info, [])]))
            continue
def _infer_type_from_relationship(
    api: SemanticAnalyzerPluginInterface,
    stmt: AssignmentStmt,
    node: Var,
    left_hand_explicit_type: Optional[ProperType],
) -> Optional[ProperType]:
    """Infer the type of mapping from a relationship.

    E.g.::

        @reg.mapped
        class MyClass:
            # ...

            addresses = relationship(Address, uselist=True)

            order: Mapped["Order"] = relationship("Order")

    Will resolve in mypy as::

        @reg.mapped
        class MyClass:
            # ...

            addresses: Mapped[List[Address]]

            order: Mapped["Order"]

    """
    assert isinstance(stmt.rvalue, CallExpr)
    target_cls_arg = stmt.rvalue.args[0]
    python_type_for_type: Optional[ProperType] = None

    if isinstance(target_cls_arg, NameExpr) and isinstance(target_cls_arg.node, TypeInfo):
        # type
        related_object_type = target_cls_arg.node
        python_type_for_type = Instance(related_object_type, [])

    # other cases not covered - an error message directs the user
    # to set an explicit type annotation
    #
    # node.type == str, it's a string
    # if isinstance(target_cls_arg, NameExpr) and isinstance(
    #     target_cls_arg.node, Var
    # )
    # points to a type
    # isinstance(target_cls_arg, NameExpr) and isinstance(
    #     target_cls_arg.node, TypeAlias
    # )
    # string expression
    # isinstance(target_cls_arg, StrExpr)

    uselist_arg = util._get_callexpr_kwarg(stmt.rvalue, "uselist")
    collection_cls_arg: Optional[Expression] = util._get_callexpr_kwarg(
        stmt.rvalue, "collection_class")
    type_is_a_collection = False

    # this can be used to determine Optional for a many-to-one
    # in the same way nullable=False could be used, if we start supporting
    # that.
    # innerjoin_arg = _get_callexpr_kwarg(stmt.rvalue, "innerjoin")

    if (uselist_arg is not None
            and api.parse_bool(uselist_arg) is True
            and collection_cls_arg is None):
        type_is_a_collection = True
        if python_type_for_type is not None:
            python_type_for_type = api.named_type("__builtins__.list",
                                                  [python_type_for_type])
    elif (uselist_arg is None or api.parse_bool(uselist_arg) is True
          ) and collection_cls_arg is not None:
        type_is_a_collection = True
        if isinstance(collection_cls_arg, CallExpr):
            collection_cls_arg = collection_cls_arg.callee

        if isinstance(collection_cls_arg, NameExpr) and isinstance(
                collection_cls_arg.node, TypeInfo):
            if python_type_for_type is not None:
                # this can still be overridden by the left hand side
                # within _infer_Type_from_left_and_inferred_right
                python_type_for_type = Instance(collection_cls_arg.node,
                                                [python_type_for_type])
        elif (isinstance(collection_cls_arg, NameExpr)
              and isinstance(collection_cls_arg.node, FuncDef)
              and collection_cls_arg.node.type is not None):
            if python_type_for_type is not None:
                # this can still be overridden by the left hand side
                # within _infer_Type_from_left_and_inferred_right

                # TODO: handle mypy.types.Overloaded
                if isinstance(collection_cls_arg.node.type, CallableType):
                    rt = get_proper_type(collection_cls_arg.node.type.ret_type)

                    if isinstance(rt, CallableType):
                        callable_ret_type = get_proper_type(rt.ret_type)
                        if isinstance(callable_ret_type, Instance):
                            python_type_for_type = Instance(
                                callable_ret_type.type,
                                [python_type_for_type],
                            )
        else:
            util.fail(
                api,
                "Expected Python collection type for "
                "collection_class parameter",
                stmt.rvalue,
            )
            python_type_for_type = None
    elif uselist_arg is not None and api.parse_bool(uselist_arg) is False:
        if collection_cls_arg is not None:
            util.fail(
                api,
                "Sending uselist=False and collection_class at the same time "
                "does not make sense",
                stmt.rvalue,
            )
        if python_type_for_type is not None:
            python_type_for_type = UnionType([python_type_for_type, NoneType()])
    else:
        if left_hand_explicit_type is None:
            msg = (
                "Can't infer scalar or collection for ORM mapped expression "
                "assigned to attribute '{}' if both 'uselist' and "
                "'collection_class' arguments are absent from the "
                "relationship(); please specify a "
                "type annotation on the left hand side.")
            util.fail(api, msg.format(node.name), node)

    if python_type_for_type is None:
        return _infer_type_from_left_hand_type_only(api, node, left_hand_explicit_type)
    elif left_hand_explicit_type is not None:
        if type_is_a_collection:
            assert isinstance(left_hand_explicit_type, Instance)
            assert isinstance(python_type_for_type, Instance)
            return _infer_collection_type_from_left_and_inferred_right(
                api, node, left_hand_explicit_type, python_type_for_type)
        else:
            return _infer_type_from_left_and_inferred_right(
                api,
                node,
                left_hand_explicit_type,
                python_type_for_type,
            )
    else:
        return python_type_for_type
def visit_instance(self, t: Instance) -> Type:
    return Instance(t.type, self.translate_types(t.args), t.line, t.column)
def _add_bool_dunder(self, type_info: TypeInfo) -> None:
    signature = CallableType([], [], [],
                             Instance(self.bool_type_info, []),
                             self.function)
    bool_func = FuncDef('__bool__', [], Block([]))
    bool_func.type = set_callable_name(signature, bool_func)
    type_info.names[bool_func.name] = SymbolTableNode(MDEF, bool_func)
def get_reducers_type(ctx: FunctionContext) -> Type:
    """
    Determine a more specific model type for functions that combine models.

    This function operates on function *calls*. It analyzes each function call
    by looking at the function definition and the arguments passed as part of
    the function call, then determines a more specific return type for the
    function call.

    This method accepts a `FunctionContext` as part of the Mypy plugin
    interface. This function context provides easy access to:

    * `args`: List of "actual arguments" filling each "formal argument" of the
      called function. "Actual arguments" are those passed to the function as
      part of the function call. "Formal arguments" are the parameters defined
      by the function definition. The same actual argument may serve to fill
      multiple formal arguments. In some cases the relationship may even be
      ambiguous. For example, calling `range(*args)`, the actual argument
      `*args` may fill the `start`, `stop` or `step` formal arguments,
      depending on the length of the list.

      The `args` list is of length `num_formals`, with each element
      corresponding to a formal argument. Each value in the `args` list is a
      list of actual arguments which may fill the formal argument. For
      example, in the function call `range(*args, num)`, `num` may fill the
      `start`, `end` or `step` formal arguments depending on the length of
      `args`, so type-checking needs to consider all of these possibilities.

    * `arg_types`: Type annotation (or inferred type) of each argument. Like
      `args`, this value is a list of lists with an outer list entry for each
      formal argument and an inner list entry for each possible actual
      argument for the formal argument.

    * `arg_kinds`: "Kind" of argument passed to the function call. Argument
      kinds include positional, star (`*args`), named (`x=y`) and star2
      (`**kwargs`) arguments (among others). Like `args`, this value is a
      list of lists.

    * `context`: AST node representing the function call with all available
      type information. Notable attributes include:

      * `args` and `arg_kinds`: Simple list of actual arguments, not mapped to
        formal arguments.
      * `callee`: AST node representing the function being called. Typically
        this is a `NameExpr`. To resolve this node to the function definition
        it references, accessing `callee.node` will usually return either a
        `FuncDef` or `Decorator` node.
      * etc.

    This function infers a more specific type for model-combining functions by
    making certain assumptions about how the function operates based on the
    order of its formal arguments and its return type.

    If the return type is `Model[InT, XY_YZ_OutT]`, the output of each
    argument is expected to be used as the input to the next argument. It's
    therefore necessary to check that the output type of each model is
    compatible with the input type of the following model. The combined model
    has the type `Model[InT, OutT]`, where `InT` is the input type of the
    first model and `OutT` is the output type of the last model.

    If the return type is `Model[InT, XY_XY_OutT]`, all model arguments
    receive input of the same type and are expected to produce output of the
    same type. It's therefore necessary to check that all models have the same
    input types and the same output types. The combined model has the type
    `Model[InT, OutT]`, where `InT` is the input type of all model arguments
    and `OutT` is the output type of all model arguments.

    Raises:
        AssertionError: Raised if a more specific model type couldn't be
            determined, indicating that the default general return type
            should be used.
    """
    # Verify that we have a type-checking API and a default return type
    # (presumably a `thinc.model.Model` instance)
    assert isinstance(ctx.api, TypeChecker)
    assert isinstance(ctx.default_return_type, Instance)

    # Verify that we're inspecting a function call to a callable defined or
    # decorated function
    assert isinstance(ctx.context, CallExpr)
    callee = ctx.context.callee
    assert isinstance(callee, NameExpr)
    callee_node = callee.node
    assert isinstance(callee_node, (FuncDef, Decorator))
    callee_node_type = callee_node.type
    assert isinstance(callee_node_type, CallableType)

    # Verify that the callable returns a `thinc.model.Model`
    # TODO: Use `map_instance_to_supertype` to map subtypes to `Model` instances.
    # (figure out how to look up the `TypeInfo` for a class outside of the
    # module being type-checked)
    callee_return_type = callee_node_type.ret_type
    assert isinstance(callee_return_type, Instance)
    assert callee_return_type.type.fullname == thinc_model_fullname
    assert callee_return_type.args
    assert len(callee_return_type.args) == 2

    # Obtain the output type parameter of the `thinc.model.Model` return type
    # of the called API function
    out_type = callee_return_type.args[1]

    # Check if the `Model`'s output type parameter is one of the "special
    # type variables" defined to represent model composition (chaining) and
    # homogeneous reduction
    assert isinstance(out_type, TypeVarType)
    assert out_type.fullname
    if out_type.fullname not in {intoin_outtoout_out_fullname, chained_out_fullname}:
        return ctx.default_return_type

    # Extract type of each argument used to call the API function, making sure
    # that they are also `thinc.model.Model` instances
    args = list(itertools.chain(*ctx.args))
    arg_types = []
    for arg_type in itertools.chain(*ctx.arg_types):
        # TODO: Use `map_instance_to_supertype` to map subtypes to `Model` instances.
        assert isinstance(arg_type, Instance)
        assert arg_type.type.fullname == thinc_model_fullname
        assert len(arg_type.args) == 2
        arg_types.append(arg_type)

    # Collect neighboring pairs of arguments and their types
    arg_pairs = list(zip(args[:-1], args[1:]))
    arg_types_pairs = list(zip(arg_types[:-1], arg_types[1:]))

    # Determine if passed models will be chained or if they all need to have
    # the same input and output type
    if out_type.fullname == chained_out_fullname:
        # Models will be chained, meaning that the output of each model will
        # be passed as the input to the next model

        # Verify that model inputs and outputs are compatible
        for (arg1, arg2), (type1, type2) in zip(arg_pairs, arg_types_pairs):
            assert isinstance(type1, Instance)
            assert isinstance(type2, Instance)
            assert type1.type.fullname == thinc_model_fullname
            assert type2.type.fullname == thinc_model_fullname
            check_chained(l1_arg=arg1, l1_type=type1,
                          l2_arg=arg2, l2_type=type2, api=ctx.api)
        # Generated model takes the first model's input and returns the last
        # model's output
        return Instance(ctx.default_return_type.type,
                        [arg_types[0].args[0], arg_types[-1].args[1]])
    elif out_type.fullname == intoin_outtoout_out_fullname:
        # Models must have the same input and output types

        # Verify that model inputs and outputs are compatible
        for (arg1, arg2), (type1, type2) in zip(arg_pairs, arg_types_pairs):
            assert isinstance(type1, Instance)
            assert isinstance(type2, Instance)
            assert type1.type.fullname == thinc_model_fullname
            assert type2.type.fullname == thinc_model_fullname
            check_intoin_outtoout(l1_arg=arg1, l1_type=type1,
                                  l2_arg=arg2, l2_type=type2, api=ctx.api)
        # Generated model accepts and returns the same types as all passed models
        return Instance(ctx.default_return_type.type,
                        [arg_types[0].args[0], arg_types[0].args[1]])

    # Make sure the default return type is returned if no branch was selected
    assert False, "Thinc mypy plugin error: it should return before this point"
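The input/output propagation described in the docstring can be mimicked with ordinary generics. The sketch below is a plain typing analogue, assuming nothing beyond the standard library; Model and chain2 are illustrative stand-ins rather than thinc's actual API.

from typing import Generic, List, TypeVar

InT = TypeVar("InT")
MidT = TypeVar("MidT")
OutT = TypeVar("OutT")


class Model(Generic[InT, OutT]):
    """Stand-in for a model parametrized by input and output types."""


def chain2(first: Model[InT, MidT], second: Model[MidT, OutT]) -> Model[InT, OutT]:
    # The combined model takes the first model's input type and produces the
    # last model's output type; the shared MidT enforces compatibility.
    return Model()


encode: Model[str, List[float]] = Model()
classify: Model[List[float], int] = Model()

pipeline = chain2(encode, classify)   # inferred as Model[str, int]
# chain2(classify, encode)            # rejected: str is not List[float]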
def __init__(self, variance: int = COVARIANT) -> None:
    # The 'object' class
    self.oi = self.make_type_info('builtins.object')   # class object
    self.o = Instance(self.oi, [])                      # object

    # Type variables (these are effectively global)
    def make_type_var(name: str, id: int, values: List[Type], upper_bound: Type,
                      variance: int) -> TypeVarType:
        return TypeVarType(name, name, id, values, upper_bound, variance)

    self.t = make_type_var('T', 1, [], self.o, variance)     # T`1 (type variable)
    self.tf = make_type_var('T', -1, [], self.o, variance)   # T`-1 (type variable)
    self.tf2 = make_type_var('T', -2, [], self.o, variance)  # T`-2 (type variable)
    self.s = make_type_var('S', 2, [], self.o, variance)     # S`2 (type variable)
    self.s1 = make_type_var('S', 1, [], self.o, variance)    # S`1 (type variable)
    self.sf = make_type_var('S', -2, [], self.o, variance)   # S`-2 (type variable)
    self.sf1 = make_type_var('S', -1, [], self.o, variance)  # S`-1 (type variable)

    # Simple types
    self.anyt = AnyType(TypeOfAny.special_form)
    self.nonet = NoneType()
    self.uninhabited = UninhabitedType()

    # Abstract class TypeInfos

    # class F
    self.fi = self.make_type_info('F', is_abstract=True)
    # class F2
    self.f2i = self.make_type_info('F2', is_abstract=True)
    # class F3(F)
    self.f3i = self.make_type_info('F3', is_abstract=True, mro=[self.fi])

    # Class TypeInfos
    self.std_tuplei = self.make_type_info('builtins.tuple',
                                          mro=[self.oi],
                                          typevars=['T'],
                                          variances=[COVARIANT])   # class tuple
    self.type_typei = self.make_type_info('builtins.type')         # class type
    self.bool_type_info = self.make_type_info('builtins.bool')
    self.functioni = self.make_type_info('builtins.function')      # function TODO
    self.ai = self.make_type_info('A', mro=[self.oi])              # class A
    self.bi = self.make_type_info('B', mro=[self.ai, self.oi])     # class B(A)
    self.ci = self.make_type_info('C', mro=[self.ai, self.oi])     # class C(A)
    self.di = self.make_type_info('D', mro=[self.oi])              # class D
    # class E(F)
    self.ei = self.make_type_info('E', mro=[self.fi, self.oi])
    # class E2(F2, F)
    self.e2i = self.make_type_info('E2', mro=[self.f2i, self.fi, self.oi])
    # class E3(F, F2)
    self.e3i = self.make_type_info('E3', mro=[self.fi, self.f2i, self.oi])

    # Generic class TypeInfos
    # G[T]
    self.gi = self.make_type_info('G', mro=[self.oi],
                                  typevars=['T'],
                                  variances=[variance])
    # G2[T]
    self.g2i = self.make_type_info('G2', mro=[self.oi],
                                   typevars=['T'],
                                   variances=[variance])
    # H[S, T]
    self.hi = self.make_type_info('H', mro=[self.oi],
                                  typevars=['S', 'T'],
                                  variances=[variance, variance])
    # GS[T, S] <: G[S]
    self.gsi = self.make_type_info('GS', mro=[self.gi, self.oi],
                                   typevars=['T', 'S'],
                                   variances=[variance, variance],
                                   bases=[Instance(self.gi, [self.s])])
    # GS2[S] <: G[S]
    self.gs2i = self.make_type_info('GS2', mro=[self.gi, self.oi],
                                    typevars=['S'],
                                    variances=[variance],
                                    bases=[Instance(self.gi, [self.s1])])
    # list[T]
    self.std_listi = self.make_type_info('builtins.list', mro=[self.oi],
                                         typevars=['T'],
                                         variances=[variance])

    # Instance types
    self.std_tuple = Instance(self.std_tuplei, [self.anyt])   # tuple
    self.type_type = Instance(self.type_typei, [])            # type
    self.function = Instance(self.functioni, [])              # function TODO
    self.a = Instance(self.ai, [])       # A
    self.b = Instance(self.bi, [])       # B
    self.c = Instance(self.ci, [])       # C
    self.d = Instance(self.di, [])       # D

    self.e = Instance(self.ei, [])       # E
    self.e2 = Instance(self.e2i, [])     # E2
    self.e3 = Instance(self.e3i, [])     # E3

    self.f = Instance(self.fi, [])       # F
    self.f2 = Instance(self.f2i, [])     # F2
    self.f3 = Instance(self.f3i, [])     # F3

    # Generic instance types
    self.ga = Instance(self.gi, [self.a])        # G[A]
    self.gb = Instance(self.gi, [self.b])        # G[B]
    self.gd = Instance(self.gi, [self.d])        # G[D]
    self.go = Instance(self.gi, [self.o])        # G[object]
    self.gt = Instance(self.gi, [self.t])        # G[T`1]
    self.gtf = Instance(self.gi, [self.tf])      # G[T`-1]
    self.gtf2 = Instance(self.gi, [self.tf2])    # G[T`-2]
    self.gs = Instance(self.gi, [self.s])        # G[S]
    self.gdyn = Instance(self.gi, [self.anyt])   # G[Any]
    self.gn = Instance(self.gi, [NoneType()])    # G[None]

    self.g2a = Instance(self.g2i, [self.a])      # G2[A]

    self.gsaa = Instance(self.gsi, [self.a, self.a])   # GS[A, A]
    self.gsab = Instance(self.gsi, [self.a, self.b])   # GS[A, B]
    self.gsba = Instance(self.gsi, [self.b, self.a])   # GS[B, A]

    self.gs2a = Instance(self.gs2i, [self.a])    # GS2[A]
    self.gs2b = Instance(self.gs2i, [self.b])    # GS2[B]
    self.gs2d = Instance(self.gs2i, [self.d])    # GS2[D]

    self.hab = Instance(self.hi, [self.a, self.b])     # H[A, B]
    self.haa = Instance(self.hi, [self.a, self.a])     # H[A, A]
    self.hbb = Instance(self.hi, [self.b, self.b])     # H[B, B]
    self.hts = Instance(self.hi, [self.t, self.s])     # H[T, S]
    self.had = Instance(self.hi, [self.a, self.d])     # H[A, D]
    self.hao = Instance(self.hi, [self.a, self.o])     # H[A, object]

    self.lsta = Instance(self.std_listi, [self.a])     # List[A]
    self.lstb = Instance(self.std_listi, [self.b])     # List[B]

    self.lit1 = LiteralType(1, self.a)
    self.lit2 = LiteralType(2, self.a)
    self.lit3 = LiteralType("foo", self.d)
    self.lit1_inst = Instance(self.ai, [], last_known_value=self.lit1)
    self.lit2_inst = Instance(self.ai, [], last_known_value=self.lit2)
    self.lit3_inst = Instance(self.di, [], last_known_value=self.lit3)

    self.type_a = TypeType.make_normalized(self.a)
    self.type_b = TypeType.make_normalized(self.b)
    self.type_c = TypeType.make_normalized(self.c)
    self.type_d = TypeType.make_normalized(self.d)
    self.type_t = TypeType.make_normalized(self.t)
    self.type_any = TypeType.make_normalized(self.anyt)

    self._add_bool_dunder(self.bool_type_info)
    self._add_bool_dunder(self.ai)
def get_expected_types(self, api: TypeChecker, model_cls: Type[Model], *,
                       method: str) -> Dict[str, MypyType]:
    contenttypes_in_apps = self.apps_registry.is_installed("django.contrib.contenttypes")
    if contenttypes_in_apps:
        from django.contrib.contenttypes.fields import GenericForeignKey

    expected_types = {}
    # add pk if not abstract=True
    if not model_cls._meta.abstract:
        primary_key_field = self.get_primary_key_field(model_cls)
        field_set_type = self.get_field_set_type(api, primary_key_field, method=method)
        expected_types["pk"] = field_set_type

    for field in model_cls._meta.get_fields():
        if isinstance(field, Field):
            field_name = field.attname
            field_set_type = self.get_field_set_type(api, field, method=method)
            expected_types[field_name] = field_set_type

            if isinstance(field, ForeignKey):
                field_name = field.name
                foreign_key_info = helpers.lookup_class_typeinfo(api, field.__class__)
                if foreign_key_info is None:
                    # maybe there's no type annotation for the field
                    expected_types[field_name] = AnyType(TypeOfAny.unannotated)
                    continue

                related_model = self.get_field_related_model_cls(field)
                if related_model is None:
                    expected_types[field_name] = AnyType(TypeOfAny.from_error)
                    continue

                if related_model._meta.proxy_for_model is not None:
                    related_model = related_model._meta.proxy_for_model

                related_model_info = helpers.lookup_class_typeinfo(api, related_model)
                if related_model_info is None:
                    expected_types[field_name] = AnyType(TypeOfAny.unannotated)
                    continue

                is_nullable = self.get_field_nullability(field, method)
                foreign_key_set_type = helpers.get_private_descriptor_type(
                    foreign_key_info, "_pyi_private_set_type",
                    is_nullable=is_nullable)
                model_set_type = helpers.convert_any_to_type(
                    foreign_key_set_type, Instance(related_model_info, []))

                expected_types[field_name] = model_set_type

        elif contenttypes_in_apps and isinstance(field, GenericForeignKey):
            # it's generic, so cannot set specific model
            field_name = field.name
            gfk_info = helpers.lookup_class_typeinfo(api, field.__class__)
            gfk_set_type = helpers.get_private_descriptor_type(
                gfk_info, "_pyi_private_set_type", is_nullable=True)
            expected_types[field_name] = gfk_set_type

    return expected_types
def visit_unbound_type(self, t: UnboundType) -> Type:
    sym = self.lookup(t.name, t)
    if sym is not None:
        if sym.node is None:
            # UNBOUND_IMPORTED can happen if an unknown name was imported.
            if sym.kind != UNBOUND_IMPORTED:
                self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
            return AnyType()
        fullname = sym.node.fullname()
        if sym.kind == BOUND_TVAR:
            if len(t.args) > 0:
                self.fail('Type variable "{}" used with arguments'.format(
                    t.name), t)
            tvar_expr = cast(TypeVarExpr, sym.node)
            return TypeVarType(t.name, sym.tvar_id, tvar_expr.values,
                               self.builtin_type('builtins.object'),
                               tvar_expr.variance,
                               t.line)
        elif fullname == 'builtins.None':
            return Void()
        elif fullname == 'typing.Any':
            return AnyType()
        elif fullname == 'typing.Tuple':
            if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                # Tuple[T, ...] (uniform, variable-length tuple)
                node = self.lookup_fqn_func('builtins.tuple')
                info = cast(TypeInfo, node.node)
                return Instance(info, [t.args[0].accept(self)], t.line)
            return TupleType(self.anal_array(t.args),
                             self.builtin_type('builtins.tuple'))
        elif fullname == 'typing.Union':
            items = self.anal_array(t.args)
            items = [item for item in items if not isinstance(item, Void)]
            return UnionType.make_union(items)
        elif fullname == 'typing.Optional':
            if len(t.args) != 1:
                self.fail('Optional[...] must have exactly one type argument', t)
            items = self.anal_array(t.args)
            # Currently Optional[t] is just an alias for t.
            return items[0]
        elif fullname == 'typing.Callable':
            return self.analyze_callable_type(t)
        elif sym.kind == TYPE_ALIAS:
            # TODO: Generic type aliases.
            return sym.type_override
        elif not isinstance(sym.node, TypeInfo):
            name = sym.fullname
            if name is None:
                name = sym.node.name()
            if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
                # Something with an Any type -- make it an alias for Any in a type
                # context. This is slightly problematic as it allows using the type 'Any'
                # as a base class -- however, this will fail soon at runtime so the problem
                # is pretty minor.
                return AnyType()
            self.fail('Invalid type "{}"'.format(name), t)
            return t
        info = cast(TypeInfo, sym.node)
        if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
            return TupleType(self.anal_array(t.args),
                             Instance(info, [AnyType()], t.line),
                             t.line)
        else:
            # Analyze arguments and construct Instance type. The
            # number of type arguments and their values are
            # checked only later, since we do not always know the
            # valid count at this point. Thus we may construct an
            # Instance with an invalid number of type arguments.
            instance = Instance(info, self.anal_array(t.args), t.line)
            if info.tuple_type is None:
                return instance
            else:
                # The class has a Tuple[...] base class so it will be
                # represented as a tuple type.
                if t.args:
                    self.fail('Generic tuple types not supported', t)
                    return AnyType()
                return TupleType(self.anal_array(info.tuple_type.items),
                                 fallback=instance,
                                 line=t.line)
    else:
        return AnyType()
def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value:
    # x in (...)/[...]
    # x not in (...)/[...]
    if (e.operators[0] in ['in', 'not in']
            and len(e.operators) == 1
            and isinstance(e.operands[1], (TupleExpr, ListExpr))):
        items = e.operands[1].items
        n_items = len(items)
        # x in y -> x == y[0] or ... or x == y[n]
        # x not in y -> x != y[0] and ... and x != y[n]
        # 16 is arbitrarily chosen to limit code size
        if 1 < n_items < 16:
            if e.operators[0] == 'in':
                bin_op = 'or'
                cmp_op = '=='
            else:
                bin_op = 'and'
                cmp_op = '!='
            lhs = e.operands[0]
            mypy_file = builder.graph['builtins'].tree
            assert mypy_file is not None
            bool_type = Instance(cast(TypeInfo, mypy_file.names['bool'].node), [])
            exprs = []
            for item in items:
                expr = ComparisonExpr([cmp_op], [lhs, item])
                builder.types[expr] = bool_type
                exprs.append(expr)

            or_expr: Expression = exprs.pop(0)
            for expr in exprs:
                or_expr = OpExpr(bin_op, or_expr, expr)
                builder.types[or_expr] = bool_type
            return builder.accept(or_expr)
        # x in [y]/(y) -> x == y
        # x not in [y]/(y) -> x != y
        elif n_items == 1:
            if e.operators[0] == 'in':
                cmp_op = '=='
            else:
                cmp_op = '!='
            e.operators = [cmp_op]
            e.operands[1] = items[0]
        # x in []/() -> False
        # x not in []/() -> True
        elif n_items == 0:
            if e.operators[0] == 'in':
                return builder.false()
            else:
                return builder.true()

    # TODO: Don't produce an expression when used in conditional context
    # All of the trickiness here is due to support for chained conditionals
    # (`e1 < e2 > e3`, etc). `e1 < e2 > e3` is approximately equivalent to
    # `e1 < e2 and e2 > e3` except that `e2` is only evaluated once.
    expr_type = builder.node_type(e)

    # go(i, prev) generates code for `ei opi e{i+1} op{i+1} ... en`,
    # assuming that prev contains the value of `ei`.
    def go(i: int, prev: Value) -> Value:
        if i == len(e.operators) - 1:
            return transform_basic_comparison(
                builder, e.operators[i], prev, builder.accept(e.operands[i + 1]), e.line)

        next = builder.accept(e.operands[i + 1])
        return builder.builder.shortcircuit_helper(
            'and', expr_type,
            lambda: transform_basic_comparison(
                builder, e.operators[i], prev, next, e.line),
            lambda: go(i + 1, next),
            e.line)

    return go(0, builder.accept(e.operands[0]))

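# Illustrative sketch only: this mirrors the membership-test unrolling that
# transform_comparison_expr performs, but on plain Python values instead of
# mypyc IR. The helper name `unroll_membership_test` is hypothetical and
# exists purely to show the rewrite; it is not part of mypyc.
def unroll_membership_test(x: object, items: tuple, negated: bool = False) -> bool:
    # x in (a, b, c)      ->  x == a or x == b or x == c
    # x not in (a, b, c)  ->  x != a and x != b and x != c
    if not items:
        return negated  # 'in' over an empty sequence is False, 'not in' is True
    result = (x != items[0]) if negated else (x == items[0])
    for item in items[1:]:
        if negated:
            result = result and x != item
        else:
            result = result or x == item
    return result

assert unroll_membership_test(2, (1, 2, 3)) is True
assert unroll_membership_test(5, (1, 2, 3), negated=True) is True
assert unroll_membership_test(5, ()) is False
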
def visit_instance(self, t: Instance) -> Type:
    return Instance(t.type, [AnyType()] * len(t.args), t.line)

def visit_instance(self, t: Instance) -> Type:
    args = self.expand_types(t.args)
    return Instance(t.type, args, t.line, t.column)

def visit_instance(self, t: Instance) -> None:
    info = t.type
    if info.replaced or info.tuple_type:
        self.indicator['synthetic'] = True
    # Check type argument count.
    if len(t.args) != len(info.type_vars):
        if len(t.args) == 0:
            from_builtins = t.type.fullname() in nongen_builtins and not t.from_generic_builtin
            if (self.options.disallow_any_generics and
                    not self.is_typeshed_stub and
                    from_builtins):
                alternative = nongen_builtins[t.type.fullname()]
                self.fail(messages.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative), t)
            # Insert implicit 'Any' type arguments.
            if from_builtins:
                # this 'Any' was already reported elsewhere
                any_type = AnyType(TypeOfAny.special_form,
                                   line=t.line, column=t.column)
            else:
                any_type = AnyType(TypeOfAny.from_omitted_generics,
                                   line=t.line, column=t.column)
            t.args = [any_type] * len(info.type_vars)
            return
        # Invalid number of type parameters.
        n = len(info.type_vars)
        s = '{} type arguments'.format(n)
        if n == 0:
            s = 'no type arguments'
        elif n == 1:
            s = '1 type argument'
        act = str(len(t.args))
        if act == '0':
            act = 'none'
        self.fail('"{}" expects {}, but {} given'.format(
            info.name(), s, act), t)
        # Construct the correct number of type arguments, as
        # otherwise the type checker may crash as it expects
        # things to be right.
        t.args = [AnyType(TypeOfAny.from_error) for _ in info.type_vars]
        t.invalid = True
    elif info.defn.type_vars:
        # Check type argument values.
        # TODO: Calling is_subtype and is_same_types in semantic analysis is a bad idea
        for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars):
            if tvar.values:
                if isinstance(arg, TypeVarType):
                    arg_values = arg.values
                    if not arg_values:
                        self.fail('Type variable "{}" not valid as type '
                                  'argument value for "{}"'.format(
                                      arg.name, info.name()), t)
                        continue
                else:
                    arg_values = [arg]
                self.check_type_var_values(info, arg_values,
                                           tvar.name, tvar.values, i + 1, t)
            # TODO: These hacks will be not necessary when this will be moved to later stage.
            arg = self.resolve_type(arg)
            bound = self.resolve_type(tvar.upper_bound)
            if not is_subtype(arg, bound):
                self.fail('Type argument "{}" of "{}" must be '
                          'a subtype of "{}"'.format(
                              arg, info.name(), bound), t)
    for arg in t.args:
        arg.accept(self)
    if info.is_newtype:
        for base in info.bases:
            base.accept(self)

def analyze_class_attribute_access(itype: Instance,
                                   name: str,
                                   mx: MemberContext,
                                   override_info: Optional[TypeInfo] = None) -> Optional[Type]:
    """original_type is the type of E in the expression E.var"""
    info = itype.type
    if override_info:
        info = override_info

    node = info.get(name)
    if not node:
        if info.fallback_to_any:
            return AnyType(TypeOfAny.special_form)
        return None

    is_decorated = isinstance(node.node, Decorator)
    is_method = is_decorated or isinstance(node.node, FuncBase)
    if mx.is_lvalue:
        if is_method:
            mx.msg.cant_assign_to_method(mx.context)
        if isinstance(node.node, TypeInfo):
            mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context)

    # If a final attribute was declared on `self` in `__init__`, then it
    # can't be accessed on the class object.
    if node.implicit and isinstance(node.node, Var) and node.node.is_final:
        mx.msg.fail(message_registry.CANNOT_ACCESS_FINAL_INSTANCE_ATTR
                    .format(node.node.name()), mx.context)

    # An assignment to final attribute on class object is also always an error,
    # independently of types.
    if mx.is_lvalue and not mx.chk.get_final_context():
        check_final_member(name, info, mx.msg, mx.context)

    if info.is_enum and not (mx.is_lvalue or is_decorated or is_method):
        enum_literal = LiteralType(name, fallback=itype)
        return itype.copy_modified(last_known_value=enum_literal)

    t = node.type
    if t:
        if isinstance(t, PartialType):
            symnode = node.node
            assert isinstance(symnode, Var)
            return mx.chk.handle_partial_var_type(t, mx.is_lvalue, symnode, mx.context)

        # Find the class where method/variable was defined.
        if isinstance(node.node, Decorator):
            super_info = node.node.var.info  # type: Optional[TypeInfo]
        elif isinstance(node.node, (Var, SYMBOL_FUNCBASE_TYPES)):
            super_info = node.node.info
        else:
            super_info = None

        # Map the type to how it would look as a defining class. For example:
        #     class C(Generic[T]): ...
        #     class D(C[Tuple[T, S]]): ...
        #     D[int, str].method()
        # Here itype is D[int, str], isuper is C[Tuple[int, str]].
        if not super_info:
            isuper = None
        else:
            isuper = map_instance_to_supertype(itype, super_info)

        if isinstance(node.node, Var):
            assert isuper is not None
            # Check if original variable type has type variables. For example:
            #     class C(Generic[T]):
            #         x: T
            #     C.x  # Error, ambiguous access
            #     C[int].x  # Also an error, since C[int] is same as C at runtime
            if isinstance(t, TypeVarType) or get_type_vars(t):
                # Exception: access on Type[...], including first argument of class methods is OK.
                if not isinstance(get_proper_type(mx.original_type), TypeType):
                    mx.msg.fail(message_registry.GENERIC_INSTANCE_VAR_CLASS_ACCESS, mx.context)

            # Erase non-mapped variables, but keep mapped ones, even if there is an error.
            # In the above example this means that we infer following types:
            #     C.x -> Any
            #     C[int].x -> int
            t = erase_typevars(expand_type_by_instance(t, isuper))

        is_classmethod = ((is_decorated and cast(Decorator, node.node).func.is_class)
                          or (isinstance(node.node, FuncBase) and node.node.is_class))
        result = add_class_tvars(get_proper_type(t), itype, isuper, is_classmethod,
                                 mx.builtin_type, mx.original_type)
        if not mx.is_lvalue:
            result = analyze_descriptor_access(mx.original_type, result, mx.builtin_type,
                                               mx.msg, mx.context, chk=mx.chk)
        return result
    elif isinstance(node.node, Var):
        mx.not_ready_callback(name, mx.context)
        return AnyType(TypeOfAny.special_form)

    if isinstance(node.node, TypeVarExpr):
        mx.msg.fail(message_registry.CANNOT_USE_TYPEVAR_AS_EXPRESSION.format(
            info.name(), name), mx.context)
        return AnyType(TypeOfAny.from_error)

    if isinstance(node.node, TypeInfo):
        return type_object_type(node.node, mx.builtin_type)

    if isinstance(node.node, MypyFile):
        # Reference to a module object.
        return mx.builtin_type('types.ModuleType')

    if (isinstance(node.node, TypeAlias) and
            isinstance(get_proper_type(node.node.target), Instance)):
        return instance_alias_type(node.node, mx.builtin_type)

    if is_decorated:
        assert isinstance(node.node, Decorator)
        if node.node.type:
            return node.node.type
        else:
            mx.not_ready_callback(name, mx.context)
            return AnyType(TypeOfAny.from_error)
    else:
        return function_type(cast(FuncBase, node.node),
                             mx.builtin_type('builtins.function'))

def relationship_hook(ctx: FunctionContext) -> Type:
    """Support basic use cases for relationships.

    Examples:
        from sqlalchemy.orm import relationship

        from one import OneModel
        if TYPE_CHECKING:
            from other import OtherModel

        class User(Base):
            __tablename__ = 'users'
            id = Column(Integer(), primary_key=True)
            one = relationship(OneModel)
            other = relationship("OtherModel")

    This also tries to infer the type argument for 'RelationshipProperty'
    using the 'uselist' flag.
    """
    assert isinstance(ctx.default_return_type, Instance)  # type: ignore[misc]
    original_type_arg = ctx.default_return_type.args[0]
    has_annotation = not isinstance(get_proper_type(original_type_arg), UninhabitedType)

    arg = get_argument_by_name(ctx, 'argument')
    arg_type = get_proper_type(get_argtype_by_name(ctx, 'argument'))

    uselist_arg = get_argument_by_name(ctx, 'uselist')

    if isinstance(arg, StrExpr):
        name = arg.value
        sym = None  # type: Optional[SymbolTableNode]
        try:
            # Private API for local lookup, but probably needs to be public.
            sym = ctx.api.lookup_qualified(name)  # type: ignore
        except (KeyError, AssertionError):
            pass
        if sym and isinstance(sym.node, TypeInfo):
            new_arg = fill_typevars_with_any(sym.node)  # type: Type
        else:
            ctx.api.fail('Cannot find model "{}"'.format(name), ctx.context)
            # TODO: Add note() to public API.
            ctx.api.note('Only imported models can be found;'  # type: ignore
                         ' use "if TYPE_CHECKING: ..." to avoid import cycles',
                         ctx.context)
            new_arg = AnyType(TypeOfAny.from_error)
    else:
        if isinstance(arg_type, CallableType) and arg_type.is_type_obj():
            new_arg = fill_typevars_with_any(arg_type.type_object())
        else:
            # Something complex, stay silent for now.
            new_arg = AnyType(TypeOfAny.special_form)

    # We figured out the model type. Now check if we need to wrap it in Iterable.
    if uselist_arg:
        if parse_bool(uselist_arg):
            new_arg = ctx.api.named_generic_type('builtins.list', [new_arg])
    else:
        if has_annotation:
            # If there is an annotation we use it as a source of truth.
            # This will cause false negatives, but it is better than lots of false positives.
            new_arg = original_type_arg

    return Instance(ctx.default_return_type.type, [new_arg],
                    line=ctx.default_return_type.line,
                    column=ctx.default_return_type.column)

def builtin_type(self, fully_qualified_name: str) -> Instance:
    node = self.lookup_fqn_func(fully_qualified_name)
    info = cast(TypeInfo, node.node)
    return Instance(info, [])

def builtin_type(self, fully_qualified_name: str,
                 args: Optional[List[Type]] = None) -> Instance:
    node = self.lookup_fqn_func(fully_qualified_name)
    assert isinstance(node.node, TypeInfo)
    return Instance(node.node, args or [])

def create_new_manager_class_from_from_queryset_method(ctx: DynamicClassDefContext) -> None:
    semanal_api = helpers.get_semanal_api(ctx)

    callee = ctx.call.callee
    assert isinstance(callee, MemberExpr)
    assert isinstance(callee.expr, RefExpr)

    base_manager_info = callee.expr.node
    if base_manager_info is None:
        if not semanal_api.final_iteration:
            semanal_api.defer()
        return
    assert isinstance(base_manager_info, TypeInfo)

    new_manager_info = semanal_api.basic_new_typeinfo(
        ctx.name,
        basetype_or_fallback=Instance(base_manager_info, [AnyType(TypeOfAny.unannotated)]))
    new_manager_info.line = ctx.call.line
    new_manager_info.defn.line = ctx.call.line
    new_manager_info.metaclass_type = new_manager_info.calculate_metaclass_type()

    current_module = semanal_api.cur_mod_node
    current_module.names[ctx.name] = SymbolTableNode(GDEF, new_manager_info,
                                                     plugin_generated=True)

    passed_queryset = ctx.call.args[0]
    assert isinstance(passed_queryset, NameExpr)

    derived_queryset_fullname = passed_queryset.fullname
    assert derived_queryset_fullname is not None

    sym = semanal_api.lookup_fully_qualified_or_none(derived_queryset_fullname)
    assert sym is not None
    if sym.node is None:
        if not semanal_api.final_iteration:
            semanal_api.defer()
        else:
            # inherit from Any to prevent false-positives, if queryset class cannot be resolved
            new_manager_info.fallback_to_any = True
        return

    derived_queryset_info = sym.node
    assert isinstance(derived_queryset_info, TypeInfo)

    if len(ctx.call.args) > 1:
        expr = ctx.call.args[1]
        assert isinstance(expr, StrExpr)
        custom_manager_generated_name = expr.value
    else:
        custom_manager_generated_name = base_manager_info.name + "From" + derived_queryset_info.name

    custom_manager_generated_fullname = ".".join(
        ["django.db.models.manager", custom_manager_generated_name])
    if "from_queryset_managers" not in base_manager_info.metadata:
        base_manager_info.metadata["from_queryset_managers"] = {}
    base_manager_info.metadata["from_queryset_managers"][
        custom_manager_generated_fullname] = new_manager_info.fullname

    class_def_context = ClassDefContext(cls=new_manager_info.defn,
                                        reason=ctx.call, api=semanal_api)
    self_type = Instance(new_manager_info, [])

    # we need to copy all methods in MRO before django.db.models.query.QuerySet
    for class_mro_info in derived_queryset_info.mro:
        if class_mro_info.fullname == fullnames.QUERYSET_CLASS_FULLNAME:
            break
        for name, sym in class_mro_info.names.items():
            if isinstance(sym.node, FuncDef):
                helpers.copy_method_to_another_class(class_def_context,
                                                     self_type,
                                                     new_method_name=name,
                                                     method_node=sym.node)

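# For context, this is the standard Django usage pattern that the dynamic-class
# hook above is written to handle. The model and queryset names here are made
# up for illustration, and actually running this requires a configured Django
# settings module.
from django.db import models


class BookQuerySet(models.QuerySet):
    def published(self) -> "BookQuerySet":
        return self.filter(is_published=True)


# This call creates a new manager class at runtime; the plugin hook above
# synthesizes a matching TypeInfo so the result is usable in type checking.
BookManager = models.Manager.from_queryset(BookQuerySet)


class Book(models.Model):
    title = models.CharField(max_length=100)
    is_published = models.BooleanField(default=False)

    objects = BookManager()
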
def visit_unbound_type(self, t: UnboundType) -> Type:
    if t.optional:
        t.optional = False
        # We don't need to worry about double-wrapping Optionals or
        # wrapping Anys: Union simplification will take care of that.
        return make_optional_type(self.visit_unbound_type(t))
    sym = self.lookup(t.name, t, suppress_errors=self.third_pass)  # type: ignore
    if sym is not None:
        if sym.node is None:
            # UNBOUND_IMPORTED can happen if an unknown name was imported.
            if sym.kind != UNBOUND_IMPORTED:
                self.fail('Internal error (node is None, kind={})'.format(sym.kind), t)
            return AnyType(TypeOfAny.special_form)
        fullname = sym.node.fullname()
        hook = self.plugin.get_type_analyze_hook(fullname)
        if hook:
            return hook(AnalyzeTypeContext(t, t, self))
        if (fullname in nongen_builtins and t.args and
                not sym.normalized and not self.allow_unnormalized):
            self.fail(no_subscript_builtin_alias(fullname), t)
        if self.tvar_scope:
            tvar_def = self.tvar_scope.get_binding(sym)
        else:
            tvar_def = None
        if self.warn_bound_tvar and sym.kind == TVAR and tvar_def is not None:
            self.fail('Can\'t use bound type variable "{}"'
                      ' to define generic alias'.format(t.name), t)
            return AnyType(TypeOfAny.from_error)
        elif sym.kind == TVAR and tvar_def is not None:
            if len(t.args) > 0:
                self.fail('Type variable "{}" used with arguments'.format(t.name), t)
            return TypeVarType(tvar_def, t.line)
        elif fullname == 'builtins.None':
            return NoneTyp()
        elif fullname == 'typing.Any' or fullname == 'builtins.Any':
            return AnyType(TypeOfAny.explicit)
        elif fullname == 'typing.Tuple':
            if len(t.args) == 0 and not t.empty_tuple_index:
                # Bare 'Tuple' is same as 'tuple'
                if self.options.disallow_any_generics and not self.is_typeshed_stub:
                    self.fail(messages.BARE_GENERIC, t)
                typ = self.named_type('builtins.tuple', line=t.line, column=t.column)
                typ.from_generic_builtin = True
                return typ
            if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                # Tuple[T, ...] (uniform, variable-length tuple)
                instance = self.named_type('builtins.tuple', [self.anal_type(t.args[0])])
                instance.line = t.line
                return instance
            return self.tuple_type(self.anal_array(t.args))
        elif fullname == 'typing.Union':
            items = self.anal_array(t.args)
            return UnionType.make_union(items)
        elif fullname == 'typing.Optional':
            if len(t.args) != 1:
                self.fail('Optional[...] must have exactly one type argument', t)
                return AnyType(TypeOfAny.from_error)
            item = self.anal_type(t.args[0])
            return make_optional_type(item)
        elif fullname == 'typing.Callable':
            return self.analyze_callable_type(t)
        elif fullname == 'typing.Type':
            if len(t.args) == 0:
                any_type = AnyType(TypeOfAny.from_omitted_generics,
                                   line=t.line, column=t.column)
                return TypeType(any_type, line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail('Type[...] must have exactly one type argument', t)
            item = self.anal_type(t.args[0])
            return TypeType.make_normalized(item, line=t.line)
        elif fullname == 'typing.ClassVar':
            if self.nesting_level > 0:
                self.fail('Invalid type: ClassVar nested inside other type', t)
            if len(t.args) == 0:
                return AnyType(TypeOfAny.from_omitted_generics,
                               line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail('ClassVar[...] must have at most one type argument', t)
                return AnyType(TypeOfAny.from_error)
            item = self.anal_type(t.args[0])
            if isinstance(item, TypeVarType) or get_type_vars(item):
                self.fail('Invalid type: ClassVar cannot be generic', t)
                return AnyType(TypeOfAny.from_error)
            return item
        elif fullname in ('mypy_extensions.NoReturn', 'typing.NoReturn'):
            return UninhabitedType(is_noreturn=True)
        elif sym.kind == TYPE_ALIAS:
            override = sym.type_override
            all_vars = sym.alias_tvars
            assert override is not None
            an_args = self.anal_array(t.args)
            if all_vars is not None:
                exp_len = len(all_vars)
            else:
                exp_len = 0
            act_len = len(an_args)
            if exp_len > 0 and act_len == 0:
                # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...]
                assert all_vars is not None
                return set_any_tvars(override, all_vars, t.line, t.column)
            if exp_len == 0 and act_len == 0:
                return override
            if act_len != exp_len:
                self.fail('Bad number of arguments for type alias, expected: %s, given: %s'
                          % (exp_len, act_len), t)
                return set_any_tvars(override, all_vars or [],
                                     t.line, t.column, implicit=False)
            assert all_vars is not None
            return replace_alias_tvars(override, all_vars, an_args, t.line, t.column)
        elif not isinstance(sym.node, TypeInfo):
            name = sym.fullname
            if name is None:
                name = sym.node.name()
            if isinstance(sym.node, Var) and isinstance(sym.node.type, AnyType):
                # Something with an Any type -- make it an alias for Any in a type
                # context. This is slightly problematic as it allows using the type 'Any'
                # as a base class -- however, this will fail soon at runtime so the problem
                # is pretty minor.
                return AnyType(TypeOfAny.from_unimported_type)
            # Allow unbound type variables when defining an alias
            if not (self.aliasing and sym.kind == TVAR and
                    (not self.tvar_scope or self.tvar_scope.get_binding(sym) is None)):
                if (not self.third_pass and not self.in_dynamic_func and
                        not (isinstance(sym.node, (FuncDef, Decorator)) or
                             isinstance(sym.node, Var) and sym.node.is_ready) and
                        not (sym.kind == TVAR and tvar_def is None)):
                    if t.args and not self.global_scope:
                        self.fail('Unsupported forward reference to "{}"'.format(t.name), t)
                        return AnyType(TypeOfAny.from_error)
                    return ForwardRef(t)
            self.fail('Invalid type "{}"'.format(name), t)
            if self.third_pass and sym.kind == TVAR:
                self.note_func("Forward references to type variables are prohibited", t)
            return t
        info = sym.node  # type: TypeInfo
        if len(t.args) > 0 and info.fullname() == 'builtins.tuple':
            fallback = Instance(info, [AnyType(TypeOfAny.special_form)], t.line)
            return TupleType(self.anal_array(t.args), fallback, t.line)
        else:
            # Analyze arguments and construct Instance type. The
            # number of type arguments and their values are
            # checked only later, since we do not always know the
            # valid count at this point. Thus we may construct an
            # Instance with an invalid number of type arguments.
            instance = Instance(info, self.anal_array(t.args), t.line, t.column)
            instance.from_generic_builtin = sym.normalized
            tup = info.tuple_type
            if tup is not None:
                # The class has a Tuple[...] base class so it will be
                # represented as a tuple type.
                if t.args:
                    self.fail('Generic tuple types not supported', t)
                    return AnyType(TypeOfAny.from_error)
                return tup.copy_modified(items=self.anal_array(tup.items),
                                         fallback=instance)
            td = info.typeddict_type
            if td is not None:
                # The class has a TypedDict[...] base class so it will be
                # represented as a typeddict type.
                if t.args:
                    self.fail('Generic TypedDict types not supported', t)
                    return AnyType(TypeOfAny.from_error)
                # Create a named TypedDictType
                return td.copy_modified(item_types=self.anal_array(list(td.items.values())),
                                        fallback=instance)
            return instance
    else:
        if self.third_pass:
            self.fail('Invalid type "{}"'.format(t.name), t)
            return AnyType(TypeOfAny.from_error)
        return AnyType(TypeOfAny.special_form)

def perform_transform(self, node: Node, transform: Callable[[Type], Type]) -> None:
    """Apply transform to all types associated with node."""
    if isinstance(node, ForStmt):
        if node.index_type:
            node.index_type = transform(node.index_type)
        self.transform_types_in_lvalue(node.index, transform)
    if isinstance(node, WithStmt):
        if node.target_type:
            node.target_type = transform(node.target_type)
        for n in node.target:
            if isinstance(n, NameExpr) and isinstance(n.node, Var) and n.node.type:
                n.node.type = transform(n.node.type)
    if isinstance(node, (FuncDef, OverloadedFuncDef, CastExpr, AssignmentStmt,
                         TypeAliasExpr, Var)):
        assert node.type, "Scheduled patch for non-existent type"
        node.type = transform(node.type)
    if isinstance(node, TypeAlias):
        node.target = transform(node.target)
    if isinstance(node, NewTypeExpr):
        assert node.old_type, "Scheduled patch for non-existent type"
        node.old_type = transform(node.old_type)
        if node.info:
            new_bases = []  # type: List[Instance]
            for b in node.info.bases:
                new_b = transform(b)
                # TODO: this code can be combined with code in second pass.
                if isinstance(new_b, Instance):
                    new_bases.append(new_b)
                elif isinstance(new_b, TupleType):
                    new_bases.append(new_b.fallback)
                else:
                    self.fail("Argument 2 to NewType(...) must be subclassable"
                              " (got {})".format(new_b), node)
                    new_bases.append(self.builtin_type('object'))
            node.info.bases = new_bases
    if isinstance(node, TypeVarExpr):
        if node.upper_bound:
            node.upper_bound = transform(node.upper_bound)
        if node.values:
            node.values = [transform(v) for v in node.values]
    if isinstance(node, TypedDictExpr):
        assert node.info.typeddict_type, "Scheduled patch for non-existent type"
        node.info.typeddict_type = cast(TypedDictType,
                                        transform(node.info.typeddict_type))
    if isinstance(node, NamedTupleExpr):
        assert node.info.tuple_type, "Scheduled patch for non-existent type"
        node.info.tuple_type = cast(TupleType,
                                    transform(node.info.tuple_type))
    if isinstance(node, TypeApplication):
        node.types = [transform(t) for t in node.types]
    if isinstance(node, TypeInfo):
        for tvar in node.defn.type_vars:
            if tvar.upper_bound:
                tvar.upper_bound = transform(tvar.upper_bound)
            if tvar.values:
                tvar.values = [transform(v) for v in tvar.values]
        new_bases = []
        for base in node.bases:
            new_base = transform(base)
            if isinstance(new_base, Instance):
                new_bases.append(new_base)
            else:
                # Don't fix the NamedTuple bases, they are Instance's intentionally.
                # Patch the 'args' just in case, although generic tuple types are
                # not supported yet.
                alt_base = Instance(base.type, [transform(a) for a in base.args])
                new_bases.append(alt_base)
        node.bases = new_bases
        if node.tuple_type:
            new_tuple_type = transform(node.tuple_type)
            assert isinstance(new_tuple_type, TupleType)
            node.tuple_type = new_tuple_type

def visit_instance(self, typ: Instance) -> None:
    typ.type = self.fixup(typ.type)
    for arg in typ.args:
        arg.accept(self)
    if typ.last_known_value:
        typ.last_known_value.accept(self)

def visit_instance(self, typ: Instance) -> None:
    typ.type = self.fixup(typ.type)
    for arg in typ.args:
        arg.accept(self)

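# Closing illustration: a minimal, self-contained sketch of the visitor /
# traversal pattern that the visit_instance methods above share. These toy
# classes are hypothetical and are not mypy's real API; they only show how
# `accept` dispatches to `visit_instance`, which then recurses into `args`.
from typing import List


class ToyType:
    def accept(self, visitor: "ToyVisitor") -> None:
        raise NotImplementedError


class ToyInstance(ToyType):
    def __init__(self, name: str, args: List[ToyType]) -> None:
        self.name = name
        self.args = args

    def accept(self, visitor: "ToyVisitor") -> None:
        visitor.visit_instance(self)


class ToyVisitor:
    def visit_instance(self, t: ToyInstance) -> None:
        # Recurse into type arguments, mirroring `for arg in t.args: arg.accept(self)`.
        for arg in t.args:
            arg.accept(self)


class NameCollector(ToyVisitor):
    def __init__(self) -> None:
        self.names: List[str] = []

    def visit_instance(self, t: ToyInstance) -> None:
        self.names.append(t.name)
        super().visit_instance(t)


collector = NameCollector()
ToyInstance("dict", [ToyInstance("str", []), ToyInstance("int", [])]).accept(collector)
assert collector.names == ["dict", "str", "int"]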