Example #1
0
def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo,
                              mx: MemberContext) -> Type:
    """Analyse attribute access that does not target a method.

    This is logically part of analyze_member_access and the arguments are similar.

    original_type is the type of E in the expression E.var
    """
    # It was not a method. Try looking up a variable.
    v = lookup_member_var_or_accessor(info, name, mx.is_lvalue)

    vv = v
    if isinstance(vv, Decorator):
        # The associated Var node of a decorator contains the type.
        v = vv.var

    if isinstance(vv, TypeInfo):
        # If the associated variable is a TypeInfo synthesize a Var node for
        # the purposes of type checking.  This enables us to type check things
        # like accessing class attributes on an inner class.
        v = Var(name, type=type_object_type(vv, mx.builtin_type))
        v.info = info

    if isinstance(vv, TypeAlias) and isinstance(get_proper_type(vv.target),
                                                Instance):
        # Similar to the above TypeInfo case, we allow using
        # qualified type aliases in runtime context if it refers to an
        # instance type. For example:
        #     class C:
        #         A = List[int]
        #     x = C.A() <- this is OK
        typ = instance_alias_type(vv, mx.builtin_type)
        v = Var(name, type=typ)
        v.info = info

    if isinstance(v, Var):
        implicit = info[name].implicit

        # An assignment to a final attribute is always an error,
        # independently of types.
        if mx.is_lvalue and not mx.chk.get_final_context():
            check_final_member(name, info, mx.msg, mx.context)

        return analyze_var(name, v, itype, info, mx, implicit=implicit)
    elif isinstance(v, FuncDef):
        assert False, "Did not expect a function"
    elif (not v
          and name not in ['__getattr__', '__setattr__', '__getattribute__']
          and not mx.is_operator):
        if not mx.is_lvalue:
            for method_name in ('__getattribute__', '__getattr__'):
                method = info.get_method(method_name)
                # __getattribute__ is defined on builtins.object and returns Any, so without
                # the guard this search will always find object.__getattribute__ and conclude
                # that the attribute exists
                if method and method.info.fullname() != 'builtins.object':
                    function = function_type(
                        method, mx.builtin_type('builtins.function'))
                    bound_method = bind_self(function, mx.self_type)
                    typ = map_instance_to_supertype(itype, method.info)
                    getattr_type = expand_type_by_instance(bound_method, typ)
                    if isinstance(getattr_type, CallableType):
                        result = getattr_type.ret_type

                        # Call the attribute hook before returning.
                        fullname = '{}.{}'.format(method.info.fullname(), name)
                        hook = mx.chk.plugin.get_attribute_hook(fullname)
                        if hook:
                            result = hook(
                                AttributeContext(
                                    get_proper_type(mx.original_type), result,
                                    mx.context, mx.chk))
                        return result
        else:
            setattr_meth = info.get_method('__setattr__')
            if (setattr_meth
                    and setattr_meth.info.fullname() != 'builtins.object'):
                setattr_func = function_type(
                    setattr_meth, mx.builtin_type('builtins.function'))
                bound_type = bind_self(setattr_func, mx.self_type)
                typ = map_instance_to_supertype(itype, setattr_meth.info)
                setattr_type = expand_type_by_instance(bound_type, typ)
                if (isinstance(setattr_type, CallableType)
                        and len(setattr_type.arg_types) > 0):
                    return setattr_type.arg_types[-1]

    if itype.type.fallback_to_any:
        return AnyType(TypeOfAny.special_form)

    # Could not find the member.
    if mx.is_super:
        mx.msg.undefined_in_superclass(name, mx.context)
        return AnyType(TypeOfAny.from_error)
    else:
        if mx.chk and mx.chk.should_suppress_optional_error([itype]):
            return AnyType(TypeOfAny.from_error)
        return mx.msg.has_no_attr(mx.original_type, itype, name, mx.context)
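
A hedged, user-level sketch of the __getattr__/__setattr__ fallback handled above: when an attribute is not found statically, a read is typed from __getattr__'s return type and a write is checked against __setattr__'s value parameter (as long as the hook is not the one inherited from builtins.object). The class and attribute names below are illustrative only.

class Proxy:
    def __getattr__(self, name: str) -> int:
        # Reads of statically unknown attributes are typed as int.
        return 0

    def __setattr__(self, name: str, value: int) -> None:
        # Writes of statically unknown attributes must be ints.
        object.__setattr__(self, name, value)


p = Proxy()
x = p.missing   # typed via __getattr__'s return type: int
p.other = 3     # checked against __setattr__'s 'value' parameter: ok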
Example #2
0
def is_literal_in_union(x: ProperType, y: ProperType) -> bool:
    """Return True if x is a Literal and y is an Union that includes x"""
    return (isinstance(x, LiteralType) and isinstance(y, UnionType)
            and any(x == get_proper_type(z) for z in y.items))
Example #3
0
def _scan_declarative_assignment_stmt(
    cls: ClassDef,
    api: SemanticAnalyzerPluginInterface,
    stmt: AssignmentStmt,
    cls_metadata: util.DeclClassApplied,
) -> None:
    """Extract mapping information from an assignment statement in a
    declarative class.

    """
    lvalue = stmt.lvalues[0]
    if not isinstance(lvalue, NameExpr):
        return

    sym = cls.info.names.get(lvalue.name)

    # this establishes that semantic analysis has taken place, which
    # means the nodes are populated and we are called from an appropriate
    # hook.
    assert sym is not None
    node = sym.node

    if isinstance(node, PlaceholderNode):
        return

    assert node is lvalue.node
    assert isinstance(node, Var)

    if node.name == "__abstract__":
        if api.parse_bool(stmt.rvalue) is True:
            cls_metadata.is_mapped = False
        return
    elif node.name == "__tablename__":
        cls_metadata.has_table = True
    elif node.name.startswith("__"):
        return
    elif node.name == "_mypy_mapped_attrs":
        if not isinstance(stmt.rvalue, ListExpr):
            util.fail(api, "_mypy_mapped_attrs is expected to be a list", stmt)
        else:
            for item in stmt.rvalue.items:
                if isinstance(item, (NameExpr, StrExpr)):
                    apply._apply_mypy_mapped_attr(cls, api, item, cls_metadata)

    left_hand_mapped_type: Optional[Type] = None
    left_hand_explicit_type: Optional[ProperType] = None

    if node.is_inferred or node.type is None:
        if isinstance(stmt.type, UnboundType):
            # look for an explicit Mapped[] type annotation on the left
            # side with nothing on the right

            # print(stmt.type)
            # Mapped?[Optional?[A?]]

            left_hand_explicit_type = stmt.type

            if stmt.type.name == "Mapped":
                mapped_sym = api.lookup_qualified("Mapped", cls)
                if (mapped_sym is not None and mapped_sym.node is not None
                        and names._type_id_for_named_node(
                            mapped_sym.node) is names.MAPPED):
                    left_hand_explicit_type = get_proper_type(
                        stmt.type.args[0])
                    left_hand_mapped_type = stmt.type

            # TODO: do we need to convert from unbound for this case?
            # left_hand_explicit_type = util._unbound_to_instance(
            #     api, left_hand_explicit_type
            # )
    else:
        node_type = get_proper_type(node.type)
        if (isinstance(node_type, Instance) and
                names._type_id_for_named_node(node_type.type) is names.MAPPED):
            # print(node.type)
            # sqlalchemy.orm.attributes.Mapped[<python type>]
            left_hand_explicit_type = get_proper_type(node_type.args[0])
            left_hand_mapped_type = node_type
        else:
            # print(node.type)
            # <python type>
            left_hand_explicit_type = node_type
            left_hand_mapped_type = None

    if isinstance(stmt.rvalue, TempNode) and left_hand_mapped_type is not None:
        # annotation without assignment and Mapped is present
        # as type annotation
        # equivalent to using _infer_type_from_left_hand_type_only.

        python_type_for_type = left_hand_explicit_type
    elif isinstance(stmt.rvalue, CallExpr) and isinstance(
            stmt.rvalue.callee, RefExpr):

        python_type_for_type = infer._infer_type_from_right_hand_nameexpr(
            api, stmt, node, left_hand_explicit_type, stmt.rvalue.callee)

        if python_type_for_type is None:
            return

    else:
        return

    assert python_type_for_type is not None

    cls_metadata.mapped_attr_names.append((node.name, python_type_for_type))

    apply._apply_type_to_mapped_statement(
        api,
        stmt,
        lvalue,
        left_hand_explicit_type,
        python_type_for_type,
    )
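
A hedged, user-level sketch of the assignment forms this scanner distinguishes (dunder names, a right-hand call expression, an explicit left-hand Mapped[...] annotation with and without a right-hand side). The model and column names are illustrative, and the snippet assumes the SQLAlchemy 1.4 declarative API, where sqlalchemy.orm exposes Mapped for the mypy plugin.

from typing import Optional

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import Mapped, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "users"   # dunder name: flags has_table on the class metadata

    # right-hand CallExpr only: the type is inferred from Column(...)
    id = Column(Integer(), primary_key=True)

    # explicit Mapped[...] annotation on the left with a right-hand call
    name: Mapped[Optional[str]] = Column(String(50))

    # Mapped[...] annotation with no right-hand side (TempNode rvalue):
    # the left-hand type alone is used
    nickname: Mapped[Optional[str]]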
Example #4
0
def bind_self(method: F,
              original_type: Optional[Type] = None,
              is_classmethod: bool = False) -> F:
    """Return a copy of `method`, with the type of its first parameter (usually
    self or cls) bound to original_type.

    If the type of `self` is a generic type (T, or Type[T] for classmethods),
    instantiate every occurrence of that type variable with original_type in
    the rest of the signature and in the return type.

    original_type is the type of E in the expression E.copy(). It is None in
    compatibility checks. In this case we treat it as the erasure of the
    declared type of self.

    This way we can express "the type of self". For example:

    T = TypeVar('T', bound='A')
    class A:
        def copy(self: T) -> T: ...

    class B(A): pass

    b = B().copy()  # type: B

    """
    if isinstance(method, Overloaded):
        return cast(
            F,
            Overloaded([
                bind_self(c, original_type, is_classmethod)
                for c in method.items
            ]))
    assert isinstance(method, CallableType)
    func = method
    if not func.arg_types:
        # Invalid method, return something.
        return cast(F, func)
    if func.arg_kinds[0] == ARG_STAR:
        # The signature is of the form 'def foo(*args, ...)'.
        # In this case we shouldn't drop the first arg,
        # since func will be absorbed by the *args.

        # TODO: infer bounds on the type of *args?
        return cast(F, func)
    self_param_type = get_proper_type(func.arg_types[0])

    variables: Sequence[TypeVarLikeType] = []
    if func.variables and supported_self_type(self_param_type):
        from mypy.infer import infer_type_arguments

        if original_type is None:
            # TODO: type check method override (see #7861).
            original_type = erase_to_bound(self_param_type)
        original_type = get_proper_type(original_type)

        all_ids = func.type_var_ids()
        typeargs = infer_type_arguments(all_ids,
                                        self_param_type,
                                        original_type,
                                        is_supertype=True)
        if (is_classmethod
                # TODO: why do we need the extra guards here?
                and any(
                    isinstance(get_proper_type(t), UninhabitedType)
                    for t in typeargs) and isinstance(
                        original_type, (Instance, TypeVarType, TupleType))):
            # In case we call a classmethod through an instance x, fallback to type(x)
            typeargs = infer_type_arguments(all_ids,
                                            self_param_type,
                                            TypeType(original_type),
                                            is_supertype=True)

        ids = [
            tid for tid in all_ids
            if any(tid == t.id for t in get_type_vars(self_param_type))
        ]

        # Technically, some constraints might be unsolvable, make them <nothing>.
        to_apply = [
            t if t is not None else UninhabitedType() for t in typeargs
        ]

        def expand(target: Type) -> Type:
            return expand_type(target,
                               {id: to_apply[all_ids.index(id)]
                                for id in ids})

        arg_types = [expand(x) for x in func.arg_types[1:]]
        ret_type = expand(func.ret_type)
        variables = [v for v in func.variables if v.id not in ids]
    else:
        arg_types = func.arg_types[1:]
        ret_type = func.ret_type
        variables = func.variables

    original_type = get_proper_type(original_type)
    if isinstance(original_type, CallableType) and original_type.is_type_obj():
        original_type = TypeType.make_normalized(original_type.ret_type)
    res = func.copy_modified(arg_types=arg_types,
                             arg_kinds=func.arg_kinds[1:],
                             arg_names=func.arg_names[1:],
                             variables=variables,
                             ret_type=ret_type,
                             bound_args=[original_type])
    return cast(F, res)
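
A hedged, user-level companion to the docstring example above, showing the classmethod case (where the first parameter is Type[T]) and the instance-call fallback handled by the extra guards in the code; the class names are illustrative only.

from typing import Type, TypeVar

T = TypeVar('T', bound='A')


class A:
    @classmethod
    def make(cls: Type[T]) -> T:
        return cls()


class B(A):
    pass


x = B.make()    # Type[T] is bound to Type[B], so x is inferred as B
y = B().make()  # calling through an instance falls back to type(B()) == Type[B]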
Example #5
0
    def format_type(self, cur_module: Optional[str], typ: Type) -> str:
        if self.use_fixme and isinstance(get_proper_type(typ), AnyType):
            return self.use_fixme
        return typ.accept(TypeFormatter(cur_module, self.graph))
Example #6
0
def _scan_symbol_table_entry(
    cls: ClassDef,
    api: SemanticAnalyzerPluginInterface,
    name: str,
    value: SymbolTableNode,
    cls_metadata: util.DeclClassApplied,
) -> None:
    """Extract mapping information from a SymbolTableNode that's in the
    type.names dictionary.

    """
    value_type = get_proper_type(value.type)
    if not isinstance(value_type, Instance):
        return

    left_hand_explicit_type = None
    type_id = names._type_id_for_named_node(value_type.type)
    # type_id = names._type_id_for_unbound_type(value.type.type, cls, api)

    err = False

    # TODO: this is nearly the same logic as that of
    # _scan_declarative_decorator_stmt, likely can be merged
    if type_id in {
            names.MAPPED,
            names.RELATIONSHIP,
            names.COMPOSITE_PROPERTY,
            names.MAPPER_PROPERTY,
            names.SYNONYM_PROPERTY,
            names.COLUMN_PROPERTY,
    }:
        if value_type.args:
            left_hand_explicit_type = get_proper_type(value_type.args[0])
        else:
            err = True
    elif type_id is names.COLUMN:
        if not value_type.args:
            err = True
        else:
            typeengine_arg: Union[ProperType, TypeInfo] = get_proper_type(
                value_type.args[0])
            if isinstance(typeengine_arg, Instance):
                typeengine_arg = typeengine_arg.type

            if isinstance(typeengine_arg, (UnboundType, TypeInfo)):
                sym = api.lookup_qualified(typeengine_arg.name, typeengine_arg)
                if sym is not None and isinstance(sym.node, TypeInfo):
                    if names._has_base_type_id(sym.node, names.TYPEENGINE):

                        left_hand_explicit_type = UnionType([
                            infer._extract_python_type_from_typeengine(
                                api, sym.node, []),
                            NoneType(),
                        ])
                    else:
                        util.fail(
                            api,
                            "Column type should be a TypeEngine "
                            "subclass not '{}'".format(sym.node.fullname),
                            value_type,
                        )

    if err:
        msg = ("Can't infer type from attribute {} on class {}. "
               "please specify a return type from this function that is "
               "one of: Mapped[<python type>], relationship[<target class>], "
               "Column[<TypeEngine>], MapperProperty[<python type>]")
        util.fail(api, msg.format(name, cls.name), cls)

        left_hand_explicit_type = AnyType(TypeOfAny.special_form)

    if left_hand_explicit_type is not None:
        cls_metadata.mapped_attr_names.append((name, left_hand_explicit_type))
Example #7
0
def enum_value_callback(ctx: 'mypy.plugin.AttributeContext') -> Type:
    """This plugin refines the 'value' attribute in enums to refer to
    the original underlying value. For example, suppose we have the
    following:

        class SomeEnum:
            FOO = A()
            BAR = B()

    By default, mypy will infer that 'SomeEnum.FOO.value' and
    'SomeEnum.BAR.value' both are of type 'Any'. This plugin refines
    this inference so that mypy understands the expressions are
    actually of types 'A' and 'B' respectively. This better reflects
    the actual runtime behavior.

    This plugin works simply by looking up the original value assigned
    to the enum. For example, when this plugin sees 'SomeEnum.BAR.value',
    it will look up whatever type 'BAR' had in the SomeEnum TypeInfo and
    use that as the inferred type of the overall expression.

    This plugin assumes that the provided context is an attribute access
    matching one of the strings found in 'ENUM_VALUE_ACCESS'.
    """
    enum_field_name = _extract_underlying_field_name(ctx.type)
    if enum_field_name is None:
        # We do not know the enum field name (perhaps it was passed to a
        # function and we only know that it _is_ a member).  All is not lost
        # however, if we can prove that all of the enum members have the
        # same value-type, then it doesn't matter which member was passed in.
        # The value-type is still known.
        if isinstance(ctx.type, Instance):
            info = ctx.type.type

            # As long as mypy doesn't understand attribute creation in __new__,
            # there is no way to predict the value type if the enum class has a
            # custom implementation
            if _implements_new(info):
                return ctx.default_attr_type

            stnodes = (info.get(name) for name in info.names)

            # Enums _can_ have methods and instance attributes.
            # Omit methods and attributes created by assigning to self.*
            # for our value inference.
            node_types = (
                get_proper_type(n.type) if n else None
                for n in stnodes
                if n is None or not n.implicit)
            proper_types = (
                _infer_value_type_with_auto_fallback(ctx, t)
                for t in node_types
                if t is None or not isinstance(t, CallableType))
            underlying_type = _first(proper_types)
            if underlying_type is None:
                return ctx.default_attr_type
            all_same_value_type = all(
                proper_type is not None and proper_type == underlying_type
                for proper_type in proper_types)
            if all_same_value_type:
                if underlying_type is not None:
                    return underlying_type
        return ctx.default_attr_type

    assert isinstance(ctx.type, Instance)
    info = ctx.type.type

    # As long as mypy doesn't understand attribute creation in __new__,
    # there is no way to predict the value type if the enum class has a
    # custom implementation
    if _implements_new(info):
        return ctx.default_attr_type

    stnode = info.get(enum_field_name)
    if stnode is None:
        return ctx.default_attr_type

    underlying_type = _infer_value_type_with_auto_fallback(
        ctx, get_proper_type(stnode.type))
    if underlying_type is None:
        return ctx.default_attr_type

    return underlying_type
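
A hedged, user-level sketch of the refinement described in the docstring: with this plugin mypy types each member's '.value' from the assigned value rather than as Any, and a member of unknown identity still gets a precise type when all members share one value type. The enum names are illustrative only.

import enum


class Mixed(enum.Enum):
    FOO = 1
    BAR = "two"


class Port(enum.Enum):
    HTTP = 80
    HTTPS = 443


Mixed.FOO.value   # refined to int (looked up from the assignment to FOO)
Mixed.BAR.value   # refined to str


def port_number(p: Port) -> int:
    # The member is unknown here, but every Port value is an int,
    # so '.value' is still inferred as int.
    return p.value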
Example #8
0
def relationship_hook(ctx: FunctionContext) -> Type:
    """Support basic use cases for relationships.

    Examples:
        from sqlalchemy.orm import relationship

        from one import OneModel
        if TYPE_CHECKING:
            from other import OtherModel

        class User(Base):
            __tablename__ = 'users'
            id = Column(Integer(), primary_key=True)
            one = relationship(OneModel)
            other = relationship("OtherModel")

    This also tries to infer the type argument for 'RelationshipProperty'
    using the 'uselist' flag.
    """
    assert isinstance(ctx.default_return_type, Instance)  # type: ignore[misc]
    original_type_arg = ctx.default_return_type.args[0]
    has_annotation = not isinstance(get_proper_type(original_type_arg),
                                    UninhabitedType)

    arg = get_argument_by_name(ctx, 'argument')
    arg_type = get_proper_type(get_argtype_by_name(ctx, 'argument'))

    uselist_arg = get_argument_by_name(ctx, 'uselist')

    if isinstance(arg, StrExpr):
        name = arg.value
        sym = None  # type: Optional[SymbolTableNode]
        try:
            # Private API for local lookup, but probably needs to be public.
            sym = ctx.api.lookup_qualified(name)  # type: ignore
        except (KeyError, AssertionError):
            pass
        if sym and isinstance(sym.node, TypeInfo):
            new_arg = fill_typevars_with_any(sym.node)  # type: Type
        else:
            ctx.api.fail('Cannot find model "{}"'.format(name), ctx.context)
            # TODO: Add note() to public API.
            ctx.api.note(
                'Only imported models can be found;'  # type: ignore
                ' use "if TYPE_CHECKING: ..." to avoid import cycles',
                ctx.context)
            new_arg = AnyType(TypeOfAny.from_error)
    else:
        if isinstance(arg_type, CallableType) and arg_type.is_type_obj():
            new_arg = fill_typevars_with_any(arg_type.type_object())
        else:
            # Something complex, stay silent for now.
            new_arg = AnyType(TypeOfAny.special_form)

    # We figured out the model type. Now check if we need to wrap it in Iterable.
    if uselist_arg:
        if parse_bool(uselist_arg):
            new_arg = ctx.api.named_generic_type('typing.Iterable', [new_arg])
    else:
        if has_annotation:
            # If there is an annotation we use it as a source of truth.
            # This will cause false negatives, but it is better than lots of false positives.
            new_arg = original_type_arg

    return Instance(ctx.default_return_type.type, [new_arg],
                    line=ctx.default_return_type.line,
                    column=ctx.default_return_type.column)
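
A hedged extension of the docstring example above, illustrating the 'uselist' handling: with uselist=True the inferred relationship argument is wrapped in typing.Iterable. Model and column names are illustrative, and the snippet assumes the usual declarative setup.

from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Team(Base):
    __tablename__ = 'teams'
    id = Column(Integer(), primary_key=True)
    # uselist=True: the plugin wraps the model type, so this is inferred as
    # RelationshipProperty[Iterable[User]] rather than RelationshipProperty[User].
    members = relationship("User", uselist=True)


class User(Base):
    __tablename__ = 'users'
    id = Column(Integer(), primary_key=True)
    team_id = Column(Integer(), ForeignKey('teams.id'))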
Example #9
0
def re_apply_declarative_assignments(
    cls: ClassDef,
    api: SemanticAnalyzerPluginInterface,
    attributes: List[util.SQLAlchemyAttribute],
) -> None:
    """For multiple class passes, re-apply our left-hand side types as mypy
    seems to reset them in place.

    """
    mapped_attr_lookup = {attr.name: attr for attr in attributes}
    update_cls_metadata = False

    for stmt in cls.defs.body:
        # for a re-apply, all of our statements are AssignmentStmt;
        # @declared_attr calls will have been converted and this
        # currently seems to be preserved by mypy (but who knows if this
        # will change).
        if (isinstance(stmt, AssignmentStmt)
                and isinstance(stmt.lvalues[0], NameExpr)
                and stmt.lvalues[0].name in mapped_attr_lookup
                and isinstance(stmt.lvalues[0].node, Var)):

            left_node = stmt.lvalues[0].node
            python_type_for_type = mapped_attr_lookup[
                stmt.lvalues[0].name].type

            left_node_proper_type = get_proper_type(left_node.type)

            # if we have scanned an UnboundType and now there's a more
            # specific type than UnboundType, call the re-scan so we
            # can get that set up correctly
            if (isinstance(python_type_for_type, UnboundType)
                    and not isinstance(left_node_proper_type, UnboundType)
                    and (isinstance(stmt.rvalue, CallExpr)
                         and isinstance(stmt.rvalue.callee, MemberExpr)
                         and isinstance(stmt.rvalue.callee.expr, NameExpr)
                         and stmt.rvalue.callee.expr.node is not None
                         and stmt.rvalue.callee.expr.node.fullname
                         == NAMED_TYPE_SQLA_MAPPED
                         and stmt.rvalue.callee.name == "_empty_constructor"
                         and isinstance(stmt.rvalue.args[0], CallExpr)
                         and isinstance(stmt.rvalue.args[0].callee, RefExpr))):

                python_type_for_type = (
                    infer.infer_type_from_right_hand_nameexpr(
                        api,
                        stmt,
                        left_node,
                        left_node_proper_type,
                        stmt.rvalue.args[0].callee,
                    ))

                if python_type_for_type is None or isinstance(
                        python_type_for_type, UnboundType):
                    continue

                # update the SQLAlchemyAttribute with the better information
                mapped_attr_lookup[
                    stmt.lvalues[0].name].type = python_type_for_type

                update_cls_metadata = True

            if python_type_for_type is not None:
                left_node.type = api.named_type(NAMED_TYPE_SQLA_MAPPED,
                                                [python_type_for_type])

    if update_cls_metadata:
        util.set_mapped_attributes(cls.info, attributes)
Example #10
0
    def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument:
        """Return this attribute as an argument to __init__."""
        assert self.init
        init_type = self.info[self.name].type

        if self.converter.name:
            # When a converter is set the init_type is overridden by the first argument
            # of the converter method.
            converter = lookup_qualified_stnode(ctx.api.modules,
                                                self.converter.name, True)
            if not converter:
                # The converter may be a local variable. Check there too.
                converter = ctx.api.lookup_qualified(self.converter.name,
                                                     self.info, True)

            # Get the type of the converter.
            converter_type = None
            if converter and isinstance(converter.node, TypeInfo):
                from mypy.checkmember import type_object_type  # To avoid import cycle.
                converter_type = type_object_type(converter.node,
                                                  ctx.api.builtin_type)
            elif converter and isinstance(converter.node, OverloadedFuncDef):
                converter_type = converter.node.type
            elif converter and converter.type:
                converter_type = converter.type

            init_type = None
            converter_type = get_proper_type(converter_type)
            if (isinstance(converter_type, CallableType)
                    and converter_type.arg_types):
                init_type = ctx.api.anal_type(converter_type.arg_types[0])
            elif isinstance(converter_type, Overloaded):
                types = []  # type: List[Type]
                for item in converter_type.items():
                    # Walk the overloads looking for methods that can accept one argument.
                    num_arg_types = len(item.arg_types)
                    if not num_arg_types:
                        continue
                    if num_arg_types > 1 and any(
                            kind == ARG_POS for kind in item.arg_kinds[1:]):
                        continue
                    types.append(item.arg_types[0])
                # Make a union of all the valid types.
                if types:
                    args = UnionType.make_simplified_union(types)
                    init_type = ctx.api.anal_type(args)

            if self.converter.is_attr_converters_optional and init_type:
                # If the converter was attr.converter.optional(type) then add None to
                # the allowed init_type.
                init_type = UnionType.make_union([init_type, NoneType()])

            if not init_type:
                ctx.api.fail("Cannot determine __init__ type from converter",
                             self.context)
                init_type = AnyType(TypeOfAny.from_error)
        elif self.converter.name == '':
            # This means we had a converter but it's not of a type we can infer.
            # Error was shown in _get_converter_name
            init_type = AnyType(TypeOfAny.from_error)

        if init_type is None:
            if ctx.api.options.disallow_untyped_defs:
                # This is a compromise.  If you don't have a type here then the
                # __init__ will be untyped. But since the __init__ is added it's
                # pointing at the decorator. So instead we also show the error in the
                # assignment, which is where you would fix the issue.
                node = self.info[self.name].node
                assert node is not None
                ctx.api.msg.need_annotation_for_var(node, self.context)

            # Convert type not set to Any.
            init_type = AnyType(TypeOfAny.unannotated)

        if self.kw_only:
            arg_kind = ARG_NAMED_OPT if self.has_default else ARG_NAMED
        else:
            arg_kind = ARG_OPT if self.has_default else ARG_POS

        # Attrs removes leading underscores when creating the __init__ arguments.
        return Argument(Var(self.name.lstrip("_"), init_type), init_type, None,
                        arg_kind)
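
A hedged, user-level sketch of the converter handling above: the __init__ parameter's type is taken from the converter's first argument (or a union of first arguments for an overloaded converter), and attr.converters.optional additionally allows None. Names are illustrative only.

import attr


def to_int(value: str) -> int:
    return int(value)


@attr.s
class Point:
    # __init__ parameter 'x' takes its type from to_int's first argument: str
    x = attr.ib(converter=to_int)
    # converters.optional(...) means None is also accepted for 'y'
    y = attr.ib(converter=attr.converters.optional(to_int), default=None)


Point("3")        # ok: str matches the converter's parameter type
Point("3", None)  # ok: None allowed because of converters.optional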
Example #11
0
def get_declaration(expr: BindableExpression) -> Optional[Type]:
    if isinstance(expr, RefExpr) and isinstance(expr.node, Var):
        type = get_proper_type(expr.node.type)
        if not isinstance(type, PartialType):
            return type
    return None
Example #12
0
    def assign_type(self,
                    expr: Expression,
                    type: Type,
                    declared_type: Optional[Type],
                    restrict_any: bool = False) -> None:
        # We should erase last known value in binder, because if we are using it,
        # it means that the target is not final, and therefore can't hold a literal.
        type = remove_instance_last_known_values(type)

        type = get_proper_type(type)
        declared_type = get_proper_type(declared_type)

        if self.type_assignments is not None:
            # We are in a multiassign from union, defer the actual binding,
            # just collect the types.
            self.type_assignments[expr].append((type, declared_type))
            return
        if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)):
            return None
        if not literal(expr):
            return
        self.invalidate_dependencies(expr)

        if declared_type is None:
            # Not sure why this happens.  It seems to mainly happen in
            # member initialization.
            return
        if not is_subtype(type, declared_type):
            # Pretty sure this only happens when there's a type error.

            # Ideally this function wouldn't be called if the
            # expression has a type error, though -- do other kinds of
            # errors cause this function to get called at invalid
            # times?
            return

        enclosing_type = get_proper_type(
            self.most_recent_enclosing_type(expr, type))
        if isinstance(enclosing_type, AnyType) and not restrict_any:
            # If x is Any and y is int, after x = y we do not infer that x is int.
            # This could be changed.
            # Instead, since we narrowed type from Any in a recent frame (probably an
            # isinstance check), but now it is reassigned, we broaden back
            # to Any (which is the most recent enclosing type)
            self.put(expr, enclosing_type)
        # As a special case, when assigning Any to a variable with a
        # declared Optional type that has been narrowed to None,
        # replace all the Nones in the declared Union type with Any.
        # This overrides the normal behavior of ignoring Any assignments to variables
        # in order to prevent false positives.
        # (See discussion in #3526)
        elif (isinstance(type, AnyType)
              and isinstance(declared_type, UnionType) and any(
                  isinstance(get_proper_type(item), NoneType)
                  for item in declared_type.items) and isinstance(
                      get_proper_type(
                          self.most_recent_enclosing_type(expr, NoneType())),
                      NoneType)):
            # Replace any Nones in the union type with Any
            new_items = [
                type if isinstance(get_proper_type(item), NoneType) else item
                for item in declared_type.items
            ]
            self.put(expr, UnionType(new_items))
        elif (isinstance(type, AnyType)
              and not (isinstance(declared_type, UnionType) and any(
                  isinstance(get_proper_type(item), AnyType)
                  for item in declared_type.items))):
            # Assigning an Any value doesn't affect the type to avoid false negatives, unless
            # there is an Any item in a declared union type.
            self.put(expr, declared_type)
        else:
            self.put(expr, type)

        for i in self.try_frames:
            # XXX This should probably not copy the entire frame, but
            # just copy this variable into a single stored frame.
            self.allow_jump(i)
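
A hedged, user-level illustration of two of the assignment rules above; variable names are illustrative. The first follows the 'x is Any and y is int' comment, the second the special case for a declared Optional type narrowed to None.

from typing import Any, Optional


def f(untyped: Any) -> None:
    x: Any = object()
    y = 1
    if isinstance(x, int):
        x = y  # x was narrowed to int here, but its enclosing type is Any, so
               # the binder broadens x back to Any rather than inferring int

    z: Optional[int] = None  # declared Optional[int], currently narrowed to None
    z = untyped              # assigning Any replaces None in the declared union,
                             # so z is treated as Union[Any, int] afterwards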
Example #13
0
def is_tuple(typ: Type) -> bool:
    typ = get_proper_type(typ)
    return (isinstance(typ, TupleType)
            or (isinstance(typ, Instance)
                and typ.type.fullname == 'builtins.tuple'))
Example #14
0
def analyze_class_attribute_access(
    itype: Instance,
    name: str,
    mx: MemberContext,
    override_info: Optional[TypeInfo] = None,
    original_vars: Optional[Sequence[TypeVarLikeType]] = None
) -> Optional[Type]:
    """Analyze access to an attribute on a class object.

    itype is the return type of the class object callable, original_type is the type
    of E in the expression E.var, original_vars are type variables of the class callable
    (for generic classes).
    """
    info = itype.type
    if override_info:
        info = override_info

    node = info.get(name)
    if not node:
        if info.fallback_to_any:
            return AnyType(TypeOfAny.special_form)
        return None

    is_decorated = isinstance(node.node, Decorator)
    is_method = is_decorated or isinstance(node.node, FuncBase)
    if mx.is_lvalue:
        if is_method:
            mx.msg.cant_assign_to_method(mx.context)
        if isinstance(node.node, TypeInfo):
            mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context)

    # If a final attribute was declared on `self` in `__init__`, then it
    # can't be accessed on the class object.
    if node.implicit and isinstance(node.node, Var) and node.node.is_final:
        mx.msg.fail(
            message_registry.CANNOT_ACCESS_FINAL_INSTANCE_ATTR.format(
                node.node.name), mx.context)

    # An assignment to a final attribute on a class object is also always an
    # error, independently of types.
    if mx.is_lvalue and not mx.chk.get_final_context():
        check_final_member(name, info, mx.msg, mx.context)

    if info.is_enum and not (mx.is_lvalue or is_decorated or is_method):
        enum_class_attribute_type = analyze_enum_class_attribute_access(
            itype, name, mx)
        if enum_class_attribute_type:
            return enum_class_attribute_type

    t = node.type
    if t:
        if isinstance(t, PartialType):
            symnode = node.node
            assert isinstance(symnode, Var)
            return mx.chk.handle_partial_var_type(t, mx.is_lvalue, symnode,
                                                  mx.context)

        # Find the class where method/variable was defined.
        if isinstance(node.node, Decorator):
            super_info: Optional[TypeInfo] = node.node.var.info
        elif isinstance(node.node, (Var, SYMBOL_FUNCBASE_TYPES)):
            super_info = node.node.info
        else:
            super_info = None

        # Map the type to how it would look as a defining class. For example:
        #     class C(Generic[T]): ...
        #     class D(C[Tuple[T, S]]): ...
        #     D[int, str].method()
        # Here itype is D[int, str], isuper is C[Tuple[int, str]].
        if not super_info:
            isuper = None
        else:
            isuper = map_instance_to_supertype(itype, super_info)

        if isinstance(node.node, Var):
            assert isuper is not None
            # Check if original variable type has type variables. For example:
            #     class C(Generic[T]):
            #         x: T
            #     C.x  # Error, ambiguous access
            #     C[int].x  # Also an error, since C[int] is same as C at runtime
            if isinstance(t, TypeVarType) or has_type_vars(t):
                # Exception: access on Type[...], including first argument of class methods is OK.
                if not isinstance(get_proper_type(mx.original_type),
                                  TypeType) or node.implicit:
                    if node.node.is_classvar:
                        message = message_registry.GENERIC_CLASS_VAR_ACCESS
                    else:
                        message = message_registry.GENERIC_INSTANCE_VAR_CLASS_ACCESS
                    mx.msg.fail(message, mx.context)

            # Erase non-mapped variables, but keep mapped ones, even if there is an error.
            # In the above example this means that we infer following types:
            #     C.x -> Any
            #     C[int].x -> int
            t = erase_typevars(expand_type_by_instance(t, isuper))

        is_classmethod = (
            (is_decorated and cast(Decorator, node.node).func.is_class)
            or (isinstance(node.node, FuncBase) and node.node.is_class))
        t = get_proper_type(t)
        if isinstance(t, FunctionLike) and is_classmethod:
            t = check_self_arg(t, mx.self_type, False, mx.context, name,
                               mx.msg)
        result = add_class_tvars(t,
                                 isuper,
                                 is_classmethod,
                                 mx.self_type,
                                 original_vars=original_vars)
        if not mx.is_lvalue:
            result = analyze_descriptor_access(result, mx)
        return result
    elif isinstance(node.node, Var):
        mx.not_ready_callback(name, mx.context)
        return AnyType(TypeOfAny.special_form)

    if isinstance(node.node, TypeVarExpr):
        mx.msg.fail(
            message_registry.CANNOT_USE_TYPEVAR_AS_EXPRESSION.format(
                info.name, name), mx.context)
        return AnyType(TypeOfAny.from_error)

    if isinstance(node.node, TypeInfo):
        return type_object_type(node.node, mx.named_type)

    if isinstance(node.node, MypyFile):
        # Reference to a module object.
        return mx.named_type('types.ModuleType')

    if (isinstance(node.node, TypeAlias)
            and isinstance(get_proper_type(node.node.target), Instance)):
        return instance_alias_type(node.node, mx.named_type)

    if is_decorated:
        assert isinstance(node.node, Decorator)
        if node.node.type:
            return node.node.type
        else:
            mx.not_ready_callback(name, mx.context)
            return AnyType(TypeOfAny.from_error)
    else:
        assert isinstance(node.node, FuncBase)
        typ = function_type(node.node, mx.named_type('builtins.function'))
        # Note: if we are accessing class method on class object, the cls argument is bound.
        # Annotated and/or explicit class methods go through other code paths above, for
        # unannotated implicit class methods we do this here.
        if node.node.is_class:
            typ = bind_self(typ, is_classmethod=True)
        return typ
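
A hedged, user-level sketch of the generic-variable rule enforced above: accessing an instance variable whose type still contains type variables through the class object is ambiguous and reported as an error. The names are illustrative, and the accesses are placed under TYPE_CHECKING so the snippet also runs cleanly.

from typing import Generic, TYPE_CHECKING, TypeVar

T = TypeVar('T')


class C(Generic[T]):
    x: T


if TYPE_CHECKING:
    C.x       # error: access to generic instance variable via the class is ambiguous
    C[int].x  # also an error: C[int] is the same object as C at runtime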
Example #15
0
    def visit_type_alias_type(self, t: TypeAliasType) -> T:
        return get_proper_type(t).accept(self)
Example #16
0
    def collect_attributes(self) -> Optional[List[DataclassAttribute]]:
        """Collect all attributes declared in the dataclass and its parents.

        All assignments of the form

          a: SomeType
          b: SomeOtherType = ...

        are collected.
        """
        # First, collect attributes belonging to the current class.
        ctx = self._ctx
        cls = self._ctx.cls
        attrs = []  # type: List[DataclassAttribute]
        known_attrs = set()  # type: Set[str]
        for stmt in cls.defs.body:
            # Any assignment that doesn't use the new type declaration
            # syntax can be ignored out of hand.
            if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
                continue

            # a: int, b: str = 1, 'foo' is not supported syntax so we
            # don't have to worry about it.
            lhs = stmt.lvalues[0]
            if not isinstance(lhs, NameExpr):
                continue

            sym = cls.info.names.get(lhs.name)
            if sym is None:
                # This name is likely blocked by a star import. We don't need to defer because
                # defer() is already called by mark_incomplete().
                continue

            node = sym.node
            if isinstance(node, PlaceholderNode):
                # This node is not ready yet.
                return None
            assert isinstance(node, Var)

            # x: ClassVar[int] is ignored by dataclasses.
            if node.is_classvar:
                continue

            # x: InitVar[int] is turned into x: int and is removed from the class.
            is_init_var = False
            node_type = get_proper_type(node.type)
            if (isinstance(node_type, Instance) and
                    node_type.type.fullname() == 'dataclasses.InitVar'):
                is_init_var = True
                node.type = node_type.args[0]

            has_field_call, field_args = _collect_field_args(stmt.rvalue)

            is_in_init_param = field_args.get('init')
            if is_in_init_param is None:
                is_in_init = True
            else:
                is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

            has_default = False
            # Ensure that something like x: int = field() is rejected
            # after an attribute with a default.
            if has_field_call:
                has_default = 'default' in field_args or 'default_factory' in field_args

            # All other assignments are already type checked.
            elif not isinstance(stmt.rvalue, TempNode):
                has_default = True

            known_attrs.add(lhs.name)
            attrs.append(DataclassAttribute(
                name=lhs.name,
                is_in_init=is_in_init,
                is_init_var=is_init_var,
                has_default=has_default,
                line=stmt.line,
                column=stmt.column,
            ))

        # Next, collect attributes belonging to any class in the MRO
        # as long as those attributes weren't already collected.  This
        # makes it possible to overwrite attributes in subclasses.
        # copy() because we potentially modify all_attrs below and if this code requires debugging
        # we'll have unmodified attrs lying around.
        all_attrs = attrs.copy()
        init_method = cls.info.get_method('__init__')
        for info in cls.info.mro[1:-1]:
            if 'dataclass' not in info.metadata:
                continue

            super_attrs = []
            # Each class depends on the set of attributes in its dataclass ancestors.
            ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname()))

            for data in info.metadata['dataclass']['attributes']:
                name = data['name']  # type: str
                if name not in known_attrs:
                    attr = DataclassAttribute.deserialize(info, data)
                    if attr.is_init_var and isinstance(init_method, FuncDef):
                        # InitVars are removed from classes so, in order for them to be inherited
                        # properly, we need to re-inject them into subclasses' sym tables here.
                        # To do that, we look 'em up from the parents' __init__.  These variables
                        # are subsequently removed from the sym table at the end of
                        # DataclassTransformer.transform.
                        for arg, arg_name in zip(init_method.arguments, init_method.arg_names):
                            if arg_name == attr.name:
                                cls.info.names[attr.name] = SymbolTableNode(MDEF, arg.variable)

                    known_attrs.add(name)
                    super_attrs.append(attr)
                else:
                    # How early in the attribute list an attribute appears is determined by the
                    # reverse MRO, not simply MRO.
                    # See https://docs.python.org/3/library/dataclasses.html#inheritance for
                    # details.
                    (attr,) = [a for a in all_attrs if a.name == name]
                    all_attrs.remove(attr)
                    super_attrs.append(attr)
            all_attrs = super_attrs + all_attrs

        # Ensure that arguments without a default don't follow
        # arguments that have a default.
        found_default = False
        for attr in all_attrs:
            # If we find any attribute that is_in_init but that
            # doesn't have a default after one that does have one,
            # then that's an error.
            if found_default and attr.is_in_init and not attr.has_default:
                # If the issue comes from merging different classes, report it
                # at the class definition point.
                context = (Context(line=attr.line, column=attr.column) if attr in attrs
                           else ctx.cls)
                ctx.api.fail(
                    'Attributes without a default cannot follow attributes with one',
                    context,
                )

            found_default = found_default or (attr.has_default and attr.is_in_init)

        return all_attrs
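
A hedged, user-level sketch of the collection rules above: attributes are gathered across dataclass bases in reverse-MRO order, a subclass may override an inherited attribute in place, and a field without a default may not follow one that has a default. Names are illustrative only.

from dataclasses import dataclass


@dataclass
class Base:
    a: int
    b: str = "x"


@dataclass
class Child(Base):
    b: str = "y"     # overrides the inherited attribute, keeping its position
    c: float = 0.0   # appended after the inherited attributes
    # d: int        # would be an error: a field without a default cannot
    #               # follow fields with defaults


Child(1)             # __init__(a, b="y", c=0.0)
Child(1, "z", 2.5)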
Example #17
0
    def type_to_rtype(self, typ: Optional[Type]) -> RType:
        if typ is None:
            return object_rprimitive

        typ = get_proper_type(typ)
        if isinstance(typ, Instance):
            if typ.type.fullname == 'builtins.int':
                return int_rprimitive
            elif typ.type.fullname == 'builtins.float':
                return float_rprimitive
            elif typ.type.fullname == 'builtins.str':
                return str_rprimitive
            elif typ.type.fullname == 'builtins.bool':
                return bool_rprimitive
            elif typ.type.fullname == 'builtins.list':
                return list_rprimitive
            # Dict subclasses are at least somewhat common and we
            # specifically support them, so make sure that dict operations
            # get optimized on them.
            elif any(cls.fullname == 'builtins.dict' for cls in typ.type.mro):
                return dict_rprimitive
            elif typ.type.fullname == 'builtins.set':
                return set_rprimitive
            elif typ.type.fullname == 'builtins.tuple':
                return tuple_rprimitive  # Varying-length tuple
            elif typ.type in self.type_to_ir:
                inst = RInstance(self.type_to_ir[typ.type])
                # Treat protocols as Union[protocol, object], so that we can do fast
                # method calls in the cases where the protocol is explicitly inherited from
                # and fall back to generic operations when it isn't.
                if typ.type.is_protocol:
                    return RUnion([inst, object_rprimitive])
                else:
                    return inst
            else:
                return object_rprimitive
        elif isinstance(typ, TupleType):
            # Use our unboxed tuples for raw tuples but fall back to
            # being boxed for NamedTuple.
            if typ.partial_fallback.type.fullname == 'builtins.tuple':
                return RTuple([self.type_to_rtype(t) for t in typ.items])
            else:
                return tuple_rprimitive
        elif isinstance(typ, CallableType):
            return object_rprimitive
        elif isinstance(typ, NoneTyp):
            return none_rprimitive
        elif isinstance(typ, UnionType):
            return RUnion([self.type_to_rtype(item) for item in typ.items])
        elif isinstance(typ, AnyType):
            return object_rprimitive
        elif isinstance(typ, TypeType):
            return object_rprimitive
        elif isinstance(typ, TypeVarType):
            # Erase type variable to upper bound.
            # TODO: Erase to union if object has value restriction?
            return self.type_to_rtype(typ.upper_bound)
        elif isinstance(typ, PartialType):
            assert typ.var.type is not None
            return self.type_to_rtype(typ.var.type)
        elif isinstance(typ, Overloaded):
            return object_rprimitive
        elif isinstance(typ, TypedDictType):
            return dict_rprimitive
        elif isinstance(typ, LiteralType):
            return self.type_to_rtype(typ.fallback)
        elif isinstance(typ, (UninhabitedType, UnboundType)):
            # Sure, whatever!
            return object_rprimitive

        # I think we've covered everything that is supposed to
        # actually show up, so anything else is a bug somewhere.
        assert False, 'unexpected type %s' % type(typ)
Example #18
0
def is_implicit_any(typ: Type) -> bool:
    typ = get_proper_type(typ)
    return isinstance(typ, AnyType) and not is_explicit_any(typ)
Example #19
0
def infer_constraints(template: Type, actual: Type,
                      direction: int) -> List[Constraint]:
    """Infer type constraints.

    Match a template type, which may contain type variable references,
    recursively against a type which does not contain (the same) type
    variable references. The result is a list of type constraints of
    form 'T is a supertype/subtype of x', where T is a type variable
    present in the template and x is a type without reference to type
    variables present in the template.

    Assume T and S are type variables. Now the following results can be
    calculated (read as '(template, actual) --> result'):

      (T, X)            -->  T :> X
      (X[T], X[Y])      -->  T <: Y and T :> Y
      ((T, T), (X, Y))  -->  T :> X and T :> Y
      ((T, S), (X, Y))  -->  T :> X and S :> Y
      (X[T], Any)       -->  T <: Any and T :> Any

    The constraints are represented as Constraint objects.
    """
    template = get_proper_type(template)
    actual = get_proper_type(actual)

    # If the template is simply a type variable, emit a Constraint directly.
    # We need to handle this case before handling Unions for two reasons:
    #  1. "T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2",
    #     because T can itself be a union (notably, Union[U1, U2] itself).
    #  2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and
    #     T :> U2", but they are not equivalent to the constraint solver,
    #     which never introduces new Union types (it uses join() instead).
    if isinstance(template, TypeVarType):
        return [Constraint(template.id, direction, actual)]

    # Now handle the case of either template or actual being a Union.
    # For a Union to be a subtype of another type, every item of the Union
    # must be a subtype of that type, so concatenate the constraints.
    if direction == SUBTYPE_OF and isinstance(template, UnionType):
        res = []
        for t_item in template.items:
            res.extend(infer_constraints(t_item, actual, direction))
        return res
    if direction == SUPERTYPE_OF and isinstance(actual, UnionType):
        res = []
        for a_item in actual.items:
            res.extend(infer_constraints(template, a_item, direction))
        return res

    # Now the potential subtype is known not to be a Union or a type
    # variable that we are solving for. In that case, for a Union to
    # be a supertype of the potential subtype, some item of the Union
    # must be a supertype of it.
    if direction == SUBTYPE_OF and isinstance(actual, UnionType):
        # If some of items is not a complete type, disregard that.
        items = simplify_away_incomplete_types(actual.items)
        # We infer constraints eagerly -- try to find constraints for a type
        # variable if possible. This seems to help with some real-world
        # use cases.
        return any_constraints([
            infer_constraints_if_possible(template, a_item, direction)
            for a_item in items
        ],
                               eager=True)
    if direction == SUPERTYPE_OF and isinstance(template, UnionType):
        # When the template is a union, we are okay with leaving some
        # type variables indeterminate. This helps with some special
        # cases, though this isn't very principled.
        return any_constraints([
            infer_constraints_if_possible(t_item, actual, direction)
            for t_item in template.items
        ],
                               eager=False)

    # Remaining cases are handled by ConstraintBuilderVisitor.
    return template.accept(ConstraintBuilderVisitor(actual, direction))
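
A hedged, user-level sketch of what these constraints drive: matching the template List[T] against the actual argument type List[int] in the SUPERTYPE_OF direction produces the constraint T :> int, which is what lets the call below infer T = int. Names are illustrative only.

from typing import List, TypeVar

T = TypeVar('T')


def first(items: List[T]) -> T:
    return items[0]


# Matching template List[T] against actual List[int] yields T :> int,
# so the call is inferred as returning int.
n = first([1, 2, 3])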
Example #20
0
    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        exp_t = get_proper_type(t)
        if (isinstance(exp_t, Instance)
                and exp_t.type.fullname == 'builtins.str'):
            return self.text_type
        return t.copy_modified(args=[a.accept(self) for a in t.args])
Example #21
0
def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint],
                      strict: bool = True) -> List[Optional[Type]]:
    """Solve type line_constraints.

    Return the best type(s) for type variables; each type can be None if the value of the variable
    could not be solved.

    If a variable has no constraints, arbitrarily pick UninhabitedType as
    the value of the type variable if strict=True, and AnyType if
    strict=False.
    """
    # Collect a list of constraints for each type variable.
    cmap = defaultdict(list)  # type: Dict[TypeVarId, List[Constraint]]
    for con in constraints:
        cmap[con.type_var].append(con)

    res = []  # type: List[Optional[Type]]

    # Solve each type variable separately.
    for tvar in vars:
        bottom = None  # type: Optional[Type]
        top = None  # type: Optional[Type]
        candidate = None  # type: Optional[Type]

        # Process each constraint separately, and calculate the lower and upper
        # bounds based on constraints. Note that we assume that the constraint
        # targets do not have constraint references.
        for c in cmap.get(tvar, []):
            if c.op == SUPERTYPE_OF:
                if bottom is None:
                    bottom = c.target
                else:
                    bottom = join_types(bottom, c.target)
            else:
                if top is None:
                    top = c.target
                else:
                    top = meet_types(top, c.target)

        top = get_proper_type(top)
        bottom = get_proper_type(bottom)
        if isinstance(top, AnyType) or isinstance(bottom, AnyType):
            source_any = top if isinstance(top, AnyType) else bottom
            assert isinstance(source_any, AnyType)
            res.append(AnyType(TypeOfAny.from_another_any, source_any=source_any))
            continue
        elif bottom is None:
            if top:
                candidate = top
            else:
                # No constraints for type variable -- 'UninhabitedType' is the most specific type.
                if strict:
                    candidate = UninhabitedType()
                    candidate.ambiguous = True
                else:
                    candidate = AnyType(TypeOfAny.special_form)
        elif top is None:
            candidate = bottom
        elif is_subtype(bottom, top):
            candidate = bottom
        else:
            candidate = None
        res.append(candidate)

    return res
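
A hedged worked example of the solving loop above: a call that produces two SUPERTYPE_OF constraints for the same variable joins their targets to form the lower bound, and with no upper bound that join becomes the answer. Function and variable names are illustrative only.

from typing import List, TypeVar

T = TypeVar('T')


def pair(a: T, b: T) -> List[T]:
    return [a, b]


# The call produces the constraints T :> int and T :> str. The lower bound is
# join(int, str) == object and there is no upper bound, so T solves to object
# and the call is inferred as List[object].
xs = pair(1, "two")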
Example #22
0
def _scan_declarative_decorator_stmt(
    cls: ClassDef,
    api: SemanticAnalyzerPluginInterface,
    stmt: Decorator,
    cls_metadata: util.DeclClassApplied,
) -> None:
    """Extract mapping information from a @declared_attr in a declarative
    class.

    E.g.::

        @reg.mapped
        class MyClass:
            # ...

            @declared_attr
            def updated_at(cls) -> Column[DateTime]:
                return Column(DateTime)

    Will resolve in mypy as::

        @reg.mapped
        class MyClass:
            # ...

            updated_at: Mapped[Optional[datetime.datetime]]

    """
    for dec in stmt.decorators:
        if (isinstance(dec, (NameExpr, MemberExpr, SymbolNode))
                and names._type_id_for_named_node(dec) is names.DECLARED_ATTR):
            break
    else:
        return

    dec_index = cls.defs.body.index(stmt)

    left_hand_explicit_type: Optional[ProperType] = None

    if isinstance(stmt.func.type, CallableType):
        func_type = stmt.func.type.ret_type
        if isinstance(func_type, UnboundType):
            type_id = names._type_id_for_unbound_type(func_type, cls, api)
        else:
            # this does not seem to occur unless the type argument is
            # incorrect
            return

        if (type_id in {
                names.MAPPED,
                names.RELATIONSHIP,
                names.COMPOSITE_PROPERTY,
                names.MAPPER_PROPERTY,
                names.SYNONYM_PROPERTY,
                names.COLUMN_PROPERTY,
        } and func_type.args):
            left_hand_explicit_type = get_proper_type(func_type.args[0])
        elif type_id is names.COLUMN and func_type.args:
            typeengine_arg = func_type.args[0]
            if isinstance(typeengine_arg, UnboundType):
                sym = api.lookup_qualified(typeengine_arg.name, typeengine_arg)
                if sym is not None and isinstance(sym.node, TypeInfo):
                    if names._has_base_type_id(sym.node, names.TYPEENGINE):
                        left_hand_explicit_type = UnionType([
                            infer._extract_python_type_from_typeengine(
                                api, sym.node, []),
                            NoneType(),
                        ])
                    else:
                        util.fail(
                            api,
                            "Column type should be a TypeEngine "
                            "subclass not '{}'".format(sym.node.fullname),
                            func_type,
                        )

    if left_hand_explicit_type is None:
        # no type on the decorated function.  our option here is to
        # dig into the function body and get the return type, but they
        # should just have an annotation.
        msg = ("Can't infer type from @declared_attr on function '{}';  "
               "please specify a return type from this function that is "
               "one of: Mapped[<python type>], relationship[<target class>], "
               "Column[<TypeEngine>], MapperProperty[<python type>]")
        util.fail(api, msg.format(stmt.var.name), stmt)

        left_hand_explicit_type = AnyType(TypeOfAny.special_form)

    left_node = NameExpr(stmt.var.name)
    left_node.node = stmt.var

    # totally feeling around in the dark here as I don't totally understand
    # the significance of UnboundType.  It seems to be something that is
    # not going to do what's expected when it is applied as the type of
    # an AssignmentStatement.  So do a feeling-around-in-the-dark version
    # of converting it to the regular Instance/TypeInfo/UnionType structures
    # we see everywhere else.
    if isinstance(left_hand_explicit_type, UnboundType):
        left_hand_explicit_type = get_proper_type(
            util._unbound_to_instance(api, left_hand_explicit_type))

    left_node.node.type = api.named_type("__sa_Mapped",
                                         [left_hand_explicit_type])

    # this will ignore the rvalue entirely
    # rvalue = TempNode(AnyType(TypeOfAny.special_form))

    # rewrite the node as:
    # <attr> : Mapped[<typ>] =
    # _sa_Mapped._empty_constructor(lambda: <function body>)
    # the function body is maintained so it gets type checked internally
    column_descriptor = nodes.NameExpr("__sa_Mapped")
    column_descriptor.fullname = "sqlalchemy.orm.attributes.Mapped"
    mm = nodes.MemberExpr(column_descriptor, "_empty_constructor")

    arg = nodes.LambdaExpr(stmt.func.arguments, stmt.func.body)
    rvalue = CallExpr(
        mm,
        [arg],
        [nodes.ARG_POS],
        ["arg1"],
    )

    new_stmt = AssignmentStmt([left_node], rvalue)
    new_stmt.type = left_node.node.type

    cls_metadata.mapped_attr_names.append(
        (left_node.name, left_hand_explicit_type))
    cls.defs.body[dec_index] = new_stmt
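
One detail worth noting in the example above is the for/else idiom used to detect the declared_attr decorator: the else branch runs only if the loop finishes without hitting break. A minimal stand-alone sketch of that pattern, with made-up names:

from typing import List, Optional


def find_declared_attr(decorator_names: List[str]) -> Optional[str]:
    for name in decorator_names:
        if name == "declared_attr":
            break           # found it; the else branch is skipped
    else:
        return None         # loop exhausted without a break: nothing matched
    return name


print(find_declared_attr(["declared_attr", "other"]))  # 'declared_attr'
print(find_declared_attr(["staticmethod"]))            # None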
Example #23
0
    def collect_attributes(self) -> Optional[List[DataclassAttribute]]:
        """Collect all attributes declared in the dataclass and its parents.

        All assignments of the form

          a: SomeType
          b: SomeOtherType = ...

        are collected.
        """
        # First, collect attributes belonging to the current class.
        ctx = self._ctx
        cls = self._ctx.cls
        attrs: List[DataclassAttribute] = []
        known_attrs: Set[str] = set()
        kw_only = _get_decorator_bool_argument(ctx, 'kw_only', False)
        for stmt in cls.defs.body:
            # Any assignment that doesn't use the new type declaration
            # syntax can be ignored out of hand.
            if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
                continue

            # a: int, b: str = 1, 'foo' is not supported syntax so we
            # don't have to worry about it.
            lhs = stmt.lvalues[0]
            if not isinstance(lhs, NameExpr):
                continue

            sym = cls.info.names.get(lhs.name)
            if sym is None:
                # This name is likely blocked by a star import. We don't need to defer because
                # defer() is already called by mark_incomplete().
                continue

            node = sym.node
            if isinstance(node, PlaceholderNode):
                # This node is not ready yet.
                return None
            assert isinstance(node, Var)

            # x: ClassVar[int] is ignored by dataclasses.
            if node.is_classvar:
                continue

            # x: InitVar[int] is turned into x: int and is removed from the class.
            is_init_var = False
            node_type = get_proper_type(node.type)
            if (isinstance(node_type, Instance)
                    and node_type.type.fullname == 'dataclasses.InitVar'):
                is_init_var = True
                node.type = node_type.args[0]

            if self._is_kw_only_type(node_type):
                kw_only = True

            has_field_call, field_args = _collect_field_args(stmt.rvalue, ctx)

            is_in_init_param = field_args.get('init')
            if is_in_init_param is None:
                is_in_init = True
            else:
                is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

            has_default = False
            # Ensure that something like x: int = field() is rejected
            # after an attribute with a default.
            if has_field_call:
                has_default = 'default' in field_args or 'default_factory' in field_args

            # All other assignments are already type checked.
            elif not isinstance(stmt.rvalue, TempNode):
                has_default = True

            if not has_default:
                # Make all non-default attributes implicit because they are de-facto set
                # on self in the generated __init__(), not in the class body.
                sym.implicit = True

            is_kw_only = kw_only
            # Use the kw_only field arg if it is provided. Otherwise use the
            # kw_only value from the decorator parameter.
            field_kw_only_param = field_args.get('kw_only')
            if field_kw_only_param is not None:
                is_kw_only = bool(ctx.api.parse_bool(field_kw_only_param))

            known_attrs.add(lhs.name)
            attrs.append(
                DataclassAttribute(
                    name=lhs.name,
                    is_in_init=is_in_init,
                    is_init_var=is_init_var,
                    has_default=has_default,
                    line=stmt.line,
                    column=stmt.column,
                    type=sym.type,
                    info=cls.info,
                    kw_only=is_kw_only,
                ))

        # Next, collect attributes belonging to any class in the MRO
        # as long as those attributes weren't already collected.  This
        # makes it possible to overwrite attributes in subclasses.
        # copy() because we potentially modify all_attrs below and if this code requires debugging
        # we'll have unmodified attrs lying around.
        all_attrs = attrs.copy()
        for info in cls.info.mro[1:-1]:
            if 'dataclass' not in info.metadata:
                continue

            super_attrs = []
            # Each class depends on the set of attributes in its dataclass ancestors.
            ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

            for data in info.metadata["dataclass"]["attributes"]:
                name: str = data["name"]
                if name not in known_attrs:
                    attr = DataclassAttribute.deserialize(info, data, ctx.api)
                    attr.expand_typevar_from_subtype(ctx.cls.info)
                    known_attrs.add(name)
                    super_attrs.append(attr)
                elif all_attrs:
                    # How early in the attribute list an attribute appears is determined by the
                    # reverse MRO, not simply MRO.
                    # See https://docs.python.org/3/library/dataclasses.html#inheritance for
                    # details.
                    for attr in all_attrs:
                        if attr.name == name:
                            all_attrs.remove(attr)
                            super_attrs.append(attr)
                            break
            all_attrs = super_attrs + all_attrs
            all_attrs.sort(key=lambda a: a.kw_only)

        # Ensure that arguments without a default don't follow
        # arguments that have a default.
        found_default = False
        # Ensure that the KW_ONLY sentinel is only provided once
        found_kw_sentinel = False
        for attr in all_attrs:
            # If we find any attribute that is_in_init, not kw_only, and that
            # doesn't have a default after one that does have one,
            # then that's an error.
            if found_default and attr.is_in_init and not attr.has_default and not attr.kw_only:
                # If the issue comes from merging different classes, report it
                # at the class definition point.
                context = (Context(line=attr.line, column=attr.column)
                           if attr in attrs else ctx.cls)
                ctx.api.fail(
                    'Attributes without a default cannot follow attributes with one',
                    context,
                )

            found_default = found_default or (attr.has_default
                                              and attr.is_in_init)
            if found_kw_sentinel and self._is_kw_only_type(attr.type):
                context = (Context(line=attr.line, column=attr.column)
                           if attr in attrs else ctx.cls)
                ctx.api.fail(
                    'There may not be more than one field with the KW_ONLY type',
                    context,
                )
            found_kw_sentinel = found_kw_sentinel or self._is_kw_only_type(
                attr.type)

        return all_attrs
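
For reference, here is a small dataclass exercising the cases the collector above distinguishes: ClassVar attributes are skipped, InitVar attributes become __init__-only parameters, field(init=False) keeps an attribute out of __init__, and attributes without defaults become implicit. This is ordinary runtime dataclasses usage, not plugin code, and the names are made up.

from dataclasses import InitVar, dataclass, field
from typing import ClassVar


@dataclass
class Order:
    counter: ClassVar[int] = 0                        # skipped: class variable
    item: str                                         # no default -> implicit attribute
    price: float = 0.0                                # has a default
    discount: InitVar[float] = 0.0                    # only an __init__ parameter
    total: float = field(init=False, default=0.0)     # not an __init__ parameter

    def __post_init__(self, discount: float) -> None:
        self.total = self.price * (1 - discount)


print(Order("book", 10.0, 0.1).total)   # 9.0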
Example #24
0
def create_new_manager_class_from_from_queryset_method(
        ctx: DynamicClassDefContext) -> None:
    """
    Insert a new manager class node for '<Name> = <Manager>.from_queryset(<QuerySet>)'
    when the assignment expression lives at module level.
    """
    semanal_api = helpers.get_semanal_api(ctx)

    # Don't redeclare the manager class if we've already defined it.
    manager_node = semanal_api.lookup_current_scope(ctx.name)
    if manager_node and isinstance(manager_node.node, TypeInfo):
        # This is just a deferral run where our work is already finished
        return

    callee = ctx.call.callee
    assert isinstance(callee, MemberExpr)
    assert isinstance(callee.expr, RefExpr)

    base_manager_info = callee.expr.node
    if base_manager_info is None:
        if not semanal_api.final_iteration:
            semanal_api.defer()
        return

    assert isinstance(base_manager_info, TypeInfo)

    passed_queryset = ctx.call.args[0]
    assert isinstance(passed_queryset, NameExpr)

    derived_queryset_fullname = passed_queryset.fullname
    if derived_queryset_fullname is None:
        # In some cases, due to the way the semantic analyzer works, only passed_queryset.name is available.
        # But it should be analyzed again, so this isn't a problem.
        return

    base_manager_instance = fill_typevars(base_manager_info)
    assert isinstance(base_manager_instance, Instance)
    new_manager_info = semanal_api.basic_new_typeinfo(
        ctx.name,
        basetype_or_fallback=base_manager_instance,
        line=ctx.call.line)

    sym = semanal_api.lookup_fully_qualified_or_none(derived_queryset_fullname)
    assert sym is not None
    if sym.node is None:
        if not semanal_api.final_iteration:
            semanal_api.defer()
        else:
            # inherit from Any to prevent false-positives, if queryset class cannot be resolved
            new_manager_info.fallback_to_any = True
        return

    derived_queryset_info = sym.node
    assert isinstance(derived_queryset_info, TypeInfo)

    new_manager_info.line = ctx.call.line
    new_manager_info.type_vars = base_manager_info.type_vars
    new_manager_info.defn.type_vars = base_manager_info.defn.type_vars
    new_manager_info.defn.line = ctx.call.line
    new_manager_info.metaclass_type = (
        new_manager_info.calculate_metaclass_type())
    # Stash the queryset fullname which was passed to .from_queryset
    # So that our 'resolve_manager_method' attribute hook can fetch the method from that QuerySet class
    new_manager_info.metadata["django"] = {
        "from_queryset_manager": derived_queryset_fullname
    }

    if len(ctx.call.args) > 1:
        expr = ctx.call.args[1]
        assert isinstance(expr, StrExpr)
        custom_manager_generated_name = expr.value
    else:
        custom_manager_generated_name = base_manager_info.name + "From" + derived_queryset_info.name

    custom_manager_generated_fullname = ".".join(
        ["django.db.models.manager", custom_manager_generated_name])
    base_manager_info.metadata.setdefault("from_queryset_managers", {})
    base_manager_info.metadata["from_queryset_managers"][
        custom_manager_generated_fullname] = new_manager_info.fullname

    # So that the plugin will reparameterize the manager when it is constructed inside of a Model definition
    helpers.add_new_manager_base(semanal_api, new_manager_info.fullname)

    class_def_context = ClassDefContext(cls=new_manager_info.defn,
                                        reason=ctx.call,
                                        api=semanal_api)
    self_type = fill_typevars(new_manager_info)
    assert isinstance(self_type, Instance)

    # Collect all methods defined in the MRO before django.db.models.query.QuerySet
    # and add them as class members
    for class_mro_info in derived_queryset_info.mro:
        if class_mro_info.fullname == fullnames.QUERYSET_CLASS_FULLNAME:
            break
        for name, sym in class_mro_info.names.items():
            if not isinstance(sym.node, (FuncDef, Decorator)):
                continue
            # Insert the queryset method name as a class member. Note that the type of
            # the method is set as Any. Figuring out the type is the job of the
            # 'resolve_manager_method' attribute hook, which comes later.
            #
            # class BaseManagerFromMyQuerySet(BaseManager):
            #    queryset_method: Any = ...
            #
            helpers.add_new_sym_for_info(
                new_manager_info,
                name=name,
                sym_type=AnyType(TypeOfAny.special_form),
            )

    # we need to copy all methods in MRO before django.db.models.query.QuerySet
    # Gather names of all BaseManager methods
    manager_method_names = []
    for manager_mro_info in new_manager_info.mro:
        if manager_mro_info.fullname == fullnames.BASE_MANAGER_CLASS_FULLNAME:
            for name, sym in manager_mro_info.names.items():
                manager_method_names.append(name)

    # Copy/alter all methods in common between BaseManager/QuerySet over to the new manager if their return type is
    # the QuerySet's self-type. Alter the return type to be the custom queryset, parameterized by the manager's model
    # type variable.
    for class_mro_info in derived_queryset_info.mro:
        if class_mro_info.fullname != fullnames.QUERYSET_CLASS_FULLNAME:
            continue
        for name, sym in class_mro_info.names.items():
            if name not in manager_method_names:
                continue

            if isinstance(sym.node, FuncDef):
                func_node = sym.node
            elif isinstance(sym.node, Decorator):
                func_node = sym.node.func
            else:
                continue

            method_type = func_node.type
            if not isinstance(method_type, CallableType):
                if not semanal_api.final_iteration:
                    semanal_api.defer()
                return None
            original_return_type = method_type.ret_type

            # Skip any method that doesn't return _QS
            original_return_type = get_proper_type(original_return_type)
            if isinstance(original_return_type, UnboundType):
                if original_return_type.name != "_QS":
                    continue
            elif isinstance(original_return_type, TypeVarType):
                if original_return_type.name != "_QS":
                    continue
            else:
                continue

            # Return the custom queryset parameterized by the manager's type vars
            return_type = Instance(derived_queryset_info, self_type.args)

            helpers.copy_method_to_another_class(
                class_def_context,
                self_type,
                new_method_name=name,
                method_node=func_node,
                return_type=return_type,
                original_module_name=class_mro_info.module_name,
            )

    # Insert the new manager (dynamic) class
    assert semanal_api.add_symbol_table_node(
        ctx.name, SymbolTableNode(GDEF,
                                  new_manager_info,
                                  plugin_generated=True))
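
At the user level, the module-level assignment this dynamic-class hook fires on typically looks like the sketch below. The queryset and manager names are hypothetical, and it assumes a project where Django is installed; a model would then declare `objects = BookManager()`.

from django.db import models


class BookQuerySet(models.QuerySet):
    def published(self) -> "BookQuerySet":
        return self.filter(is_published=True)


# The hook above synthesizes a TypeInfo named BookManager for this assignment,
# copies the _QS-returning QuerySet methods onto it, and records the queryset's
# fullname in metadata for the later attribute hook.
BookManager = models.Manager.from_queryset(BookQuerySet)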
Example #25
0
def analyze_descriptor_access(descriptor_type: Type,
                              mx: MemberContext) -> Type:
    """Type check descriptor access.

    Arguments:
        descriptor_type: The type of the descriptor attribute being accessed
            (the type of ``f`` in ``a.f`` when ``f`` is a descriptor).
        mx: The current member access context.
    Return:
        The return type of the appropriate ``__get__`` overload for the descriptor.
    """
    instance_type = get_proper_type(mx.original_type)
    descriptor_type = get_proper_type(descriptor_type)

    if isinstance(descriptor_type, UnionType):
        # Map the access over union types
        return make_simplified_union([
            analyze_descriptor_access(typ, mx) for typ in descriptor_type.items
        ])
    elif not isinstance(descriptor_type, Instance):
        return descriptor_type

    if not descriptor_type.type.has_readable_member('__get__'):
        return descriptor_type

    dunder_get = descriptor_type.type.get_method('__get__')
    if dunder_get is None:
        mx.msg.fail(
            message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(
                descriptor_type), mx.context)
        return AnyType(TypeOfAny.from_error)

    bound_method = analyze_decorator_or_funcbase_access(
        defn=dunder_get,
        itype=descriptor_type,
        info=descriptor_type.type,
        self_type=descriptor_type,
        name='__get__',
        mx=mx)

    typ = map_instance_to_supertype(descriptor_type, dunder_get.info)
    dunder_get_type = expand_type_by_instance(bound_method, typ)

    if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj():
        owner_type = instance_type.items[0].ret_type
        instance_type = NoneType()
    elif isinstance(instance_type, TypeType):
        owner_type = instance_type.item
        instance_type = NoneType()
    else:
        owner_type = instance_type

    callable_name = mx.chk.expr_checker.method_fullname(
        descriptor_type, "__get__")
    dunder_get_type = mx.chk.expr_checker.transform_callee_type(
        callable_name,
        dunder_get_type,
        [
            TempNode(instance_type, context=mx.context),
            TempNode(TypeType.make_normalized(owner_type), context=mx.context)
        ],
        [ARG_POS, ARG_POS],
        mx.context,
        object_type=descriptor_type,
    )

    _, inferred_dunder_get_type = mx.chk.expr_checker.check_call(
        dunder_get_type, [
            TempNode(instance_type, context=mx.context),
            TempNode(TypeType.make_normalized(owner_type), context=mx.context)
        ], [ARG_POS, ARG_POS],
        mx.context,
        object_type=descriptor_type,
        callable_name=callable_name)

    inferred_dunder_get_type = get_proper_type(inferred_dunder_get_type)
    if isinstance(inferred_dunder_get_type, AnyType):
        # check_call failed, and will have reported an error
        return inferred_dunder_get_type

    if not isinstance(inferred_dunder_get_type, CallableType):
        mx.msg.fail(
            message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(
                descriptor_type), mx.context)
        return AnyType(TypeOfAny.from_error)

    return inferred_dunder_get_type.ret_type
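
As a reminder of what is being analyzed here, a typed descriptor in user code looks roughly like the sketch below; the overloads on __get__ are where the return type comes from. The class and attribute names are made up.

from typing import overload


class Positive:
    """Instance access acct.balance routes through __get__, so its type is
    whatever the matching __get__ overload returns."""

    def __set_name__(self, owner: type, name: str) -> None:
        self.name = "_" + name

    @overload
    def __get__(self, instance: None, owner: type) -> "Positive": ...
    @overload
    def __get__(self, instance: object, owner: type) -> int: ...

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        return getattr(instance, self.name)

    def __set__(self, instance: object, value: int) -> None:
        if value <= 0:
            raise ValueError("must be positive")
        setattr(instance, self.name, value)


class Account:
    balance = Positive()


acct = Account()
acct.balance = 10
print(acct.balance)     # 10: instance access uses the int-returning overload
print(Account.balance)  # the descriptor object itself: the None-instance overload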
Example #26
0
def erase_to_union_or_bound(typ: TypeVarType) -> ProperType:
    if typ.values:
        return make_simplified_union(typ.values)
    else:
        return get_proper_type(typ.upper_bound)
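
In terms of ordinary type variable declarations, this erasure behaves as sketched below (illustrative only; the function itself operates on mypy's internal TypeVarType objects):

from typing import Sized, TypeVar, Union

AnyStrLike = TypeVar("AnyStrLike", bytes, str)  # has values -> erases to Union[bytes, str]
TSized = TypeVar("TSized", bound=Sized)         # no values  -> erases to its bound, Sized
TPlain = TypeVar("TPlain")                      # default upper bound -> erases to object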
Example #27
0
def analyze_var(name: str,
                var: Var,
                itype: Instance,
                info: TypeInfo,
                mx: MemberContext,
                *,
                implicit: bool = False) -> Type:
    """Analyze access to an attribute via a Var node.

    This is conceptually part of analyze_member_access and the arguments are similar.

    itype is the class object in which var is defined
    original_type is the type of E in the expression E.var
    if implicit is True, the original Var was created as an assignment to self
    """
    # Found a member variable.
    itype = map_instance_to_supertype(itype, var.info)
    typ = var.type
    if typ:
        if isinstance(typ, PartialType):
            return mx.chk.handle_partial_var_type(typ, mx.is_lvalue, var,
                                                  mx.context)
        if mx.is_lvalue and var.is_property and not var.is_settable_property:
            # TODO allow setting attributes in subclass (although it is probably an error)
            mx.msg.read_only_property(name, itype.type, mx.context)
        if mx.is_lvalue and var.is_classvar:
            mx.msg.cant_assign_to_classvar(name, mx.context)
        t = get_proper_type(expand_type_by_instance(typ, itype))
        result: Type = t
        typ = get_proper_type(typ)
        if var.is_initialized_in_class and isinstance(
                typ, FunctionLike) and not typ.is_type_obj():
            if mx.is_lvalue:
                if var.is_property:
                    if not var.is_settable_property:
                        mx.msg.read_only_property(name, itype.type, mx.context)
                else:
                    mx.msg.cant_assign_to_method(mx.context)

            if not var.is_staticmethod:
                # Class-level function objects and classmethods become bound methods:
                # the former to the instance, the latter to the class.
                functype = typ
                # Use meet to narrow original_type to the dispatched type.
                # For example, assume
                # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A)
                # * B.f: Callable[[B1], None] where B1 <: B (maybe B1 == B)
                # * x: Union[A1, B1]
                # In `x.f`, when checking `x` against A1 we assume x is compatible with A
                # and similarly for B1 when checking against B
                dispatched_type = meet.meet_types(mx.original_type, itype)
                signature = freshen_function_type_vars(functype)
                signature = check_self_arg(signature, dispatched_type,
                                           var.is_classmethod, mx.context,
                                           name, mx.msg)
                signature = bind_self(signature, mx.self_type,
                                      var.is_classmethod)
                expanded_signature = get_proper_type(
                    expand_type_by_instance(signature, itype))
                freeze_type_vars(expanded_signature)
                if var.is_property:
                    # A property cannot have an overloaded type => the cast is fine.
                    assert isinstance(expanded_signature, CallableType)
                    result = expanded_signature.ret_type
                else:
                    result = expanded_signature
    else:
        if not var.is_ready:
            mx.not_ready_callback(var.name, mx.context)
        # Implicit 'Any' type.
        result = AnyType(TypeOfAny.special_form)
    fullname = '{}.{}'.format(var.info.fullname, name)
    hook = mx.chk.plugin.get_attribute_hook(fullname)
    if result and not mx.is_lvalue and not implicit:
        result = analyze_descriptor_access(result, mx)
    if hook:
        result = hook(
            AttributeContext(get_proper_type(mx.original_type), result,
                             mx.context, mx.chk))
    return result
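
The union-dispatch comment in the middle of this example corresponds to user code along these lines (hypothetical classes): when x.f is checked, A.f is checked with x narrowed toward A and B.f with x narrowed toward B, via the meet described above.

from typing import Union


class A:
    def f(self) -> None: ...

class A1(A): ...

class B:
    def f(self) -> None: ...

class B1(B): ...


def call(x: Union[A1, B1]) -> None:
    # Binding self for A.f uses meet(Union[A1, B1], A), roughly A1,
    # and binding self for B.f uses meet(Union[A1, B1], B), roughly B1.
    x.f()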
Example #28
0
def analyze_descriptor_access(instance_type: Type, descriptor_type: Type,
                              builtin_type: Callable[[str], Instance],
                              msg: MessageBuilder, context: Context, *,
                              chk: 'mypy.checker.TypeChecker') -> Type:
    """Type check descriptor access.

    Arguments:
        instance_type: The type of the instance on which the descriptor
            attribute is being accessed (the type of ``a`` in ``a.f`` when
            ``f`` is a descriptor).
        descriptor_type: The type of the descriptor attribute being accessed
            (the type of ``f`` in ``a.f`` when ``f`` is a descriptor).
        context: The node defining the context of this inference.
    Return:
        The return type of the appropriate ``__get__`` overload for the descriptor.
    """
    instance_type = get_proper_type(instance_type)
    descriptor_type = get_proper_type(descriptor_type)

    if isinstance(descriptor_type, UnionType):
        # Map the access over union types
        return make_simplified_union([
            analyze_descriptor_access(instance_type,
                                      typ,
                                      builtin_type,
                                      msg,
                                      context,
                                      chk=chk) for typ in descriptor_type.items
        ])
    elif not isinstance(descriptor_type, Instance):
        return descriptor_type

    if not descriptor_type.type.has_readable_member('__get__'):
        return descriptor_type

    dunder_get = descriptor_type.type.get_method('__get__')

    if dunder_get is None:
        msg.fail(
            message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(
                descriptor_type), context)
        return AnyType(TypeOfAny.from_error)

    function = function_type(dunder_get, builtin_type('builtins.function'))
    bound_method = bind_self(function, descriptor_type)
    typ = map_instance_to_supertype(descriptor_type, dunder_get.info)
    dunder_get_type = expand_type_by_instance(bound_method, typ)

    if isinstance(instance_type, FunctionLike) and instance_type.is_type_obj():
        owner_type = instance_type.items()[0].ret_type
        instance_type = NoneType()
    elif isinstance(instance_type, TypeType):
        owner_type = instance_type.item
        instance_type = NoneType()
    else:
        owner_type = instance_type

    _, inferred_dunder_get_type = chk.expr_checker.check_call(
        dunder_get_type, [
            TempNode(instance_type),
            TempNode(TypeType.make_normalized(owner_type))
        ], [ARG_POS, ARG_POS], context)

    inferred_dunder_get_type = get_proper_type(inferred_dunder_get_type)
    if isinstance(inferred_dunder_get_type, AnyType):
        # check_call failed, and will have reported an error
        return inferred_dunder_get_type

    if not isinstance(inferred_dunder_get_type, CallableType):
        msg.fail(
            message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(
                descriptor_type), context)
        return AnyType(TypeOfAny.from_error)

    return inferred_dunder_get_type.ret_type
Example #29
0
def is_enum_overlapping_union(x: ProperType, y: ProperType) -> bool:
    """Return True if x is an Enum, and y is an Union with at least one Literal from x"""
    return (isinstance(x, Instance) and x.type.is_enum
            and isinstance(y, UnionType) and any(
                isinstance(p, LiteralType) and x.type == p.fallback.type
                for p in (get_proper_type(z) for z in y.relevant_items())))
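
A pair of annotations for which this predicate holds, written in ordinary typing syntax (the function itself receives mypy's internal ProperType values):

import enum
from typing import Literal, Union


class Color(enum.Enum):
    RED = 1
    BLUE = 2


# For x: Color and y: Union[Literal[Color.RED], int], the predicate is True:
# x is an enum Instance and y's relevant items include a Literal whose
# fallback type is Color.
ColorOrCode = Union[Literal[Color.RED], int]
print(ColorOrCode)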