Example #1
def _infer_stmts(stmts, context, frame=None):
    """Return an iterator on statements inferred by each statement in *stmts*."""
    inferred = False
    if context is not None:
        name = context.lookupname
        context = context.clone()
    else:
        name = None
        context = InferenceContext()

    for stmt in stmts:
        if stmt is Uninferable:
            yield stmt
            inferred = True
            continue
        context.lookupname = stmt._infer_name(frame, name)
        try:
            for inf in stmt.infer(context=context):
                yield inf
                inferred = True
        except NameInferenceError:
            continue
        except InferenceError:
            yield Uninferable
            inferred = True
    if not inferred:
        raise InferenceError(
            "Inference failed for all members of {stmts!r}.",
            stmts=stmts,
            frame=frame,
            context=context,
        )
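
For orientation, a small sketch using astroid's public API; inferring a Name triggers a scope lookup whose results are fed through _infer_stmts (assumes astroid is installed; the printed value is the expected one):

import astroid

node = astroid.extract_node(
    """
    x = 1
    x  #@
    """
)
# The Name 'x' resolves to the assignment above and infers to Const(1).
print([inferred.value for inferred in node.inferred()])  # expected: [1]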
Example #2
    def igetattr(self, name, context=None):
        """inferred getattr"""
        if not context:
            context = InferenceContext()
        try:
            context.lookupname = name
            # avoid recursively inferring the same attr on the same class
            if context.push(self._proxied):
                raise InferenceError(
                    message="Cannot infer the same attribute again",
                    node=self,
                    context=context,
                )

            # XXX frame should be self._proxied, or not ?
            get_attr = self.getattr(name, context, lookupclass=False)
            yield from _infer_stmts(self._wrap_attr(get_attr, context),
                                    context,
                                    frame=self)
        except AttributeInferenceError:
            try:
                # fallback to class.igetattr since it has some logic to handle
                # descriptors
                # But only if the _proxied is the Class.
                if self._proxied.__class__.__name__ != "ClassDef":
                    raise
                attrs = self._proxied.igetattr(name,
                                               context,
                                               class_context=False)
                yield from self._wrap_attr(attrs, context)
            except AttributeInferenceError as error:
                raise InferenceError(**vars(error)) from error
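
A quick sketch of igetattr on an inferred Instance (assumes astroid is installed; the value in the comment is what one would expect):

import astroid

node = astroid.extract_node(
    """
    class A:
        attr = 42
    A()  #@
    """
)
instance = next(node.infer())  # an astroid Instance of A
print(next(instance.igetattr("attr")).value)  # expected: 42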
Example #3
def _infer_augassign(self, context=None):
    """Inference logic for augmented binary operations."""
    if context is None:
        context = InferenceContext()

    rhs_context = context.clone()

    lhs_iter = self.target.infer_lhs(context=context)
    rhs_iter = self.value.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
        if any(value is util.Uninferable for value in (rhs, lhs)):
            # Don't know how to process this.
            yield util.Uninferable
            return

        try:
            yield from _infer_binary_operation(
                left=lhs,
                right=rhs,
                binary_opnode=self,
                context=context,
                flow_factory=_get_aug_flow,
            )
        except _NonDeducibleTypeHierarchy:
            yield util.Uninferable
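
A short sketch that should exercise this logic by inferring the AugAssign node directly (assumes astroid 2.x registers this as the AugAssign inference; the output is hedged in the comment):

import astroid

# With no '#@' marker, extract_node returns the last statement: the AugAssign node.
aug = astroid.extract_node(
    """
    x = 1
    x += 2
    """
)
print(next(aug.infer()).value)  # expected: 3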
Example #4
def _infer_binop(
    self: nodes.BinOp,
    context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
    """Binary operation inference logic."""
    left = self.left
    right = self.right

    # we use two separate contexts for evaluating lhs and rhs because
    # 1. evaluating lhs may leave some undesired entries in context.path
    #    which may not let us infer right value of rhs
    context = context or InferenceContext()
    lhs_context = copy_context(context)
    rhs_context = copy_context(context)
    lhs_iter = left.infer(context=lhs_context)
    rhs_iter = right.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
        if any(value is util.Uninferable for value in (rhs, lhs)):
            # Don't know how to process this.
            yield util.Uninferable
            return

        try:
            yield from _infer_binary_operation(lhs, rhs, self, context,
                                               _get_binop_flow)
        except _NonDeducibleTypeHierarchy:
            yield util.Uninferable
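
Binary operations between constants infer end to end; a minimal sketch (assumes astroid is installed):

import astroid

node = astroid.extract_node("3 * 7  #@")
print(next(node.infer()).value)  # expected: 21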
Example #5
def infer_import_from(
    self: nodes.ImportFrom,
    context: InferenceContext | None = None,
    asname: bool = True,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """infer a ImportFrom node: return the imported module/object"""
    context = context or InferenceContext()
    name = context.lookupname
    if name is None:
        raise InferenceError(node=self, context=context)
    if asname:
        try:
            name = self.real_name(name)
        except AttributeInferenceError as exc:
            # See https://github.com/PyCQA/pylint/issues/4692
            raise InferenceError(node=self, context=context) from exc
    try:
        module = self.do_import_module()
    except AstroidBuildingError as exc:
        raise InferenceError(node=self, context=context) from exc

    try:
        context = copy_context(context)
        context.lookupname = name
        stmts = module.getattr(name, ignore_locals=module is self.root())
        return bases._infer_stmts(stmts, context)
    except AttributeInferenceError as error:
        raise InferenceError(str(error),
                             target=self,
                             attribute=name,
                             context=context) from error
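
A small sketch of how this surfaces through public inference of an imported name (the qname in the comment is the expected result, not guaranteed across Python versions):

import astroid

node = astroid.extract_node(
    """
    from collections import OrderedDict
    OrderedDict  #@
    """
)
print(next(node.infer()).qname())  # expected: 'collections.OrderedDict'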
Example #6
def _infer_augassign(
    self: nodes.AugAssign,
    context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
    """Inference logic for augmented binary operations."""
    context = context or InferenceContext()

    rhs_context = context.clone()

    lhs_iter = self.target.infer_lhs(context=context)
    rhs_iter = self.value.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
        if any(value is util.Uninferable for value in (rhs, lhs)):
            # Don't know how to process this.
            yield util.Uninferable
            return

        try:
            yield from _infer_binary_operation(
                left=lhs,
                right=rhs,
                binary_opnode=self,
                context=context,
                flow_factory=_get_aug_flow,
            )
        except _NonDeducibleTypeHierarchy:
            yield util.Uninferable
Example #7
def infer_ifexp(self, context=None):
    """Support IfExp inference

    If we can't infer the truthiness of the condition, we default
    to inferring both branches. Otherwise, we infer either branch
    depending on the condition.
    """
    both_branches = False
    # We use two separate contexts for evaluating lhs and rhs because
    # evaluating lhs may leave some undesired entries in context.path
    # which may not let us infer right value of rhs.

    context = context or InferenceContext()
    lhs_context = copy_context(context)
    rhs_context = copy_context(context)
    try:
        test = next(self.test.infer(context=context.clone()))
    except (InferenceError, StopIteration):
        both_branches = True
    else:
        if test is not util.Uninferable:
            if test.bool_value():
                yield from self.body.infer(context=lhs_context)
            else:
                yield from self.orelse.infer(context=rhs_context)
        else:
            both_branches = True
    if both_branches:
        yield from self.body.infer(context=lhs_context)
        yield from self.orelse.infer(context=rhs_context)
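
Since the condition below is a literal True, only the body branch should be inferred; a brief sketch (assumes astroid is installed):

import astroid

node = astroid.extract_node("1 if True else 2  #@")
print([const.value for const in node.inferred()])  # expected: [1], the body branch only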
Example #8
    def bool_value(self, context=None):
        """Infer the truth value for an Instance

        The truth value of an instance is determined by these conditions:

           * if it implements __bool__ on Python 3 or __nonzero__
             on Python 2, then its bool value will be determined by
             calling this special method and checking its result.
           * when this method is not defined, __len__() is called, if it
             is defined, and the object is considered true if its result is
             nonzero. If a class defines neither __len__() nor __bool__(),
             all its instances are considered true.
        """
        context = context or InferenceContext()
        context.boundnode = self

        try:
            result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD,
                                                context)
        except (InferenceError, AttributeInferenceError):
            # Fallback to __len__.
            try:
                result = _infer_method_result_truth(self, "__len__", context)
            except (AttributeInferenceError, InferenceError):
                return True
        return result
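
A short sketch of the __len__ fallback described in the docstring (assumes astroid is installed; the result in the comment is the expected one):

import astroid

node = astroid.extract_node(
    """
    class Empty:
        def __len__(self):
            return 0
    Empty()  #@
    """
)
instance = next(node.infer())
print(instance.bool_value())  # expected: False, via the __len__ fallback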
Example #9
def test_max_inferred_for_complicated_class_hierarchy() -> None:
    """Regression test for a crash reported in https://github.com/PyCQA/pylint/issues/5679.

    The class hierarchy of 'sqlalchemy' is so intricate that it becomes uninferable with
    the standard max_inferred of 100. We used to crash when this happened.
    """
    # Create module and get relevant nodes
    module = resources.build_file(
        str(resources.RESOURCE_PATH / "max_inferable_limit_for_classes" / "main.py")
    )
    init_attr_node = module.body[-1].body[0].body[0].value.func
    init_object_node = module.body[-1].mro()[-1]["__init__"]
    super_node = next(init_attr_node.expr.infer())

    # Arbitrarily limit the max number of inferred nodes per context
    InferenceContext.max_inferred = -1
    context = InferenceContext()

    # Try to infer 'object.__init__'; because of the limit this is impossible
    for inferred in bases._infer_stmts([init_object_node], context, frame=super):
        assert inferred == Uninferable

    # Reset inference limit
    InferenceContext.max_inferred = 100
    # Check that we don't crash on a previously uninferable node
    assert super_node.getattr("__init__", context=context)[0] == Uninferable
Example #10
    def wrapped(node, context=None, _func=func, **kwargs):
        """wrapper function handling context"""
        if context is None:
            context = InferenceContext()
        if context.push(node):
            return

        yielded = set()

        for res in _func(node, context, **kwargs):
            # unproxy only true instance, not const, tuple, dict...
            if res.__class__.__name__ == "Instance":
                ares = res._proxied
            else:
                ares = res
            if ares not in yielded:
                yield res
                yielded.add(ares)
Example #11
    def from_call(cls, call_node, context: InferenceContext | None = None):
        """Get a CallSite object from the given Call node.

        context will be used to force a single inference path.
        """

        # Determine the callcontext from the given `context` object if any.
        context = context or InferenceContext()
        callcontext = CallContext(call_node.args, call_node.keywords)
        return cls(callcontext, context=context)
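
A brief sketch building a CallSite from a Call node (assumes CallSite lives in astroid.arguments, as in astroid 2.x):

import astroid
from astroid.arguments import CallSite

call = astroid.extract_node("max(3, 5, key=abs)  #@")
site = CallSite.from_call(call)
print(len(site.positional_arguments), list(site.keyword_arguments))  # expected: 2 ['key']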
Example #12
def class_instance_as_index(node):
    """Get the value as an index for the given instance.

    If an instance provides an __index__ method, then it can
    be used in some scenarios where an integer is expected,
    for instance when multiplying or subscripting a list.
    """
    context = InferenceContext()
    try:
        for inferred in node.igetattr("__index__", context=context):
            if not isinstance(inferred, bases.BoundMethod):
                continue

            context.boundnode = node
            context.callcontext = CallContext(args=[], callee=inferred)
            for result in inferred.infer_call_result(node, context=context):
                if isinstance(result, nodes.Const) and isinstance(
                        result.value, int):
                    return result
    except InferenceError:
        pass
    return None
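
A sketch of the scenario the docstring mentions: multiplying a list by an instance that provides __index__ (assumes astroid is installed; expected result in the comment):

import astroid

node = astroid.extract_node(
    """
    class Idx:
        def __index__(self):
            return 2
    [7] * Idx()  #@
    """
)
print(next(node.infer()).as_string())  # expected: [7, 7]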
Example #13
def infer_attribute(self, context=None):
    """infer an Attribute node by using getattr on the associated object"""
    for owner in self.expr.infer(context):
        if owner is util.Uninferable:
            yield owner
            continue

        if not context:
            context = InferenceContext()

        old_boundnode = context.boundnode
        try:
            context.boundnode = owner
            yield from owner.igetattr(self.attrname, context)
        except (
            AttributeInferenceError,
            InferenceError,
            AttributeError,
        ):
            pass
        finally:
            context.boundnode = old_boundnode
    return dict(node=self, context=context)
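
Attribute inference as seen from the public API; a minimal sketch (assumes astroid is installed):

import astroid

node = astroid.extract_node(
    """
    class Config:
        retries = 3
    Config().retries  #@
    """
)
print(next(node.infer()).value)  # expected: 3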
Example #14
    def _unpack_keywords(self, keywords, context=None):
        values = {}
        context = context or InferenceContext()
        context.extra_context = self.argument_context_map
        for name, value in keywords:
            if name is None:
                # Then it's an unpacking operation (**)
                try:
                    inferred = next(value.infer(context=context))
                except InferenceError:
                    values[name] = Uninferable
                    continue
                except StopIteration:
                    continue

                if not isinstance(inferred, nodes.Dict):
                    # Not something we can work with.
                    values[name] = Uninferable
                    continue

                for dict_key, dict_value in inferred.items:
                    try:
                        dict_key = next(dict_key.infer(context=context))
                    except InferenceError:
                        values[name] = Uninferable
                        continue
                    except StopIteration:
                        continue
                    if not isinstance(dict_key, nodes.Const):
                        values[name] = Uninferable
                        continue
                    if not isinstance(dict_key.value, str):
                        values[name] = Uninferable
                        continue
                    if dict_key.value in values:
                        # The name is already in the dictionary
                        values[dict_key.value] = Uninferable
                        self.duplicated_keywords.add(dict_key.value)
                        continue
                    values[dict_key.value] = dict_value
            else:
                values[name] = value
        return values
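
Keyword unpacking is observable through CallSite (again assuming the astroid.arguments import path); a short sketch:

import astroid
from astroid.arguments import CallSite

call = astroid.extract_node("dict(a=1, **{'b': 2})  #@")
site = CallSite.from_call(call)
print(sorted(site.keyword_arguments))  # expected: ['a', 'b']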
Example #15
def infer_import(
    self: nodes.Import,
    context: InferenceContext | None = None,
    asname: bool = True,
    **kwargs: Any,
) -> Generator[nodes.Module, None, None]:
    """infer an Import node: return the imported module/object"""
    context = context or InferenceContext()
    name = context.lookupname
    if name is None:
        raise InferenceError(node=self, context=context)

    try:
        if asname:
            yield self.do_import_module(self.real_name(name))
        else:
            yield self.do_import_module(name)
    except AstroidBuildingError as exc:
        raise InferenceError(node=self, context=context) from exc
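
A minimal sketch of inferring a name bound by a plain import (assumes astroid is installed):

import astroid

node = astroid.extract_node(
    """
    import collections
    collections  #@
    """
)
print(next(node.infer()).name)  # expected: 'collections', as an astroid Module node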
Example #16
def _object_type(node, context=None):
    astroid_manager = manager.AstroidManager()
    builtins = astroid_manager.builtins_module
    context = context or InferenceContext()

    for inferred in node.infer(context=context):
        if isinstance(inferred, scoped_nodes.ClassDef):
            if inferred.newstyle:
                metaclass = inferred.metaclass(context=context)
                if metaclass:
                    yield metaclass
                    continue
            yield builtins.getattr("type")[0]
        elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
            yield _function_type(inferred, builtins)
        elif isinstance(inferred, scoped_nodes.Module):
            yield _build_proxy_class("module", builtins)
        else:
            yield inferred._proxied
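
As far as I can tell, helpers.object_type is the public wrapper around this generator in astroid 2.x; a hedged sketch:

import astroid
from astroid import helpers

node = astroid.extract_node("42  #@")
print(helpers.object_type(node).name)  # expected: 'int'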
Example #17
def tl_infer_binary_op(
    self,
    opnode: nodes.BinOp,
    operator: str,
    other: nodes.NodeNG,
    context: InferenceContext,
    method: nodes.FunctionDef,
) -> Generator[nodes.NodeNG | type[util.Uninferable], None, None]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'
    """
    # For tuples and lists the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            ))
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        else:
            yield _multiply_seq_by_int(self, opnode, as_index, context)
    else:
        yield not_implemented
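
A small sketch of the '+' branch above, concatenating two tuples (assumes astroid is installed; expected output in the comment):

import astroid

node = astroid.extract_node("(1, 2) + (3,)  #@")
print(next(node.infer()).as_string())  # expected: (1, 2, 3)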
Example #18
def _infer_stmts(
    stmts: Sequence[nodes.NodeNG | type[Uninferable] | Instance],
    context: InferenceContext | None,
    frame: nodes.NodeNG | Instance | None = None,
) -> collections.abc.Generator[InferenceResult, None, None]:
    """Return an iterator on statements inferred by each statement in *stmts*."""
    inferred = False
    if context is not None:
        name = context.lookupname
        context = context.clone()
    else:
        name = None
        context = InferenceContext()

    for stmt in stmts:
        if stmt is Uninferable:
            yield stmt
            inferred = True
            continue
        # 'context' is always InferenceContext and Instances get '_infer_name' from ClassDef
        context.lookupname = stmt._infer_name(frame,
                                              name)  # type: ignore[union-attr]
        try:
            # Mypy doesn't recognize that 'stmt' can't be Uninferable
            for inf in stmt.infer(context=context):  # type: ignore[union-attr]
                yield inf
                inferred = True
        except NameInferenceError:
            continue
        except InferenceError:
            yield Uninferable
            inferred = True
    if not inferred:
        raise InferenceError(
            "Inference failed for all members of {stmts!r}.",
            stmts=stmts,
            frame=frame,
            context=context,
        )
Example #19
    def _unpack_args(self, args, context=None):
        values = []
        context = context or InferenceContext()
        context.extra_context = self.argument_context_map
        for arg in args:
            if isinstance(arg, nodes.Starred):
                try:
                    inferred = next(arg.value.infer(context=context))
                except InferenceError:
                    values.append(Uninferable)
                    continue
                except StopIteration:
                    continue

                if inferred is Uninferable:
                    values.append(Uninferable)
                    continue
                if not hasattr(inferred, "elts"):
                    values.append(Uninferable)
                    continue
                values.extend(inferred.elts)
            else:
                values.append(arg)
        return values
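
Starred arguments are expanded by this helper; a sketch through CallSite (assumed import path astroid.arguments):

import astroid
from astroid.arguments import CallSite

call = astroid.extract_node("print(*[1, 2], 3)  #@")
site = CallSite.from_call(call)
print(len(site.positional_arguments))  # expected: 3, the starred list is unpacked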
Example #20
    def __init__(self):
        # Add a context so that inferences called from an instance don't recurse endlessly
        self.context = InferenceContext()

        super().__init__()
Example #21
    def attr___bases__(self):
        # Build a Tuple node holding the inferred base classes of the wrapped class.
        obj = node_classes.Tuple()
        context = InferenceContext()
        elts = list(self._instance._inferred_bases(context))
        obj.postinit(elts=elts)
        return obj
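
A tentative sketch reading __bases__ through ClassDef.igetattr, assuming this special-attribute model is what serves that lookup:

import astroid

cls = astroid.extract_node("class A(int): ...")
bases_tuple = next(cls.igetattr("__bases__"))
print([base.name for base in bases_tuple.elts])  # expected: ['int']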
Example #22
def starred_assigned_stmts(self, node=None, context=None, assign_path=None):
    """
    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    # pylint: disable=too-many-locals,too-many-statements
    def _determine_starred_iteration_lookups(starred, target, lookups):
        # Determine the lookups for the rhs of the iteration
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement(future=True)
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError(
            "Statement {stmt!r} enclosing {node!r} " "must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise InferenceError(
                "Too many starred arguments in the " " assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if rhs is util.Uninferable or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What remains will be the list
        # of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.

        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue

                # We're done unpacking.
                packed = nodes.List(
                    ctx=Context.Store,
                    parent=self,
                    lineno=lhs.lineno,
                    col_offset=lhs.col_offset,
                )
                packed.postinit(elts=list(elts))
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if inferred_iterable is util.Uninferable or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that's what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        lookups[-1] = lookup_slice

        for element in itered:

            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration step,
            # which astroid can't know about.

            found_element = None
            for lookup in lookups:
                if not hasattr(element, "itered"):
                    break
                if not isinstance(lookup, slice):
                    # Grab just the index, not the whole length
                    lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
                else:
                    found_element = element

            unpacked = nodes.List(
                ctx=Context.Store,
                parent=self,
                lineno=self.lineno,
                col_offset=self.col_offset,
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable
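
Finally, a short sketch of the Assign branch above: the starred target should infer to the packed list (assumes astroid is installed; expected output in the comment):

import astroid

node = astroid.extract_node(
    """
    first, *middle, last = [1, 2, 3, 4]
    middle  #@
    """
)
print(next(node.infer()).as_string())  # expected: [2, 3]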