Example #1
def infer_param(function_value, param, ignore_stars=False):
    values = _infer_param(function_value, param)
    if ignore_stars:
        return values
    inference_state = function_value.inference_state
    if param.star_count == 1:
        # *args: the annotated type is wrapped in a tuple.
        tuple_ = builtin_from_name(inference_state, 'tuple')
        return ValueSet([
            GenericClass(
                tuple_,
                TupleGenericManager((values,)),
            )
        ])
    elif param.star_count == 2:
        # **kwargs: the annotated type is wrapped in a dict with str keys.
        dct = builtin_from_name(inference_state, 'dict')
        generics = (
            ValueSet([builtin_from_name(inference_state, 'str')]),
            values,
        )
        return ValueSet([
            GenericClass(
                dct,
                TupleGenericManager(generics),
            )
        ])
    return values
Example #2
    def execute_annotation(self):
        string_name = self._tree_name.value

        if string_name == 'Union':
            # This is kind of a special case, because we have Unions (in Jedi
            # ValueSets).
            return self.gather_annotation_classes().execute_annotation()
        elif string_name == 'Optional':
            # Optional is basically just saying it's either None or the actual
            # type.
            return self.gather_annotation_classes().execute_annotation() \
                | ValueSet([builtin_from_name(self.inference_state, u'None')])
        elif string_name == 'Type':
            # The type is actually already given in the index_value
            return ValueSet([self._index_value])
        elif string_name == 'ClassVar':
            # For now don't do anything here, ClassVars are always used.
            return self._index_value.execute_annotation()

        cls = globals()[string_name]
        return ValueSet([cls(
            self.inference_state,
            self.parent_context,
            self._tree_name,
            self._index_value,
            self._context_of_index
        )])
Example #3
    def execute_annotation(self):
        string_name = self._tree_name.value

        if string_name == 'Union':
            # This is kind of a special case, because we have Unions (in Jedi
            # ValueSets).
            return self.gather_annotation_classes().execute_annotation()
        elif string_name == 'Optional':
            # Optional is basically just saying it's either None or the actual
            # type.
            return self.gather_annotation_classes().execute_annotation() \
                | ValueSet([builtin_from_name(self.inference_state, 'None')])
        elif string_name == 'Type':
            # The type is actually already given in the index_value
            return self._generics_manager[0]
        elif string_name == 'ClassVar':
            # For now don't do anything here, ClassVars are always used.
            return self._generics_manager[0].execute_annotation()

        mapped = {
            'Tuple': Tuple,
            'Generic': Generic,
            'Protocol': Protocol,
            'Callable': Callable,
        }
        cls = mapped[string_name]
        return ValueSet([
            cls(
                self.parent_context,
                self,
                self._tree_name,
                generics_manager=self._generics_manager,
            )
        ])
Example #4
    def _remap(self):
        name = self.string_name
        inference_state = self.parent_context.inference_state
        try:
            actual = _TYPE_ALIAS_TYPES[name]
        except KeyError:
            pass
        else:
            yield TypeAlias.create_cached(inference_state, self.parent_context,
                                          self.tree_name, actual)
            return

        if name in _PROXY_CLASS_TYPES:
            yield ProxyTypingClassValue.create_cached(inference_state,
                                                      self.parent_context,
                                                      self.tree_name)
        elif name in _PROXY_TYPES:
            yield ProxyTypingValue.create_cached(inference_state,
                                                 self.parent_context,
                                                 self.tree_name)
        elif name == 'runtime':
            # We don't want anything here, not sure what this function is
            # supposed to do, since it just appears in the stubs and shouldn't
            # have any effects there (because it's never executed).
            return
        elif name == 'TypeVar':
            yield TypeVarClass.create_cached(inference_state,
                                             self.parent_context,
                                             self.tree_name)
        elif name == 'Any':
            yield AnyClass.create_cached(inference_state, self.parent_context,
                                         self.tree_name)
        elif name == 'TYPE_CHECKING':
            # This is needed for e.g. imports that are only available for type
            # checking or are in cycles. The user can then check this variable.
            yield builtin_from_name(inference_state, 'True')
        elif name == 'overload':
            yield OverloadFunction.create_cached(inference_state,
                                                 self.parent_context,
                                                 self.tree_name)
        elif name == 'NewType':
            yield NewTypeFunction.create_cached(inference_state,
                                                self.parent_context,
                                                self.tree_name)
        elif name == 'cast':
            yield CastFunction.create_cached(inference_state,
                                             self.parent_context,
                                             self.tree_name)
        elif name == 'TypedDict':
            # TODO doesn't even exist in typeshed/typing.py, yet. But will be
            # added soon.
            yield TypedDictClass.create_cached(inference_state,
                                               self.parent_context,
                                               self.tree_name)
        elif name in ('no_type_check', 'no_type_check_decorator'):
            # This is not necessary, as long as we are not doing type checking.
            yield from self._wrapped_name.infer()
        else:
            # Everything else shouldn't be relevant for type checking.
            yield from self._wrapped_name.infer()
Example #5
    def get_filters(self, origin_scope=None, is_instance=False, include_metaclasses=True):
        if include_metaclasses:
            metaclasses = self.get_metaclasses()
            if metaclasses:
                for f in self.get_metaclass_filters(metaclasses):
                    yield f

        for cls in self.py__mro__():
            if cls.is_compiled():
                for filter in cls.get_filters(is_instance=is_instance):
                    yield filter
            else:
                yield ClassFilter(
                    self, node_context=cls.as_context(),
                    origin_scope=origin_scope,
                    is_instance=is_instance
                )
        if not is_instance:
            from jedi.inference.compiled import builtin_from_name
            type_ = builtin_from_name(self.inference_state, u'type')
            assert isinstance(type_, ClassValue)
            if type_ != self:
                # We are not using execute_with_values here, because the
                # plugin function for type would get executed instead of an
                # instance creation.
                args = ValuesArguments([])
                for instance in type_.py__call__(args):
                    instance_filters = instance.get_filters()
                    # Filter out self filters
                    next(instance_filters, None)
                    next(instance_filters, None)
                    x = next(instance_filters, None)
                    assert x is not None
                    yield x
Example #6
    def get_filters(self, origin_scope=None, is_instance=False):
        metaclasses = self.get_metaclasses()
        if metaclasses:
            for f in self.get_metaclass_filters(metaclasses):
                yield f

        for cls in self.py__mro__():
            if isinstance(cls, compiled.CompiledObject):
                for filter in cls.get_filters(is_instance=is_instance):
                    yield filter
            else:
                yield ClassFilter(self,
                                  node_context=cls.as_context(),
                                  origin_scope=origin_scope,
                                  is_instance=is_instance)
        if not is_instance:
            from jedi.inference.compiled import builtin_from_name
            type_ = builtin_from_name(self.inference_state, u'type')
            assert isinstance(type_, ClassValue)
            if type_ != self:
                for instance in type_.py__call__():
                    instance_filters = instance.get_filters()
                    # Filter out self filters
                    next(instance_filters)
                    next(instance_filters)
                    yield next(instance_filters)
Example #7
    def _get_wrapped_value(self):
        from jedi.inference.gradual.base import GenericClass
        from jedi.inference.gradual.generics import TupleGenericManager
        klass = compiled.builtin_from_name(
            self.inference_state, self.array_type)
        c, = GenericClass(klass, TupleGenericManager(
            self._cached_generics())).execute_annotation()
        return c
Example #8
    def py__stop_iteration_returns(self):
        for cls in self._wrapped_value.class_value.py__mro__():
            if cls.py__name__() == 'Generator':
                generics = cls.get_generics()
                try:
                    return generics[2].execute_annotation()
                except IndexError:
                    pass
            elif cls.py__name__() == 'Iterator':
                return ValueSet([builtin_from_name(self.inference_state, 'None')])
        return self._wrapped_value.py__stop_iteration_returns()
Example #9
def _literals_to_types(inference_state, result):
    # Changes literals ('a', 1, 1.0, etc.) to their type instances (str(),
    # int(), float(), etc.).
    new_result = NO_VALUES
    for typ in result:
        if is_literal(typ):
            # Literals are only valid as long as the operations are
            # correct. Otherwise add a value-free instance.
            cls = compiled.builtin_from_name(inference_state, typ.name.string_name)
            new_result |= cls.execute_with_values()
        else:
            new_result |= ValueSet([typ])
    return new_result
Example #10
    def _get_yield_lazy_value(self, yield_expr):
        if yield_expr.type == 'keyword':
            # `yield` just yields None.
            ctx = compiled.builtin_from_name(self.inference_state, 'None')
            yield LazyKnownValue(ctx)
            return

        node = yield_expr.children[1]
        if node.type == 'yield_arg':  # It must be a yield from.
            cn = ContextualizedNode(self, node.children[1])
            yield from cn.infer().iterate(cn)
        else:
            yield LazyTreeValue(self, node)
Example #11
    def py__get__(self, obj, class_value):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        names = self.get_function_slot_names(u'__get__')
        if names:
            if obj is None:
                obj = compiled.builtin_from_name(self.inference_state, u'None')
            return self.execute_function_slots(names, obj, class_value)
        else:
            return ValueSet([self])
Example #12
    def infer_type_vars(self, value_set):
        annotation_generics = self.get_generics()

        if not annotation_generics:
            return {}

        annotation_name = self.py__name__()
        if annotation_name == 'Optional':
            # Optional[T] is equivalent to Union[T, None]. In Jedi unions
            # are represented by members within a ValueSet, so we extract
            # the T from the Optional[T] by removing the None value.
            none = builtin_from_name(self.inference_state, 'None')
            return annotation_generics[0].infer_type_vars(
                value_set.filter(lambda x: x != none),
            )

        return {}
Example #13
    def values(self):
        from jedi.inference.compiled import builtin_from_name
        names = []
        needs_type_completions, dir_infos = self.compiled_object.access_handle.get_dir_infos()
        for name in dir_infos:
            names += self._get(
                name,
                lambda: dir_infos[name],
                lambda: dir_infos.keys(),
            )

        # ``dir`` doesn't include the type names.
        if not self.is_instance and needs_type_completions:
            for filter in builtin_from_name(self._inference_state, u'type').get_filters():
                names += filter.values()
        return names
Example #14
    def py__get__(self, instance, class_value):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        for cls in self.class_value.py__mro__():
            result = cls.py__get__on_class(self, instance, class_value)
            if result is not NotImplemented:
                return result

        names = self.get_function_slot_names('__get__')
        if names:
            if instance is None:
                instance = compiled.builtin_from_name(self.inference_state, 'None')
            return self.execute_function_slots(names, instance, class_value)
        else:
            return ValueSet([self])
Example #15
    def _execute_function(self, params):
        from jedi.inference import docstrings
        from jedi.inference.compiled import builtin_from_name
        if self.api_type != 'function':
            return

        for name in self._parse_function_doc()[1].split():
            try:
                # TODO wtf is this? this is exactly the same as the thing
                # below. It uses getattr as well.
                self.inference_state.builtins_module.access_handle.getattr_paths(
                    name)
            except AttributeError:
                continue
            else:
                bltn_obj = builtin_from_name(self.inference_state, name)
                yield from self.inference_state.execute(bltn_obj, params)
        yield from docstrings.infer_return_types(self)
Example #16
    def values(self):
        from jedi.inference.compiled import builtin_from_name
        names = []
        needs_type_completions, dir_infos = self.compiled_value.access_handle.get_dir_infos()
        # We could use `unsafe` here as well, especially as a parameter to
        # get_dir_infos. But this would lead to a lot of property executions
        # that are probably not wanted. The drawback for this is that we
        # have a different name for `get` and `values`. For `get` we always
        # execute.
        for name in dir_infos:
            names += self._get(
                name,
                lambda name, unsafe: dir_infos[name],
                lambda name: name in dir_infos,
            )

        # ``dir`` doesn't include the type names.
        if not self.is_instance and needs_type_completions:
            for filter in builtin_from_name(self._inference_state, u'type').get_filters():
                names += filter.values()
        return names
Example #17
def builtins_isinstance(objects, types, arguments, inference_state):
    bool_results = set()
    for o in objects:
        cls = o.py__class__()
        try:
            cls.py__bases__
        except AttributeError:
            # This is temporary. Everything should have a class attribute in
            # Python?! Maybe we'll leave it here, because some numpy objects or
            # whatever might not.
            bool_results = set([True, False])
            break

        mro = list(cls.py__mro__())

        for cls_or_tup in types:
            if cls_or_tup.is_class():
                bool_results.add(cls_or_tup in mro)
            elif cls_or_tup.name.string_name == 'tuple' \
                    and cls_or_tup.get_root_context().is_builtins_module():
                # Check for tuples.
                classes = ValueSet.from_sets(
                    lazy_value.infer()
                    for lazy_value in cls_or_tup.iterate()
                )
                bool_results.add(any(cls in mro for cls in classes))
            else:
                _, lazy_value = list(arguments.unpack())[1]
                if isinstance(lazy_value, LazyTreeValue):
                    node = lazy_value.data
                    message = 'TypeError: isinstance() arg 2 must be a ' \
                              'class, type, or tuple of classes and types, ' \
                              'not %s.' % cls_or_tup
                    analysis.add(lazy_value.context, 'type-error-isinstance', node, message)

    return ValueSet(
        compiled.builtin_from_name(inference_state, str(b))
        for b in bool_results
    )
Example #18
    def get_return_values(self, check_yields=False):
        funcdef = self.tree_node
        if funcdef.type == 'lambdef':
            return self.infer_node(funcdef.children[-1])

        if check_yields:
            value_set = NO_VALUES
            returns = get_yield_exprs(self.inference_state, funcdef)
        else:
            value_set = self._infer_annotations()
            if value_set:
                # If there are annotations, prefer them over anything else.
                # This will make it faster.
                return value_set
            value_set |= docstrings.infer_return_types(self._value)
            returns = funcdef.iter_return_stmts()

        for r in returns:
            check = flow_analysis.reachability_check(self, funcdef, r)
            if check is flow_analysis.UNREACHABLE:
                debug.dbg('Return unreachable: %s', r)
            else:
                if check_yields:
                    value_set |= ValueSet.from_sets(
                        lazy_value.infer()
                        for lazy_value in self._get_yield_lazy_value(r))
                else:
                    try:
                        children = r.children
                    except AttributeError:
                        ctx = compiled.builtin_from_name(
                            self.inference_state, u'None')
                        value_set |= ValueSet([ctx])
                    else:
                        value_set |= self.infer_node(children[1])
            if check is flow_analysis.REACHABLE:
                debug.dbg('Return reachable: %s', r)
                break
        return value_set
Example #19
    def py__stop_iteration_returns(self):
        return ValueSet(
            [compiled.builtin_from_name(self.inference_state, u'None')])
Example #20
def _bool_to_value(inference_state, bool_):
    return compiled.builtin_from_name(inference_state, str(bool_))
Example #21
def infer_atom(context, atom):
    """
    Basically processes ``atom`` nodes. The parser sometimes doesn't
    generate the node (because it has just one child). In that case an atom
    might be a name or a literal as well.
    """
    state = context.inference_state
    if atom.type == 'name':
        # This is the first global lookup.
        stmt = tree.search_ancestor(atom, 'expr_stmt', 'lambdef',
                                    'if_stmt') or atom
        if stmt.type == 'if_stmt':
            if not any(n.start_pos <= atom.start_pos < n.end_pos
                       for n in stmt.get_test_nodes()):
                stmt = atom
        elif stmt.type == 'lambdef':
            stmt = atom
        position = stmt.start_pos
        if _is_annotation_name(atom):
            # Since Python 3.7 (with from __future__ import annotations),
            # annotations are essentially strings and can reference objects
            # that are defined further down in code. Therefore just set the
            # position to None, so the finder will not try to stop at a certain
            # position in the module.
            position = None
        return context.py__getattribute__(atom, position=position)
    elif atom.type == 'keyword':
        # For False/True/None
        if atom.value in ('False', 'True', 'None'):
            return ValueSet([compiled.builtin_from_name(state, atom.value)])
        elif atom.value == 'yield':
            # Contrary to yield from, yield can just appear alone to return a
            # value when used with `.send()`.
            return NO_VALUES
        assert False, 'Cannot infer the keyword %s' % atom

    elif isinstance(atom, tree.Literal):
        string = state.compiled_subprocess.safe_literal_eval(atom.value)
        return ValueSet([compiled.create_simple_object(state, string)])
    elif atom.type == 'strings':
        # Will be multiple strings.
        value_set = infer_atom(context, atom.children[0])
        for string in atom.children[1:]:
            right = infer_atom(context, string)
            value_set = _infer_comparison(context, value_set, '+', right)
        return value_set
    elif atom.type == 'fstring':
        return compiled.get_string_value_set(state)
    else:
        c = atom.children
        # Parentheses without commas are not tuples.
        if c[0] == '(' and not len(c) == 2 \
                and not(c[1].type == 'testlist_comp'
                        and len(c[1].children) > 1):
            return context.infer_node(c[1])

        try:
            comp_for = c[1].children[1]
        except (IndexError, AttributeError):
            pass
        else:
            if comp_for == ':':
                # Dict comprehensions have a colon at the 3rd index.
                try:
                    comp_for = c[1].children[3]
                except IndexError:
                    pass

            if comp_for.type in ('comp_for', 'sync_comp_for'):
                return ValueSet(
                    [iterable.comprehension_from_atom(state, context, atom)])

        # It's a dict/list/tuple literal.
        array_node = c[1]
        try:
            array_node_c = array_node.children
        except AttributeError:
            array_node_c = []
        if c[0] == '{' and (array_node == '}' or ':' in array_node_c
                            or '**' in array_node_c):
            new_value = iterable.DictLiteralValue(state, context, atom)
        else:
            new_value = iterable.SequenceLiteralValue(state, context, atom)
        return ValueSet([new_value])
Example #22
def _infer_node(context, element):
    debug.dbg('infer_node %s@%s in %s', element, element.start_pos, context)
    inference_state = context.inference_state
    typ = element.type
    if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword',
               'fstring'):
        return infer_atom(context, element)
    elif typ == 'lambdef':
        return ValueSet([FunctionValue.from_context(context, element)])
    elif typ == 'expr_stmt':
        return infer_expr_stmt(context, element)
    elif typ in ('power', 'atom_expr'):
        first_child = element.children[0]
        children = element.children[1:]
        had_await = False
        if first_child.type == 'keyword' and first_child.value == 'await':
            had_await = True
            first_child = children.pop(0)

        value_set = context.infer_node(first_child)
        for (i, trailer) in enumerate(children):
            if trailer == '**':  # has a power operation.
                right = context.infer_node(children[i + 1])
                value_set = _infer_comparison(context, value_set, trailer,
                                              right)
                break
            value_set = infer_trailer(context, value_set, trailer)

        if had_await:
            return value_set.py__await__().py__stop_iteration_returns()
        return value_set
    elif typ in (
            'testlist_star_expr',
            'testlist',
    ):
        # The implicit tuple in statements.
        return ValueSet(
            [iterable.SequenceLiteralValue(inference_state, context, element)])
    elif typ in ('not_test', 'factor'):
        value_set = context.infer_node(element.children[-1])
        for operator in element.children[:-1]:
            value_set = infer_factor(value_set, operator)
        return value_set
    elif typ == 'test':
        # `x if foo else y` case.
        return (context.infer_node(element.children[0])
                | context.infer_node(element.children[-1]))
    elif typ == 'operator':
        # Must be an ellipsis, other operators are not inferred.
        if element.value != '...':
            origin = element.parent
            raise AssertionError("unhandled operator %s in %s " %
                                 (repr(element.value), origin))
        return ValueSet(
            [compiled.builtin_from_name(inference_state, 'Ellipsis')])
    elif typ == 'dotted_name':
        value_set = infer_atom(context, element.children[0])
        for next_name in element.children[2::2]:
            value_set = value_set.py__getattribute__(next_name,
                                                     name_context=context)
        return value_set
    elif typ == 'eval_input':
        return context.infer_node(element.children[0])
    elif typ == 'annassign':
        return annotation.infer_annotation(context, element.children[1]) \
            .execute_annotation()
    elif typ == 'yield_expr':
        if len(element.children) and element.children[1].type == 'yield_arg':
            # Implies that it's a yield from.
            element = element.children[1].children[1]
            generators = context.infer_node(element) \
                .py__getattribute__('__iter__').execute_with_values()
            return generators.py__stop_iteration_returns()

        # Generator.send() is not implemented.
        return NO_VALUES
    elif typ == 'namedexpr_test':
        return context.infer_node(element.children[2])
    else:
        return infer_or_test(context, element)
Example #23
    def _get_wrapped_value(self):
        object_, = builtin_from_name(
            self.inference_state, u'object').execute_annotation()
        return object_
Example #24
def test_next_docstr(inference_state):
    next_ = compiled.builtin_from_name(inference_state, u'next')
    assert next_.tree_node is not None
    assert next_.py__doc__() == ''  # It's a stub
    for non_stub in _stub_to_python_value_set(next_):
        assert non_stub.py__doc__() == next.__doc__
Example #25
    def py__class__(self):
        return compiled.builtin_from_name(self.inference_state, u'type')
Example #26
    def py__class__(self):
        # This might not be 100% correct, but it is good enough. The details of
        # the typing library are not really an issue for Jedi.
        return builtin_from_name(self.inference_state, u'type')
Example #27
    def py__class__(self):
        # TODO this is obviously not correct, but at least gives us a class if
        # we have none. Some of these objects don't really have a base class in
        # typeshed.
        return builtin_from_name(self.inference_state, u'object')
Example #28
    def _get_wrapped_value(self):
        value = compiled.builtin_from_name(
            self._context.inference_state, 'slice')
        slice_value, = value.execute_with_values()
        return slice_value
Example #29
    def _get_wrapped_value(self):
        from jedi.inference.gradual.typing import GenericClass
        klass = compiled.builtin_from_name(
            self.inference_state, self.array_type)
        c, = GenericClass(klass, self._get_generics()).execute_annotation()
        return c