Exemplo n.º 1
0
def import_module_by_names(inference_state,
                           import_names,
                           sys_path=None,
                           module_context=None,
                           prefer_stubs=True):
    """Resolve a dotted import path one segment at a time.

    Each segment is imported relative to the modules found for the previous
    segment.  On failure an error is attached to ``module_context`` (or a
    warning is logged) and ``NO_VALUES`` is returned.
    """
    if sys_path is None:
        sys_path = inference_state.get_sys_path()

    # Normalize the import names to plain (unicode) strings.
    as_strings = tuple(
        force_unicode(part.value if isinstance(part, tree.Name) else part)
        for part in import_names
    )

    parents = [None]
    for index, name in enumerate(import_names):
        found = []
        for parent in parents:
            found.append(import_module(
                inference_state,
                as_strings[:index + 1],
                parent,
                sys_path,
                prefer_stubs=prefer_stubs,
            ))
        parents = ValueSet.from_sets(found)
        if not parents:
            message = 'No module named ' + '.'.join(as_strings)
            if module_context is None:
                debug.warning(message)
            else:
                _add_error(module_context, name, message)
            return NO_VALUES
    return parents
Exemplo n.º 2
0
def _split_comment_param_declaration(decl_text):
    """Split *decl_text* on top-level commas, keeping generics together.

    For example, "foo, Bar[baz, biz]" becomes ['foo', 'Bar[baz, biz]'].
    Returns [] when the text is not parseable as Python.
    """
    try:
        node = parse(decl_text, error_recovery=False).children[0]
    except ParserSyntaxError:
        debug.warning('Comment annotation is not valid Python: %s' % decl_text)
        return []

    type_like = ('name', 'atom_expr', 'power')
    if node.type in type_like:
        # A single declaration, no commas to split.
        return [node.get_code().strip()]

    children = getattr(node, 'children', None)
    if children is None:
        # Leaf node of some other type; nothing usable.
        return []
    return [
        child.get_code().strip()
        for child in children
        if child.type in type_like
    ]
Exemplo n.º 3
0
def _get_buildout_script_paths(search_path):
    """
    Yield paths of Python-looking scripts in a buildout project's bin dir.

    Looks for a 'buildout.cfg' in the parent directories of the given module
    and, if found, yields every file in the project's ``bin`` directory whose
    shebang line mentions python.

    :param search_path: absolute path to the module.
    :type search_path: str
    """
    project_root = _get_parent_dir_with_file(search_path, 'buildout.cfg')
    if not project_root:
        return
    bin_path = os.path.join(project_root, 'bin')
    if not os.path.exists(bin_path):
        return

    for filename in os.listdir(bin_path):
        filepath = os.path.join(bin_path, filename)
        try:
            with open(filepath, 'r') as f:
                first_line = f.readline()
        except (UnicodeDecodeError, IOError) as e:
            # Probably a binary file; permission error or race cond. because
            # file got deleted. Ignore it.
            debug.warning(unicode(e))
            continue
        if first_line.startswith('#!') and 'python' in first_line:
            yield filepath
Exemplo n.º 4
0
 def py__mro__(self):
     """Yield the classes in this class's method resolution order.

     Starts with the class itself, then walks the inferred base classes,
     de-duplicating results.  This is a depth-first approximation, not a
     true C3 linearization (see the TODOs below).
     """
     mro = [self]
     yield self
     # TODO Do a proper mro resolution. Currently we are just listing
     # classes. However, it's a complicated algorithm.
     for lazy_cls in self.py__bases__():
         # TODO there's multiple different mro paths possible if this yields
         # multiple possibilities. Could be changed to be more correct.
         for cls in lazy_cls.infer():
             # TODO detect for TypeError: duplicate base class str,
             # e.g.  `class X(str, str): pass`
             try:
                 mro_method = cls.py__mro__
             except AttributeError:
                 # Base is not class-like (no py__mro__); real Python would
                 # raise a TypeError here.
                 # TODO add a TypeError like:
                 """
                 >>> class Y(lambda: test): pass
                 Traceback (most recent call last):
                   File "<stdin>", line 1, in <module>
                 TypeError: function() argument 1 must be code, not str
                 >>> class Y(1): pass
                 Traceback (most recent call last):
                   File "<stdin>", line 1, in <module>
                 TypeError: int() takes at most 2 arguments (3 given)
                 """
                 debug.warning('Super class of %s is not a class: %s', self,
                               cls)
             else:
                 # De-duplicate while preserving discovery order.
                 for cls_new in mro_method():
                     if cls_new not in mro:
                         mro.append(cls_new)
                         yield cls_new
Exemplo n.º 5
0
def _iter_module_names(inference_state, paths):
    """Yield importable module/package names found directly under *paths*."""
    for path in paths:
        try:
            entries = scandir(path)
        except OSError:
            # The file might not exist or reading it might lead to an error.
            debug.warning("Not possible to list directory: %s", path)
            continue
        for entry in entries:
            name = entry.name
            # Directories first (namespaces/packages), then modules/stubs.
            if entry.is_dir():
                # __pycache__ is obviously not an interesting namespace; the
                # name must also be a valid identifier.
                # TODO use str.isidentifier, once Python 2 is removed
                if name == '__pycache__' or re.search(r'\W|^\d', name):
                    continue
                yield name
                continue

            if name.endswith('.pyi'):  # Stub files
                modname = name[:-4]
            else:
                modname = inspect.getmodulename(name)
            if modname and '.' not in modname and modname != '__init__':
                yield modname
Exemplo n.º 6
0
 def _get_classes(self):
     """Infer the classes this TypeVar may stand for (bound, then constraints)."""
     if self._bound_lazy_value is not None:
         return self._bound_lazy_value.infer()
     if not self._constraints_lazy_values:
         # Neither a bound nor constraints were given; nothing to infer.
         debug.warning('Tried to infer the TypeVar %s without a given type',
                       self._var_name)
         return NO_VALUES
     return self.constraints
Exemplo n.º 7
0
def _static_getmro_newstyle(klass):
    mro = type.__dict__['__mro__'].__get__(klass)
    if not isinstance(mro, (tuple, list)):
        # There are unfortunately no tests for this, I was not able to
        # reproduce this in pure Python. However should still solve the issue
        # raised in GH #1517.
        debug.warning('mro of %s returned %s, should be a tuple' %
                      (klass, mro))
        return ()
    return mro
Exemplo n.º 8
0
def add(node_context, error_name, node, message=None, typ=Error, payload=None):
    """Record an analysis issue, unless the exception is explicitly caught.

    Returns the created issue instance, or None when suppressed.
    """
    exception = CODES[error_name][1]
    if _check_for_exception_catch(node_context, node, exception, payload):
        return

    # TODO this path is probably not right
    root_context = node_context.get_root_context()
    issue_instance = typ(
        error_name, root_context.py__file__(), node.start_pos, message)
    debug.warning(str(issue_instance), format=False)
    node_context.inference_state.analysis.append(issue_instance)
    return issue_instance
Exemplo n.º 9
0
 def too_many_args(argument):
     """Record a too-many-arguments issue for the first surplus argument."""
     message = _error_argument_count(funcdef, len(unpacked_va))
     # Like CPython, only the first param that is not needed is reported.
     if not arguments.get_calling_nodes():
         # No valid calling node to attach the issue to.
         issues.append(None)
         debug.warning('non-public warning: %s', message)
     else:
         issues.append(
             _add_argument_issue('type-error-too-many-arguments',
                                 argument,
                                 message=message))
Exemplo n.º 10
0
def _get_forward_reference_node(context, string):
    """Parse a forward-reference string into a tree node, or return None."""
    try:
        new_node = context.inference_state.grammar.parse(
            force_unicode(string),
            start_symbol='eval_input',
            error_recovery=False)
    except ParserSyntaxError:
        debug.warning('Annotation not parsed: %s' % string)
        return None

    # Graft the parsed node onto the current module so positions make sense.
    module = context.tree_node.get_root_node()
    parser_utils.move(new_node, module.end_pos[0])
    new_node.parent = context.tree_node
    return new_node
Exemplo n.º 11
0
    def py__call__(self, arguments):
        """Create a TypeVar from a ``TypeVar(...)`` call.

        The first positional argument must be a resolvable string name,
        otherwise no values are produced.
        """
        unpacked = arguments.unpack()
        key, lazy_value = next(unpacked, (None, None))
        var_name = self._find_string_name(lazy_value)
        if key is not None or var_name is None:
            # Without a positional name the TypeVar is useless.
            debug.warning('Found a variable without a name %s', arguments)
            return NO_VALUES

        type_var = TypeVar.create_cached(
            self.inference_state, self.parent_context,
            self._tree_name, var_name, unpacked)
        return ValueSet([type_var])
Exemplo n.º 12
0
 def py__call__(self, arguments):
     """
         def x() -> Callable[[Callable[..., _T]], _T]: ...
     """
     manager = self._generics_manager
     try:
         # Index 0 holds the parameter list, index 1 the return type.
         param_values, result_values = manager[0], manager[1]
     except IndexError:
         debug.warning('Callable[...] defined without two arguments')
         return NO_VALUES
     from medi.inference.gradual.annotation import infer_return_for_callable
     return infer_return_for_callable(arguments, param_values, result_values)
Exemplo n.º 13
0
def _apply_decorators(context, node):
    """
    Infer the value a (possibly decorated) class/function definition
    evaluates to.  Decorators are applied bottom-up, as at runtime; when a
    decorator cannot be resolved, the undecorated value is returned.
    """
    if node.type == 'classdef':
        decoratee_value = ClassValue(
            context.inference_state,
            parent_context=context,
            tree_node=node,
        )
    else:
        decoratee_value = FunctionValue.from_context(context, node)
    initial = values = ValueSet([decoratee_value])

    if is_big_annoying_library(context):
        return values

    for dec in reversed(node.get_decorators()):
        debug.dbg('decorator: %s %s', dec, values, color="MAGENTA")
        with debug.increase_indent_cm():
            dec_values = context.infer_node(dec.children[1])
            trailer_nodes = dec.children[2:-1]
            if trailer_nodes:
                # The decorator expression has a call/attribute trailer;
                # wrap the pieces in a trailer node and infer it.
                trailer = tree.PythonNode('trailer', trailer_nodes)
                trailer.parent = dec
                dec_values = infer_trailer(context, dec_values, trailer)

            if not dec_values:
                code = dec.get_code(include_prefix=False)
                # For the short future, we don't want to hear about the runtime
                # decorator in typing that was intentionally omitted. This is not
                # "correct", but helps with debugging.
                if code != '@runtime\n':
                    debug.warning('decorator not found: %s on %s', dec, node)
                return initial

            values = dec_values.execute(arguments.ValuesArguments([values]))
            if not values:
                debug.warning('not possible to resolve wrappers found %s', node)
                return initial

        debug.dbg('decorator end %s', values, color="MAGENTA")
    if values == initial:
        return values
    return ValueSet([Decoratee(c, decoratee_value) for c in values])
Exemplo n.º 14
0
def execution_allowed(inference_state, node):
    """
    Detect recursions in statements: the same node in the same module must
    not be executed twice at once.  Yields True when execution may proceed,
    False when a recursion was detected.
    """
    pushed_nodes = inference_state.recursion_detector.pushed_nodes

    if node in pushed_nodes:
        debug.warning('catched stmt recursion: %s @%s', node,
                      getattr(node, 'start_pos', None))
        yield False
        return

    try:
        pushed_nodes.append(node)
        yield True
    finally:
        pushed_nodes.pop()
Exemplo n.º 15
0
def _infer_param(function_value, param):
    """
    Infer the type of a function parameter from its annotation or, failing
    that, from a Python 2-style ``# type: (...) -> ...`` comment.
    """
    annotation = param.annotation
    if annotation is not None:
        # Annotations are like default params and resolve in the same way.
        return infer_annotation(
            function_value.get_default_param_context(), annotation)

    # Collect the params in the order they would appear in a type comment.
    all_params = [
        child for child in param.parent.children if child.type == 'param'
    ]

    funcdef_node = param.parent.parent
    comment = parser_utils.get_following_comment_same_line(funcdef_node)
    if comment is None:
        return NO_VALUES

    match = re.match(r"^#\s*type:\s*\(([^#]*)\)\s*->", comment)
    if not match:
        return NO_VALUES
    params_comments = _split_comment_param_declaration(match.group(1))

    # Position of the param under investigation.
    index = all_params.index(param)
    if len(params_comments) != len(all_params):
        # Mismatched lengths usually mean the comment omits `self`.
        debug.warning("Comments length != Params length %s %s",
                      params_comments, all_params)
    if function_value.is_bound_method():
        if index == 0:
            # Assume it's self, which is already handled elsewhere.
            return NO_VALUES
        index -= 1
    if index >= len(params_comments):
        return NO_VALUES

    return _infer_annotation_string(
        function_value.get_default_param_context(), params_comments[index])
Exemplo n.º 16
0
 def iterate():
     """Yield lazy values produced by this object's iteration protocol."""
     for generator in self.execute_function_slots(iter_slot_names):
         if not generator.is_instance() or generator.is_compiled():
             # Compiled/non-instance iterators know how to iterate directly.
             for lazy_value in generator.py__iter__():
                 yield lazy_value
             continue

         # Instance iterator: drive the `__next__` protocol by hand.
         if self.inference_state.environment.version_info.major == 2:
             name = u'next'
         else:
             name = u'__next__'
         next_slot_names = generator.get_function_slot_names(name)
         if next_slot_names:
             yield LazyKnownValues(
                 generator.execute_function_slots(next_slot_names))
         else:
             debug.warning(
                 'Instance has no __next__ function in %s.',
                 generator)
Exemplo n.º 17
0
    def wrapper(context, *args, **kwargs):
        """Call *func* unless this tree node has been inferred too often.

        Counts inferences per tree node and short-circuits with NO_VALUES
        once a limit is exceeded, preventing pathological re-inference.
        """
        n = context.tree_node
        inference_state = context.inference_state
        try:
            inference_state.inferred_element_counts[n] += 1
            maximum = 300
            if context.parent_context is None \
                    and context.get_value() is inference_state.builtins_module:
                # Builtins should have a more generous inference limit.
                # It is important that builtins can be executed, otherwise some
                # functions that depend on certain builtins features would be
                # broken, see e.g. GH #1432
                maximum *= 100

            if inference_state.inferred_element_counts[n] > maximum:
                debug.warning('In value %s there were too many inferences.', n)
                return NO_VALUES
        except KeyError:
            # First inference of this node; start its counter.
            inference_state.inferred_element_counts[n] = 1
        return func(context, *args, **kwargs)
Exemplo n.º 18
0
    def __init__(self, parent_context, tree_name, var_name, unpacked_args):
        """Build a TypeVar representation from its call arguments.

        Positional arguments become constraints; the keyword arguments
        ``bound``, ``covariant`` and ``contravariant`` are stored on the
        matching attributes.  Unknown keywords are warned about and ignored.
        """
        super(TypeVar, self).__init__(parent_context, tree_name)
        self._var_name = var_name

        self._constraints_lazy_values = []
        self._bound_lazy_value = None
        self._covariant_lazy_value = None
        self._contravariant_lazy_value = None
        for key, lazy_value in unpacked_args:
            if key is None:
                # Positional arguments are the TypeVar's constraints.
                self._constraints_lazy_values.append(lazy_value)
            elif key == 'bound':
                self._bound_lazy_value = lazy_value
            elif key == 'covariant':
                self._covariant_lazy_value = lazy_value
            elif key == 'contravariant':
                # Bug fix: this previously assigned to a misspelled
                # attribute (_contra_variant_lazy_value), so the declared
                # _contravariant_lazy_value was never populated.
                self._contravariant_lazy_value = lazy_value
            else:
                debug.warning('Invalid TypeVar param name %s', key)
Exemplo n.º 19
0
def _get_paths_from_buildout_script(inference_state, buildout_script_path):
    """Yield sys.path modifications declared in a buildout-generated script."""
    file_io = FileIO(buildout_script_path)
    try:
        module_node = inference_state.parse(
            file_io=file_io,
            cache=True,
            cache_path=settings.cache_directory,
        )
    except IOError:
        debug.warning('Error trying to read buildout_script: %s', buildout_script_path)
        return

    from medi.inference.value import ModuleValue
    module_value = ModuleValue(
        inference_state, module_node,
        file_io=file_io,
        string_names=None,
        code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path),
    )
    for path in check_sys_path_modifications(module_value.as_context()):
        yield path
Exemplo n.º 20
0
def infer_annotation(context, annotation):
    """
    Infer an annotation node. This means that it infers the part of
    `int` here:

        foo: int = 3

    Also checks for forward references (strings).

    :param context: the context the annotation appears in.
    :param annotation: the annotation tree node to infer.
    :return: a ValueSet with the inferred values.
    """
    value_set = context.infer_node(annotation)
    if len(value_set) != 1:
        # Fix: the old message was built from two adjacent literals that
        # produced a double space ("object,  not"); also use debug.warning's
        # lazy %-args like the rest of the codebase.
        debug.warning("Inferred typing index %s should lead to 1 object, "
                      "not %s", annotation, value_set)
        return value_set

    inferred_value = next(iter(value_set))
    if is_string(inferred_value):
        # A string annotation is a forward reference: parse and re-infer it.
        result = _get_forward_reference_node(context,
                                             inferred_value.get_safe_value())
        if result is not None:
            return context.infer_node(result)
    return value_set
Exemplo n.º 21
0
    def _find_string_name(self, lazy_value):
        """Extract a plain string from a lazily inferred value, or None."""
        if lazy_value is None:
            return None

        value_set = lazy_value.infer()
        if not value_set:
            return None
        if len(value_set) > 1:
            debug.warning('Found multiple values for a type variable: %s',
                          value_set)

        name_value = next(iter(value_set))
        method = getattr(name_value, 'get_safe_value', None)
        if method is None:
            # Not a value we can safely extract a constant from.
            return None
        safe_value = method(default=None)
        if self.inference_state.environment.version_info.major == 2:
            # Python 2 may hand us bytes; normalize to unicode.
            if isinstance(safe_value, bytes):
                return force_unicode(safe_value)
        if isinstance(safe_value, (str, unicode)):
            return safe_value
        return None
Exemplo n.º 22
0
    def push_execution(self, execution):
        """Register a function execution; return True when it must be skipped.

        Applies several limits: global recursion depth, total executions,
        per-function executions, and per-function recursion depth.
        """
        funcdef = execution.tree_node

        # Both of these are undone in pop_execution.
        self._recursion_level += 1
        self._parent_execution_funcs.append(funcdef)

        module_context = execution.get_root_context()

        if module_context.is_builtins_module():
            # Builtins are under our control and known not to recurse wildly,
            # so they are always allowed; they help a lot with good results.
            return False

        if self._recursion_level > recursion_limit:
            debug.warning('Recursion limit (%s) reached', recursion_limit)
            return True

        if self._execution_count >= total_function_execution_limit:
            debug.warning('Function execution limit (%s) reached', total_function_execution_limit)
            return True
        self._execution_count += 1

        func_count = self._funcdef_execution_counts.setdefault(funcdef, 0)
        if func_count >= per_function_execution_limit:
            if module_context.py__name__() == 'typing':
                # typing is special-cased; never restrict it per-function.
                return False
            debug.warning(
                'Per function execution limit (%s) reached: %s',
                per_function_execution_limit,
                funcdef
            )
            return True
        self._funcdef_execution_counts[funcdef] = func_count + 1

        if self._parent_execution_funcs.count(funcdef) > per_function_recursion_limit:
            debug.warning(
                'Per function recursion limit (%s) reached: %s',
                per_function_recursion_limit,
                funcdef
            )
            return True
        return False
Exemplo n.º 23
0
def iterate_argument_clinic(inference_state, arguments, clinic_string):
    """Uses a list with argument clinic information (see PEP 436).

    Yields one ValueSet per clinic parameter.  Raises ParamIssue when the
    call cannot be matched against the clinic signature.
    """
    clinic_args = list(_parse_argument_clinic(clinic_string))

    iterator = PushBackIterator(arguments.unpack())
    for i, (name, optional, allow_kwargs, stars) in enumerate(clinic_args):
        if stars == 1:
            # *args: collect all following positional arguments.
            lazy_values = []
            for key, argument in iterator:
                if key is not None:
                    # A keyword argument ends the positional run; put it back.
                    iterator.push_back((key, argument))
                    break

                lazy_values.append(argument)
            yield ValueSet([iterable.FakeTuple(inference_state, lazy_values)])
            # Fix: removed a stray no-op expression statement (`lazy_values`)
            # that was dead code here.
            continue
        elif stars == 2:
            raise NotImplementedError()
        key, argument = next(iterator, (None, None))
        if key is not None:
            debug.warning('Keyword arguments in argument clinic are currently not supported.')
            raise ParamIssue
        if argument is None and not optional:
            debug.warning('TypeError: %s expected at least %s arguments, got %s',
                          name, len(clinic_args), i)
            raise ParamIssue

        value_set = NO_VALUES if argument is None else argument.infer()

        if not value_set and not optional:
            # For the stdlib we always want values. If we don't get them,
            # that's ok, maybe something is too hard to resolve, however,
            # we will not proceed with the type inference of that function.
            debug.warning('argument_clinic "%s" not resolvable.', name)
            raise ParamIssue
        yield value_set
Exemplo n.º 24
0
 def get_index_and_execute(self, index):
     """Execute the annotation of the generic at *index*; NO_VALUES if absent."""
     try:
         result = self[index].execute_annotation()
     except IndexError:
         debug.warning('No param #%s found for annotation %s', index, self)
         return NO_VALUES
     return result
Exemplo n.º 25
0
 def execute_annotation(self):
     """``Any`` can be anything, so executing it yields no concrete values."""
     debug.warning('Used Any - returned no results')
     return NO_VALUES
Exemplo n.º 26
0
 def py__await__(self):
     """Look up and execute ``__await__``; warn when it is missing."""
     await_values = self.py__getattribute__(u"__await__")
     if not await_values:
         debug.warning('Tried to run __await__ on value %s', self)
     return await_values.execute_with_values()
Exemplo n.º 27
0
 def py__get__(self, instance, class_value):
     """Descriptor fallback: without a ``__get__``, the value is returned as-is."""
     debug.warning("No __get__ defined on %s", self)
     return ValueSet([self])
Exemplo n.º 28
0
 def py__stop_iteration_returns(self):
     """Fallback: the StopIteration return value cannot be determined here."""
     debug.warning("Not possible to return the stop iterations of %s", self)
     return NO_VALUES
Exemplo n.º 29
0
 def py__call__(self, arguments):
     """Fallback: this value is not callable, so calling it yields nothing."""
     debug.warning("no execution possible %s", self)
     return NO_VALUES
Exemplo n.º 30
0
 def execute_operation(self, other, operator):
     """Fallback for binary operations whose result cannot be inferred."""
     debug.warning("%s not possible between %s and %s", operator, self,
                   other)
     return NO_VALUES