Example #1
    def error_recovery(self, grammar, stack, typ, value, start_pos, prefix,
                       add_token_callback):
        """
        This parser is written in a dynamic way, meaning it allows using
        different grammars (even non-Python ones). However, error recovery is
        written purely for Python.
        """
        def current_suite(stack):
            # For now just discard everything that is not a suite or
            # file_input, if we detect an error.
            for index, (dfa, state,
                        (typ, nodes)) in reversed(list(enumerate(stack))):
                # `suite` can sometimes be only simple_stmt, not stmt.
                symbol = grammar.number2symbol[typ]
                if symbol == 'file_input':
                    break
                elif symbol == 'suite' and len(nodes) > 1:
                    # suites without an indent in them get discarded.
                    break
                elif symbol == 'simple_stmt' and len(nodes) > 1:
                    # simple_stmt can just be turned into a Node, if there are
                    # enough statements. Ignore the rest after that.
                    break
            return index, symbol, nodes

        index, symbol, nodes = current_suite(stack)
        if symbol == 'simple_stmt':
            index -= 2
            (_, _, (typ, suite_nodes)) = stack[index]
            symbol = grammar.number2symbol[typ]
            suite_nodes.append(pt.Node(symbol, list(nodes)))
            # Remove the old nodes; they were copied into the new node above.
            nodes[:] = []
            nodes = suite_nodes
            stack[index]

        #print('err', token.tok_name[typ], repr(value), start_pos, len(stack), index)
        self._stack_removal(grammar, stack, index + 1, value, start_pos)
        if typ == INDENT:
            # For every deleted INDENT we have to delete a DEDENT as well.
            # Otherwise the parser will get into trouble and DEDENT too early.
            self._omit_dedent_list.append(self._indent_counter)

        if value in ('import', 'from', 'class', 'def', 'try', 'while',
                     'return'):
            # Those can always be new statements.
            add_token_callback(typ, value, prefix, start_pos)
        elif typ == DEDENT and symbol == 'suite':
            # Close the current suite, with DEDENT.
            # Note that this may cause some suites to not contain any
            # statements at all. This is contrary to valid Python syntax. We
            # keep incomplete suites in Jedi to be able to complete param names
            # or `with ... as foo` names. If we want to use this parser for
            # syntax checks, we have to check in a separate turn if suites
            # contain statements or not. However, a second check is necessary
            # anyway (compile.c does that for Python), because Python's grammar
            # doesn't stop you from defining `continue` in a module, etc.
            add_token_callback(typ, value, prefix, start_pos)
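To make the stack-unwinding idea in current_suite() concrete, here is a small self-contained sketch; the stack entries and the number2symbol mapping are made-up toy data, not jedi's real grammar or pgen objects.

number2symbol = {256: 'file_input', 300: 'suite', 310: 'expr_stmt'}

# Toy stand-ins mirroring the parser's (dfa, state, (type, nodes)) tuples.
stack = [
    (None, 0, (256, ['<module nodes>'])),
    (None, 3, (300, ['NEWLINE', 'INDENT', '<stmt>'])),
    (None, 1, (310, ['<half-parsed expression>'])),
]

def current_suite(stack):
    # Walk from the top of the stack and stop at the innermost entry that can
    # absorb the broken statement (a non-trivial suite or simple_stmt, with
    # file_input as the last resort).
    for index, (dfa, state, (typ, nodes)) in reversed(list(enumerate(stack))):
        symbol = number2symbol[typ]
        if symbol == 'file_input':
            break
        elif symbol in ('suite', 'simple_stmt') and len(nodes) > 1:
            break
    return index, symbol, nodes

print(current_suite(stack))  # (1, 'suite', ['NEWLINE', 'INDENT', '<stmt>'])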
Example #2
    def get_decorated_func(self):
        """
        Returns the function that should be executed in the end.
        This is also the place where the decorators are processed.
        """
        f = self.base_func
        decorators = self.base_func.get_decorators()

        if not decorators or self.is_decorated:
            return self

        # Only enter it if it has not already been processed.
        if not self.is_decorated:
            for dec in reversed(decorators):
                debug.dbg('decorator: %s %s', dec, f)
                dec_results = self._evaluator.eval_element(dec.children[1])
                trailer = dec.children[2:-1]
                if trailer:
                    # Create a trailer and evaluate it.
                    trailer = tree.Node('trailer', trailer)
                    trailer.parent = dec
                    dec_results = self._evaluator.eval_trailer(
                        dec_results, trailer)

                if not len(dec_results):
                    debug.warning('decorator not found: %s on %s', dec,
                                  self.base_func)
                    return self
                decorator = dec_results.pop()
                if dec_results:
                    debug.warning('multiple decorators found %s %s',
                                  self.base_func, dec_results)

                # Create param array.
                if isinstance(f, Function):
                    old_func = f  # TODO this is just hacky. change. id:133 gh:134
                elif f.type == 'funcdef':
                    old_func = Function(self._evaluator, f, is_decorated=True)
                else:
                    old_func = f

                wrappers = self._evaluator.execute_evaluated(
                    decorator, old_func)
                if not len(wrappers):
                    debug.warning('no wrappers found %s', self.base_func)
                    return self
                if len(wrappers) > 1:
                    # TODO resolve issue with multiple wrappers -> multiple types id:88 gh:89
                    debug.warning('multiple wrappers found %s %s',
                                  self.base_func, wrappers)
                f = list(wrappers)[0]
                if isinstance(f, (Instance, Function)):
                    f.decorates = self

                debug.dbg('decorator end %s', f)
        return f
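The reversed() iteration over the decorator list mirrors how plain Python applies stacked decorators: the one written closest to the def wraps the function first. A minimal sketch in ordinary Python, nothing jedi-specific:

def outer(func):
    return lambda: 'outer(%s)' % func()

def inner(func):
    return lambda: 'inner(%s)' % func()

@outer
@inner
def greet():
    return 'greet'

# greet is outer(inner(greet)): inner is applied first, then outer.
print(greet())  # outer(inner(greet))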
Example #3
def get_faked(module, obj, name=None):
    obj = obj.__class__ if is_class_instance(obj) else obj
    result = _faked(module, obj, name)
    # TODO can this ever happen (result is None)? If so, document it!
    if not isinstance(result, pt.Class) and result is not None:
        # Set the docstr which was previously not set (faked modules don't
        # contain it).
        doc = '"""%s"""' % obj.__doc__  # TODO need escapes.
        suite = result.children[-1]
        string = pt.String(pt.zero_position_modifier, doc, (0, 0), '')
        new_line = pt.Whitespace('\n', (0, 0), '')
        docstr_node = pt.Node('simple_stmt', [string, new_line])
        suite.children.insert(2, docstr_node)
        return result
Example #4
    def error_recovery(self, grammar, stack, arcs, typ, value, start_pos,
                       prefix, add_token_callback):
        """
        This parser is written in a dynamic way, meaning it allows using
        different grammars (even non-Python ones). However, error recovery is
        written purely for Python.
        """
        def current_suite(stack):
            # For now just discard everything that is not a suite or
            # file_input, if we detect an error.
            for index, (dfa, state,
                        (type_, nodes)) in reversed(list(enumerate(stack))):
                # `suite` can sometimes be only simple_stmt, not stmt.
                symbol = grammar.number2symbol[type_]
                if symbol == 'file_input':
                    break
                elif symbol == 'suite' and len(nodes) > 1:
                    # suites without an indent in them get discarded.
                    break
                elif symbol == 'simple_stmt' and len(nodes) > 1:
                    # simple_stmt can just be turned into a Node, if there are
                    # enough statements. Ignore the rest after that.
                    break
            return index, symbol, nodes

        index, symbol, nodes = current_suite(stack)
        if symbol == 'simple_stmt':
            index -= 2
            (_, _, (type_, suite_nodes)) = stack[index]
            symbol = grammar.number2symbol[type_]
            suite_nodes.append(pt.Node(symbol, list(nodes)))
            # Remove the old nodes; they were copied into the new node above.
            nodes[:] = []
            nodes = suite_nodes
            stack[index]

        # print('err', token.tok_name[typ], repr(value), start_pos, len(stack), index)
        if self._stack_removal(grammar, stack, arcs, index + 1, value,
                               start_pos):
            add_token_callback(typ, value, start_pos, prefix)
        else:
            if typ == INDENT:
                # For every deleted INDENT we have to delete a DEDENT as well.
                # Otherwise the parser will get into trouble and DEDENT too early.
                self._omit_dedent_list.append(self._indent_counter)
            else:
                error_leaf = pt.ErrorLeaf(self.position_modifier, typ, value,
                                          start_pos, prefix)
                stack[-1][2][1].append(error_leaf)
Example #5
def get_faked(module, obj, name=None):
    obj = obj.__class__ if is_class_instance(obj) else obj
    result = _faked(module, obj, name)
    if result is None or isinstance(result, pt.Class):
        # We're not interested in classes. What we want is functions.
        return None
    else:
        # Set the docstr which was previously not set (faked modules don't
        # contain it).
        doc = '"""%s"""' % obj.__doc__  # TODO need escapes.
        suite = result.children[-1]
        string = pt.String(pt.zero_position_modifier, doc, (0, 0), '')
        new_line = pt.Whitespace('\n', (0, 0), '')
        docstr_node = pt.Node('simple_stmt', [string, new_line])
        suite.children.insert(2, docstr_node)
        return result
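The `# TODO need escapes` comment points at a real limitation of this naive triple-quote wrapping: if the faked object's docstring itself contains a triple quote, the generated literal is no longer valid. A small sketch of the failure mode; the repr() fallback is just one possible fix, not what jedi does.

doc = 'Returns the text between """ markers.'
naive = '"""%s"""' % doc
print(naive)        # the embedded """ terminates the literal early
print(repr(doc))    # repr() always yields a valid Python string literal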
Example #6
def call_of_leaf(leaf, cut_own_trailer=False):
    """
    Creates a "call" node that consist of all ``trailer`` and ``power``
    objects.  E.g. if you call it with ``append``::

        list([]).append(3) or None

    You would get a node with the content ``list([]).append`` back.

    This generates a copy of the original ast node.

    If you're using the leaf, e.g. the bracket `)`, it will return ``list([])``.

    # TODO remove cut_own_trailer option, since it's always used with it. Just id:80 gh:81
    #      ignore it; it's not what we want anyway. Or document it better?
    """
    trailer = leaf.parent
    # The leaf may not be the last or first child, because there exist three
    # different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples
    # we should not match anything more than x.
    if trailer.type != 'trailer' or leaf not in (trailer.children[0], trailer.children[-1]):
        if trailer.type == 'atom':
            return trailer
        return leaf

    power = trailer.parent
    index = power.children.index(trailer)
    power = deep_ast_copy(power)
    if cut_own_trailer:
        cut = index
    else:
        cut = index + 1
    power.children[cut:] = []

    if power.type == 'error_node':
        start = index
        while True:
            start -= 1
            if power.children[start].type != 'trailer':
                break
        transformed = tree.Node('power', power.children[start:])
        transformed.parent = power.parent
        return transformed

    return power
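What `cut_own_trailer` controls is simply where the copied children list is sliced: whether the trailer containing the leaf itself survives. A toy illustration with plain strings standing in for child nodes:

children = ['list', '([])', '.append', '(3)']   # children of a `power` node
index = 2                                       # position of the `.append` trailer

print(children[:index + 1])   # cut_own_trailer=False -> ['list', '([])', '.append']
print(children[:index])       # cut_own_trailer=True  -> ['list', '([])']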
Example #7
def _get_faked(module, obj, name=None):
    result, fake_module = _faked(module, obj, name)
    if result is None:
        # We're not interested in classes. What we want is functions.
        raise FakeDoesNotExist
    elif result.type == 'classdef':
        return result, fake_module
    else:
        # Set the docstr which was previously not set (faked modules don't
        # contain it).
        assert result.type == 'funcdef'
        doc = '"""%s"""' % obj.__doc__  # TODO need escapes.
        suite = result.children[-1]
        string = pt.String(doc, (0, 0), '')
        new_line = pt.Newline('\n', (0, 0))
        docstr_node = pt.Node('simple_stmt', [string, new_line])
        suite.children.insert(1, docstr_node)
        return result, fake_module
Example #8
    def convert_node(self, grammar, type, children):
        """
        Convert raw node information to a Node instance.

        This is passed to the parser driver, which calls it whenever a reduction of a
        grammar rule produces a new complete node, so that the tree is built
        strictly bottom-up.
        """
        symbol = grammar.number2symbol[type]
        try:
            return Parser.AST_MAPPING[symbol](children)
        except KeyError:
            if symbol == 'suite':
                # We don't want the INDENT/DEDENT in our parser tree. Those
                # leaves are just cancer. They are virtual leaves and not real
                # ones and therefore have pseudo start/end positions and no
                # prefixes. Just ignore them.
                children = [children[0]] + children[2:-1]
            return pt.Node(symbol, children)
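In the Python grammar a non-trivial `suite` reduces as `NEWLINE INDENT stmt+ DEDENT`, so the slice above keeps the newline and the statements while dropping the two virtual leaves. A toy illustration with placeholder strings instead of real leaf objects:

children = ['NEWLINE', 'INDENT', 'stmt1', 'stmt2', 'DEDENT']
pruned = [children[0]] + children[2:-1]
print(pruned)  # ['NEWLINE', 'stmt1', 'stmt2']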
Example #9
def call_of_leaf(leaf):
    """
    Creates a "call" node that consist of all ``trailer`` and ``power``
    objects.  E.g. if you call it with ``append``::

        list([]).append(3) or None

    You would get a node with the content ``list([]).append`` back.

    This generates a copy of the original ast node.

    If you're using the leaf, e.g. the bracket `)`, it will return ``list([])``.
    """
    # TODO this is the old version of this call. Try to remove it.
    trailer = leaf.parent
    # The leaf may not be the last or first child, because there exist three
    # different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples
    # we should not match anything more than x.
    if trailer.type != 'trailer' or leaf not in (trailer.children[0], trailer.children[-1]):
        if trailer.type == 'atom':
            return trailer
        return leaf

    power = trailer.parent
    index = power.children.index(trailer)

    new_power = copy.copy(power)
    new_power.children = list(new_power.children)
    new_power.children[index + 1:] = []

    if power.type == 'error_node':
        start = index
        while True:
            start -= 1
            if power.children[start].type != 'trailer':
                break
        transformed = tree.Node('power', power.children[start:])
        transformed.parent = power.parent
        return transformed

    return power
Example #10
    def convert_node(self, grammar, type, children):
        """
        Convert raw node information to a Node instance.

        This is passed to the parser driver, which calls it whenever a reduction of a
        grammar rule produces a new complete node, so that the tree is built
        strictly bottom-up.
        """
        symbol = grammar.number2symbol[type]
        try:
            new_node = self._ast_mapping[symbol](children)
        except KeyError:
            new_node = pt.Node(symbol, children)

        # We need to check raw_node always, because the same node can be
        # returned by convert multiple times.
        if symbol == 'global_stmt':
            self._global_names += new_node.get_global_names()
        elif isinstance(new_node, pt.Lambda):
            new_node.names_dict = self._scope_names_stack.pop()
        elif isinstance(new_node, (pt.ClassOrFunc, pt.Module)) \
                and symbol in ('funcdef', 'classdef', 'file_input'):
            # scope_name_stack handling
            scope_names = self._scope_names_stack.pop()
            if isinstance(new_node, pt.ClassOrFunc):
                n = new_node.name
                scope_names[n.value].remove(n)
                # Set the func name of the current node
                arr = self._scope_names_stack[-1].setdefault(n.value, [])
                arr.append(n)
            new_node.names_dict = scope_names
        elif isinstance(new_node, pt.CompFor):
            # The name definitions of comprehensions shouldn't be part of the
            # current scope. They are part of the comprehension scope.
            for n in new_node.get_defined_names():
                self._scope_names_stack[-1][n.value].remove(n)
        return new_node
Example #11
def _apply_decorators(evaluator, context, node):
    """
    Returns the function that should be executed in the end.
    This is also the place where the decorators are processed.
    """
    if node.type == 'classdef':
        decoratee_context = er.ClassContext(evaluator,
                                            parent_context=context,
                                            classdef=node)
    else:
        decoratee_context = er.FunctionContext(evaluator,
                                               parent_context=context,
                                               funcdef=node)
    initial = values = set([decoratee_context])
    for dec in reversed(node.get_decorators()):
        debug.dbg('decorator: %s %s', dec, values)
        dec_values = context.eval_node(dec.children[1])
        trailer_nodes = dec.children[2:-1]
        if trailer_nodes:
            # Create a trailer and evaluate it.
            trailer = tree.Node('trailer', trailer_nodes)
            trailer.parent = dec
            dec_values = evaluator.eval_trailer(context, dec_values, trailer)

        if not len(dec_values):
            debug.warning('decorator not found: %s on %s', dec, node)
            return initial

        values = unite(
            dec_value.execute(param.ValuesArguments([values]))
            for dec_value in dec_values)
        if not len(values):
            debug.warning('not possible to resolve wrappers found %s', node)
            return initial

        debug.dbg('decorator end %s', values)
    return values
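The trailer check distinguishes the two decorator shapes Python allows: a bare `@dec` (dec.children[2:-1] is empty, so the evaluated decorator is executed directly with the decoratee) and a called `@dec(arg)` (the `( arg )` part forms a trailer that has to be evaluated first). In plain Python, with nothing jedi-specific, the two shapes look like this:

def logged(func):                 # bare form: @logged
    return func

def tagged(label):                # called form: @tagged('x') returns the real wrapper
    def wrap(func):
        func.label = label
        return func
    return wrap

@logged
def f(): ...

@tagged('x')
def g(): ...

print(g.label)  # 'x'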