Example #1
    def append_comment(self, comment: Leaf) -> bool:
        """Add an inline or standalone comment to the line."""
        if (comment.type == STANDALONE_COMMENT
                and self.bracket_tracker.any_open_brackets()):
            comment.prefix = ""
            return False

        if comment.type != token.COMMENT:
            return False

        if not self.leaves:
            comment.type = STANDALONE_COMMENT
            comment.prefix = ""
            return False

        last_leaf = self.leaves[-1]
        if (last_leaf.type == token.RPAR and not last_leaf.value
                and last_leaf.parent
                and len(list(last_leaf.parent.leaves())) <= 3
                and not is_type_comment(comment)):
            # Comments on an optional parens wrapping a single leaf should belong to
            # the wrapped node except if it's a type comment. Pinning the comment like
            # this avoids unstable formatting caused by comment migration.
            if len(self.leaves) < 2:
                comment.type = STANDALONE_COMMENT
                comment.prefix = ""
                return False

            last_leaf = self.leaves[-2]
        self.comments.setdefault(id(last_leaf), []).append(comment)
        return True
Example #2
    def mark(self, leaf: Leaf) -> None:
        if leaf.type == token.COMMENT:
            return

        if leaf.type in CLOSING_BRACKETS:
            self.depth -= 1
            opening_bracket = self.bracket_match.pop((self.depth, leaf.type))
            leaf.opening_bracket = opening_bracket  # type: ignore
        leaf.bracket_depth = self.depth  # type: ignore
        if self.depth == 0:
            delim = is_delimiter(leaf)
            if delim:
                self.delimiters[id(leaf)] = delim
            elif self.previous is not None:
                if leaf.type == token.STRING and self.previous.type == token.STRING:
                    self.delimiters[id(self.previous)] = STRING_PRIORITY
                elif (leaf.type == token.NAME and leaf.value == 'for'
                      and leaf.parent and leaf.parent.type
                      in {syms.comp_for, syms.old_comp_for}):
                    self.delimiters[id(self.previous)] = COMPREHENSION_PRIORITY
                elif (leaf.type == token.NAME and leaf.value == 'if'
                      and leaf.parent and leaf.parent.type
                      in {syms.comp_if, syms.old_comp_if}):
                    self.delimiters[id(self.previous)] = COMPREHENSION_PRIORITY
        if leaf.type in OPENING_BRACKETS:
            self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf
            self.depth += 1
        self.previous = leaf
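A minimal standalone sketch (not Black's API) of the depth bookkeeping `mark()` performs above: a closing bracket decrements the depth before the leaf is marked, an opening bracket increments it afterwards.

OPENING = {"(", "[", "{"}
CLOSING = {")", "]", "}"}

def bracket_depths(tokens):
    """Return {index: depth} for a flat token stream, mimicking mark()."""
    depth = 0
    depths = {}
    for i, tok in enumerate(tokens):
        if tok in CLOSING:
            depth -= 1
        depths[i] = depth
        if tok in OPENING:
            depth += 1
    return depths

# For foo(a, [b, c]) the depths come out as:
#   foo=0  (=0  a=1  ,=1  [=1  b=2  ,=2  c=2  ]=1  )=0
print(bracket_depths(["foo", "(", "a", ",", "[", "b", ",", "c", "]", ")"]))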
Example #3
    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            append_blank_lines = (
                any_open_brackets and self.mode.keep_blank_lines_in_brackets
            )

            try:
                last_leaf: Optional[Leaf] = self.current_line.leaves[-1]
            except IndexError:
                last_leaf = None

            for comment in generate_comments(node):
                if (
                    append_blank_lines
                    and last_leaf
                    and last_leaf.type not in OPENING_BRACKETS
                ):
                    newlines = comment.prefix.count("\n")
                    if last_leaf and last_leaf.type in COMMENTS:
                        newlines += 1
                    if newlines > 1:
                        self.current_line.append(Leaf(STANDALONE_COMMENT, ""))

                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                    last_leaf = comment
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            if (
                append_blank_lines
                and last_leaf
                and last_leaf.type not in OPENING_BRACKETS
                and node.type not in CLOSING_BRACKETS
                and node.prefix.split("#")[-1].count("\n") > 1
            ):
                self.current_line.append(Leaf(STANDALONE_COMMENT, ""))

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(node.value)
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)
Example #4
def normalize_prefix(leaf: Leaf) -> None:
    """Leave existing extra newlines for imports.  Remove everything else."""
    if is_import(leaf):
        spl = leaf.prefix.split('#', 1)
        nl_count = spl[0].count('\n')
        leaf.prefix = '\n' * nl_count
        return

    leaf.prefix = ''
Example #5
def normalize_invisible_parens(node: Node, parens_after: Set[str], *,
                               preview: bool) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child,
                                       parens_after=parens_after,
                                       preview=preview)

        # Add parentheses around long tuple unpacking in assignments.
        if (index == 0 and isinstance(child, Node)
                and child.type == syms.testlist_star_expr):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(
                        child,
                        parent=node,
                        preview=preview,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if is_lpar_token(child):
                    assert is_rpar_token(node.children[-1])
                    # make parentheses invisible
                    child.value = ""
                    node.children[-1].value = ""
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after
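Roughly what this normalization means for user code (the exact output depends on the Black version and mode): redundant parentheses become invisible, while single-element tuples keep visible ones.

# Before
def f(x):
    return (x)
t = 1,

# After
def f(x):
    return x
t = (1,)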
Example #6
def ensure_visible(leaf: Leaf) -> None:
    """Make sure parentheses are visible.

    They could be invisible as part of some statements (see
    :func:`normalize_invisible_parens` and :func:`visit_import_from`).
    """
    if leaf.type == token.LPAR:
        leaf.value = "("
    elif leaf.type == token.RPAR:
        leaf.value = ")"
Example #7
def normalize_prefix(leaf: Leaf) -> None:
    """Leave existing extra newlines for imports.  Remove everything else."""
    if is_import(leaf):
        spl = leaf.prefix.split('#', 1)
        nl_count = spl[0].count('\n')
        if len(spl) > 1:
            # Skip one newline since it was for a standalone comment.
            nl_count -= 1
        leaf.prefix = '\n' * nl_count
        return

    leaf.prefix = ''
Example #8
    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (operand.type == syms.power and len(operand.children) == 3
                and operand.children[1].type == token.DOUBLESTAR):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)
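Why the forced parentheses are sound: in Python, `**` binds tighter than unary minus, so the rewrite does not change the value; it only makes the existing grouping explicit.

assert -2 ** 8 == -(2 ** 8) == -256
assert (-2) ** 8 == 256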
Example #9
    def mark(self, leaf: Leaf) -> None:
        """Mark `leaf` with bracket-related metadata. Keep track of delimiters.

        All leaves receive an int `bracket_depth` field that stores how deep
        within brackets a given leaf is. 0 means there are no enclosing brackets
        that started on this line.

        If a leaf is itself a closing bracket, it receives an `opening_bracket`
        field that it forms a pair with. This is a one-directional link to
        avoid reference cycles.

        If a leaf is a delimiter (a token on which Black can split the line if
        needed) and it's on depth 0, its `id()` is stored in the tracker's
        `delimiters` field.
        """
        if leaf.type == token.COMMENT:
            return

        self.maybe_decrement_after_for_loop_variable(leaf)
        self.maybe_decrement_after_lambda_arguments(leaf)
        if leaf.type in CLOSING_BRACKETS:
            self.depth -= 1
            try:
                opening_bracket = self.bracket_match.pop((self.depth, leaf.type))
            except KeyError as e:
                raise BracketMatchError(
                    "Unable to match a closing bracket to the following opening"
                    f" bracket: {leaf}"
                ) from e
            leaf.opening_bracket = opening_bracket
            if not leaf.value:
                self.invisible.append(leaf)
        leaf.bracket_depth = self.depth
        if self.depth == 0:
            delim = is_split_before_delimiter(leaf, self.previous)
            if delim and self.previous is not None:
                self.delimiters[id(self.previous)] = delim
            else:
                delim = is_split_after_delimiter(leaf, self.previous)
                if delim:
                    self.delimiters[id(leaf)] = delim
        if leaf.type in OPENING_BRACKETS:
            self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf
            self.depth += 1
            if not leaf.value:
                self.invisible.append(leaf)
        self.previous = leaf
        self.maybe_increment_lambda_arguments(leaf)
        self.maybe_increment_for_loop_variable(leaf)
Example #10
def generate_ignored_nodes(leaf: Leaf, comment: ProtoComment) -> Iterator[LN]:
    """Starting from the container of `leaf`, generate all leaves until `# fmt: on`.

    If comment is skip, returns leaf only.
    Stops at the end of the block.
    """
    container: Optional[LN] = container_of(leaf)
    if comment.value in FMT_SKIP:
        prev_sibling = leaf.prev_sibling
        if comment.value in leaf.prefix and prev_sibling is not None:
            leaf.prefix = leaf.prefix.replace(comment.value, "")
            siblings = [prev_sibling]
            while ("\n" not in prev_sibling.prefix
                   and prev_sibling.prev_sibling is not None):
                prev_sibling = prev_sibling.prev_sibling
                siblings.insert(0, prev_sibling)
            for sibling in siblings:
                yield sibling
        elif leaf.parent is not None:
            yield leaf.parent
        return
    while container is not None and container.type != token.ENDMARKER:
        if is_fmt_on(container):
            return

        # fix for fmt: on in children
        if contains_fmt_on_at_column(container, leaf.column):
            for child in container.children:
                if contains_fmt_on_at_column(child, leaf.column):
                    return
                yield child
        else:
            yield container
            container = container.next_sibling
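For reference, the user-facing pragmas this generator deals with, as documented by Black: `# fmt: off` / `# fmt: on` protect a region, `# fmt: skip` protects a single line.

# fmt: off
custom_matrix = [
    1, 0,
    0, 1,
]
# fmt: on

legacy_layout = {"a": 1,    "b":    2}  # fmt: skip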
Example #11
def wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None:
    """Wrap `child` in parentheses.

    This replaces `child` with an atom holding the parentheses and the old
    child.  That requires moving the prefix.

    If `visible` is False, the leaves will be valueless (and thus invisible).
    """
    lpar = Leaf(token.LPAR, "(" if visible else "")
    rpar = Leaf(token.RPAR, ")" if visible else "")
    prefix = child.prefix
    child.prefix = ""
    index = child.remove() or 0
    new_child = Node(syms.atom, [lpar, child, rpar])
    new_child.prefix = prefix
    parent.insert_child(index, new_child)
Example #12
    def maybe_adapt_standalone_comment(self, comment: Leaf) -> bool:
        """Hack a standalone comment to act as a trailing comment for line splitting.

        If this line has brackets and a standalone `comment`, we need to adapt
        it to be able to still reformat the line.

        This is not perfect, the line to which the standalone comment gets
        appended will appear "too long" when splitting.
        """
        if not (comment.type == STANDALONE_COMMENT
                and self.bracket_tracker.any_open_brackets()):
            return False

        comment.type = token.COMMENT
        comment.prefix = '\n' + '    ' * (self.depth + 1)
        return self.append_comment(comment)
Example #13
def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""
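An illustrative, pure-string restatement of the newline counting in the not-`inside_brackets` branch above; no lib2to3 objects are involved.

def count_kept_newlines(prefix: str) -> int:
    """Sketch of the counting only: how many newlines survive in the prefix."""
    spl = prefix.split("#")
    if "\\" in spl[0]:
        return 0
    nl_count = spl[-1].count("\n")
    if len(spl) > 1:
        nl_count -= 1  # one newline belonged to the standalone comment itself
    return max(nl_count, 0)

assert count_kept_newlines("\n\n\n") == 3        # blank lines are preserved
assert count_kept_newlines("\n\n# note\n") == 0  # the comment's own newline is dropped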
Example #14
def convert_one_fmt_off_pair(node: Node) -> bool:
    """Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment.

    Returns True if a pair was converted.
    """
    for leaf in node.leaves():
        previous_consumed = 0
        for comment in list_comments(leaf.prefix, is_endmarker=False):
            if comment.value not in FMT_PASS:
                previous_consumed = comment.consumed
                continue
            # We only want standalone comments. If there's no previous leaf or
            # the previous leaf is indentation, it's a standalone comment in
            # disguise.
            if comment.value in FMT_PASS and comment.type != STANDALONE_COMMENT:
                prev = preceding_leaf(leaf)
                if prev:
                    if comment.value in FMT_OFF and prev.type not in WHITESPACE:
                        continue
                    if comment.value in FMT_SKIP and prev.type in WHITESPACE:
                        continue

            ignored_nodes = list(generate_ignored_nodes(leaf, comment))
            if not ignored_nodes:
                continue

            first = ignored_nodes[
                0]  # Can be a container node with the `leaf`.
            parent = first.parent
            prefix = first.prefix
            first.prefix = prefix[comment.consumed:]
            hidden_value = "".join(str(n) for n in ignored_nodes)
            if comment.value in FMT_OFF:
                hidden_value = comment.value + "\n" + hidden_value
            if comment.value in FMT_SKIP:
                hidden_value += "  " + comment.value
            if hidden_value.endswith("\n"):
                # That happens when one of the `ignored_nodes` ended with a NEWLINE
                # leaf (possibly followed by a DEDENT).
                hidden_value = hidden_value[:-1]
            first_idx: Optional[int] = None
            for ignored in ignored_nodes:
                index = ignored.remove()
                if first_idx is None:
                    first_idx = index
            assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)"
            assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)"
            parent.insert_child(
                first_idx,
                Leaf(
                    STANDALONE_COMMENT,
                    hidden_value,
                    prefix=prefix[:previous_consumed] +
                    "\n" * comment.newlines,
                ),
            )
            return True

    return False
Example #15
    def append_comment(self, comment: Leaf) -> bool:
        if comment.type != token.COMMENT:
            return False

        try:
            after = id(self.last_non_delimiter())
        except LookupError:
            comment.type = STANDALONE_COMMENT
            comment.prefix = ''
            return False

        else:
            if after in self.comments:
                self.comments[after].value += str(comment)
            else:
                self.comments[after] = comment
            return True
Example #16
def generate_comments(leaf: Leaf) -> Iterator[Leaf]:
    """Cleans the prefix of the `leaf` and generates comments from it, if any.

    Comments in lib2to3 are shoved into the whitespace prefix.  This happens
    in `pgen2/driver.py:Driver.parse_tokens()`.  This was a brilliant implementation
    move because it does away with modifying the grammar to include all the
    possible places in which comments can be placed.

    The sad consequence for us though is that comments don't "belong" anywhere.
    This is why this function generates simple parentless Leaf objects for
    comments.  We simply don't know what the correct parent should be.

    No matter though, we can live without this.  We really only need to
    differentiate between inline and standalone comments.  The latter don't
    share the line with any code.

    Inline comments are emitted as regular token.COMMENT leaves.  Standalone
    are emitted with a fake STANDALONE_COMMENT token identifier.
    """
    if not leaf.prefix:
        return

    if '#' not in leaf.prefix:
        return

    before_comment, content = leaf.prefix.split('#', 1)
    content = content.rstrip()
    if content and (content[0] not in {' ', '!', '#'}):
        content = ' ' + content
    is_standalone_comment = (
        '\n' in before_comment or '\n' in content or leaf.type == token.ENDMARKER
    )
    if not is_standalone_comment:
        # simple trailing comment
        yield Leaf(token.COMMENT, value='#' + content)
        return

    for line in ('#' + content).split('\n'):
        line = line.lstrip()
        if not line.startswith('#'):
            continue

        yield Leaf(STANDALONE_COMMENT, line)
Example #17
def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there's some false negatives but they are in
                # contexts where a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result
Example #18
def is_stub_body(node: LN) -> bool:
    """Return True if `node` is a simple statement containing an ellipsis."""
    if not isinstance(node, Node) or node.type != syms.simple_stmt:
        return False

    if len(node.children) != 2:
        return False

    child = node.children[0]
    return (child.type == syms.atom and len(child.children) == 3
            and all(leaf == Leaf(token.DOT, ".") for leaf in child.children))
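As user code, the shape this predicate recognizes is a body that is nothing but an ellipsis, e.g.:

class FrobnicatorProtocol:
    def frobnicate(self) -> None:
        ...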
Example #19
def delimiter_split(line: Line, py36: bool = False) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    This kind of split doesn't increase indentation.
    If `py36` is True, the split will add trailing commas also in function
    signatures that contain * and **.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    delimiters = line.bracket_tracker.delimiters
    try:
        delimiter_priority = line.bracket_tracker.max_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    lowest_depth = sys.maxsize
    trailing_comma_safe = True
    for leaf in line.leaves:
        current_line.append(leaf, preformatted=True)
        comment_after = line.comments.get(id(leaf))
        if comment_after:
            current_line.append(comment_after, preformatted=True)
        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if (
            leaf.bracket_depth == lowest_depth
            and leaf.type == token.STAR
            or leaf.type == token.DOUBLESTAR
        ):
            trailing_comma_safe = trailing_comma_safe and py36
        leaf_priority = delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            normalize_prefix(current_line.leaves[0])
            yield current_line

            current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    if current_line:
        if (
            delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and trailing_comma_safe
        ):
            current_line.append(Leaf(token.COMMA, ','))
        normalize_prefix(current_line.leaves[0])
        yield current_line
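The `py36` guard exists because a trailing comma after `*` or `**` in a function signature only became legal in Python 3.6; for example:

# Valid on Python 3.6+ only; a SyntaxError on 3.5 and earlier:
def frobnicate(
    *args,
    **kwargs,
):
    ...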
Example #20
def normalize_numeric_literal(leaf: Leaf) -> None:
    """Normalizes numeric (float, int, and complex) literals.

    All letters used in the representation are normalized to lowercase."""
    text = leaf.value.lower()
    if text.startswith(("0o", "0b")):
        # Leave octal and binary literals alone.
        pass
    elif text.startswith("0x"):
        text = format_hex(text)
    elif "e" in text:
        text = format_scientific_notation(text)
    elif text.endswith("j"):
        text = format_complex_number(text)
    else:
        text = format_float_or_int_string(text)
    leaf.value = text
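A standalone illustration of only the lowercasing step above; the `format_*` helpers, which are not shown in this example, may adjust the digits further.

for literal in ["0XFF", "10E3", "10J", "0O17", "0B101"]:
    print(literal, "->", literal.lower())
# 0XFF -> 0xff, 10E3 -> 10e3, 10J -> 10j, 0O17 -> 0o17, 0B101 -> 0b101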
Example #21
    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix):]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent_style = " " * 4 if not self.mode.use_tabs else "\t"
                indent = indent_style * self.current_line.depth
                docstring = fix_docstring(docstring, indent,
                                          not self.mode.use_tabs)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(
                        docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)
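The padding branch exists because a docstring whose text ends with the quote character would otherwise run into the closing delimiter; illustratively:

ok = """He said "stop" """    # the padding space keeps the delimiters apart
# bad = """He said "stop""""  # without it: unterminated string, a SyntaxError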
Example #22
def generate_ignored_nodes(leaf: Leaf, comment: ProtoComment, *,
                           preview: bool) -> Iterator[LN]:
    """Starting from the container of `leaf`, generate all leaves until `# fmt: on`.

    If comment is skip, returns leaf only.
    Stops at the end of the block.
    """
    container: Optional[LN] = container_of(leaf)
    if comment.value in FMT_SKIP:
        prev_sibling = leaf.prev_sibling
        # Need to properly format the leaf prefix to compare it to comment.value,
        # which is also formatted
        comments = list_comments(leaf.prefix,
                                 is_endmarker=False,
                                 preview=preview)
        if comments and comment.value == comments[
                0].value and prev_sibling is not None:
            leaf.prefix = ""
            siblings = [prev_sibling]
            while ("\n" not in prev_sibling.prefix
                   and prev_sibling.prev_sibling is not None):
                prev_sibling = prev_sibling.prev_sibling
                siblings.insert(0, prev_sibling)
            for sibling in siblings:
                yield sibling
        elif leaf.parent is not None:
            yield leaf.parent
        return
    while container is not None and container.type != token.ENDMARKER:
        if is_fmt_on(container, preview=preview):
            return

        # fix for fmt: on in children
        if contains_fmt_on_at_column(container, leaf.column, preview=preview):
            for child in container.children:
                if contains_fmt_on_at_column(child,
                                             leaf.column,
                                             preview=preview):
                    return
                yield child
        else:
            yield container
            container = container.next_sibling
Example #23
def generate_comments(leaf: Leaf) -> Iterator[Leaf]:
    """Cleans the prefix of the `leaf` and generates comments from it, if any.

    Comments in lib2to3 are shoved into the whitespace prefix.  This happens
    in `pgen2/driver.py:Driver.parse_tokens()`.  This was a brilliant implementation
    move because it does away with modifying the grammar to include all the
    possible places in which comments can be placed.

    The sad consequence for us though is that comments don't "belong" anywhere.
    This is why this function generates simple parentless Leaf objects for
    comments.  We simply don't know what the correct parent should be.

    No matter though, we can live without this.  We really only need to
    differentiate between inline and standalone comments.  The latter don't
    share the line with any code.

    Inline comments are emitted as regular token.COMMENT leaves.  Standalone
    are emitted with a fake STANDALONE_COMMENT token identifier.
    """
    p = leaf.prefix
    if not p:
        return

    if '#' not in p:
        return

    nlines = 0
    for index, line in enumerate(p.split('\n')):
        line = line.lstrip()
        if not line:
            nlines += 1
        if not line.startswith('#'):
            continue

        if index == 0 and leaf.type != token.ENDMARKER:
            comment_type = token.COMMENT  # simple trailing comment
        else:
            comment_type = STANDALONE_COMMENT
        yield Leaf(comment_type, make_comment(line), prefix='\n' * nlines)

        nlines = 0
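A standalone sketch (not Black's API) of the classification the loop above performs: a comment on the prefix's first line trailed some code, every later one is standalone.

def classify_prefix_comments(prefix: str, at_endmarker: bool = False):
    """Decide trailing vs. standalone purely by line index within the prefix."""
    found = []
    for index, line in enumerate(prefix.split("\n")):
        line = line.lstrip()
        if not line.startswith("#"):
            continue
        kind = "trailing" if index == 0 and not at_endmarker else "standalone"
        found.append((kind, line))
    return found

# A prefix like "  # note\n# other\n" holds one comment that shared a line with
# code and one that did not:
print(classify_prefix_comments("  # note\n# other\n"))
# [('trailing', '# note'), ('standalone', '# other')]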
Example #24
def bracket_split_build_line(leaves: List[Leaf],
                             original: Line,
                             opening_bracket: Leaf,
                             *,
                             is_body: bool = False) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (original.is_def and opening_bracket.value == "("
                         and not any(leaf.type == token.COMMA
                                     for leaf in leaves))

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA and not original.is_import:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result
Example #25
def append_leaves(
    new_line: Line, old_line: Line, leaves: List[Leaf], preformatted: bool = False
) -> None:
    """
    Append leaves (taken from @old_line) to @new_line, making sure to fix the
    underlying Node structure where appropriate.

    All of the leaves in @leaves are duplicated. The duplicates are then
    appended to @new_line and used to replace their originals in the underlying
    Node structure. Any comments attached to the old leaves are reattached to
    the new leaves.

    Pre-conditions:
        set(@leaves) is a subset of set(@old_line.leaves).
    """
    for old_leaf in leaves:
        new_leaf = Leaf(old_leaf.type, old_leaf.value)
        replace_child(old_leaf, new_leaf)
        new_line.append(new_leaf, preformatted=preformatted)

        for comment_leaf in old_line.comments_after(old_leaf):
            new_line.append(comment_leaf, preformatted=True)
Example #26
def generate_comments(leaf: LN) -> Iterator[Leaf]:
    """Clean the prefix of the `leaf` and generate comments from it, if any.

    Comments in lib2to3 are shoved into the whitespace prefix.  This happens
    in `pgen2/driver.py:Driver.parse_tokens()`.  This was a brilliant implementation
    move because it does away with modifying the grammar to include all the
    possible places in which comments can be placed.

    The sad consequence for us though is that comments don't "belong" anywhere.
    This is why this function generates simple parentless Leaf objects for
    comments.  We simply don't know what the correct parent should be.

    No matter though, we can live without this.  We really only need to
    differentiate between inline and standalone comments.  The latter don't
    share the line with any code.

    Inline comments are emitted as regular token.COMMENT leaves.  Standalone
    are emitted with a fake STANDALONE_COMMENT token identifier.
    """
    for pc in list_comments(leaf.prefix,
                            is_endmarker=leaf.type == token.ENDMARKER):
        yield Leaf(pc.type, pc.value, prefix="\n" * pc.newlines)
Example #27
def delimiter_split(line: Line) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    This kind of split doesn't increase indentation.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    delimiters = line.bracket_tracker.delimiters
    try:
        delimiter_priority = line.bracket_tracker.max_priority(
            exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    current_line = Line(depth=line.depth, inside_brackets=line.inside_brackets)
    for leaf in line.leaves:
        current_line.append(leaf, preformatted=True)
        comment_after = line.comments.get(id(leaf))
        if comment_after:
            current_line.append(comment_after, preformatted=True)
        leaf_priority = delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            normalize_prefix(current_line.leaves[0])
            yield current_line

            current_line = Line(depth=line.depth,
                                inside_brackets=line.inside_brackets)
    if current_line:
        if (delimiter_priority == COMMA_PRIORITY
                and current_line.leaves[-1].type != token.COMMA):
            current_line.append(Leaf(token.COMMA, ','))
        normalize_prefix(current_line.leaves[0])
        yield current_line
Example #28
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line
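Roughly the user-visible effect of a comma-priority split once a call no longer fits on one line (the exact output shape depends on the configured line length and mode):

def some_function(a, b, c, d):
    return [a, b, c, d]

# Before (assuming the call is over the line length):
result = some_function("argument_one", "argument_two", "argument_three", "argument_four")

# After a comma-priority split: one element per line, plus a trailing comma.
result = some_function(
    "argument_one",
    "argument_two",
    "argument_three",
    "argument_four",
)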
Example #29
    def is_stub_class(self) -> bool:
        """Is this line a class definition with a body consisting only of "..."?"""
        return self.is_class and self.leaves[-3:] == [
            Leaf(token.DOT, ".") for _ in range(3)
        ]
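And, roughly, the user-level shape this predicate recognizes: a class definition whose entire body is an ellipsis on the same line, e.g.:

class Frobnicator: ...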