Code example #1
File: constants.py  Project: aroberge/ideas
def transform_source(source, filename=None, **kwargs):
    """Identifies simple assignments with a Final type hint, returning
    the source unchanged.

    The pattern we are looking for is::

        |python_identifier : Final ...

    where ``|`` indicates the beginning of a line.
    """
    if filename not in CONSTANTS:
        CONSTANTS[filename] = {}
    if filename not in DECLARED_FINAL:
        DECLARED_FINAL[filename] = set([])

    for tokens in token_utils.get_lines(source):
        # a line of tokens can start with DEDENT tokens ...
        if token_utils.get_number(tokens) > 3:
            index = token_utils.get_first_index(tokens)
            first_token = tokens[index]
            if (first_token.start_col == 0 and first_token.is_identifier()
                    and tokens[index + 1] == ":"
                    and tokens[index + 2] == "Final"):

                DECLARED_FINAL[filename].add(first_token.string)
    return source
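
A minimal usage sketch (not part of the original file; it assumes token_utils is importable and that CONSTANTS and DECLARED_FINAL are the module-level dicts implied by the code above; MAX_SIZE is a made-up name for illustration):

CONSTANTS = {}
DECLARED_FINAL = {}

source = "MAX_SIZE : Final = 100\n"
new_source = transform_source(source, filename="demo")
print(DECLARED_FINAL["demo"])   # {'MAX_SIZE'}
print(new_source == source)     # True: the source itself is not modified
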
Code example #2
def automatic_self(source):
    """Replaces code like::

        self .= :
            a
            b
        c = this if __ == that else __

    by::

        self.a = a
        self.b = b
        self.c = this if c == that else c
    """
    new_tokens = []
    auto_self_block = False
    self_name = ""
    indentation = 0

    get_nb = token_utils.get_number
    get_first = token_utils.get_first
    get_first_index = token_utils.get_first_index

    for tokens in token_utils.get_lines(source):
        if auto_self_block:
            variable = get_first(tokens)
            if variable is not None:  # None would mean an empty line
                var_name = variable.string
                block_indent = variable.start_col
                if block_indent > indentation:
                    dedent = block_indent - indentation
                    if get_nb(tokens) == 1:
                        variable.string = f"{self_name}.{var_name} = {var_name}"
                        tokens = token_utils.dedent(tokens, dedent)
                    else:
                        variable.string = f"{self_name}.{var_name}"
                        for token in tokens:
                            if token.string == "__":
                                token.string = var_name
                        tokens = token_utils.dedent(tokens, dedent)
                else:
                    auto_self_block = False
        elif get_nb(tokens) == 4:
            index = get_first_index(tokens)
            if (tokens[index].is_identifier() and tokens[index + 1] == "."
                    and tokens[index + 2] == "=" and tokens[index + 1].end_col
                    == tokens[index + 2].start_col
                    and tokens[index + 3] == ":"):
                self_name = tokens[index].string
                indentation = tokens[index].start_col
                auto_self_block = True
                continue
        new_tokens.extend(tokens)
    return token_utils.untokenize(new_tokens)
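
A usage sketch (not part of the original example); it simply calls the function defined above on a small class body:

demo = '''class Point:
    def __init__(self, x, y):
        self .= :
            x
            y
'''
print(automatic_self(demo))
# Expected output, roughly:
#   class Point:
#       def __init__(self, x, y):
#           self.x = x
#           self.y = y
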
Code example #3
File: repeat.py  Project: aroberge/ideas
def convert_repeat(source, predictable_names=False):
    """Replaces instances of::

        repeat forever: -> while True:
        repeat while condition: -> while  condition:
        repeat until condition: -> while not condition:
        repeat n: -> for _uid in range(n):

    A complete repeat statement must fit on a single line ending with a
    colon (optionally followed by a comment). If the colon is missing,
    a ``RepeatSyntaxError`` is raised.
    """

    new_tokens = []
    if predictable_names:
        variable_name = utils.generate_predictable_names()
    else:
        variable_name = utils.generate_variable_names()

    for tokens in token_utils.get_lines(source):
        # a line of tokens can start with INDENT or DEDENT tokens ...
        first_token = token_utils.get_first(tokens)
        if first_token == "repeat":
            last_token = token_utils.get_last(tokens)
            if last_token != ":":
                raise RepeatSyntaxError(
                    "Missing colon for repeat statement on line " +
                    f"{first_token.start_row}\n    {first_token.line}.")

            repeat_index = token_utils.get_first_index(tokens)
            second_token = tokens[repeat_index + 1]
            if second_token == "forever":
                first_token.string = "while"
                second_token.string = "True"
            elif second_token == "while":
                first_token.string = "while"
                second_token.string = ""
            elif second_token == "until":
                first_token.string = "while"
                second_token.string = "not"
            else:
                first_token.string = "for %s in range(" % next(variable_name)
                last_token.string = "):"

        new_tokens.extend(tokens)

    return token_utils.untokenize(new_tokens)
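
A usage sketch (it assumes the surrounding module, so that token_utils, utils and RepeatSyntaxError are available as in the function above):

demo = """repeat 3:
    pass
repeat until done:
    pass
"""
print(convert_repeat(demo, predictable_names=True))
# Roughly (the exact loop variable comes from utils.generate_predictable_names()):
#   for <name> in range( 3):
#       pass
#   while not done:
#       pass
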
Code example #4
def transform_source(source, **kwargs):
    """Does the following transformation::

        with float_as_Decimal:
            a = 1.0
            b = 2.0
        c = 3.0

    to::

        if True: # with float_as_Decimal:
            a = Decimal('1.0')
            b = Decimal('2.0')
        c = 3.0
    """

    new_tokens = []
    decimal_block = False

    for line in token_utils.get_lines(source):
        first = token_utils.get_first(line)
        if first is None:
            new_tokens.extend(line)
            continue
        elif first == "with" :
            first_index = token_utils.get_first_index(line)
            if len(line) > first_index + 1:
                second = line[first_index + 1]
                if second == "float_as_Decimal":
                    first.string = "if"
                    second.string = "True"
                    indentation = first.start_col
                    decimal_block = True
        elif decimal_block and first.start_col > indentation:
            for token in line:
                if token.is_number() and "." in token.string:
                    token.string = f"Decimal('{token.string}')"
        else:
            indentation = first.start_col

        new_tokens.extend(line)

    return token_utils.untokenize(new_tokens)
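
A usage sketch (not part of the original file); note that the transformed code would need ``from decimal import Decimal`` when it is eventually executed:

demo = """with float_as_Decimal:
    a = 1.5
b = 2.5
"""
print(transform_source(demo))
# Roughly:
#   if True:
#       a = Decimal('1.5')
#   b = 2.5
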
Code example #5
File: nobreak.py  Project: aroberge/ideas
def nobreak_as_a_keyword(source):
    """``nobreak`` is replaced by ``else`` only if it is the first
    non-space token on a line and if its indentation matches
    that of a ``for`` or ``while`` block.
    """
    indentations = {}
    lines = token_utils.get_lines(source)
    new_tokens = []
    for line in lines:
        first = token_utils.get_first(line)
        if first is None:
            new_tokens.extend(line)
            continue
        if first == "nobreak":
            if first.start_col in indentations:
                if indentations[first.start_col] in ["for", "while"]:
                    first.string = "else"
        indentations[first.start_col] = first.string
        new_tokens.extend(line)

    return token_utils.untokenize(new_tokens)
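
A usage sketch (not part of the original file):

demo = """for i in range(3):
    print(i)
nobreak:
    print("no break encountered")
"""
print(nobreak_as_a_keyword(demo))
# Roughly:
#   for i in range(3):
#       print(i)
#   else:
#       print("no break encountered")
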
Code example #6
def check_lines(source):
    lines = token_utils.get_lines(source)
    tokens = []
    for line in lines:
        tokens.extend(line)
    assert source == token_utils.untokenize(tokens)
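
check_lines exercises the round-trip property of token_utils: splitting a source into lines of tokens and untokenizing them reproduces the source exactly. A quick sketch:

check_lines("if True:\n    x = 1  # comment\n")
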
Code example #7
    # Fragment continued from a preceding test function that is not shown in
    # this listing; ``check``, ``source`` and ``source2`` are defined earlier
    # in the original test file.
    check(source)
    check(source2)

    check_lines(source)
    check_lines(source2)


source1 = "a = b"
source2 = "a = b # comment\n"
source3 = """
if True:
    a = b # comment
"""
tokens1 = token_utils.tokenize(source1)
tokens2 = token_utils.tokenize(source2)
lines3 = token_utils.get_lines(source3)


def test_first():
    assert token_utils.get_first(tokens1) == token_utils.get_first(tokens2)
    assert token_utils.get_first(tokens1) == "a"
    assert token_utils.get_first(tokens2, exclude_comment=False) == "a"
    assert token_utils.get_first_index(tokens1) == 0

    assert token_utils.get_first(lines3[2]) == "a"
    assert token_utils.get_first_index(lines3[2]) == 1


def test_last():
    assert token_utils.get_last(tokens1) == token_utils.get_last(tokens2)
    assert token_utils.get_last(tokens1) == "b"
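
In test_first, get_first_index(lines3[2]) is 1 rather than 0 because the third line of source3 ("    a = b # comment") begins with an INDENT token; get_first skips such space-like tokens, but they still occupy position 0 in the line's token list. A quick way to inspect this (a sketch):

for token in lines3[2]:
    print(token)   # the first entry printed is the INDENT token
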
Code example #8
File: switch.py  Project: aroberge/ideas
def convert_switch(source, predictable_names=False):
    """Replaces code like::

        switch EXPR:
            case EXPR_1:
                SUITE
            case EXPR_2:
                SUITE
            case in EXPR_3, EXPR_4, ...:
                SUITE
            ...
            else:
                SUITE

    by::

        var_name = EXPR
        if var_name == EXPR_1:
                SUITE
        elif var_name == EXPR_2:
                SUITE
        elif var_name in EXPR_3, EXPR_4, ...:
                SUITE
        else:
                SUITE
        del var_name

    Limitation: switch blocks cannot be nested inside the SUITE of another
    switch block.
    """
    new_tokens = []
    switch_block = False
    first_case = False
    if predictable_names:
        variable_name = utils.generate_predictable_names()
    else:
        variable_name = utils.generate_variable_names()

    for line in token_utils.get_lines(source):
        first_token = token_utils.get_first(line)
        if first_token is None:
            new_tokens.extend(line)
            continue

        if len(line) > 1:
            _index = token_utils.get_first_index(line)
            second_token = line[_index + 1]
        else:
            second_token = None

        if not switch_block:
            if first_token == "switch":
                switch_indent = first_token.start_col
                var_name = next(variable_name)
                first_token.string = f"{var_name} ="
                switch_block = True
                first_case = True
                colon = token_utils.get_last(line)
                colon.string = ""
        else:
            if first_token.start_col == switch_indent:
                switch_block = False
                new_tokens.extend([" " * switch_indent + f"del {var_name}\n"])

            elif first_token == "case" or first_token == "else":
                if first_case and first_token == "case":
                    if second_token == "in":
                        first_token.string = f"if {var_name}"
                    else:
                        first_token.string = f"if {var_name} =="
                    first_case = False
                elif first_token == "case":
                    if second_token == "in":
                        first_token.string = f"elif {var_name}"
                    else:
                        first_token.string = f"elif {var_name} =="
                dedent = first_token.start_col - switch_indent
                line = token_utils.dedent(line, dedent)

        new_tokens.extend(line)
    return token_utils.untokenize(new_tokens)
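
A usage sketch (it assumes the surrounding module, so that token_utils and utils are available as in the function above):

demo = """switch colour:
    case "red":
        stop()
    case in ("amber", "yellow"):
        slow()
    else:
        go()
print("ready")
"""
print(convert_switch(demo, predictable_names=True))
# Roughly (the variable name comes from utils.generate_predictable_names();
# each SUITE keeps its original, deeper indentation, which is still valid Python):
#   <name> = colour
#   if <name> == "red":
#           stop()
#   elif <name> in ("amber", "yellow"):
#           slow()
#   else:
#           go()
#   del <name>
#   print("ready")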