Example #1
def controller_name(cls) -> str:
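    """Returns the snake-cased class name with any configured suffix stripped."""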
    name = cls.__name__
    for suffix in getattr(cls, REMOVE_SUFFIXES_ATTR):
        if name.endswith(suffix):
            name = right_replace(name, suffix, '')
            break
    return snake_case(name)
Example #2
def test_right_replace():
    assert right_replace('aabb', 'b', 'c') == 'aabc'
    assert right_replace('aabbcc', 'b', 'd') == 'aabdcc'
    assert right_replace('aabaaa', 'a', 'c', count=2) == 'aabacc'
    assert right_replace('aabaaa', 'a', 'c', count=3) == 'aabccc'
    assert right_replace('aabaaa', 'a', 'c', count=4) == 'acbccc'
    assert right_replace('abc', 'd', 'e') == 'abc'
    assert right_replace('', 'a', 'b') == ''
    assert right_replace(None, 'a', 'b') is None
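The tests above pin down right_replace's contract: replace the right-most count occurrences of old (one by default), return the string unchanged when old does not occur, and pass empty strings and None through untouched. A minimal sketch that satisfies these tests (the actual upstream implementation may differ) is:

def right_replace(string, old, new, count=1):
    # Falsy inputs ('' and None) pass through unchanged.
    if not string:
        return string
    # rsplit splits off at most `count` occurrences from the right;
    # re-joining with `new` replaces exactly those occurrences.
    return new.join(string.rsplit(old, count))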
Example #3
def controller_name(cls, _remove_suffixes=None) -> str:
    """
    Returns the snake-cased name for a controller/resource class. Automatically
    strips ``Controller``, ``View``, and ``MethodView`` suffixes, e.g.::

        SiteController -> site
        FooBarBazView -> foo_bar_baz
        UsersMethodView -> users
    """
    name = cls if isinstance(cls, str) else cls.__name__
    remove_suffixes = _remove_suffixes or getattr(cls, REMOVE_SUFFIXES_ATTR)
    for suffix in remove_suffixes:
        if name.endswith(suffix):
            name = right_replace(name, suffix, '')
            break
    return snake_case(name)
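Both variants of controller_name delegate the CamelCase-to-snake_case conversion to a snake_case helper that is not shown here. A minimal sketch consistent with the docstring's examples (purely an assumption about that helper) is:

import re

def snake_case(name):
    # Hypothetical sketch; the real helper may cover more edge cases.
    if not name:
        return name
    # Insert underscores at CamelCase word boundaries, then lowercase.
    name = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', name).lower()

Note that because the suffix loop breaks after the first match, ``UsersMethodView -> users`` only holds if the sequence behind REMOVE_SUFFIXES_ATTR lists 'MethodView' before 'View'.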
Example #4
def _process_tokens(lines: List[str],
                    token: Token,
                    *,
                    is_jinja: bool = False,
                    _depth: int = 0,
                    _real_start_i: int = 0):
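    """
    Recursively parses ``lines`` for if/elif/else/endif blocks, using Jinja-style
    markers when ``is_jinja`` is True and the alternative markers otherwise.
    Plain lines and inline statements are appended to ``token.tokens``, while
    elif/else branches are chained via ``token.next``. ``_depth`` and
    ``_real_start_i`` are internal recursion bookkeeping. Returns a tuple of
    ``(token, absolute_index_of_last_processed_line)``.
    """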
    start_str = JINJA_START_STR if is_jinja else OTHER_START_STR
    end_str = JINJA_END_STR if is_jinja else None
    i: int = 0
    resume_from_real_i: int = 0
    for i, line in enumerate(lines):
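        # Skip lines that a recursive call below has already processed.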
        if (_real_start_i + i) < resume_from_real_i:
            continue

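        # Lines that don't start with the opening marker become inline tokens.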
        stripped = line.strip()
        if not stripped.startswith(start_str):
            token.tokens.append(
                _extract_inline_token(line, _real_start_i + i, is_jinja))
            continue

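        # Strip the opening (and, for Jinja, closing) markers to isolate the directive.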
        stripped = stripped[len(start_str):].strip()
        if end_str:
            stripped = right_replace(stripped, end_str, '').strip()

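        # 'endif' at depth > 0 closes the block a recursive call is collecting:
        # return the accumulated token and this line's absolute index to the caller.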
        if stripped == 'endif' and _depth > 0:
            return token, _real_start_i + i

        if_m = IF_RE.match(stripped)
        elif_m = ELIF_RE.match(stripped)
        else_m = ELSE_RE.match(stripped)

        if not any([if_m, elif_m, else_m]) and stripped != 'endif':
            token.tokens.append(
                InlineToken(_real_start_i + i, [
                    line[:line.find(start_str)] + stripped,
                ]))
            continue

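        # For if/elif/else: build an IfToken from any inline statement, or recurse
        # from the next line to collect the block body up to the matching 'endif'.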
        next_start_i = _real_start_i + i + 1
        if if_m is not None:
            condition = if_m.groupdict()['condition']
            statement = if_m.groupdict()['statement']
            if_token = IfToken(
                _real_start_i + i, condition, line[:line.find(start_str)] +
                statement if statement else None)
            if not statement:
                if_token, resume_from_real_i = _process_tokens(
                    lines[i + 1:],
                    if_token,
                    is_jinja=is_jinja,
                    _depth=_depth + 1,
                    _real_start_i=next_start_i)
            token.tokens.append(if_token)

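        # 'elif' chains onto the IfToken currently being built via token.next.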
        elif elif_m is not None:
            condition = elif_m.groupdict()['condition']
            statement = elif_m.groupdict()['statement']
            if_token = IfToken(
                _real_start_i + i, condition, line[:line.find(start_str)] +
                statement if statement else None)
            if not statement:
                if_token, resume_from_real_i = _process_tokens(
                    lines[i + 1:],
                    if_token,
                    is_jinja=is_jinja,
                    _depth=_depth,
                    _real_start_i=next_start_i)
            token.next = if_token

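        # 'else' is represented as an IfToken whose condition is always True,
        # also chained via token.next.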
        elif else_m is not None:
            statement = else_m.groupdict()['statement']
            if_token = IfToken(
                _real_start_i + i, True, line[:line.find(start_str)] +
                statement if statement else None)
            if not statement:
                if_token, resume_from_real_i = _process_tokens(
                    lines[i + 1:],
                    if_token,
                    is_jinja=is_jinja,
                    _depth=_depth,
                    _real_start_i=next_start_i)
            token.next = if_token
            continue

    return token, _real_start_i + i