Example #1
0
def _read_python_expr(token_source, c):
    """Read one embedded Python expression from *token_source*.

    Consumes tokens until, at bracket depth zero, the next token is an
    operator (a key of ``c.binary_ops`` / ``c.unary_ops``), a ``")"``,
    or the end marker.

    :arg token_source: a peekable token stream yielding
        ``(token_type, token)`` pairs (``.peek()`` / ``.next()``).
    :arg c: parse context providing ``binary_ops`` and ``unary_ops``
        dicts keyed by operator token strings.
    :returns: a :class:`StringWithOrigin` holding the untokenized
        expression text.
    :raises CharltonError: on an unmatched close bracket, or on an
        unclosed open bracket at end of input.
    """
    # Build the terminator set with set union: set(d) iterates d's keys,
    # so this is equivalent to the old "d.keys() + d.keys()" list
    # concatenation but also works on Python 3, where dict.keys()
    # returns a view that does not support "+".
    end_tokens = set(c.binary_ops) | set(c.unary_ops) | {")"}
    token_types = []
    tokens = []
    bracket_level = 0
    while (bracket_level
           or (token_source.peek()[1] not in end_tokens
               and token_source.peek()[0] != tokenize.ENDMARKER)):
        assert bracket_level >= 0
        (token_type, token) = token_source.next()
        _check_token(token_type, token)
        if token in ("(", "[", "{"):
            bracket_level += 1
        if token in (")", "]", "}"):
            bracket_level -= 1
        if bracket_level < 0:
            raise CharltonError("unmatched close bracket", token)
        if token_type == tokenize.ENDMARKER:
            # At depth 0 the loop condition stops before ENDMARKER, so
            # reaching it here means some bracket was left open.
            assert bracket_level > 0
            raise CharltonError("unclosed bracket in embedded Python "
                                "expression",
                                _combine_origin_attrs(tokens))
        token_types.append(token_type)
        tokens.append(token)
    text = pretty_untokenize(zip(token_types, tokens))
    return StringWithOrigin(text, _combine_origin_attrs(tokens))
Example #2
0
def _read_python_expr(token_source, c):
    """Consume a single embedded Python expression from *token_source*.

    Tokens are read until the stream, at bracket depth zero, is about to
    deliver an operator token, a ")", or the end marker.  The collected
    tokens are untokenized into text and returned as a StringWithOrigin.
    Raises CharltonError for an unmatched close bracket or an unclosed
    open bracket at end of input.
    """
    stop_tokens = set(c.binary_ops.keys() + c.unary_ops.keys() + [")"])
    seen_types = []
    seen_tokens = []
    depth = 0
    while True:
        # Peek once; stop only when not inside any bracket.
        peeked_type, peeked_token = token_source.peek()
        if not depth and (peeked_token in stop_tokens
                          or peeked_type == tokenize.ENDMARKER):
            break
        assert depth >= 0
        token_type, token = token_source.next()
        _check_token(token_type, token)
        if token in ("(", "[", "{"):
            depth += 1
        elif token in (")", "]", "}"):
            depth -= 1
        if depth < 0:
            raise CharltonError("unmatched close bracket", token)
        if token_type == tokenize.ENDMARKER:
            # Only reachable while a bracket is still open.
            assert depth > 0
            raise CharltonError(
                "unclosed bracket in embedded Python "
                "expression", _combine_origin_attrs(seen_tokens))
        seen_types.append(token_type)
        seen_tokens.append(token)
    return StringWithOrigin(pretty_untokenize(zip(seen_types, seen_tokens)),
                            _combine_origin_attrs(seen_tokens))
Example #3
0
def capture_obj_method_calls(obj_name, code):
    """Collect the call expressions begun by bare references to
    *obj_name* inside the source string *code*.

    Returns a list of ``(dotted_function_name, call_source_text)``
    pairs, one per bare reference found.
    """
    active = []
    for token_type, token, props in annotated_tokens(code):
        # Feed the current token to every capture already in progress.
        for cap in active:
            cap.add_token(token_type, token)
        # A bare reference to the target object starts a new capture;
        # the capturer receives this first token via its constructor.
        if props["bare_ref"] and token == obj_name:
            active.append(_FuncallCapturer(token_type, token))
    results = []
    for cap in active:
        results.append(("".join(cap.func), pretty_untokenize(cap.tokens)))
    return results
Example #4
0
def capture_obj_method_calls(obj_name, code):
    """Find every bare reference to *obj_name* in *code* and capture
    the function-call expression it begins.

    Returns ``[(joined_func_name, untokenized_call_text), ...]``.
    """
    capturers = []
    for (tok_type, tok, props) in annotated_tokens(code):
        # Running capturers see every subsequent token.
        for existing in capturers:
            existing.add_token(tok_type, tok)
        is_target = props["bare_ref"] and tok == obj_name
        if is_target:
            # New capturer gets this first token through its constructor,
            # not through the feeding loop above.
            capturers.append(_FuncallCapturer(tok_type, tok))
    return [("".join(cap.func), pretty_untokenize(cap.tokens))
            for cap in capturers]
Example #5
0
def replace_bare_funcalls(code, replacer):
    """Substitute bare function names in *code* using *replacer*.

    Each token flagged as a bare reference is passed through
    *replacer*; when the replacement differs, the token must be a bare
    function call, and the new name is spliced into the token stream.
    Returns the rewritten, pretty-untokenized source text.

    :raises CharltonError: if *replacer* renames a bare reference that
        is not being called directly.
    """
    out_tokens = []
    for token_type, token, props in annotated_tokens(code):
        emitted = token
        if props["bare_ref"]:
            candidate = replacer(token)
            if candidate != token:
                if not props["bare_funcall"]:
                    # Refuse to rename a name that is referenced but
                    # never called.
                    raise CharltonError(
                        "magic functions like '%s' can only be called, "
                        "not otherwise referenced" % (token,),
                        token.origin)
                emitted = candidate
        out_tokens.append((token_type, emitted))
    return pretty_untokenize(out_tokens)
Example #6
0
def replace_bare_funcalls(code, replacer):
    """Run every bare name in *code* through *replacer* and return the
    resulting source text.

    Raises CharltonError if *replacer* tries to rename a bare reference
    that is not a direct function call.
    """
    result = []
    for (kind, text, props) in annotated_tokens(code):
        if not props["bare_ref"]:
            result.append((kind, text))
            continue
        replacement = replacer(text)
        if replacement == text:
            # Replacer left the name alone; pass it through unchanged.
            result.append((kind, text))
            continue
        if not props["bare_funcall"]:
            message = ("magic functions like '%s' can only be called, "
                       "not otherwise referenced" % (text,))
            raise CharltonError(message, text.origin)
        result.append((kind, replacement))
    return pretty_untokenize(result)