Example #1
import tokenize

# PushbackAdapter and python_tokenize are patsy helpers; these import paths
# assume patsy's own layout (patsy.util, patsy.tokens).
from patsy.util import PushbackAdapter
from patsy.tokens import python_tokenize

def annotated_tokens(code):
    prev_was_dot = False
    it = PushbackAdapter(python_tokenize(code))
    for (token_type, token, origin) in it:
        props = {}
        # A "bare ref" is a NAME token not preceded by "." (not an attribute).
        props["bare_ref"] = (not prev_was_dot and token_type == tokenize.NAME)
        # A "bare funcall" is a bare ref immediately followed by "(".
        props["bare_funcall"] = (props["bare_ref"]
                                 and it.has_more() and it.peek()[1] == "(")
        yield (token_type, token, origin, props)
        prev_was_dot = (token == ".")
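A minimal usage sketch (the formula string is chosen purely for illustration): iterating the generator shows which names would count as bare references or bare calls.

for token_type, token, origin, props in annotated_tokens("a + np.log(b)"):
    # e.g. "a" -> bare_ref=True; "log" follows "." -> bare_ref=False
    print(token, props["bare_ref"], props["bare_funcall"])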
Example #2
# Token and _read_python_expr are patsy internals; these import paths assume
# patsy's own layout (patsy.infix_parser, patsy.parse_formula).
from patsy.infix_parser import Token
from patsy.parse_formula import _read_python_expr
from patsy.util import PushbackAdapter
from patsy.tokens import python_tokenize

def _tokenize_formula(code, operator_strings):
    # Parentheses get dedicated token types, so they may not also be operators.
    assert "(" not in operator_strings
    assert ")" not in operator_strings
    magic_token_types = {"(": Token.LPAREN, ")": Token.RPAREN}
    for operator_string in operator_strings:
        magic_token_types[operator_string] = operator_string
    # Once we enter a Python expression, a ( does not end it, but any other
    # "magic" token does:
    end_tokens = set(magic_token_types)
    end_tokens.remove("(")

    it = PushbackAdapter(python_tokenize(code))
    for pytype, token_string, origin in it:
        if token_string in magic_token_types:
            yield Token(magic_token_types[token_string], origin)
        else:
            # Not an operator or paren: hand the token back and consume a
            # whole embedded Python expression as a single token.
            it.push_back((pytype, token_string, origin))
            yield _read_python_expr(it, end_tokens)
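A hedged usage sketch, with a toy operator set chosen for illustration (patsy's real parser supplies its own operator table):

for tok in _tokenize_formula("y ~ a + np.log(b)", ["~", "+"]):
    print(tok)

Each yielded Token is either one of the magic operator/paren tokens or a single token wrapping an embedded Python expression such as np.log(b).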