Example #1
0
def tokenize(template):
    """Yield the pieces of *template* in order.

    Runs of plain text are yielded as-is; each '{...}' placeholder is
    yielded as a Key built from the stripped text between the braces.
    """
    stream = Tokens(template)
    while not stream.at_end():
        if stream.current_char != '{':
            # Plain text up to the next placeholder (or end of input).
            yield stream.read_until('{')
            continue
        # Consume through the closing brace, then strip both braces.
        placeholder = stream.read_until('}')
        placeholder += stream.read_char()
        yield Key(placeholder[1:-1].strip())
Example #2
0
def parse(statement, root_exp=None):
    """Tokenize *statement* and parse it with *root_exp* (default: and_exp).

    Raises SyntaxError if any tokens remain unconsumed after parsing.
    """
    parser = and_exp if root_exp is None else root_exp
    remaining = list(Tokens(statement))
    result = parser(remaining)
    if remaining:
        # The grammar entry point must consume every token.
        raise SyntaxError('Incomplete statement {}'.format(remaining))
    return result
Example #3
0
def parse(statement, root_exp=None):
    """Tokenize *statement*, normalize terminator keywords to lower case,
    and parse the token list with *root_exp* (default: and_exp).

    Args:
        statement: Raw statement string handed to Tokens.
        root_exp: Grammar entry point called with the token list;
            defaults to and_exp when None.

    Returns:
        Whatever expression object *root_exp* produces.

    Raises:
        SyntaxError: If tokens remain unconsumed after parsing.
    """
    term = set(terminators)

    if root_exp is None:
        root_exp = and_exp

    # Compute .lower() once per token; the original comprehension
    # called it twice (once for the membership test, once for the value).
    tokens = []
    for token in Tokens(statement):
        lowered = token.lower()
        tokens.append(lowered if lowered in term else token)

    exp = root_exp(tokens)
    if tokens:
        # root_exp mutates the list in place; leftovers mean a bad statement.
        raise SyntaxError('Incomplete statement {}'.format(tokens))
    return exp