Example #1
0
def tokenize(template):
  """Yield the parts of *template*: literal text chunks interleaved
  with Key tokens built from '{...}' placeholders (braces stripped,
  inner text whitespace-trimmed)."""
  stream = Tokens(template)
  while not stream.at_end():
    if stream.current_char != '{':
      # Plain text: emit everything up to the next placeholder (or end).
      yield stream.read_until('{')
      continue
    # Placeholder: consume through the closing brace, then drop both braces.
    raw = stream.read_until('}') + stream.read_char()
    yield Key(raw[1:-1].strip())
Example #2
0
def parse(statement, root_exp=None):
    """Parse *statement* into an expression via *root_exp*.

    root_exp defaults to and_exp when not supplied.  Raises
    SyntaxError if any tokens remain unconsumed after parsing.
    """
    parser = and_exp if root_exp is None else root_exp

    remaining = list(Tokens(statement))
    result = parser(remaining)

    # The parser consumes tokens in place; leftovers mean bad syntax.
    if remaining:
        raise SyntaxError('Incomplete statement {}'.format(remaining))
    return result
Example #3
0
def tokenize(template):
    """Split *template* into literal strings and Key placeholder tokens.

    A '{...}' span becomes a Key holding the brace-stripped, trimmed
    inner text; everything else is yielded verbatim.
    """
    src = Tokens(template)
    while not src.at_end():
        if src.current_char == '{':
            # Read through the closing '}' so the chunk includes both braces.
            chunk = src.read_until('}') + src.read_char()
            yield Key(chunk[1:-1].strip())
        else:
            # Literal text runs until the next opening brace.
            yield src.read_until('{')
Example #4
0
def tokenize_pattern(pattern):
  """Tokenize a routing *pattern* into '/' separators, '{...}'
  placeholders (braces included), and the literal segments between."""
  reader = Tokens(pattern)
  while not reader.at_end():
    if reader.current_char == '/':
      # A separator is a single-character token.
      yield reader.read_char()
    elif reader.current_char == '{':
      # Placeholder: everything through the closing brace.
      yield reader.read_until('}') + reader.read_char()
    else:
      # Literal text up to the next separator or placeholder.
      yield reader.read_until('/{')
Example #5
0
def parse(statement, root_exp=None):
    """Parse *statement* into an expression via *root_exp*.

    Tokens whose lowercase form appears in *terminators* are normalized
    to lowercase before parsing; all other tokens pass through as-is.
    root_exp defaults to and_exp.  Raises SyntaxError when tokens are
    left unconsumed after parsing.
    """
    term = set(terminators)

    if root_exp is None:
        root_exp = and_exp

    # Compute .lower() once per token (the original comprehension
    # called it twice: once for the membership test, once for the value).
    tokens = []
    for token in Tokens(statement):
        lowered = token.lower()
        tokens.append(lowered if lowered in term else token)

    exp = root_exp(tokens)
    # root_exp consumes tokens in place; anything left is a syntax error.
    if tokens:
        raise SyntaxError('Incomplete statement {}'.format(tokens))
    return exp