Example #1
0
def pytokens(t):
    """Split the string at t's lexer position into Python tokens and yield them.

    For every newline contained in a yielded token, advances
    ``t.lexer.lineno`` and moves ``t.lexer.lexpos`` just past that
    newline so the lexer's position bookkeeping stays in sync.

    Raises:
        HamlParserException: with ``(lineno, lexpos, message)`` when the
            tokenizer reports a ``TokenError``.
    """
    try:
        for tok in toks(t.lexer.lexdata[t.lexer.lexpos:]):
            s = tok[1]  # the token's source text
            yield tok
            # Bump line counter and lexpos once per newline inside the token.
            for _ in range(s.count('\n')):
                t.lexer.lineno += 1
                t.lexer.lexpos = t.lexer.lexdata.find('\n', t.lexer.lexpos) + 1
    except TokenError as ex:
        # Python 3 forms: `except E, ex` / `raise E, args` are Python-2-only,
        # and exception objects are no longer indexable (use ex.args[0]).
        raise HamlParserException(t.lineno, t.lexpos, ex.args[0])
Example #2
0
def pytokens(t):
    """Split the string at t's lexer position into Python tokens and yield them.

    For every newline contained in a yielded token, advances
    ``t.lexer.lineno`` and moves ``t.lexer.lexpos`` just past that
    newline so the lexer's position bookkeeping stays in sync.

    Raises:
        HamlParserException: with ``(lineno, lexpos, message)`` when the
            tokenizer reports a ``TokenError``.
    """
    try:
        for tok in toks(t.lexer.lexdata[t.lexer.lexpos:]):
            s = tok[1]  # the token's source text
            yield tok
            # Bump line counter and lexpos once per newline inside the token.
            for _ in range(s.count('\n')):
                t.lexer.lineno += 1
                t.lexer.lexpos = t.lexer.lexdata.find('\n', t.lexer.lexpos) + 1
    except TokenError as ex:
        # Python 3 forms: `except E, ex` / `raise E, args` are Python-2-only,
        # and exception objects are no longer indexable (use ex.args[0]).
        raise HamlParserException(t.lineno, t.lexpos, ex.args[0])
Example #3
0
 def read_script():
     """Read inline Python up to the '}' that closes the opening '@{'.

     Scans tokens from the enclosing variable `s`, tracking nested brace
     depth. On the unnested closing brace, appends a "%s" placeholder to
     `fmt` and the parenthesized untokenized code to `args`.

     Returns:
         The remainder of `s` after the closing '}'.

     Calls ``self.error`` if the line ends before the brace is closed.
     """
     tokens = []
     level = 0
     rest = None
     for tok in toks(s):
         _, t, _, (_, col), _ = tok
         if t == '{':
             level += 1
         elif t == '}':
             if level == 0:
                 # Unnested '}' terminates the inline script; keep the tail.
                 rest = s[col:]
                 break
             level -= 1
         tokens.append(tok)
     # Identity test for None per PEP 8 (was `rest == None`).
     if rest is None:
         # Never reached break, so @{ was not closed with }.
         self.error("End of line reached when reading inline Python")
     python_code = untokenize(tokens)
     fmt.append("%s")
     args.append("(%s)" % python_code)
     return rest
Example #4
0
 def read_script():
     """Read inline Python up to the '}' that closes the opening '@{'.

     Scans tokens from the enclosing variable `s`, tracking nested brace
     depth. On the unnested closing brace, appends a "%s" placeholder to
     `fmt` and the parenthesized untokenized code to `args`.

     Returns:
         The remainder of `s` after the closing '}'.

     Calls ``self.error`` if the line ends before the brace is closed.
     """
     tokens = []
     level = 0
     rest = None
     for tok in toks(s):
         _, t, _, (_, col), _ = tok
         if t == '{':
             level += 1
         elif t == '}':
             if level == 0:
                 # Unnested '}' terminates the inline script; keep the tail.
                 rest = s[col:]
                 break
             level -= 1
         tokens.append(tok)
     # Identity test for None per PEP 8 (was `rest == None`).
     if rest is None:
         # Never reached break, so @{ was not closed with }.
         self.error("End of line reached when reading inline Python")
     python_code = untokenize(tokens)
     fmt.append("%s")
     args.append("(%s)" % python_code)
     return rest