Example #1
0
File: lexer.py Project: kroo/pyhaml
def read_script(t):
    """Starting at a script token (- or =), read a Python script from the
    lexer input and return it as a string.

    Consumes tokens via pytokens(t) until either the token stream is
    exhausted (empty token string) or a NEWLINE token ends the script.
    In both cases t.lexer.lexpos is advanced past the consumed text.

    Returns the untokenized, stripped script text, or None if the token
    stream ends without producing either terminating condition.
    """
    src = []
    for tok in pytokens(t):
        # Renamed from `type` to avoid shadowing the builtin.
        tok_type, s, _, (_, ecol), _ = tok
        if s == '':
            # End of input: jump the lexer to the end of its data and
            # return everything collected so far.
            t.lexer.lexpos = len(t.lexer.lexdata)
            return untokenize(src).strip()
        src.append(tok)
        if tok_type == token.NEWLINE:
            # The script ends at this newline; advance the lexer past it.
            t.lexer.lexpos += ecol - 1
            return untokenize(src).strip()
Example #2
0
def read_script(t):
    """Starting at a script token (- or =), read a Python script from the
    lexer input and return it as a string.

    Consumes tokens via pytokens(t) until either the token stream is
    exhausted (empty token string) or a NEWLINE token ends the script.
    In both cases t.lexer.lexpos is advanced past the consumed text.

    Returns the untokenized, stripped script text, or None if the token
    stream ends without producing either terminating condition.
    """
    src = []
    for tok in pytokens(t):
        # Renamed from `type` to avoid shadowing the builtin.
        tok_type, s, _, (_, ecol), _ = tok
        if s == '':
            # End of input: jump the lexer to the end of its data and
            # return everything collected so far.
            t.lexer.lexpos = len(t.lexer.lexdata)
            return untokenize(src).strip()
        src.append(tok)
        if tok_type == token.NEWLINE:
            # The script ends at this newline; advance the lexer past it.
            t.lexer.lexpos += ecol - 1
            return untokenize(src).strip()
Example #3
0
File: lexer.py Project: kroo/pyhaml
def read_dict(t):
    """Read a Python dictionary expression starting at a { token.

    Accumulates tokens into t.value while tracking brace nesting depth.
    Once the opening brace is balanced, advances the lexer past the
    expression, replaces t.value with its untokenized string form, and
    returns t.  If the braces never balance, t.value is left as the list
    of tokens collected so far.
    """
    t.value = []
    depth = 0
    for tok in pytokens(t):
        _, text, _, (_, end_col), _ = tok
        t.value.append(tok)
        if text == '{':
            depth += 1
            continue
        if text == '}':
            depth -= 1
            if depth == 0:
                # Balanced: skip the lexer past the whole expression.
                t.lexer.lexpos += end_col
                t.value = untokenize(t.value)
                return t
Example #4
0
def read_dict(t):
    """Read a Python dictionary expression starting at a { token.

    Accumulates tokens into t.value while tracking brace nesting depth.
    Once the opening brace is balanced, advances the lexer past the
    expression, replaces t.value with its untokenized string form, and
    returns t.  If the braces never balance, t.value is left as the list
    of tokens collected so far.
    """
    t.value = []
    depth = 0
    for tok in pytokens(t):
        _, text, _, (_, end_col), _ = tok
        t.value.append(tok)
        if text == '{':
            depth += 1
            continue
        if text == '}':
            depth -= 1
            if depth == 0:
                # Balanced: skip the lexer past the whole expression.
                t.lexer.lexpos += end_col
                t.value = untokenize(t.value)
                return t
Example #5
0
 def read_script():
     """Read an inline Python expression up to the unmatched } that closes it.

     Collects tokens from the enclosing-scope line `s`, tracking brace
     nesting, until an unmatched } ends the expression.  Appends a "%s"
     placeholder to `fmt` and the parenthesized code to `args`, then
     returns the remainder of the line after the }.  If the line ends
     before the expression is closed, reports an error via self.error.
     """
     tokens = []
     level = 0
     rest = None
     for tok in toks(s):
         _, t, _, (_, col), _ = tok
         if t == '{':
             level += 1
         elif t == '}':
             if level == 0:
                 # Unmatched }: this closes the inline span; keep the rest
                 # of the line after it.
                 rest = s[col:]
                 break
             level -= 1
         tokens.append(tok)
     # PEP 8: compare to None with `is`, not `==`.
     if rest is None:
         # Never reached break, so @{ was not closed with }
         self.error("End of line reached when reading inline Python")
     python_code = untokenize(tokens)
     fmt.append("%s")
     args.append("(%s)" % python_code)
     return rest
Example #6
0
 def read_script():
     """Read an inline Python expression up to the unmatched } that closes it.

     Collects tokens from the enclosing-scope line `s`, tracking brace
     nesting, until an unmatched } ends the expression.  Appends a "%s"
     placeholder to `fmt` and the parenthesized code to `args`, then
     returns the remainder of the line after the }.  If the line ends
     before the expression is closed, reports an error via self.error.
     """
     tokens = []
     level = 0
     rest = None
     for tok in toks(s):
         _, t, _, (_, col), _ = tok
         if t == '{':
             level += 1
         elif t == '}':
             if level == 0:
                 # Unmatched }: this closes the inline span; keep the rest
                 # of the line after it.
                 rest = s[col:]
                 break
             level -= 1
         tokens.append(tok)
     # PEP 8: compare to None with `is`, not `==`.
     if rest is None:
         # Never reached break, so @{ was not closed with }
         self.error("End of line reached when reading inline Python")
     python_code = untokenize(tokens)
     fmt.append("%s")
     args.append("(%s)" % python_code)
     return rest