Example #1
0
class TreeSerializer(IterObject):
    """Serialize a tree into a token stream with bounded look-behind and
    arbitrary look-ahead (peek)."""

    def __init__(self, tree):
        # Upper bound on how many already-yielded tokens lookbehind() may reach.
        self.max_look_behind = 10
        super(TreeSerializer, self).__init__(tree)

    def resetIter(self):
        self.outData = LimLQueue(
            self.max_look_behind)  # limited record of yielded tokens
        self.tokenStream = LQueue(self.inData.toListG())
        self.tokenStreamIterable = iter(self.tokenStream)
        super(TreeSerializer, self).resetIter()

    ##
    # Peek n tokens behind
    #
    # Raises SyntaxException when n exceeds the recorded history size.
    def lookbehind(self, n=1):
        if n > self.max_look_behind:
            raise SyntaxException(
                "TokenStream: can only look %d elements behind" %
                self.max_look_behind)
        return self.outData[n]

    ##
    # Peek n tokens ahead
    #
    # peek needs to circumvent __iter__ and access the LQueue directly
    def peek(self, n=1):
        toks = []
        cnt = 0
        # get the desired token
        while cnt < n:
            # builtin next() works on Python 2.6+ and 3.x, unlike the
            # Python-2-only .next() method
            t = next(self.tokenStreamIterable)
            toks.append(t)
            if t['type'] == "eof":  # never read past end of stream
                break
            cnt += 1
        # put all retrieved tokens back so iteration is unaffected
        for t in toks[::-1]:
            self.tokenStream.putBack(t)
        return toks[-1]

    def __iter__(self):
        # Record each yielded token so lookbehind() can retrieve it later.
        for tok in self.tokenStreamIterable:
            self.outData.appendleft(tok)
            yield tok
Example #2
0
class TreeSerializer(IterObject):
    """Serialize a tree into a token stream with bounded look-behind and
    arbitrary look-ahead (peek)."""

    def __init__(self, tree):
        # Upper bound on how many already-yielded tokens lookbehind() may reach.
        self.max_look_behind = 10
        super(TreeSerializer, self).__init__(tree)

    def resetIter(self):
        self.outData = LimLQueue(self.max_look_behind)  # limited record of yielded tokens
        self.tokenStream = LQueue(self.inData.toListG())
        self.tokenStreamIterable = iter(self.tokenStream)
        super(TreeSerializer, self).resetIter()

    ##
    # Peek n tokens behind
    #
    # Raises SyntaxException when n exceeds the recorded history size.
    def lookbehind(self, n=1):
        if n > self.max_look_behind:
            raise SyntaxException("TokenStream: can only look %d elements behind" % self.max_look_behind)
        return self.outData[n]

    ##
    # Peek n tokens ahead
    #
    # peek needs to circumvent __iter__ and access the LQueue directly
    def peek(self, n=1):
        toks = []
        cnt = 0
        # get the desired token
        while cnt < n:
            # builtin next() works on Python 2.6+ and 3.x, unlike the
            # Python-2-only .next() method
            t = next(self.tokenStreamIterable)
            toks.append(t)
            if t['type'] == "eof":  # never read past end of stream
                break
            cnt += 1
        # put all retrieved tokens back so iteration is unaffected
        for t in toks[::-1]:
            self.tokenStream.putBack(t)
        return toks[-1]

    def __iter__(self):
        # Record each yielded token so lookbehind() can retrieve it later.
        for tok in self.tokenStreamIterable:
            self.outData.appendleft(tok)
            yield tok
Example #3
0
 def resetIter(self):
     # Rebuild all iteration state from scratch: the underlying token
     # queue, its iterator, and the bounded history of yielded tokens.
     self.tokenStream = LQueue(self.inData.toListG())
     self.tokenStreamIterable = iter(self.tokenStream)
     # bounded record of already-yielded tokens (for lookbehind)
     self.outData = LimLQueue(self.max_look_behind)
     super(TreeSerializer, self).resetIter()
Example #4
0
 def resetIter(self):
     # Reset iteration state. The token queue is rebuilt from the input
     # data and its iterator is cached; the history buffer is re-created
     # empty with the configured capacity.
     stream = LQueue(self.inData.toListG())
     self.tokenStream = stream
     self.tokenStreamIterable = iter(stream)
     # bounded record of already-yielded tokens (for lookbehind)
     self.outData = LimLQueue(self.max_look_behind)
     super(TreeSerializer, self).resetIter()