Example 1
    def _advance(self, delta=1):
        '''
        Move forwards in the stream.

        I've tried to optimise for the common (delta=1) case.

        The following conventions are followed:
        - `offset` is the offset from the initial input
        - `stream` is the stream starting at the current location
        - `next_stream` is the stream after current
        - `current` is the character at the current location
        - `previous` is the character just before the current location
        - `excess` is the amount by which we advanced past the end

        If `excess` is set, streams should not be used.
        '''
        assert delta >= 0
        self._offset += delta
        if self._excess:
            self._excess += delta
            self._previous = None
        elif delta == 1:
            self._stream = self._next_stream
            self._previous = self._current
            try:
                (self._current, self._next_stream) = s_next(self._next_stream)
            except StopIteration:
                self._current = None
                self._next_stream = None
                self._excess = 1
        elif delta:
            old_stream = self._stream
            try:
                (advanced, self._stream) = s_next(old_stream, delta)
                self._previous = advanced[-1:]
                try:
                    (self._current, self._next_stream) = s_next(self._stream)
                except StopIteration:
                    self._current = None
                    self._next_stream = None
                    self._excess = 1
            except StopIteration:
                self._stream = None
                self._next_stream = None
                self._current = None
                self._previous = None
                self._excess = delta - s_len(old_stream) + 1
        return True
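
The same bookkeeping can be shown on a plain Python sequence. The sketch below is illustrative only (the Cursor class and its attribute names are not part of the library): it keeps an offset, the current and previous characters, and an excess counter for how far the cursor has run past the end, which is roughly the state _advance maintains above.

class Cursor:

    def __init__(self, data):
        self.data = data                   # the underlying sequence (eg a str)
        self.offset = 0                    # offset from the initial input
        self.excess = 0                    # how far we advanced past the end
        self.previous = None
        self.current = data[0:1] or None

    def advance(self, delta=1):
        assert delta >= 0
        self.offset += delta
        if self.offset > len(self.data):
            # Ran off the end: record the excess and forget the characters.
            self.excess = self.offset - len(self.data)
            self.previous = None
            self.current = None
        else:
            self.previous = self.data[self.offset-1:self.offset] or None
            self.current = self.data[self.offset:self.offset+1] or None
        return True

cursor = Cursor('abc')
cursor.advance()       # current is 'b', previous is 'a'
cursor.advance(3)      # past the end: excess is 1, current is None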
Example 2
    def _untagged_match(self, stream):
        '''
        Match the stream without trampolining.
        '''
        key = s_key(stream, self.__state)
        if key not in self.__depth:
            self.__depth[key] = 0
        depth = self.__depth[key]
        if self.curtail(depth, s_len(stream)):
            return
        if (key, depth) not in self.__table:
            # Each table entry pairs the results cached so far with the
            # generator that can produce further results on demand.
            self.__table[(key, depth)] = [[], self.matcher._match(stream)]
        descriptor = self.__table[(key, depth)]
        for i in count():
            assert depth == self.__depth[key]
            if i == len(descriptor[0]):
                # No cached result at this index yet: pull one more from
                # the underlying matcher and remember it.
                result = next(descriptor[1].generator)
                descriptor[0].append(result)
            yield descriptor[0][i]
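
The memoisation above hinges on the descriptor: a list of results produced so far, paired with the generator that can produce more, so a repeated match at the same position replays cached results instead of re-running the matcher. A self-contained sketch of just that trick (the shared_results helper and its table argument are illustrative, not the library's API):

from itertools import count

def shared_results(table, key, make_generator):
    '''Yield results for key, computing each underlying result at most once.'''
    if key not in table:
        table[key] = [[], make_generator()]
    cached, generator = table[key]
    for i in count():
        if i == len(cached):
            try:
                cached.append(next(generator))
            except StopIteration:
                return                     # the underlying generator is spent
        yield cached[i]

def squares():
    return (n * n for n in range(3))

table = {}
first = shared_results(table, 'pos0', squares)
second = shared_results(table, 'pos0', squares)
assert next(first) == 0 and next(first) == 1
assert next(second) == 0               # replayed from the cache, not recomputed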
Example 3
    def _match(self, in_stream):
        '''
        Implement matching - pass token stream to tokens.
        '''
        (max, clean_stream) = s_new_max(in_stream)
        try:
            length = s_len(in_stream)
        except TypeError:
            # The underlying stream has no known length (eg an iterable).
            length = None
        factory = s_factory(in_stream)
        # Wrap the lexed tokens in a new token stream, carrying over the
        # bookkeeping (id, max, delta, etc) from the character stream.
        token_stream = factory.to_token(
            self._tokens(clean_stream, max),
            id=s_id(in_stream), factory=factory,
            max=s_max(in_stream),
            global_kargs=s_global_kargs(in_stream),
            delta=s_delta(in_stream), len=length,
            cache_level=s_cache_level(in_stream)+1)
        # Drop the local reference to the original stream.
        in_stream = None
        generator = self.matcher._match(token_stream)
        while True:
            yield (yield generator)
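
The method above does not match characters itself: it lexes the character stream into a token stream, then delegates to a matcher that works on tokens. A simplified standalone sketch of that shape (the tokenize, match_sum and match functions are illustrative, not the library's API):

import re

TOKEN_RE = re.compile(r'\s*(?:(\d+)|(\+))')

def tokenize(text):
    '''Turn raw characters into (tag, value) tokens.'''
    pos = 0
    while pos < len(text):
        m = TOKEN_RE.match(text, pos)
        if not m:
            raise ValueError('cannot lex at offset %d' % pos)
        pos = m.end()
        if m.group(1):
            yield ('INT', int(m.group(1)))
        else:
            yield ('PLUS', '+')

def match_sum(tokens):
    '''A trivial "inner matcher" that sees tokens, never characters.'''
    total = 0
    for tag, value in tokens:
        if tag == 'INT':
            total += value
    yield total

def match(text):
    # Same delegation as above: build the token stream once, then let the
    # token-level matcher generate results from it.
    return match_sum(tokenize(text))

assert next(match('1 + 2 + 39')) == 42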
Example 4
    def _match(self, stream):
        '''
        Attempt to match the stream.
        '''
        key = s_key(stream, self.__state)
        if key not in self.__depth:
            self.__depth[key] = 0
        depth = self.__depth[key]
        # Give up if the curtailment rule says that recursing this deep at
        # this position cannot succeed for the input that remains.
        if self.curtail(depth, s_len(stream)):
            return
        if (key, depth) not in self.__table:
            self.__table[(key, depth)] = [[], self.matcher._match(stream)]
        descriptor = self.__table[(key, depth)]
        for i in count():
            assert depth == self.__depth[key]
            if i == len(descriptor[0]):
                try:
                    # Track the recursion depth while the trampoline
                    # evaluates the underlying matcher for a new result.
                    self.__depth[key] += 1
                    result = yield descriptor[1]
                finally:
                    self.__depth[key] -= 1
                descriptor[0].append(result)
            yield descriptor[0][i]
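
The depth counter is what lets the memoiser cope with left-recursive grammars: each nested attempt to match at the same position bumps the count, and curtail gives up once the depth cannot pay off for the input that remains. A toy standalone version of that idea, for the left-recursive rule E -> E 'a' | 'a' (the grammar and the simple depth > remaining bound are illustrative; they are not the library's actual curtailment rule):

def parse_E(text, pos, depth=None):
    '''Yield end positions for E -> E 'a' | 'a', curtailing left recursion.'''
    if depth is None:
        depth = {}
    remaining = len(text) - pos
    depth[pos] = depth.get(pos, 0) + 1
    try:
        if depth[pos] > remaining:
            return                     # curtail: recursing further is useless
        # E -> E 'a'
        for end in parse_E(text, pos, depth):
            if text[end:end+1] == 'a':
                yield end + 1
        # E -> 'a'
        if text[pos:pos+1] == 'a':
            yield pos + 1
    finally:
        depth[pos] -= 1

assert sorted(parse_E('aaa', 0)) == [1, 2, 3]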
Example 5
    def _match(self, in_stream):
        '''
        Implement matching - pass token stream to tokens.
        '''
        (max, clean_stream) = s_new_max(in_stream)
        try:
            length = s_len(in_stream)
        except TypeError:
            length = None
        factory = s_factory(in_stream)
        token_stream = factory.to_token(self._tokens(clean_stream, max),
                                        id=s_id(in_stream),
                                        factory=factory,
                                        max=s_max(in_stream),
                                        global_kargs=s_global_kargs(in_stream),
                                        delta=s_delta(in_stream),
                                        len=length,
                                        cache_level=s_cache_level(in_stream)+1)
        in_stream = None
        generator = self.matcher._match(token_stream)
        while True:
            yield (yield generator)
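
The closing loop relies on the trampoline convention used throughout these matchers: yielding a generator asks the scheduler to evaluate it and send one result back in, while yielding a plain value hands a result to the caller. A toy trampoline that implements just that convention is sketched below; it is illustrative only (a real scheduler also has to propagate exhaustion and failure between generators, which is omitted here).

from types import GeneratorType

def trampoline(main):
    '''Drive main, evaluating any generator it yields on its behalf.'''
    stack = [main]
    value = None
    while stack:
        try:
            step = stack[-1].send(value)
        except StopIteration:
            stack.pop()                  # this generator is finished
            value = None
            continue
        if isinstance(step, GeneratorType):
            stack.append(step)           # evaluate the sub-generator next
            value = None
        elif len(stack) > 1:
            stack.pop()                  # sub-generator produced a value...
            value = step                 # ...send it back to its parent
        else:
            yield step                   # a result for the caller
            value = None

def inner(n):
    yield n * 2                          # a single "match result"

def outer(n):
    generator = inner(n)
    while True:
        yield (yield generator)          # same shape as the loop above

assert next(trampoline(outer(21))) == 42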