Code example #1
0
# epydoc convention

# pylint: disable-msg=C0103
# it's a class
NonToken = ABCMeta('NonToken', (object, ), {})
'''
ABC used to identify matchers that actually consume from the stream.  These
are the "leaf" matchers that "do the real work" and they cannot be used at
the same level as Tokens, but must be embedded inside them.

This is a purely informative interface used, for example, to generate warnings
for the user.  Not implementing this interface will not block any 
functionality.
'''

add_children(NonToken, Lookahead, Any, Literal, Regexp)
# don't register Empty() here because it's useful as a token(!)


# pylint: disable-msg=R0901, R0904, R0913, W0201, W0142, E1101
# lepl standards
class BaseToken(OperatorMatcher, NoMemo):
    '''
    Introduce a token that will be recognised by the lexer.  A Token instance
    can be specialised to match particular contents by calling as a function.
    
    This is a base class that provides all the functionality, but doesn't
    set the regexp attribute.  This allows subclasses to provide a fixed
    value, while `Token` uses the constructor.
    '''
    
Code example #2
0
File: combine.py  Project: willtang/lyx2ebook
                if count1 >= start and (stop is None or count1 <= stop):
                    yield (acc1, stream1)
                count2 = count1 + 1
                for (value, stream2) in generator:
                    acc2 = acc1 + value
                    if stop is None or count2 <= stop:
                        queue.append((count2, acc2, stream2, 
                                      rest._untagged_match(stream2)))
        finally:
            for (_count, _acc, _stream, generator) in queue:
                generator.close()
            
    return matcher


add_children(BaseSearch, DepthFirst, BreadthFirst, \
             DepthNoTrampoline, BreadthNoTrampoline)

                
class _BaseCombiner(Transformable):
    '''
    Support for `And` and `Or`.
    '''
    
    def __init__(self, *matchers):
        super(_BaseCombiner, self).__init__()
        self._args(matchers=lmap(coerce_, matchers))
        
    def compose(self, wrapper):
        '''
        Generate a new instance with the composed function from the Transform.
        '''
Code example #3
0
                for (value, stream2) in generator:
                    acc2 = acc1 + value
                    if stop == None or count2 <= stop:
                        queue.append((count2, acc2, stream2,
                                      rest._untagged_match(stream2)))
                while support.generator_manager_queue_len \
                        and len(queue) > support.generator_manager_queue_len:
                    queue.popleft()[3].close()
        finally:
            while queue:
                queue.popleft()[3].close()

    return matcher


add_children(BaseSearch, DepthFirst, BreadthFirst, \
             DepthNoTrampoline, BreadthNoTrampoline)


class _BaseCombiner(Transformable):
    '''
    Support for `And` and `Or`.
    '''
    def __init__(self, *matchers):
        super(_BaseCombiner, self).__init__()
        self._args(matchers=lmap(coerce_, matchers))

    def compose(self, wrapper):
        '''
        Generate a new instance with the composed function from the Transform.
        '''
        copy = type(self)(*self.matchers)