Example #1
def lexed_simple_stream(tokens, discard, stream):
    '''
    Given a simple stream, create a simple stream of (terminals, match) pairs.
    '''
    log = getLogger('lepl.lexer.stream.lexed_simple_stream')
    def generator(stream=stream):
        '''
        This creates the sequence of tokens returned by the stream.
        '''
        try:
            while stream:
                try:
                    (terminals, match, stream_after) = tokens.match(stream)
                    if stream_after == stream:
                        raise RuntimeLexerError('Tokens matched an empty '
                            'string.\nChange your token definitions so that '
                            'they cannot be empty.')
                    else:
                        stream = stream_after
                    log.debug(format('Token: {0!r} {1!r} {2!r}', 
                                     terminals, match, stream))
                    yield (terminals, match)
                except TypeError:
                    (terminals, _size, stream) = discard.size_match(stream)
                    log.debug(format('Space: {0!r} {1!r}', terminals, discard))
        except TypeError:
            raise RuntimeLexerError(format('No lexer for \'{0}\'.', stream))
        except AttributeError:
            raise RuntimeLexerError(format('No discard for \'{0}\'.', stream))
    return DEFAULT_STREAM_FACTORY.from_items(generator())
Example #2
def lexed_simple_stream(tokens, discard, stream):
    '''
    Given a simple stream, create a simple stream of (terminals, match) pairs.
    '''
    log = getLogger('lepl.lexer.stream.lexed_simple_stream')

    def generator(stream=stream):
        '''
        This creates the sequence of tokens returned by the stream.
        '''
        try:
            while stream:
                try:
                    (terminals, match, stream_after) = tokens.match(stream)
                    if stream_after == stream:
                        raise RuntimeLexerError(
                            'Tokens matched an empty '
                            'string.\nChange your token definitions so that '
                            'they cannot be empty.')
                    else:
                        stream = stream_after
                    log.debug(
                        format('Token: {0!r} {1!r} {2!r}', terminals, match,
                               stream))
                    yield (terminals, match)
                except TypeError:
                    (terminals, _size, stream) = discard.size_match(stream)
                    log.debug(format('Space: {0!r} {1!r}', terminals, discard))
        except TypeError:
            raise RuntimeLexerError(format('No lexer for \'{0}\'.', stream))
        except AttributeError:
            raise RuntimeLexerError(format('No discard for \'{0}\'.', stream))

    return DEFAULT_STREAM_FACTORY.from_items(generator())
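The loop above is the heart of the lexer: it repeatedly asks tokens.match for the next (terminals, match, stream_after) triple, and when that call signals failure with a TypeError it falls back to discard.size_match to skip ignorable input. The self-contained sketch below reproduces that control flow outside LEPL, using hypothetical FakeTokens and FakeDiscard stand-ins (not LEPL classes) that only mimic the interface implied by the code above, with a plain string in place of a simple stream and a list in place of the stream factory.

import re

class FakeTokens:
    '''Hypothetical stand-in: match a run of digits at the head of the stream.'''
    def match(self, stream):
        found = re.match(r'[0-9]+', stream)
        if not found:
            # Raise TypeError to signal failure, as the except clause above expects.
            raise TypeError('no token match')
        return (['number'], found.group(0), stream[found.end():])

class FakeDiscard:
    '''Hypothetical stand-in: skip whitespace between tokens.'''
    def size_match(self, stream):
        found = re.match(r'\s+', stream)
        if not found:
            raise TypeError('nothing to discard')
        return (['space'], found.end(), stream[found.end():])

def lex(stream, tokens=FakeTokens(), discard=FakeDiscard()):
    '''Collect (terminals, match) pairs, mirroring the generator's loop.'''
    results = []
    while stream:
        try:
            (terminals, match, stream) = tokens.match(stream)
            results.append((terminals, match))
        except TypeError:
            # If discard also fails, its TypeError propagates; the real code
            # converts that case into a RuntimeLexerError.
            (_terminals, _size, stream) = discard.size_match(stream)
    return results

print(lex('12 34  5'))  # [(['number'], '12'), (['number'], '34'), (['number'], '5')]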
Example #3
def get_tester(self):
    return SimpleStreamTester(
        [1, "two", [3]],
        lambda l: DEFAULT_STREAM_FACTORY.from_items(l, sub_list=False))
Example #4
def get_tester(self):
    return SimpleStreamTester([1, "two", [3]],
        lambda l: DEFAULT_STREAM_FACTORY.from_items(l, sub_list=False))