Example 1
0
 def __new_stream(contents, stream):
     '''
     Create a new stream to pass to the content matcher.
     '''
     # Only token-backed streams need re-wrapping; anything else (the
     # original stream was not a location stream) is passed through as-is.
     if not isinstance(stream.source, TokenSource):
         return contents
     return DEFAULT_STREAM_FACTORY(ContentSource(contents, stream))
Example 2
0
def lexed_location_stream(tokens, discard, stream, source=None):
    '''
    Given a location stream, create a location stream of regexp matches.

    `tokens` and `discard` are matchers exposing a ``size_match`` method
    that either returns a ``(terminals, size, stream_after)`` triple or a
    non-unpackable value (``None``) on failure — that failure mode drives
    the TypeError-based control flow below.  `source` defaults to
    ``TokenSource`` and wraps the resulting token generator.
    '''
    log = getLogger('lepl.lexer.stream.lexed_location_stream')
    if source is None:
        source = TokenSource

    def generator(stream_before):
        '''
        This creates the sequence of tokens returned by the stream.

        Control flow relies on EAFP: when ``size_match`` fails it does not
        return a triple, so the tuple unpacking raises TypeError.  The
        inner handler then tries ``discard`` (whitespace etc.); if that
        also fails, its own unpacking TypeError escapes to the outer
        handler, which reports a lexing error at the current location.
        '''
        try:
            while stream_before:
                try:
                    (terminals, size, stream_after) = \
                            tokens.size_match(stream_before)
                    # Guard against token definitions that can match the
                    # empty string, which would never advance the stream.
                    if stream_after == stream_before:
                        raise RuntimeLexerError(
                            'Tokens matched an empty '
                            'string.\nChange your token definitions so that '
                            'they cannot be empty.')
                    log.debug(
                        format('Token: {0!r} {1!r} {2!r}', terminals, size,
                               stream_before))
                    # stream_before here to give correct location
                    yield (terminals, size, stream_before)
                    stream_before = stream_after
                except TypeError:
                    # Token match failed: consume discardable input
                    # (e.g. whitespace) instead.  A failed discard match
                    # re-raises TypeError to the outer handler.
                    # NOTE(review): unlike the token branch, there is no
                    # empty-match guard here — a discard pattern that can
                    # match '' would loop forever; presumably discard
                    # definitions are non-empty — TODO confirm.
                    (terminals, size, stream_before) = \
                            discard.size_match(stream_before)
                    log.debug(format('Space: {0!r} {1!r}', terminals, size))
        except TypeError:
            # Neither tokens nor discard matched: report where lexing stopped.
            raise RuntimeLexerError(
                format(
                    'No lexer for \'{0}\' at line {1} character {2} of {3}.',
                    stream_before.text, stream_before.line_number,
                    stream_before.line_offset, stream_before.source))

    # Wrap the lazy token generator in a stream the rest of LEPL can consume.
    token_stream = generator(stream)
    return DEFAULT_STREAM_FACTORY(source(token_stream, stream))