def lexed_simple_stream(tokens, discard, stream):
    '''
    Given a simple stream, create a simple stream of (terminals, match) pairs.

    :param tokens: a matcher whose ``match(stream)`` returns
        ``(terminals, match, stream_after)`` — presumably a compiled token
        automaton; TODO confirm against caller.
    :param discard: a matcher with ``size_match(stream)`` used to skip
        inter-token text (e.g. whitespace).
    :param stream: the character stream to be lexed.
    :return: a new stream (built via ``DEFAULT_STREAM_FACTORY.from_items``)
        yielding ``(terminals, match)`` pairs.
    :raise RuntimeLexerError: if a token matches the empty string, if no
        token matches, or if no discard matcher is available.
    '''
    log = getLogger('lepl.lexer.stream.lexed_simple_stream')
    def generator(stream=stream):
        '''
        This creates the sequence of tokens returned by the stream.
        '''
        try:
            # Consume the stream until it is exhausted (empty stream is falsy).
            while stream:
                try:
                    (terminals, match, stream_after) = tokens.match(stream)
                    # A token that consumes nothing would loop forever, so it
                    # is reported as a configuration error instead.
                    if stream_after == stream:
                        raise RuntimeLexerError('Tokens matched an empty '
                            'string.\nChange your token definitions so that '
                            'they cannot be empty.')
                    else:
                        stream = stream_after
                    log.debug(format('Token: {0!r} {1!r} {2!r}',
                                     terminals, match, stream))
                    yield (terminals, match)
                except TypeError:
                    # NOTE(review): a TypeError here appears to signal "no
                    # token matched" (match returned a non-unpackable value);
                    # fall back to the discard matcher to skip the text.
                    (terminals, _size, stream) = discard.size_match(stream)
                    log.debug(format('Space: {0!r} {1!r}',
                                     terminals, discard))
        except TypeError:
            # Neither tokens nor discard could make progress at this point.
            raise RuntimeLexerError(format('No lexer for \'{0}\'.', stream))
        except AttributeError:
            # discard lacks size_match — presumably no discard was configured.
            raise RuntimeLexerError(format('No discard for \'{0}\'.', stream))
    return DEFAULT_STREAM_FACTORY.from_items(generator())
def test_single_line(self):
    '''
    Indexing and slicing within a single line, before and after advancing.
    '''
    stream = DEFAULT_STREAM_FACTORY.from_string('abc')
    assert stream[0] == 'a', stream[0]
    assert stream[0:3] == 'abc', stream[0:3]
    assert stream[2] == 'c', stream[2]
    tail = stream[1:]
    assert tail[0] == 'b', tail[0]
def test_eof(self):
    '''
    Reading past the end of the stream must raise IndexError.
    '''
    stream = DEFAULT_STREAM_FACTORY.from_string('abc\npqs')
    assert stream[6] == 's', stream[6]
    try:
        # pylint: disable-msg=W0104
        stream[7]  # one past the final character
        assert False, 'expected error'
    except IndexError:
        pass
def test_filter(self):
    '''
    A filtered stream drops vowels but keeps the underlying line geometry.
    '''
    def consonant(chunk):
        return chunk[0] not in 'aeiou'
    raw = DEFAULT_STREAM_FACTORY.from_string('abcdef\nghijklm\n')
    filtered = FilteredSource.filtered_stream(consonant, raw)
    assert filtered[0:2] == 'bc', filtered[0:2]
    assert filtered[0:].line_number == 1, filtered[0:].line_number
    assert filtered[0:].line_offset == 1, filtered[0:].line_offset
    assert filtered[0:12] == 'bcdf\nghjklm\n'
    assert filtered[5:].line_number == 2, filtered[5:].line_number
    assert filtered[5:].line_offset == 0, filtered[5:].line_offset
    assert len(filtered) == 12
def test_multiple_lines(self):
    '''
    Slices may span line breaks; advancing moves the repr anchor.
    '''
    stream = DEFAULT_STREAM_FACTORY.from_string('abc\npqr\nxyz')
    assert stream[0:3] == 'abc', repr(stream[0:3])
    assert stream[0:4] == 'abc\n', stream[0:4]
    assert stream[0:5] == 'abc\np', stream[0:5]
    assert stream[0:11] == 'abc\npqr\nxyz'
    assert stream[5] == 'q', stream[5]
    middle = stream[5:]
    assert middle[0] == 'q', middle[0]
    assert repr26(middle) == "'pqr\\n'[1:]", repr26(middle)
    last = middle[3:]
    assert repr26(last) == "'xyz'[0:]", repr26(last)
def test_cached_filter(self):
    '''
    The cached Filter exposes a filtered stream and can locate the position
    in the underlying (unfiltered) stream.
    '''
    def consonant(char):
        return char not in 'aeiou'
    source = DEFAULT_STREAM_FACTORY.from_string('abcdef\nghijklm\n')
    filter_ = Filter(consonant, source)
    view = filter_.stream
    assert view[0:2] == 'bc', view[0:2]
    assert view[0:].line_number == 1, view[0:].line_number
    assert view[0:].line_offset == 1, view[0:].line_offset
    assert view[0:12] == 'bcdf\nghjklm\n'
    assert filter_.locate(view[0:])[0] == 'a', \
        filter_.locate(view[0:])[0]
    assert filter_.locate(view[1:])[0] == 'c', \
        filter_.locate(view[1:])[0]
    assert view[5:].line_number == 2, view[5:].line_number
    assert view[5:].line_offset == 0, view[5:].line_offset
    assert len(view) == 12, len(view)
def get_tester(self):
    '''
    Build a tester over a mixed-type item stream (no sub-lists).
    '''
    items = [1, "two", [3]]
    factory = lambda seq: DEFAULT_STREAM_FACTORY.from_items(seq,
                                                            sub_list=False)
    return SimpleStreamTester(items, factory)
def test_read(self):
    '''
    The text attribute exposes the current (first) line only.
    '''
    stream = DEFAULT_STREAM_FACTORY.from_string('12\n123\n')
    assert stream.text == '12\n'
def test_string(self):
    '''
    Basic slicing of a two-character string stream, then after advancing.
    '''
    head = DEFAULT_STREAM_FACTORY.from_string('12')
    assert head[0:1] == '1'
    assert head[0:2] == '12'
    tail = head[1:]
    assert tail[0:1] == '2'
def get_tester(self):
    '''
    Build a tester over a line-based stream; results join back to a string.
    '''
    lines = ['a\n', 'bc\n', 'def\n']
    make = lambda seq: DEFAULT_STREAM_FACTORY.from_lines(iter(seq))
    return SimpleStreamTester(lines, make, ''.join)
def get_tester(self):
    '''
    Build a tester over a character stream backed by a plain string.
    '''
    chars = list('a\nbc\ndef\n')
    make = lambda seq: DEFAULT_STREAM_FACTORY.from_string(''.join(seq))
    return SimpleStreamTester(chars, make)
def get_tester(self):
    '''
    Build a tester over a mixed-type item stream (sub-lists disabled).
    '''
    data = [1, "two", [3]]
    build = lambda seq: DEFAULT_STREAM_FACTORY.from_items(seq,
                                                          sub_list=False)
    return SimpleStreamTester(data, build)