Example #1
def test_next():
    stream = TokenStream(iter(TEST_STREAM))
    for exp in TEST_STREAM:
        eq_(exp.as_tuple(), stream.current.as_tuple())
        stream.next()
    assert_equal(stream.current.type, 'eof')
    # test `TokenStream.eof` as well
    assert_true(stream.eof)
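These tests rely on module-level fixtures (TEST_STREAM, TEST_STREAM_TUPLE) and on the Token/TokenStream classes of the project under test, none of which appear in the excerpts. The rough sketch below only mirrors the shape the assertions depend on; the concrete token types and values are assumptions, not taken from the source project.

# Hypothetical fixture sketch: a token exposes .type, an optional .value and
# an as_tuple() helper, exactly as the assertions above use them.
class Token(object):
    def __init__(self, type, value=None):
        self.type = type
        self.value = value

    def as_tuple(self):
        return (self.type, self.value)

# A small token sequence; the trailing 'eof' token is produced by the
# TokenStream itself once this sequence is exhausted, so it is not listed.
TEST_STREAM = [Token('bold'), Token('italic'), Token('text', 'foo')]
TEST_STREAM_TUPLE = [tok.as_tuple() for tok in TEST_STREAM]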
Example #2
def test_feed():
    stream = TokenStream()
    for name in ('bold', 'italic', 'uff', 'papapapa', 'foo',
                 'python', 'spaghetti', 'car', 'mom'):
        stream.push(Token(name))
    for idx, received in enumerate(stream):
        exp = TEST_STREAM[idx]
        assert_equal(exp.type, received.type)
    stream.push(Token('fam', 'foo'), True)  # push and make it the current token
    assert_equal(stream.current.type, 'fam')
    assert_true(stream.test('fam', 'foo'))
    assert_equal(Token('fam', 'foo'), stream.expect('fam', 'foo'))
Example #3
def test_token_stream_iterator():
    # Low-level tests, for more coverage
    stream = TokenStream.from_tuple_iter(TEST_STREAM_TUPLE)
    assert_true(isinstance(iter(stream), TokenStreamIterator))
    # check that TokenStreamIterator.__iter__ works as expected (required for coverage)
    assert_true(isinstance(iter(iter(stream)), TokenStreamIterator))
    iter_ = iter(stream)
    assert_equal(iter_._stream.current.type, 'bold')
    iter_.next()
    assert_equal(iter_._stream.current.type, 'italic')
Example #4
def test_look():
    stream = TokenStream.from_tuple_iter(TEST_STREAM)
    for iexp, exp in enumerate(TEST_STREAM):
        new = stream.look()
        if new.type != 'eof':
            assert_equal(TEST_STREAM[iexp+1].as_tuple(),
                         new.as_tuple())
        stream.next()
    # this is a bit fancy, but imho the right behaviour
    # XXX: does this belong here and not to `test_feed`?
    stream.push(Token('fooobaaaar'))
    assert_equal(stream.current.type, 'eof')
    assert_equal(stream.look().type, 'fooobaaaar')
    # skip the current 'eof' token and the 'fooobaaaar' token
    stream.skip(2)
    assert_equal(stream.current.type, 'eof')
Example #5
    def tokenize(self, raw=None, enable_escaping=False):
        """
        Tokenize the raw document, apply stream-filters
        and return the processing-ready token stream.

        :param raw: The raw document.
        :return: A `TokenStream` instance.
        """
        ctx = Context(self, enable_escaping)
        stream = TokenStream.from_tuple_iter(
            self._process_lexing_rules(raw or self.raw, enable_escaping))

        for callback in events.iter_callbacks('process-stream'):
            ret = callback(stream, ctx)
            if ret is not None:
                stream = ret

        return stream
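According to the docstring, each callback registered for the 'process-stream' event is called as callback(stream, ctx) and may return a replacement stream, which tokenize() then uses instead of the original. The sketch below only illustrates that contract; the filter name and the 'comment' token type are invented for the example, and how a callback gets registered with the events module is not shown in this excerpt.

def drop_comment_tokens(stream, ctx):
    # Hypothetical 'process-stream' filter: rebuild the stream without
    # 'comment' tokens and return a new TokenStream for tokenize() to use.
    filtered = [token for token in stream if token.type != 'comment']
    return TokenStream(iter(filtered))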