Example #1
def test_token_stream_iterator():
    # Low-level tests, for more coverage
    stream = TokenStream.from_tuple_iter(TEST_STREAM_TUPLE)
    assert_true(isinstance(iter(stream), TokenStreamIterator))
    # check that TokenStreamIterator.__iter__ works as expected (required for coverage)
    assert_true(isinstance(iter(iter(stream)), TokenStreamIterator))
    iter_ = iter(stream)
    assert_equal(iter_._stream.current.type, 'bold')
    next(iter_)
    assert_equal(iter_._stream.current.type, 'italic')
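For reference, here is a minimal sketch of the kind of fixture these assertions imply. The concrete token types and values are assumptions for illustration; the test above only implies that a 'bold' token comes first and an 'italic' token second:

# Hypothetical fixture: an iterable of (type, value) tuples that
# TokenStream.from_tuple_iter() can consume.  Only the order of the
# 'bold' and 'italic' tokens is implied by the test; the values are
# placeholders.
TEST_STREAM_TUPLE = (
    ('bold', '**'),
    ('italic', "''"),
    ('text', 'hello'),
)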
Example #2
def test_look():
    stream = TokenStream.from_tuple_iter(TEST_STREAM)
    for iexp, exp in enumerate(TEST_STREAM):
        new = stream.look()
        if new.type != 'eof':
            assert_equal(TEST_STREAM[iexp+1].as_tuple(),
                         new.as_tuple())
        stream.next()
    # this is a bit fancy, but imho the right behaviour
    # XXX: does this belong here and not to `test_feed`?
    stream.push(Token('fooobaaaar'))
    assert_equal(stream.current.type, 'eof')
    assert_equal(stream.look().type, 'fooobaaaar')
    # skip the current 'eof' token and the 'fooobaaaar' token
    stream.skip(2)
    assert_equal(stream.current.type, 'eof')
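To summarise the behaviour exercised here: look() peeks at the next token without consuming the current one, push() inserts a token behind the current one, and skip(n) advances the stream n times. A minimal sketch, using placeholder token types:

# Placeholder tokens; any (type, value) tuples would do.
stream = TokenStream.from_tuple_iter((('a', '1'), ('b', '2')))
assert stream.current.type == 'a'   # look() must not consume this token
assert stream.look().type == 'b'    # peek one token ahead...
assert stream.current.type == 'a'   # ...without advancing the stream
stream.skip(1)                      # same effect as advancing once
assert stream.current.type == 'b'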
Example #3
    def tokenize(self, raw=None, enable_escaping=False):
        """
        Tokenize the raw document, apply stream-filters
        and return the processing-ready token stream.

        :param raw: The raw document.
        :param enable_escaping: Passed on to the `Context` and the
                                lexing rules.
        :return: A `TokenStream` instance.
        """
        ctx = Context(self, enable_escaping)
        stream = TokenStream.from_tuple_iter(
            self._process_lexing_rules(raw or self.raw, enable_escaping))

        for callback in events.iter_callbacks("process-stream"):
            ret = callback(stream, ctx)
            if ret is not None:
                stream = ret

        return stream
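The loop gives every 'process-stream' callback a choice: return None to keep (and possibly mutate in place) the stream it was given, or return a replacement stream. Below is a minimal sketch of such a filter; the 'whitespace' token type is an assumption, and how callbacks get registered is not shown in this excerpt, only events.iter_callbacks('process-stream') is:

def drop_whitespace(stream, ctx):
    # Hypothetical 'process-stream' filter.  Returning a new stream makes
    # tokenize() swap it in; returning None would keep the original.
    # The 'whitespace' token type is assumed for illustration.
    return TokenStream.from_tuple_iter(
        token.as_tuple() for token in stream
        if token.type != 'whitespace'
    )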