Example No. 1
def test_tokenstream_picklable():
    import pickle
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_equals(s.current, Token('a', 1))
    s.next()
    assert_equals(s.current, Token('b', 2))
    # round-trip through pickle: the restored stream must resume at the
    # same position
    dumped = pickle.dumps(s)
    loaded = pickle.loads(dumped)
    assert_equals(loaded.current, Token('b', 2))
    loaded.next()
    assert_equals(loaded.current, Token('c', 3))
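Pickling works here only because the stream does not try to serialize its live iterator (iterators and generators are not picklable); the pending tokens have to be captured as plain data instead. Below is a minimal sketch of that idea, not the library's actual implementation, with a hypothetical Token namedtuple standing in for the real Token class:

import pickle
from collections import namedtuple

Token = namedtuple('Token', ['type', 'value'])   # hypothetical stand-in


class StreamSketch(object):
    """Hypothetical stand-in for TokenStream, kept picklable via __reduce__."""

    def __init__(self, iterable):
        self._iter = iter(iterable)
        self.current = None
        self.next()                          # load the first token

    def next(self):
        try:
            self.current = next(self._iter)
        except StopIteration:
            self.current = Token('eof', None)

    def __reduce__(self):
        # capture the current token plus everything still pending as a plain
        # tuple, which pickle can serialize; the object is rebuilt from it
        pending = (self.current,) + tuple(self._iter)
        self._iter = iter(pending[1:])       # keep this instance usable too
        return self.__class__, (pending,)


s = StreamSketch(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
s.next()
loaded = pickle.loads(pickle.dumps(s))
assert loaded.current == Token('b', 2)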
Example No. 2
def test_tokenstream():
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_equals(s.current, Token('a', 1))

    # from_tuple_iter
    s = TokenStream.from_tuple_iter(iter((('a', 1), ('b', 2), ('c', 3))))
    assert_equals(s.current, Token('a', 1))

    # iter
    assert_true(isinstance(iter(s), TokenStreamIterator))
    assert_equals(tuple(iter(s)),
                  (Token('a', 1), Token('b', 2), Token('c', 3)))

    # eof
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_false(s.eof)
    list(s)
    assert_true(s.eof)

    # look, push
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_equals(s.current, Token('a', 1))
    assert_equals(s.look(), Token('b', 2))
    s.next()
    assert_equals(s.look(), Token('c', 3))
    s.push(Token('b', 2))
    assert_equals(s.look(), Token('b', 2))
    s.push(Token('e', 4), current=True)
    assert_equals(s.current, Token('e', 4))
    assert_equals(s.look(), Token('b', 2))

    # skip, next
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    s.skip(1)
    assert_equals(s.current, Token('b', 2))
    s.next()
    assert_equals(s.current, Token('c', 3))
    s.push(Token('e', 4))
    assert_equals(s.current, Token('c', 3))
    s.next()
    assert_equals(s.current, Token('e', 4))
    s.next()
    assert_equals(s.current, Token('eof', None))

    # expect
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_equals(s.expect('a'), Token('a', 1))
    assert_equals(s.expect('b', 2), Token('b', 2))
    assert_raises(AssertionError, s.expect, 'e')
    assert_raises(AssertionError, s.expect, 'c', 5)

    # test
    s = TokenStream(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
    assert_true(s.test('a'))
    s.next()
    assert_true(s.test('b', 2))

    # shift
    assert_equals(s.current, Token('b', 2))
    s.shift(Token('f', 5))
    assert_equals(s.current, Token('f', 5))
    s.next()
    assert_equals(s.current, Token('b', 2))
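The assertions above pin down the lookahead behaviour: look() peeks at the next token without advancing, push() queues a token to be returned ahead of the underlying iterator (and with current=True it also replaces the current token), and shift() interposes a token while putting the old current back. A minimal sketch of that bookkeeping, assuming the pushed-back tokens simply live on a small stack consulted before the iterator (again with a hypothetical Token stand-in):

from collections import namedtuple

Token = namedtuple('Token', ['type', 'value'])   # hypothetical stand-in


class LookaheadSketch(object):
    """Hypothetical model of the look/push/shift behaviour asserted above."""

    def __init__(self, iterable):
        self._iter = iter(iterable)
        self._pushed = []                    # handed out before the iterator
        self.current = None
        self.next()

    def _fetch(self):
        if self._pushed:
            return self._pushed.pop()
        try:
            return next(self._iter)
        except StopIteration:
            return Token('eof', None)

    def next(self):
        self.current = self._fetch()

    def skip(self, n=1):
        for _ in range(n):
            self.next()

    def look(self):
        # peek: fetch the next token, then put it straight back
        token = self._fetch()
        self._pushed.append(token)
        return token

    def push(self, token, current=False):
        # queue a token; with current=True it becomes .current and the old
        # current token is queued instead
        if current:
            self._pushed.append(self.current)
            self.current = token
        else:
            self._pushed.append(token)

    def shift(self, token):
        # interpose a token; the old current comes back on the next .next()
        self._pushed.append(self.current)
        self.current = token


s = LookaheadSketch(iter((Token('a', 1), Token('b', 2), Token('c', 3))))
assert s.look() == Token('b', 2)
s.shift(Token('f', 5))
assert s.current == Token('f', 5)
s.next()
assert s.current == Token('a', 1)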
Example No. 3
    def tokenize(self, string):
        """
        Resolve quotes and parse them quote by quote in an isolated environment.
        """
        buffer = []              # lines collected for the current quote level
        stack = [0]              # open quote nesting levels (0 = top level)
        open_blocks = [False]    # whether a multi-line block is open per level

        def tokenize_buffer():
            # flush the lines collected so far through the block tokenizer
            for item in self.tokenize_block(u'\n'.join(buffer)):
                yield item
            del buffer[:]

        def changes_block_state(line, reverse):
            # True if the line flips the open-block state: it contains a block
            # start with no matching end after it (with reverse=True the roles
            # of the two patterns are swapped).
            primary = self._block_start_re.search
            secondary = self._block_end_re.search
            if reverse:
                primary, secondary = secondary, primary
            match = primary(line)
            if match is None:
                return False
            while 1:
                match = secondary(line, match.end())
                if match is None:
                    return True
                match = primary(line, match.end())
                if match is None:
                    return False

        def tokenize_blocks():
            for line in string.splitlines():
                block_open = open_blocks[-1]
                if not block_open:
                    m = self._quote_re.match(line)
                    if m is None:
                        level = 0
                    else:
                        level = len(m.group(1))
                        line = line[m.end():]
                    if level > stack[-1]:
                        for item in tokenize_buffer():
                            yield item
                        for new_level in xrange(stack[-1] + 1, level + 1):
                            stack.append(new_level)
                            open_blocks.append(False)
                            yield 'quote_begin', None
                    elif level < stack[-1]:
                        for item in tokenize_buffer():
                            yield item
                        for x in xrange(stack[-1] - level):
                            stack.pop()
                            open_blocks.pop()
                            yield 'quote_end', None
                else:
                    # inside an open block: strip the quote prefix literally,
                    # without touching the nesting level
                    line = re.sub('^' + '> ?' * (len(open_blocks) - 1), '',
                                  line)
                if not block_open and changes_block_state(line, False):
                    open_blocks[-1] = True
                elif block_open and changes_block_state(line, True):
                    open_blocks[-1] = False
                buffer.append(line)

            for item in tokenize_buffer():
                yield item
            while stack:
                if stack.pop():
                    yield 'quote_end', None
                open_blocks.pop()

        return TokenStream.from_tuple_iter(tokenize_blocks())
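The interesting part of tokenize_blocks() above is the quote handling: a stack of open quote levels, with 'quote_begin' / 'quote_end' tuples emitted whenever the '>' prefix of a line gets deeper or shallower, while open_blocks suppresses that bookkeeping inside multi-line blocks. Below is a standalone sketch of just the level tracking, under the assumption that _quote_re matches a run of '>' characters; the real _quote_re and tokenize_block live on the parser class and are not shown here:

import re

_quote_re = re.compile(r'^(>+) ?')       # assumed shape of the real _quote_re


def quote_events(text):
    """Yield (type, value) tuples for nested '>' quotes.

    This mirrors only the level tracking of tokenize_blocks(); buffering and
    the open-block handling are left out.
    """
    stack = [0]
    for line in text.splitlines():
        m = _quote_re.match(line)
        level = len(m.group(1)) if m else 0
        if m:
            line = line[m.end():]
        while level > stack[-1]:             # quote got deeper
            stack.append(stack[-1] + 1)
            yield 'quote_begin', None
        while level < stack[-1]:             # quote got shallower
            stack.pop()
            yield 'quote_end', None
        yield 'text', line
    while stack[-1]:                         # close whatever is still open
        stack.pop()
        yield 'quote_end', None

For '> outer\n>> inner\nplain' this yields quote_begin, ('text', 'outer'), quote_begin, ('text', 'inner'), quote_end, quote_end, ('text', 'plain'): the same begin/end pairing the method above produces, except that there the 'text' tuples are replaced by whatever tokenize_block() yields for the buffered lines.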