Example #1
    def _step(self):
        """Progress one step through the batch, or fetch another physical
        record if the batch is exhausted. Returns ``True`` so long as the
        collection range has not been exceeded."""
        # Previous record was non-batch, or previous batch exhausted. Need to
        # fetch another record.
        if not self._index:
            # Have we visited maximum number of physical records? If so, stop
            # iteration.
            if not self._max_phys:
                return False
            self._max_phys -= 1

            # Get the next record and decode its key. from_raw() returns None
            # if the key's prefix doesn't match self.prefix, which indicates
            # we've reached the end of the collection.
            phys_key, self.raw = next(self.it, ('', ''))
            self.keys = keylib.KeyList.from_raw(phys_key, self.prefix)
            if not self.keys:
                return False

            self.phys_key = phys_key
            lenk = len(self.keys)
            # Single record.
            if lenk == 1:
                self.key = self.keys[0]
                self.data = self.raw
                self._index = 0
                return True

            # Decode the array of logical record offsets and save it, along
            # with the decompressed concatenation of all records.
            self._offsets, dstart = decode_offsets(self.raw)
            self.concat = self.compressor.unpack(buffer(self.raw, dstart))
            self._index = lenk

        self._index -= 1
        if self._reverse:
            idx = self._index
        else:
            idx = (len(self.keys) - self._index) - 1
        start = self._offsets[idx]
        stop = self._offsets[idx + 1]
        self.key = self.keys[-1 - idx]
        self.data = self.concat[start:stop]
        return True
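
The batch branch above depends on two invariants: the offsets array carries one entry per record boundary (the code indexes up to offsets[len(keys)]), and the key list is stored highest key first, so the record sliced at offsets[idx]:offsets[idx + 1] pairs with keys[-1 - idx]. The following is a minimal stand-alone sketch of that layout; batch_keys, batch_offsets, batch_concat and logical_records() are hypothetical stand-ins for self.keys, self._offsets, self.concat and the index arithmetic in _step(), not part of the library.

# A minimal sketch of how a batch record is sliced back into logical records.
# batch_keys, batch_offsets and batch_concat are hypothetical stand-ins for
# self.keys, self._offsets and self.concat in _step() above.
batch_keys = ['key3', 'key2', 'key1']      # highest logical key stored first
batch_concat = b'aaabbcccc'                # decompressed concatenation
batch_offsets = [0, 3, 5, 9]               # one entry per record boundary

def logical_records(reverse=False):
    lenk = len(batch_keys)
    order = range(lenk - 1, -1, -1) if reverse else range(lenk)
    for idx in order:
        start, stop = batch_offsets[idx], batch_offsets[idx + 1]
        # Slice idx pairs with the key counted backwards from the end of the
        # key list, mirroring keys[-1 - idx] in _step().
        yield batch_keys[-1 - idx], batch_concat[start:stop]

print(list(logical_records()))
# [('key1', b'aaa'), ('key2', b'bb'), ('key3', b'cccc')]
print(list(logical_records(reverse=True)))
# [('key3', b'cccc'), ('key2', b'bb'), ('key1', b'aaa')]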
Example #2
    def _logical_iter(self, it, reverse, prefix_s, prefix):
        """Generator that wraps a database engine iterator to yield logical
        records. For compressed records, each physical record may contain
        multiple physical records. This job's function is to make the
        distinction invisible to reads."""
        #   * When iterating forward, if first yielded key lacks collection
        #     prefix, result of iteration is empty.
        #   * When iterating reverse, if first yielded key lacks collection
        #     prefix, discard, then behave as forward.
        #   * Records are discarded in the direction of iteration until
        #     startpred() or not self.prefix.
        #   * Records are yielded following startpred() until not endpred() or
        #     not self.prefix.
        prefix = prefix or ()
        tup = next(it, None)
        if tup and tup[0][:len(prefix_s)] == prefix_s:
            it = itertools.chain((tup,), it)
        for key, value in it:
            keys = keylib.unpacks(prefix_s, key)
            if not keys:
                return

            lenk = len(keys)
            if lenk == 1:
                key = keylib.Key(*(prefix + keys[0]))
                yield False, key, self._decompress(value)
            else: # Batch record.
                offsets, dstart = decode_offsets(value)
                data = self._decompress(buffer(value, dstart))
                if reverse:
                    stop = -1
                    step = -1
                    i = lenk - 1
                else:
                    stop = lenk
                    step = 1
                    i = 0
                while i != stop:
                    key = keys[-1 - i]
                    offs = offsets[i]
                    size = offsets[i+1] - offs
                    key = keylib.Key(*(prefix + key))
                    yield True, key, buffer(data, offs, size)
                    i += step
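
The prefix test at the top of _logical_iter() is a peek-and-push-back idiom: the first physical record is pulled from the engine iterator and only re-chained in front of it when its key carries the collection prefix; otherwise it is dropped, which is the reverse-iteration case described in the bullet comments. Below is a short self-contained sketch of the same idiom with hypothetical sample records and prefix; it does not use the library's engine API.

import itertools

# Hypothetical sample data: (physical_key, value) pairs, as an engine iterator
# might yield them when a reverse scan starts just past the collection.
records = iter([('zz/outside', 'junk'),    # first key lacks the collection prefix
                ('coll/2', 'value-2'),
                ('coll/1', 'value-1')])
prefix_s = 'coll/'

# Peek at the first record; push it back only if it belongs to the collection.
tup = next(records, None)
if tup and tup[0][:len(prefix_s)] == prefix_s:
    records = itertools.chain((tup,), records)

for key, value in records:
    if not key.startswith(prefix_s):
        break                              # ran off the end of the collection
    print(key, value)
# coll/2 value-2
# coll/1 value-1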