Example #1
 def next_batch(self, _min_slice=MIN_SLICE, _magic_offset=MAGIC_OFFSET):
     """Return the next record batch from the buffer, or None if exhausted.

     Raises CorruptRecordException when the remaining slice is smaller
     than the minimum record overhead.
     """
     current = self._next_slice
     if current is None:
         return None
     if len(current) < _min_slice:
         raise CorruptRecordException(
             "Record size is less than the minimum record overhead "
             "({})".format(_min_slice - self.LOG_OVERHEAD))
     # Advance the internal cursor before decoding the slice we hold.
     self._cache_next()
     magic, = struct.unpack_from(">b", current, _magic_offset)
     # magic 0/1 -> legacy message format; magic >= 2 -> default (v2) format
     if magic >= 2:
         return DefaultRecordBatch(current)
     return LegacyRecordBatch(current, magic)
def test_read_write_serde_v0_v1_no_compression(magic):
    """Round-trip a single record through the legacy (v0/v1) serde."""
    writer = LegacyRecordBatchBuilder(
        magic=magic, compression_type=0, batch_size=9999999)
    writer.append(0, timestamp=9999999, key=b"test", value=b"Super")
    raw = writer.build()

    decoded = list(LegacyRecordBatch(bytes(raw), magic))
    assert len(decoded) == 1
    record = decoded[0]

    # magic=0 carries no timestamp info; the checksum differs per format.
    expected_timestamp = 9999999 if magic else None
    expected_ts_type = 0 if magic else None
    expected_crc = (-2095076219 if magic else 278251978) & 0xffffffff

    assert record.offset == 0
    assert record.timestamp == expected_timestamp
    assert record.timestamp_type == expected_ts_type
    assert record.key == b"test"
    assert record.value == b"Super"
    assert record.checksum == expected_crc
def test_read_write_serde_v0_v1_with_compression(compression_type, magic):
    """Round-trip ten records through the legacy (v0/v1) serde with compression.

    Verifies that every appended record survives a build/parse cycle and
    that offsets, timestamps, payloads and checksums are preserved.
    """
    builder = LegacyRecordBatchBuilder(magic=magic,
                                       compression_type=compression_type,
                                       batch_size=9999999)
    for offset in range(10):
        builder.append(offset, timestamp=9999999, key=b"test", value=b"Super")
    buffer = builder.build()

    batch = LegacyRecordBatch(bytes(buffer), magic)
    msgs = list(batch)
    # Without this count check the loop below passes vacuously if the
    # compressed batch decodes to zero records.
    assert len(msgs) == 10

    for offset, msg in enumerate(msgs):
        assert msg.offset == offset
        assert msg.timestamp == (9999999 if magic else None)
        assert msg.timestamp_type == (0 if magic else None)
        assert msg.key == b"test"
        assert msg.value == b"Super"
        assert msg.checksum == (-2095076219 if magic else 278251978) & \
            0xffffffff
def test_unavailable_codec(magic, compression_type, name, checker_name):
    """Builder and reader must both raise when a codec's libraries are missing."""
    builder = LegacyRecordBatchBuilder(
        magic=magic, compression_type=compression_type, batch_size=1024)
    builder.append(0, timestamp=None, key=None, value=b"M")
    correct_buffer = builder.build()

    with patch.object(kafka.codec, checker_name) as mocked:
        mocked.return_value = False
        error_msg = "Libraries for {} compression codec not found".format(name)

        # Writing with the unavailable codec must fail.
        failing_builder = LegacyRecordBatchBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024)
        with pytest.raises(UnsupportedCodecError, match=error_msg):
            failing_builder.append(0, timestamp=None, key=None, value=b"M")
            failing_builder.build()

        # Reading a previously-built compressed buffer must fail the same way.
        batch = LegacyRecordBatch(bytes(correct_buffer), magic)
        with pytest.raises(UnsupportedCodecError, match=error_msg):
            list(batch)