Пример #1
0
def test_read_write_serde_v2(compression_type, crc):
    """Round-trip ten records through a v2 (magic=2) transactional batch
    builder and verify the decoded batch metadata, CRC, and per-record
    offset/timestamp/key/value/headers fields.
    """
    record_headers = [("header1", b"aaa"), ("header2", b"bbb")]
    batch_builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=compression_type, is_transactional=1,
        producer_id=123456, producer_epoch=123, base_sequence=9999,
        batch_size=999999)
    for rel_offset in range(10):
        batch_builder.append(
            rel_offset, timestamp=9999999 + rel_offset, key=b"test",
            value=b"Super", headers=record_headers)
    encoded = batch_builder.build()

    decoded = DefaultRecordBatch(bytes(encoded))
    assert decoded.validate_crc()
    records = list(decoded)

    # Batch-level header fields written by the builder.
    assert decoded.is_transactional is True
    assert decoded.is_control_batch is False
    assert decoded.compression_type == compression_type
    assert decoded.magic == 2
    assert decoded.timestamp_type == 0
    assert decoded.base_offset == 0
    assert decoded.last_offset_delta == 9
    assert decoded.next_offset == 10
    assert decoded.first_timestamp == 9999999
    assert decoded.max_timestamp == 10000008
    if crc is not None:
        assert decoded.crc == crc
    # Per-record fields must match exactly what was appended.
    for expected_offset, record in enumerate(records):
        assert record.offset == expected_offset
        assert record.timestamp == 9999999 + expected_offset
        assert record.key == b"test"
        assert record.value == b"Super"
        assert record.headers == record_headers
Пример #2
0
def test_unavailable_codec(compression_type, name, checker_name):
    """With the codec-availability checker patched to return False, both
    the builder (on encode) and the reader (on decode) must raise
    UnsupportedCodecError naming the missing compression library.
    """
    # Build a valid compressed batch while the codec is still "available",
    # so the reader side can be exercised independently below.
    working_builder = DefaultRecordBatchBuilder(magic=2,
                                                compression_type=compression_type,
                                                is_transactional=0,
                                                producer_id=-1,
                                                producer_epoch=-1,
                                                base_sequence=-1,
                                                batch_size=1024)
    working_builder.append(
        0, timestamp=None, key=None, value=b"M" * 2000, headers=[])
    correct_buffer = working_builder.build()

    with mock.patch.object(kafka.codec, checker_name, return_value=False):
        expected = f"Libraries for {name} compression codec not found"

        # Encoding path: building a batch with the unavailable codec fails.
        failing_builder = DefaultRecordBatchBuilder(magic=2,
                                                    compression_type=compression_type,
                                                    is_transactional=0,
                                                    producer_id=-1,
                                                    producer_epoch=-1,
                                                    base_sequence=-1,
                                                    batch_size=1024)
        with pytest.raises(UnsupportedCodecError, match=expected):
            failing_builder.append(
                0, timestamp=None, key=None, value=b"M", headers=[])
            failing_builder.build()

        # Decoding path: iterating a previously valid batch fails the same way.
        with pytest.raises(UnsupportedCodecError, match=expected):
            list(DefaultRecordBatch(bytes(correct_buffer)))
Пример #3
0
def test_build_without_append():
    """A v2 batch built with no records appended must still encode, and
    decoding it must yield zero records.
    """
    empty_builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=1,
        producer_id=123456, producer_epoch=123, base_sequence=9999,
        batch_size=999999)
    encoded = empty_builder.build()

    decoded = DefaultRecordBatch(bytes(encoded))
    assert list(decoded) == []
Пример #4
0
def test_set_producer_state():
    """set_producer_state() must override the initial sentinel (-1)
    producer fields, and the new values must survive an encode/decode
    round trip.
    """
    state_builder = DefaultRecordBatchBuilder(
        magic=2, compression_type=0, is_transactional=0,
        producer_id=-1, producer_epoch=-1, base_sequence=-1,
        batch_size=999999)
    state_builder.set_producer_state(
        producer_id=700,
        producer_epoch=5,
        base_sequence=17)
    # Builder reflects the override immediately.
    assert state_builder.producer_id == 700

    # And the values persist through serialization.
    decoded = DefaultRecordBatch(bytes(state_builder.build()))
    assert decoded.producer_id == 700
    assert decoded.producer_epoch == 5
    assert decoded.base_sequence == 17