def test_record_overhead():
    """Per-record byte overhead must match the known value for each magic version."""
    expected = {0: 14, 1: 22}
    for magic, overhead in expected.items():
        assert LegacyRecordBatchBuilder.record_overhead(magic) == overhead
def test_record_overhead():
    """record_overhead() returns the fixed per-message overhead for magic 0 and 1."""
    # (magic, expected_overhead) pairs for the legacy record formats.
    for magic, overhead in ((0, 14), (1, 22)):
        got = LegacyRecordBatchBuilder.record_overhead(magic)
        assert got == overhead
def _serialize(self, topic, key, value):
    """Serialize *key* and *value* and enforce the configured size limit.

    Applies the optional key/value serializers, then estimates the on-wire
    message size (record overhead plus serialized payload lengths) and
    raises MessageSizeTooLargeError if it exceeds ``max_request_size``.
    Returns the ``(serialized_key, serialized_value)`` pair.
    """
    serialized_key = self._key_serializer(key) if self._key_serializer else key
    serialized_value = (
        self._value_serializer(value) if self._value_serializer else value
    )

    # Fixed framing overhead for the producer's record format version.
    size = LegacyRecordBatchBuilder.record_overhead(self._producer_magic)
    for part in (serialized_key, serialized_value):
        if part is not None:
            size += len(part)

    if size > self._max_request_size:
        raise MessageSizeTooLargeError(
            "The message is %d bytes when serialized which is larger than"
            " the maximum request size you have configured with the"
            " max_request_size configuration" % size)

    return serialized_key, serialized_value
def _serialize(self, topic, key, value):
    """Run the configured serializers and validate the resulting size.

    Raises MessageSizeTooLargeError when overhead + key + value exceeds
    ``max_request_size``; otherwise returns the serialized pair.
    """
    if self._key_serializer:
        ser_key = self._key_serializer(key)
    else:
        ser_key = key

    if self._value_serializer:
        ser_value = self._value_serializer(value)
    else:
        ser_value = value

    # Start from the record-format overhead, then add payload bytes.
    total = LegacyRecordBatchBuilder.record_overhead(self._producer_magic)
    if ser_key is not None:
        total += len(ser_key)
    if ser_value is not None:
        total += len(ser_value)

    if total > self._max_request_size:
        raise MessageSizeTooLargeError(
            "The message is %d bytes when serialized which is larger than"
            " the maximum request size you have configured with the"
            " max_request_size configuration" % total)

    return ser_key, ser_value