def test_encode_message_set(self):
    """_encode_message_set emits the v0 wire format for a two-message set."""
    messages = [
        create_message(b"v1", b"k1"),
        create_message(b"v2", b"k2"),
    ]
    actual = KafkaCodec._encode_message_set(messages)
    expected = (
        struct.pack(">q", 0)             # MsgSet offset
        + struct.pack(">i", 18)          # Msg size
        + struct.pack(">i", 1474775406)  # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i", 2) + b"k1"   # Key length, key
        + struct.pack(">i", 2) + b"v1"   # Value length, value
        + struct.pack(">q", 0)           # MsgSet offset
        + struct.pack(">i", 18)          # Msg size
        + struct.pack(">i", -16383415)   # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i", 2) + b"k2"   # Key length, key
        + struct.pack(">i", 2) + b"v2"   # Value length, value
    )
    self.assertEqual(actual, expected)
def test_create_gzip(self):
    """create_gzip_message wraps a message list in a gzip-coded envelope."""
    messages = [create_message(b"v1", None),
                create_message(b"v2", key=b'42')]
    wrapper = create_gzip_message(messages)
    self.assertEqual(wrapper.magic, 0)
    self.assertEqual(wrapper.attributes, ATTRIBUTE_CODEC_MASK & CODEC_GZIP)
    self.assertEqual(wrapper.key, None)
    # The gzipped payload is non-deterministic, so decompress before comparing.
    payload = gzip_decode(wrapper.value)
    expected = b"".join([
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 16),           # MsgSet size
        struct.pack(">i", 1285512130),   # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", -1),           # -1 marks a null key
        struct.pack(">i", 2),            # Value length
        b"v1",                           # Message contents
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 18),           # MsgSet size
        struct.pack(">i", 1929437987),   # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i2s", 2, b'42'),   # Key length + key
        struct.pack(">i", 2),            # Value length
        b"v2",                           # Message contents
    ])
    self.assertEqual(payload, expected)
def test_create_gzip(self):
    """A gzip-wrapped message set decompresses to the expected raw bytes."""
    wrapped = create_gzip_message([
        create_message(b"v1", None),
        create_message(b"v2", key=b'42'),
    ])
    self.assertEqual(wrapped.magic, 0)
    self.assertEqual(wrapped.attributes, ATTRIBUTE_CODEC_MASK & CODEC_GZIP)
    self.assertEqual(wrapped.key, None)
    # gzip output is non-deterministic; compare the decompressed payload.
    inner = gzip_decode(wrapped.value)
    expected = (
        struct.pack(">q", 0)             # MsgSet offset
        + struct.pack(">i", 16)          # MsgSet size
        + struct.pack(">i", 1285512130)  # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i", -1)          # -1 marks a null key
        + struct.pack(">i", 2)           # Value length
        + b"v1"
        + struct.pack(">q", 0)           # MsgSet offset
        + struct.pack(">i", 18)          # MsgSet size
        + struct.pack(">i", 1929437987)  # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i2s", 2, b'42')  # Key length + key
        + struct.pack(">i", 2)           # Value length
        + b"v2"
    )
    self.assertEqual(inner, expected)
def test_create_snappy(self):
    """create_snappy_message wraps a message list in a snappy-coded envelope."""
    if not has_snappy():
        raise SkipTest("Snappy not available")  # pragma: no cover
    wrapped = create_snappy_message([
        create_message(b"v3", key=b'84'),
        create_message(b"v4", None),
    ])
    self.assertEqual(wrapped.magic, 0)
    self.assertEqual(wrapped.attributes, ATTRIBUTE_CODEC_MASK & CODEC_SNAPPY)
    self.assertEqual(wrapped.key, None)
    # Decompress the envelope and compare the inner message set bytes.
    inner = snappy_decode(wrapped.value)
    expected = b"".join([
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 18),           # MsgSet size
        struct.pack(">i", 813233088),    # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i2s", 2, b'84'),   # Key length + key
        struct.pack(">i", 2),            # Value length
        b"v3",                           # Message contents
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 16),           # MsgSet size
        struct.pack(">i", 1022734157),   # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", -1),           # -1 marks a null key
        struct.pack(">i", 2),            # Value length
        b"v4",                           # Message contents
    ])
    self.assertEqual(inner, expected)
def test_encode_message_set(self):
    """_encode_message_set emits the v1 (magic 1, timestamped) wire format."""
    messages = [
        create_message((-1, b"v1"), b"k1"),
        create_message((-1, b"v2"), b"k2"),
    ]
    actual = KafkaCodec._encode_message_set(messages)
    expected = (
        struct.pack(">q", 0)               # MsgSet offset
        + struct.pack(">i", 26)            # Msg size
        + struct.pack(">i", -634178223)    # CRC
        + struct.pack(">bbq", 1, 0, -1)    # Magic, flags, timestamp
        + struct.pack(">i", 2) + b"k1"     # Key length, key
        + struct.pack(">i", 2) + b"v1"     # Value length, value
        + struct.pack(">q", 0)             # MsgSet offset
        + struct.pack(">i", 26)            # Msg size
        + struct.pack(">i", 1926397558)    # CRC
        + struct.pack(">bbq", 1, 0, -1)    # Magic, flags, timestamp
        + struct.pack(">i", 2) + b"k2"     # Key length, key
        + struct.pack(">i", 2) + b"v2"     # Value length, value
    )
    self.assertEqual(actual, expected)
def test_create_snappy(self):
    """A snappy-wrapped message set decompresses to the expected raw bytes."""
    if not has_snappy():
        raise SkipTest("Snappy not available")  # pragma: no cover
    messages = [create_message(b"v3", key=b'84'),
                create_message(b"v4", None)]
    envelope = create_snappy_message(messages)
    self.assertEqual(envelope.magic, 0)
    self.assertEqual(envelope.attributes, ATTRIBUTE_CODEC_MASK & CODEC_SNAPPY)
    self.assertEqual(envelope.key, None)
    payload = snappy_decode(envelope.value)
    expected = (
        struct.pack(">q", 0)             # MsgSet offset
        + struct.pack(">i", 18)          # MsgSet size
        + struct.pack(">i", 813233088)   # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i2s", 2, b'84')  # Key length + key
        + struct.pack(">i", 2)           # Value length
        + b"v3"
        + struct.pack(">q", 0)           # MsgSet offset
        + struct.pack(">i", 16)          # MsgSet size
        + struct.pack(">i", 1022734157)  # CRC
        + struct.pack(">bb", 0, 0)       # Magic, flags
        + struct.pack(">i", -1)          # -1 marks a null key
        + struct.pack(">i", 2)           # Value length
        + b"v4"
    )
    self.assertEqual(payload, expected)
def test_encode_message_set(self):
    """Two keyed v0 messages encode to the documented byte layout."""
    encoded = KafkaCodec._encode_message_set([
        create_message(b"v1", b"k1"),
        create_message(b"v2", b"k2"),
    ])
    expected = b"".join([
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 18),           # Msg size
        struct.pack(">i", 1474775406),   # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", 2),            # Key length
        b"k1",                           # Key
        struct.pack(">i", 2),            # Value length
        b"v1",                           # Value
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 18),           # Msg size
        struct.pack(">i", -16383415),    # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", 2),            # Key length
        b"k2",                           # Key
        struct.pack(">i", 2),            # Value length
        b"v2",                           # Value
    ])
    self.assertEqual(encoded, expected)
def test_decode_message_set_stop_iteration(self):
    """Trailing garbage after complete messages is ignored by the decoder."""
    raw = b"".join([
        struct.pack(">q", 0),            # MsgSet offset
        struct.pack(">i", 18),           # Msg size
        struct.pack(">i", 1474775406),   # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", 2),            # Key length
        b"k1",                           # Key
        struct.pack(">i", 2),            # Value length
        b"v1",                           # Value
        struct.pack(">q", 1),            # MsgSet offset
        struct.pack(">i", 18),           # Msg size
        struct.pack(">i", -16383415),    # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", 2),            # Key length
        b"k2",                           # Key
        struct.pack(">i", 2),            # Value length
        b"v2",                           # Value
        b"@1$%(Y!",                      # Random padding
    ])
    decoded = list(KafkaCodec._decode_message_set_iter(raw))
    self.assertEqual(len(decoded), 2)
    (offset1, message1), (offset2, message2) = decoded
    self.assertEqual(offset1, 0)
    self.assertEqual(message1, create_message(b"v1", b"k1"))
    self.assertEqual(offset2, 1)
    self.assertEqual(message2, create_message(b"v2", b"k2"))
def test_roundtrip_large_request(self):
    """
    A large request can be produced and fetched.
    """
    log.debug('Timestamp Before ProduceRequest')
    # Single message of a bit less than 1 MiB
    payload = (self.topic.encode() + b" message 0: "
               + (b"0123456789" * 10 + b'\n') * 90)
    produce = ProduceRequest(self.topic, 0, [create_message(payload)])
    log.debug('Timestamp before send')
    [produce_resp] = yield self.retry_while_broker_errors(
        self.client.send_produce_request, [produce])
    log.debug('Timestamp after send')
    self.assertEqual(produce_resp.error, 0)
    self.assertEqual(produce_resp.topic, self.topic)
    self.assertEqual(produce_resp.partition, 0)
    self.assertEqual(produce_resp.offset, 0)

    # Fetch request with max size of 1 MiB
    fetch = FetchRequest(self.topic, 0, 0, 1024 ** 2)
    [fetch_resp] = yield self.client.send_fetch_request(
        [fetch], max_wait_time=1000)
    self.assertEqual(fetch_resp.error, 0)
    self.assertEqual(fetch_resp.topic, self.topic)
    self.assertEqual(fetch_resp.partition, 0)
    self.assertEqual(len(list(fetch_resp.messages)), 1)
def test_roundtrip_large_request(self):
    """
    A message of roughly 5 MB can be produced and then fetched back.
    """
    log.debug('Timestamp Before ProduceRequest')
    # Single message of 5 MBish. The message payload must be bytes:
    # the topic name is str, so encode it before concatenating (the
    # original str-based concatenation fails on Python 3 and produced
    # a str payload the codec cannot encode).
    payload = (self.topic.encode() + b" message 0: "
               + (b"0123456789" * 10 + b'\n') * 51909)
    produce = ProduceRequest(self.topic, 0, [create_message(payload)])
    log.debug('Timestamp After ProduceRequest')
    produce_resp, = yield self.client.send_produce_request([produce])
    log.debug('Timestamp After Send')
    self.assertEqual(produce_resp.error, 0)
    self.assertEqual(produce_resp.topic, self.topic)
    self.assertEqual(produce_resp.partition, 0)
    self.assertEqual(produce_resp.offset, 0)

    # Fetch request with max size of 6MB
    fetch = FetchRequest(self.topic, 0, 0, 6 * 1048576)
    fetch_resp, = yield self.client.send_fetch_request([fetch],
                                                       max_wait_time=500)
    self.assertEqual(fetch_resp.error, 0)
    self.assertEqual(fetch_resp.topic, self.topic)
    self.assertEqual(fetch_resp.partition, 0)
    messages = list(fetch_resp.messages)
    self.assertEqual(len(messages), 1)
def test_roundtrip_large_request(self):
    """
    A large request can be produced and fetched.
    """
    log.debug('Timestamp Before ProduceRequest')
    # Single message of a bit less than 1 MiB
    body = self.topic.encode() + b" message 0: " + (
        b"0123456789" * 10 + b'\n') * 90
    request = ProduceRequest(self.topic, 0, [create_message(body)])
    log.debug('Timestamp before send')
    [response] = yield self.retry_while_broker_errors(
        self.client.send_produce_request, [request])
    log.debug('Timestamp after send')
    self.assertEqual(response.error, 0)
    self.assertEqual(response.topic, self.topic)
    self.assertEqual(response.partition, 0)
    self.assertEqual(response.offset, 0)

    # Fetch it back, allowing up to 1 MiB in the response.
    fetch = FetchRequest(self.topic, 0, 0, 1024 ** 2)
    fetch_resp, = yield self.client.send_fetch_request(
        [fetch], max_wait_time=1000)
    self.assertEqual(fetch_resp.error, 0)
    self.assertEqual(fetch_resp.topic, self.topic)
    self.assertEqual(fetch_resp.partition, 0)
    fetched = list(fetch_resp.messages)
    self.assertEqual(len(fetched), 1)
def test_decode_fetch_response(self):
    """decode_fetch_response yields one FetchResponse per topic/partition."""
    t1 = "topic1"
    t2 = u"topic2"
    msgs = [create_message(m)
            for m in [b"message1", b"hi", b"boo", b"foo", b"so fun!"]]
    ms1 = KafkaCodec._encode_message_set([msgs[0], msgs[1]])
    ms2 = KafkaCodec._encode_message_set([msgs[2]])
    ms3 = KafkaCodec._encode_message_set([msgs[3], msgs[4]])

    # Hand-build the response: correlation id, topic count, then per-topic
    # name / partition count / partition / error / highwater / message set.
    fmt = '>iih%dsiihqi%dsihqi%dsh%dsiihqi%ds' % (
        len(t1), len(ms1), len(ms2), len(t2), len(ms3))
    encoded = struct.pack(fmt,
                          4, 2, len(t1), t1.encode(),
                          2, 0, 0, 10, len(ms1), ms1,
                          1, 1, 20, len(ms2), ms2,
                          len(t2), t2.encode(),
                          1, 0, 0, 30, len(ms3), ms3)

    responses = list(KafkaCodec.decode_fetch_response(encoded))

    def expand_messages(response):
        # Materialize the lazy message iterator so responses compare equal.
        return FetchResponse(response.topic, response.partition,
                             response.error, response.highwaterMark,
                             list(response.messages))

    expanded = [expand_messages(r) for r in responses]
    expected = [
        FetchResponse(t1, 0, 0, 10, [OffsetAndMessage(0, msgs[0]),
                                     OffsetAndMessage(0, msgs[1])]),
        FetchResponse(t1, 1, 1, 20, [OffsetAndMessage(0, msgs[2])]),
        FetchResponse(t2, 0, 0, 30, [OffsetAndMessage(0, msgs[3]),
                                     OffsetAndMessage(0, msgs[4])]),
    ]
    self.assertEqual(expanded, expected)
def test_create_message(self):
    """create_message defaults to magic 0, no attributes, given key/value."""
    value = b"test"
    key = b"key"
    message = create_message(value, key)
    self.assertEqual(message.magic, 0)
    self.assertEqual(message.attributes, 0)
    self.assertEqual(message.key, key)
    self.assertEqual(message.value, value)
def test_decode_message_gzip(self):
    """A gzip-attributed message decodes to its two inner messages."""
    gzip_encoded = (b'\xc0\x11\xb2\xf0\x00\x01\xff\xff\xff\xff\x00\x00\x000'
                    b'\x1f\x8b\x08\x00\xa1\xc1\xc5R\x02\xffc`\x80\x03\x01'
                    b'\x9f\xf9\xd1\x87\x18\x18\xfe\x03\x01\x90\xc7Tf\xc8'
                    b'\x80$wu\x1aW\x05\x92\x9c\x11\x00z\xc0h\x888\x00\x00'
                    b'\x00')
    decoded = list(KafkaCodec._decode_message(gzip_encoded, 11))
    self.assertEqual(len(decoded), 2)
    (offset1, message1), (offset2, message2) = decoded
    # Inner messages carry offset 0, not the wrapper's offset.
    self.assertEqual(offset1, 0)
    self.assertEqual(message1, create_message(b"v1"))
    self.assertEqual(offset2, 0)
    self.assertEqual(message2, create_message(b"v2"))
def test_produce_request(self):
    """
    Five small messages can be produced to partition 0 of the test topic.
    """
    # Message payloads must be bytes: encode after formatting with the
    # str topic name (the original passed str, which the codec rejects
    # on Python 3).
    produce = ProduceRequest(
        self.topic, 0,
        [create_message((self.topic + " message %d" % i).encode())
         for i in range(5)])

    produce_resp, = yield self.client.send_produce_request([produce])
    self.assertEqual(produce_resp.error, 0)
    self.assertEqual(produce_resp.topic, self.topic)
    self.assertEqual(produce_resp.partition, 0)
    self.assertEqual(produce_resp.offset, 0)
def test_decode_message_gzip(self):
    """
    A gzip-attributed message decodes to its two inner messages.
    """
    # The wire payload must be bytes: the original used str literals,
    # which fail under Python 3 (the codec operates on bytes) and are
    # inconsistent with the other decode tests in this file.
    gzip_encoded = (b'\xc0\x11\xb2\xf0\x00\x01\xff\xff\xff\xff\x00\x00\x000'
                    b'\x1f\x8b\x08\x00\xa1\xc1\xc5R\x02\xffc`\x80\x03\x01'
                    b'\x9f\xf9\xd1\x87\x18\x18\xfe\x03\x01\x90\xc7Tf\xc8'
                    b'\x80$wu\x1aW\x05\x92\x9c\x11\x00z\xc0h\x888\x00\x00'
                    b'\x00')
    offset = 11
    messages = list(KafkaCodec._decode_message(gzip_encoded, offset))

    self.assertEqual(len(messages), 2)
    msg1, msg2 = messages

    returned_offset1, decoded_message1 = msg1
    self.assertEqual(returned_offset1, 0)
    self.assertEqual(decoded_message1, create_message(b"v1"))

    returned_offset2, decoded_message2 = msg2
    self.assertEqual(returned_offset2, 0)
    self.assertEqual(decoded_message2, create_message(b"v2"))
def test_decode_message_snappy(self):
    """A snappy-attributed message decodes to its two inner messages."""
    if not has_snappy():
        raise SkipTest("Snappy not available")  # pragma: no cover
    snappy_encoded = (b'\xec\x80\xa1\x95\x00\x02\xff\xff\xff\xff\x00\x00'
                      b'\x00,8\x00\x00\x19\x01@\x10L\x9f[\xc2\x00\x00\xff'
                      b'\xff\xff\xff\x00\x00\x00\x02v1\x19\x1bD\x00\x10\xd5'
                      b'\x96\nx\x00\x00\xff\xff\xff\xff\x00\x00\x00\x02v2')
    decoded = list(KafkaCodec._decode_message(snappy_encoded, 11))
    self.assertEqual(len(decoded), 2)
    (offset1, message1), (offset2, message2) = decoded
    # Inner messages carry offset 0, not the wrapper's offset.
    self.assertEqual(offset1, 0)
    self.assertEqual(message1, create_message(b"v1"))
    self.assertEqual(offset2, 0)
    self.assertEqual(message2, create_message(b"v2"))
def test_decode_message_snappy(self):
    """
    A snappy-attributed message decodes to its two inner messages.
    """
    if not has_snappy():
        raise SkipTest("Snappy not available")  # pragma: no cover
    # The wire payload must be bytes: the original used str literals,
    # which fail under Python 3 (the codec operates on bytes) and are
    # inconsistent with the other decode tests in this file.
    snappy_encoded = (b'\xec\x80\xa1\x95\x00\x02\xff\xff\xff\xff\x00\x00'
                      b'\x00,8\x00\x00\x19\x01@\x10L\x9f[\xc2\x00\x00\xff'
                      b'\xff\xff\xff\x00\x00\x00\x02v1\x19\x1bD\x00\x10\xd5'
                      b'\x96\nx\x00\x00\xff\xff\xff\xff\x00\x00\x00\x02v2')
    offset = 11
    messages = list(KafkaCodec._decode_message(snappy_encoded, offset))
    self.assertEqual(len(messages), 2)

    msg1, msg2 = messages

    returned_offset1, decoded_message1 = msg1
    self.assertEqual(returned_offset1, 0)
    self.assertEqual(decoded_message1, create_message(b"v1"))

    returned_offset2, decoded_message2 = msg2
    self.assertEqual(returned_offset2, 0)
    self.assertEqual(decoded_message2, create_message(b"v2"))
def test_produce_request(self):
    """Five small messages can be produced to partition 0 of the topic."""
    messages = [
        create_message(self.topic.encode() + b" message %d" % i)
        for i in range(5)
    ]
    request = ProduceRequest(self.topic, 0, messages)
    [response] = yield self.retry_while_broker_errors(
        self.client.send_produce_request, [request])
    self.assertEqual(response.error, 0)
    self.assertEqual(response.topic, self.topic)
    self.assertEqual(response.partition, 0)
    self.assertEqual(response.offset, 0)
def test_encode_message(self):
    """_encode_message emits the v0 (magic 0) single-message wire format."""
    encoded = KafkaCodec._encode_message(create_message(b"test", b"key"))
    expected = (
        struct.pack(">i", -1427009701)  # CRC
        + struct.pack(">bb", 0, 0)      # Magic, flags
        + struct.pack(">i", 3) + b"key"   # Key length, key
        + struct.pack(">i", 4) + b"test"  # Value length, value
    )
    self.assertEqual(encoded, expected)
def test_encode_message(self):
    """_encode_message emits the v1 (magic 1, timestamped) wire format."""
    message = create_message((47, b"test"), b"key", magic=1)
    encoded = KafkaCodec._encode_message(message)
    expected = (
        struct.pack(">i", -1232077792)    # CRC
        + struct.pack(">bbq", 1, 0, 47)   # Magic, flags, timestamp
        + struct.pack(">i", 3) + b"key"   # Key length, key
        + struct.pack(">i", 4) + b"test"  # Value length, value
    )
    self.assertEqual(encoded, expected)
def test_encode_produce_request(self):
    """encode_produce_request (API v1) lays out header + per-topic sets."""
    requests = [
        ProduceRequest("topic1", 0, [
            create_message((-1, b"a")),
            create_message((-1, b"b")),
        ]),
        ProduceRequest(u"topic2", 1, [
            create_message((-1, b"c")),
        ]),
    ]

    encoded_a = KafkaCodec._encode_message(create_message((-1, b"a")))
    encoded_b = KafkaCodec._encode_message(create_message((-1, b"b")))
    encoded_c = KafkaCodec._encode_message(create_message((-1, b"c")))

    header = b"".join([
        struct.pack('>h', 0),                # Message type = Produce
        struct.pack('>h', 1),                # API version
        struct.pack('>i', 2),                # Correlation ID
        struct.pack('>h7s', 7, b"client1"),  # Client ID
        struct.pack('>h', 2),                # Num acks required
        struct.pack('>i', 100),              # Request timeout
        struct.pack('>i', 2),                # Number of requests
    ])

    # Each message in a set is preceded by an 8-byte offset and a
    # 4-byte length, hence the +24 (two messages) and +12 (one).
    set1_len = len(encoded_a) + len(encoded_b) + 24
    topic1 = b"".join([
        struct.pack('>h6s', 6, b'topic1'),     # Topic name
        struct.pack('>i', 1),                  # One message set
        struct.pack('>i', 0),                  # Partition 0
        struct.pack('>i', set1_len),           # Size of message set
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_a)),     # Length of message
        encoded_a,
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_b)),     # Length of message
        encoded_b,
    ])
    topic2 = b"".join([
        struct.pack('>h6s', 6, b'topic2'),     # Topic name
        struct.pack('>i', 1),                  # One message set
        struct.pack('>i', 1),                  # Partition 1
        struct.pack('>i', len(encoded_c) + 12),  # Size of message set
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_c)),     # Length of message
        encoded_c,
    ])

    encoded = KafkaCodec.encode_produce_request(
        b"client1", 2, requests, 2, 100)
    # Topic ordering is not guaranteed, so accept either arrangement.
    self.assertIn(encoded, [header + topic1 + topic2,
                            header + topic2 + topic1])
def test_produce_large_request(self):
    """
    Send large messages of about 950 KB in size. Note that per the default
    configuration Kafka only allows up to 1 MiB messages.
    """
    messages = [
        create_message(self.topic.encode() + b" message %d: " % i
                       + b"0123456789" * (950 * 100))
        for i in range(5)
    ]
    request = ProduceRequest(self.topic, 0, messages)
    [response] = yield self.retry_while_broker_errors(
        self.client.send_produce_request, [request])
    self.assertEqual(response.error, 0)
    self.assertEqual(response.topic, self.topic)
    self.assertEqual(response.partition, 0)
    self.assertEqual(response.offset, 0)
def test_produce_large_request(self):
    """
    Send large messages of about 950 KB in size. Note that per the default
    configuration Kafka only allows up to 1 MiB messages.
    """
    # The topic name is str; encode it before concatenating with the
    # bytes payload (the original `self.topic + b"..."` raises
    # TypeError on Python 3).
    produce = ProduceRequest(self.topic, 0, [
        create_message(self.topic.encode() + b" message %d: " % i
                       + b"0123456789" * (950 * 100))
        for i in range(5)
    ])

    produce_resp, = yield self.client.send_produce_request([produce])
    self.assertEqual(produce_resp.error, 0)
    self.assertEqual(produce_resp.topic, self.topic)
    self.assertEqual(produce_resp.partition, 0)
    self.assertEqual(produce_resp.offset, 0)
def make_fetch_response(id):
    """Build a canned fetch-response byte string with correlation id *id*.

    The response covers two topics: topic1 with two partitions (one
    carrying two messages, one carrying one) and topic2 with a single
    partition of two messages.
    """
    t1 = b"topic1"
    t2 = b"topic2"
    msgs = [create_message(m)
            for m in [b"message1", b"hi", b"boo", b"foo", b"so fun!"]]
    ms1 = KafkaCodec._encode_message_set([msgs[0], msgs[1]])
    ms2 = KafkaCodec._encode_message_set([msgs[2]])
    ms3 = KafkaCodec._encode_message_set([msgs[3], msgs[4]])

    layout = '>iih{}siihqi{}sihqi{}sh{}siihqi{}s'.format(
        len(t1), len(ms1), len(ms2), len(t2), len(ms3))
    return struct.pack(
        layout,
        id, 2,                                # correlation id, topic count
        len(t1), t1, 2,                       # topic1, two partitions
        0, 0, 10, len(ms1), ms1,              # partition 0, hw 10
        1, 1, 20, len(ms2), ms2,              # partition 1, error 1, hw 20
        len(t2), t2, 1,                       # topic2, one partition
        0, 0, 30, len(ms3), ms3)              # partition 0, hw 30
def test_decode_message(self):
    """_decode_message yields (offset, message) for a plain v0 message."""
    raw = (
        struct.pack(">i", -1427009701)    # CRC
        + struct.pack(">bb", 0, 0)        # Magic, flags
        + struct.pack(">i", 3) + b"key"   # Key length, key
        + struct.pack(">i", 4) + b"test"  # Value length, value
    )
    offset = 10
    decoded = list(KafkaCodec._decode_message(raw, offset))
    returned_offset, decoded_message = decoded[0]
    self.assertEqual(returned_offset, offset)
    self.assertEqual(decoded_message, create_message(b"test", b"key"))
def test_encode_produce_request(self):
    """encode_produce_request (API v0) lays out header + per-topic sets."""
    requests = [
        ProduceRequest("topic1", 0, [
            create_message(b"a"),
            create_message(b"b"),
        ]),
        ProduceRequest(u"topic2", 1, [
            create_message(b"c"),
        ]),
    ]

    encoded_a = KafkaCodec._encode_message(create_message(b"a"))
    encoded_b = KafkaCodec._encode_message(create_message(b"b"))
    encoded_c = KafkaCodec._encode_message(create_message(b"c"))

    header = b"".join([
        struct.pack('>h', 0),                # Message type = Produce
        struct.pack('>h', 0),                # API version
        struct.pack('>i', 2),                # Correlation ID
        struct.pack('>h7s', 7, b"client1"),  # Client ID
        struct.pack('>h', 2),                # Num acks required
        struct.pack('>i', 100),              # Request timeout
        struct.pack('>i', 2),                # Number of requests
    ])

    # Each message in a set is preceded by an 8-byte offset and a
    # 4-byte length, hence the +24 (two messages) and +12 (one).
    set1_len = len(encoded_a) + len(encoded_b) + 24
    topic1 = b"".join([
        struct.pack('>h6s', 6, b'topic1'),     # Topic name
        struct.pack('>i', 1),                  # One message set
        struct.pack('>i', 0),                  # Partition 0
        struct.pack('>i', set1_len),           # Size of message set
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_a)),     # Length of message
        encoded_a,
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_b)),     # Length of message
        encoded_b,
    ])
    topic2 = b"".join([
        struct.pack('>h6s', 6, b'topic2'),     # Topic name
        struct.pack('>i', 1),                  # One message set
        struct.pack('>i', 1),                  # Partition 1
        struct.pack('>i', len(encoded_c) + 12),  # Size of message set
        struct.pack('>q', 0),                  # No offset specified
        struct.pack('>i', len(encoded_c)),     # Length of message
        encoded_c,
    ])

    encoded = KafkaCodec.encode_produce_request(
        b"client1", 2, requests, 2, 100)
    # Topic ordering is not guaranteed, so accept either arrangement.
    self.assertIn(encoded, [header + topic1 + topic2,
                            header + topic2 + topic1])
def test_decode_message(self):
    """A single v0 message decodes back to its original key and value."""
    raw = b"".join([
        struct.pack(">i", -1427009701),  # CRC
        struct.pack(">bb", 0, 0),        # Magic, flags
        struct.pack(">i", 3),            # Key length
        b"key",
        struct.pack(">i", 4),            # Value length
        b"test",
    ])
    base_offset = 10
    results = list(KafkaCodec._decode_message(raw, base_offset))
    returned_offset, decoded_message = results[0]
    self.assertEqual(returned_offset, base_offset)
    self.assertEqual(decoded_message, create_message(b"test", b"key"))
def test_decode_fetch_response(self):
    """Fetch responses for two topics decode into FetchResponse tuples."""
    t1 = b"topic1"
    t2 = b"topic2"
    msgs = [create_message(m)
            for m in [b"message1", b"hi", b"boo", b"foo", b"so fun!"]]
    ms1 = KafkaCodec._encode_message_set([msgs[0], msgs[1]])
    ms2 = KafkaCodec._encode_message_set([msgs[2]])
    ms3 = KafkaCodec._encode_message_set([msgs[3], msgs[4]])

    # Hand-build the wire response: correlation id, topic count, then
    # per-topic name / partition count / partition records.
    layout = '>iih%dsiihqi%dsihqi%dsh%dsiihqi%ds' % (
        len(t1), len(ms1), len(ms2), len(t2), len(ms3))
    encoded = struct.pack(layout,
                          4, 2,
                          len(t1), t1, 2,
                          0, 0, 10, len(ms1), ms1,
                          1, 1, 20, len(ms2), ms2,
                          len(t2), t2, 1,
                          0, 0, 30, len(ms3), ms3)

    responses = list(KafkaCodec.decode_fetch_response(encoded))

    def expand_messages(response):
        # Materialize the lazy message iterator so responses compare equal.
        return FetchResponse(response.topic, response.partition,
                             response.error, response.highwaterMark,
                             list(response.messages))

    expanded = [expand_messages(r) for r in responses]
    expected = [
        FetchResponse(t1, 0, 0, 10, [OffsetAndMessage(0, msgs[0]),
                                     OffsetAndMessage(0, msgs[1])]),
        FetchResponse(t1, 1, 1, 20, [OffsetAndMessage(0, msgs[2])]),
        FetchResponse(t2, 0, 0, 30, [OffsetAndMessage(0, msgs[3]),
                                     OffsetAndMessage(0, msgs[4])]),
    ]
    self.assertEqual(expanded, expected)