def test_encode_produce_request(self): requests = [ ProduceRequestPayload("topic1", 0, [ kafka.protocol.message.Message(b"a"), kafka.protocol.message.Message(b"b") ]), ProduceRequestPayload("topic2", 1, [kafka.protocol.message.Message(b"c")]) ] msg_a_binary = KafkaProtocol._encode_message(create_message(b"a")) msg_b_binary = KafkaProtocol._encode_message(create_message(b"b")) msg_c_binary = KafkaProtocol._encode_message(create_message(b"c")) header = b"".join([ struct.pack('>i', 0x94), # The length of the message overall struct.pack('>h', 0), # Msg Header, Message type = Produce struct.pack('>h', 0), # Msg Header, API version struct.pack('>i', 2), # Msg Header, Correlation ID struct.pack('>h7s', 7, b"client1"), # Msg Header, The client ID struct.pack('>h', 2), # Num acks required struct.pack('>i', 100), # Request Timeout struct.pack('>i', 2), # The number of requests ]) total_len = len(msg_a_binary) + len(msg_b_binary) topic1 = b"".join([ struct.pack('>h6s', 6, b'topic1'), # The topic1 struct.pack('>i', 1), # One message set struct.pack('>i', 0), # Partition 0 struct.pack('>i', total_len + 24), # Size of the incoming message set struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_a_binary)), # Length of message msg_a_binary, # Actual message struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_b_binary)), # Length of message msg_b_binary, # Actual message ]) topic2 = b"".join([ struct.pack('>h6s', 6, b'topic2'), # The topic1 struct.pack('>i', 1), # One message set struct.pack('>i', 1), # Partition 1 struct.pack('>i', len(msg_c_binary) + 12), # Size of the incoming message set struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_c_binary)), # Length of message msg_c_binary, # Actual message ]) expected1 = b"".join([header, topic1, topic2]) expected2 = b"".join([header, topic2, topic1]) encoded = KafkaProtocol.encode_produce_request(b"client1", 2, requests, 2, 100) self.assertIn(encoded, [expected1, expected2])
def test_encode_produce_request(self): requests = [ ProduceRequestPayload("topic1", 0, [ kafka.protocol.message.Message(b"a"), kafka.protocol.message.Message(b"b") ]), ProduceRequestPayload("topic2", 1, [ kafka.protocol.message.Message(b"c") ]) ] msg_a_binary = KafkaProtocol._encode_message(create_message(b"a")) msg_b_binary = KafkaProtocol._encode_message(create_message(b"b")) msg_c_binary = KafkaProtocol._encode_message(create_message(b"c")) header = b"".join([ struct.pack('>i', 0x94), # The length of the message overall struct.pack('>h', 0), # Msg Header, Message type = Produce struct.pack('>h', 0), # Msg Header, API version struct.pack('>i', 2), # Msg Header, Correlation ID struct.pack('>h7s', 7, b"client1"), # Msg Header, The client ID struct.pack('>h', 2), # Num acks required struct.pack('>i', 100), # Request Timeout struct.pack('>i', 2), # The number of requests ]) total_len = len(msg_a_binary) + len(msg_b_binary) topic1 = b"".join([ struct.pack('>h6s', 6, b'topic1'), # The topic1 struct.pack('>i', 1), # One message set struct.pack('>i', 0), # Partition 0 struct.pack('>i', total_len + 24), # Size of the incoming message set struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_a_binary)), # Length of message msg_a_binary, # Actual message struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_b_binary)), # Length of message msg_b_binary, # Actual message ]) topic2 = b"".join([ struct.pack('>h6s', 6, b'topic2'), # The topic1 struct.pack('>i', 1), # One message set struct.pack('>i', 1), # Partition 1 struct.pack('>i', len(msg_c_binary) + 12), # Size of the incoming message set struct.pack('>q', 0), # No offset specified struct.pack('>i', len(msg_c_binary)), # Length of message msg_c_binary, # Actual message ]) expected1 = b"".join([ header, topic1, topic2 ]) expected2 = b"".join([ header, topic2, topic1 ]) encoded = KafkaProtocol.encode_produce_request(b"client1", 2, requests, 2, 100) self.assertIn(encoded, [ expected1, expected2 ])
def test_encode_produce_request(self): requests = [ProduceRequest("topic1", 0, [create_message("a"), create_message("b")]), ProduceRequest("topic2", 1, [create_message("c")])] expect = ('\x00\x00\x00\x94\x00\x00\x00\x00\x00\x00\x00\x02\x00\x07' 'client1\x00\x02\x00\x00\x00d\x00\x00\x00\x02\x00\x06topic1' '\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x006\x00\x00\x00' '\x00\x00\x00\x00\x00\x00\x00\x00\x0fQ\xdf:2\x00\x00\xff\xff' '\xff\xff\x00\x00\x00\x01a\x00\x00\x00\x00\x00\x00\x00\x00' '\x00\x00\x00\x0f\xc8\xd6k\x88\x00\x00\xff\xff\xff\xff\x00' '\x00\x00\x01b\x00\x06topic2\x00\x00\x00\x01\x00\x00\x00\x01' '\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' '\x00\x0f\xbf\xd1[\x1e\x00\x00\xff\xff\xff\xff\x00\x00\x00' '\x01c') encoded = KafkaProtocol.encode_produce_request("client1", 2, requests, 2, 100) self.assertEqual(encoded, expect)
from collections import namedtuple

from kafka.protocol import KafkaProtocol
from kafka import (
    SimpleProducer, KeyedProducer,
    create_message, create_gzip_message, create_snappy_message,
    RoundRobinPartitioner, HashedPartitioner
)

ProduceRequestPayload = namedtuple("ProduceRequestPayload",
                                   ["topic", "partition", "messages"])

messages = [
    create_snappy_message([("---start---", None), ("xxxxxxxxxxxxxxxxx", None)]),
    create_snappy_message([("ccccc", None), ("---end---", None)]),
]
produce = ProduceRequestPayload("topic1", 1, messages=messages)

correlationID = 3
req = KafkaProtocol.encode_produce_request("clientID", correlationID,
                                            payloads=[produce], acks=1,
                                            timeout=1000)

# Emit the encoded request as a Go []byte literal (handy for pasting into fixtures).
print "[]byte{" + ''.join(["0x%02x, " % ord(x) for x in req]).strip() + "}"

# python test.py | hexdump -C
import subprocess
task = subprocess.Popen("hexdump -C", shell=True, stdin=subprocess.PIPE)
task.stdin.write(req)
task.stdin.flush()
task.stdin.close()
task.wait()
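# Optional pure-Python fallback (an assumption, not part of the original script):
# if hexdump isn't installed, a rough equivalent of `hexdump -C` output can be
# produced directly, keeping the fixture-generation script self-contained.
def hexdump_bytes(data, width=16):
    for offset in range(0, len(data), width):
        chunk = data[offset:offset + width]
        hex_part = ' '.join('%02x' % ord(c) for c in chunk)
        text_part = ''.join(c if 32 <= ord(c) < 127 else '.' for c in chunk)
        print '%08x  %-*s  |%s|' % (offset, width * 3, hex_part, text_part)

# hexdump_bytes(req)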