def test_round_robin_partitioner(self):
    start_offset0 = yield from self.current_offset(self.topic, 0)
    start_offset1 = yield from self.current_offset(self.topic, 1)

    producer = KeyedAIOProducer(self.client,
                                partitioner=RoundRobinPartitioner)
    resp1 = yield from producer.send(self.topic, self.msg("one"),
                                     key=self.key("key1"))
    resp2 = yield from producer.send(self.topic, self.msg("two"),
                                     key=self.key("key2"))
    resp3 = yield from producer.send(self.topic, self.msg("three"),
                                     key=self.key("key3"))
    resp4 = yield from producer.send(self.topic, self.msg("four"),
                                     key=self.key("key4"))

    # With two partitions the round-robin partitioner alternates,
    # so the messages land on partitions 0, 1, 0, 1 in send order.
    self.assert_produce_response(resp1, start_offset0 + 0)
    self.assert_produce_response(resp2, start_offset1 + 0)
    self.assert_produce_response(resp3, start_offset0 + 1)
    self.assert_produce_response(resp4, start_offset1 + 1)

    yield from self.assert_fetch_offset(
        0, start_offset0, [self.msg("one"), self.msg("three")])
    yield from self.assert_fetch_offset(
        1, start_offset1, [self.msg("two"), self.msg("four")])
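# Hedged sketch: this is not the kafka-python RoundRobinPartitioner itself,
# only an illustration of the behaviour the test above assumes -- the key is
# ignored and partitions are handed out in a repeating cycle, so with two
# partitions the four sends hit partitions 0, 1, 0, 1. The helper name
# `_round_robin_sketch` is hypothetical and exists only for this example.
import itertools


def _round_robin_sketch(partitions):
    """Illustrative helper: cycle over partitions, ignoring message keys."""
    cycle = itertools.cycle(partitions)

    def pick(key):
        # The key is accepted for interface symmetry but never used.
        return next(cycle)

    return pick


# Usage sketch:
#   pick = _round_robin_sketch([0, 1])
#   [pick(k) for k in (b"key1", b"key2", b"key3", b"key4")] == [0, 1, 0, 1]
# which matches the offsets asserted in test_round_robin_partitioner above.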
def test_keyed_producer_ctor(self):
    client = mock.Mock()
    ack = KeyedAIOProducer.ACK_AFTER_LOCAL_WRITE
    producer = KeyedAIOProducer(client,
                                partitioner=RoundRobinPartitioner,
                                req_acks=ack)
    name = producer.__class__.__name__
    self.assertIn(name, repr(producer))
    self.assertEqual(producer._req_acks, ack)
def test_send_non_byteish(self):
    client = mock.Mock()
    sproducer = SimpleAIOProducer(client)
    with self.assertRaises(TypeError):
        self.loop.run_until_complete(sproducer.send(b"topic", "text"))

    kproducer = KeyedAIOProducer(client)
    with self.assertRaises(TypeError):
        self.loop.run_until_complete(
            kproducer.send(b"topic", "text", key=b'key'))
def test_hashed_partitioner(self):
    start_offset0 = yield from self.current_offset(self.topic, 0)
    start_offset1 = yield from self.current_offset(self.topic, 1)

    # Create a client without preloaded metadata and force metadata
    # loading on the first send() call.
    client = AIOKafkaClient(self.hosts, loop=self.loop)
    producer = KeyedAIOProducer(client, partitioner=HashedPartitioner)
    resp1 = yield from producer.send(self.topic, self.msg("one"),
                                     key=self.key("1"))
    resp2 = yield from producer.send(self.topic, self.msg("two"),
                                     key=self.key("2"))
    resp3 = yield from producer.send(self.topic, self.msg("three"),
                                     key=self.key("3"))
    resp4 = yield from producer.send(self.topic, self.msg("four"),
                                     key=self.key("3"))
    resp5 = yield from producer.send(self.topic, self.msg("five"),
                                     key=self.key("4"))

    offsets = {0: start_offset0, 1: start_offset1}
    messages = {0: [], 1: []}

    keys = [self.key(k) for k in ["1", "2", "3", "3", "4"]]
    resps = [resp1, resp2, resp3, resp4, resp5]
    msgs = [self.msg(m) for m in ["one", "two", "three", "four", "five"]]

    for key, resp, msg in zip(keys, resps, msgs):
        # Recompute the expected partition: the key is hashed and
        # taken modulo the partition count (2 in this test).
        k = hash(key) % 2
        offset = offsets[k]
        self.assert_produce_response(resp, offset)
        offsets[k] += 1
        messages[k].append(msg)

    yield from self.assert_fetch_offset(0, start_offset0, messages[0])
    yield from self.assert_fetch_offset(1, start_offset1, messages[1])
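# Hedged sketch: not the kafka-python HashedPartitioner, only an illustration
# of the mapping the loop above recomputes -- a key is assigned to the
# partition at hash(key) % partition_count, so equal keys (self.key("3") used
# twice) always land on the same partition. `_hashed_sketch` is a hypothetical
# helper name used only for this example.
def _hashed_sketch(key, partitions):
    """Illustrative helper: pick a partition by hashing the key."""
    return partitions[hash(key) % len(partitions)]


# Usage sketch: _hashed_sketch(b"3", [0, 1]) returns the same partition for
# both "three" and "four", which is why the test groups those messages when
# it later asserts the fetched contents of each partition.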
def test_send_non_byteish_key(self):
    client = mock.Mock()
    producer = KeyedAIOProducer(client)
    with self.assertRaises(TypeError):
        self.loop.run_until_complete(
            producer.send(b"topic", b"text", key='key'))