def test_decode_fetch_response(self):
    t1 = "topic1"
    t2 = "topic2"
    msgs = map(create_message, ["message1", "hi", "boo", "foo", "so fun!"])
    ms1 = KafkaProtocol._encode_message_set([msgs[0], msgs[1]])
    ms2 = KafkaProtocol._encode_message_set([msgs[2]])
    ms3 = KafkaProtocol._encode_message_set([msgs[3], msgs[4]])

    encoded = struct.pack(
        '>iih%dsiihqi%dsihqi%dsh%dsiihqi%ds' %
        (len(t1), len(ms1), len(ms2), len(t2), len(ms3)),
        4, 2, len(t1), t1, 2, 0, 0, 10, len(ms1), ms1, 1, 1, 20,
        len(ms2), ms2, len(t2), t2, 1, 0, 0, 30, len(ms3), ms3)

    responses = list(KafkaProtocol.decode_fetch_response(encoded))

    def expand_messages(response):
        return FetchResponse(response.topic, response.partition,
                             response.error, response.highwaterMark,
                             list(response.messages))

    expanded_responses = map(expand_messages, responses)
    expect = [
        FetchResponse(t1, 0, 0, 10, [OffsetAndMessage(0, msgs[0]),
                                     OffsetAndMessage(0, msgs[1])]),
        FetchResponse(t1, 1, 1, 20, [OffsetAndMessage(0, msgs[2])]),
        FetchResponse(t2, 0, 0, 30, [OffsetAndMessage(0, msgs[3]),
                                     OffsetAndMessage(0, msgs[4])])
    ]
    self.assertEqual(expanded_responses, expect)
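For reference, the container types exercised above (and in the rest of the examples on this page) can be sketched as plain namedtuples. This is a minimal sketch assuming kafka-python-style definitions; the authoritative field lists live in the installed kafka.common module, not in this snippet.

# Minimal sketch of the containers used above (assumed to mirror kafka-python's
# namedtuples; check kafka.common in your installed version for the real ones).
from collections import namedtuple

OffsetAndMessage = namedtuple("OffsetAndMessage", ["offset", "message"])
Message = namedtuple("Message", ["magic", "attributes", "key", "value"])
FetchResponse = namedtuple(
    "FetchResponse",
    ["topic", "partition", "error", "highwaterMark", "messages"])

# A decoded fetch result is a FetchResponse whose `messages` field holds
# OffsetAndMessage pairs:
oam = OffsetAndMessage(0, Message(magic=0, attributes=0, key=None, value="payload"))
resp = FetchResponse("topic1", 0, 0, 10, [oam])
assert resp.messages[0].message.value == "payload"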
def test_02_collectorha(self, mock_SimpleConsumer, mock_get_uve,
                        mock_get_part, mock_send_agg_uve,
                        mock_clear_agg_uve, mock_reconnect_agg_uve):
    m_get_part = Mock_get_part()
    m_get_part[(1, ("127.0.0.1", 0, 0))] = "127.0.0.1:0", \
        {"gen1": {"ObjectXX:uve1": {"type1": {}}}}
    m_get_part[(1, ("127.0.0.5", 0, 0))] = "127.0.0.5:0", \
        {"gen1": {"ObjectZZ:uve3": {"type3": {}}}}
    mock_get_part.side_effect = m_get_part

    m_get_uve = Mock_get_uve()
    m_get_uve["ObjectXX:uve1"] = {"type1": {"xx": 0}}
    m_get_uve["ObjectYY:uve2"] = {"type2": {"yy": 1}}
    m_get_uve["ObjectZZ:uve3"] = {"type3": {"zz": 2}}
    mock_get_uve.side_effect = m_get_uve

    # When this message is read, 127.0.0.5 will not be present
    m_get_messages = Mock_get_messages()
    m_get_messages["ObjectYY:uve2"] = OffsetAndMessage(
        offset=0,
        message=Message(magic=0, attributes=0,
                        key='ObjectYY:uve2|type2|gen1|127.0.0.5:0',
                        value='{}'))
    mock_SimpleConsumer.return_value.get_messages.side_effect = \
        m_get_messages

    self._ag.disc_cb_coll([{"ip-address": "127.0.0.1", "pid": 0}])
    self._ag.libpart_cb([1])

    # Now bring up collector 127.0.0.5
    self.assertTrue(self.checker_dict([1, "ObjectZZ", "uve3"],
                                      self._ag.ptab_info, False))
    self._ag.disc_cb_coll([{"ip-address": "127.0.0.1", "pid": 0},
                           {"ip-address": "127.0.0.5", "pid": 0}])
    self.assertTrue(self.checker_dict([1, "ObjectZZ", "uve3"],
                                      self._ag.ptab_info))
    self.assertTrue(self.checker_dict([1, "ObjectYY", "uve2"],
                                      self._ag.ptab_info, False))

    # Feed the message in again
    m_get_messages["ObjectYY:uve2"] = OffsetAndMessage(
        offset=0,
        message=Message(magic=0, attributes=0,
                        key='ObjectYY:uve2|type2|gen1|127.0.0.5:0',
                        value='{}'))
    self.assertTrue(self.checker_dict([1, "ObjectYY", "uve2"],
                                      self._ag.ptab_info))

    # Withdraw collector 127.0.0.1
    self.assertTrue(self.checker_dict([1, "ObjectXX", "uve1"],
                                      self._ag.ptab_info))
    del m_get_uve["ObjectXX:uve1"]
    self._ag.disc_cb_coll([{"ip-address": "127.0.0.5", "pid": 0}])
    self.assertTrue(self.checker_dict([1, "ObjectXX", "uve1"],
                                      self._ag.ptab_info, False))
@classmethod
def _decode_message_set_iter(cls, data):
    """
    Iteratively decode a MessageSet

    Reads repeated elements of (offset, message), calling decode_message
    to decode a single message. Since compressed messages contain further
    MessageSets, these two methods have been decoupled so that they may
    recurse easily.
    """
    cur = 0
    read_message = False
    while cur < len(data):
        try:
            ((offset, ), cur) = relative_unpack('>q', data, cur)
            (msg, cur) = read_int_string(data, cur)
            for (offset, message) in KafkaProtocol._decode_message(msg, offset):
                read_message = True
                yield OffsetAndMessage(offset, message)
        except BufferUnderflowError:
            # NOTE: Not sure this is correct error handling:
            # Is it possible to get a BUE if the message set is somewhere
            # in the middle of the fetch response? If so, we probably have
            # an issue that's not "fetch size too small".
            # Aren't we ignoring errors if we fail to unpack data by
            # raising StopIteration()?
            # If _decode_message() raises a ChecksumError, couldn't that
            # also be due to the fetch size being too small?
            if read_message is False:
                # If we get a partial read of a message but haven't
                # yielded anything, there's a problem.
                raise ConsumerFetchSizeTooSmall()
            else:
                raise StopIteration()
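The NOTE above questions when ConsumerFetchSizeTooSmall is the right signal; the intent is that a caller retries the fetch with a larger buffer when a message set is truncated before anything could be yielded. Below is a hedged sketch of that retry pattern, not kafka-python's own consumer logic: decode_with_growing_buffer, fetch_raw, and the buffer sizes are all hypothetical names introduced here for illustration, while KafkaProtocol._decode_message_set_iter and ConsumerFetchSizeTooSmall come from the snippet above.

# Hypothetical retry loop around the decoder above. `fetch_raw` is an assumed
# callable (not part of kafka-python) that returns the raw MessageSet bytes
# for a given buffer size.
def decode_with_growing_buffer(fetch_raw, buffer_size=4096, max_buffer=1024 * 1024):
    while True:
        data = fetch_raw(buffer_size)
        try:
            # Materialize the generator so a truncated, empty read raises here.
            return list(KafkaProtocol._decode_message_set_iter(data))
        except ConsumerFetchSizeTooSmall:
            if buffer_size >= max_buffer:
                raise
            # Nothing was yielded: the buffer was too small, so double it and retry.
            buffer_size = min(buffer_size * 2, max_buffer)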
def test_01_rxmsg(self, mock_SimpleConsumer, mock_get_uve, mock_get_part,
                  mock_send_agg_uve, mock_clear_agg_uve,
                  mock_reconnect_agg_uve):
    m_get_part = Mock_get_part()
    m_get_part[(1, ("127.0.0.1", 0, 0))] = "127.0.0.1:0", \
        {"gen1": {"ObjectXX:uve1": {"type1": {}}}}
    mock_get_part.side_effect = m_get_part

    # The bootstrapped UVE ObjectXX:uve1 is not present!
    m_get_uve = Mock_get_uve()
    m_get_uve["ObjectYY:uve2"] = {"type2": {"yy": 1}}
    mock_get_uve.side_effect = m_get_uve

    m_get_messages = Mock_get_messages()
    m_get_messages["ObjectYY:uve2"] = OffsetAndMessage(
        offset=0,
        message=Message(magic=0, attributes=0,
                        key='ObjectYY:uve2|type2|gen1|127.0.0.1:0',
                        value='{}'))
    mock_SimpleConsumer.return_value.get_messages.side_effect = \
        m_get_messages

    self._ag.disc_cb_coll([{"ip-address": "127.0.0.1", "pid": 0}])
    self._ag.libpart_cb([1])

    self.assertTrue(
        self.checker_dict([1, "ObjectXX", "uve1"],
                          self._ag.ptab_info, False))
    self.assertTrue(
        self.checker_dict([1, "ObjectYY", "uve2"], self._ag.ptab_info))
    self.assertTrue(self.checker_exact(
        self._ag.ptab_info[1]["ObjectYY"]["uve2"].values(),
        {"type2": {"yy": 1}}))
@classmethod
def _decode_message_set_iter(cls, data):
    """
    Iteratively decode a MessageSet

    Reads repeated elements of (offset, message), calling decode_message
    to decode a single message. Since compressed messages contain further
    MessageSets, these two methods have been decoupled so that they may
    recurse easily.
    """
    cur = 0
    read_message = False
    while cur < len(data):
        try:
            ((offset, ), cur) = relative_unpack('>q', data, cur)
            (msg, cur) = read_int_string(data, cur)
            for (offset, message) in KafkaProtocol._decode_message(msg, offset):
                read_message = True
                yield OffsetAndMessage(offset, message)
        except BufferUnderflowError:
            if read_message is False:
                # If we get a partial read of a message but haven't
                # yielded anything, there's a problem.
                raise ConsumerFetchSizeTooSmall()
            else:
                raise StopIteration()
def fail_requests(payloads, **kwargs):
    responses = [
        FetchResponsePayload(
            payloads[0].topic, payloads[0].partition, 0, 0,
            [OffsetAndMessage(payloads[0].offset + i,
                              "msg %d" % (payloads[0].offset + i))
             for i in range(10)]),
    ]
    for failure in payloads[1:]:
        responses.append(error_factory(failure))
    return responses
def test_create_from_offset_and_message_with_no_reader_schema_specified(
    self, registered_schema, payload, example_payload_data
):
    unpacked_message = CreateMessage(
        schema_id=registered_schema.schema_id,
        payload=payload,
        timestamp=1500,
    )
    offset_and_message = OffsetAndMessage(
        0, create_message(Envelope().pack(unpacked_message)))
    extracted_message = create_from_offset_and_message(
        offset_and_message=offset_and_message,
        reader_schema_id=None)
    assert extracted_message.schema_id == registered_schema.schema_id
    assert extracted_message.topic == registered_schema.topic.name
    assert extracted_message.reader_schema_id == registered_schema.schema_id
    assert extracted_message.payload_data == example_payload_data
@classmethod
def _decode_message_set_iter(cls, data):
    """
    Iteratively decode a MessageSet

    Reads repeated elements of (offset, message), calling decode_message
    to decode a single message. Since compressed messages contain further
    MessageSets, these two methods have been decoupled so that they may
    recurse easily.
    """
    cur = 0
    while cur < len(data):
        try:
            ((offset, ), cur) = relative_unpack('>q', data, cur)
            (msg, cur) = read_int_string(data, cur)
            for (offset, message) in KafkaProtocol._decode_message(msg, offset):
                yield OffsetAndMessage(offset, message)
        except BufferUnderflowError:
            # If we get a partial read of a message, stop iterating.
            raise StopIteration()
def test_01_rxmsg(self, mock_SimpleConsumer, mock_get_uve, mock_get_part):
    m_get_part = Mock_get_part()
    m_get_part[(1, ("127.0.0.1", 0, 0))] = {
        "127.0.0.1:0": {
            "gen1": {
                "ObjectXX:uve1": set(["type1"])
            }
        }
    }
    mock_get_part.side_effect = m_get_part

    # The bootstrapped UVE ObjectXX:uve1 is not present!
    m_get_uve = Mock_get_uve()
    m_get_uve["ObjectYY:uve2"] = {"type2": {"yy": 1}}
    mock_get_uve.side_effect = m_get_uve

    m_get_messages = Mock_get_messages()
    m_get_messages["ObjectYY:uve2"] = OffsetAndMessage(
        offset=0,
        message=Message(
            magic=0, attributes=0, key='',
            value=('{"message":"UVEUpdate","key":"ObjectYY:uve2",'
                   '"type":"type2","gen":"gen1","coll":'
                   '"127.0.0.1:0","deleted":false}')))
    mock_SimpleConsumer.return_value.get_messages.side_effect = \
        m_get_messages

    self._ag.disc_cb_coll([{"ip-address": "127.0.0.1", "pid": 0}])
    self._ag.libpart_cb([1])

    self.assertTrue(
        self.checker_dict([1, "ObjectXX", "uve1"],
                          self._ag.ptab_info, False))
    self.assertTrue(
        self.checker_dict([1, "ObjectYY", "uve2"], self._ag.ptab_info))
    self.assertTrue(self.checker_exact(
        self._ag.ptab_info[1]["ObjectYY"]["uve2"].values(),
        {"type2": {"yy": 1}}))
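The `value` field above is a hand-concatenated JSON string. When building similar test messages it can be less error prone to construct the payload with json.dumps; the sketch below is illustrative only, and simply reuses the field names that appear in the test above.

import json

# Illustrative only: builds the same UVEUpdate payload used in the test above.
uve_update = {
    "message": "UVEUpdate",
    "key": "ObjectYY:uve2",
    "type": "type2",
    "gen": "gen1",
    "coll": "127.0.0.1:0",
    "deleted": False,
}
value = json.dumps(uve_update)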
def offset_and_message(self, message):
    return OffsetAndMessage(0, create_message(Envelope().pack(message)))
def test_00_init(self, mock_SimpleConsumer, mock_KafkaClient, mock_UVEServer):
    self.test_spec = [
        TestStage(
            i=PartHandlerInput(
                redis_instances=set([("127.0.0.1", 44444, 0)]),
                get_part={
                    "127.0.0.1:44444": {
                        "gen1": {
                            "ObjectXX:uve1": set(["type1"])
                        }
                    }
                },
                get_messages=[
                    OffsetAndMessage(
                        offset=0,
                        message=Message(
                            magic=0, attributes=0, key='',
                            value=('{"message":"UVEUpdate","key":"ObjectYY:uve2",'
                                   '"type":"type2","gen":"gen1","coll":'
                                   '"127.0.0.1:44444","deleted":false}')))
                ]),
            o=PartHandlerOutput(
                callbacks=[
                    {"ObjectXX:uve1": None},
                    {"ObjectYY:uve2": set(["type2"])},
                ],
                uvedb=None)),
        TestStage(
            i=PartHandlerInput(redis_instances=gevent.GreenletExit(),
                               get_part=None,
                               get_messages=None),
            o=PartHandlerOutput(
                callbacks=[
                    {"ObjectXX:uve1": None, "ObjectYY:uve2": None},
                ],
                uvedb={
                    "127.0.0.1:44444": {
                        "gen1": {
                            "ObjectXX:uve1": set(["type1"]),
                            "ObjectYY:uve2": set(["type2"])
                        }
                    }
                }),
        )
    ]

    mock_UVEServer.return_value.redis_instances.side_effect = \
        [x.i.redis_instances for x in self.test_spec]
    mock_UVEServer.return_value.get_part.side_effect = \
        [x.i.get_part for x in self.test_spec if x.i.get_part is not None]
    mock_SimpleConsumer.return_value.get_messages.side_effect = \
        [x.i.get_messages for x in self.test_spec]

    self.ph = UveStreamProc('no-brokers', 1, "uve-1", logging,
                            self.callback_proc, "127.0.0.1",
                            mock_UVEServer.return_value)
    self.ph.start()
    res, db = self.ph.get(timeout=10)
    if isinstance(res, AssertionError):
        raise res
    self.assertEqual(db, self.test_spec[-1].o.uvedb)