def test_invalid_correlation_id(self):
    host, port = self.kafka_host, self.kafka_port
    request = MetadataRequest([])

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)

    # setup reader
    reader = mock.MagicMock()
    int32 = struct.Struct('>i')
    resp = MetadataResponse(brokers=[], topics=[]).encode()
    resp = int32.pack(999) + resp  # set invalid correlation id
    reader.readexactly.side_effect = [
        asyncio.coroutine(lambda *a, **kw: int32.pack(len(resp)))(),
        asyncio.coroutine(lambda *a, **kw: resp)()]
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = asyncio.async(conn._read(), loop=self.loop)

    with self.assertRaises(CorrelationIdError):
        yield from conn.send(request)

def test_osserror_in_reader_task(self):
    host, port = self.kafka_host, self.kafka_port

    @asyncio.coroutine
    def invoke_osserror(*a, **kw):
        yield from asyncio.sleep(0.1, loop=self.loop)
        raise OSError('test oserror')

    request = MetadataRequest([])

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)

    # setup reader
    reader = mock.MagicMock()
    reader.readexactly.return_value = invoke_osserror()
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = asyncio.async(conn._read(), loop=self.loop)

    with self.assertRaises(ConnectionError):
        yield from conn.send(request)
    self.assertEqual(conn.connected(), False)

def test_osserror_in_reader_task(self):
    host, port = self.server.host, self.server.port

    @asyncio.coroutine
    def invoke_osserror(*a, **kw):
        yield from asyncio.sleep(0.1, loop=self.loop)
        raise OSError

    encoder = KafkaProtocol.encode_metadata_request
    request = encoder(client_id=b"aiokafka-python",
                      correlation_id=1,
                      payloads=())

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)

    # setup reader
    reader = mock.MagicMock()
    reader.readexactly.return_value = invoke_osserror()
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = asyncio.async(conn._read(), loop=self.loop)

    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

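# The test_osserror_in_reader_task variants above pin down the failure path
# the reader task must take: an OSError while reading from the socket has to
# surface as ConnectionError on any in-flight send() (and, in the fuller
# variant, leave connected() returning False). A minimal sketch of that
# pattern, with assumed shape and parameter names, not the actual
# AIOKafkaConnection._read implementation:
async def read_loop(reader, pending_futures, mark_closed):
    try:
        while True:
            size = await reader.readexactly(4)  # 4-byte big-endian length
            await reader.readexactly(int.from_bytes(size, 'big'))
            # ... decode the response and resolve the matching future ...
    except OSError as exc:
        mark_closed()  # connected() must start returning False
        for fut in pending_futures:
            if not fut.done():
                fut.set_exception(ConnectionError(str(exc)))
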
def test_correlation_id_on_group_coordinator_req(self):
    host, port = self.kafka_host, self.kafka_port
    request = GroupCoordinatorRequest(consumer_group='test')

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)

    # setup reader
    reader = mock.MagicMock()
    int32 = struct.Struct('>i')
    resp = GroupCoordinatorResponse(
        error_code=0, coordinator_id=22,
        host='127.0.0.1', port=3333).encode()
    resp = int32.pack(0) + resp  # set correlation id to 0
    reader.readexactly.side_effect = [
        asyncio.coroutine(lambda *a, **kw: int32.pack(len(resp)))(),
        asyncio.coroutine(lambda *a, **kw: resp)()]
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = asyncio.async(conn._read(), loop=self.loop)

    response = yield from conn.send(request)
    self.assertIsInstance(response, GroupCoordinatorResponse)
    self.assertEqual(response.error_code, 0)
    self.assertEqual(response.coordinator_id, 22)
    self.assertEqual(response.host, '127.0.0.1')
    self.assertEqual(response.port, 3333)

async def test_invalid_correlation_id(self):
    host, port = self.kafka_host, self.kafka_port
    request = MetadataRequest([])

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port)

    # setup reader
    reader = mock.MagicMock()
    int32 = struct.Struct('>i')
    resp = MetadataResponse(brokers=[], topics=[])
    resp = resp.encode()
    resp = int32.pack(999) + resp  # set invalid correlation id

    async def first_resp(*args: Any, **kw: Any):
        return int32.pack(len(resp))

    async def second_resp(*args: Any, **kw: Any):
        return resp

    reader.readexactly.side_effect = [first_resp(), second_resp()]
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = conn._create_reader_task()

    with self.assertRaises(CorrelationIdError):
        await conn.send(request)

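# The mocked reader above hand-builds the broker reply the connection reads
# back: a 4-byte big-endian size prefix, then a 4-byte correlation id
# followed by the encoded response body. A small sketch of that framing as a
# standalone helper (the name frame_response is hypothetical, not part of
# aiokafka):
import struct


def frame_response(correlation_id, body):
    """Return the two chunks the mocked readexactly() hands back in order:
    the size prefix, then the correlation id plus response body."""
    int32 = struct.Struct('>i')
    payload = int32.pack(correlation_id) + body
    return int32.pack(len(payload)), payload

# With it, the side_effect above could be built from
# frame_response(999, MetadataResponse(brokers=[], topics=[]).encode()).
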
def test_osserror_in_reader_task(self):
    host, port = self.kafka_host, self.kafka_port

    @asyncio.coroutine
    def invoke_osserror(*a, **kw):
        yield from asyncio.sleep(0.1, loop=self.loop)
        raise OSError

    encoder = KafkaProtocol.encode_metadata_request
    request = encoder(client_id=b"aiokafka-python",
                      correlation_id=1,
                      payloads=())

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)

    # setup reader
    reader = mock.MagicMock()
    reader.readexactly.return_value = invoke_osserror()
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = asyncio.async(conn._read(), loop=self.loop)

    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

def test_send_to_closed(self):
    host, port = self.kafka_host, self.kafka_port
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)
    request = MetadataRequest([])
    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

    conn._writer = mock.MagicMock()
    conn._writer.write.side_effect = OSError('mocked writer is closed')

    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

async def test_send_to_closed(self):
    host, port = self.kafka_host, self.kafka_port
    conn = AIOKafkaConnection(host=host, port=port)
    request = MetadataRequest([])
    with self.assertRaises(KafkaConnectionError):
        await conn.send(request)

    conn._writer = mock.MagicMock()
    conn._writer.write.side_effect = OSError('mocked writer is closed')

    with self.assertRaises(KafkaConnectionError):
        await conn.send(request)

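# What test_send_to_closed expects of send(): with no live writer, or when
# writer.write() raises OSError, the call must fail fast instead of hanging.
# A rough sketch of that guard, using the builtin ConnectionError as a
# stand-in for the library's own error type; the shape and names are
# illustrative, not the real method:
def send_guard(writer, payload):
    if writer is None:
        # connection was never opened or has already been closed
        raise ConnectionError("No connection to broker")
    try:
        writer.write(payload)
    except OSError as exc:
        raise ConnectionError("Connection lost") from exc
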
def test__send_sasl_token(self):
    # Before Kafka 1.0.0 SASL was performed on the wire without
    # KAFKA_HEADER in the protocol, so we needed another private
    # function to send `raw` data with only a length prefix.

    # setup connection with mocked transport and protocol
    conn = AIOKafkaConnection(
        host="", port=9999, loop=self.loop)
    conn.close = mock.MagicMock()
    conn._writer = mock.MagicMock()
    out_buffer = []
    conn._writer.write = mock.Mock(side_effect=out_buffer.append)
    conn._reader = mock.MagicMock()

    self.assertEqual(len(conn._requests), 0)

    # Successful send
    fut = conn._send_sasl_token(b"Super data")
    self.assertEqual(b''.join(out_buffer), b"\x00\x00\x00\nSuper data")
    self.assertEqual(len(conn._requests), 1)
    out_buffer.clear()
    # Resolve the request
    conn._requests[0][2].set_result(None)
    conn._requests.clear()
    yield from fut

    # Broken pipe error
    conn._writer.write.side_effect = OSError
    with self.assertRaises(ConnectionError):
        conn._send_sasl_token(b"Super data")
    self.assertEqual(out_buffer, [])
    self.assertEqual(len(conn._requests), 0)
    self.assertEqual(conn.close.call_count, 1)

    conn._writer = None
    with self.assertRaises(ConnectionError):
        conn._send_sasl_token(b"Super data")
    # We don't need to close twice
    self.assertEqual(conn.close.call_count, 1)

async def test__send_sasl_token(self):
    # Before Kafka 1.0.0 SASL was performed on the wire without
    # KAFKA_HEADER in the protocol, so we needed another private
    # function to send `raw` data with only a length prefix.

    # setup connection with mocked transport and protocol
    conn = AIOKafkaConnection(
        host="", port=9999, loop=self.loop)
    conn.close = mock.MagicMock()
    conn._writer = mock.MagicMock()
    out_buffer = []
    conn._writer.write = mock.Mock(side_effect=out_buffer.append)
    conn._reader = mock.MagicMock()

    self.assertEqual(len(conn._requests), 0)

    # Successful send
    fut = conn._send_sasl_token(b"Super data")
    self.assertEqual(b''.join(out_buffer), b"\x00\x00\x00\nSuper data")
    self.assertEqual(len(conn._requests), 1)
    out_buffer.clear()
    # Resolve the request
    conn._requests[0][2].set_result(None)
    conn._requests.clear()
    await fut

    # Broken pipe error
    conn._writer.write.side_effect = OSError
    with self.assertRaises(ConnectionError):
        conn._send_sasl_token(b"Super data")
    self.assertEqual(out_buffer, [])
    self.assertEqual(len(conn._requests), 0)
    self.assertEqual(conn.close.call_count, 1)

    conn._writer = None
    with self.assertRaises(ConnectionError):
        conn._send_sasl_token(b"Super data")
    # We don't need to close twice
    self.assertEqual(conn.close.call_count, 1)

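# The assertion on out_buffer above captures the pre-1.0.0 SASL framing the
# comment describes: the token goes out with only a 4-byte big-endian length
# prefix and no Kafka request header. A minimal sketch of that framing (the
# helper name frame_sasl_token is illustrative, not aiokafka API):
import struct


def frame_sasl_token(token):
    # len(b"Super data") == 10 == 0x0a, hence the b"\x00\x00\x00\n" prefix
    # checked by the test.
    return struct.pack('>i', len(token)) + token
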
def test_send_to_closed(self):
    host, port = self.kafka_host, self.kafka_port
    conn = AIOKafkaConnection(host=host, port=port, loop=self.loop)
    request = MetadataRequest([])
    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

    @asyncio.coroutine
    def invoke_osserror(*a, **kw):
        yield from asyncio.sleep(0.1, loop=self.loop)
        raise OSError('mocked writer is closed')

    conn._writer = mock.MagicMock()
    conn._writer.write.side_effect = OSError('mocked writer is closed')

    with self.assertRaises(ConnectionError):
        yield from conn.send(request)

async def test_correlation_id_on_group_coordinator_req(self):
    host, port = self.kafka_host, self.kafka_port
    request = GroupCoordinatorRequest(consumer_group='test')

    # setup connection with mocked reader and writer
    conn = AIOKafkaConnection(host=host, port=port)

    # setup reader
    reader = mock.MagicMock()
    int32 = struct.Struct('>i')
    resp = GroupCoordinatorResponse(
        error_code=0, coordinator_id=22,
        host='127.0.0.1', port=3333)
    resp = resp.encode()
    resp = int32.pack(0) + resp  # set correlation id to 0

    async def first_resp(*args: Any, **kw: Any):
        return int32.pack(len(resp))

    async def second_resp(*args: Any, **kw: Any):
        return resp

    reader.readexactly.side_effect = [first_resp(), second_resp()]
    writer = mock.MagicMock()
    conn._reader = reader
    conn._writer = writer
    # invoke reader task
    conn._read_task = conn._create_reader_task()

    response = await conn.send(request)
    self.assertIsInstance(response, GroupCoordinatorResponse)
    self.assertEqual(response.error_code, 0)
    self.assertEqual(response.coordinator_id, 22)
    self.assertEqual(response.host, '127.0.0.1')
    self.assertEqual(response.port, 3333)

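# Why a response carrying correlation id 0 is accepted here instead of
# raising CorrelationIdError: the likely reason is the known Kafka 0.8.2
# quirk where the broker answers the group coordinator request with
# correlation id 0 regardless of the id that was sent, so the connection
# special-cases that response type. A rough sketch of such a check (assumed
# shape, not the exact aiokafka logic):
def correlation_id_ok(sent_id, recv_id, is_group_coordinator_response):
    if recv_id == 0 and is_group_coordinator_response:
        # tolerate the broker echoing correlation id 0
        return True
    return sent_id == recv_id
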