Ejemplo n.º 1
0
async def wrkr_fails(client):
    """Worker stub that fails on its first two invocations, then succeeds.

    While ``client.failures`` is below 2 it bumps the counter and raises
    ``KafkaError``; afterwards it returns ``True``.
    """
    print(client.failures)
    if client.failures >= 2:
        return True
    client.failures += 1
    raise KafkaError("wrkr_fails")
Ejemplo n.º 2
0
    async def start(self):
        """Fail the very first start attempt, then succeed on later ones."""
        if self.failures:
            return True
        # First call: record the failure and simulate a broker error.
        self.failures = 1
        raise KafkaError("fakemq fails")
Ejemplo n.º 3
0
    def start(self):
        """Succeed once the failure countdown is spent and the MQ is up."""
        countdown_exhausted = self.connection_failing_attempt_countdown <= 0
        if countdown_exhausted and not self.is_down:
            return True
        # Still in the failing window: burn one tick and record the attempt.
        self.connection_failing_attempt_countdown -= 1
        self.trying_to_connect_failures_calls += 1
        raise KafkaError("Failed to connect to the FakeMQ")
Ejemplo n.º 4
0
    async def test_check_version(self):
        """Broker version detection: happy path, consistency across nodes,
        and failure modes (probe error, no available connection)."""
        # Expected version comes from the test environment's configured
        # Kafka version string, e.g. "2.4.1".
        kafka_version = tuple(int(x) for x in self.kafka_version.split("."))

        client = AIOKafkaClient(bootstrap_servers=self.hosts)
        await client.bootstrap()
        ver = await client.check_version()

        expected_version = kafka_version[:2]
        # No significant protocol change in these releases, so probing
        # cannot differentiate them; normalize to the detectable version.
        if expected_version == (2, 2):
            expected_version = (2, 1)
        elif expected_version == (2, 4):
            expected_version = (2, 3)
        self.assertEqual(expected_version, ver[:2])
        await self.wait_topic(client, 'some_test_topic')
        # Repeated checks — with and without an explicit node — must agree.
        ver2 = await client.check_version()
        self.assertEqual(ver, ver2)
        ver2 = await client.check_version(client.get_random_node())
        self.assertEqual(ver, ver2)

        # A broker error during probing surfaces as UnrecognizedBrokerVersion.
        with mock.patch.object(AIOKafkaConnection, 'send') as mocked:
            mocked.side_effect = KafkaError('mocked exception')
            with self.assertRaises(UnrecognizedBrokerVersion):
                await client.check_version(client.get_random_node())

        async def _get_conn(*args: Any, **kw: Any):
            # Simulate "no connection available" for any node.
            return None

        client._get_conn = _get_conn
        with self.assertRaises(KafkaConnectionError):
            await client.check_version()
        await client.close()
def get_container():
    """Start the docker-compose test stack and verify Kafka is reachable.

    Returns the running ``DockerCompose`` handle; raises ``KafkaError``
    when no topics can be listed (i.e. the broker never came up).
    """
    tests_dir = rootpath.detect('.', pattern='tests')
    compose = DockerCompose('{}/tests'.format(tests_dir))
    compose.start()
    probe = KafkaConsumer(group_id='test',
                          bootstrap_servers=['localhost:9092'])
    if not probe.topics():
        raise KafkaError('Unable to connect with kafka container!')
    return compose
Ejemplo n.º 6
0
 def sendjsondata(self, kafkatopic, params):
     """Serialize *params* to JSON (unless already a string) and publish it.

     Args:
         kafkatopic: Destination topic name.
         params: A JSON-serializable object, or a ready-made string payload.

     Raises:
         KafkaError: If serialization or publishing fails; the original
             exception is chained as the cause.
     """
     try:
         if isinstance(params, str):
             message = params
         else:
             message = json.dumps(params)
         self.producer.send(kafkatopic,
                            message.encode('utf-8', 'ignore'))
         self.producer.flush()
     except Exception as exc:
         # Catch Exception (not a bare ``except:`` which would also trap
         # SystemExit/KeyboardInterrupt) and keep the cause for debugging.
         raise KafkaError("send message to kafka error") from exc
Ejemplo n.º 7
0
    def _raise_if_need_to(self, calls: int = 0, avoid_iteration_control=False):
        if not avoid_iteration_control:
            if self.disconnect_in_operation > 0 and self.disconnect_in_operation == (
                    calls - 1):
                self.disconnect_in_operation = 0
                self.disconnect_in_operation_called = True
                raise KafkaError("Failed to connect to the FakeMQ")

            if self.stop_iteration_countdown <= 0:
                raise StopLoopException('Stopping the iteration')

            self.stop_iteration_countdown -= 1
Ejemplo n.º 8
0
def simulate_kafka_cannot_connect():
    """
    Make instantiating a kafka producer raise a `KafkaError`.

    IMPORTANT: this patches `TestKafkaProducer`, itself a mock. The
    assumption is that the real producer raises in the same way and that
    the behaviour doesn't change across library versions. It has been
    exercised manually against a real Kafka connection and appears to work
    as expected :fingerscrossed:
    """
    patcher = patch.object(TestKafkaProducer, "__init__")
    with patcher as init_mock:
        init_mock.side_effect = KafkaError("failed to connect")
        yield
Ejemplo n.º 9
0
    async def test_metadata_update_fail(self):
        """A failing metadata refresh returns False, and fetch_all_metadata
        propagates the error, without breaking the client."""
        client = AIOKafkaClient(bootstrap_servers=self.hosts)
        await client.bootstrap()
        # Make sure the connection is initialized before mocking to avoid
        # crashing the api_version routine.
        await client.force_metadata_update()

        with mock.patch.object(AIOKafkaConnection, 'send') as mocked:
            mocked.side_effect = KafkaError('mocked exception')

            # force_metadata_update reports failure instead of raising...
            updated = await client.force_metadata_update()

            self.assertEqual(updated, False)

            # ...while fetch_all_metadata surfaces the underlying KafkaError.
            with self.assertRaises(KafkaError):
                await client.fetch_all_metadata()
        await client.close()
Ejemplo n.º 10
0
    def handle(self, *args, **options):
        """Consume JSON messages from the Kafka ``test`` topic forever and
        forward each payload to a channels group.

        NOTE(review): this constructs kafka-python's ``KafkaConsumer`` but
        then treats records as if they had confluent-kafka-style
        ``.error()``/``.topic()``/``.value()`` methods — confirm which
        client library is actually intended here.
        """
        c = KafkaConsumer(
            "test",
            auto_offset_reset='earliest',
            enable_auto_commit=True,
            # Payloads are decoded as UTF-8 JSON on receipt.
            value_deserializer=lambda x: loads(x.decode('utf-8')),
            bootstrap_servers="kafka:9092")

        self.stdout.write('subscribed')

        # Blocking consume loop; runs until the broker iterator ends.
        for msg in c:
            print(msg)
            print("Received Message")

            if msg is None:
                continue

            if msg.error():
                # NOTE(review): comparing an error code to a freshly built
                # KafkaError("Trouble") instance looks wrong — it likely
                # never matches; confirm the intended error constant.
                if msg.error().code() == KafkaError("Trouble"):
                    continue
                else:
                    raise Exception("HAHLAHO")

            # skip internal channels
            if msg.topic().startswith('__'):
                continue

            try:
                data = msg.value().decode('utf-8')
            except Exception:
                # Log and skip payloads that are not valid UTF-8.
                self.stdout.write('%s: message is not valid utf-8: %s' %
                                  (msg.topic(), repr(msg.value())))
                continue

            # Fan the decoded payload out to the channels group synchronously.
            async_to_sync(channel_layer.group_send)(data, {
                "type": "stream",
                "stream": "kafka",
                "room": msg.topic(),
                "method": "create",
                "data": data
            })

        print("Received END")
Ejemplo n.º 11
0
def _delete_host(session, event_producer, host):
    """Delete *host* from the DB and emit a delete event.

    Commits only when this query actually removed the row; rolls back
    otherwise. Raises ``KafkaError`` (without touching the row) when the
    Kafka server is unavailable, so callers stop further deletions.
    """
    delete_query = session.query(Host).filter(Host.id == host.id)
    if not kafka_available():
        logger.error(
            f"host with {host.id} NOT deleted because Kafka server not available."
        )
        raise KafkaError(
            "Kafka server not available.  Stopping host deletions.")

    delete_query.delete(synchronize_session="fetch")
    host_deleted = _deleted_by_this_query(host)
    if not host_deleted:
        # Another worker removed the row first — discard our changes.
        delete_query.session.rollback()
        return host_deleted

    delete_host_count.inc()
    event = build_event(EventType.delete, host)
    insights_id = host.canonical_facts.get("insights_id")
    headers = message_headers(EventType.delete, insights_id)
    # wait=True: the event must be durably written before we commit.
    event_producer.write_event(event, str(host.id), headers, wait=True)
    delete_query.session.commit()
    return host_deleted
Ejemplo n.º 12
0
    async def test_check_version(self):
        """Broker version detection against the configured Kafka, plus
        failure modes (probe error, no available connection)."""
        # Expected version from the environment's Kafka version string.
        kafka_version = tuple(int(x) for x in self.kafka_version.split("."))

        client = AIOKafkaClient(loop=self.loop, bootstrap_servers=self.hosts)
        await client.bootstrap()
        ver = await client.check_version()
        # Only major.minor are comparable; the patch level is not probed.
        self.assertEqual(kafka_version[:2], ver[:2])
        await self.wait_topic(client, 'some_test_topic')
        # Repeated checks — with and without an explicit node — must agree.
        ver2 = await client.check_version()
        self.assertEqual(ver, ver2)
        ver2 = await client.check_version(client.get_random_node())
        self.assertEqual(ver, ver2)

        # A broker error during probing surfaces as UnrecognizedBrokerVersion.
        with mock.patch.object(
                AIOKafkaConnection, 'send') as mocked:
            mocked.side_effect = KafkaError('mocked exception')
            with self.assertRaises(UnrecognizedBrokerVersion):
                await client.check_version(client.get_random_node())

        # NOTE(review): asyncio.coroutine is deprecated since Python 3.8 and
        # removed in 3.11 — replace with an async def helper when upgrading.
        client._get_conn = asyncio.coroutine(lambda _, **kw: None)
        with self.assertRaises(KafkaConnectionError):
            await client.check_version()
        await client.close()
Ejemplo n.º 13
0
 async def test_failed_bootstrap(self):
     """Bootstrap must raise KafkaConnectionError when every broker
     request fails."""
     client = AIOKafkaClient(bootstrap_servers=self.hosts)
     patcher = mock.patch.object(AIOKafkaConnection, 'send')
     with patcher as mock_send:
         mock_send.side_effect = KafkaError('some kafka error')
         with self.assertRaises(KafkaConnectionError):
             await client.bootstrap()
Ejemplo n.º 14
0
def raise_exception():
    """Test helper: unconditionally raise a bare ``KafkaError``."""
    raise KafkaError()
Ejemplo n.º 15
0
 async def seek_to_committed(self):
     """Abort the consumer by raising instead of seeking.

     Not realistic, but a convenient way to stop the mock consumer in
     these tests.
     """
     raise KafkaError("Seek to commited. Closing...")
Ejemplo n.º 16
0
 def getone(self):
     """Return the first preloaded message; raise once none are left."""
     sentinel = object()
     msg = next(iter(self.preloaded_messages), sentinel)
     if msg is sentinel:
         raise KafkaError("Closing Mock Consumer")
     return msg
Ejemplo n.º 17
0
async def raise_exception():
    """Async test helper: unconditionally raise a bare ``KafkaError``."""
    raise KafkaError()
Ejemplo n.º 18
0
    mocker.patch("api.host._get_host_list_by_id_list",
                 query_wraper.mock_get_host_list_by_id_list)

    hosts = db_create_multiple_hosts(how_many=2)
    host_id_list = [str(host.id) for host in hosts]

    response_status, response_data = api_delete_host(",".join(host_id_list))

    assert_response_status(response_status, expected_status=200)

    query_wraper.query.limit.assert_called_with(5)


@pytest.mark.parametrize(
    "send_side_effects",
    # Two failure shapes: (1) the second send() succeeds but its returned
    # future's .get() raises; (2) the second send() call itself raises.
    ((mock.Mock(), mock.Mock(**{"get.side_effect": KafkaError()})),
     (mock.Mock(), KafkaError("oops"))),
)
def test_delete_stops_after_kafka_producer_error(
        send_side_effects, event_producer_mock, event_producer,
        db_create_multiple_hosts, api_delete_host, db_get_hosts):
    """Deleting hosts must abort with a 500 once the Kafka producer errors."""
    event_producer._kafka_producer.send.side_effect = send_side_effects

    hosts = db_create_multiple_hosts(how_many=3)
    host_id_list = [str(host.id) for host in hosts]

    response_status, response_data = api_delete_host(",".join(host_id_list))

    assert_response_status(response_status, expected_status=500)

    # NOTE(review): remaining_hosts is never asserted on — the final check
    # appears to be missing (possibly truncated); confirm intended behavior.
    remaining_hosts = db_get_hosts(host_id_list)
Ejemplo n.º 19
0
 def _connect(self):
     """Probe the Kafka container; raise KafkaError when it is unreachable."""
     server = self.get_bootstrap_server()
     probe = KafkaConsumer(group_id='test',
                           bootstrap_servers=[server])
     if not probe.topics():
         raise KafkaError("Unable to connect with kafka container!")