Example #1
def process_message(app_type_id, msg):  # noqa: C901
    """
    Process message from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        app_type_id - application type identifier
        msg - kafka message

    Returns:
        None

    """
    LOG.info(f"Processing Event: {msg}")
    msg_data = None
    try:
        msg_data = cost_mgmt_msg_filter(msg)
    except SourceNotFoundError:
        LOG.warning(f"Source not found in platform sources. Skipping msg: {msg}")
        return
    if not msg_data:
        LOG.debug(f"Message not intended for cost management: {msg}")
        return

    if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,):
        storage.create_source_event(msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset"))

        if storage.is_known_source(msg_data.get("source_id")):
            sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
        if msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE,):
            storage.create_source_event(  # this will create source _only_ if it does not exist.
                msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset")
            )

        save_auth_info(msg_data.get("auth_header"), msg_data.get("source_id"))

    elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,):
        if not storage.is_known_source(msg_data.get("source_id")):
            LOG.info("Update event for unknown source id, skipping...")
            return
        sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"), allow_out_of_order=True)

    elif msg_data.get("event_type") in (KAFKA_SOURCE_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"))

    if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
        storage.enqueue_source_update(msg_data.get("source_id"))
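For orientation, here is a minimal sketch of how process_message might be driven from a consumer loop. The client, topic name, servers, and app_type_id value are assumptions for illustration (kafka-python's KafkaConsumer stands in for whatever client the real service uses); only the call into process_message mirrors the code above.

from kafka import KafkaConsumer  # assumed client, illustration only

def listen_for_messages(app_type_id, consumer):
    """Feed each incoming kafka record to process_message."""
    for msg in consumer:  # KafkaConsumer is iterable; each item is one record
        process_message(app_type_id, msg)

# Hypothetical wiring; the real topic, servers, and app_type_id are configuration-driven.
consumer = KafkaConsumer("platform.sources.event-stream", bootstrap_servers="localhost:9092")
listen_for_messages(app_type_id=2, consumer=consumer)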
Example #2
async def process_messages(msg_pending_queue):  # pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        msg_pending_queue (asyncio.Queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info('Waiting to process incoming kafka messages...')
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f'Processing Event: {msg_data}')
        try:
            if msg_data.get('event_type') in (KAFKA_APPLICATION_CREATE,
                                              KAFKA_SOURCE_UPDATE):
                storage.create_provider_event(msg_data.get('source_id'),
                                              msg_data.get('auth_header'),
                                              msg_data.get('offset'))
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get('source_id'),
                        msg_data.get('auth_header'))
            elif msg_data.get('event_type') in (KAFKA_AUTHENTICATION_CREATE,
                                                KAFKA_AUTHENTICATION_UPDATE):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_auth_info,
                        msg_data.get('resource_id'),
                        msg_data.get('auth_header'))
                    msg_data['source_id'] = storage.get_source_from_endpoint(
                        msg_data.get('resource_id'))
            elif msg_data.get('event_type') in (KAFKA_APPLICATION_DESTROY,
                                                KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get('source_id'))

            if msg_data.get('event_type') in (KAFKA_SOURCE_UPDATE,
                                              KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get('source_id'))
        except Exception as error:
            # Catch all exceptions so the event loop stays alive even if
            # message processing fails unexpectedly.
            source_id = str(msg_data.get('source_id', 'unknown'))
            LOG.error(
                f'Source {source_id} Unexpected message processing error: {error}'
            )
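The run_in_executor calls above are the standard asyncio pattern for keeping blocking, synchronous network calls (such as sources_network_info) from stalling the event loop. A self-contained sketch of that pattern, with a stand-in blocking function rather than the real Sources client:

import asyncio
import concurrent.futures
import time

def blocking_network_call(source_id):
    """Stand-in for a synchronous REST call like sources_network_info."""
    time.sleep(1)  # simulate network latency
    return f"details for source {source_id}"

async def handler():
    loop = asyncio.get_running_loop()
    with concurrent.futures.ThreadPoolExecutor() as pool:
        # The coroutine suspends at the await, so other tasks keep running
        # while the blocking call executes on a worker thread.
        result = await loop.run_in_executor(pool, blocking_network_call, 42)
    print(result)

asyncio.run(handler())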
Example #3
    def test_enqueue_source_update(self):
        """Test for enqueuing source updating."""
        test_matrix = [
            {
                "koku_uuid": None,
                "pending_delete": False,
                "pending_update": False,
                "expected_pending_update": False,
            },
            {
                "koku_uuid": None,
                "pending_delete": True,
                "pending_update": False,
                "expected_pending_update": False,
            },
            {
                "koku_uuid": faker.uuid4(),
                "pending_delete": True,
                "pending_update": False,
                "expected_pending_update": False,
            },
            {
                "koku_uuid": faker.uuid4(),
                "pending_delete": False,
                "pending_update": False,
                "expected_pending_update": True,
            },
            {
                "koku_uuid": faker.uuid4(),
                "pending_delete": False,
                "pending_update": True,
                "expected_pending_update": True,
            },
        ]
        test_source_id = 3
        for test in test_matrix:
            aws_obj = Sources(
                source_id=test_source_id,
                auth_header=self.test_header,
                koku_uuid=test.get("koku_uuid"),
                pending_delete=test.get("pending_delete"),
                pending_update=test.get("pending_update"),
                offset=3,
                endpoint_id=4,
                source_type=Provider.PROVIDER_AWS,
                name="Test AWS Source",
                billing_source={"bucket": "test-bucket"},
            )
            aws_obj.save()

            storage.enqueue_source_update(test_source_id)
            response = Sources.objects.get(source_id=test_source_id)
            self.assertEquals(test.get("expected_pending_update"),
                              response.pending_update)
            test_source_id += 1
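Taken together, the matrix pins down the contract of storage.enqueue_source_update: a source is flagged for update only once it is fully created (koku_uuid is set) and is not pending deletion. A sketch consistent with those expectations follows; it illustrates the contract implied by the test, not the actual storage implementation.

def enqueue_source_update(source_id):
    """Flag a fully-created, not-deleting source as pending update."""
    try:
        source = Sources.objects.get(source_id=source_id)
    except Sources.DoesNotExist:
        return  # unknown source: nothing to flag (see Example #5)
    if source.koku_uuid and not source.pending_delete:
        source.pending_update = True
        source.save()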
Example #4
    def test_enqueue_source_update(self):
        """Test for enqueuing source updating."""
        test_matrix = [{
            'koku_uuid': None,
            'pending_delete': False,
            'pending_update': False,
            'expected_pending_update': False
        }, {
            'koku_uuid': None,
            'pending_delete': True,
            'pending_update': False,
            'expected_pending_update': False
        }, {
            'koku_uuid': faker.uuid4(),
            'pending_delete': True,
            'pending_update': False,
            'expected_pending_update': False
        }, {
            'koku_uuid': faker.uuid4(),
            'pending_delete': False,
            'pending_update': False,
            'expected_pending_update': True
        }, {
            'koku_uuid': faker.uuid4(),
            'pending_delete': False,
            'pending_update': True,
            'expected_pending_update': True
        }]
        test_source_id = 3
        for test in test_matrix:
            aws_obj = Sources(source_id=test_source_id,
                              auth_header=self.test_header,
                              koku_uuid=test.get('koku_uuid'),
                              pending_delete=test.get('pending_delete'),
                              pending_update=test.get('pending_update'),
                              offset=3,
                              endpoint_id=4,
                              source_type=Provider.PROVIDER_AWS,
                              name='Test AWS Source',
                              billing_source={'bucket': 'test-bucket'})
            aws_obj.save()

            storage.enqueue_source_update(test_source_id)
            response = Sources.objects.get(source_id=test_source_id)
            self.assertEqual(test.get('expected_pending_update'),
                             response.pending_update)
            test_source_id += 1
Example #5
    def test_enqueue_source_update_unknown_source(self):
        """Test to enqueue a source update for an unknown source."""
        self.test_obj.koku_uuid = faker.uuid4()
        storage.enqueue_source_update(self.test_source_id + 1)
        self.assertFalse(self.test_obj.pending_update)
Example #6
async def process_messages(msg_pending_queue):  # noqa: C901; pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        msg_pending_queue (asyncio.Queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info("Waiting to process incoming kafka messages...")
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f"Processing Event: {str(msg_data)}")
        try:
            if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,
                                              KAFKA_AUTHENTICATION_CREATE):
                if msg_data.get("event_type") == KAFKA_AUTHENTICATION_CREATE:
                    sources_network = SourcesHTTPClient(msg_data.get("auth_header"))
                    msg_data["source_id"] = sources_network.get_source_id_from_endpoint_id(
                        msg_data.get("resource_id"))

                storage.create_source_event(msg_data.get("source_id"),
                                            msg_data.get("auth_header"),
                                            msg_data.get("offset"))

                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, ):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    if storage.is_known_source(
                            msg_data.get("source_id")) is False:
                        LOG.info(
                            f"Update event for unknown source id, skipping...")
                        continue
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_UPDATE, ):
                msg_data["source_id"] = storage.get_source_from_endpoint(
                    msg_data.get("resource_id"))
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, save_auth_info, msg_data.get("auth_header"),
                        msg_data.get("source_id"))

            elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,
                                                KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get("source_id"))

            if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,
                                              KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get("source_id"))
        except (InterfaceError, OperationalError) as error:
            LOG.error(
                "[process_messages] Closing DB connection and re-queueing failed operation."
                f" Encountered {type(error).__name__}: {error}")
            connection.close()
            await asyncio.sleep(Config.RETRY_SECONDS)
            await msg_pending_queue.put(msg_data)
            LOG.info(
                f'Requeued failed operation: {msg_data.get("event_type")} '
                f'for Source ID: {str(msg_data.get("source_id"))}.')
        except Exception as error:
            # Catch all exceptions so the event loop stays alive even if
            # message processing fails unexpectedly.
            source_id = str(msg_data.get("source_id", "unknown"))
            LOG.error(
                f"Source {source_id} Unexpected message processing error: {error}",
                exc_info=True)
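For completeness, a sketch of how msg_pending_queue and process_messages might be wired together. The listener coroutine and the aiokafka-style consumer are assumptions for illustration (the startup code is not part of these examples), and a modern sketch would use asyncio.get_running_loop() where the code above relies on a module-level EVENT_LOOP.

import asyncio

async def listen_for_messages(consumer, msg_pending_queue):
    """Assumed listener: push every raw kafka message onto the shared queue."""
    async for msg in consumer:  # e.g. an aiokafka AIOKafkaConsumer
        await msg_pending_queue.put(msg)

async def main(consumer):
    msg_pending_queue = asyncio.Queue()
    # Run both sides of the queue concurrently; process_messages loops
    # forever, pulling one event at a time and requeueing on DB errors.
    await asyncio.gather(
        listen_for_messages(consumer, msg_pending_queue),
        process_messages(msg_pending_queue),
    )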