Example #1
    def test_create_source_event_invalid_auth_header(self):
        """Tests creating a source db record with invalid auth_header."""
        test_source_id = 2
        test_offset = 3
        storage.create_source_event(test_source_id, "bad", test_offset)
        with self.assertRaises(Sources.DoesNotExist):
            Sources.objects.get(source_id=test_source_id)
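Example #1 implies that create_source_event silently skips creation when the auth header cannot be decoded, rather than raising. A minimal sketch of that guard, assuming the header is a base64-encoded identity JSON; the decode logic and field names here are hypothetical, not taken from the real storage module:

import base64
import json

# Hypothetical sketch -- the decode format and fields are assumptions.
def create_source_event(source_id, auth_header, offset):
    """Create a Sources row, skipping headers that cannot be decoded."""
    try:
        identity = json.loads(base64.b64decode(auth_header))
        account_id = identity["identity"]["account_number"]
    except Exception:
        # "bad" is neither valid base64 nor JSON, so no row is created (Example #1).
        LOG.warning(f"Skipping source_id {source_id}: unable to decode auth header.")
        return
    Sources.objects.create(source_id=source_id, auth_header=auth_header, offset=offset, account_id=account_id)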
Example #2
    def process(self):
        """Process the message."""
        if self.event_type in (KAFKA_APPLICATION_CREATE,):
            storage.create_source_event(self.source_id, self.auth_header, self.offset)

        if storage.is_known_source(self.source_id):
            if self.event_type in (KAFKA_APPLICATION_CREATE,):
                self.save_sources_details()
                self.save_source_info(bill=True)
                # _Authentication_ messages are responsible for saving credentials.
                # However, OCP does not send an Auth message. Therefore, we need
                # to run the following branch for OCP which completes the source
                # creation cycle for an OCP source.
                if storage.get_source_type(self.source_id) == Provider.PROVIDER_OCP:
                    self.save_source_info(auth=True)
            if self.event_type in (KAFKA_APPLICATION_UPDATE,):
                if storage.get_source_type(self.source_id) == Provider.PROVIDER_AZURE:
                    # Because azure auth is split in Sources backend, we need to check both
                    # auth and billing when we receive either auth update or app update event
                    updated = self.save_source_info(auth=True, bill=True)
                else:
                    updated = self.save_source_info(bill=True)
                if updated:
                    LOG.info(f"[ApplicationMsgProcessor] source_id {self.source_id} updated")
                    storage.enqueue_source_create_or_update(self.source_id)
                else:
                    LOG.info(f"[ApplicationMsgProcessor] source_id {self.source_id} not updated. No changes detected.")

        if self.event_type in (KAFKA_APPLICATION_DESTROY,):
            storage.enqueue_source_delete(self.source_id, self.offset, allow_out_of_order=True)
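Both processors gate their follow-up work on storage.is_known_source. The examples never show its body, but a plausible one-line implementation, assuming the Sources model used in the tests below:

# Hypothetical sketch -- assumed, not taken from the real storage module.
def is_known_source(source_id):
    """Return True when a Sources row already exists for this source_id."""
    return Sources.objects.filter(source_id=source_id).exists()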
Example #3
    def process(self):
        """Process the message."""
        if self.event_type in (KAFKA_AUTHENTICATION_CREATE,):
            LOG.debug(f"[AuthenticationMsgProcessor] creating source for source_id: {self.source_id}")
            storage.create_source_event(self.source_id, self.account_number, self.auth_header, self.offset)

        if storage.is_known_source(self.source_id):
            if self.event_type in (KAFKA_AUTHENTICATION_CREATE,):
                self.save_source_info(auth=True)
            if self.event_type in (KAFKA_AUTHENTICATION_UPDATE,):
                if storage.get_source_type(self.source_id) == Provider.PROVIDER_AZURE:
                    # Because azure auth is split in Sources backend, we need to check both
                    # auth and billing when we receive either auth update or app update event
                    updated = self.save_source_info(auth=True, bill=True)
                else:
                    updated = self.save_source_info(auth=True)
                if updated:
                    LOG.info(f"[AuthenticationMsgProcessor] source_id {self.source_id} updated")
                    storage.enqueue_source_create_or_update(self.source_id)
                else:
                    LOG.info(f"[AuthenticationMsgProcessor] source_id {self.source_id} not updated. No changes detected.")
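Note that this processor passes self.account_number as an extra argument, so create_source_event takes four arguments here versus three everywhere else; the snippets evidently come from different revisions of the same module.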
Example #4
def process_message(app_type_id, msg):  # noqa: C901
    """
    Process message from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        app_type_id - application type identifier
        msg - kafka message

    Returns:
        None

    """
    LOG.info(f"Processing Event: {msg}")
    msg_data = None
    try:
        msg_data = cost_mgmt_msg_filter(msg)
    except SourceNotFoundError:
        LOG.warning(f"Source not found in platform sources. Skipping msg: {msg}")
        return
    if not msg_data:
        LOG.debug(f"Message not intended for cost management: {msg}")
        return

    if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,):
        storage.create_source_event(msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset"))

        if storage.is_known_source(msg_data.get("source_id")):
            sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
        if msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE,):
            storage.create_source_event(  # this will create source _only_ if it does not exist.
                msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset")
            )

        save_auth_info(msg_data.get("auth_header"), msg_data.get("source_id"))

    elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,):
        if storage.is_known_source(msg_data.get("source_id")) is False:
            LOG.info("Update event for unknown source id, skipping...")
            return
        sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"), allow_out_of_order=True)

    elif msg_data.get("event_type") in (KAFKA_SOURCE_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"))

    if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
        storage.enqueue_source_update(msg_data.get("source_id"))
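process_message passes allow_out_of_order=True only for application destroys. Read together with Examples #5 and #6 below, this suggests enqueue_source_delete records a destroy that arrives before its create so that the late create can be discarded. A hedged sketch of that bookkeeping, inferred from the tests rather than taken from the real module:

# Hypothetical sketch -- behavior inferred from Examples #5 and #6, not confirmed.
def enqueue_source_delete(source_id, offset, allow_out_of_order=False):
    """Flag a source for deletion; optionally record an out-of-order destroy."""
    try:
        source = Sources.objects.get(source_id=source_id)
        source.pending_delete = True
        source.save()
    except Sources.DoesNotExist:
        if allow_out_of_order:
            # Remember the destroy so a later create_source_event can clean it up.
            Sources.objects.create(source_id=source_id, offset=offset, out_of_order_delete=True)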
Example #5
    def test_create_source_event_out_of_order_unexpected_entry(self):
        """Tests that a source entry is not cleaned up when unexpected entry is found."""
        test_source_id = 3
        test_offset = 4
        test_obj = Sources(source_id=test_source_id, offset=3)
        test_obj.save()

        storage.create_source_event(test_source_id, Config.SOURCES_FAKE_HEADER, test_offset)
        self.assertTrue(Sources.objects.filter(source_id=test_source_id).exists())
Example #6
    def test_create_source_event_out_of_order(self):
        """Tests that a source entry is cleaned up when following an out of order destroy."""
        test_source_id = 3
        test_offset = 4
        test_obj = Sources(source_id=test_source_id, offset=3, out_of_order_delete=True)
        test_obj.save()

        storage.create_source_event(test_source_id, Config.SOURCES_FAKE_HEADER, test_offset)
        self.assertFalse(Sources.objects.filter(source_id=test_source_id).exists())
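Examples #5 and #6 pin down the cleanup rule: an existing row blocks a new create, and it is deleted only when out_of_order_delete is set. A minimal sketch of that branch, using the field names from the tests (everything else is assumed):

# Hypothetical sketch of the out-of-order branch -- assumed from Examples #5 and #6.
def create_source_event(source_id, auth_header, offset):
    """Create a source row, or discard it if its destroy already arrived."""
    try:
        source = Sources.objects.get(source_id=source_id)
        if source.out_of_order_delete:
            # The destroy beat the create; drop the placeholder row (Example #6).
            source.delete()
        # Otherwise leave the unexpected existing row alone (Example #5).
    except Sources.DoesNotExist:
        Sources.objects.create(source_id=source_id, auth_header=auth_header, offset=offset)

This sketch is also consistent with Examples #7 and #9 below, where an InterfaceError raised by objects.get or delete propagates to the caller.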
Example #7
    def test_create_source_event_db_down(self):
        """Tests creating a source db record when the DB is unavailable."""
        test_source_id = 2
        test_offset = 3
        with patch("sources.storage.Sources.objects") as mock_objects:
            mock_objects.get.side_effect = InterfaceError("Test exception")
            with self.assertRaises(InterfaceError):
                storage.create_source_event(test_source_id, Config.SOURCES_FAKE_HEADER, test_offset)
Example #8
    def test_create_source_event(self):
        """Tests that a source can be created."""
        test_source_id = 2
        test_offset = 3
        storage.create_source_event(test_source_id, Config.SOURCES_FAKE_HEADER, test_offset)
        db_obj = Sources.objects.get(source_id=test_source_id)
        self.assertEqual(db_obj.source_id, test_source_id)
        self.assertEqual(db_obj.auth_header, Config.SOURCES_FAKE_HEADER)
        self.assertEqual(db_obj.offset, test_offset)
        self.assertEqual(db_obj.account_id, self.account_id)
Example #9
    def test_create_source_event_db_down(self):
        """Tests that a DB error during out-of-order delete cleanup propagates."""
        test_source_id = 2
        test_offset = 3
        ocp_obj = Sources(source_id=test_source_id, offset=3, out_of_order_delete=True, pending_delete=False)
        ocp_obj.save()
        with patch.object(Sources, "delete") as mock_object:
            mock_object.side_effect = InterfaceError("Error")
            with self.assertRaises(InterfaceError):
                storage.create_source_event(test_source_id, Config.SOURCES_FAKE_HEADER, test_offset)
Example #10
async def process_messages(msg_pending_queue):  # noqa: C901; pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered


    Returns:
        None

    """
    LOG.info("Waiting to process incoming kafka messages...")
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f"Processing Event: {str(msg_data)}")
        try:
            if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,
                                              KAFKA_AUTHENTICATION_CREATE):
                if msg_data.get("event_type") == KAFKA_AUTHENTICATION_CREATE:
                    sources_network = SourcesHTTPClient(
                        msg_data.get("auth_header"))
                    msg_data[
                        "source_id"] = sources_network.get_source_id_from_endpoint_id(
                            msg_data.get("resource_id"))

                storage.create_source_event(msg_data.get("source_id"),
                                            msg_data.get("auth_header"),
                                            msg_data.get("offset"))

                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, ):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    if storage.is_known_source(
                            msg_data.get("source_id")) is False:
                        LOG.info(
                            f"Update event for unknown source id, skipping...")
                        continue
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_UPDATE, ):
                msg_data["source_id"] = storage.get_source_from_endpoint(
                    msg_data.get("resource_id"))
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, save_auth_info, msg_data.get("auth_header"),
                        msg_data.get("source_id"))

            elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,
                                                KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get("source_id"))

            if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,
                                              KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get("source_id"))
        except (InterfaceError, OperationalError) as error:
            LOG.error(
                f"[process_messages] Closing DB connection and re-queueing failed operation."
                f" Encountered {type(error).__name__}: {error}")
            connection.close()
            await asyncio.sleep(Config.RETRY_SECONDS)
            await msg_pending_queue.put(msg_data)
            LOG.info(
                f'Requeued failed operation: {msg_data.get("event_type")} '
                f'for Source ID: {str(msg_data.get("source_id"))}.')
        except Exception as error:
            # The reason for catching all exceptions is to ensure that the event
            # loop remains active in the event that message processing fails unexpectedly.
            source_id = str(msg_data.get("source_id", "unknown"))
            LOG.error(
                f"Source {source_id} Unexpected message processing error: {str(error)}",
                exc_info=True)
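process_messages expects a shared asyncio queue that a Kafka listener fills. A minimal, hypothetical wiring sketch; listen_for_messages is assumed and not shown in any of the examples:

import asyncio

# Hypothetical wiring -- the producer side is an assumption, not part of the source.
async def main():
    msg_pending_queue = asyncio.Queue()
    await asyncio.gather(
        listen_for_messages(msg_pending_queue),  # assumed: consumes kafka, fills the queue
        process_messages(msg_pending_queue),
    )

if __name__ == "__main__":
    asyncio.run(main())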