Example #1
    def test_get_source_id_from_endpoint_id_connection_error(self):
        """Test to get source ID from endpoint ID with connection error."""
        resource_id = 2

        client = SourcesHTTPClient(auth_header=Config.SOURCES_FAKE_HEADER,
                                   source_id=self.source_id)
        with requests_mock.mock() as m:
            m.get(
                f"http://www.sources.com/api/v1.0/endpoints?filter[id]={resource_id}",
                exc=RequestException)
            with self.assertRaises(SourcesHTTPClientError):
                client.get_source_id_from_endpoint_id(resource_id)
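The test relies on requests_mock's exc keyword to simulate the network failure. As a standalone illustration (the URL and exception class here are arbitrary and not part of the project), the keyword makes the mocked call raise instead of returning a response:

import requests
import requests_mock
from requests.exceptions import ConnectTimeout

# Standalone illustration of requests_mock's `exc` keyword: requesting the
# mocked URL raises the given exception instead of returning a response.
with requests_mock.mock() as m:
    m.get("http://example.test/ping", exc=ConnectTimeout)
    try:
        requests.get("http://example.test/ping")
    except ConnectTimeout:
        print("simulated connection failure")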
Example #2
    def test_get_source_id_from_endpoint_id_misconfigured(self):
        """Test to get source_id from resource_id with route not found."""
        resource_id = 2
        source_id = 3

        client = SourcesHTTPClient(auth_header=Config.SOURCES_FAKE_HEADER, source_id=source_id)
        with requests_mock.mock() as m:
            m.get(
                f"http://www.sources.com/api/v1.0/endpoints?filter[id]={resource_id}",
                status_code=404,
                json={"data": [{"id": resource_id}]},
            )
            with self.assertRaises(SourcesHTTPClientError):
                client.get_source_id_from_endpoint_id(resource_id)
Example #3
def cost_mgmt_msg_filter(msg_data):
    """Verify that message is for cost management."""
    event_type = msg_data.get("event_type")
    auth_header = msg_data.get("auth_header")

    if event_type in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
        return msg_data

    if event_type in (KAFKA_AUTHENTICATION_CREATE,
                      KAFKA_AUTHENTICATION_UPDATE):
        sources_network = SourcesHTTPClient(auth_header)
        source_id = sources_network.get_source_id_from_endpoint_id(
            msg_data.get("resource_id"))
        msg_data["source_id"] = source_id
        if not sources_network.get_application_type_is_cost_management(
                source_id):
            LOG.info(
                f"Resource id {msg_data.get('resource_id')} not associated with cost-management."
            )
            return None
    else:
        source_id = msg_data.get("source_id")

    sources_network = SourcesHTTPClient(auth_header, source_id=source_id)
    source_details = sources_network.get_source_details()
    source_type_id = int(source_details.get("source_type_id"))
    source_type_name = sources_network.get_source_type_name(source_type_id)

    if source_type_name not in (SOURCES_OCP_SOURCE_NAME,
                                SOURCES_AWS_SOURCE_NAME,
                                SOURCES_AZURE_SOURCE_NAME):
        LOG.debug(f"Filtering unexpected source type {source_type_name}.")
        return None
    return msg_data
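A minimal usage sketch for the filter above, assuming the same module scope (LOG, the Kafka constants, and cost_mgmt_msg_filter). The keys mirror what the filter reads; the header value and resource ID are hypothetical:

# Hypothetical authentication-create message; only the keys are taken from the
# filter above, the values are illustrative.
msg_data = {
    "event_type": KAFKA_AUTHENTICATION_CREATE,
    "auth_header": "base64-encoded-identity",  # assumed header format
    "resource_id": 2,                          # Platform Sources Endpoint ID
}

filtered = cost_mgmt_msg_filter(msg_data)
if filtered is None:
    LOG.info("Message is not relevant to cost management; dropping it.")
else:
    LOG.info(f"Handling source {filtered['source_id']}")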
Example #4
    def test_get_source_id_from_endpoint_id_no_data(self):
        """Test to get source_id from resource_id with no data in response."""
        resource_id = 2
        source_id = 3

        client = SourcesHTTPClient(auth_header=Config.SOURCES_FAKE_HEADER, source_id=source_id)
        with requests_mock.mock() as m:
            m.get(
                f"http://www.sources.com/api/v1.0/endpoints?filter[id]={resource_id}",
                status_code=200,
                json={"data": []},
            )
            self.assertIsNone(client.get_source_id_from_endpoint_id(resource_id))
Example #5
    def test_get_source_id_from_endpoint_id(self):
        """Test to get source_id from resource_id."""
        resource_id = 2
        source_id = 3

        client = SourcesHTTPClient(auth_header=Config.SOURCES_FAKE_HEADER, source_id=source_id)
        with requests_mock.mock() as m:
            m.get(
                f"http://www.sources.com/api/v1.0/endpoints?filter[id]={resource_id}",
                status_code=200,
                json={"data": [{"source_id": source_id}]},
            )
            response = client.get_source_id_from_endpoint_id(resource_id)
            self.assertEqual(response, source_id)
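Taken together, the connection-error, 404, empty-data, and success tests pin down the observable behaviour of get_source_id_from_endpoint_id. The sketch below is a reconstruction from those tests only, not the project's actual implementation; the class name, attribute names, and identity header key are assumptions:

import requests
from requests.exceptions import RequestException


class SourcesHTTPClientError(Exception):
    """Stand-in for the error type the tests above expect."""


class SourcesHTTPClientSketch:
    """Reconstruction for illustration only; attribute names are assumptions."""

    def __init__(self, auth_header, source_id=None):
        self._base_url = "http://www.sources.com/api/v1.0"       # taken from the mocked URLs above
        self._identity_header = {"x-rh-identity": auth_header}   # assumed header key
        self._source_id = source_id

    def get_source_id_from_endpoint_id(self, resource_id):
        url = f"{self._base_url}/endpoints?filter[id]={resource_id}"
        try:
            response = requests.get(url, headers=self._identity_header)
        except RequestException as err:
            # Example #1: transport failures surface as SourcesHTTPClientError.
            raise SourcesHTTPClientError(f"Unable to reach Sources API: {err}") from err
        if response.status_code != 200:
            # Example #2: a 404 (route misconfiguration) also raises.
            raise SourcesHTTPClientError(f"Sources API returned status {response.status_code}")
        endpoints = response.json().get("data")
        if not endpoints:
            # Example #4: an empty data list yields None.
            return None
        # Example #5: the happy path returns the first matching source_id.
        return endpoints[0].get("source_id")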
Example #6
def cost_mgmt_msg_filter(msg_data):
    """Verify that message is for cost management."""
    event_type = msg_data.get("event_type")
    auth_header = msg_data.get("auth_header")

    if event_type in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
        return msg_data

    if event_type in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
        sources_network = SourcesHTTPClient(auth_header)

        if msg_data.get("resource_type") == "Endpoint":
            source_id = sources_network.get_source_id_from_endpoint_id(msg_data.get("resource_id"))
        if msg_data.get("resource_type") == "Application":
            source_id = sources_network.get_source_id_from_applications_id(msg_data.get("resource_id"))
        msg_data["source_id"] = source_id
        if not sources_network.get_application_type_is_cost_management(source_id):
            LOG.info(f"Resource id {msg_data.get('resource_id')} not associated with cost-management.")
            return None
    else:
        source_id = msg_data.get("source_id")

    return msg_data
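For comparison with the Endpoint-based call in Example #3, this hypothetical payload exercises the Application branch of the newer filter; the keys come from the code above and the values are made up:

# Illustrative Application-created authentication message (values are hypothetical).
app_msg = {
    "event_type": KAFKA_AUTHENTICATION_CREATE,
    "resource_type": "Application",
    "resource_id": 7,                          # Sources Application ID in this branch
    "auth_header": "base64-encoded-identity",
}

# Returns the message enriched with "source_id" when the application belongs to
# cost management, otherwise None.
result = cost_mgmt_msg_filter(app_msg)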
Example #7
def sources_network_auth_info(resource_id, auth_header):
    """
    Store Sources Authentication information given an endpoint (Resource ID).

    Convenience method when a Resource ID (Endpoint) is known and the Source ID
    is not.  This happens when processing an Authentication.create message.

    Args:
        resource_id (Integer): Platform Sources Endpoint ID, aka resource_id.
        auth_header (String): Authentication Header.

    Returns:
        None

    """
    source_id = storage.get_source_from_endpoint(resource_id)
    if source_id:
        save_auth_info(auth_header, source_id)
    else:
        sources_network = SourcesHTTPClient(auth_header)
        source_id = sources_network.get_source_id_from_endpoint_id(resource_id)
        storage.update_endpoint_id(source_id, resource_id)
        save_auth_info(auth_header, source_id)
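A hedged call-site sketch: this is typically invoked for an Authentication.create event that carries only the endpoint's resource_id, so the Source ID is resolved from local storage first and from the Sources API otherwise before the credentials are saved. The argument values are hypothetical:

# Hypothetical invocation from a message handler; values are illustrative only.
sources_network_auth_info(resource_id=2, auth_header="base64-encoded-identity")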
Example #8
async def process_messages(msg_pending_queue):  # noqa: C901; pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for the Platform-Sources kafka event.
        This information is stored in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info("Waiting to process incoming kafka messages...")
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f"Processing Event: {str(msg_data)}")
        try:
            if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,
                                              KAFKA_AUTHENTICATION_CREATE):
                if msg_data.get("event_type") == KAFKA_AUTHENTICATION_CREATE:
                    sources_network = SourcesHTTPClient(
                        msg_data.get("auth_header"))
                    msg_data[
                        "source_id"] = sources_network.get_source_id_from_endpoint_id(
                            msg_data.get("resource_id"))

                storage.create_source_event(msg_data.get("source_id"),
                                            msg_data.get("auth_header"),
                                            msg_data.get("offset"))

                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, ):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    if storage.is_known_source(
                            msg_data.get("source_id")) is False:
                        LOG.info(
                            f"Update event for unknown source id, skipping...")
                        continue
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"),
                        msg_data.get("auth_header"))

            elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_UPDATE, ):
                msg_data["source_id"] = storage.get_source_from_endpoint(
                    msg_data.get("resource_id"))
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, save_auth_info, msg_data.get("auth_header"),
                        msg_data.get("source_id"))

            elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,
                                                KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get("source_id"))

            if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,
                                              KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get("source_id"))
        except (InterfaceError, OperationalError) as error:
            LOG.error(
                "[process_messages] Closing DB connection and re-queueing failed operation."
                f" Encountered {type(error).__name__}: {error}")
            connection.close()
            await asyncio.sleep(Config.RETRY_SECONDS)
            await msg_pending_queue.put(msg_data)
            LOG.info(
                f'Requeued failed operation: {msg_data.get("event_type")} '
                f'for Source ID: {str(msg_data.get("source_id"))}.')
        except Exception as error:
            # Catch-all so the event loop stays alive even when message
            # processing fails unexpectedly.
            source_id = str(msg_data.get("source_id", "unknown"))
            LOG.error(
                f"Source {source_id}: unexpected message processing error: {error}",
                exc_info=True)
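A minimal sketch of how this coroutine might be driven, assuming the module-level EVENT_LOOP referenced inside process_messages and a separate listener coroutine that feeds the queue; the driver function and wiring are assumptions, not the project's actual entry point:

import asyncio

# Hypothetical driver: the real service wires the queue and a kafka listener
# elsewhere; this only shows how process_messages could be scheduled on the
# module's EVENT_LOOP.
def run_processor():
    msg_pending_queue = asyncio.Queue()
    # A separate listener coroutine (not shown) would put raw kafka messages
    # onto msg_pending_queue; process_messages then consumes them forever.
    EVENT_LOOP.create_task(process_messages(msg_pending_queue))
    EVENT_LOOP.run_forever()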