Example #1
0
        def process_message(message):
            """Message processing logic.

            Parses the Kafka message as JSON, retries previously failed
            uploads from the cache, and dispatches supported message types
            to the thread-pool executor.
            """
            try:
                msg_dict = json.loads(message.value.decode('utf-8'))
            except json.decoder.JSONDecodeError:
                MESSAGE_PARSE_ERROR.inc()
                LOGGER.exception("Unable to parse message: ")
                return
            # Retry previously failed messages AFTER the parse try-block, so a
            # JSONDecodeError raised during retry is not misreported as a parse
            # error of the current message (matches the other listener variants).
            FailedCache.process_failed_cache(FailedCache.upload_cache, executor, self.process_upload_or_re_evaluate, loop)
            if message.topic in kafka_evaluator_topic:
                if 'type' not in msg_dict:
                    LOGGER.error("Received message is missing type field: %s", msg_dict)
                    return
                if msg_dict['type'] in ['upload_new_file', 're-evaluate_system']:
                    process_func = self.process_upload_or_re_evaluate
                    # Only fresh uploads are reported to the payload tracker;
                    # re-evaluations were already tracked on first upload.
                    if msg_dict['type'] == 'upload_new_file':
                        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'processing',
                                                    status_msg='Scheduled for evaluation', loop=loop)
                else:
                    UNKNOWN_MSG.inc()
                    LOGGER.error("Received unknown message type: %s", msg_dict['type'])
                    return

                # Run the handler off the listener thread; on_thread_done logs
                # (and presumably caches) failures -- TODO confirm semantics.
                future = executor.submit(process_func, msg_dict, loop=loop)
                future.add_done_callback(on_thread_done)
            else:
                UNKNOWN_TOPIC.inc()
                LOGGER.error("Received message on unsupported topic: %s", message.topic)
    def test_cache_processing(self):
        """Verify a queued failed item is drained from the upload cache."""
        loop = asyncio.get_event_loop()
        # Seed the cache with one previously-failed message.
        FailedCache.upload_cache.append("failed1")
        with concurrent.futures.ThreadPoolExecutor(1) as executor:
            FailedCache.process_failed_cache(
                FailedCache.upload_cache,
                executor,
                TestFailedCache._failed_func_mock,
                loop,
            )
    def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
        """Parse, validate and dispatch a single Kafka message."""
        PROCESS_MESSAGES.inc()
        LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)

        try:
            payload = json.loads(msg.value.decode("utf8"))
        except json.decoder.JSONDecodeError:
            MESSAGE_PARSE_ERROR.inc()
            LOGGER.exception("Unable to parse message: ")
            return

        # Drain messages that failed earlier before taking on new work.
        for cache, retry_func in ((FailedCache.upload_cache, process_upload),
                                  (FailedCache.delete_cache, process_delete)):
            FailedCache.process_failed_cache(cache, executor, retry_func, loop)

        def _report_skip(text):
            """Report a skipped upload to the payload tracker."""
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                        payload,
                                        'error',
                                        status_msg=text,
                                        loop=loop)

        if msg.topic == mqueue.UPLOAD_TOPIC:
            if not validate_msg(payload, "upload",
                                REQUIRED_UPLOAD_MESSAGE_FIELDS):
                return
            # Acknowledge receipt to the payload tracker.
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                        payload,
                                        'received',
                                        loop=loop)
            # Process only archives from smart_management accounts.
            identity = get_identity(payload["platform_metadata"]["b64_identity"])
            if identity is None:
                INVALID_IDENTITY.inc()
                error_msg = "Skipped upload due to invalid identity header."
                LOGGER.warning(error_msg)
                _report_skip(error_msg)
                return
            if not is_entitled_smart_management(identity,
                                                allow_missing_section=True):
                MISSING_SMART_MANAGEMENT.inc()
                error_msg = "Skipped upload due to missing smart_management entitlement."
                LOGGER.debug(error_msg)
                _report_skip(error_msg)
                return
            handler = process_upload
        elif msg.topic == mqueue.EVENTS_TOPIC:
            if not validate_msg(payload, "event",
                                REQUIRED_EVENT_MESSAGE_FIELDS):
                return
            if payload['type'] != 'delete':
                UNKNOWN_EVENT_TYPE.inc()
                LOGGER.error("Received unknown event type: %s",
                             payload['type'])
                return
            handler = process_delete
        else:
            UNKNOWN_TOPIC.inc()
            LOGGER.error("Received message on unsupported topic: %s",
                         msg.topic)
            return

        # Run the handler off the listener thread.
        future = executor.submit(handler, payload, loop=loop)
        future.add_done_callback(on_thread_done)
def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
    """Message processing logic.

    Parses an inventory-events Kafka message, retries previously failed
    upload/delete messages from the caches, then dispatches created/updated
    (upload or display-name update) and delete events to the appropriate
    handler on the shared ListenerCtx executor.
    """
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Received message from topic %s: %s', msg.topic, msg.value)

    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return
    # Retry messages that failed earlier before handling the new one.
    FailedCache.process_failed_cache(FailedCache.upload_cache,
                                     ListenerCtx.executor, process_upload,
                                     ListenerCtx.loop)
    FailedCache.process_failed_cache(FailedCache.delete_cache,
                                     ListenerCtx.executor, process_delete,
                                     ListenerCtx.loop)

    if msg.topic == CFG.events_topic:
        if msg_dict.get("type", "") in ["created", "updated"]:
            if not validate_kafka_msg(msg_dict,
                                      REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            # A platform_metadata section marks a real archive upload;
            # without it the event is only a host-record update.
            if msg_dict.get("platform_metadata"):
                if not validate_kafka_msg(msg_dict,
                                          REQUIRED_UPLOAD_MESSAGE_FIELDS):
                    SKIPPED_MESSAGES.inc()
                    return
                LOGGER.info(
                    "Received created/updated msg, inventory_id: %s, type: %s",
                    msg_dict["host"]["id"], msg_dict["type"])
                # send message to payload tracker
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                            msg_dict,
                                            'received',
                                            loop=ListenerCtx.loop)
                # process only system uploads from insights entitled accounts
                identity = get_identity(
                    msg_dict["platform_metadata"]["b64_identity"])
                if identity is None:
                    INVALID_IDENTITY.inc()
                    error_msg = "Skipped upload due to invalid identity header."
                    LOGGER.warning(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                                msg_dict,
                                                'error',
                                                status_msg=error_msg,
                                                loop=ListenerCtx.loop)
                    return
                if not is_entitled_insights(identity,
                                            allow_missing_section=True):
                    MISSING_INSIGHTS_ENTITLEMENT.inc()
                    error_msg = "Skipped upload due to missing insights entitlement."
                    LOGGER.debug(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                                msg_dict,
                                                'error',
                                                status_msg=error_msg,
                                                loop=ListenerCtx.loop)
                    return
                process_func = process_upload
            else:
                # display name change message doesn't have platform_metadata section, cannot validate identity and track payload,
                # support only display name change
                LOGGER.info("Received update event msg, inventory_id: %s",
                            msg_dict["host"]["id"])
                process_func = process_update
        elif msg_dict.get("type", "") == "delete":
            if not validate_kafka_msg(msg_dict,
                                      REQUIRED_DELETE_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            # NOTE(review): delete events carry the inventory id at the top
            # level ("id"), unlike created/updated ("host"]["id"]).
            LOGGER.info("Received delete msg, inventory_id: %s",
                        msg_dict["id"])
            process_func = process_delete
        else:
            UNKNOWN_EVENT_TYPE.inc()
            LOGGER.error("Received unknown event type: %s",
                         msg_dict.get('type', 'missing event type'))
            return
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return

    # Run the selected handler off the listener thread.
    future = ListenerCtx.executor.submit(process_func,
                                         msg_dict,
                                         loop=ListenerCtx.loop)
    future.add_done_callback(on_thread_done)