def test_validate_msg_invalid(self):
        """A message missing a required top-level section fails validation."""
        schema = {"important": []}
        message = {"others": 123}

        outcome = utils.validate_kafka_msg(message, schema)
        assert outcome is False
    def test_validate_msg_null_wrong(self):
        """A None value in a non-nullable required field fails validation."""
        schema = {"important": [("password", False)]}
        message = {
            "important": {"password": None, "login": "******", "info": None},
            "other": "yes",
        }

        outcome = utils.validate_kafka_msg(message, schema)
        assert outcome is False
    def test_validate_msg_valid(self):
        """A message with all required sections and fields passes validation."""
        schema = {"important": [], "very_important": ["password"]}
        message = {
            "important": 25,
            "very_important": {"password": 1234, "login": "******"},
            "other": "yes",
        }

        outcome = utils.validate_kafka_msg(message, schema)
        assert outcome is True
# Example #4 (scraped sample separator; original text: "Beispiel #4", vote count "0")
async def process_message(msg):
    """Message processing logic"""

    def _reject(counter, tracker_text, log_call, log_text):
        # Shared rejection path: bump the metric, report the error to the
        # payload tracker, then log the reason at the caller-chosen level.
        counter.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                    msg_dict['input'],
                                    'error',
                                    tracker_text,
                                    service=PAYLOAD_TRACKER_SERVICE,
                                    loop=MAIN_LOOP)
        log_call(log_text)

    PROCESS_MESSAGES.inc()
    LOGGER.debug('Message from topic %s, body: %s', msg.topic, msg.value)

    try:
        msg_dict = json.loads(msg.value.decode('utf8'))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception('Unable to parse message: ')
        return

    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                msg_dict['input'],
                                'processing',
                                'Starting advisor evaluation',
                                service=PAYLOAD_TRACKER_SERVICE,
                                loop=MAIN_LOOP)

    if not validate_kafka_msg(msg_dict, REQUIRED_MESSAGE_FIELDS):
        _reject(INVALID_INSIGHTS_ACC,
                'Skipped advisor result due to message coming from non-insights account.',
                LOGGER.debug,
                'Skipped advisor result due to coming from non-insights account.')
        return

    identity = get_identity(
        msg_dict['input']['platform_metadata']['b64_identity'])
    if identity is None:
        _reject(INVALID_IDENTITY,
                'Skipped advisor result due to invalid identity header.',
                LOGGER.debug,
                'Skipped advisor result due to invalid identity header.')
        return

    if not is_entitled_insights(identity, allow_missing_section=True):
        _reject(MISSING_INSIGHTS_ENTITLEMENT,
                'Skipped advisor result due to missing insights entitlement.',
                LOGGER.debug,
                'Skipped advisor result due to missing insights entitlement.')
        return

    if not validate_system_inventory(msg_dict["input"]["host"]["id"],
                                     msg_dict["input"]["timestamp"]):
        _reject(DELETED_SYSTEM_FROM_INVENTORY,
                'Skipped advisor result due to system not valid in inventory anymore.',
                LOGGER.info,
                'Skipped advisor result due to system not valid in inventory anymore.')
        return

    advisor_json, rule_hits = await parse_inventory_data(msg_dict)
    if not advisor_json:
        return

    inventory_id = msg_dict['input']['host']['id']
    LOGGER.info("Evaluating rule hits for inventory_id: %s", inventory_id)
    status = await db_import_system(msg_dict, rule_hits, advisor_json)
    if ImportStatus.CHANGED in status:
        LOGGER.debug("Finished evaluating rule hits for inventory_id: %s",
                     inventory_id)
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                    msg_dict['input'],
                                    'success',
                                    'System successfully uploaded and evaluated',
                                    service=PAYLOAD_TRACKER_SERVICE,
                                    loop=MAIN_LOOP)
    elif ImportStatus.FAILED not in status:
        # Import neither changed nor failed -> the system was unchanged.
        UNCHANGED_SYSTEM.inc()
        LOGGER.info(
            "Skipping evaluating rule hits for inventory_id %s due to unchanged system",
            inventory_id)
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                    msg_dict['input'],
                                    'success',
                                    'Unchanged system and not evaluated',
                                    service=PAYLOAD_TRACKER_SERVICE,
                                    loop=MAIN_LOOP)
def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Received message from topic %s: %s', msg.topic, msg.value)

    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    # Retry previously failed uploads/deletes before handling new work.
    for cache, retry_func in ((FailedCache.upload_cache, process_upload),
                              (FailedCache.delete_cache, process_delete)):
        FailedCache.process_failed_cache(cache, ListenerCtx.executor,
                                         retry_func, ListenerCtx.loop)

    # Guard clause: only the events topic is supported.
    if msg.topic != CFG.events_topic:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return

    event_type = msg_dict.get("type", "")
    if event_type in ("created", "updated"):
        if not validate_kafka_msg(msg_dict,
                                  REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS):
            SKIPPED_MESSAGES.inc()
            return
        if not msg_dict.get("platform_metadata"):
            # display name change message doesn't have platform_metadata section, cannot validate identity and track payload,
            # support only display name change
            LOGGER.info("Received update event msg, inventory_id: %s",
                        msg_dict["host"]["id"])
            process_func = process_update
        else:
            if not validate_kafka_msg(msg_dict,
                                      REQUIRED_UPLOAD_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            LOGGER.info(
                "Received created/updated msg, inventory_id: %s, type: %s",
                msg_dict["host"]["id"], msg_dict["type"])
            # send message to payload tracker
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                        msg_dict,
                                        'received',
                                        loop=ListenerCtx.loop)
            # process only system uploads from insights entitled accounts
            identity = get_identity(
                msg_dict["platform_metadata"]["b64_identity"])
            if identity is None:
                INVALID_IDENTITY.inc()
                error_msg = "Skipped upload due to invalid identity header."
                LOGGER.warning(error_msg)
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                            msg_dict,
                                            'error',
                                            status_msg=error_msg,
                                            loop=ListenerCtx.loop)
                return
            if not is_entitled_insights(identity,
                                        allow_missing_section=True):
                MISSING_INSIGHTS_ENTITLEMENT.inc()
                error_msg = "Skipped upload due to missing insights entitlement."
                LOGGER.debug(error_msg)
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                            msg_dict,
                                            'error',
                                            status_msg=error_msg,
                                            loop=ListenerCtx.loop)
                return
            process_func = process_upload
    elif event_type == "delete":
        if not validate_kafka_msg(msg_dict, REQUIRED_DELETE_MESSAGE_FIELDS):
            SKIPPED_MESSAGES.inc()
            return
        LOGGER.info("Received delete msg, inventory_id: %s", msg_dict["id"])
        process_func = process_delete
    else:
        UNKNOWN_EVENT_TYPE.inc()
        LOGGER.error("Received unknown event type: %s",
                     msg_dict.get('type', 'missing event type'))
        return

    # Hand the message off to the worker pool; completion is observed via callback.
    future = ListenerCtx.executor.submit(process_func,
                                         msg_dict,
                                         loop=ListenerCtx.loop)
    future.add_done_callback(on_thread_done)
    def process_message(msg):
        """Process one advisor result message.

        Parses the Kafka message body, validates it, checks identity and
        insights entitlement, extracts CVE rule hits from the advisor
        reports and imports the system into the database. Progress and the
        final outcome are reported to the payload tracker at every exit.
        """
        PROCESS_MESSAGES.inc()
        LOGGER.debug('Message from topic %s, body: %s', msg.topic, msg.value)

        try:
            msg_dict = json.loads(msg.value.decode('utf8'))
        except json.decoder.JSONDecodeError:
            MESSAGE_PARSE_ERROR.inc()
            LOGGER.exception('Unable to parse message: ')
            return

        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                    msg_dict['input'], 'processing',
                                    'Starting advisor evaluation')

        if not validate_kafka_msg(msg_dict, REQUIRED_MESSAGE_FIELDS):
            INVALID_INSIGHTS_ACC.inc()
            send_msg_to_payload_tracker(
                PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                'Skipped advisor result due to message coming from non-insights account.'
            )
            LOGGER.debug(
                'Skipped advisor result due to coming from non-insights account.'
            )
            # BUGFIX: this branch previously fell through and kept processing
            # an invalid message; bail out like the other validation branches.
            return
        identity = get_identity(
            msg_dict['input']['platform_metadata']['b64_identity'])
        if identity is None:
            INVALID_IDENTITY.inc()
            send_msg_to_payload_tracker(
                PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                'Skipped advisor result due to invalid identity header.')
            LOGGER.debug(
                'Skipped advisor result due to invalid identity header.')
            return
        if not is_entitled_insights(identity, allow_missing_section=True):
            MISSING_INSIGHTS_ENTITLEMENT.inc()
            send_msg_to_payload_tracker(
                PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                'Skipped advisor result due to missing insights entitlement.')
            LOGGER.debug(
                'Skipped advisor result due to missing insights entitlement.')
            return

        # TODO: insert system into database if it's 1st upload, shall we update last seen?
        host = msg_dict['input']['host']
        system_data = {
            'rh_account': host['account'],
            'display_name': host['display_name'],
            'inventory_id': host['id'],
            'stale_timestamp': host['stale_timestamp'],
            'stale_warning_timestamp': host['stale_warning_timestamp'],
            'culled_timestamp': host['culled_timestamp'],
        }

        LOGGER.info("Evaluating rule hits for inventory_id: %s",
                    system_data["inventory_id"])

        rule_hits = {}
        for report in msg_dict['results']['reports']:
            if 'cves' not in report['details']:
                continue
            rule = report['rule_id']
            if rule in RULE_BLACKLIST:
                # TODO: remove this once CVE_2017_5753_4_cpu_kernel and CVE_2017_5715_cpu_virt are merged
                continue
            if rule not in RULES_CACHE:
                db_import_rule(rule, list(report['details']['cves'].keys()))
            for cve, mitigation in report['details']['cves'].items():
                if cve not in CVES_CACHE:
                    db_import_cve(cve)
                if not mitigation:
                    # A falsy value in the CVE dict indicates a failed rule.
                    rule_hits[CVES_CACHE[cve]] = {
                        'id': RULES_CACHE[rule],
                        'details': json.dumps(report['details']),
                        'cve_name': cve
                    }
                else:
                    # A truthy value carries the reason the hit is mitigated.
                    rule_hits[CVES_CACHE[cve]] = {
                        'id': RULES_CACHE[rule],
                        'mitigation_reason': mitigation
                    }

        # BUGFIX: pre-set `success` so the `finally` block cannot raise
        # UnboundLocalError when db_import_system raises an exception other
        # than DatabaseError. Any exception still propagates to the caller
        # after the outcome is reported to the payload tracker.
        success = False
        try:
            success = db_import_system(system_data, rule_hits, loop)
        finally:
            LOGGER.debug("Finished evaluating rule hits for inventory_id: %s",
                         system_data["inventory_id"])
            if success:
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER,
                                            msg_dict['input'], 'success')
            else:
                send_msg_to_payload_tracker(
                    PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                    'Error importing system to vulnerability')