def auth(x_rh_identity, required_scopes=None):  # pylint: disable=unused-argument
    """ Parses account number from the x-rh-identity header """
    identity = get_identity(x_rh_identity)
    return auth_common(identity, x_rh_identity) if identity is not None else None
def auth_admin(x_rh_identity, required_scopes=None):  # pylint: disable=unused-argument
    """ Parses user name from the x-rh-identity header """
    identity = get_identity(x_rh_identity)
    if identity is None:  # guard added: get_identity returns None for an invalid header
        return None
    user = identity.get("identity", {}).get("associate", {}).get("email")
    LOGGER.info("User '%s' accessed admin API.", user)
    ADMIN_REQUESTS.inc()
    return {"uid": user} if user else None
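# For local testing it can help to forge an x-rh-identity header. A minimal sketch,
# assuming get_identity() simply base64-decodes and JSON-parses the header; the
# make_identity_header helper and the payload values below are hypothetical, with the
# payload shape inferred from the lookups in auth_admin/auth_internal.
import base64
import json

def make_identity_header(payload):
    """Hypothetical test helper: encode an identity dict the way the platform gateway would."""
    return base64.b64encode(json.dumps(payload).encode('utf8'))

test_header = make_identity_header({
    'identity': {
        'account_number': '123456',
        'user': {'is_internal': True},
        'associate': {'email': 'jdoe@example.com'},
    }
})
# auth(test_header) should then resolve the account number,
# auth_admin(test_header) the associate email.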
def process_message(msg):
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.info('Received message on topic %s', msg.topic)
    LOGGER.debug('Message body: %s', msg.value)
    try:
        msg_dict = json.loads(msg.value.decode('utf8'))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception('Unable to parse message: ')
        return

    identity = get_identity(msg_dict['input']['platform_metadata']['b64_identity'])
    if identity is None:
        INVALID_IDENTITY.inc()
        LOGGER.warning('Skipped advisor result due to invalid identity header.')
        return
    if not is_entitled_insights(identity, allow_missing_section=True):
        MISSING_INSIGHTS_ENTITLEMENT.inc()
        LOGGER.debug('Skipped advisor result due to missing insights entitlement.')
        return

    # TODO: insert system into database if it's 1st upload, shall we update last seen?
    system_data = {
        'rh_account': msg_dict['input']['host']['account'],
        'display_name': msg_dict['input']['host']['display_name'],
        'inventory_id': msg_dict['input']['host']['id']
    }

    rule_hits = {}
    reports = msg_dict['results']['reports']
    for report in reports:
        if 'cves' in report['details']:
            rule = report['rule_id']
            if rule in RULE_BLACKLIST:
                # TODO: remove this once CVE_2017_5753_4_cpu_kernel and CVE_2017_5715_cpu_virt are merged
                continue
            if rule not in RULES_CACHE:
                db_import_rule(rule, list(report['details']['cves'].keys()))
            for cve in report['details']['cves']:
                if cve not in CVES_CACHE:
                    db_import_cve(cve)
                if not report['details']['cves'][cve]:  # False in the CVE dict indicates failed rule
                    rule_hits[CVES_CACHE[cve]] = {
                        'id': RULES_CACHE[rule],
                        'details': json.dumps(report['details'])
                    }

    db_import_system(system_data, rule_hits)
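# --- Usage sketch (hypothetical, for tests): the message below mirrors the keys this
# handler reads; FakeKafkaMessage, the rule id, and all field values are made up.
# With the placeholder identity, get_identity() returns None and the handler exits
# on the INVALID_IDENTITY path, so the sketch never touches the database.
from collections import namedtuple

FakeKafkaMessage = namedtuple('FakeKafkaMessage', ['topic', 'value'])

fake_advisor_msg = {
    'input': {
        'platform_metadata': {'b64_identity': '<base64 identity header>'},
        'host': {'account': '123456', 'display_name': 'test-host', 'id': '<inventory uuid>'}
    },
    'results': {
        'reports': [{
            'rule_id': 'some_rule|SOME_RULE',
            # False marks a failed (vulnerable) rule, per the check in the loop above
            'details': {'cves': {'CVE-2017-5715': False}}
        }]
    }
}

process_message(FakeKafkaMessage(topic='advisor-results',
                                 value=json.dumps(fake_advisor_msg).encode('utf8')))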
def auth_internal(x_rh_identity, required_scopes=None):  # pylint: disable=unused-argument
    """ Parses account number from the x-rh-identity header and ensures account is internal """
    identity = get_identity(x_rh_identity)
    if identity is not None:
        if 'identity' not in identity or 'user' not in identity['identity'] or \
                not identity['identity']['user'].get('is_internal', False):
            raise InternalOnlyException
    return auth_common(identity, x_rh_identity) if identity is not None else None
def process_message(msg):
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    if msg.topic == mqueue.UPLOAD_TOPIC:
        # process only archives from smart_management accounts
        identity = get_identity(msg_dict.get("b64_identity", ""))
        if identity is None:
            INVALID_IDENTITY.inc()
            LOGGER.warning("Skipped upload due to invalid identity header.")
            return
        if not is_entitled_smart_management(identity, allow_missing_section=True):
            MISSING_SMART_MANAGEMENT.inc()
            LOGGER.debug("Skipped upload due to missing smart_management entitlement.")
            return
        process_func = process_upload
    elif msg.topic == mqueue.EVENTS_TOPIC:
        if msg_dict['type'] == 'delete':
            process_func = process_delete
        else:
            UNKNOWN_EVENT_TYPE.inc()
            LOGGER.error("Received unknown event type: %s", msg_dict['type'])
            return
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return

    if 'id' not in msg_dict or msg_dict["id"] is None:
        MISSING_ID.inc()
        LOGGER.warning("Unable to process message, inventory ID is missing.")
        return

    future = executor.submit(process_func, msg_dict, loop=loop)
    future.add_done_callback(on_thread_done)
def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    FailedCache.process_failed_cache(FailedCache.upload_cache, executor, process_upload, loop)
    FailedCache.process_failed_cache(FailedCache.delete_cache, executor, process_delete, loop)

    if msg.topic == mqueue.UPLOAD_TOPIC:
        if not validate_msg(msg_dict, "upload", REQUIRED_UPLOAD_MESSAGE_FIELDS):
            return
        # send message to payload tracker
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'received', loop=loop)
        # process only archives from smart_management accounts
        identity = get_identity(msg_dict["platform_metadata"]["b64_identity"])
        if identity is None:
            INVALID_IDENTITY.inc()
            error_msg = "Skipped upload due to invalid identity header."
            LOGGER.warning(error_msg)
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                        status_msg=error_msg, loop=loop)
            return
        if not is_entitled_smart_management(identity, allow_missing_section=True):
            MISSING_SMART_MANAGEMENT.inc()
            error_msg = "Skipped upload due to missing smart_management entitlement."
            LOGGER.debug(error_msg)
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                        status_msg=error_msg, loop=loop)
            return
        process_func = process_upload
    elif msg.topic == mqueue.EVENTS_TOPIC:
        if not validate_msg(msg_dict, "event", REQUIRED_EVENT_MESSAGE_FIELDS):
            return
        if msg_dict['type'] == 'delete':
            process_func = process_delete
        else:
            UNKNOWN_EVENT_TYPE.inc()
            LOGGER.error("Received unknown event type: %s", msg_dict['type'])
            return
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return

    future = executor.submit(process_func, msg_dict, loop=loop)
    future.add_done_callback(on_thread_done)
async def process_message(msg):
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Message from topic %s, body: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode('utf8'))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception('Unable to parse message: ')
        return

    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'processing',
                                'Starting advisor evaluation',
                                service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)

    if not validate_kafka_msg(msg_dict, REQUIRED_MESSAGE_FIELDS):
        INVALID_INSIGHTS_ACC.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to message coming from non-insights account.',
                                    service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
        LOGGER.debug('Skipped advisor result due to message coming from non-insights account.')
        return

    identity = get_identity(msg_dict['input']['platform_metadata']['b64_identity'])
    if identity is None:
        INVALID_IDENTITY.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to invalid identity header.',
                                    service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
        LOGGER.debug('Skipped advisor result due to invalid identity header.')
        return

    if not is_entitled_insights(identity, allow_missing_section=True):
        MISSING_INSIGHTS_ENTITLEMENT.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to missing insights entitlement.',
                                    service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
        LOGGER.debug('Skipped advisor result due to missing insights entitlement.')
        return

    if not validate_system_inventory(msg_dict["input"]["host"]["id"], msg_dict["input"]["timestamp"]):
        DELETED_SYSTEM_FROM_INVENTORY.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to system not valid in inventory anymore.',
                                    service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
        LOGGER.info('Skipped advisor result due to system not valid in inventory anymore.')
        return

    advisor_json, rule_hits = await parse_inventory_data(msg_dict)
    if advisor_json:
        LOGGER.info("Evaluating rule hits for inventory_id: %s", msg_dict['input']['host']['id'])
        status = await db_import_system(msg_dict, rule_hits, advisor_json)
        if ImportStatus.CHANGED in status:
            LOGGER.debug("Finished evaluating rule hits for inventory_id: %s",
                         msg_dict['input']['host']['id'])
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'success',
                                        'System successfully uploaded and evaluated',
                                        service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
        elif ImportStatus.FAILED not in status:
            LOGGER.info("Skipping evaluating rule hits for inventory_id %s due to unchanged system",
                        msg_dict['input']['host']['id'])
            UNCHANGED_SYSTEM.inc()
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'success',
                                        'Unchanged system and not evaluated',
                                        service=PAYLOAD_TRACKER_SERVICE, loop=MAIN_LOOP)
def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Received message from topic %s: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    FailedCache.process_failed_cache(FailedCache.upload_cache, ListenerCtx.executor,
                                     process_upload, ListenerCtx.loop)
    FailedCache.process_failed_cache(FailedCache.delete_cache, ListenerCtx.executor,
                                     process_delete, ListenerCtx.loop)

    if msg.topic == CFG.events_topic:
        if msg_dict.get("type", "") in ["created", "updated"]:
            if not validate_kafka_msg(msg_dict, REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            if msg_dict.get("platform_metadata"):
                if not validate_kafka_msg(msg_dict, REQUIRED_UPLOAD_MESSAGE_FIELDS):
                    SKIPPED_MESSAGES.inc()
                    return
                LOGGER.info("Received created/updated msg, inventory_id: %s, type: %s",
                            msg_dict["host"]["id"], msg_dict["type"])
                # send message to payload tracker
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'received',
                                            loop=ListenerCtx.loop)
                # process only system uploads from insights entitled accounts
                identity = get_identity(msg_dict["platform_metadata"]["b64_identity"])
                if identity is None:
                    INVALID_IDENTITY.inc()
                    error_msg = "Skipped upload due to invalid identity header."
                    LOGGER.warning(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                                status_msg=error_msg, loop=ListenerCtx.loop)
                    return
                if not is_entitled_insights(identity, allow_missing_section=True):
                    MISSING_INSIGHTS_ENTITLEMENT.inc()
                    error_msg = "Skipped upload due to missing insights entitlement."
                    LOGGER.debug(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                                status_msg=error_msg, loop=ListenerCtx.loop)
                    return
                process_func = process_upload
            else:
                # display name change messages have no platform_metadata section, so the identity
                # cannot be validated and the payload cannot be tracked; support only display name change
                LOGGER.info("Received update event msg, inventory_id: %s", msg_dict["host"]["id"])
                process_func = process_update
        elif msg_dict.get("type", "") == "delete":
            if not validate_kafka_msg(msg_dict, REQUIRED_DELETE_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            LOGGER.info("Received delete msg, inventory_id: %s", msg_dict["id"])
            process_func = process_delete
        else:
            UNKNOWN_EVENT_TYPE.inc()
            LOGGER.error("Received unknown event type: %s",
                         msg_dict.get('type', 'missing event type'))
            return
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return

    future = ListenerCtx.executor.submit(process_func, msg_dict, loop=ListenerCtx.loop)
    future.add_done_callback(on_thread_done)
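# --- Usage sketch (hypothetical): the smallest delete event this handler's delete
# branch reads; the id value is made up, and this assumes REQUIRED_DELETE_MESSAGE_FIELDS
# demands nothing beyond these keys. Reuses the FakeKafkaMessage helper sketched after
# the first process_message variant.
fake_delete_event = {'type': 'delete', 'id': '00000000-0000-0000-0000-000000000000'}
# process_message(FakeKafkaMessage(topic=CFG.events_topic,
#                                  value=json.dumps(fake_delete_event).encode('utf8')))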
def process_message(msg):
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Message from topic %s, body: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode('utf8'))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception('Unable to parse message: ')
        return

    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'processing',
                                'Starting advisor evaluation')

    if not validate_kafka_msg(msg_dict, REQUIRED_MESSAGE_FIELDS):
        INVALID_INSIGHTS_ACC.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to message coming from non-insights account.')
        LOGGER.debug('Skipped advisor result due to message coming from non-insights account.')
        return  # return added: without it an invalid message would still be processed below

    identity = get_identity(msg_dict['input']['platform_metadata']['b64_identity'])
    if identity is None:
        INVALID_IDENTITY.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to invalid identity header.')
        LOGGER.debug('Skipped advisor result due to invalid identity header.')
        return

    if not is_entitled_insights(identity, allow_missing_section=True):
        MISSING_INSIGHTS_ENTITLEMENT.inc()
        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                    'Skipped advisor result due to missing insights entitlement.')
        LOGGER.debug('Skipped advisor result due to missing insights entitlement.')
        return

    # TODO: insert system into database if it's 1st upload, shall we update last seen?
    system_data = {
        'rh_account': msg_dict['input']['host']['account'],
        'display_name': msg_dict['input']['host']['display_name'],
        'inventory_id': msg_dict['input']['host']['id'],
        'stale_timestamp': msg_dict['input']['host']['stale_timestamp'],
        'stale_warning_timestamp': msg_dict['input']['host']['stale_warning_timestamp'],
        'culled_timestamp': msg_dict['input']['host']['culled_timestamp']
    }
    LOGGER.info("Evaluating rule hits for inventory_id: %s", system_data["inventory_id"])

    rule_hits = {}
    reports = msg_dict['results']['reports']
    for report in reports:
        if 'cves' in report['details']:
            rule = report['rule_id']
            if rule in RULE_BLACKLIST:
                # TODO: remove this once CVE_2017_5753_4_cpu_kernel and CVE_2017_5715_cpu_virt are merged
                continue
            if rule not in RULES_CACHE:
                db_import_rule(rule, list(report['details']['cves'].keys()))
            for cve in report['details']['cves']:
                if cve not in CVES_CACHE:
                    db_import_cve(cve)
                if not report['details']['cves'][cve]:  # False in the CVE dict indicates failed rule
                    rule_hits[CVES_CACHE[cve]] = {
                        'id': RULES_CACHE[rule],
                        'details': json.dumps(report['details']),
                        'cve_name': cve
                    }
                elif report['details']['cves'][cve]:  # truthy value carries the mitigation reason
                    rule_hits[CVES_CACHE[cve]] = {
                        'id': RULES_CACHE[rule],
                        'mitigation_reason': report['details']['cves'][cve]
                    }

    success = False  # initialized so the finally block cannot hit an unbound name on unexpected errors
    try:
        success = db_import_system(system_data, rule_hits, loop)
    except DatabaseError as exc:
        success = False
        # the exception should not get lost
        raise exc
    finally:
        LOGGER.debug("Finished evaluating rule hits for inventory_id: %s",
                     system_data["inventory_id"])
        if success:
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'success')
        else:
            send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict['input'], 'error',
                                        'Error importing system to vulnerability')