async def process_upload_or_re_evaluate(msg_dict: dict):
    """ Process function to upload new file or re-evaluate system """
    async with DB_POOL.acquire() as conn:
        try:
            async with conn.transaction():
                LOGGER.info("Received message type: %s", msg_dict['type'])
                # Lock the system for processing
                system_platform = await conn.fetchrow(
                    """SELECT id, inventory_id, vmaas_json, rh_account_id, opt_out, stale
                       FROM system_platform
                       WHERE inventory_id = $1 AND when_deleted IS NULL
                       FOR UPDATE""",
                    msg_dict['host']['id'])
                if system_platform is not None:
                    await evaluate_vmaas(system_platform, conn)
                    if msg_dict['type'] == 'upload_new_file':
                        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'success',
                                                    loop=MAIN_LOOP)
                else:
                    INV_ID_NOT_FOUND.inc()
                    LOGGER.error("System with inventory_id not found in DB: %s",
                                 msg_dict['host']['id'])
                    if msg_dict['type'] == 'upload_new_file':
                        send_msg_to_payload_tracker(
                            PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                            status_msg='System with inventory_id not found in DB: %s'
                                       % msg_dict['host']['id'],
                            loop=MAIN_LOOP)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Unable to store data: ")
            FailedCache.push(FailedCache.upload_cache, msg_dict)
            LOGGER.info("Remembered failed upload: %s", str(msg_dict))
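
# The coroutine above assumes a module-level asyncpg pool (DB_POOL) created at
# startup. A minimal sketch of that wiring, assuming asyncpg; the CFG.db_*
# attribute names are hypothetical and not confirmed by this module:
import asyncpg

DB_POOL = None


async def init_db_pool():
    """Create the shared asyncpg connection pool used by the evaluator."""
    global DB_POOL  # pylint: disable=global-statement
    DB_POOL = await asyncpg.create_pool(
        host=CFG.db_host,          # hypothetical config attributes
        port=CFG.db_port,
        user=CFG.db_user,
        password=CFG.db_password,
        database=CFG.db_name,
        min_size=1,
        max_size=10)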
def process_upload_or_re_evaluate(self, msg_dict: dict, loop=None):
    """ Process function to upload new file or re-evaluate system """
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                LOGGER.info("Received message type: %s", msg_dict['type'])
                # Lock the system for processing
                cur.execute("""SELECT id, inventory_id, vmaas_json, rh_account_id, opt_out
                               FROM system_platform
                               WHERE inventory_id = %s
                               FOR UPDATE""",
                            (msg_dict['host']['id'],))
                system_platform = cur.fetchone()
                if system_platform is not None:
                    self.evaluate_vmaas(system_platform, cur)
                    conn.commit()
                    if msg_dict['type'] == 'upload_new_file':
                        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'success',
                                                    loop=loop)
                else:
                    INV_ID_NOT_FOUND.inc()
                    LOGGER.error("System with inventory_id not found in DB: %s",
                                 msg_dict['host']['id'])
                    send_msg_to_payload_tracker(
                        PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                        status_msg='System with inventory_id not found in DB: %s'
                                   % msg_dict['host']['id'],
                        loop=loop)
            except DatabaseError:
                LOGGER.exception("Unable to store data: ")
                FailedCache.push(FailedCache.upload_cache, msg_dict)
                LOGGER.info("Remembered failed upload: %s", str(msg_dict))
                conn.rollback()
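
# The synchronous variant relies on a DatabasePoolConnection context manager.
# A minimal sketch of one possible implementation on top of psycopg2's
# built-in connection pool; this is an assumption about its shape, and the
# connection parameters below are placeholders:
from psycopg2 import pool as pg_pool

_POOL = pg_pool.ThreadedConnectionPool(minconn=1, maxconn=10,
                                       host="localhost",       # placeholder
                                       dbname="vulnerability", # placeholder
                                       user="evaluator",       # placeholder
                                       password="secret")      # placeholder


class DatabasePoolConnection:
    """Borrow a connection from the pool for the duration of a with-block."""

    def __enter__(self):
        self.conn = _POOL.getconn()
        return self.conn

    def __exit__(self, exc_type, exc_value, traceback):
        # Return the connection to the pool regardless of success or failure.
        _POOL.putconn(self.conn)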
async def process_message(message):
    """Message processing logic"""
    try:
        msg_dict = json.loads(message.value.decode('utf-8'))
        # Can't use FailedCache.process_failed_cache here because it's tied
        # to ThreadExecutor. So do it the asyncio way
        if FailedCache.upload_cache:
            cache = FailedCache.upload_cache
            LOGGER.info("Start processing %d failed uploads", len(cache))
            for msg in cache:
                LOGGER.info("Processing failed upload: %s", str(msg))
                await process_upload_or_re_evaluate(msg)
            LOGGER.info("Cleared failed cache")
            FailedCache.clear_cache(cache)
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    if message.topic in CFG.evaluator_topics:
        if 'type' not in msg_dict:
            LOGGER.error("Received message is missing type field: %s", msg_dict)
            return
        if msg_dict['type'] in ['upload_new_file', 're-evaluate_system']:
            if msg_dict['type'] == 'upload_new_file':
                # Report 'processing' before the evaluation starts so the
                # tracker sees it ahead of the final 'success'/'error' status.
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'processing',
                                            status_msg='Scheduled for evaluation', loop=MAIN_LOOP)
            await process_upload_or_re_evaluate(msg_dict)
        else:
            UNKNOWN_MSG.inc()
            LOGGER.error("Received unknown message type: %s", msg_dict['type'])
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", message.topic)
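
# Wiring sketch: how the asyncio process_message above might be driven by an
# aiokafka consumer. aiokafka is an assumption here, and the bootstrap/group
# values are placeholders:
from aiokafka import AIOKafkaConsumer


async def run_consumer():
    """Consume evaluator topics and feed each record to process_message."""
    consumer = AIOKafkaConsumer(*CFG.evaluator_topics,
                                bootstrap_servers="kafka:9092",       # placeholder
                                group_id="vulnerability-evaluator")   # placeholder
    await consumer.start()
    try:
        # Messages are handled one at a time: process_message awaits the full
        # evaluation before the next record is pulled from the topic.
        async for message in consumer:
            await process_message(message)
    finally:
        await consumer.stop()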
def process_message(message):
    """Message processing logic"""
    # Nested in the evaluator service: self, executor, loop and
    # kafka_evaluator_topic are resolved from the enclosing scope.
    try:
        msg_dict = json.loads(message.value.decode('utf-8'))
        FailedCache.process_failed_cache(FailedCache.upload_cache, executor,
                                         self.process_upload_or_re_evaluate, loop)
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    if message.topic in kafka_evaluator_topic:
        if 'type' not in msg_dict:
            LOGGER.error("Received message is missing type field: %s", msg_dict)
            return
        if msg_dict['type'] in ['upload_new_file', 're-evaluate_system']:
            process_func = self.process_upload_or_re_evaluate
            if msg_dict['type'] == 'upload_new_file':
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'processing',
                                            status_msg='Scheduled for evaluation', loop=loop)
        else:
            UNKNOWN_MSG.inc()
            LOGGER.error("Received unknown message type: %s", msg_dict['type'])
            return
        future = executor.submit(process_func, msg_dict, loop=loop)
        future.add_done_callback(on_thread_done)
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", message.topic)
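
# Both threaded variants register on_thread_done on the submitted future. A
# minimal sketch of that callback, assumed from its use here: calling
# future.result() re-raises any exception from the worker thread, which the
# executor would otherwise swallow silently.
def on_thread_done(future):
    """Log exceptions raised inside executor threads."""
    try:
        future.result()
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception("Future %s hit exception: ", future)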
def db_import_system(upload_data, vmaas_json: str, repo_list: list):
    """Import initial system record to the DB, report back on what we did."""
    status = ImportStatus.FAILED
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                host = upload_data["host"]
                import_status, system_id = db_import_system_platform(
                    cur, host['id'], host['account'],
                    upload_data['platform_metadata']['url'],
                    host.get("display_name"),
                    host.get('stale_timestamp'),
                    host.get('stale_warning_timestamp'),
                    host.get('culled_timestamp'),
                    host.get('system_profile', {}).get('host_type'),
                    vmaas_json)
                if import_status is None:
                    conn.rollback()
                    return status
                status |= import_status
                db_import_repos(cur, repo_list)
                db_import_system_repos(cur, repo_list, system_id)
                db_delete_other_system_repos(cur, repo_list, system_id)
                conn.commit()
                status -= ImportStatus.FAILED
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error importing system: ")
                FailedCache.push(FailedCache.upload_cache, upload_data)
                LOGGER.info("Remembered upload %s", str(upload_data))
                conn.rollback()
    return status
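
# db_import_system treats ImportStatus as a bit flag: it starts with FAILED
# set, ORs in whatever db_import_system_platform reports, and clears FAILED
# only after the commit succeeds. A plausible shape for the enum; FAILED is
# confirmed by the code above, the other member names are assumed:
from enum import IntFlag


class ImportStatus(IntFlag):
    INSERTED = 1  # assumed member
    UPDATED = 2   # assumed member
    FAILED = 4

# Worked example of the accounting used above. FAILED is known to be set at
# that point, so integer subtraction clears exactly that bit:
status = ImportStatus.FAILED      # value 4
status |= ImportStatus.INSERTED   # value 5 (FAILED | INSERTED)
status -= ImportStatus.FAILED     # value 1 -> only INSERTED remains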
def process_message(msg):  # pylint: disable=too-many-return-statements,too-many-branches
    """Message processing logic"""
    PROCESS_MESSAGES.inc()
    LOGGER.debug('Received message from topic %s: %s', msg.topic, msg.value)
    try:
        msg_dict = json.loads(msg.value.decode("utf8"))
    except json.decoder.JSONDecodeError:
        MESSAGE_PARSE_ERROR.inc()
        LOGGER.exception("Unable to parse message: ")
        return

    FailedCache.process_failed_cache(FailedCache.upload_cache, ListenerCtx.executor,
                                     process_upload, ListenerCtx.loop)
    FailedCache.process_failed_cache(FailedCache.delete_cache, ListenerCtx.executor,
                                     process_delete, ListenerCtx.loop)

    if msg.topic == CFG.events_topic:
        if msg_dict.get("type", "") in ["created", "updated"]:
            if not validate_kafka_msg(msg_dict, REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            if msg_dict.get("platform_metadata"):
                if not validate_kafka_msg(msg_dict, REQUIRED_UPLOAD_MESSAGE_FIELDS):
                    SKIPPED_MESSAGES.inc()
                    return
                LOGGER.info("Received created/updated msg, inventory_id: %s, type: %s",
                            msg_dict["host"]["id"], msg_dict["type"])
                # send message to payload tracker
                send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'received',
                                            loop=ListenerCtx.loop)
                # process only system uploads from insights entitled accounts
                identity = get_identity(msg_dict["platform_metadata"]["b64_identity"])
                if identity is None:
                    INVALID_IDENTITY.inc()
                    error_msg = "Skipped upload due to invalid identity header."
                    LOGGER.warning(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                                status_msg=error_msg, loop=ListenerCtx.loop)
                    return
                if not is_entitled_insights(identity, allow_missing_section=True):
                    MISSING_INSIGHTS_ENTITLEMENT.inc()
                    error_msg = "Skipped upload due to missing insights entitlement."
                    LOGGER.debug(error_msg)
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                                status_msg=error_msg, loop=ListenerCtx.loop)
                    return
                process_func = process_upload
            else:
                # display name change message doesn't have platform_metadata section,
                # cannot validate identity and track payload, support only display name change
                LOGGER.info("Received update event msg, inventory_id: %s",
                            msg_dict["host"]["id"])
                process_func = process_update
        elif msg_dict.get("type", "") == "delete":
            if not validate_kafka_msg(msg_dict, REQUIRED_DELETE_MESSAGE_FIELDS):
                SKIPPED_MESSAGES.inc()
                return
            LOGGER.info("Received delete msg, inventory_id: %s", msg_dict["id"])
            process_func = process_delete
        else:
            UNKNOWN_EVENT_TYPE.inc()
            LOGGER.error("Received unknown event type: %s",
                         msg_dict.get('type', 'missing event type'))
            return
    else:
        UNKNOWN_TOPIC.inc()
        LOGGER.error("Received message on unsupported topic: %s", msg.topic)
        return
    future = ListenerCtx.executor.submit(process_func, msg_dict, loop=ListenerCtx.loop)
    future.add_done_callback(on_thread_done)
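
# validate_kafka_msg guards each branch above. A minimal sketch, assuming the
# REQUIRED_*_MESSAGE_FIELDS constants are flat lists of top-level keys; the
# real validator may check nested paths:
def validate_kafka_msg(msg_dict, required_fields):
    """Return True if every required field is present in the message."""
    missing = [field for field in required_fields if field not in msg_dict]
    if missing:
        LOGGER.error("Message missing required fields, skipping: %s", missing)
        return False
    return True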