def test_case_new_found(app, monkeypatch):
    """New-abort-task strategy should fire when aborted tasks appear in the message.

    The redis cache lookup is monkeypatched (``mock_has_error_task``) so the
    strategy compares the incoming aborted tasks against a controlled cache state.
    """
    with app.app_context():
        from nmp_broker.common.data_store.redis import workflow
        from nmp_broker.common.workflow.status_strategy import is_new_abort_task_found

        owner = 'nwp_xp'
        repo = 'nwpc_op'
        previous_server_status = NodeStatus.aborted

        def _aborted_task(task_name):
            # Both sample tasks live under the 00 cycle of grapes_meso_3km_post
            # and differ only in their leaf name.
            node_path = '/grapes_meso_3km_post/00/{name}'.format(name=task_name)
            return {
                "children": [],
                "name": task_name,
                "node_path": node_path,
                "node_type": "task",
                "path": node_path,
                "status": "aborted",
            }

        error_task_dict_list = [
            _aborted_task("initial"),
            _aborted_task("pre_data"),
        ]

        monkeypatch.setattr(
            workflow, 'get_error_task_list_from_cache', mock_has_error_task)

        assert is_new_abort_task_found(
            owner, repo, previous_server_status, error_task_dict_list)
def handle_status_message(owner: str, repo: str, message_data: dict, message_app: str) -> None:
    """Handle one workflow status message and fan the result out.

    Builds a workflow ``Bunch`` tree from ``message_data['status']``, collects
    aborted tasks per suite, sends a WeiXin warning when the server is aborted
    and the configured warn strategy says so, saves the new status to the redis
    and mongodb caches, and finally posts the (gzipped) status to the external
    web site.

    Expected ``message_data`` schema::

        {
            "name": "ecflow_status_message_data",
            "type": "record",
            "fields": [
                {"name": "time", "type": "string"},
                {"name": "server_name", "type": "string"},
                {
                    "name": "status",
                    "doc": "bunch status",
                    "type": {"type": "node"}
                }
            ]
        }

    :param owner: owner name, used as the first cache/routing key.
    :param repo: repo name, used as the second cache/routing key.
    :param message_data: decoded status message (see schema above).
    :param message_app: producing collector, either ``'ecflow_status_collector'``
        or ``'sms_status_collector'``; selects which workflow model to import.
    :raises ValueError: if ``message_app`` is not a supported collector.
    """
    server_name = message_data['server_name']
    message_time = message_data['time']
    bunch_dict = message_data['status']
    # Collector timestamps look like 2019-01-01T00:00:00.000000 (ISO with microseconds).
    message_datetime = datetime.datetime.strptime(
        message_time, "%Y-%m-%dT%H:%M:%S.%f")

    nmp_model_system_store_flag = False
    nmp_model_system_dict = None

    # An empty status tree carries no information — nothing to cache or forward.
    if not bunch_dict:
        return

    current_app.logger.info(
        '[{owner}/{repo}] building bunch from message...'.format(owner=owner, repo=repo))
    # The ecflow and sms model packages expose the same trio of names; pick the
    # one matching the collector that produced this message.
    if message_app == 'ecflow_status_collector':
        from nwpc_workflow_model.ecflow import Bunch, ErrorStatusTaskVisitor, pre_order_travel
    elif message_app == 'sms_status_collector':
        from nwpc_workflow_model.sms import Bunch, ErrorStatusTaskVisitor, pre_order_travel
    else:
        raise ValueError("message app is not supported: {message_app}".format(
            message_app=message_app))

    bunch = Bunch.create_from_dict(bunch_dict)
    # NOTE: Because Bunch.create_from_dict will use Bunch.name as path prefix, We need
    # to set it to empty string, so that its path begins with '/' as the same as path
    # in bunch_dict generated by nwpc-log-collector.
    bunch.name = ''
    current_app.logger.info(
        '[{owner}/{repo}] building bunch from message...Done'.format(
            owner=owner, repo=repo))

    # Find error tasks in every suite: per-suite summary plus a flat list of
    # all aborted task dicts across suites.
    suite_error_map = {}
    error_task_dict_list = []
    for a_suite in bunch.children:
        error_visitor = ErrorStatusTaskVisitor()
        pre_order_travel(a_suite, error_visitor)
        suite_error_map[a_suite.name] = {
            'name': a_suite.name,
            'status': a_suite.status,
            'error_task_list': error_visitor.error_task_list
        }
        for a_task in error_visitor.error_task_list:
            error_task_dict_list.append(a_task.to_dict())

    server_status = bunch.status
    if is_server_status_aborted(server_status):
        cached_sms_server_status = mongodb_workflow.get_server_status_from_cache(
            owner, repo, server_name)
        # Without a cached previous status there is nothing to compare against,
        # so no warning decision is made (first message for this server).
        if cached_sms_server_status is not None:
            current_app.logger.info(
                '[{owner}/{repo}] building bunch from cache...'.format(
                    owner=owner, repo=repo))
            cached_bunch = Bunch.create_from_dict(
                cached_sms_server_status['data']['status'])
            current_app.logger.info(
                '[{owner}/{repo}] building bunch from cache...done'.format(
                    owner=owner, repo=repo))
            previous_server_status = cached_bunch.status

            send_warning_flag = False
            warn_strategy = current_app.config['BROKER_CONFIG']['weixin_app'][
                'warn']['strategy']
            if warn_strategy == "always":
                send_warning_flag = True
            elif warn_strategy == "new_abort_task":
                send_warning_flag = is_new_abort_task_found(
                    owner, repo, previous_server_status, error_task_dict_list)
            elif warn_strategy == "new_abort_root":
                send_warning_flag = is_new_abort_root_found(
                    owner, repo, previous_server_status, server_status)
            else:
                # BUGFIX: the original passed the message un-formatted plus a
                # stray lazy arg, which made logging raise a formatting error
                # instead of emitting the intended text. Also logger.warn is a
                # deprecated alias of logger.warning.
                current_app.logger.warning(
                    '[{owner}/{repo}] warn strategy is not supported: {warn_strategy}'.format(
                        owner=owner, repo=repo, warn_strategy=warn_strategy))

            if send_warning_flag:
                current_app.logger.info(
                    '[{owner}/{repo}] sending alert for {owner}/{repo}...'.format(
                        owner=owner, repo=repo))
                nmp_model_system_dict = mongodb_workflow.save_server_status_to_nmp_model_system(
                    owner, repo, server_name, message_data, error_task_dict_list)
                nmp_model_system_store_flag = True

                # If several AbortedTasksBlob instances exist, the last one wins —
                # NOTE(review): presumably at most one is stored per save; confirm.
                aborted_tasks_blob_id = None
                for a_blob in nmp_model_system_dict['blobs']:
                    if isinstance(a_blob, AbortedTasksBlob):
                        aborted_tasks_blob_id = a_blob.ticket_id

                warning_data = {
                    'owner': owner,
                    'repo': repo,
                    'server_name': server_name,  # bunch.name
                    'message_datetime': message_datetime,
                    'suite_error_map': suite_error_map,
                    'aborted_tasks_blob_id': aborted_tasks_blob_id
                }

                weixin_app = nmp_broker.common.weixin.weixin_app.WeixinApp(
                    weixin_config=current_app.config['BROKER_CONFIG']['weixin_app'],
                    cloud_config=current_app.config['BROKER_CONFIG']['cloud'])
                weixin_app.send_warning_message(warning_data)
            else:
                current_app.logger.info(
                    '[{owner}/{repo}] don\'t send alert'.format(owner=owner, repo=repo))

    # Save error_task_list to the redis cache and the raw status to mongodb,
    # so the next message can be compared against this one.
    error_task_value = {
        'timestamp': datetime.datetime.utcnow(),
        'error_task_list': error_task_dict_list
    }
    redis_workflow.save_error_task_list_to_cache(owner, repo, error_task_value)

    mongodb_workflow.save_server_status_to_cache(owner, repo, server_name, message_data)

    # Forward the status to the external (internet-facing) web server.
    website_url = current_app.config['BROKER_CONFIG']['cloud']['put'][
        'url'].format(owner=owner, repo=repo)
    if nmp_model_system_store_flag:
        # A full nmp_model payload: serialized mongo documents for blobs/trees/commits.
        post_message = {
            'app': 'nmp_broker',
            'event': 'post_ecflow_status',
            'timestamp': datetime.datetime.utcnow(),
            'data': {
                'type': 'nmp_model',
                'blobs': [
                    blob.to_mongo().to_dict()
                    for blob in nmp_model_system_dict['blobs']
                ],
                'trees': [
                    blob.to_mongo().to_dict()
                    for blob in nmp_model_system_dict['trees']
                ],
                'commits': [
                    blob.to_mongo().to_dict()
                    for blob in nmp_model_system_dict['commits']
                ],
            }
        }
        website_post_data = {'message': json.dumps(post_message)}
    else:
        message_data['type'] = 'status'
        post_message = {
            'app': 'nmp_broker',
            'event': 'post_ecflow_status',
            'timestamp': datetime.datetime.utcnow(),
            'data': message_data
        }
        website_post_data = {'message': json.dumps(post_message)}
    # NOTE(review): post_message contains a datetime in 'timestamp'; plain
    # json.dumps cannot serialize datetime — verify a custom encoder or
    # serializable timestamp is in effect here.

    current_app.logger.info('[{owner}/{repo}] gzip the data...'.format(
        owner=owner, repo=repo))
    gzipped_post_data = gzip.compress(
        bytes(json.dumps(website_post_data), 'utf-8'))
    current_app.logger.info('[{owner}/{repo}] post to web...'.format(
        owner=owner, repo=repo))
    response = requests.post(website_url,
                             data=gzipped_post_data,
                             headers={'content-encoding': 'gzip'},
                             timeout=REQUEST_POST_TIME_OUT)
    current_app.logger.info('[{owner}/{repo}] post to web...{response}'.format(
        owner=owner, repo=repo, response=response))