import json

# es_group_result (Elasticsearch client), group_index_name, group_index_type,
# r (Redis client) and group_analysis_queue_name are expected to be
# module-level globals imported from the project's configuration.


def save_compute2es(input_dict):
    # Index a finished compute task into ES; returns True on success.
    status = True
    add_dict = dict(input_dict['task_information'])
    task_name = input_dict['task_information']['task_name']
    task_id = input_dict['task_information']['task_id']
    uid_list = input_dict['task_information']['uid_list']
    # uid_list may arrive as a list or as a JSON-encoded string
    if isinstance(uid_list, list):
        count = len(uid_list)
    else:
        count = len(json.loads(uid_list))
    add_dict['count'] = count
    if 'query_condition' not in input_dict:
        input_dict['query_condition'] = {}
    # query_condition is always stored as a JSON string
    if isinstance(input_dict['query_condition'], str):
        add_dict['query_condition'] = input_dict['query_condition']
    else:
        add_dict['query_condition'] = json.dumps(input_dict['query_condition'])
    try:
        es_group_result.index(index=group_index_name, doc_type=group_index_type,
                              id=task_id, body=add_dict)
        print 'success add compute task to es'
    except Exception:
        status = False
    return status
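# Hedged usage sketch (not part of the original module): the field set below is
# an assumption inferred from the keys save_compute2es actually reads; all
# values are hypothetical.
def _example_save_compute2es():
    example_compute_task = {
        'task_information': {
            'task_name': 'example_task',           # hypothetical task name
            'task_id': 'user1example_task',        # submit_user + task_name
            'uid_list': ['1001', '1002'],          # may also be a JSON string
        },
        'query_condition': {'keywords': ['test']}  # stored as a JSON string in ES
    }
    return save_compute2es(example_compute_task)   # True if indexing succeeded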
def submit_task(input_data):
    # Submit a group-analysis task. Returns 0 if a task with the same id
    # already exists (nothing is submitted); returns 1 if the id is new, in
    # which case the task is indexed and queued when a uid_list is supplied
    # directly (the 'uid_file' path is handled elsewhere).
    status = 0  # 0 marks that the task can not be submitted
    task_name = input_data['task_name']
    submit_user = input_data['submit_user']
    task_id = submit_user + task_name
    try:
        result = es_group_result.get(index=group_index_name, doc_type=group_index_type,
                                     id=task_id)['_source']
    except Exception:
        # no existing task with this id, so it can be submitted
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        count = len(input_data['uid_list'])
        input_data['count'] = count
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = '******'
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        # NOTE: add_es_dict is built here but the raw input_data is what gets indexed
        add_es_dict = {'task_information': input_data, 'query_condition': ''}
        es_group_result.index(index=group_index_name, doc_type=group_index_type,
                              id=task_id, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
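# Hedged usage sketch (not part of the original module): the payload below is an
# assumption based on the keys submit_task reads; task_id is built as
# submit_user + task_name, so resubmitting the same pair returns 0.
def _example_submit_task():
    example_task = {
        'task_name': 'example_task',   # hypothetical values
        'submit_user': 'user1',
        'uid_list': ['1001', '1002'],  # required when no 'uid_file' is given
    }
    return submit_task(example_task)   # 1 if queued, 0 if it already exists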
def save_detect2es(input_dict):
    # Index a detect task into ES; returns True on success.
    status = True
    add_dict = dict(input_dict['task_information'])
    task_id = input_dict['task_information']['task_id']
    task_name = input_dict['task_information']['task_name']
    add_dict['query_condition'] = json.dumps(input_dict['query_condition'])
    try:
        es_group_result.index(index=group_index_name, doc_type=group_index_type,
                              id=task_id, body=add_dict)
        print 'success add detect task to es'
    except Exception:
        status = False
    return status