def submit_task(input_data): status = 0 # mark it can not submit task_name = input_data['task_name'] try: result = es.get(index=index_name, doc_type=index_type, id=task_name) except: status = 1 if status != 0 and 'uid_file' not in input_data: r.lpush('group_task', json.dumps(input_data)) input_data['status'] = 0 # mark the task not compute count = len(input_data['uid_list']) input_data['count'] = count uid_list_string = json.dumps(input_data['uid_list']) es.index(index='group_result', doc_type='group', id=task_name, body=input_data) elif status != 0 and 'uid_file' in input_data: input_data['status'] = 0 # mark the task not compute uid_file = input_data['uid_file'] uid_list = read_uid_file(uid_file) input_data['count'] = len(uid_list) input_data['uid_list'] = json.dumps(uid_list) r.lpush('group_task', json.dumps(input_data)) es.index(index='group_result', doc_type='group', id=task_name, body=input_data) delete_status = delete_uid_file(uid_file) if delete_status == 0: print 'fail delete uid file' elif delete_status == 1: print 'success delete uid file' return status
def submit_task(input_data):
    """Submit a group-analysis task keyed by submit_user + task_name.

    If no document with that id exists in the group-result index, the
    task is annotated (status/count/task_type/...), indexed into ES and
    pushed onto the group-analysis redis queue.

    Parameters:
        input_data -- dict with 'task_name', 'submit_user' and 'uid_list'.

    Returns:
        0 if the task already exists (nothing submitted), 1 otherwise.
    """
    status = 0  # mark it can not submit
    task_name = input_data['task_name']
    submit_user = input_data['submit_user']
    task_id = submit_user + task_name
    try:
        result = es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_id)['_source']
    except Exception:  # narrowed from a bare except; any failure is treated as "task not found"
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task not compute
        input_data['count'] = len(input_data['uid_list'])
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = '******'  # mask the submitter in the stored/queued payload
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        # NOTE(review): the original built an unused
        # {'task_information': input_data, 'query_condition': ''} dict but
        # indexed input_data itself; the dead local is dropped here --
        # confirm input_data is the intended ES body.
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_id, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
def save_compute2redis(input_dict): status = True try: r_group.lpush(group_analysis_queue_name, json.dumps(input_dict)) print 'success add detect task to redis queue' except: status = False return status
def save_detect2redis(input_dict): status = True try: r_group.lpush(group_detect_queue_name, json.dumps(input_dict)) print 'success to save redis' except: status = False return status
def change_user_count(task_user):
    """Decrement the per-uid task counters in the 'track_task_user' hash.

    A counter greater than 1 is decremented by one; a counter at 1 (or
    lower) is removed from the hash entirely.

    Parameters:
        task_user -- iterable of uids (converted with str() for the hash field).

    Returns:
        1 if at least one tracked uid was updated, 0 otherwise.
    """
    status = 0
    for uid in task_user:
        uid_task_count = r.hget('track_task_user', str(uid))
        if uid_task_count is None:
            # robustness: uid is not tracked -- the original crashed on
            # int(None) here; skip it instead.
            continue
        if int(uid_task_count) > 1:
            r.hincrby('track_task_user', str(uid), -1)
        else:
            r.hdel('track_task_user', str(uid))
        status = 1
    return status
def change_user_count(task_user):
    """Decrement each uid's counter in the 'track_task_user' hash,
    deleting the field once the counter would drop to zero.

    Returns 1 when the loop ran at least once, else 0.
    """
    # NOTE: this re-definition shadows an identical function defined
    # earlier in the file; the later definition is the one in effect.
    status = 0
    hash_name = 'track_task_user'
    for uid in task_user:
        field = str(uid)
        current = r.hget(hash_name, field)
        if int(current) > 1:
            r.hincrby(hash_name, field, -1)
        else:
            r.hdel(hash_name, field)
        status = 1
    return status
def submit_task(input_data):
    """Submit a group-analysis task keyed by submit_user + task_name.

    If no document with that id exists in the group-result index, the
    task is annotated (status/count/task_type/...), indexed into ES and
    pushed onto the group-analysis redis queue.

    Parameters:
        input_data -- dict with "task_name", "submit_user" and "uid_list".

    Returns:
        0 if the task already exists (nothing submitted), 1 otherwise.
    """
    status = 0  # mark it can not submit
    task_name = input_data["task_name"]
    submit_user = input_data["submit_user"]
    task_id = submit_user + task_name
    try:
        result = es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_id)["_source"]
    except Exception:  # narrowed from a bare except; any failure is treated as "task not found"
        status = 1
    if status != 0 and "uid_file" not in input_data:
        input_data["status"] = 0  # mark the task not compute
        input_data["count"] = len(input_data["uid_list"])
        input_data["task_type"] = "analysis"
        input_data["submit_user"] = "******"  # mask the submitter in the stored/queued payload
        input_data["detect_type"] = ""
        input_data["detect_process"] = ""
        # NOTE(review): the original built an unused
        # {"task_information": input_data, "query_condition": ""} dict but
        # indexed input_data itself; the dead local is dropped here --
        # confirm input_data is the intended ES body.
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_id, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
def add_user_set(user_list):
    """Increment the tracked-task counter for every uid in user_list.

    Uses the default increment of 1 on the 'track_task_user' hash.
    Returns 1 when at least one uid was processed, else 0.
    """
    status = 0
    hash_name = 'track_task_user'
    for uid in user_list:
        r.hincrby(hash_name, str(uid))
        status = 1
    return status