def submit_task(input_data):
    """Submit a group task, reading uids either inline or from a uid file.

    Registers the task document in ES (index 'group_result', type 'group')
    under its task_name and pushes the JSON payload onto the 'group_task'
    redis queue. Submission only happens when no document with the same
    task_name already exists.

    :param input_data: dict with at least 'task_name', plus either
        'uid_list' (inline list) or 'uid_file' (path handled by
        read_uid_file / delete_uid_file).
    :return: 0 when the task name is already taken (nothing submitted),
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: name already exists, cannot submit
    task_name = input_data['task_name']
    try:
        es.get(index=index_name, doc_type=index_type, id=task_name)
    except Exception:
        # Document not found (or ES unreachable): the name is free.
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        # Queue AFTER populating status/count so the queued payload matches
        # both the indexed document and the uid_file branch below.
        r.lpush('group_task', json.dumps(input_data))
        es.index(index='group_result', doc_type='group', id=task_name, body=input_data)
    elif status != 0 and 'uid_file' in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        uid_file = input_data['uid_file']
        uid_list = read_uid_file(uid_file)
        input_data['count'] = len(uid_list)
        input_data['uid_list'] = json.dumps(uid_list)
        r.lpush('group_task', json.dumps(input_data))
        es.index(index='group_result', doc_type='group', id=task_name, body=input_data)
        # The uid file is only needed once; best-effort cleanup with a report.
        delete_status = delete_uid_file(uid_file)
        if delete_status == 0:
            print('fail delete uid file')
        elif delete_status == 1:
            print('success delete uid file')
    return status
def submit_task(input_data):
    """Submit a group analysis task with an inline uid_list.

    The task is indexed into the group result ES index under its task_name
    and then pushed onto the group analysis redis queue. Submission only
    happens when no document with the same task_name already exists and no
    'uid_file' key is present.

    :param input_data: dict with at least 'task_name' and 'uid_list'.
    :return: 0 when the task name is already taken (nothing submitted),
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: name already exists, cannot submit
    task_name = input_data['task_name']
    try:
        es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_name)
    except Exception:
        # Document not found (or ES unreachable): the name is free.
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = '******'
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        # NOTE(review): the original built {'task_information': ..., 'query_condition': ''}
        # but never used it; input_data itself is what gets indexed. Dropped the dead dict.
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_name, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
def save_compute2redis(input_dict):
    """Push a compute task dict onto the group analysis redis queue.

    :param input_dict: JSON-serializable task payload.
    :return: True when the push succeeded, False when serialization or the
        redis call failed.
    """
    status = True
    try:
        r_group.lpush(group_analysis_queue_name, json.dumps(input_dict))
        print('success add detect task to redis queue')
    except Exception:
        # Narrowed from a bare except; still best-effort by design —
        # callers inspect the boolean rather than handling exceptions.
        status = False
    return status
def save_detect2redis(input_dict):
    """Push a detect task dict onto the group detect redis queue.

    :param input_dict: JSON-serializable task payload.
    :return: True when the push succeeded, False when serialization or the
        redis call failed.
    """
    status = True
    try:
        r_group.lpush(group_detect_queue_name, json.dumps(input_dict))
        print('success to save redis')
    except Exception:
        # Narrowed from a bare except; still best-effort by design —
        # callers inspect the boolean rather than handling exceptions.
        status = False
    return status
def submit_task(input_data):
    """Submit a group analysis task, enforcing a per-user concurrency limit.

    Rejects the submission when the user already has `task_max_count` or
    more tasks with status 0 (not yet computed). Otherwise, if the composed
    task id (submit_user + '-' + task_name) is unused, the task document is
    indexed into the group result ES index and queued for analysis.

    Debug prints and commented-out code from the original were removed.

    :param input_data: dict with 'task_name', 'submit_user', 'uid_list' and
        optionally 'task_max_count'.
    :return: the string 'more than limit' when the user limit is hit;
        0 when the task id is already taken (nothing submitted);
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: task id already exists, cannot submit
    task_name = input_data['task_name']
    submit_user = input_data['submit_user']
    task_id = submit_user + '-' + task_name
    # Per-user limit on concurrently computing tasks.
    try:
        task_max_count = input_data['task_max_count']
    except KeyError:
        # NOTE(review): a default of 0 makes the limit check below reject
        # every submission when no explicit limit is supplied — confirm
        # that callers always pass 'task_max_count'.
        task_max_count = 0
    query_body = {
        'query': {
            'filtered': {
                'filter': {
                    'bool': {
                        'must': [
                            {'term': {'submit_user': submit_user}},
                            {'term': {'status': 0}}
                        ]
                    }
                }
            }
        }
    }
    exist_compute_result = es_group_result.search(index=group_index_name, doc_type=group_index_type, body=query_body)['hits']['hits']
    if len(exist_compute_result) >= task_max_count:
        return 'more than limit'
    # The composed task id must be unused for the submission to proceed.
    try:
        es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_id)
    except Exception:
        # Document not found (or ES unreachable): the id is free.
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = submit_user
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        input_data['task_id'] = task_id
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_id, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
def submit_task(input_data):
    """Submit a group task with an inline uid_list (simplest variant).

    Registers the task document in ES (index 'group_result', type 'group')
    under its task_name and pushes the JSON payload onto the 'group_task'
    redis queue, but only when no document with that task_name exists yet.

    :param input_data: dict with at least 'task_name' and 'uid_list'.
    :return: 0 when the task name is already taken (nothing submitted),
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: name already exists, cannot submit
    task_name = input_data['task_name']
    try:
        es.get(index=index_name, doc_type=index_type, id=task_name)
    except Exception:
        # Document not found (or ES unreachable): the name is free.
        status = 1
    if status != 0:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        # Queue AFTER populating status/count so the queued payload matches
        # the indexed document (the original pushed before setting them).
        r.lpush('group_task', json.dumps(input_data))
        es.index(index='group_result', doc_type='group', id=task_name, body=input_data)
    return status
def submit_task(input_data):
    """Submit a group analysis task, enforcing a per-user concurrency limit.

    Rejects the submission when the user already has `task_max_count` or
    more tasks with status 0 (not yet computed). Otherwise, if the composed
    task id (submit_user + '-' + task_name) is unused, the task document is
    indexed into the group result ES index and queued for analysis.

    :param input_data: dict with 'task_name', 'submit_user', 'uid_list' and
        optionally 'task_max_count'.
    :return: the string 'more than limit' when the user limit is hit;
        0 when the task id is already taken (nothing submitted);
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: task id already exists, cannot submit
    task_name = input_data['task_name']
    submit_user = input_data['submit_user']
    task_id = submit_user + '-' + task_name
    # Per-user limit on concurrently computing tasks.
    try:
        task_max_count = input_data['task_max_count']
    except KeyError:
        # NOTE(review): a default of 0 makes the limit check below reject
        # every submission when no explicit limit is supplied — confirm
        # that callers always pass 'task_max_count'.
        task_max_count = 0
    query_body = {
        'query': {
            'filtered': {
                'filter': {
                    'bool': {
                        'must': [
                            {'term': {'submit_user': submit_user}},
                            {'term': {'status': 0}}
                        ]
                    }
                }
            }
        }
    }
    exist_compute_result = es_group_result.search(index=group_index_name, doc_type=group_index_type, body=query_body)['hits']['hits']
    if len(exist_compute_result) >= task_max_count:
        return 'more than limit'
    # The composed task id must be unused for the submission to proceed.
    try:
        es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_id)
    except Exception:
        # Document not found (or ES unreachable): the id is free.
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = submit_user
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        # NOTE(review): the original built {'task_information': ..., 'query_condition': ''}
        # but never used it; input_data itself is what gets indexed. Dropped the dead dict.
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_id, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status
def submit_task(input_data):
    """Submit a group analysis task with an inline uid_list.

    The task is indexed into the group result ES index under its task_name
    and then pushed onto the group analysis redis queue. Submission only
    happens when no document with the same task_name already exists and no
    'uid_file' key is present.

    :param input_data: dict with at least 'task_name' and 'uid_list'.
    :return: 0 when the task name is already taken (nothing submitted),
        1 when the task was accepted, indexed and queued.
    """
    status = 0  # 0: name already exists, cannot submit
    task_name = input_data['task_name']
    try:
        es_group_result.get(index=group_index_name, doc_type=group_index_type, id=task_name)
    except Exception:
        # Document not found (or ES unreachable): the name is free.
        status = 1
    if status != 0 and 'uid_file' not in input_data:
        input_data['status'] = 0  # mark the task as not yet computed
        input_data['count'] = len(input_data['uid_list'])
        input_data['task_type'] = 'analysis'
        input_data['submit_user'] = '******'
        input_data['detect_type'] = ''
        input_data['detect_process'] = ''
        # NOTE(review): the original built {'task_information': ..., 'query_condition': ''}
        # but never used it; input_data itself is what gets indexed. Dropped the dead dict.
        es_group_result.index(index=group_index_name, doc_type=group_index_type, id=task_name, body=input_data)
        r.lpush(group_analysis_queue_name, json.dumps(input_data))
    return status