def get_error_queue_jobs(error_queue_name, sindex=0, eindex=-1):
    """Yield template-ready dicts for jobs stored in an error queue.

    Args:
        error_queue_name: name of the error queue to read.
        sindex: start index of the slice into the queue.
        eindex: end index of the slice.  NOTE(review): with the default
            ``-1`` a plain Python slice excludes the last element --
            confirm this matches the queue wrapper's range convention
            (Redis LRANGE would be inclusive).

    Yields:
        dict per job with pre-formatted fields (timestamps, server IP,
        JSON dump, quoted hash id) for display.
    """
    error_queue = ztq_core.get_error_queue(error_queue_name)
    workers_state = ztq_core.get_worker_state()
    # Hoisted out of the loop: the hash handle is the same for every
    # iteration, so fetch it once instead of per job.
    error_hash = ztq_core.get_error_hash(error_queue_name)
    for hash_key in error_queue[sindex:eindex]:
        error_job = error_hash[hash_key]
        tmp_job = {}
        tmp_job['json'] = json.dumps(error_job)
        tmp_job['_queue_name'] = error_queue_name
        worker_name = error_job['runtime']['worker']
        # If the worker is still registered, show its server IP;
        # otherwise fall back to the raw worker name.
        if worker_name in workers_state:
            tmp_job['_server'] = workers_state[worker_name]['ip']
        else:
            tmp_job['_server'] = worker_name
        tmp_job['_created'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('create', 0))
        tmp_job['_start'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('start', 0))
        tmp_job['_end'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('end', 0))
        tmp_job['_reason'] = ''.join(error_job['runtime']['reason'])
        tmp_job['_file'] = error_job['kw'].get('comment',
                                               error_job['kw'].get('path', ''))
        tmp_job['_error_mime'] = error_job['process'].get('to_mime', '')
        tmp_job['_detail'] = pprint.pformat(error_job)
        tmp_job['hash_id'] = urllib.quote(hash_key)
        yield tmp_job
def get_taskqueues_list():
    """Yield a status dict per task queue, sorted by error count (desc).

    Each yielded dict carries the queue name, task/error lengths,
    timestamps of the first pending task and the oldest error, the
    dispatch weight, the per-worker thread configuration and the
    buffer-queue length.
    """
    dispatcher_config = ztq_core.get_dispatcher_config()
    queue_weight = dispatcher_config['queue_weight']
    queues_list = ztq_core.get_queue_config()
    # Sort queues by the size of their error queue, largest first.
    sort_queue_name = {}
    for queue_name, queue_config in queues_list.items():
        sort_queue_name[queue_name] = len(ztq_core.get_error_queue(queue_name))
    for queue_name in sorted(sort_queue_name,
                             key=lambda x: sort_queue_name[x],
                             reverse=True):
        task_queue = {}
        task_queue['name'] = queue_name
        queue = ztq_core.get_task_queue(queue_name)
        # Task count / error count.
        task_queue['length'] = len(queue)
        task_queue['error_length'] = sort_queue_name[queue_name]
        # Creation time of the first pending task (empty string if none).
        task_queue['error_end'] = task_queue['first'] = ''
        first_job = queue[0]
        first_job = ztq_core.get_task_hash(queue_name).get(first_job)
        if first_job:
            task_queue['first'] = datetime.datetime.fromtimestamp(
                first_job['runtime'].get('create', 0))
        # Creation time of the oldest error job (head of the error queue).
        error_first_job = ztq_core.get_error_queue(queue_name)[0]
        error_first_job = ztq_core.get_error_hash(queue_name).get(
            error_first_job)
        if error_first_job:
            task_queue['error_end'] = datetime.datetime.fromtimestamp(
                error_first_job['runtime'].get('create', 0))
        task_queue['weight'] = queue_weight.get(queue_name, 0)
        # Collect worker thread configuration for this queue.
        workers_config = ztq_core.get_worker_config()
        task_queue['from_right'] = True
        # BUG FIX: initialize the workers list ONCE, before iterating the
        # workers.  The original reset it inside the loop, so only the
        # last worker's threads survived.
        task_queue['workers'] = []
        for worker_name, worker_config in workers_config.items():
            for config in worker_config.get(queue_name, []):
                task_queue['workers'].append(
                    [worker_name + ':', config['interval']])
                if 'from_right' in config:
                    task_queue['from_right'] = config['from_right']
        task_queue['buffer_length'] = len(
            ztq_core.get_buffer_queue(queue_name))
        yield task_queue
def del_all_error_for_queue(request):
    """Delete every job in the error queue named by the request's id.

    Removes both the error-queue list and its companion hash from
    redis, then redirects to the task-queue overview page.
    """
    queue_id = request.matchdict['id']
    redis_client = ztq_core.get_redis()
    containers = (ztq_core.get_error_hash(queue_id),
                  ztq_core.get_error_queue(queue_id))
    for container in containers:
        redis_client.delete(container.name)
    return HTTPFound(location='/taskqueues')
def del_all_error_for_queue(request):
    """Drop all tasks from the error queue identified by the URL id.

    Both the redis list backing the error queue and the hash holding
    the job bodies are deleted; the client is then redirected back to
    '/taskqueues'.
    """
    queue_id = request.matchdict['id']
    hash_name = ztq_core.get_error_hash(queue_id).name
    queue_name = ztq_core.get_error_queue(queue_id).name
    connection = ztq_core.get_redis()
    connection.delete(queue_name)
    connection.delete(hash_name)
    return HTTPFound(location='/taskqueues')
def get_taskqueues_list():
    """Yield one status dict per task queue, ordered by error count.

    Fields: queue name, task/error counts, create-time of the first
    pending task, create-time of the oldest error, dispatch weight,
    worker thread settings and buffer-queue length.
    """
    dispatcher_config = ztq_core.get_dispatcher_config()
    queue_weight = dispatcher_config['queue_weight']
    queues_list = ztq_core.get_queue_config()
    # Rank queues by error-queue size, descending.
    sort_queue_name = {}
    for queue_name, queue_config in queues_list.items():
        sort_queue_name[queue_name] = len(ztq_core.get_error_queue(queue_name))
    for queue_name in sorted(sort_queue_name,
                             key=lambda x: sort_queue_name[x],
                             reverse=True):
        task_queue = {}
        task_queue['name'] = queue_name
        queue = ztq_core.get_task_queue(queue_name)
        # Task count / error count.
        task_queue['length'] = len(queue)
        task_queue['error_length'] = sort_queue_name[queue_name]
        # Timestamp of the first queued task, '' when there is none.
        task_queue['error_end'] = task_queue['first'] = ''
        first_job = queue[0]
        first_job = ztq_core.get_task_hash(queue_name).get(first_job)
        if first_job:
            task_queue['first'] = datetime.datetime.fromtimestamp(
                first_job['runtime'].get('create', 0))
        # Timestamp of the oldest error job (head of the error queue).
        error_first_job = ztq_core.get_error_queue(queue_name)[0]
        error_first_job = ztq_core.get_error_hash(queue_name).get(
            error_first_job)
        if error_first_job:
            task_queue['error_end'] = datetime.datetime.fromtimestamp(
                error_first_job['runtime'].get('create', 0))
        task_queue['weight'] = queue_weight.get(queue_name, 0)
        # Worker thread configuration for this queue.
        workers_config = ztq_core.get_worker_config()
        task_queue['from_right'] = True
        # BUG FIX: the workers list must be created before the worker
        # loop; the original recreated it per worker, discarding all
        # but the last worker's entries.
        task_queue['workers'] = []
        for worker_name, worker_config in workers_config.items():
            for config in worker_config.get(queue_name, []):
                task_queue['workers'].append(
                    [worker_name + ':', config['interval']])
                if 'from_right' in config:
                    task_queue['from_right'] = config['from_right']
        task_queue['buffer_length'] = len(
            ztq_core.get_buffer_queue(queue_name))
        yield task_queue
def get_error_queue_jobs(error_queue_name, sindex=0, eindex=-1):
    """Yield display-ready dicts for each job in an error queue.

    Args:
        error_queue_name: name of the error queue to inspect.
        sindex: slice start into the queue.
        eindex: slice end.  NOTE(review): with the default ``-1`` a
            Python slice drops the final element -- verify against the
            queue wrapper's slicing convention.

    Yields:
        dict with formatted timestamps, resolved server IP, a JSON dump
        and a URL-quoted hash id per error job.
    """
    error_queue = ztq_core.get_error_queue(error_queue_name)
    workers_state = ztq_core.get_worker_state()
    # Loop-invariant: fetch the error hash once, not once per job.
    error_hash = ztq_core.get_error_hash(error_queue_name)
    for hash_key in error_queue[sindex:eindex]:
        error_job = error_hash[hash_key]
        tmp_job = {}
        tmp_job['json'] = json.dumps(error_job)
        tmp_job['_queue_name'] = error_queue_name
        worker_name = error_job['runtime']['worker']
        # Resolve the worker to a server IP when it is still known;
        # otherwise display the worker name itself.
        if worker_name in workers_state:
            tmp_job['_server'] = workers_state[worker_name]['ip']
        else:
            tmp_job['_server'] = worker_name
        tmp_job['_created'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('create', 0))
        tmp_job['_start'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('start', 0))
        tmp_job['_end'] = datetime.datetime.fromtimestamp(
            error_job['runtime'].get('end', 0))
        tmp_job['_reason'] = ''.join(error_job['runtime']['reason'])
        tmp_job['_file'] = error_job['kw'].get('comment',
                                               error_job['kw'].get('path', ''))
        tmp_job['_error_mime'] = error_job['process'].get('to_mime', '')
        tmp_job['_detail'] = pprint.pformat(error_job)
        tmp_job['hash_id'] = urllib.quote(hash_key)
        yield tmp_job