def queue_failed_():
    """Print origin, id and stored traceback for every job in the failed queue."""
    log('Listing failed')
    conn = get_connection(host=WORKER_HOST)
    failed = get_queue(FAILED, connection=conn)
    for job in failed.jobs:
        print('Origin: %s' % job.origin)
        print('Id: %s' % job.get_id())
        # blank line between jobs for readability
        print(job.exc_info, end='\n\n')
def queue_retry_(chunk):
    """Drain the failed queue and re-enqueue its jobs onto their annotator
    queues, grouped into chunks of *chunk* ids.

    Job ids are collected (keyed by annotator) BEFORE the failed queue is
    emptied, so nothing is lost.
    """
    log('Retrying')
    conn = get_connection(host=WORKER_HOST)
    failed = get_queue(FAILED, connection=conn)
    ids = annotators_ids(failed.jobs)
    failed.empty()
    for annotator in ids:
        tracked = log_progress(ids[annotator], prefix=annotator)
        target = get_queue(annotator, connection=conn)
        for piece in group_chunks(tracked, size=chunk):
            enqueue(target, task, piece)
def queue_insert_(annotators, offset, count, chunk):
    """Read ids from the source index and enqueue them, in chunks of *chunk*,
    onto the queue of every annotator in *annotators*.

    `offset` skips that many index entries; `count` caps how many ids are
    taken (falsy count → unbounded, logged as -1).
    """
    log('Annotators: %s; offset: %d, count: %d, chunk: %d',
        ', '.join(annotators), offset, count or -1, chunk)
    db = get_db(host=WORKER_HOST)
    ids = read_index(db[SOURCE], offset)
    ids = log_progress(ids, total=count)
    ids = head(ids, count)
    chunks = group_chunks(ids, size=chunk)
    connection = get_connection(host=WORKER_HOST)
    queues = dict(get_queues(annotators, connection))
    # renamed loop var to chunk_ — the original reused `chunk` and shadowed
    # the size parameter (matches the chunk_/chunk naming used in queue_retry_)
    for chunk_ in chunks:
        for annotator in annotators:
            queue = queues[annotator]
            enqueue(queue, task, chunk_)
def queue_clear_():
    """Empty every annotator queue and the failed queue."""
    log('Clear queues')
    conn = get_connection(host=WORKER_HOST)
    for _name, target in get_queues(ANNOTATORS + [FAILED], conn):
        target.empty()
def queue_show_():
    """Display the current state of the worker queues."""
    log('Showing queues')
    show(get_connection(host=WORKER_HOST))