def delete_from_global_queue(book_key):
    """Delete entry from global-queue.

    Parses ``library`` and ``book_id`` out of *book_key* (expected shape
    ``bub_<source>:<library>:<book_id>``) and removes the matching JSON
    entry from the global job queue.
    """
    global_queue = redis_py.Queue(keys.redis_key1 + "global")
    match = re.search('bub_.+:(.+):(.+)', book_key)
    library, book_id = match.group(1), match.group(2)
    # Key order (library, book_id) must match the order used when the
    # entry was enqueued, since removal compares serialized strings.
    entry = json.dumps(dict(library=library, book_id=book_id))
    global_queue.remove(entry)
def get_id_from_another_worker(calling_worker_key, workers=3):
    """Steal one job from the largest sibling worker queue.

    Scans every worker queue except the caller's own, picks the one with
    the most pending items, and pops a single item from it under that
    queue's pop-lock.

    :param calling_worker_key: queue key of the requesting worker; its
        own queue is skipped so it never steals from itself.
    :param workers: total number of workers whose queues are scanned.
    :return: the stolen item, or False when all other queues are empty
        (or the pop lost a race with another consumer).
    """
    q_size_list = []
    q_list = []
    for i in range(1, workers + 1):
        worker_queue_key = queue_key(i)
        if calling_worker_key == worker_queue_key:
            # Placeholders keep list positions aligned with worker
            # numbers while guaranteeing our own queue is never chosen
            # (size 0 can never be the max when max_size > 0).
            q_list.append(0)
            q_size_list.append(0)
            continue
        q = redis_py.Queue(worker_queue_key)
        q_list.append(q)
        q_size_list.append(q.size())
    max_size = max(q_size_list)
    if max_size == 0:
        return False
    largest_queue_index = q_size_list.index(max_size)
    largest_queue = q_list[largest_queue_index]
    # BUG FIX: list indexes are 0-based but worker numbers start at 1
    # (the loop above builds keys with queue_key(i) for i = 1..workers),
    # so the lock must use queue_key(index + 1). The old code locked the
    # wrong worker's queue, leaving the chosen queue unprotected.
    Lock = redis_py.Lock(queue_key(largest_queue_index + 1) + ":pop")
    Lock.acquire()
    try:
        item = largest_queue.pop()
        if item != False:
            largest_queue.remove(item[0])
            return item[0]
        return False
    finally:
        # Release even if pop/remove raises, so the queue is never left
        # locked forever (the original leaked the lock on exception).
        Lock.release()
def check_if_upload_ready():
    """Poll the OCR-waitlist queue forever and notify requesters when done.

    Runs an infinite loop: drains book keys from the waitlist queue, checks
    each book's Internet Archive status, and — once the item is OCR'd (or
    declared un-OCRable) — emails the users who requested it and clears the
    book's request bookkeeping.

    Never returns; intended to run as a long-lived worker process.
    """
    redis = redis_py.Redis()
    redis_key2 = keys.redis_key2
    lock_key1 = keys.lock_key1
    q = redis_py.Queue(redis_key2)
    Lock = redis_py.Lock(lock_key1)
    while True:
        # NOTE(review): pop(-1) presumably drains the whole queue at once
        # (book_keys is iterated below) — confirm against redis_py.Queue.
        book_keys = q.pop(-1)
        if book_keys is False:
            # Queue empty: back off briefly before polling again.
            time.sleep(2)
            continue
        for book_key in book_keys:
            # 0 = not ready, 1 = OCR text present, 2 = language not OCRable.
            uploaded = 0
            ia_identifier = redis.get(book_key + ":ia_identifier")
            ia_identifier = json.loads(ia_identifier)
            if isinstance(ia_identifier, list):
                # A list identifier is treated as immediately ready (no
                # per-item metadata check) — multi-part upload path.
                # Critical section: read-and-delete the requester set so
                # two workers can't email the same users twice.
                Lock.acquire(timeout=60 * 2)
                users_request = redis_py.smembers(book_key + ":requests",
                                                 True)
                if users_request != None:
                    redis.delete(book_key + ":requests")
                    remove_request_db(users_request, book_key)
                    remove_from_db(users_request)
                Lock.release()
                q.remove(book_key)
                if users_request != None:
                    send_email(users_request, ia_identifier,
                               book_key=book_key)
                    email_progress_key = book_key + ":email_progress"
                    redis_py.set(email_progress_key, 1, True)
                delete_from_global_queue(book_key)
                continue
            else:
                r = get_ia_metadata(ia_identifier)
                if 'metadata' in r.keys():
                    if 'ocr' in r['metadata'].keys():
                        if r['metadata'][
                                'ocr'] == 'language not currently OCRable':
                            uploaded = 2
                # Presence of DjVu text/XML anywhere in the metadata dump
                # signals that OCR output exists.
                if 'DjVuTXT' in str(r) or 'Djvu XML' in str(r):
                    uploaded = 1
                if uploaded != 0:
                    # Same read-and-delete critical section as above.
                    Lock.acquire(timeout=60 * 2)
                    users_request = redis_py.smembers(book_key + ":requests",
                                                      True)
                    if users_request != None:
                        redis.delete(book_key + ":requests")
                        remove_request_db(users_request, book_key)
                        remove_from_db(users_request)
                    Lock.release()
                    q.remove(book_key)
                    if users_request != None:
                        send_email(users_request, str(ia_identifier))
                        email_progress_key = book_key + ":email_progress"
                        redis_py.set(email_progress_key, 1, True)
                    delete_from_global_queue(book_key)
                    OCR_progress_key = book_key + ":OCR_progress"
                    redis_py.set(OCR_progress_key, 1, True)
                else:
                    # Not ready yet; leave the key in the queue for the
                    # next pass (it was popped, so it re-enters via the
                    # producer — TODO confirm re-queue behavior).
                    continue
        time.sleep(2)
def get_shortest_queue(workers=3):
    """Return the mass-worker queue with the fewest pending items."""
    # 2 mass-workers running
    queues = []
    sizes = []
    for worker_no in range(1, workers + 1):
        # Worker 1 historically has no numeric suffix in its queue key.
        if worker_no == 1:
            key = "%s:mass_worker" % (redis_key4)
        else:
            key = "%s:mass_worker_%s" % (redis_key4, worker_no)
        queue = redis_py.Queue(key)
        queues.append(queue)
        sizes.append(queue.size())
    shortest = sizes.index(min(sizes))
    return queues[shortest]
def __init__(self, redis_key):
    """Bind this worker to its Redis job queue and the shared pop-lock."""
    pop_lock_key = mass_worker_key + ":pop"
    self.queue = redis_py.Queue(redis_key)
    self.Lock = redis_py.Lock(pop_lock_key)
def __init__(self, redis_key):
    """Wrap the Redis list at *redis_key* as this object's job queue."""
    self.queue = redis_py.Queue(redis_key)
def submit_OCR_wait_job(self, value):
    """Add book-request to OCR-waitlist queue.

    Persists the IA identifier for this request, then enqueues the
    book key on the shared OCR waitlist.
    """
    self.save_ia_identifier(value)
    waitlist = redis_py.Queue(keys.redis_key2)
    waitlist.add(self.book_key)