def pusher(task_queue: Queue, event: Event, broker: Broker = None):
    """
    Pulls tasks off the broker and puts them in the task queue
    :type broker: brokers.Broker
    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(_(f"{current_process().name} pushing tasks at {current_process().pid}"))
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e, traceback.format_exc())
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = SignedPackage.loads(task[1])
                except (TypeError, BadSignature) as e:
                    logger.error(e, traceback.format_exc())
                    broker.fail(ack_id)
                    continue
                task["ack_id"] = ack_id
                task_queue.put(task)
            logger.debug(_(f"queueing from {broker.list_key}"))
        if event.is_set():
            break
    logger.info(_(f"{current_process().name} stopped pushing tasks"))
def monitor(result_queue: Queue, broker: Broker = None):
    """
    Gets finished tasks from the result queue and saves them to Django
    :type broker: brokers.Broker
    :type result_queue: multiprocessing.Queue
    """
    if not broker:
        broker = get_broker()
    name = current_process().name
    logger.info(_(f"{name} monitoring at {current_process().pid}"))
    for task in iter(result_queue.get, "STOP"):
        # save the result
        if task.get("cached", False):
            save_cached(task, broker)
        else:
            save_task(task, broker)
        # acknowledge result
        ack_id = task.pop("ack_id", False)
        if ack_id and (task["success"] or task.get("ack_failure", False)):
            broker.acknowledge(ack_id)
        # log the result
        if task["success"]:
            # log success
            logger.info(_(f"Processed [{task['name']}]"))
        else:
            # log failure
            logger.error(_(f"Failed [{task['name']}] - {task['result']}"))
    logger.info(_(f"{name} stopped monitoring results"))
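# A minimal sketch, assuming a Django project with django-q configured, of how
# pusher and monitor can be wired together with multiprocessing primitives.
# The import path django_q.cluster and this exact wiring are assumptions here;
# in a running cluster the sentinel process owns this plumbing.
from multiprocessing import Event, Process, Queue

from django_q.brokers import get_broker
from django_q.cluster import monitor, pusher

broker = get_broker()
task_queue = Queue()    # pusher -> workers
result_queue = Queue()  # workers -> monitor
stop_event = Event()

push_process = Process(target=pusher, args=(task_queue, stop_event, broker))
monitor_process = Process(target=monitor, args=(result_queue, broker))
push_process.start()
monitor_process.start()

# ... worker processes would consume task_queue and put finished tasks on result_queue ...

stop_event.set()          # pusher checks this flag after each dequeue and exits
push_process.join()
result_queue.put("STOP")  # sentinel value that ends monitor's iter() loop
monitor_process.join()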
def save_task(task, broker: Broker):
    """
    Saves the task package to Django or the cache
    :param task: the task package
    :type broker: brokers.Broker
    """
    # SAVE LIMIT < 0 : Don't save success
    if not task.get("save", Conf.SAVE_LIMIT >= 0) and task["success"]:
        return
    # enqueues next in a chain
    if task.get("chain", None):
        django_q.tasks.async_chain(
            task["chain"],
            group=task["group"],
            cached=task["cached"],
            sync=task["sync"],
            broker=broker,
        )
    # SAVE LIMIT > 0: Prune database, SAVE_LIMIT 0: No pruning
    close_old_django_connections()
    try:
        with db.transaction.atomic():
            last = Success.objects.select_for_update().last()
            if task["success"] and 0 < Conf.SAVE_LIMIT <= Success.objects.count():
                last.delete()
            # check if this task has previous results
            if Task.objects.filter(id=task["id"], name=task["name"]).exists():
                existing_task = Task.objects.get(id=task["id"], name=task["name"])
                # only update the result if it hasn't succeeded yet
                if not existing_task.success:
                    existing_task.stopped = task["stopped"]
                    existing_task.result = task["result"]
                    existing_task.success = task["success"]
                    existing_task.attempt_count = existing_task.attempt_count + 1
                    existing_task.save()
                if Conf.MAX_ATTEMPTS > 0 and existing_task.attempt_count >= Conf.MAX_ATTEMPTS:
                    broker.acknowledge(task["ack_id"])
            else:
                Task.objects.create(
                    id=task["id"],
                    name=task["name"],
                    func=task["func"],
                    hook=task.get("hook"),
                    args=task["args"],
                    kwargs=task["kwargs"],
                    started=task["started"],
                    stopped=task["stopped"],
                    result=task["result"],
                    group=task.get("group"),
                    success=task["success"],
                    attempt_count=1,
                )
    except Exception as e:
        logger.error(e)
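# Illustrative shape of the task package that save_task consumes. All values
# below are made up; optional keys ("save", "chain", "cached", "sync") are left
# out because save_task reads them with .get() defaults.
from datetime import datetime, timezone

task = {
    "id": "b13e4a3b2c6d4f6e9a1b2c3d4e5f6a7b",  # hypothetical task id
    "name": "misty-river-two-seven",
    "func": "math.copysign",
    "hook": None,
    "args": (1, -1),
    "kwargs": {},
    "started": datetime.now(timezone.utc),
    "stopped": datetime.now(timezone.utc),
    "result": -1.0,
    "group": None,
    "success": True,
    "ack_id": "broker-receipt-handle",  # attached by the pusher before queueing
}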
def get_all(broker: Broker = None) -> list:
    """
    Get the status for all currently running clusters with the same prefix and secret key.
    :return: list of type Stat
    """
    if not broker:
        broker = get_broker()
    stats = []
    packs = broker.get_stats(f"{Conf.Q_STAT}:*") or []
    for pack in packs:
        try:
            stats.append(SignedPackage.loads(pack))
        except BadSignature:
            continue
    return stats
def get(pid: int, cluster_id: str, broker: Broker = None) -> Union[Status, None]:
    """
    Gets the current status for the cluster
    :param pid: process id of the cluster
    :param cluster_id: id of the cluster
    :param broker: an optional broker instance
    :return: Stat or Status
    """
    if not broker:
        broker = get_broker()
    pack = broker.get_stat(Stat.get_key(cluster_id))
    if pack:
        try:
            return SignedPackage.loads(pack)
        except BadSignature:
            return None
    return Status(pid=pid, cluster_id=cluster_id)
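# A small usage sketch of the status helpers above. The attribute names on the
# returned Stat/Status objects (cluster_id, status) are assumptions here, not
# taken from the functions themselves.
for stat in get_all():
    print(stat.cluster_id, stat.status)

# Look up a single cluster; returns a bare Status when nothing is cached,
# or None when the signature check fails.
status = get(pid=12345, cluster_id="0b1c2d3e")
if status:
    print(status.cluster_id)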
def test_broker():
    broker = Broker()
    broker.enqueue('test')
    broker.dequeue()
    broker.queue_size()
    broker.lock_size()
    broker.purge_queue()
    broker.delete('id')
    broker.delete_queue()
    broker.acknowledge('test')
    broker.ping()
    broker.info()
    # stats
    assert broker.get_stat('test_1') is None
    broker.set_stat('test_1', 'test', 3)
    assert broker.get_stat('test_1') == 'test'
    assert broker.get_stats('test:*')[0] == 'test'
    # stats with no cache
    Conf.CACHE = 'not_configured'
    broker.cache = broker.get_cache()
    assert broker.get_stat('test_1') is None
    broker.set_stat('test_1', 'test', 3)
    assert broker.get_stat('test_1') is None
    assert broker.get_stats('test:*') is None
    Conf.CACHE = 'default'
def test_broker(monkeypatch):
    broker = Broker()
    broker.enqueue("test")
    broker.dequeue()
    broker.queue_size()
    broker.lock_size()
    broker.purge_queue()
    broker.delete("id")
    broker.delete_queue()
    broker.acknowledge("test")
    broker.ping()
    broker.info()
    # stats
    assert broker.get_stat("test_1") is None
    broker.set_stat("test_1", "test", 3)
    assert broker.get_stat("test_1") == "test"
    assert broker.get_stats("test:*")[0] == "test"
    # stats with no cache
    monkeypatch.setattr(Conf, "CACHE", "not_configured")
    broker.cache = broker.get_cache()
    assert broker.get_stat("test_1") is None
    broker.set_stat("test_1", "test", 3)
    assert broker.get_stat("test_1") is None
    assert broker.get_stats("test:*") is None
def test_broker(monkeypatch):
    broker = Broker()
    broker.enqueue('test')
    broker.dequeue()
    broker.queue_size()
    broker.lock_size()
    broker.purge_queue()
    broker.delete('id')
    broker.delete_queue()
    broker.acknowledge('test')
    broker.ping()
    broker.info()
    broker.close()
    # stats
    assert broker.get_stat('test_1') is None
    broker.set_stat('test_1', 'test', 3)
    assert broker.get_stat('test_1') == 'test'
    assert broker.get_stats('test:*')[0] == 'test'
    # stats with no cache
    monkeypatch.setattr(Conf, 'CACHE', 'not_configured')
    broker.cache = broker.get_cache()
    assert broker.get_stat('test_1') is None
    broker.set_stat('test_1', 'test', 3)
    assert broker.get_stat('test_1') is None
    assert broker.get_stats('test:*') is None
def test_broker():
    broker = Broker()
    broker.enqueue("test")
    broker.dequeue()
    broker.queue_size()
    broker.lock_size()
    broker.purge_queue()
    broker.delete("id")
    broker.delete_queue()
    broker.acknowledge("test")
    broker.ping()
    broker.info()
    # stats
    assert broker.get_stat("test_1") is None
    broker.set_stat("test_1", "test", 3)
    assert broker.get_stat("test_1") == "test"
    assert broker.get_stats("test:*")[0] == "test"
    # stats with no cache
    Conf.CACHE = "not_configured"
    broker.cache = broker.get_cache()
    assert broker.get_stat("test_1") is None
    broker.set_stat("test_1", "test", 3)
    assert broker.get_stat("test_1") is None
    assert broker.get_stats("test:*") is None
    Conf.CACHE = "default"