def add_task(self, company_id: str, queue_id: str, task_id: str) -> dict:
    """
    Add the task to the queue and return the queue update results
    :raise errors.bad_request.TaskAlreadyQueued: if the task is already in the queue
    :raise errors.bad_request.InvalidQueueOrTaskNotQueued: if the queue update operation failed
    """
    with translate_errors_context():
        queue = self.get_by_id(company_id=company_id, queue_id=queue_id)
        if any(e.task == task_id for e in queue.entries):
            raise errors.bad_request.TaskAlreadyQueued(task=task_id)

        self.metrics.log_queue_metrics_to_es(company_id=company_id, queues=[queue])

        entry = Entry(added=datetime.utcnow(), task=task_id)
        query = dict(id=queue_id, company=company_id)
        res = Queue.objects(entries__task__ne=task_id, **query).update_one(
            push__entries=entry, last_update=datetime.utcnow(), upsert=False
        )
        if not res:
            raise errors.bad_request.InvalidQueueOrTaskNotQueued(task=task_id, **query)

        return res
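# Usage sketch (not part of the original module, shown for illustration only):
# how a caller might add a task and handle the duplicate-entry case. The
# `queue_bll` argument and the helper name are assumptions for the example.
def enqueue_task_once(queue_bll, company_id: str, queue_id: str, task_id: str) -> bool:
    try:
        queue_bll.add_task(company_id=company_id, queue_id=queue_id, task_id=task_id)
        return True
    except errors.bad_request.TaskAlreadyQueued:
        # the entries__task__ne filter in add_task makes the push race-safe, so
        # a concurrent duplicate surfaces as an exception rather than a second entry
        return False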
def get_statistics(cls, company_id: str) -> dict:
    """ Returns a statistics report per company """
    return {
        "time": datetime.utcnow(),
        "company_id": company_id,
        "server": {
            "version": current_version,
            "deployment": get_deployment_type(),
            "uuid": get_server_uuid(),
            "queues": {"count": Queue.objects(company=company_id).count()},
            "users": {"count": User.objects(company=company_id).count()},
            "resources": cls.threads.resource_monitor.get_stats(),
            "experiments": next(
                iter(cls._get_experiments_stats(company_id).values()), {}
            ),
        },
        "agents": cls._get_agents_statistics(company_id),
    }
def reposition_task(
    self,
    company_id: str,
    queue_id: str,
    task_id: str,
    pos_func: Callable[[int], int],
) -> int:
    """
    Moves the task in the queue to the position calculated by pos_func
    Returns the updated task position in the queue
    """
    with translate_errors_context():
        queue = self.get_queue_with_task(
            company_id=company_id, queue_id=queue_id, task_id=task_id
        )

        position = next(i for i, e in enumerate(queue.entries) if e.task == task_id)
        new_position = pos_func(position)
        if new_position != position:
            entry = queue.entries[position]
            query = dict(id=queue_id, company=company_id)
            updated = Queue.objects(entries__task=task_id, **query).update_one(
                pull__entries=entry, last_update=datetime.utcnow()
            )
            if not updated:
                raise errors.bad_request.RemovedDuringReposition(task=task_id, **query)

            inst = {"$push": {"entries": {"$each": [entry.to_proper_dict()]}}}
            if new_position >= 0:
                inst["$push"]["entries"]["$position"] = new_position
            res = Queue.objects(entries__task__ne=task_id, **query).update_one(__raw__=inst)
            if not res:
                raise errors.bad_request.FailedAddingDuringReposition(task=task_id, **query)

        return new_position
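# Illustrative sketch (assumption, not existing API): how pos_func drives
# reposition_task. A negative result appends the entry at the tail (no
# $position modifier is added), while 0 pushes it to the head of the queue.
def move_task_to_front(queue_bll, company_id: str, queue_id: str, task_id: str) -> int:
    return queue_bll.reposition_task(
        company_id, queue_id, task_id, pos_func=lambda _current: 0
    )


def move_task_to_back(queue_bll, company_id: str, queue_id: str, task_id: str) -> int:
    return queue_bll.reposition_task(
        company_id, queue_id, task_id, pos_func=lambda _current: -1
    )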
def register_worker(
    self,
    company_id: str,
    user_id: str,
    worker: str,
    ip: str = "",
    queues: Sequence[str] = None,
    timeout: int = 0,
    tags: Sequence[str] = None,
) -> WorkerEntry:
    """
    Register a worker
    :param company_id: worker's company ID
    :param user_id: user ID under which this worker is running
    :param worker: worker ID
    :param ip: the real ip of the worker
    :param queues: queues reported as being monitored by the worker
    :param timeout: registration expiration timeout in seconds
    :param tags: a list of tags for this worker
    :raise bad_request.InvalidUserId: in case the calling user or company does not exist
    :return: worker entry instance
    """
    key = WorkerBLL._get_worker_key(company_id, user_id, worker)

    timeout = timeout or DEFAULT_TIMEOUT
    queues = queues or []

    with translate_errors_context():
        query = dict(id=user_id, company=company_id)
        user = User.objects(**query).only("id", "name").first()
        if not user:
            raise bad_request.InvalidUserId(**query)

        company = Company.objects(id=company_id).only("id", "name").first()
        if not company:
            raise server_error.InternalError("invalid company", company=company_id)

        queue_objs = Queue.objects(company=company_id, id__in=queues).only("id")
        if len(queue_objs) < len(queues):
            invalid = set(queues).difference(q.id for q in queue_objs)
            raise bad_request.InvalidQueueId(ids=invalid)

        now = datetime.utcnow()
        entry = WorkerEntry(
            key=key,
            id=worker,
            user=user.to_proper_dict(),
            company=company.to_proper_dict(),
            ip=ip,
            queues=queues,
            register_time=now,
            register_timeout=timeout,
            last_activity_time=now,
            tags=tags,
        )

        self.redis.setex(key, timedelta(seconds=timeout), entry.to_json())

        return entry
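# Registration sketch (assumption, not original code): registering a worker
# that monitors a single queue. The entry is stored in redis under a per-worker
# key with a TTL equal to `timeout`, so a worker that stops re-registering
# simply expires. `worker_bll` is an assumed WorkerBLL instance.
def register_agent(worker_bll, company_id: str, user_id: str, worker_id: str, queue_id: str) -> WorkerEntry:
    return worker_bll.register_worker(
        company_id=company_id,
        user_id=user_id,
        worker=worker_id,
        queues=[queue_id],
        timeout=600,  # seconds; a value of 0 falls back to DEFAULT_TIMEOUT
    )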
def get_queue_with_task(cls, company_id: str, queue_id: str, task_id: str) -> Queue:
    with translate_errors_context():
        query = dict(id=queue_id, company=company_id)
        queue = Queue.objects(entries__task=task_id, **query).first()
        if not queue:
            raise errors.bad_request.InvalidQueueOrTaskNotQueued(task=task_id, **query)

        return queue
def _ensure_default_queue(company):
    """
    If no queue is present for the company then create a new one and mark it as a default
    """
    queue = Queue.objects(company=company).only("id").first()
    if queue:
        return

    QueueBLL.create(company, name="default", system_tags=["default"])
def get_next_task(self, company_id: str, queue_id: str) -> Optional[Entry]:
    """
    Atomically pop and return the first task from the queue (or None)
    :raise errors.bad_request.InvalidQueueId: if the queue does not exist
    """
    with translate_errors_context():
        query = dict(id=queue_id, company=company_id)
        queue = Queue.objects(**query).modify(pop__entries=-1, upsert=False)
        if not queue:
            raise errors.bad_request.InvalidQueueId(**query)

        self.metrics.log_queue_metrics_to_es(company_id, queues=[queue])

        if not queue.entries:
            return

        try:
            Queue.objects(**query).update(last_update=datetime.utcnow())
        except Exception:
            log.exception("Error while updating Queue.last_update")

        return queue.entries[0]
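# Minimal consumption sketch (assumption, for illustration): draining a queue
# with get_next_task. Since modify() returns the pre-update document, the
# entries[0] returned above is exactly the entry popped from the queue head.
def drain_queue(queue_bll, company_id: str, queue_id: str) -> list:
    task_ids = []
    while True:
        entry = queue_bll.get_next_task(company_id=company_id, queue_id=queue_id)
        if entry is None:
            break  # the queue exists but is now empty
        task_ids.append(entry.task)
    return task_ids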
def get_default(self, company_id: str) -> Queue:
    """
    Get the default queue
    :raise errors.bad_request.NoDefaultQueue: if no default queue is found
    :raise errors.bad_request.MultipleDefaultQueues: if more than one default queue is found
    """
    with translate_errors_context():
        res = Queue.objects(company=company_id, system_tags="default").only("id", "name")
        if not res:
            raise errors.bad_request.NoDefaultQueue()
        if len(res) > 1:
            raise errors.bad_request.MultipleDefaultQueues(
                queues=tuple(r.id for r in res)
            )

        return res.first()
def get_by_id(
    self, company_id: str, queue_id: str, only: Optional[Sequence[str]] = None
) -> Queue:
    """
    Get queue by id
    :raise errors.bad_request.InvalidQueueId: if the queue is not found
    """
    with translate_errors_context():
        query = dict(id=queue_id, company=company_id)
        qs = Queue.objects(**query)
        if only:
            qs = qs.only(*only)
        queue = qs.first()
        if not queue:
            raise errors.bad_request.InvalidQueueId(**query)

        return queue
def remove_task(self, company_id: str, queue_id: str, task_id: str) -> int:
    """
    Removes the task from the queue and returns the number of removed items
    :raise errors.bad_request.InvalidQueueOrTaskNotQueued: if the task is not found in the queue
    """
    with translate_errors_context():
        queue = self.get_queue_with_task(
            company_id=company_id, queue_id=queue_id, task_id=task_id
        )

        self.metrics.log_queue_metrics_to_es(company_id, queues=[queue])

        entries_to_remove = [e for e in queue.entries if e.task == task_id]
        query = dict(id=queue_id, company=company_id)
        res = Queue.objects(entries__task=task_id, **query).update_one(
            pull_all__entries=entries_to_remove, last_update=datetime.utcnow()
        )
        return len(entries_to_remove) if res else 0
def _log_current_metrics(self, company_id: str, queue_ids: Sequence[str] = None):
    query = dict(company=company_id)
    if queue_ids:
        query["id__in"] = list(queue_ids)

    queues = Queue.objects(**query)
    self.log_queue_metrics_to_es(company_id, queues=list(queues))