def sig_handler(self, signum, frame):
    logger.debug(
        _(
            f'{current_process().name} got signal {Conf.SIGNAL_NAMES.get(signum, "UNKNOWN")}'
        )
    )
    self.stop()
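# Usage sketch (an illustration, not part of the snippet above): sig_handler is an
# instance method, so a cluster-like object would register it with the standard
# signal module during start-up so that SIGTERM/SIGINT trigger a clean stop.
# The helper name and the `cluster` object are assumptions for demonstration.
import signal

def _register_signals(cluster):
    # cluster is assumed to expose sig_handler(signum, frame) and stop()
    signal.signal(signal.SIGTERM, cluster.sig_handler)
    signal.signal(signal.SIGINT, cluster.sig_handler)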
def async(func, *args, **kwargs):
    """Send a task to the cluster."""
    # get options from the q_options dict or directly from kwargs
    options = kwargs.pop('q_options', kwargs)
    hook = options.pop('hook', None)
    list_key = options.pop('list_key', Conf.Q_LIST)
    redis = options.pop('redis', redis_client)
    sync = options.pop('sync', False)
    group = options.pop('group', None)
    save = options.pop('save', None)
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1], 'name': tag[0],
            'func': func,
            'args': args,
            'kwargs': kwargs,
            'started': timezone.now()}
    # add optionals
    if hook:
        task['hook'] = hook
    if group:
        task['group'] = group
    if save is not None:
        task['save'] = save
    # sign it
    pack = signing.SignedPackage.dumps(task)
    if sync or Conf.SYNC:
        return _sync(pack)
    # push it
    redis.rpush(list_key, pack)
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def pusher(task_queue: Queue, event: Event, broker: Broker = None):
    """
    Pulls tasks off the broker and puts them in the task queue.

    :type broker: Broker
    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(_(f"{current_process().name} pushing tasks at {current_process().pid}"))
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e, traceback.format_exc())
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = SignedPackage.loads(task[1])
                except (TypeError, BadSignature) as e:
                    logger.error(e, traceback.format_exc())
                    broker.fail(ack_id)
                    continue
                task["ack_id"] = ack_id
                task_queue.put(task)
            logger.debug(_(f"queueing from {broker.list_key}"))
        if event.is_set():
            break
    logger.info(_(f"{current_process().name} stopped pushing tasks"))
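# Usage sketch (illustrative assumptions, not taken from the snippet above): the
# pusher is intended to run in its own process, fed a shared task queue and a stop
# event by the cluster/sentinel. The helper name below is hypothetical.
from multiprocessing import Event, Process, Queue

def spawn_pusher(broker=None):
    task_queue = Queue()   # worker processes read signed tasks from here
    stop_event = Event()   # set this to ask the pusher loop to exit
    proc = Process(target=pusher, args=(task_queue, stop_event, broker))
    proc.start()
    return proc, task_queue, stop_event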
def pusher(task_queue, event, list_key=Conf.Q_LIST, r=redis_client):
    """
    Pulls tasks off the Redis list and puts them in the task queue.

    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    :type list_key: str
    """
    logger.info(_("{} pushing tasks at {}").format(current_process().name, current_process().pid))
    while True:
        try:
            task = r.blpop(list_key, 1)
        except Exception as e:
            logger.error(e)
            # redis probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task:
            # unpack the task
            try:
                task = signing.SignedPackage.loads(task[1])
            except (TypeError, signing.BadSignature) as e:
                logger.error(e)
                continue
            task_queue.put(task)
            logger.debug(_("queueing from {}").format(list_key))
        if event.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = (
        "hook",
        "group",
        "save",
        "sync",
        "cached",
        "ack_failure",
        "iter_count",
        "iter_cached",
        "chain",
        "broker",
        "timeout",
    )
    q_options = keywords.pop("q_options", {})
    # get an id
    tag = uuid()
    # build the task package
    task = {
        "id": tag[1],
        "name": keywords.pop("task_name", None) or q_options.pop("task_name", None) or tag[0],
        "func": func,
        "args": args,
    }
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop("broker", get_broker())
    # overrides
    if "cached" not in task and Conf.CACHED:
        task["cached"] = Conf.CACHED
    if "sync" not in task and Conf.SYNC:
        task["sync"] = Conf.SYNC
    if "ack_failure" not in task and Conf.ACK_FAILURES:
        task["ack_failure"] = Conf.ACK_FAILURES
    # finalize
    task["kwargs"] = keywords
    task["started"] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get("sync", False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info(f"Enqueued {enqueue_id}")
    logger.debug(f"Pushed {tag}")
    return task["id"]
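# Usage sketch (illustrative, not part of the function above): task options can be
# passed as plain keywords or bundled in q_options, which keeps them separate from
# the keyword arguments forwarded to func. The task function is a placeholder.
def send_welcome_email(user_id, template="default"):
    # placeholder task body for the example
    return user_id

# options mixed into kwargs
task_id = async_task(send_welcome_email, 42, template="short", group="emails")

# the same call with options isolated in q_options
task_id = async_task(
    send_welcome_email,
    42,
    template="short",
    q_options={"group": "emails", "timeout": 60, "task_name": "welcome-42"},
)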
def pusher(task_queue, event, broker=None):
    """
    Pulls tasks off the broker and puts them in the task queue.

    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(_('{} pushing tasks at {}').format(current_process().name, current_process().pid))
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e)
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = signing.SignedPackage.loads(task[1])
                except (TypeError, signing.BadSignature) as e:
                    logger.error(e)
                    broker.fail(ack_id)
                    continue
                task['ack_id'] = ack_id
                task_queue.put(task)
            logger.debug(_('queueing from {}').format(broker.list_key))
        if event.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
def async(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = ('hook', 'group', 'save', 'sync', 'cached',
                'iter_count', 'iter_cached', 'chain', 'broker')
    q_options = keywords.pop('q_options', None)
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1], 'name': tag[0], 'func': func, 'args': args}
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    # sign it
    pack = signing.SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    broker.enqueue(pack)
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def pusher(task_queue, event, broker=None):
    """
    Pulls tasks off the broker and puts them in the task queue.

    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(
        _('{} pushing tasks at {}').format(current_process().name, current_process().pid))
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e)
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = signing.SignedPackage.loads(task[1])
                except (TypeError, signing.BadSignature) as e:
                    logger.error(e)
                    broker.fail(ack_id)
                    continue
                task['ack_id'] = ack_id
                task_queue.put(task)
            logger.debug(_('queueing from {}').format(broker.list_key))
        if event.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
def async(func, *args, **kwargs):
    """
    Sends a task to the cluster
    """
    # optional hook
    hook = kwargs.pop('hook', None)
    # optional list_key
    list_key = kwargs.pop('list_key', Conf.Q_LIST)
    # optional redis connection
    redis = kwargs.pop('redis', redis_client)
    # optional sync mode
    sync = kwargs.pop('sync', False)
    # optional group
    group = kwargs.pop('group', None)
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1], 'name': tag[0],
            'func': func,
            'args': args,
            'kwargs': kwargs,
            'started': timezone.now()}
    # add optionals
    if hook:
        task['hook'] = hook
    if group:
        task['group'] = group
    # sign it
    pack = signing.SignedPackage.dumps(task)
    if sync:
        return _sync(task['id'], pack)
    # push it
    redis.rpush(list_key, pack)
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def pusher(task_queue, e, list_key=Conf.Q_LIST, r=redis_client):
    """
    Pulls tasks off the Redis list and puts them in the task queue.

    :type task_queue: multiprocessing.Queue
    :type e: multiprocessing.Event
    :type list_key: str
    """
    logger.info(
        _('{} pushing tasks at {}').format(current_process().name, current_process().pid))
    while True:
        try:
            task = r.blpop(list_key, 1)
        except Exception as e:
            logger.error(e)
            # redis probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task:
            task = task[1]
            task_queue.put(task)
            logger.debug(_('queueing from {}').format(list_key))
        if e.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
def get_connection(list_key=Conf.PREFIX):
    if transaction.get_autocommit():  # Only True when not in an atomic block
        # Make sure stale connections in the broker thread are explicitly
        # closed before attempting DB access.
        # logger.debug("Broker thread calling close_old_connections")
        db.close_old_connections()
    else:
        logger.debug("Broker in an atomic transaction")
    return OrmQ.objects.using(Conf.ORM)
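# Usage sketch (hypothetical helper, for illustration only): callers get a manager
# bound to the configured database and query it like any other queryset. This
# assumes OrmQ exposes a `key` field identifying the queue a package belongs to.
def queue_size(list_key=Conf.PREFIX):
    # count the pending packages for this queue key
    return get_connection(list_key).filter(key=list_key).count()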
def add_async_task(func, *args, **kwargs):
    # Wrapper method to add a task with awareness of the schema
    if "schema_name" not in kwargs:
        kwargs.update({"schema_name": connection.schema_name})

    tag, task, broker, pack = QUtilities.prepare_task(func, *args, **kwargs)

    if task.get("sync", False):
        return QUtilities.run_synchronously(pack)

    enqueue_id = broker.enqueue(pack)
    logger.info(f"Enqueued {enqueue_id}")
    logger.debug(f"Pushed {tag}")
    return task["id"]
def async(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = ('hook', 'group', 'save', 'sync', 'cached',
                'iter_count', 'iter_cached', 'chain', 'broker', 'progress_updates')
    q_options = keywords.pop('q_options', {})
    # get an id
    tag = keywords.pop('uuid', None) or uuid()
    # build the task package
    task = {
        'id': tag[1],
        'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0],
        'func': func,
        'args': args
    }
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    task['is_progress_updating'] = bool(task.get('progress_updates', False))
    task['success'] = False
    task['stopped'] = None
    task['result'] = None
    task['task_status'] = Task.PENDING
    # sign it
    pack = signing.SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    broker.enqueue(pack)
    logger.debug('Pushed {}'.format(tag))
    # create initial task result entry
    cluster.save_task(task, broker)
    return task['id']
def async(func, *args, **kwargs):
    """Queue a task for the cluster."""
    # get options from the q_options dict or directly from kwargs
    options = kwargs.pop('q_options', kwargs)
    hook = options.pop('hook', None)
    broker = options.pop('broker', get_broker())
    sync = options.pop('sync', False)
    group = options.pop('group', None)
    save = options.pop('save', None)
    cached = options.pop('cached', Conf.CACHED)
    iter_count = options.pop('iter_count', None)
    iter_cached = options.pop('iter_cached', None)
    # get an id
    tag = uuid()
    # build the task package
    task = {
        'id': tag[1],
        'name': tag[0],
        'func': func,
        'args': args,
        'kwargs': kwargs,
        'started': timezone.now()
    }
    # add optionals
    if hook:
        task['hook'] = hook
    if group:
        task['group'] = group
    if save is not None:
        task['save'] = save
    if cached:
        task['cached'] = cached
    if iter_count:
        task['iter_count'] = iter_count
    if iter_cached:
        task['iter_cached'] = iter_cached
    # sign it
    pack = signing.SignedPackage.dumps(task)
    if sync or Conf.SYNC:
        return _sync(pack)
    # push it
    broker.enqueue(pack)
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = (
        'hook', 'group', 'save', 'sync', 'cached', 'ack_failure',
        'iter_count', 'iter_cached', 'chain', 'broker', 'timeout')
    q_options = keywords.pop('q_options', {})
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1],
            'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0],
            'func': func,
            'args': args}
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    if 'ack_failure' not in task and Conf.ACK_FAILURES:
        task['ack_failure'] = Conf.ACK_FAILURES
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info('Enqueued {}'.format(enqueue_id))
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = (
        'hook', 'group', 'save', 'sync', 'cached', 'ack_failure',
        'iter_count', 'iter_cached', 'chain', 'broker')
    q_options = keywords.pop('q_options', {})
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1],
            'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0],
            'func': func,
            'args': args}
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    if 'ack_failure' not in task and Conf.ACK_FAILURES:
        task['ack_failure'] = Conf.ACK_FAILURES
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info('Enqueued {}'.format(enqueue_id))
    logger.debug('Pushed {}'.format(tag))
    return task['id']
def pusher(task_queue, e, list_key=Conf.Q_LIST, r=redis_client):
    """
    Pulls tasks off the Redis list and puts them in the task queue.

    :type task_queue: multiprocessing.Queue
    :type e: multiprocessing.Event
    :type list_key: str
    """
    logger.info(_('{} pushing tasks at {}').format(current_process().name, current_process().pid))
    while True:
        try:
            task = r.blpop(list_key, 1)
        except Exception as e:
            logger.error(e)
            # redis probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task:
            task_queue.put(task[1])
            logger.debug(_('queueing from {}').format(list_key))
        if e.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
def sig_handler(self, signum, frame):
    logger.debug(_('{} got signal {}').format(current_process().name,
                                              Conf.SIGNAL_NAMES.get(signum, 'UNKNOWN')))
    self.stop()
def sig_handler(self, signum, frame):
    logger.debug(
        _('{} got signal {}').format(
            current_process().name,
            Conf.SIGNAL_NAMES.get(signum, 'UNKNOWN')))
    self.stop()
def async_task(func, *pos_args, args=None, kwargs=None, name=None, hook=None, group=None,
               timeout=None, **q_options):
    """
    Queue a task for the cluster.

    :param func: Callable function object or string representation of module.function
    :param pos_args: Positional arguments to provide to func
    :param args: Positional arguments to provide to func
    :param kwargs: Keyword arguments to provide to func
    :param name: Optional custom name of task
    :param hook: Function to call after the task completes (provided the Task instance as argument)
    :param str group: Group identifier (to correlate related tasks)
    """
    func = validate_function(func)
    hook = validate_function(hook)
    args = tuple(pos_args or args or tuple())
    # guard against kwargs being None before copying
    keywords = (kwargs or {}).copy()
    opt_keys = (
        "hook",
        "group",
        "save",
        "sync",  # Whether to run the task synchronously
        "cached",  # Remove
        "ack_failure",  # Causes failed tasks to still mark status as complete
        "iter_count",  # Remove
        "iter_cached",  # Remove
        "chain",  # Use prerequisite instead of chain
        "broker",  # don't need
        "timeout",
    )
    q_options = keywords.pop("q_options", {})
    # get an id
    tag = uuid()
    # Create task instance
    task = Task.objects.create(
        id=tag[1],
        name=name or tag[0],
        func=func,
        args=args,
        kwargs=kwargs,
        hook=hook,
        group=group,
    )
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop("broker", get_broker())
    # overrides
    if "cached" not in task and Conf.CACHED:
        task["cached"] = Conf.CACHED
    if "sync" not in task and Conf.SYNC:
        task["sync"] = Conf.SYNC
    if "ack_failure" not in task and Conf.ACK_FAILURES:
        task["ack_failure"] = Conf.ACK_FAILURES
    # finalize
    task["kwargs"] = keywords
    task["started"] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get("sync", False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info(f"Enqueued {enqueue_id}")
    logger.debug(f"Pushed {tag}")
    return task["id"]