Example #1
    def test_login_by_email_positive(self):
        # when
        response = self.client.post(reverse('email_login'),
                                    data={'email_or_login': self.new_user.email, })

        # then
        self.assertContains(response=response, text="Вам отправлен код!", status_code=200)
        issued_code = Code.objects.filter(recipient=self.new_user.email).get()
        self.assertIsNotNone(issued_code)

        # check email was sent
        packages = self.broker.dequeue()
        task_signed = packages[0][1]
        task = SignedPackage.loads(task_signed)
        self.assertEqual(task['func'].__name__, 'send_auth_email')
        self.assertEqual(task['args'][0].id, self.new_user.id)
        self.assertEqual(task['args'][1].id, issued_code.id)

        # check notify was sent
        packages = self.broker.dequeue()
        task_signed = packages[0][1]
        task = SignedPackage.loads(task_signed)
        self.assertEqual(task['func'].__name__, 'notify_user_auth')
        self.assertEqual(task['args'][0].id, self.new_user.id)
        self.assertEqual(task['args'][1].id, issued_code.id)

        # it's not yet authorised, only code was sent
        self.assertFalse(self.client.is_authorised())
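The dequeue-and-verify pattern used twice above factors nicely into a test helper. A minimal sketch, assuming the standard django-q broker API from the examples on this page (the helper name pop_task is hypothetical):

from django_q.brokers import get_broker
from django_q.signing import SignedPackage

def pop_task(broker=None):
    # Hypothetical test helper: dequeue one package, verify its
    # signature, and acknowledge it so it leaves the queue.
    broker = broker or get_broker()
    packages = broker.dequeue()              # list of (ack_id, signed payload) tuples
    ack_id, task_signed = packages[0]
    task = SignedPackage.loads(task_signed)  # raises BadSignature if tampered with
    broker.acknowledge(ack_id)
    return task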
Example #2
def save_cached(task, broker):
    task_key = f'{broker.list_key}:{task["id"]}'
    timeout = task["cached"]
    if timeout is True:
        timeout = None
    try:
        group = task.get("group", None)
        iter_count = task.get("iter_count", 0)
        # if it's a group append to the group list
        if group:
            group_key = f"{broker.list_key}:{group}:keys"
            group_list = broker.cache.get(group_key) or []
            # if it's an iter group, check if we are ready
            if iter_count and len(group_list) == iter_count - 1:
                group_args = f"{broker.list_key}:{group}:args"
                # collate the results into a Task result
                results = [
                    SignedPackage.loads(broker.cache.get(k))["result"]
                    for k in group_list
                ]
                results.append(task["result"])
                task["result"] = results
                task["id"] = group
                task["args"] = SignedPackage.loads(
                    broker.cache.get(group_args))
                task.pop("iter_count", None)
                task.pop("group", None)
                if task.get("iter_cached", None):
                    task["cached"] = task.pop("iter_cached", None)
                    save_cached(task, broker=broker)
                else:
                    save_task(task, broker)
                broker.cache.delete_many(group_list)
                broker.cache.delete_many([group_key, group_args])
                return
            # save the group list
            group_list.append(task_key)
            broker.cache.set(group_key, group_list, timeout)
            # async_task next in a chain
            if task.get("chain", None):
                django_q.tasks.async_chain(
                    task["chain"],
                    group=group,
                    cached=task["cached"],
                    sync=task["sync"],
                    broker=broker,
                )
        # save the task
        broker.cache.set(task_key, SignedPackage.dumps(task), timeout)
    except Exception as e:
        logger.error(e)
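Outside a worker, the same cache layout can be read back directly: each cached task sits under the key list_key:task_id as a SignedPackage payload, exactly as written on the last line above. A minimal sketch (peek_cached is a hypothetical helper):

from django_q.brokers import get_broker
from django_q.signing import SignedPackage

def peek_cached(task_id, broker=None):
    # Hypothetical helper: read a cached task dict once, without the
    # polling loop that fetch_cached() uses.
    broker = broker or get_broker()
    payload = broker.cache.get(f"{broker.list_key}:{task_id}")
    return SignedPackage.loads(payload) if payload else None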
Example #3
def save_cached(task, broker):
    task_key = '{}:{}'.format(broker.list_key, task['id'])
    timeout = task['cached']
    if timeout is True:
        timeout = None
    try:
        group = task.get('group', None)
        iter_count = task.get('iter_count', 0)
        # if it's a group append to the group list
        if group:
            group_key = '{}:{}:keys'.format(broker.list_key, group)
            group_list = broker.cache.get(group_key) or []
            # if it's an iter group, check if we are ready
            if iter_count and len(group_list) == iter_count - 1:
                group_args = '{}:{}:args'.format(broker.list_key, group)
                # collate the results into a Task result
                results = [
                    SignedPackage.loads(broker.cache.get(k))['result']
                    for k in group_list
                ]
                results.append(task['result'])
                task['result'] = results
                task['id'] = group
                task['args'] = SignedPackage.loads(
                    broker.cache.get(group_args))
                task.pop('iter_count', None)
                task.pop('group', None)
                if task.get('iter_cached', None):
                    task['cached'] = task.pop('iter_cached', None)
                    save_cached(task, broker=broker)
                else:
                    save_task(task, broker)
                broker.cache.delete_many(group_list)
                broker.cache.delete_many([group_key, group_args])
                return
            # save the group list
            group_list.append(task_key)
            broker.cache.set(group_key, group_list, timeout)
            # async_task next in a chain
            if task.get('chain', None):
                tasks.async_chain(task['chain'],
                                  group=group,
                                  cached=task['cached'],
                                  sync=task['sync'],
                                  broker=broker)
        # save the task
        broker.cache.set(task_key, SignedPackage.dumps(task), timeout)
    except Exception as e:
        logger.error(e)
Example #4
def pusher(task_queue, event, broker=None):
    """
    Pulls tasks off the broker and puts them in the task queue
    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(_('{} pushing tasks at {}').format(current_process().name, current_process().pid))
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e)
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = SignedPackage.loads(task[1])
                except (TypeError, BadSignature) as e:
                    logger.error(e)
                    broker.fail(ack_id)
                    continue
                task['ack_id'] = ack_id
                task_queue.put(task)
            logger.debug(_('queueing from {}').format(broker.list_key))
        if event.is_set():
            break
    logger.info(_("{} stopped pushing tasks").format(current_process().name))
Example #5
def fetch_cached(task_id, wait=0, broker=None):
    """
    Return the processed task from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    while True:
        r = broker.cache.get('{}:{}'.format(broker.list_key, task_id))
        if r:
            task = SignedPackage.loads(r)
            t = Task(id=task['id'],
                     name=task['name'],
                     func=task['func'],
                     hook=task.get('hook'),
                     args=task['args'],
                     kwargs=task['kwargs'],
                     started=task['started'],
                     stopped=task['stopped'],
                     result=task['result'],
                     success=task['success'])
            return t
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
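Note that wait is in milliseconds; the loop polls the cache every 10 ms until the deadline passes. A usage sketch, assuming a running qcluster and a task queued with cached=True (math.sqrt stands in for any importable callable):

from django_q.tasks import async_task, fetch_cached

task_id = async_task('math.sqrt', 9, cached=True)
task = fetch_cached(task_id, wait=1000)  # poll the cache for up to one second
if task and task.success:
    print(task.result)  # 3.0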
Example #6
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = (
        "hook",
        "group",
        "save",
        "sync",
        "cached",
        "ack_failure",
        "iter_count",
        "iter_cached",
        "chain",
        "broker",
        "timeout",
    )
    q_options = keywords.pop("q_options", {})
    # get an id
    tag = uuid()
    # build the task package
    task = {
        "id": tag[1],
        "name": (keywords.pop("task_name", None)
                 or q_options.pop("task_name", None)
                 or tag[0]),
        "func": func,
        "args": args,
    }
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop("broker", get_broker())
    # overrides
    if "cached" not in task and Conf.CACHED:
        task["cached"] = Conf.CACHED
    if "sync" not in task and Conf.SYNC:
        task["sync"] = Conf.SYNC
    if "ack_failure" not in task and Conf.ACK_FAILURES:
        task["ack_failure"] = Conf.ACK_FAILURES
    # finalize
    task["kwargs"] = keywords
    task["started"] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get("sync", False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info(f"Enqueued {enqueue_id}")
    logger.debug(f"Pushed {tag}")
    return task["id"]
Example #7
def fetch_group_cached(group_id, failures=True, wait=0, count=None, broker=None):
    """
    Return a list of Tasks for a task group in the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    if count:
        while True:
            if count_group_cached(group_id) == count or wait and (time() - start) * 1000 >= wait >= 0:
                break
            sleep(0.01)
    while True:
        group_list = broker.cache.get('{}:{}:keys'.format(broker.list_key, group_id))
        if group_list:
            task_list = []
            for task_key in group_list:
                task = SignedPackage.loads(broker.cache.get(task_key))
                if task['success'] or failures:
                    t = Task(id=task['id'],
                             name=task['name'],
                             func=task['func'],
                             hook=task.get('hook'),
                             args=task['args'],
                             kwargs=task['kwargs'],
                             started=task['started'],
                             stopped=task['stopped'],
                             result=task['result'],
                             group=task.get('group'),
                             success=task['success'])
                    task_list.append(t)
            return task_list
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
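A usage sketch for the cached group API, assuming the tasks were queued with the same group and cached=True and a cluster is running:

from django_q.tasks import async_task, fetch_group_cached

for n in range(4):
    async_task('math.sqrt', n, group='roots', cached=True)

# block (for up to five seconds) until all four results are cached
tasks = fetch_group_cached('roots', count=4, wait=5000)
print([t.result for t in tasks])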
Example #8
def fetch_cached(task_id, wait=0, broker=None):
    """
    Return the processed task from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    while True:
        r = broker.cache.get(f"{broker.list_key}:{task_id}")
        if r:
            task = SignedPackage.loads(r)
            return Task(
                id=task["id"],
                name=task["name"],
                func=task["func"],
                hook=task.get("hook"),
                args=task["args"],
                kwargs=task["kwargs"],
                started=task["started"],
                stopped=task["stopped"],
                result=task["result"],
                success=task["success"],
            )
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #9
def async_iter(func, args_iter, **kwargs):
    """
    enqueues a function with iterable arguments
    """
    iter_count = len(args_iter)
    iter_group = uuid()[1]
    # clean up the kwargs
    options = kwargs.get("q_options", kwargs)
    options.pop("hook", None)
    options["broker"] = options.get("broker", get_broker())
    options["group"] = iter_group
    options["iter_count"] = iter_count
    if options.get("cached", None):
        options["iter_cached"] = options["cached"]
    options["cached"] = True
    # save the original arguments
    broker = options["broker"]
    broker.cache.set(
        f"{broker.list_key}:{iter_group}:args", SignedPackage.dumps(args_iter)
    )
    for args in args_iter:
        if not isinstance(args, tuple):
            args = (args,)
        async_task(func, *args, **options)
    return iter_group
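Because the collated result is saved under the returned group id, passing cached=True keeps the final result in the cache too (via iter_cached), where result_cached can pick it up. A sketch:

from django_q.tasks import async_iter, result_cached

# one task per argument tuple; save_cached() collates the results
group_id = async_iter('math.copysign', [(1, -1), (2, -1), (3, -1)], cached=True)
print(result_cached(group_id, wait=5000))  # e.g. [-1.0, -2.0, -3.0]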
Example #10
def pusher(task_queue, event, broker=None):
    """
    Pulls tasks off the broker and puts them in the task queue
    :type task_queue: multiprocessing.Queue
    :type event: multiprocessing.Event
    """
    if not broker:
        broker = get_broker()
    logger.info(
        _(f"{current_process().name} pushing tasks at {current_process().pid}")
    )
    while True:
        try:
            task_set = broker.dequeue()
        except Exception as e:
            logger.error(e, traceback.format_exc())
            # broker probably crashed. Let the sentinel handle it.
            sleep(10)
            break
        if task_set:
            for task in task_set:
                ack_id = task[0]
                # unpack the task
                try:
                    task = SignedPackage.loads(task[1])
                except (TypeError, BadSignature) as e:
                    logger.error(e, traceback.format_exc())
                    broker.fail(ack_id)
                    continue
                task["ack_id"] = ack_id
                task_queue.put(task)
            logger.debug(_(f"queueing from {broker.list_key}"))
        if event.is_set():
            break
    logger.info(_(f"{current_process().name} stopped pushing tasks"))
Example #11
def result_group_cached(group_id,
                        failures=False,
                        wait=0,
                        count=None,
                        broker=None):
    """
    Return a list of results for a task group from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    if count:
        while True:
            if count_group_cached(group_id) == count or wait and (
                    time() - start) * 1000 >= wait > 0:
                break
            sleep(0.01)
    while True:
        group_list = broker.cache.get('{}:{}:keys'.format(
            broker.list_key, group_id))
        if group_list:
            result_list = []
            for task_key in group_list:
                task = SignedPackage.loads(broker.cache.get(task_key))
                if task['success'] or failures:
                    result_list.append(task['result'])
            return result_list
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #12
def save_cached(task, broker):
    task_key = '{}:{}'.format(broker.list_key, task['id'])
    timeout = task['cached']
    if timeout is True:
        timeout = None
    try:
        group = task.get('group', None)
        iter_count = task.get('iter_count', 0)
        # if it's a group append to the group list
        if group:
            group_key = '{}:{}:keys'.format(broker.list_key, group)
            group_list = broker.cache.get(group_key) or []
            # if it's an iter group, check if we are ready
            if iter_count and len(group_list) == iter_count - 1:
                group_args = '{}:{}:args'.format(broker.list_key, group)
                # collate the results into a Task result
                results = [SignedPackage.loads(broker.cache.get(k))['result'] for k in group_list]
                results.append(task['result'])
                task['result'] = results
                task['id'] = group
                task['args'] = SignedPackage.loads(broker.cache.get(group_args))
                task.pop('iter_count', None)
                task.pop('group', None)
                if task.get('iter_cached', None):
                    task['cached'] = task.pop('iter_cached', None)
                    save_cached(task, broker=broker)
                else:
                    save_task(task, broker)
                broker.cache.delete_many(group_list)
                broker.cache.delete_many([group_key, group_args])
                return
            # save the group list
            group_list.append(task_key)
            broker.cache.set(group_key, group_list, timeout)
            # async next in a chain
            if task.get('chain', None):
                tasks.async_chain(task['chain'], group=group, cached=task['cached'], sync=task['sync'], broker=broker)
        # save the task
        broker.cache.set(task_key,
                         SignedPackage.dumps(task),
                         timeout)
    except Exception as e:
        logger.error(e)
Example #13
def result_cached(task_id, wait=0, broker=None):
    """
    Return the result from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    while True:
        r = broker.cache.get('{}:{}'.format(broker.list_key, task_id))
        if r:
            return SignedPackage.loads(r)['result']
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #14
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    # Python 2.6 is unable to handle this import at the top of the file
    # because it creates a circular dependency between tasks and cluster
    from django_q.cluster import worker, monitor
    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    result_queue.put('STOP')
    monitor(result_queue)
    return task['id']
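_sync is the code path behind sync=True: the signed package is run through a worker and a monitor in-process, which is convenient in tests and management commands. A sketch:

from django_q.tasks import async_task, result

# sync=True bypasses the broker and executes inline via _sync()
task_id = async_task('math.floor', 1.5, sync=True)
print(result(task_id))  # 1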
Example #15
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = ('hook', 'group', 'save', 'sync', 'cached', 'ack_failure',
                'iter_count', 'iter_cached', 'chain', 'broker')
    q_options = keywords.pop('q_options', {})
    # get an id
    tag = uuid()
    # build the task package
    task = {
        'id': tag[1],
        'name': (keywords.pop('task_name', None)
                 or q_options.pop('task_name', None)
                 or tag[0]),
        'func': func,
        'args': args
    }
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    if 'ack_failure' not in task and Conf.ACK_FAILURES:
        task['ack_failure'] = Conf.ACK_FAILURES
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info('Enqueued {}'.format(enqueue_id))
    logger.debug('Pushed {}'.format(tag))
    return task['id']
Example #16
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    result_queue.put('STOP')
    monitor(result_queue)
    task_queue.close()
    task_queue.join_thread()
    result_queue.close()
    result_queue.join_thread()
    return task['id']
Example #17
 def get(cluster_id, broker=None):
     """
     gets the current status for the cluster
     :param cluster_id: id of the cluster
     :return: Stat or Status
     """
     if not broker:
         broker = get_broker()
     pack = broker.get_stat(Stat.get_key(cluster_id))
     if pack:
         try:
             return SignedPackage.loads(pack)
         except BadSignature:
             return None
     return Status(cluster_id)
Example #18
 def get(pid, cluster_id, broker=None):
     """
     gets the current status for the cluster
     :param cluster_id: id of the cluster
     :return: Stat or Status
     """
     if not broker:
         broker = get_broker()
     pack = broker.get_stat(Stat.get_key(cluster_id))
     if pack:
         try:
             return SignedPackage.loads(pack)
         except BadSignature:
             return None
     return Status(pid=pid, cluster_id=cluster_id)
Example #19
 def get_all(broker=None):
     """
     Get the status for all currently running clusters with the same prefix
     and secret key.
     :return: list of type Stat
     """
     if not broker:
         broker = get_broker()
     stats = []
     packs = broker.get_stats(f"{Conf.Q_STAT}:*") or []
     for pack in packs:
         try:
             stats.append(SignedPackage.loads(pack))
         except BadSignature:
             continue
     return stats
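These signed stats are what the monitoring commands read. A short sketch, assuming Stat is importable from django_q.monitor (older releases) or django_q.status (newer ones):

from django_q.monitor import Stat  # or: from django_q.status import Stat

for stat in Stat.get_all():
    # each entry is a snapshot a cluster signed and stored via Stat.save()
    print(stat.cluster_id, stat.status, len(stat.workers))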
Example #20
def count_group_cached(group_id, failures=False, broker=None):
    """
    Count the results in a group in the cache backend
    """
    if not broker:
        broker = get_broker()
    group_list = broker.cache.get(f"{broker.list_key}:{group_id}:keys")
    if group_list:
        if not failures:
            return len(group_list)
        failure_count = 0
        for task_key in group_list:
            task = SignedPackage.loads(broker.cache.get(task_key))
            if not task["success"]:
                failure_count += 1
        return failure_count
Example #21
def count_group_cached(group_id, failures=False, broker=None):
    """
    Count the results in a group in the cache backend
    """
    if not broker:
        broker = get_broker()
    group_list = broker.cache.get('{}:{}:keys'.format(broker.list_key, group_id))
    if group_list:
        if not failures:
            return len(group_list)
        failure_count = 0
        for task_key in group_list:
            task = SignedPackage.loads(broker.cache.get(task_key))
            if not task['success']:
                failure_count += 1
        return failure_count
Example #22
 def get_all(broker=None):
     """
     Get the status for all currently running clusters with the same prefix
     and secret key.
     :return: list of type Stat
     """
     if not broker:
         broker = get_broker()
     stats = []
     packs = broker.get_stats('{}:*'.format(Conf.Q_STAT)) or []
     for pack in packs:
         try:
             stats.append(SignedPackage.loads(pack))
         except BadSignature:
             continue
     return stats
Example #23
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    from django_q.cluster import monitor, worker

    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put("STOP")
    worker(task_queue, result_queue, Value("f", -1))
    result_queue.put("STOP")
    monitor(result_queue)
    task_queue.close()
    task_queue.join_thread()
    result_queue.close()
    result_queue.join_thread()
    return task["id"]
Example #24
 def get(pid: int, cluster_id: str, broker: Broker = None) -> Union[Status, None]:
     """
     gets the current status for the cluster
     :param pid:
     :param broker: an optional broker instance
     :param cluster_id: id of the cluster
     :return: Stat or Status
     """
     if not broker:
         broker = get_broker()
     pack = broker.get_stat(Stat.get_key(cluster_id))
     if pack:
         try:
             return SignedPackage.loads(pack)
         except BadSignature:
             return None
     return Status(pid=pid, cluster_id=cluster_id)
Example #25
    def run_synchronously(pack):
        """Method to run a task synchoronously"""

        from django_tenant_schemas_q.cluster import worker, monitor

        task_queue = Queue()
        result_queue = Queue()
        task = SignedPackage.loads(pack)
        task_queue.put(task)
        task_queue.put("STOP")
        worker(task_queue, result_queue, Value("f", -1))
        result_queue.put("STOP")
        monitor(result_queue)
        task_queue.close()
        task_queue.join_thread()
        result_queue.close()
        result_queue.join_thread()
        return task["id"]
Example #26
def fetch_group_cached(group_id,
                       failures=True,
                       wait=0,
                       count=None,
                       broker=None):
    """
    Return a list of Tasks for a task group in the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    if count:
        while True:
            if (count_group_cached(group_id) == count
                    or wait and (time() - start) * 1000 >= wait >= 0):
                break
            sleep(0.01)
    while True:
        group_list = broker.cache.get(f"{broker.list_key}:{group_id}:keys")
        if group_list:
            task_list = []
            for task_key in group_list:
                task = SignedPackage.loads(broker.cache.get(task_key))
                if task["success"] or failures:
                    t = Task(
                        id=task["id"],
                        name=task["name"],
                        func=task["func"],
                        hook=task.get("hook"),
                        args=task["args"],
                        kwargs=task["kwargs"],
                        started=task["started"],
                        stopped=task["stopped"],
                        result=task["result"],
                        group=task.get("group"),
                        success=task["success"],
                    )
                    task_list.append(t)
            return task_list
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #27
def async_task(func, *args, **kwargs):
    """Queue a task for the cluster."""
    keywords = kwargs.copy()
    opt_keys = ('hook', 'group', 'save', 'sync', 'cached', 'ack_failure',
                'iter_count', 'iter_cached', 'chain', 'broker')
    q_options = keywords.pop('q_options', {})
    # get an id
    tag = uuid()
    # build the task package
    task = {'id': tag[1],
            'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0],
            'func': func,
            'args': args}
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop('broker', get_broker())
    # overrides
    if 'cached' not in task and Conf.CACHED:
        task['cached'] = Conf.CACHED
    if 'sync' not in task and Conf.SYNC:
        task['sync'] = Conf.SYNC
    if 'ack_failure' not in task and Conf.ACK_FAILURES:
        task['ack_failure'] = Conf.ACK_FAILURES
    # finalize
    task['kwargs'] = keywords
    task['started'] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get('sync', False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info('Enqueued {}'.format(enqueue_id))
    logger.debug('Pushed {}'.format(tag))
    return task['id']
Example #28
def async_iter(func, args_iter, **kwargs):
    """
    enqueues a function with iterable arguments
    """
    iter_count = len(args_iter)
    iter_group = uuid()[1]
    # clean up the kwargs
    options = kwargs.get('q_options', kwargs)
    options.pop('hook', None)
    options['broker'] = options.get('broker', get_broker())
    options['group'] = iter_group
    options['iter_count'] = iter_count
    if options.get('cached', None):
        options['iter_cached'] = options['cached']
    options['cached'] = True
    # save the original arguments
    broker = options['broker']
    broker.cache.set('{}:{}:args'.format(broker.list_key, iter_group), SignedPackage.dumps(args_iter))
    for args in args_iter:
        if not isinstance(args, tuple):
            args = (args,)
        async_task(func, *args, **options)
    return iter_group
Example #29
def result_group_cached(group_id, failures=False, wait=0, count=None, broker=None):
    """
    Return a list of results for a task group from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    if count:
        while True:
            if count_group_cached(group_id) == count or wait and (time() - start) * 1000 >= wait > 0:
                break
            sleep(0.01)
    while True:
        group_list = broker.cache.get('{}:{}:keys'.format(broker.list_key, group_id))
        if group_list:
            result_list = []
            for task_key in group_list:
                task = SignedPackage.loads(broker.cache.get(task_key))
                if task['success'] or failures:
                    result_list.append(task['result'])
            return result_list
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #30
 def task(self):
     return SignedPackage.loads(self.payload)
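This looks like the ORM broker's payload accessor on the OrmQ model (compare Example #32, where OrmQ rows are created with SignedPackage.dumps): the signed string in payload is verified and unpickled on every call. A round-trip sketch:

from django_q.models import OrmQ
from django_q.signing import SignedPackage

q = OrmQ.objects.create(
    key='default',
    payload=SignedPackage.dumps({'id': 'abc', 'func': 'math.sqrt', 'args': (9,)}),
)
print(q.task()['func'])  # 'math.sqrt'; raises BadSignature if the payload was altered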
Example #31
def async_task(func,
               *pos_args,
               args=None,
               kwargs=None,
               name=None,
               hook=None,
               group=None,
               timeout=None,
               **q_options):
    """
    Queue a task for the cluster.
    :param func: Callable function object or string representation of module.function
    :param pos_args: Positional arguments to provide to func
    :param args: Positional arguments to provide to func
    :param kwargs: Keyword arguments to provide to func
    :param name: Optional custom name of task
    :param hook: Function to call after task complete (provided Task instance as argument)
    :param str group: Group identifier (to correlate related tasks)
    """
    func = validate_function(func)
    hook = validate_function(hook)

    args = tuple(pos_args or args or tuple())

    keywords = (kwargs or {}).copy()  # kwargs defaults to None
    opt_keys = (
        "hook",
        "group",
        "save",
        "sync",  # Whether to run the task synchronously
        "cached",  # Remove
        "ack_failure",  # Causes failed tasks to still mark status as complete
        "iter_count",  # Remove
        "iter_cached",  # Remove
        "chain",  # Use prerequisite instead of chain
        "broker",  # dont need
        "timeout",
    )
    q_options = keywords.pop("q_options", {})
    # get an id
    tag = uuid()
    # Create task instance
    task = Task.objects.create(
        id=tag[1],
        name=name or tag[0],
        func=func,
        args=args,
        kwargs=kwargs,
        hook=hook,
        group=group,
    )
    # push optionals
    for key in opt_keys:
        if q_options and key in q_options:
            task[key] = q_options[key]
        elif key in keywords:
            task[key] = keywords.pop(key)
    # don't serialize the broker
    broker = task.pop("broker", get_broker())
    # overrides
    if "cached" not in task and Conf.CACHED:
        task["cached"] = Conf.CACHED
    if "sync" not in task and Conf.SYNC:
        task["sync"] = Conf.SYNC
    if "ack_failure" not in task and Conf.ACK_FAILURES:
        task["ack_failure"] = Conf.ACK_FAILURES
    # finalize
    task["kwargs"] = keywords
    task["started"] = timezone.now()
    # signal it
    pre_enqueue.send(sender="django_q", task=task)
    # sign it
    pack = SignedPackage.dumps(task)
    if task.get("sync", False):
        return _sync(pack)
    # push it
    enqueue_id = broker.enqueue(pack)
    logger.info(f"Enqueued {enqueue_id}")
    logger.debug(f"Pushed {tag}")
    return task["id"]
Example #32
def test_admin_views(admin_client, monkeypatch):
    monkeypatch.setattr(Conf, "ORM", "default")
    s = schedule("schedule.test")
    tag = uuid()
    f = Task.objects.create(
        id=tag[1],
        name=tag[0],
        func="test.fail",
        started=timezone.now(),
        stopped=timezone.now(),
        success=False,
    )
    tag = uuid()
    t = Task.objects.create(
        id=tag[1],
        name=tag[0],
        func="test.success",
        started=timezone.now(),
        stopped=timezone.now(),
        success=True,
    )
    q = OrmQ.objects.create(
        key="test",
        payload=SignedPackage.dumps({
            "id": 1,
            "func": "test",
            "name": "test"
        }),
    )
    admin_urls = (
        # schedule
        reverse("admin:django_q_schedule_changelist"),
        reverse("admin:django_q_schedule_add"),
        reverse("admin:django_q_schedule_change", args=(s.id, )),
        reverse("admin:django_q_schedule_history", args=(s.id, )),
        reverse("admin:django_q_schedule_delete", args=(s.id, )),
        # success
        reverse("admin:django_q_success_changelist"),
        reverse("admin:django_q_success_change", args=(t.id, )),
        reverse("admin:django_q_success_history", args=(t.id, )),
        reverse("admin:django_q_success_delete", args=(t.id, )),
        # failure
        reverse("admin:django_q_failure_changelist"),
        reverse("admin:django_q_failure_change", args=(f.id, )),
        reverse("admin:django_q_failure_history", args=(f.id, )),
        reverse("admin:django_q_failure_delete", args=(f.id, )),
        # orm queue
        reverse("admin:django_q_ormq_changelist"),
        reverse("admin:django_q_ormq_change", args=(q.id, )),
        reverse("admin:django_q_ormq_history", args=(q.id, )),
        reverse("admin:django_q_ormq_delete", args=(q.id, )),
    )
    for url in admin_urls:
        response = admin_client.get(url)
        assert response.status_code == 200

    # resubmit the failure
    url = reverse("admin:django_q_failure_changelist")
    data = {"action": "retry_failed", "_selected_action": [f.pk]}
    response = admin_client.post(url, data)
    assert response.status_code == 302
    assert Failure.objects.filter(name=f.id).exists() is False
    # change q
    url = reverse("admin:django_q_ormq_change", args=(q.id, ))
    data = {
        "key": "default",
        "payload": "test",
        "lock_0": "2015-09-17",
        "lock_1": "14:31:51",
        "_save": "Save",
    }
    response = admin_client.post(url, data)
    assert response.status_code == 302
    # delete q
    url = reverse("admin:django_q_ormq_delete", args=(q.id, ))
    data = {"post": "yes"}
    response = admin_client.post(url, data)
    assert response.status_code == 302
Example #33
 def save(self):
     try:
         self.broker.set_stat(self.key, SignedPackage.dumps(self, True), 3)
     except Exception as e:
         logger.error(e)
Example #34
def test_admin_views(admin_client, monkeypatch):
    monkeypatch.setattr(Conf, 'ORM', 'default')
    s = schedule('schedule.test')
    tag = uuid()
    f = Task.objects.create(
        id=tag[1],
        name=tag[0],
        func='test.fail',
        started=timezone.now(),
        stopped=timezone.now(),
        success=False)
    tag = uuid()
    t = Task.objects.create(
        id=tag[1],
        name=tag[0],
        func='test.success',
        started=timezone.now(),
        stopped=timezone.now(),
        success=True)
    q = OrmQ.objects.create(
        key='test',
        payload=SignedPackage.dumps({'id': 1, 'func': 'test', 'name': 'test'}))
    admin_urls = (
        # schedule
        reverse('admin:django_q_schedule_changelist'),
        reverse('admin:django_q_schedule_add'),
        reverse('admin:django_q_schedule_change', args=(s.id,)),
        reverse('admin:django_q_schedule_history', args=(s.id,)),
        reverse('admin:django_q_schedule_delete', args=(s.id,)),
        # success
        reverse('admin:django_q_success_changelist'),
        reverse('admin:django_q_success_change', args=(t.id,)),
        reverse('admin:django_q_success_history', args=(t.id,)),
        reverse('admin:django_q_success_delete', args=(t.id,)),
        # failure
        reverse('admin:django_q_failure_changelist'),
        reverse('admin:django_q_failure_change', args=(f.id,)),
        reverse('admin:django_q_failure_history', args=(f.id,)),
        reverse('admin:django_q_failure_delete', args=(f.id,)),
        # orm queue
        reverse('admin:django_q_ormq_changelist'),
        reverse('admin:django_q_ormq_change', args=(q.id,)),
        reverse('admin:django_q_ormq_history', args=(q.id,)),
        reverse('admin:django_q_ormq_delete', args=(q.id,)),

    )
    for url in admin_urls:
        response = admin_client.get(url)
        assert response.status_code == 200

    # resubmit the failure
    url = reverse('admin:django_q_failure_changelist')
    data = {'action': 'retry_failed',
            '_selected_action': [f.pk]}
    response = admin_client.post(url, data)
    assert response.status_code == 302
    assert Failure.objects.filter(name=f.id).exists() is False
    # change q
    url = reverse('admin:django_q_ormq_change', args=(q.id,))
    data = {'key': 'default', 'payload': 'test', 'lock_0': '2015-09-17', 'lock_1': '14:31:51', '_save': 'Save'}
    response = admin_client.post(url, data)
    assert response.status_code == 302
    # delete q
    url = reverse('admin:django_q_ormq_delete', args=(q.id,))
    data = {'post': 'yes'}
    response = admin_client.post(url, data)
    assert response.status_code == 302