Example #1
def fetch_group(group_id,
                failures=True,
                wait=0,
                count=None,
                cached=Conf.CACHED):
    """
    Return a list of Tasks for a task group.

    :param str group_id: the group id
    :param bool failures: set to False to exclude failures
    :param int wait: number of milliseconds to wait for the tasks
    :param int count: block until there are this many tasks in the group
    :param bool cached: run this against the cache backend
    :return: list of Tasks
    """
    if cached:
        return fetch_group_cached(group_id, failures, wait, count)
    start = time()
    if count:
        while True:
            if count_group(group_id) == count or wait and (
                    time() - start) * 1000 >= wait >= 0:
                break
            sleep(0.01)
    while True:
        r = Task.get_task_group(group_id, failures)
        if r:
            return r
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
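
A minimal usage sketch (assuming django-q >= 1.0 naming, a running cluster, and an illustrative group name; "math.factorial" stands in for any importable task function):

from django_q.tasks import async_task, fetch_group

# queue three tasks under one group label
for n in (1, 2, 3):
    async_task("math.factorial", n, group="demo-group")

# block until all three Tasks are stored, or until 5 seconds have passed;
# returns None if the wait expires before any task in the group is found
tasks = fetch_group("demo-group", count=3, wait=5000)
if tasks:
    print([t.result for t in tasks])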
Example #2
def fetch_group_cached(group_id, failures=True, wait=0, count=None, broker=None):
    """
    Return a list of Tasks for a task group in the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time.time()
    if count:
        while True:
            if count_group_cached(group_id) == count or wait and (time.time() - start) * 1000 >= wait >= 0:
                break
            time.sleep(0.01)
    while True:
        group_list = broker.cache.get('{}:{}:keys'.format(broker.list_key, group_id))
        if group_list:
            task_list = []
            for task_key in group_list:
                task = signing.SignedPackage.loads(broker.cache.get(task_key))
                if task['success'] or failures:
                    t = Task(id=task['id'],
                             name=task['name'],
                             func=task['func'],
                             hook=task.get('hook'),
                             args=task['args'],
                             kwargs=task['kwargs'],
                             started=task['started'],
                             stopped=task['stopped'],
                             result=task['result'],
                             group=task.get('group'),
                             success=task['success'])
                    task_list.append(t)
            return task_list
        if (time.time() - start) * 1000 >= wait >= 0:
            break
        time.sleep(0.01)
Example #3
def fetch_cached(task_id, wait=0, broker=None):
    """
    Return the processed task from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    while True:
        r = broker.cache.get(f"{broker.list_key}:{task_id}")
        if r:
            task = SignedPackage.loads(r)
            return Task(
                id=task["id"],
                name=task["name"],
                func=task["func"],
                hook=task.get("hook"),
                args=task["args"],
                kwargs=task["kwargs"],
                started=task["started"],
                stopped=task["stopped"],
                result=task["result"],
                success=task["success"],
            )
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
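
fetch_cached is normally reached indirectly, via fetch(task_id, wait, cached=True); a minimal sketch, assuming the cluster is configured with Django's cache framework as the result backend and "math.factorial" as an illustrative task:

from django_q.tasks import async_task, fetch

task_id = async_task("math.factorial", 5, cached=True)  # result goes to the cache
task = fetch(task_id, wait=1000, cached=True)           # polls the cache backend
if task:
    print(task.result)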
Example #4
def result_group(group_id, failures=False, wait=0, count=None, cached=Conf.CACHED):
    """
    Return a list of results for a task group.

    :param str group_id: the group id
    :param bool failures: set to True to include failures
    :param int wait: number of milliseconds to wait for results
    :param int count: block until there are this many results in the group
    :param bool cached: run this against the cache backend
    :return: list of results
    """
    if cached:
        return result_group_cached(group_id, failures, wait, count)
    start = time.time()
    if count:
        while True:
            if count_group(group_id) == count or wait and (time.time() - start) * 1000 >= wait >= 0:
                break
            time.sleep(0.01)
    while True:
        r = Task.get_result_group(group_id, failures)
        if r:
            return r
        if (time.time() - start) * 1000 >= wait >= 0:
            break
        time.sleep(0.01)
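
A usage sketch mirroring the docstring (group name illustrative; assumes the tasks were queued with group="demo-group" as in the sketch under Example #1):

from django_q.tasks import result_group

# block until three results are in, or until 5 seconds have passed
results = result_group("demo-group", count=3, wait=5000)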
Example #5
def fetch_cached(task_id, wait=0, broker=None):
    """
    Return the processed task from the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time.time()
    while True:
        r = broker.cache.get('{}:{}'.format(broker.list_key, task_id))
        if r:
            task = signing.SignedPackage.loads(r)
            t = Task(id=task['id'],
                     name=task['name'],
                     func=task['func'],
                     hook=task.get('hook'),
                     args=task['args'],
                     kwargs=task['kwargs'],
                     started=task['started'],
                     stopped=task['stopped'],
                     result=task['result'],
                     success=task['success'])
            return t
        if (time.time() - start) * 1000 >= wait >= 0:
            break
        time.sleep(0.01)
Example #6
    def startremoteimport(self, request):
        '''Download a channel's database from the main curation server, and then
        download its content.

        '''
        TASKTYPE = "remoteimport"

        if "channel_id" not in request.data:
            raise serializers.ValidationError(
                "The 'channel_id' field is required.")

        channel_id = request.data['channel_id']

        # ensure the requested channel_id can be found on the central server, otherwise error
        status = requests.head(
            get_content_database_file_url(channel_id)).status_code
        if status == 404:
            raise Http404(
                _("The requested channel does not exist on the content server."
                  ))

        task_id = async (_networkimport,
                         channel_id,
                         group=TASKTYPE,
                         progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id),
                                 task_type=TASKTYPE,
                                 task_id=task_id)

        return Response(resp)
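
Note: async here is django-q's original task-submission function; it was renamed async_task in django-q 1.0 because async became a reserved keyword in Python 3.7 (Example #33 below uses the new name). The space in "async (" is likely an automatic-reformatting artifact, not part of the original source.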
Example #7
def result_group(group_id, failures=False, wait=0, count=None, cached=Conf.CACHED):
    """
    Return a list of results for a task group.

    :param str group_id: the group id
    :param bool failures: set to True to include failures
    :param int wait: number of milliseconds to wait for results
    :param int count: block until there are this many results in the group
    :param bool cached: run this against the cache backend
    :return: list of results
    """
    if cached:
        return result_group_cached(group_id, failures, wait, count)
    start = time()
    if count:
        while True:
            if count_group(group_id) == count or wait and (time() - start) * 1000 >= wait >= 0:
                break
            sleep(0.01)
    while True:
        r = Task.get_result_group(group_id, failures)
        if r:
            return r
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #8
def result_group(group_id, failures=False):
    """
    Return a list of results for a task group.
    :param str group_id: the group id
    :param bool failures: set to True to include failures
    :return: list of results
    """
    return Task.get_result_group(group_id, failures)
Example #9
def delete_group(group_id, tasks=False):
    """
    :param str group_id: the group id
    :param bool tasks: If set to True this will also delete the group tasks.
    Otherwise just the group label is removed.
    :return:
    """
    return Task.delete_group(group_id, tasks)
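
A usage sketch (group id illustrative):

from django_q.tasks import delete_group

delete_group("demo-group")               # remove only the group label
delete_group("demo-group", tasks=True)   # also delete the tasks themselves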
Example #10
def count_group(group_id, failures=False):
    """
    :param str group_id: the group id
    :param bool failures: Returns failure count if True
    :return: the number of tasks/results in a group
    :rtype: int
    """
    return Task.get_group_count(group_id, failures)
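
A usage sketch (group id illustrative):

from django_q.tasks import count_group

total = count_group("demo-group")                  # tasks in the group
failed = count_group("demo-group", failures=True)  # failed tasks only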
Example #11
def fetch(task_id):
    """
    Returns the processed task
    :param task_id: the task name or uuid
    :type task_id: str or uuid
    :return: the full task object
    :rtype: Task
    """
    return Task.get_task(task_id)
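
A usage sketch (assuming django-q >= 1.0 naming; the task function is illustrative):

from django_q.tasks import async_task, fetch

task_id = async_task("math.factorial", 5)
task = fetch(task_id)  # None until the cluster has processed and saved it
if task:
    print(task.success, task.result)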
Example #12
def fetch_group(group_id, failures=True):
    """
    Return a list of Tasks for a task group.

    :param str group_id: the group id
    :param bool failures: set to False to exclude failures
    :return: list of Tasks
    """
    return Task.get_task_group(group_id, failures)
Example #13
def result(name):
    """
    Returns the result of the named task
    :type name: str or unicode
    :param name: the task name
    :return: the result object of this task
    :rtype: object or str
    """
    return Task.get_result(name)
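
A usage sketch; as the test in Example #33 shows, the same lookup also works with the uuid returned by the task-submission call:

from django_q.tasks import result

value = result("my-task-name")  # None until a result has been saved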
Example #14
def result(task_id):
    """
    Returns the result of the named task
    :type task_id: str or uuid
    :param task_id: the task name or uuid
    :return: the result object of this task
    :rtype: object
    """
    return Task.get_result(task_id)
Example #15
    def cleartask(self, request):
        '''
        Clears a task with its task id given in the task_id parameter.
        '''

        if 'task_id' not in request.data:
            raise serializers.ValidationError("The 'task_id' field is required.")

        task_id = request.data['task_id']

        # Attempt to kill running task.
        Task.get_task(task_id).kill_running_task()

        # Tasks in the ORM queue are stored as signed packages, so we must
        # unpack each one to learn its real task_id before we can delete it.
        for taskitem in OrmQ.objects.all():
            if taskitem.task()["id"] == task_id:
                taskitem.delete()

        Task.objects.filter(pk=task_id).delete()

        return Response({})
Example #16
def count_group(group_id, failures=False, cached=Conf.CACHED):
    """
    Count the results in a group.

    :param str group_id: the group id
    :param bool failures: Returns failure count if True
    :param bool cached: run this against the cache backend
    :return: the number of tasks/results in a group
    :rtype: int
    """
    if cached:
        return count_group_cached(group_id, failures)
    return Task.get_group_count(group_id, failures)
Example #17
def delete_group(group_id, tasks=False, cached=Conf.CACHED):
    """
    Delete a group.

    :param str group_id: the group id
    :param bool tasks: If set to True this will also delete the group tasks.
    Otherwise just the group label is removed.
    :param bool cached: run this against the cache backend
    :return:
    """
    if cached:
        return delete_group_cached(group_id)
    return Task.delete_group(group_id, tasks)
Example #18
def delete_group(group_id, tasks=False, cached=Conf.CACHED):
    """
    Delete a group.

    :param str group_id: the group id
    :param bool tasks: If set to True this will also delete the group tasks.
    Otherwise just the group label is removed.
    :param bool cached: run this against the cache backend
    :return:
    """
    if cached:
        return delete_group_cached(group_id)
    return Task.delete_group(group_id, tasks)
Example #19
def count_group(group_id, failures=False, cached=Conf.CACHED):
    """
    Count the results in a group.

    :param str group_id: the group id
    :param bool failures: Returns failure count if True
    :param bool cached: run this against the cache backend
    :return: the number of tasks/results in a group
    :rtype: int
    """
    if cached:
        return count_group_cached(group_id, failures)
    return Task.get_group_count(group_id, failures)
Example #20
    def cleartask(self, request):
        '''
        Clears a task with its task id given in the task_id parameter.
        '''

        if 'task_id' not in request.data:
            raise serializers.ValidationError(
                "The 'task_id' field is required.")

        task_id = request.data['task_id']

        # Attempt to kill running task.
        Task.get_task(task_id).kill_running_task()

        # Tasks in the ORM queue are stored as signed packages, so we must
        # unpack each one to learn its real task_id before we can delete it.
        for taskitem in OrmQ.objects.all():
            if taskitem.task()["id"] == task_id:
                taskitem.delete()

        Task.objects.filter(pk=task_id).delete()

        return Response({})
Example #21
    def startlocalexport(self, request):
        '''
        Export a channel to a local drive, and copy content to the drive.

        '''
        TASKTYPE = "localexport"

        if "drive_id" not in request.data:
            raise serializers.ValidationError("The 'drive_id' field is required.")

        task_id = async(_localexport, request.data['drive_id'], group=TASKTYPE, progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id), task_type=TASKTYPE, task_id=task_id)

        return Response(resp)
Example #22
    def startremoteimport(self, request):
        '''Download a channel's database from the main curation server, and then
        download its content.

        '''
        TASKTYPE = "remoteimport"

        if "channel_id" not in request.data:
            raise serializers.ValidationError("The 'channel_id' field is required.")

        task_id = async(_networkimport, request.data['channel_id'], group=TASKTYPE, progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id), task_type=TASKTYPE, task_id=task_id)

        return Response(resp)
Example #23
    def startlocalimport(self, request):
        '''
        Import a channel from a local drive, and copy content to the local machine.

        '''
        # Importing django/running setup because Windows...
        TASKTYPE = "localimport"

        if "drive_id" not in request.data:
            raise serializers.ValidationError("The 'drive_id' field is required.")

        task_id = make_async_call(_localimport, request.data['drive_id'], group=TASKTYPE, progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id), task_type=TASKTYPE, task_id=task_id)

        return Response(resp)
Example #24
def fetch_group_cached(group_id,
                       failures=True,
                       wait=0,
                       count=None,
                       broker=None):
    """
    Return a list of Tasks for a task group in the cache backend
    """
    if not broker:
        broker = get_broker()
    start = time()
    if count:
        while True:
            if (count_group_cached(group_id) == count
                    or wait and (time() - start) * 1000 >= wait >= 0):
                break
            sleep(0.01)
    while True:
        group_list = broker.cache.get(f"{broker.list_key}:{group_id}:keys")
        if group_list:
            task_list = []
            for task_key in group_list:
                task = SignedPackage.loads(broker.cache.get(task_key))
                if task["success"] or failures:
                    t = Task(
                        id=task["id"],
                        name=task["name"],
                        func=task["func"],
                        hook=task.get("hook"),
                        args=task["args"],
                        kwargs=task["kwargs"],
                        started=task["started"],
                        stopped=task["stopped"],
                        result=task["result"],
                        group=task.get("group"),
                        success=task["success"],
                    )
                    task_list.append(t)
            return task_list
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #25
def result(task_id, wait=0, cached=Conf.CACHED):
    """
    Return the result of the named task.

    :type task_id: str or uuid
    :param task_id: the task name or uuid
    :type wait: int
    :param wait: number of milliseconds to wait for a result
    :param bool cached: run this against the cache backend
    :return: the result object of this task
    :rtype: object
    """
    if cached:
        return result_cached(task_id, wait)
    start = time()
    while True:
        r = Task.get_result(task_id)
        if r:
            return r
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #26
def fetch(task_id, wait=0, cached=Conf.CACHED):
    """
    Return the processed task.

    :param task_id: the task name or uuid
    :type task_id: str or uuid
    :param wait: the number of milliseconds to wait for a result
    :type wait: int
    :param bool cached: run this against the cache backend
    :return: the full task object
    :rtype: Task
    """
    if cached:
        return fetch_cached(task_id, wait)
    start = time()
    while True:
        t = Task.get_task(task_id)
        if t:
            return t
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #27
    def startlocalexport(self, request):
        '''
        Export a channel to a local drive, and copy content to the drive.

        '''
        TASKTYPE = "localexport"

        if "drive_id" not in request.data:
            raise serializers.ValidationError(
                "The 'drive_id' field is required.")

        task_id = async (_localexport,
                         request.data['drive_id'],
                         group=TASKTYPE,
                         progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id),
                                 task_type=TASKTYPE,
                                 task_id=task_id)

        return Response(resp)
Example #28
def result(task_id, wait=0, cached=Conf.CACHED):
    """
    Return the result of the named task.

    :type task_id: str or uuid
    :param task_id: the task name or uuid
    :type wait: int
    :param wait: number of milliseconds to wait for a result
    :param bool cached: run this against the cache backend
    :return: the result object of this task
    :rtype: object
    """
    if cached:
        return result_cached(task_id, wait)
    start = time.time()
    while True:
        r = Task.get_result(task_id)
        if r:
            return r
        if (time.time() - start) * 1000 >= wait >= 0:
            break
        time.sleep(0.01)
Example #29
def fetch(task_id, wait=0, cached=Conf.CACHED):
    """
    Return the processed task.

    :param task_id: the task name or uuid
    :type task_id: str or uuid
    :param wait: the number of milliseconds to wait for a result
    :type wait: int
    :param bool cached: run this against the cache backend
    :return: the full task object
    :rtype: Task
    """
    if cached:
        return fetch_cached(task_id, wait)
    start = time.time()
    while True:
        t = Task.get_task(task_id)
        if t:
            return t
        if (time.time() - start) * 1000 >= wait >= 0:
            break
        time.sleep(0.01)
Example #30
    def startlocalimport(self, request):
        '''
        Import a channel from a local drive, and copy content to the local machine.

        '''
        # Importing django/running setup because Windows...
        TASKTYPE = "localimport"

        if "drive_id" not in request.data:
            raise serializers.ValidationError(
                "The 'drive_id' field is required.")

        task_id = make_async_call(_localimport,
                                  request.data['drive_id'],
                                  group=TASKTYPE,
                                  progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id),
                                 task_type=TASKTYPE,
                                 task_id=task_id)

        return Response(resp)
Example #31
    def startremoteimport(self, request):
        '''Download a channel's database from the main curation server, and then
        download its content.

        '''
        TASKTYPE = "remoteimport"

        if "channel_id" not in request.data:
            raise serializers.ValidationError("The 'channel_id' field is required.")

        channel_id = request.data['channel_id']

        # ensure the requested channel_id can be found on the central server, otherwise error
        status = requests.head(get_content_database_file_url(channel_id)).status_code
        if status == 404:
            raise Http404(_("The requested channel does not exist on the content server."))

        task_id = async(_networkimport, channel_id, group=TASKTYPE, progress_updates=True)

        # attempt to get the created Task, otherwise return pending status
        resp = _task_to_response(Task.get_task(task_id), task_type=TASKTYPE, task_id=task_id)

        return Response(resp)
Example #32
def fetch_group(group_id, failures=True, wait=0, count=None, cached=Conf.CACHED):
    """
    Return a list of Tasks for a task group.

    :param str group_id: the group id
    :param bool failures: set to False to exclude failures
    :param int wait: number of milliseconds to wait for the tasks
    :param int count: block until there are this many tasks in the group
    :param bool cached: run this against the cache backend
    :return: list of Tasks
    """
    if cached:
        return fetch_group_cached(group_id, failures, wait, count)
    start = time()
    if count:
        while True:
            if count_group(group_id) == count or wait and (time() - start) * 1000 >= wait >= 0:
                break
            sleep(0.01)
    while True:
        r = Task.get_task_group(group_id, failures)
        if r:
            return r
        if (time() - start) * 1000 >= wait >= 0:
            break
        sleep(0.01)
Example #33
def test_enqueue(broker, admin_user):
    broker.list_key = "cluster_test:q"
    broker.delete_queue()
    a = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    b = async_task(
        "django_q.tests.tasks.count_letters2",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    # unknown argument
    c = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        "oneargumentoomany",
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # unknown function
    d = async_task(
        "django_q.tests.tasks.does_not_exist",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # function without result
    e = async_task("django_q.tests.tasks.countdown", 100000, broker=broker)
    # function as instance
    f = async_task(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async_task("django_q.tests.tasks.get_task_name",
                   Task(name="John"),
                   broker=broker)
    # args,kwargs, group and broken hook
    h = async_task(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        hook="fail.me",
        broker=broker,
    )
    # args unpickle test
    j = async_task("django_q.tests.tasks.get_user_id",
                   admin_user,
                   broker=broker,
                   group="test_j")
    # q_options and save opt_out test
    k = async_task(
        "django_q.tests.tasks.get_user_id",
        admin_user,
        q_options={
            "broker": broker,
            "group": "test_k",
            "save": False,
            "timeout": 90
        },
    )
    # test unicode
    assert Task(name="Amalia").__str__() == "Amalia"
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for _ in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put("STOP")
    # test wait timeout
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group("test_j", wait=10) is None
    assert result_group("test_j", count=2, wait=10) is None
    assert fetch_group("test_j", wait=10) is None
    assert fetch_group("test_j", count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value("f", -1))
    assert result_queue.qsize() == task_count
    result_queue.put("STOP")
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == "John"
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group("test_j")[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group("test_j", failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group("test_j")[0].id == [result_j][0].id
    assert fetch_group("test_j", failures=False)[0].id == [result_j][0].id
    assert count_group("test_j") == 1
    assert result_j.group_count() == 1
    assert count_group("test_j", failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group("test_j") == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group("test_j", tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
Example #34
        # Import here to avoid circular imports.
        from django_q.tasks import async
        from django_q.models import Task
        data = json.loads(request.body.decode('utf-8'))

        if "id" not in data:
            raise serializers.ValidationError("The 'id' field is required.")

        channel_id = data['id']

        task_id = async(_importchannel, channel_id, group=TASKTYPE, progress_updates=True)

        # id status metadata

        # wait for the task instance to be saved first before continuing
        taskobj = Task.get_task(task_id)
        if taskobj:             # the task object has been saved!
            resp = _task_to_response(taskobj)
        else:                   # task object hasn't been saved yet, fake the response for now
            resp = {
                "type": TASKTYPE,
                "status": "PENDING",
                "percentage": 0,
                "metadata": {},
                "id": task_id,
            }

        return Response(resp)

    @list_route(methods=['post'])
    def startlocalimportchannel(self, request):
Example #35
    def retrieve(self, request, pk=None):
        from django_q.models import Task

        task = _task_to_response(Task.get_task(pk))
        return Response(task)
Example #36
    def retrieve(self, request, pk=None):
        task = _task_to_response(Task.get_task(pk))
        return Response(task)
Example #37
def test_async(broker, admin_user):
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    a = async ('django_q.tests.tasks.count_letters',
               DEFAULT_WORDLIST,
               hook='django_q.tests.test_cluster.assert_result',
               broker=broker)
    b = async ('django_q.tests.tasks.count_letters2',
               WordClass(),
               hook='django_q.tests.test_cluster.assert_result',
               broker=broker)
    # unknown argument
    c = async ('django_q.tests.tasks.count_letters',
               DEFAULT_WORDLIST,
               'oneargumentoomany',
               hook='django_q.tests.test_cluster.assert_bad_result',
               broker=broker)
    # unknown function
    d = async ('django_q.tests.tasks.does_not_exist',
               WordClass(),
               hook='django_q.tests.test_cluster.assert_bad_result',
               broker=broker)
    # function without result
    e = async ('django_q.tests.tasks.countdown', 100000, broker=broker)
    # function as instance
    f = async (multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async ('django_q.tests.tasks.get_task_name',
               Task(name='John'),
               broker=broker)
    # args,kwargs, group and broken hook
    h = async ('django_q.tests.tasks.word_multiply',
               2,
               word='django',
               hook='fail.me',
               broker=broker)
    # args unpickle test
    j = async ('django_q.tests.tasks.get_user_id',
               admin_user,
               broker=broker,
               group='test_j')
    # q_options and save opt_out test
    k = async ('django_q.tests.tasks.get_user_id',
               admin_user,
               q_options={
                   'broker': broker,
                   'group': 'test_k',
                   'save': False,
                   'timeout': 90
               })
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # test wait timeout
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group('test_j', wait=10) is None
    assert result_group('test_j', count=2, wait=10) is None
    assert fetch_group('test_j', wait=10) is None
    assert fetch_group('test_j', count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j')[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group('test_j', failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert result_j.group_count() == 1
    assert count_group('test_j', failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group('test_j') == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group('test_j', tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
Example #38
        # Import here to avoid circular imports.
        from django_q.tasks import async
        from django_q.models import Task
        data = json.loads(request.body.decode('utf-8'))

        if "id" not in data:
            raise serializers.ValidationError("The 'id' field is required.")

        channel_id = data['id']

        task_id = async(_importchannel, channel_id, group=TASKTYPE, progress_updates=True)

        # id status metadata

        # wait for the task instance to be saved first before continuing
        taskobj = Task.get_task(task_id)
        if taskobj:             # the task object has been saved!
            resp = _task_to_response(taskobj)
        else:                   # task object hasn't been saved yet, fake the response for now
            resp = {
                "type": TASKTYPE,
                "status": "PENDING",
                "percentage": 0,
                "metadata": {},
                "id": task_id,
            }

        return Response(resp)

    @list_route(methods=['post'])
    def startlocalimportchannel(self, request):
Example #39
    def retrieve(self, request, pk=None):
        task = _task_to_response(Task.get_task(pk))
        return Response(task)
Example #40
    def retrieve(self, request, pk=None):
        from django_q.models import Task

        task = _task_to_response(Task.get_task(pk))
        return Response(task)
Example #41
from django.contrib.auth.models import Group
# DjangoUser, mdls, strftime and token_urlsafe are used below but were not
# imported in this excerpt; the imports here are inferred from that usage.
from django.contrib.auth.models import User as DjangoUser
from django.contrib.postgres.fields import JSONField
from django.db import models as mdls
from django.db.models.signals import post_save
from django.dispatch import receiver
from home.choices import *
from secrets import token_urlsafe
from time import strftime
from werkzeug.utils import secure_filename  # the top-level alias was removed in Werkzeug 1.0
from django_q.models import Task


def get_name(self):
    return "%s %s" % (self.first_name, self.last_name)


DjangoUser.add_to_class("__str__", get_name)

Task.add_to_class("minutes", "minutes")


def upload_to(instance, filename):
    return "/".join([
        secure_filename(type(instance).__name__),
        strftime("%Y/%m/%d"),
        str(instance.id or 0),  # str() needed: id may be an int, and join() only accepts strings
        token_urlsafe(8),
        secure_filename(filename),
    ])


class UserProfile(mdls.Model):
    user = mdls.OneToOneField(DjangoUser, on_delete=mdls.CASCADE)
    change_pwd = mdls.BooleanField(default=False)
Example #42
def test_async(r):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async ('django_q.tests.tasks.count_letters',
               DEFAULT_WORDLIST,
               hook='django_q.tests.test_cluster.assert_result',
               list_key=list_key)
    b = async ('django_q.tests.tasks.count_letters2',
               WordClass(),
               hook='django_q.tests.test_cluster.assert_result',
               list_key=list_key)
    # unknown argument
    c = async ('django_q.tests.tasks.count_letters',
               DEFAULT_WORDLIST,
               'oneargumentoomany',
               hook='django_q.tests.test_cluster.assert_bad_result',
               list_key=list_key)
    # unknown function
    d = async ('django_q.tests.tasks.does_not_exist',
               WordClass(),
               hook='django_q.tests.test_cluster.assert_bad_result',
               list_key=list_key)
    # function without result
    e = async ('django_q.tests.tasks.countdown', 100000, list_key=list_key)
    # function as instance
    f = async (multiply, 753, 2, hook=assert_result, list_key=list_key)
    # model as argument
    g = async ('django_q.tests.tasks.get_task_name',
               Task(name='John'),
               list_key=list_key)
    # args and kwargs and broken hook
    h = async ('django_q.tests.tasks.word_multiply',
               2,
               word='django',
               hook='fail.me',
               list_key=list_key,
               redis=r)
    # check if everything has a task name
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    # run the cluster to execute the tasks
    task_count = 8
    assert r.llen(list_key) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert r.llen(list_key) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    r.delete(list_key)