Example #1
def test_recycle(broker, monkeypatch):
    # set up the Sentinel
    broker.list_key = 'test_recycle_test:q'
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    start_event = Event()
    stop_event = Event()
    # override settings
    monkeypatch.setattr(Conf, 'RECYCLE', 2)
    monkeypatch.setattr(Conf, 'WORKERS', 1)
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, broker=broker)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    task_queue = Queue()
    result_queue = Queue()
    # push two tasks
    pusher(task_queue, stop_event, broker=broker)
    pusher(task_queue, stop_event, broker=broker)
    # worker should exit on recycle
    worker(task_queue, result_queue, Value('f', -1))
    # check if the work has been done
    assert result_queue.qsize() == 2
    # save_limit test
    monkeypatch.setattr(Conf, 'SAVE_LIMIT', 1)
    result_queue.put('STOP')
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    broker.delete_queue()
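All of the examples on this page drive the same inline-cluster pipeline by hand: pusher moves a packed task from the broker onto a task queue, worker executes it, and monitor saves the result. A minimal, self-contained sketch of that pattern, assuming a configured Django settings module and the default broker:

from multiprocessing import Event, Queue, Value

from django_q.brokers import get_broker
from django_q.cluster import monitor, pusher, worker
from django_q.tasks import async_task

broker = get_broker()
async_task('math.copysign', 1, -1, broker=broker)  # queue a single task
task_queue, result_queue, stop_event = Queue(), Queue(), Event()
stop_event.set()  # a set event makes pusher run a single pass
pusher(task_queue, stop_event, broker=broker)  # broker -> task_queue
task_queue.put('STOP')  # sentinel: worker exits after the real task
worker(task_queue, result_queue, Value('f', -1))  # execute the task
result_queue.put('STOP')  # sentinel: monitor exits after saving
monitor(result_queue)  # persist the result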
Example #2
def test_bad_secret(broker, monkeypatch):
    broker.list_key = "test_bad_secret:q"
    async_task("math.copysign", 1, -1, broker=broker)
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    cluster_id = uuidlib.uuid4()
    s = Sentinel(stop_event,
                 start_event,
                 cluster_id=cluster_id,
                 broker=broker,
                 start=False)
    Stat(s).save()
    # change the SECRET
    monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS")
    stat = Stat.get_all()
    assert len(stat) == 0
    assert Stat.get(pid=s.parent_pid, cluster_id=cluster_id) is None
    task_queue = Queue()
    pusher(task_queue, stop_event, broker=broker)
    result_queue = Queue()
    task_queue.put("STOP")
    worker(
        task_queue,
        result_queue,
        Value("f", -1),
    )
    assert result_queue.qsize() == 0
    broker.delete_queue()
Example #3
def test_cluster(r):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    task = async('django_q.tests.tasks.count_letters',
                 DEFAULT_WORDLIST,
                 list_key=list_key)
    assert r.llen(list_key) == 1
    task_queue = Queue()
    assert task_queue.qsize() == 0
    result_queue = Queue()
    assert result_queue.qsize() == 0
    event = Event()
    event.set()
    # Test push
    pusher(task_queue, event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    # Test work
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('b', -1))
    assert task_queue.qsize() == 0
    assert result_queue.qsize() == 1
    # Test monitor
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # check result
    assert result(task) == 1506
    r.delete(list_key)
Example #4
    def test_post_execute_signal(self, broker):
        broker.list_key = "post_execute_test:q"
        broker.delete_queue()
        self.signal_was_called: bool = False
        self.task: Optional[dict] = None
        self.func = None

        def handler(sender, task, **kwargs):
            self.signal_was_called = True
            self.task = task

        post_execute.connect(handler)
        task_id = async_task("math.copysign", 1, -1, broker=broker)
        task_queue = Queue()
        result_queue = Queue()
        event = Event()
        event.set()
        pusher(task_queue, event, broker=broker)
        task_queue.put("STOP")
        worker(task_queue, result_queue, Value("f", -1))
        result_queue.put("STOP")
        monitor(result_queue, broker)
        broker.delete_queue()
        assert self.signal_was_called is True
        assert self.task.get("id") == task_id
        assert self.task.get("result") == -1
        post_execute.disconnect(handler)
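Outside of a test, the same signal can be wired up once in application code; a small sketch along the lines of the handler above, assuming django_q.signals.post_execute:

from django_q.signals import post_execute

def log_finished_task(sender, task, **kwargs):
    # task is the result dict that monitor() persists; the test above
    # reads its 'id' and 'result' keys.
    print(task['id'], task.get('result'))

post_execute.connect(log_finished_task)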
Example #5
def test_max_rss(broker, monkeypatch):
    # set up the Sentinel
    broker.list_key = "test_max_rss_test:q"
    async_task("django_q.tests.tasks.multiply", 2, 2, broker=broker)
    start_event = Event()
    stop_event = Event()
    cluster_id = uuidlib.uuid4()
    # override settings
    monkeypatch.setattr(Conf, "MAX_RSS", 40000)
    monkeypatch.setattr(Conf, "WORKERS", 1)
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, cluster_id=cluster_id, broker=broker)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async_task("django_q.tests.tasks.multiply", 2, 2, broker=broker)
    task_queue = Queue()
    result_queue = Queue()
    # push the task
    pusher(task_queue, stop_event, broker=broker)
    # worker should exit when it exceeds max rss
    worker(task_queue, result_queue, Value("f", -1))
    # check if the work has been done
    assert result_queue.qsize() == 1
    # save_limit test
    monkeypatch.setattr(Conf, "SAVE_LIMIT", 1)
    result_queue.put("STOP")
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    broker.delete_queue()
Example #6
def test_cluster(r):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    task = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, list_key=list_key)
    assert queue_size(list_key=list_key, r=r) == 1
    task_queue = Queue()
    assert task_queue.qsize() == 0
    result_queue = Queue()
    assert result_queue.qsize() == 0
    event = Event()
    event.set()
    # Test push
    pusher(task_queue, event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert queue_size(list_key=list_key, r=r) == 0
    # Test work
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    assert task_queue.qsize() == 0
    assert result_queue.qsize() == 1
    # Test monitor
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # check result
    assert result(task) == 1506
    r.delete(list_key)
Example #7
def test_simple_async_report_send(rf, admin_user):
    broker = get_broker()
    assert broker.queue_size() == 0

    request = rf.get("/")
    request.query_params = {}
    request.user = admin_user

    report = LeaseStatisticReport()
    response = report.get_response(request)
    assert response.data
    assert broker.queue_size() == 1

    # Run async task
    task_queue = Queue()
    result_queue = Queue()
    event = Event()
    event.set()
    pusher(task_queue, event, broker=broker)
    assert task_queue.qsize() == 1
    assert queue_size(broker=broker) == 0
    task_queue.put("STOP")
    worker(task_queue, result_queue, Value("f", -1))
    assert task_queue.qsize() == 0
    assert result_queue.qsize() == 1
    result_queue.put("STOP")
    monitor(result_queue)
    assert result_queue.qsize() == 0
    broker.delete_queue()

    # Test that the report file has been sent via email
    assert len(mail.outbox) == 1
    assert len(mail.outbox[0].attachments) == 1
Example #8
def test_cached(broker):
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    task = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    task_id = task['id']
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
Example #9
def test_recycle(r):
    # set up the Sentinel
    list_key = 'test_recycle_test:q'
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    start_event = Event()
    stop_event = Event()
    # override settings
    Conf.RECYCLE = 2
    Conf.WORKERS = 1
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, list_key=list_key)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    task_queue = Queue()
    result_queue = Queue()
    # push two tasks
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    # worker should exit on recycle
    worker(task_queue, result_queue, Value('f', -1))
    # check if the work has been done
    assert result_queue.qsize() == 2
    # save_limit test
    Conf.SAVE_LIMIT = 1
    result_queue.put('STOP')
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    r.delete(list_key)
Example #10
def test_recycle(broker, monkeypatch):
    # set up the Sentinel
    broker.list_key = 'test_recycle_test:q'
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    start_event = Event()
    stop_event = Event()
    # override settings
    monkeypatch.setattr(Conf, 'RECYCLE', 2)
    monkeypatch.setattr(Conf, 'WORKERS', 1)
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, broker=broker)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    async('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    task_queue = Queue()
    result_queue = Queue()
    # push two tasks
    pusher(task_queue, stop_event, broker=broker)
    pusher(task_queue, stop_event, broker=broker)
    # worker should exit on recycle
    worker(task_queue, result_queue, Value('f', -1))
    # check if the work has been done
    assert result_queue.qsize() == 2
    # save_limit test
    monkeypatch.setattr(Conf, 'SAVE_LIMIT', 1)
    result_queue.put('STOP')
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    broker.delete_queue()
Example #11
def test_cluster(broker):
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    task = async('django_q.tests.tasks.count_letters',
                 DEFAULT_WORDLIST,
                 broker=broker)
    assert broker.queue_size() == 1
    task_queue = Queue()
    assert task_queue.qsize() == 0
    result_queue = Queue()
    assert result_queue.qsize() == 0
    event = Event()
    event.set()
    # Test push
    pusher(task_queue, event, broker=broker)
    assert task_queue.qsize() == 1
    assert queue_size(broker=broker) == 0
    # Test work
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    assert task_queue.qsize() == 0
    assert result_queue.qsize() == 1
    # Test monitor
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # check result
    assert result(task) == 1506
    broker.delete_queue()
Example #12
def test_recycle(r):
    # set up the Sentinel
    list_key = 'test_recycle_test:q'
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    start_event = Event()
    stop_event = Event()
    # override settings
    Conf.RECYCLE = 2
    Conf.WORKERS = 1
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, list_key=list_key)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r)
    task_queue = Queue()
    result_queue = Queue()
    # push two tasks
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    # worker should exit on recycle
    worker(task_queue, result_queue, Value('f', -1))
    # check if the work has been done
    assert result_queue.qsize() == 2
    # save_limit test
    Conf.SAVE_LIMIT = 1
    result_queue.put('STOP')
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    r.delete(list_key)
Example #13
def test_cached(broker):
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    task_id = async('math.copysign', 1, -1, cached=True, broker=broker)
    async('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
Example #14
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    # Python 2.6 is unable to handle this import at the top of the file
    # because it creates a circular dependency between tasks and cluster
    from django_q.cluster import worker, monitor
    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    result_queue.put('STOP')
    monitor(result_queue)
    return task['id']
Example #15
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    result_queue.put('STOP')
    monitor(result_queue)
    task_queue.close()
    task_queue.join_thread()
    result_queue.close()
    result_queue.join_thread()
    return task['id']
Example #16
File: tasks.py Project: Koed00/django-q
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    result_queue.put('STOP')
    monitor(result_queue)
    task_queue.close()
    task_queue.join_thread()
    result_queue.close()
    result_queue.join_thread()
    return task['id']
Example #17
def test_scheduler(r):
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign',
                               1, -1,
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
    scheduler()
Example #18
def test_scheduler(r):
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign',
                               1,
                               -1,
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule.refresh_from_db()
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
Example #19
def _sync(pack):
    """Simulate a package travelling through the cluster."""
    from django_q.cluster import monitor, worker

    task_queue = Queue()
    result_queue = Queue()
    task = SignedPackage.loads(pack)
    task_queue.put(task)
    task_queue.put("STOP")
    worker(task_queue, result_queue, Value("f", -1))
    result_queue.put("STOP")
    monitor(result_queue)
    task_queue.close()
    task_queue.join_thread()
    result_queue.close()
    result_queue.join_thread()
    return task["id"]
Example #20
def test_bad_secret(r, monkeypatch):
    list_key = 'test_bad_secret'
    async('math.copysign', 1, -1, list_key=list_key)
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    s = Sentinel(stop_event, start_event, list_key=list_key, start=False)
    Stat(s).save()
    # change the SECRET
    monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS")
    stat = Stat.get_all(r)
    assert len(stat) == 0
    assert Stat.get(s.parent_pid, r) is None
    task_queue = Queue()
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    result_queue = Queue()
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1), )
    assert result_queue.qsize() == 0
    r.delete(list_key)
Example #21
def test_bad_secret(broker, monkeypatch):
    broker.list_key = 'test_bad_secret:q'
    async('math.copysign', 1, -1, broker=broker)
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    s = Sentinel(stop_event, start_event, broker=broker, start=False)
    Stat(s).save()
    # change the SECRET
    monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS")
    stat = Stat.get_all()
    assert len(stat) == 0
    assert Stat.get(s.parent_pid) is None
    task_queue = Queue()
    pusher(task_queue, stop_event, broker=broker)
    result_queue = Queue()
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1), )
    assert result_queue.qsize() == 0
    broker.delete_queue()
Example #22
def test_scheduler(broker):
    broker.list_key = 'scheduler_test:q'
    broker.delete_queue()
    schedule = create_schedule('math.copysign',
                               1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result'
                                    )
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result'
                                      )
    assert hasattr(always_schedule, 'pk') is True
    # Minute schedule
    minute_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.MINUTES,
                                      minutes=10)
    assert hasattr(minute_schedule, 'pk') is True
    # All other types
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(func='django_q.tests.tasks.word_multiply',
                            args='2',
                            kwargs='word="django"',
                            schedule_type=Schedule.DAILY
                            )
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up On
    Conf.CATCH_UP = True
    now = timezone.now()
    schedule = create_schedule('django_q.tests.tasks.word_multiply',
                               2,
                               word='catch_up',
                               schedule_type=Schedule.HOURLY,
                               next_run=timezone.now() - timedelta(hours=12),
                               repeats=-1
                               )
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run < now
    # Catch up off
    Conf.CATCH_UP = False
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run > now
    # Done
    broker.delete_queue()
Example #23
def test_scheduler(broker, monkeypatch):
    broker.list_key = "scheduler_test:q"
    broker.delete_queue()
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test math",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        repeats=1,
    )
    assert schedule.last_run() is None
    # check duplicate constraint
    with pytest.raises(IntegrityError):
        schedule = create_schedule(
            "math.copysign",
            1,
            -1,
            name="test math",
            hook="django_q.tests.tasks.result",
            schedule_type=Schedule.HOURLY,
            repeats=1,
        )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put("STOP")
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value("b", -1))
    assert result_queue.qsize() == 1
    result_queue.put("STOP")
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).shift(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.ONCE,
        repeats=-1,
        hook="django_q.tests.tasks.result",
    )
    assert hasattr(once_schedule, "pk") is True
    # negative repeats
    always_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.DAILY,
        repeats=-1,
        hook="django_q.tests.tasks.result",
    )
    assert hasattr(always_schedule, "pk") is True
    # Minute schedule
    minute_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.MINUTES,
        minutes=10,
    )
    assert hasattr(minute_schedule, "pk") is True
    # Cron schedule
    cron_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.CRON,
        cron="0 22 * * 1-5",
    )
    assert hasattr(cron_schedule, "pk") is True
    assert cron_schedule.full_clean() is None
    assert cron_schedule.__str__() == "django_q.tests.tasks.word_multiply"
    with pytest.raises(ValidationError):
        create_schedule(
            "django_q.tests.tasks.word_multiply",
            2,
            word="django",
            schedule_type=Schedule.CRON,
            cron="0 22 * * 1-12",
        )
    # All other types
    for t in Schedule.TYPE:
        if t[0] == Schedule.CRON:
            continue
        schedule = create_schedule(
            "django_q.tests.tasks.word_multiply",
            2,
            word="django",
            schedule_type=t[0],
            repeats=1,
            hook="django_q.tests.tasks.result",
        )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(
        func="django_q.tests.tasks.word_multiply",
        args="2",
        kwargs='word="django"',
        schedule_type=Schedule.DAILY,
    )
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up On
    monkeypatch.setattr(Conf, "CATCH_UP", True)
    now = timezone.now()
    schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="catch_up",
        schedule_type=Schedule.HOURLY,
        next_run=timezone.now() - timedelta(hours=12),
        repeats=-1,
    )
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run < now
    # Catch up off
    monkeypatch.setattr(Conf, "CATCH_UP", False)
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run > now
    # Done
    broker.delete_queue()

    monkeypatch.setattr(Conf, "PREFIX", "some_cluster_name")
    # create a schedule on another cluster
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test schedule on a another cluster",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        cluster="some_other_cluster_name",
        repeats=1,
    )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)

    # queue must be empty
    assert task_queue.qsize() == 0

    monkeypatch.setattr(Conf, "PREFIX", "default")
    # create a schedule on the same cluster
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test schedule with no cluster",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        cluster="default",
        repeats=1,
    )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)

    # queue must contain a task
    assert task_queue.qsize() == 1
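Distilled from the scheduler tests above, creating a schedule needs a single call; a minimal sketch, assuming create_schedule is django_q.tasks.schedule as in these tests (the schedule name is illustrative):

from django_q.models import Schedule
from django_q.tasks import schedule as create_schedule

create_schedule('math.copysign', 1, -1,
                name='hourly sign flip',  # illustrative name
                schedule_type=Schedule.HOURLY,
                repeats=-1)  # negative repeats mean the schedule never expires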
Example #24
def test_scheduler(r):
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign',
                               1,
                               -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result')
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result')
    assert hasattr(always_schedule, 'pk') is True
    # All other types
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(list_key=list_key)
    scheduler(list_key=list_key)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    r.delete(list_key)
Example #25
def test_async(broker, admin_user):
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    a = async('django_q.tests.tasks.count_letters',
              DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result',
              broker=broker)
    b = async('django_q.tests.tasks.count_letters2',
              WordClass(),
              hook='django_q.tests.test_cluster.assert_result',
              broker=broker)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters',
              DEFAULT_WORDLIST,
              'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result',
              broker=broker)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist',
              WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result',
              broker=broker)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, broker=broker)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name',
              Task(name='John'),
              broker=broker)
    # args, kwargs, group and broken hook
    h = async('django_q.tests.tasks.word_multiply',
              2,
              word='django',
              hook='fail.me',
              broker=broker)
    # args unpickle test
    j = async('django_q.tests.tasks.get_user_id',
              admin_user,
              broker=broker,
              group='test_j')
    # q_options and save opt_out test
    k = async('django_q.tests.tasks.get_user_id',
              admin_user,
              q_options={
                  'broker': broker,
                  'group': 'test_k',
                  'save': False,
                  'timeout': 90
              })
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # test wait timeout
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group('test_j', wait=10) is None
    assert result_group('test_j', count=2, wait=10) is None
    assert fetch_group('test_j', wait=10) is None
    assert fetch_group('test_j', count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j')[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group('test_j', failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert result_j.group_count() == 1
    assert count_group('test_j', failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group('test_j') == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group('test_j', tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
Example #26
def test_scheduler(r):
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign',
                               1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result'
                                    )
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result'
                                      )
    assert hasattr(always_schedule, 'pk') is True
    # All other types
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(list_key=list_key)
    scheduler(list_key=list_key)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    r.delete(list_key)
Example #27
def test_async(r):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    b = async('django_q.tests.tasks.count_letters2', WordClass(), hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(), hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'), list_key=list_key)
    # args and kwargs and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me', list_key=list_key, redis=r)
    # check if everything has a task name
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    # run the cluster to execute the tasks
    task_count = 8
    assert r.llen(list_key) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert r.llen(list_key) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    r.delete(list_key)
Example #28
def test_enqueue(broker, admin_user):
    broker.list_key = "cluster_test:q"
    broker.delete_queue()
    a = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    b = async_task(
        "django_q.tests.tasks.count_letters2",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    # unknown argument
    c = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        "oneargumentoomany",
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # unknown function
    d = async_task(
        "django_q.tests.tasks.does_not_exist",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # function without result
    e = async_task("django_q.tests.tasks.countdown", 100000, broker=broker)
    # function as instance
    f = async_task(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async_task("django_q.tests.tasks.get_task_name",
                   Task(name="John"),
                   broker=broker)
    # args,kwargs, group and broken hook
    h = async_task(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        hook="fail.me",
        broker=broker,
    )
    # args unpickle test
    j = async_task("django_q.tests.tasks.get_user_id",
                   admin_user,
                   broker=broker,
                   group="test_j")
    # q_options and save opt_out test
    k = async_task(
        "django_q.tests.tasks.get_user_id",
        admin_user,
        q_options={
            "broker": broker,
            "group": "test_k",
            "save": False,
            "timeout": 90
        },
    )
    # test unicode
    assert Task(name="Amalia").__str__() == "Amalia"
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for _ in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put("STOP")
    # test wait timeout
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group("test_j", wait=10) is None
    assert result_group("test_j", count=2, wait=10) is None
    assert fetch_group("test_j", wait=10) is None
    assert fetch_group("test_j", count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value("f", -1))
    assert result_queue.qsize() == task_count
    result_queue.put("STOP")
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == "John"
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group("test_j")[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group("test_j", failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group("test_j")[0].id == [result_j][0].id
    assert fetch_group("test_j", failures=False)[0].id == [result_j][0].id
    assert count_group("test_j") == 1
    assert result_j.group_count() == 1
    assert count_group("test_j", failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group("test_j") == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group("test_j", tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
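The group calls exercised at the end of this test reduce to a small API; a condensed sketch, assuming the tasks have already been executed and saved (here via sync=True):

from django_q.tasks import async_task, count_group, delete_group, result_group

async_task('math.copysign', 1, -1, group='signs', sync=True)
async_task('math.copysign', 2, -1, group='signs', sync=True)
assert count_group('signs') == 2
assert sorted(result_group('signs')) == [-2.0, -1.0]
delete_group('signs')  # removes the group label; tasks=True would delete the tasks too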
Example #29
def test_scheduler(broker, monkeypatch):
    broker.list_key = 'scheduler_test:q'
    broker.delete_queue()
    schedule = create_schedule('math.copysign',
                               1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # check duplicate constraint
    with pytest.raises(IntegrityError):
        schedule = create_schedule('math.copysign',
                                   1, -1,
                                   name='test math',
                                   hook='django_q.tests.tasks.result',
                                   schedule_type=Schedule.HOURLY,
                                   repeats=1)
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).shift(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # ONCE schedule (deleted after it runs)
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result'
                                    )
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result'
                                      )
    assert hasattr(always_schedule, 'pk') is True
    # Minute schedule
    minute_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.MINUTES,
                                      minutes=10)
    assert hasattr(minute_schedule, 'pk') is True
    # Cron schedule
    cron_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.CRON,
                                    cron="0 22 * * 1-5")
    assert hasattr(cron_schedule, 'pk') is True
    assert cron_schedule.full_clean() is None
    assert cron_schedule.__str__() == 'django_q.tests.tasks.word_multiply'
    with pytest.raises(ValidationError):
        create_schedule('django_q.tests.tasks.word_multiply',
                        2,
                        word='django',
                        schedule_type=Schedule.CRON,
                        cron="0 22 * * 1-12")
    # All other types
    for t in Schedule.TYPE:
        if t[0] == Schedule.CRON:
            continue
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(func='django_q.tests.tasks.word_multiply',
                            args='2',
                            kwargs='word="django"',
                            schedule_type=Schedule.DAILY
                            )
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up on
    monkeypatch.setattr(Conf, 'CATCH_UP', True)
    now = timezone.now()
    schedule = create_schedule('django_q.tests.tasks.word_multiply',
                               2,
                               word='catch_up',
                               schedule_type=Schedule.HOURLY,
                               next_run=timezone.now() - timedelta(hours=12),
                               repeats=-1
                               )
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run < now
    # Catch up off
    monkeypatch.setattr(Conf, 'CATCH_UP', False)
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run > now
    # Done
    broker.delete_queue()
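
The ValidationError near the end of the scheduler example comes from the CRON field validation: "0 22 * * 1-12" puts 1-12 in the day-of-week slot, which only accepts values up to 6 (or 7). django-q presumably validates the expression with a croniter-compatible parser, so a quick way to sanity-check a cron string before creating the schedule is to feed it to croniter directly. A minimal sketch, assuming the croniter package is installed (it is not shown in the example above):

from datetime import datetime

from croniter import croniter

# preview the next run of the valid expression used above
it = croniter("0 22 * * 1-5", datetime(2024, 1, 1))
print(it.get_next(datetime))  # first weekday at 22:00 after the start date
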
Example #30
def test_async(r):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters',
              DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    b = async('django_q.tests.tasks.count_letters2',
              WordClass(),
              hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters',
              DEFAULT_WORDLIST,
              'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist',
              WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name',
              Task(name='John'),
              list_key=list_key)
    # args and kwargs and broken hook
    h = async('django_q.tests.tasks.word_multiply',
              2,
              word='django',
              hook='fail.me',
              list_key=list_key,
              redis=r)
    # check if everything has a task name
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    # run the cluster to execute the tasks
    task_count = 8
    assert r.llen(list_key) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert r.llen(list_key) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    r.delete(list_key)
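
The helper functions in django_q.tests.tasks are not shown in these examples, but their behaviour can be read off the assertions: count_letters(DEFAULT_WORDLIST) and multiply(753, 2) both yield 1506, word_multiply(2, word='django') yields 12, and get_task_name returns the model's name. A rough sketch of what such helpers could look like follows; the bodies are assumptions, not the actual test module:

def count_letters(words):
    # result(a) == 1506 for DEFAULT_WORDLIST
    return sum(len(word) for word in words)


def multiply(x, y):
    # result(f) == 1506 for multiply(753, 2)
    return x * y


def word_multiply(x, word=''):
    # result(h) == 12 for word_multiply(2, word='django')
    return len(word) * x


def get_task_name(task):
    # result(g) == 'John' for Task(name='John')
    return task.name


def assert_result(task):
    # hook functions receive the finished task object once the worker is done
    assert task.success is True
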
Example #31
def test_async(r, admin_user):
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key, redis=r)
    b = async('django_q.tests.tasks.count_letters2', WordClass(), hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key, redis=r)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key, redis=r)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(), hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key, redis=r)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key, redis=r)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key, redis=r)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'), list_key=list_key, redis=r)
    # args, kwargs and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me', list_key=list_key, redis=r)
    # args unpickle test
    j = async('django_q.tests.tasks.get_user_id', admin_user, list_key=list_key, group='test_j', redis=r)
    # q_options bundle and save opt-out test (save=False, so the result is not stored)
    k = async('django_q.tests.tasks.get_user_id', admin_user,
              q_options={'list_key': list_key, 'group': 'test_k', 'redis': r, 'save': False, 'timeout': 90})
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert queue_size(list_key=list_key, r=r) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert queue_size(list_key=list_key, r=r) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j') == [result_j.result]
    assert result_group('test_j', failures=True) == [result_j.result]
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert count_group('test_j', failures=True) == 0
    assert delete_group('test_j') == 1
    assert delete_group('test_j', tasks=True) is None
    # task k should not have been saved
    assert fetch(k) is None
    r.delete(list_key)
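
The last two examples use the legacy async() entry point together with list_key and redis keywords; in later django-q releases async was renamed to async_task (async became a reserved word in Python 3.7) and the connection options moved behind a broker object, as in the earlier async_task examples. A hedged sketch of the same kind of call in the newer style (admin_user and a configured Django settings module are assumed, as in the test fixtures):

from django_q.brokers import get_broker
from django_q.tasks import async_task, fetch

broker = get_broker()
task_id = async_task('django_q.tests.tasks.get_user_id', admin_user,
                     group='test_j', broker=broker)
# fetch(task_id) returns a Task once a cluster (or a manual
# pusher/worker/monitor cycle) has processed and saved the result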