Example #1
async def test_enqueue_multiple(arq_redis: ArqRedis, caplog):
    caplog.set_level(logging.DEBUG)
    results = await asyncio.gather(*[
        arq_redis.enqueue_job('foobar', i, _job_id='testing')
        for i in range(10)
    ])
    assert sum(r is not None for r in results) == 1
    assert sum(r is None for r in results) == 9
    assert 'WatchVariableError' not in caplog.text
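ArqRedis.enqueue_job returns None when a job with the same _job_id is already queued, so exactly one of the ten concurrent calls gets a Job back and the other nine are deduplicated without raising; the final assertion checks that the optimistic-locking conflicts were handled internally rather than surfacing as WatchVariableError entries in the log.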
Example #2
def test_no_jobs(arq_redis: ArqRedis, loop):
    class Settings:
        functions = [func(foobar, name='foobar')]
        burst = True
        poll_delay = 0

    loop.run_until_complete(arq_redis.enqueue_job('foobar'))
    worker = run_worker(Settings)
    assert worker.jobs_complete == 1
    assert str(worker) == '<Worker j_complete=1 j_failed=0 j_retried=0 j_ongoing=0>'
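With burst = True the worker drains the queue once and then returns instead of polling forever, which is why run_worker can be called synchronously here and inspected afterwards; poll_delay = 0 just keeps the test fast.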
Example #3
async def test_mung(arq_redis: ArqRedis, worker):
    """
    check a job can't be enqueued multiple times with the same id
    """
    counter = Counter()

    async def count(ctx, v):
        counter[v] += 1

    tasks = []
    for i in range(50):
        tasks.extend(
            [arq_redis.enqueue_job('count', i, _job_id=f'v-{i}'), arq_redis.enqueue_job('count', i, _job_id=f'v-{i}')]
        )
    shuffle(tasks)
    await asyncio.gather(*tasks)

    worker: Worker = worker(functions=[func(count, name='count')])
    await worker.main()
    assert counter.most_common(1)[0][1] == 1  # no job got enqueued twice
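Each job id v-{i} appears twice in the shuffled task list, so the Counter assertion shows that the second enqueue for every id was rejected and each job ran exactly once.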
Example #4
def test_no_jobs(arq_redis: ArqRedis, loop, mocker):
    class Settings:
        functions = [func(foobar, name='foobar')]
        burst = True
        poll_delay = 0
        queue_read_limit = 10

    loop.run_until_complete(arq_redis.enqueue_job('foobar'))
    mocker.patch('asyncio.get_event_loop', lambda: loop)
    worker = run_worker(Settings)
    assert worker.jobs_complete == 1
    assert str(worker) == '<Worker j_complete=1 j_failed=0 j_retried=0 j_ongoing=0>'
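The pytest fixtures above (arq_redis, worker, loop) hide the setup; outside the test suite the same pieces fit together roughly as below. This is a minimal sketch, assuming a local Redis on its default port and the arq API already used in these examples (create_pool, Worker, func, enqueue_job); the double task and the 'only-once' job id are made-up placeholders.

import asyncio

from arq import create_pool
from arq.connections import RedisSettings
from arq.worker import Worker, func


async def double(ctx, v):
    # toy task; ctx is the context dict arq passes to every job
    return v * 2


async def main():
    redis = await create_pool(RedisSettings())

    # the second enqueue with the same _job_id returns None instead of a Job
    first = await redis.enqueue_job('double', 21, _job_id='only-once')
    second = await redis.enqueue_job('double', 21, _job_id='only-once')
    assert first is not None and second is None

    # burst worker: drains the queue once and returns, as in the tests above
    worker = Worker(
        functions=[func(double, name='double')],
        redis_pool=redis,
        burst=True,
        poll_delay=0,
    )
    await worker.main()
    print(worker.jobs_complete)  # 1
    await worker.close()


if __name__ == '__main__':
    asyncio.run(main())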