Example 1
async def test_enqueue_job_nested_custom_serializer(
        arq_redis_msgpack: ArqRedis, worker):
    async def foobar(ctx):
        return 42

    async def parent_job(ctx):
        inner_job = await ctx['redis'].enqueue_job('foobar')
        return inner_job.job_id

    job = await arq_redis_msgpack.enqueue_job('parent_job')

    worker: Worker = worker(
        functions=[
            func(parent_job, name='parent_job'),
            func(foobar, name='foobar')
        ],
        arq_redis=None,
        job_serializer=msgpack.packb,
        job_deserializer=functools.partial(msgpack.unpackb, raw=False),
    )

    await worker.main()
    result = await job.result(poll_delay=0)
    assert result is not None
    inner_job = Job(result,
                    arq_redis_msgpack,
                    _deserializer=functools.partial(msgpack.unpackb,
                                                    raw=False))
    inner_result = await inner_job.result(poll_delay=0)
    assert inner_result == 42
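The `arq_redis_msgpack` fixture and a few imports are assumed by the test above (`from arq.jobs import Job`, `from arq.worker import Worker, func`, `from arq.connections import ArqRedis`). A minimal sketch of how such a msgpack-backed pool might be created, assuming the standard `create_pool` serializer hooks:

import functools

import msgpack
from arq import create_pool
from arq.connections import RedisSettings

async def make_msgpack_pool():
    # Pool whose job payloads are packed with msgpack instead of pickle; the
    # deserializer must mirror the worker's (raw=False decodes bytes to str).
    return await create_pool(
        RedisSettings(),
        job_serializer=msgpack.packb,
        job_deserializer=functools.partial(msgpack.unpackb, raw=False),
    )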
Example 2
async def report_pdf_endpoint(job_id: str):
    redis = await arq.create_pool(REDIS)

    try:
        job = Job(job_id, redis=redis, _queue_name=REDIS_QUEUE)
        status = await job.status()

        if status == JobStatus.not_found:
            raise HTTPException(status_code=404, detail="Job not found")

        if status != JobStatus.complete:
            raise HTTPException(status_code=400, detail="Job not complete")

        info = await job.result_info()
        if not info.success:
            raise HTTPException(status_code=400,
                                detail="Job failed, cannot return results")

        path, errors = info.result
        name = (info.kwargs.get("name", None)
                or "South Atlantic Blueprint Summary Report")

        return FileResponse(path,
                            filename=f"{name}.pdf",
                            media_type="application/pdf")

    finally:
        redis.close()
        await redis.wait_closed()
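The endpoint above only reads a finished job; the enqueue side is not shown. A hedged sketch of what it might look like, where the task name "create_custom_report" is an assumption and only `enqueue_job(..., _queue_name=...)` is standard arq API; the worker function is expected to return the `(path, errors)` tuple the endpoint unpacks and to receive the `name` kwarg it later reads from `info.kwargs`:

import arq

async def create_report_job(name: str) -> str:
    redis = await arq.create_pool(REDIS)
    try:
        job = await redis.enqueue_job(
            "create_custom_report",   # assumed worker function name
            name=name,                # surfaces later via info.kwargs["name"]
            _queue_name=REDIS_QUEUE,
        )
        return job.job_id
    finally:
        redis.close()
        await redis.wait_closed()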
Example 3
async def test_enqueue_job_custom_queue(arq_redis: ArqRedis, worker):
    async def foobar(ctx):
        return 42

    async def parent_job(ctx):
        inner_job = await ctx['redis'].enqueue_job('foobar')
        return inner_job.job_id

    job = await arq_redis.enqueue_job('parent_job', _queue_name='spanner')

    worker: Worker = worker(
        functions=[
            func(parent_job, name='parent_job'),
            func(foobar, name='foobar')
        ],
        arq_redis=None,
        queue_name='spanner',
    )

    await worker.main()
    inner_job_id = await job.result(poll_delay=0)
    assert inner_job_id is not None
    inner_job = Job(inner_job_id, arq_redis, _queue_name='spanner')
    inner_result = await inner_job.result(poll_delay=0)
    assert inner_result == 42
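The queue name must agree in three places here: `enqueue_job(_queue_name=...)`, the worker's `queue_name`, and any later `Job(..., _queue_name=...)` handle. As an assumption to verify against your arq version, recent releases also accept a default queue on the pool itself, which avoids repeating it at every call site:

from arq import create_pool
from arq.connections import RedisSettings

async def spanner_pool():
    # Assumed: create_pool's default_queue_name argument (recent arq releases);
    # with it, enqueue_job() and Job lookups default to the 'spanner' queue.
    return await create_pool(RedisSettings(), default_queue_name='spanner')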
Example 4
async def test_result_pole_delay_dep(arq_redis: ArqRedis):
    j = Job('foobar', arq_redis)
    r = serialize_result('foobar', (1, ), {}, 1, 123, True, 42, 123, 123,
                         'testing', 'test-queue')
    await arq_redis.set(result_key_prefix + j.job_id, r)
    with pytest.warns(
            DeprecationWarning,
            match=
            '"pole_delay" is deprecated, use the correct spelling "poll_delay" instead'
    ):
        assert await j.result(pole_delay=0) == 42
Example 5
async def test_enqueue_job_nested(arq_redis: ArqRedis, worker):
    async def foobar(ctx):
        return 42

    async def parent_job(ctx):
        inner_job = await ctx['redis'].enqueue_job('foobar')
        return inner_job.job_id

    job = await arq_redis.enqueue_job('parent_job')
    worker: Worker = worker(
        functions=[
            func(parent_job, name='parent_job'),
            func(foobar, name='foobar')
        ],
    )

    await worker.main()
    result = await job.result(poll_delay=0)
    assert result is not None
    inner_job = Job(result, arq_redis)
    inner_result = await inner_job.result(poll_delay=0)
    assert inner_result == 42
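Outside the test harness, the same nested-enqueue pattern is usually wired up through a `WorkerSettings` class consumed by the arq CLI; a minimal sketch, assuming default Redis settings:

from arq.connections import RedisSettings

async def foobar(ctx):
    return 42

async def parent_job(ctx):
    # ctx['redis'] is the worker's own ArqRedis pool, so jobs can enqueue jobs.
    inner_job = await ctx['redis'].enqueue_job('foobar')
    return inner_job.job_id

class WorkerSettings:
    functions = [parent_job, foobar]
    redis_settings = RedisSettings()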
Example 6
async def test_result_timeout(arq_redis: ArqRedis):
    j = Job('foobar', arq_redis)
    with pytest.raises(asyncio.TimeoutError):
        await j.result(0.1, poll_delay=0)
Example 7
async def test_unknown(arq_redis: ArqRedis):
    j = Job('foobar', arq_redis)
    assert JobStatus.not_found == await j.status()
    info = await j.info()
    assert info is None
Example 8
async def test_job_in_progress(arq_redis: ArqRedis):
    await arq_redis.set(in_progress_key_prefix + 'foobar', b'1')
    j = Job('foobar', arq_redis)
    assert JobStatus.in_progress == await j.status()
    assert str(j) == '<arq job foobar>'
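Examples 4 and 8 poke at arq's Redis keys directly: `serialize_result` comes from `arq.jobs`, and the key prefixes from `arq.constants`. A short sketch of the keys a `Job` handle reads, assuming the stock prefixes:

from arq.constants import in_progress_key_prefix, job_key_prefix, result_key_prefix

job_id = 'foobar'
queued_key = job_key_prefix + job_id            # e.g. 'arq:job:foobar', the enqueued payload
running_key = in_progress_key_prefix + job_id   # e.g. 'arq:in-progress:foobar', set while the worker runs the job
result_key = result_key_prefix + job_id         # e.g. 'arq:result:foobar', holds the serialized result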
Example 9
async def job_status_endpoint(job_id: str):
    """Return the status of a job.

    Job status values derived from JobStatus enum at:
    https://github.com/samuelcolvin/arq/blob/master/arq/jobs.py
    ['deferred', 'queued', 'in_progress', 'complete', 'not_found']

    We add ['success', 'failed'] status values here.

    Parameters
    ----------
    job_id : str

    Returns
    -------
    JSON
        {"status": "...", "progress": 0-100, "result": "...only if complete...", "detail": "...only if failed..."}
    """

    redis = await arq.create_pool(REDIS)

    try:
        job = Job(job_id, redis=redis, _queue_name=REDIS_QUEUE)
        status = await job.status()

        if status == JobStatus.not_found:
            raise HTTPException(status_code=404, detail="Job not found")

        if status != JobStatus.complete:
            progress, message, errors = await get_progress(job_id)

            return {
                "status": status,
                "progress": progress,
                "message": message,
                "errors": errors,
            }

        info = await job.result_info()

        try:
            # this re-raises the underlying exception raised in the worker
            filename, errors = await job.result()

            if info.success:
                return {
                    "status": "success",
                    "result": f"/api/reports/results/{job_id}",
                    "errors": errors,
                }

        except DataError as ex:
            message = str(ex)

        # TODO: other specific exceptions

        except Exception as ex:
            log.error(ex)
            message = "Internal server error"
            raise HTTPException(status_code=500,
                                detail="Internal server error")

        return {"status": "failed", "detail": message}

    finally:
        redis.close()
        await redis.wait_closed()
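`get_progress` in the endpoint above is not part of arq; progress reporting is application code. A hedged sketch of one way such a helper might look, assuming the worker writes a small JSON blob under a hypothetical "progress:{job_id}" key as it runs:

import json

import arq

async def get_progress(job_id: str):
    # Hypothetical helper: arq has no built-in progress API, so the worker is
    # assumed to store {"progress": 0-100, "message": ..., "errors": [...]}.
    redis = await arq.create_pool(REDIS)
    try:
        raw = await redis.get(f"progress:{job_id}")
        if not raw:
            return 0, "", []
        data = json.loads(raw)
        return data.get("progress", 0), data.get("message", ""), data.get("errors", [])
    finally:
        redis.close()
        await redis.wait_closed()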
Example 10
def _get_job(self, job_id: str, queue_name: Optional[str] = None) -> Job:
    return Job(
        job_id,
        self._pool,
        _queue_name=queue_name or self.default_queue_name,
    )
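`_get_job` is a small factory method on a wrapper class whose `self._pool` and `self.default_queue_name` attributes are set elsewhere (not shown). A hedged usage sketch, with `client` standing in for an instance of that assumed wrapper:

from typing import Optional

from arq.jobs import JobStatus

async def job_finished(client, job_id: str, queue_name: Optional[str] = None) -> bool:
    # 'client' is assumed to expose the _get_job factory from the snippet above.
    job = client._get_job(job_id, queue_name)
    return await job.status() == JobStatus.complete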