Code Example #1
    def delay(self, *args: Any, **kwargs: Any) -> AsyncResult:
        """
        Traditional Celery-like interface to enqueue a task for execution by the
        workers.

        The `args` and `kwargs` will be passed through to the task when executed.

        Examples
        --------
        >>> @app.task
        ... def foo(x, bar=None):
        ...     time.sleep(x)
        ...     if bar == "mystr":
        ...         return False
        ...     return True
        ...
        >>> foo.delay(1)
        >>> foo.delay(2, bar="mystr")
        """
        job = Job(
            task=self.name,
            args=list(args),
            kwargs=kwargs,
            tries=0,
            max_retries=self.max_retries,
        )
        send(self.app, job)
        return AsyncResult(job=job, app=self.app)
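For context, `send` is not shown in this excerpt. Given the XREAD/XACK usage elsewhere on this page, a minimal sketch of what it plausibly does is an XADD of the serialised job onto the main work stream; `app.keys.queue` and `job.serialise()` (the inverse of the `Job.deserialise` used below) are assumed names, not confirmed by the source:

def send(app, job):
    # Sketch only (assumed key and helper names): append the job's field
    # mapping to the main stream so consumer-group workers can read it.
    return app.client.xadd(app.keys.queue, job.serialise())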
Code Example #2
async def _iter_dlq(app, batchsize=100):
    """
    Iterate over the dead-letter queue and replay any jobs for which filter(job)
    evaluates to True. The default is to replay all jobs.
    """
    xid = "0"

    while True:
        messages = await app.aioclient.xread(
            [app.keys.dead], latest_ids=[xid], count=batchsize, timeout=None
        )
        if not messages:
            return

        assert all(stream == app.keys.dead for stream, _, _ in messages)
        xid = messages[-1][1]

        tx = app.aioclient.multi_exec()
        for _, _, fields in messages:
            tx.hgetall(app.keys.status_prefix + f":{fields['uuid']}")
        response = await tx.execute()

        jobs = zip(
            [xid for _, xid, _ in messages],
            [Job.deserialise(fields) for fields in response],
        )

        for xid, job in jobs:
            yield (xid, job)
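As a usage sketch, replaying everything from the dead-letter queue could sit on top of this iterator (assuming the async `send` that `delay` uses in example #4 accepts any Job):

async def replay_all(app):
    # Sketch: re-enqueue every dead job onto the main stream.
    async for xid, job in _iter_dlq(app):
        await send(app, job)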
Code Example #3
File: actions.py, Project: leonh/fennel
def _iter_dlq(app, batchsize=100):
    """
    Synchronous version of the dead-letter queue iterator above: yield
    (xid, job) pairs in batches of `batchsize`.
    """
    xid = "0"

    while True:
        response = app.client.xread({app.keys.dead: xid}, count=batchsize)
        if not response:
            return

        for stream, messages in response:
            assert stream == app.keys.dead

            if not messages:
                return
            else:
                xid = messages[-1][1]

            with app.client.pipeline() as pipe:
                for _, fields in messages:
                    pipe.hgetall(app.keys.status_prefix + f":{fields['uuid']}")
                results = pipe.execute()

            yield from zip(
                [xid for xid, _ in messages],
                [Job.deserialise(fields) for fields in results],
            )
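A filtered replay in the spirit of the async version's docstring might look like this (a sketch; a synchronous `send` counterpart is an assumption):

def replay_dead(app, filter=lambda job: True):
    # Sketch: replay only the jobs the predicate accepts; the default
    # replays all jobs, matching the documented behaviour.
    for xid, job in _iter_dlq(app):
        if filter(job):
            send(app, job)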
Code Example #4
File: task.py, Project: leonh/fennel
    async def delay(self, *args: Any, **kwargs: Any) -> AsyncResult:
        """
        Enqueue a task for execution by the workers.

        Similar to asyncio.create_task (but also works with non-async functions and runs
        on our Redis-backed task queue with distributed workers, automatic retry, and
        result storage with configurable TTL).

        The `args` and `kwargs` will be passed through to the task when executed.

        Examples
        --------
        >>> @app.task(retries=1)
        ... async def foo(x, bar=None):
        ...     await asyncio.sleep(x)
        ...     if bar == "mystr":
        ...         return False
        ...     return True
        ...
        >>> await foo.delay(1)
        >>> await foo.delay(2, bar="mystr")
        """
        job = Job(
            task=self.name,
            args=list(args),
            kwargs=kwargs,
            tries=0,
            max_retries=self.max_retries,
        )
        await send(self.app, job)
        return AsyncResult(job=job, app=self.app)
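`AsyncResult`'s own API is not shown in these excerpts, so one hedged way to wait for completion is to poll the status hash via the `get_job` helper from example #15, treating `status.SUCCESS` and `status.DEAD` as terminal states (as the `ack_*` methods below suggest):

import asyncio

async def wait_for(app, uuid, interval=0.5):
    # Sketch: poll the job's status entry until it reaches a terminal state.
    while True:
        job = await get_job(app, uuid)
        if job.status in (status.SUCCESS, status.DEAD):
            return job
        await asyncio.sleep(interval)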
Code Example #5
    async def executing(self, uuid: str) -> Job:
        """
        Set the status entry for the given uuid to status.EXECUTING
        and return the associated Job.
        """
        key = self.keys.status_prefix + f":{uuid}"
        tx = self.client.multi_exec()
        tx.hmset_dict(key, status=status.EXECUTING)
        tx.hgetall(key)
        _, fields = await tx.execute()
        return Job.deserialise(fields)
Code Example #6
File: test_job.py, Project: leonh/fennel
def job(uuid):
    return Job(
        task="test",
        args=["foo"],
        kwargs={"bar": "baz"},
        tries=0,
        max_retries=1,
        exception={},
        return_value=None,
        status=EXECUTING,
        uuid=uuid,
    )
Code Example #7
def random_job(**kwargs):
    """Return a Job built from defaults, with any keyword overrides applied."""
    defaults = dict(
        task="unknown",
        args=[random.randint(1, 100)],
        kwargs={},
        tries=0,
        max_retries=9,
        exception={},
        return_value=None,
        uuid=base64uuid(),
        status=UNKNOWN,
    )
    return Job(**dict(defaults, **kwargs))
Code Example #8
    async def ack(self, xid: str, job: Job) -> List:
        """
        Acknowledge receipt of the ID:
            1. Remove it from the consumer's PEL.
            2. Delete the message from the stream.
            3. Set the status entry for the job to status.SUCCESS.
            4. Set expiry for the status entry.
        """
        job = job.replace(status=status.SUCCESS)

        tx = self.client.multi_exec()
        self._ack(tx, xid)
        self._status(tx, job, ttl=self.results_ttl)
        return await tx.execute()
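The `_ack` helper is not included in this excerpt; per steps 1 and 2 of the docstring, a minimal sketch would XACK the entry out of the consumer group's pending entries list and then XDEL it from the stream (`self.keys.queue` and `self.group` are assumed names):

    def _ack(self, tx, xid):
        # Sketch: step 1, remove the ID from this consumer's PEL;
        # step 2, delete the message from the stream itself.
        tx.xack(self.keys.queue, self.group, xid)
        tx.xdel(self.keys.queue, xid)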
Code Example #9
    async def ack_and_schedule(self, xid: str, job: Job) -> List:
        """
        Acknowledge receipt of the ID and schedule the job for reprocessing:
            1. Remove it from the consumer's PEL.
            2. Delete the message from the stream.
            3. Add the job to the schedule sorted set so that consumers can poll it.
            4. Set the status entry for the job to status.RETRY.
        """
        job = job.replace(status=status.RETRY)
        eta = now() + int(self.retry_backoff(job.tries))

        tx = self.client.multi_exec()
        self._ack(tx, xid)
        self._schedule(tx, job, eta)
        self._status(tx, job, ttl=None)
        return await tx.execute()
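Step 3's `_schedule` helper is also not shown; one plausible sketch stores the job's uuid in a sorted set scored by its ETA, so a poller can ZRANGEBYSCORE for due jobs and rehydrate each from its status hash (key name assumed):

    def _schedule(self, tx, job, eta):
        # Sketch: score by ETA; a poller later fetches members with
        # score <= now() and reloads each job from its status entry.
        tx.zadd(self.keys.schedule, eta, job.uuid)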
Code Example #10
    async def ack_and_store(self, xid: str, job: Job) -> List:
        """
        Acknowledge receipt of the ID and store the result:
            1. Remove it from the consumer's PEL.
            2. Delete the message from the stream.
            3. Delete any existing results (just in case it already exists).
            4. Store the result in a list so that clients can wait via BRPOPLPUSH.
            5. Set expiry for the result.
            6. Set the status entry for the job to status.SUCCESS.
            7. Set expiry for the status entry.
        """
        job = job.replace(status=status.SUCCESS)

        tx = self.client.multi_exec()
        self._ack(tx, xid)
        self._store(tx, job)
        self._status(tx, job, ttl=self.results_ttl)
        return await tx.execute()
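Step 4 is what lets clients block for a result. A sketch of that client side with a synchronous redis-py client (the result key layout mirrors the status key pattern and is an assumption):

def wait_for_result(app, uuid, timeout=10):
    # Sketch: BRPOPLPUSH blocks until the worker pushes the result, then
    # rotates it back onto the same list so later readers also find it.
    key = app.keys.result_prefix + f":{uuid}"
    raw = app.client.brpoplpush(key, key, timeout=timeout)
    if raw is None:
        raise TimeoutError(f"no result for job {uuid} within {timeout}s")
    return raw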
Code Example #11
    async def ack_and_dead(self, xid: str, job: Job) -> List:
        """
        Acknowledge receipt of the ID and add the job to the dead-letter queue:
            1. Remove it from the consumer's PEL.
            2. Delete the message from the stream.
            3. Add the message to the DLQ.
            4. Delete any existing results (just in case it already exists).
            5. Store the result in a list so that clients can wait via BRPOPLPUSH.
            6. Set expiry for the result.
            7. Set the status entry for the job to status.DEAD.
        """
        job = job.replace(status=status.DEAD)

        tx = self.client.multi_exec()
        self._ack(tx, xid)
        self._dead(tx, job)
        self._store(tx, job)
        self._status(tx, job, ttl=None)
        return await tx.execute()
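Step 3's `_dead` helper plausibly XADDs a pointer onto the dead-letter stream; note that `_iter_dlq` in examples #2 and #3 reads a `uuid` field back out of exactly these entries (a sketch, field layout assumed):

    def _dead(self, tx, job):
        # Sketch: append to the DLQ stream; _iter_dlq later reads the
        # 'uuid' field and rehydrates the job from its status hash.
        tx.xadd(self.keys.dead, {"uuid": job.uuid})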
Code Example #12
File: test_recovery.py, Project: leonh/fennel
def mocked_deserialise(fields):
    job = Job.deserialise(fields)
    if job.args == [sentinel]:
        raise Chaos("Found sentinel job")
    return job
Code Example #13
File: test_job.py, Project: leonh/fennel
def test_deserialise(job, as_dict):
    assert Job.deserialise(as_dict) == job
Code Example #14
File: test_job.py, Project: leonh/fennel
def test_from_json(job, as_json):
    assert Job.from_json(as_json) == job
Code Example #15
async def get_job(app, uuid) -> Job:
    resp = await app.aioclient.hgetall(app.keys.status_prefix + f":{uuid}")
    if not resp:
        raise JobNotFound(uuid)
    return Job.deserialise(resp)
Code Example #16
File: state.py, Project: leonh/fennel
def get_job(app, uuid):
    resp = app.client.hgetall(app.keys.status_prefix + f":{uuid}")
    if not resp:
        raise JobNotFound(uuid)
    return Job.deserialise(resp)