def _iter_dlq(app, batchsize=100):
    """
    Iterate over the dead-letter queue, yielding (xid, job) pairs in
    batches of `batchsize`. Synchronous variant.
    """
    xid = "0"
    while True:
        response = app.client.xread({app.keys.dead: xid}, count=batchsize)
        if not response:
            return
        for stream, messages in response:
            assert stream == app.keys.dead
            if not messages:
                return
            # redis-py returns messages as (id, fields) pairs; advance the
            # cursor to the last id in this batch.
            xid = messages[-1][0]
            with app.client.pipeline() as pipe:
                for _, fields in messages:
                    pipe.hgetall(app.keys.status_prefix + f":{fields['uuid']}")
                response = pipe.execute()
            yield from zip(
                [mid for mid, _ in messages],
                [Job.deserialise(fields) for fields in response],
            )
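For context, a consumer of the synchronous iterator might look like the sketch below. The pruning policy, the `status.DONE` constant, and the `job.status` attribute are assumptions rather than anything shown above; only `xdel` is standard redis-py.

def prune_dlq(app):
    # Hypothetical maintenance pass: drop dead-letter entries whose jobs
    # eventually completed. `job.status` and `status.DONE` are assumed.
    for xid, job in _iter_dlq(app):
        if job.status == status.DONE:
            app.client.xdel(app.keys.dead, xid)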
async def _iter_dlq(app, batchsize=100):
    """
    Iterate over the dead-letter queue, yielding (xid, job) pairs in
    batches of `batchsize`.
    """
    xid = "0"
    while True:
        messages = await app.aioclient.xread(
            [app.keys.dead], latest_ids=[xid], count=batchsize, timeout=None
        )
        if not messages:
            return
        # aioredis returns messages as (stream, id, fields) triples.
        assert all(stream == app.keys.dead for stream, _, _ in messages)
        xid = messages[-1][1]
        tx = app.aioclient.multi_exec()
        for _, _, fields in messages:
            tx.hgetall(app.keys.status_prefix + f":{fields['uuid']}")
        response = await tx.execute()
        jobs = zip(
            [mid for _, mid, _ in messages],
            [Job.deserialise(fields) for fields in response],
        )
        # `yield from` is not permitted in async generators, hence the loop.
        for mid, job in jobs:
            yield mid, job
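The iterator is presumably consumed by a replay routine along these lines; the predicate-based filtering matches what the original docstring described. `app.keys.queue`, re-enqueueing by uuid via XADD, and the `job.uuid` attribute are assumptions about the surrounding application.

async def replay_dlq(app, filter=lambda job: True):
    # Hypothetical sketch: replay matching jobs, then delete them from the
    # dead-letter stream. Everything except _iter_dlq itself is assumed.
    async for xid, job in _iter_dlq(app):
        if filter(job):
            await app.aioclient.xadd(app.keys.queue, {"uuid": job.uuid})
            await app.aioclient.xdel(app.keys.dead, xid)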
async def executing(self, uuid: str) -> Job:
    """
    Set the status entry for the given uuid to status.EXECUTING and
    return the associated Job.
    """
    key = self.keys.status_prefix + f":{uuid}"
    tx = self.client.multi_exec()
    tx.hmset_dict(key, status=status.EXECUTING)
    tx.hgetall(key)
    _, fields = await tx.execute()
    return Job.deserialise(fields)
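The MULTI/EXEC pairing means the HGETALL observes exactly the state written by the HMSET, with no interleaved writer between the two. A hypothetical call site in a worker loop might look like the sketch below; the `tasks` registry and the `job.task`/`job.args` attributes are assumptions, not part of the code above.

async def run_one(app, uuid, tasks):
    # Hypothetical worker step: flip the status atomically, then run.
    job = await app.executing(uuid)
    handler = tasks[job.task]          # assumed: jobs carry a task name
    return await handler(*job.args)    # assumed: handlers are coroutines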
def mocked_deserialise(fields):
    job = Job.deserialise(fields)
    if job.args == [sentinel]:
        raise Chaos("Found sentinel job")
    return job
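A minimal direct check of the chaos hook could look like this; the `sentinel_fields` fixture is hypothetical, standing for the serialised dict of a job enqueued with args=[sentinel]. Note that patching Job.deserialise itself with this mock would recurse, since mocked_deserialise delegates to it; a test harness would patch the caller's reference instead.

import pytest

def test_mocked_deserialise_raises(sentinel_fields):
    # Hypothetical fixture: serialised form of a job with args=[sentinel].
    with pytest.raises(Chaos):
        mocked_deserialise(sentinel_fields)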
def test_deserialise(job, as_dict):
    assert Job.deserialise(as_dict) == job
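A natural companion would be a round-trip check. This assumes Job exposes a `serialise` inverse of `deserialise`, which is an assumption about the API rather than something shown above.

def test_serialise_roundtrip(job):
    # Assumed API: job.serialise() is the inverse of Job.deserialise.
    assert Job.deserialise(job.serialise()) == job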