async def delete(self, uids: Iterable[int]) -> None:
    redis = self._redis
    keys = self._keys
    uids = list(uids)
    if not uids:
        return
    while True:
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.mget(keys.max_mod, keys.abort)
        _, _, (max_mod, abort) = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        new_mod = int(max_mod or 0) + 1
        multi = redis.multi_exec()
        multi.set(keys.max_mod, new_mod)
        multi.srem(keys.uids, *uids)
        multi.zrem(keys.seq, *uids)
        multi.zrem(keys.mod_seq, *uids)
        multi.srem(keys.recent, *uids)
        multi.srem(keys.deleted, *uids)
        multi.zrem(keys.unseen, *uids)
        for uid in uids:
            multi.zadd(keys.expunged, new_mod, uid)
            msg_keys = MessageKeys(keys, uid)
            self._cleanup.add_message(multi, msg_keys)
        try:
            await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
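# The helpers ``watch_pipe`` and ``check_errors`` used throughout the
# WATCH/MULTI retry loops are not shown in this section. Below is a minimal
# sketch of what they are assumed to look like, based only on how they are
# called above (aioredis 1.x; reading ``MultiExec._results`` is an assumption
# about its internals, not documented API):

import asyncio

from aioredis import Redis
from aioredis.commands import MultiExec, Pipeline
from aioredis.errors import WatchVariableError


def watch_pipe(redis: Redis, *watch_keys: bytes) -> Pipeline:
    # Queue UNWATCH then WATCH on the given keys; the caller discards the
    # two leading results of ``pipe.execute()`` with ``_, _, ...``.
    pipe = redis.pipeline()
    pipe.unwatch()
    pipe.watch(*watch_keys)
    return pipe


async def check_errors(multi: MultiExec) -> bool:
    # Retrieve every queued command's exception so asyncio does not warn
    # about unretrieved results, and report True only when something other
    # than a watched-key conflict failed, in which case the caller re-raises
    # instead of retrying its loop.
    errors = await asyncio.gather(*multi._results, return_exceptions=True)
    return any(isinstance(exc, Exception)
               and not isinstance(exc, WatchVariableError)
               for exc in errors)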
async def snapshot(self) -> MailboxSnapshot:
    redis = self._redis
    keys = self._keys
    while True:
        pipe = watch_pipe(redis, keys.seq, keys.abort)
        pipe.mget(keys.max_uid, keys.abort)
        pipe.zcard(keys.seq)
        pipe.scard(keys.recent)
        pipe.zcard(keys.unseen)
        pipe.zrange(keys.unseen, 0, 0)
        _, _, (max_uid, abort), exists, num_recent, num_unseen, unseen = \
            await pipe.execute()
        MailboxAbort.assertFalse(abort)
        next_uid = int(max_uid or 0) + 1
        if not unseen:
            first_unseen: Optional[int] = None
            break
        first_uid = int(unseen[0])
        multi = redis.multi_exec()
        multi.zrank(keys.seq, first_uid)
        try:
            first_unseen_b, = await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            first_unseen = int(first_unseen_b) + 1
            break
    return MailboxSnapshot(self.mailbox_id, self.readonly,
                           self.uid_validity, self.permanent_flags,
                           self.session_flags, exists, num_recent,
                           num_unseen, first_unseen, next_uid)
async def claim_recent(self, selected: SelectedMailbox) -> None:
    redis = self._redis
    keys = self._keys
    while True:
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.smembers(keys.recent)
        pipe.mget(keys.max_mod, keys.abort)
        _, _, recent, (max_mod, abort) = await pipe.execute()
        if not recent:
            break
        MailboxAbort.assertFalse(abort)
        new_mod = int(max_mod or 0) + 1
        multi = redis.multi_exec()
        multi.set(keys.max_mod, new_mod)
        for uid in recent:
            multi.zadd(keys.mod_seq, new_mod, uid)
        multi.delete(keys.recent)
        try:
            await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
    for uid_bytes in recent:
        selected.session_flags.add_recent(int(uid_bytes))
async def _get_initial(self) \
        -> Tuple[int, Sequence[Message], Sequence[int]]:
    redis = self._redis
    prefix = self._prefix
    while True:
        await redis.watch(prefix + b':max-mod', self._abort_key)
        pipe = redis.pipeline()
        pipe.zrange(prefix + b':sequence')
        pipe.get(self._abort_key)
        uids, abort = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        multi = redis.multi_exec()
        multi.get(prefix + b':max-mod')
        for uid in uids:
            msg_prefix = prefix + b':msg:' + uid
            multi.echo(uid)
            multi.smembers(msg_prefix + b':flags')
            multi.get(msg_prefix + b':time')
        try:
            results = await multi.execute()
        except MultiExecError:
            if await _check_errors(multi):
                raise
        else:
            break
    mod_seq = int(results[0] or 0)
    updated: List[Message] = []
    for i in range(1, len(results), 3):
        msg_uid = int(results[i])
        msg_flags = {Flag(flag) for flag in results[i + 1]}
        msg_time = datetime.fromisoformat(results[i + 2].decode('ascii'))
        msg = Message(msg_uid, msg_flags, msg_time)
        updated.append(msg)
    return mod_seq, updated, []
async def get(self, uid: int, cached_msg: CachedMessage = None,
              requirement: FetchRequirement = FetchRequirement.METADATA) \
        -> Optional[Message]:
    redis = self._redis
    keys = self._keys
    ns_keys = self._ns_keys
    msg_keys = MessageKeys(keys, uid)
    await redis.unwatch()
    multi = redis.multi_exec()
    multi.sismember(keys.uids, uid)
    multi.smembers(msg_keys.flags)
    multi.hmget(msg_keys.immutable, b'time', b'emailid', b'threadid')
    multi.get(keys.abort)
    exists, flags, (time, email_id, thread_id), abort = \
        await multi.execute()
    MailboxAbort.assertFalse(abort)
    if not exists:
        if cached_msg is not None:
            if not isinstance(cached_msg, Message):
                raise TypeError(cached_msg)
            return Message.copy_expunged(cached_msg)
        else:
            return None
    msg_flags = {Flag(flag) for flag in flags}
    msg_email_id = ObjectId.maybe(email_id)
    msg_thread_id = ObjectId.maybe(thread_id)
    msg_time = datetime.fromisoformat(time.decode('ascii'))
    return Message(uid, msg_time, msg_flags,
                   email_id=msg_email_id, thread_id=msg_thread_id,
                   redis=redis, ns_keys=ns_keys)
async def snapshot(self) -> MailboxSnapshot:
    redis = self._redis
    prefix = self._prefix
    while True:
        await redis.watch(prefix + b':sequence', self._abort_key)
        pipe = redis.pipeline()
        pipe.get(prefix + b':max-uid')
        pipe.zcard(prefix + b':sequence')
        pipe.scard(prefix + b':recent')
        pipe.zcard(prefix + b':unseen')
        pipe.zrange(prefix + b':unseen', 0, 0)
        pipe.get(self._abort_key)
        max_uid, exists, num_recent, num_unseen, unseen, abort = \
            await pipe.execute()
        MailboxAbort.assertFalse(abort)
        next_uid = int(max_uid or 0) + 1
        if not unseen:
            first_unseen: Optional[int] = None
            break
        else:
            first_uid = int(unseen[0])
            multi = redis.multi_exec()
            multi.zrank(prefix + b':sequence', first_uid)
            try:
                [first_unseen_b] = await multi.execute()
            except MultiExecError:
                if await _check_errors(multi):
                    raise
            else:
                # ZRANK is zero-based; convert to a one-based sequence number.
                first_unseen = int(first_unseen_b) + 1
                break
    return MailboxSnapshot(self.guid, self.readonly, self.uid_validity,
                           self.permanent_flags, self.session_flags,
                           exists, num_recent, num_unseen, first_unseen,
                           next_uid)
async def claim_recent(self, selected: SelectedMailbox) -> None:
    redis = self._redis
    prefix = self._prefix
    while True:
        await redis.watch(prefix + b':max-mod', self._abort_key)
        recent = await redis.smembers(prefix + b':recent')
        if not recent:
            break
        max_mod, abort = await redis.mget(prefix + b':max-mod',
                                          self._abort_key)
        MailboxAbort.assertFalse(abort)
        new_mod = int(max_mod or 0) + 1
        multi = self._redis.multi_exec()
        multi.set(prefix + b':max-mod', new_mod)
        for uid in recent:
            multi.zadd(prefix + b':mod-sequence', new_mod, uid)
        multi.delete(prefix + b':recent')
        try:
            await multi.execute()
        except MultiExecError:
            if await _check_errors(multi):
                raise
        else:
            break
    for uid_bytes in recent:
        selected.session_flags.add_recent(int(uid_bytes))
async def delete(self, uids: Iterable[int]) -> None:
    redis = self._redis
    prefix = self._prefix
    uids = list(uids)
    if not uids:
        return
    while True:
        await redis.watch(prefix + b':max-mod', self._abort_key)
        max_mod, abort = await redis.mget(prefix + b':max-mod',
                                          self._abort_key)
        MailboxAbort.assertFalse(abort)
        new_mod = int(max_mod or 0) + 1
        multi = redis.multi_exec()
        multi.set(prefix + b':max-mod', new_mod)
        for uid in uids:
            multi.zadd(prefix + b':expunged', new_mod, uid)
        multi.srem(prefix + b':uids', *uids)
        multi.zrem(prefix + b':sequence', *uids)
        multi.zrem(prefix + b':mod-sequence', *uids)
        multi.srem(prefix + b':recent', *uids)
        multi.srem(prefix + b':deleted', *uids)
        multi.zrem(prefix + b':unseen', *uids)
        multi.sadd(prefix + b':cleanup', *uids)
        try:
            await multi.execute()
        except MultiExecError:
            if await _check_errors(multi):
                raise
        else:
            break
async def add(self, message: PreparedMessage, *,
              recent: bool = False) -> Message:
    redis = self._redis
    keys = self._keys
    ns_keys = self._ns_keys
    when = message.when or datetime.now()
    is_deleted = Deleted in message.flag_set
    is_unseen = Seen not in message.flag_set
    msg_flags = [flag.value for flag in message.flag_set]
    msg_time = when.isoformat().encode('ascii')
    email_id = message.email_id
    thread_id = message.thread_id
    ct_keys = ContentKeys(ns_keys, email_id)
    while True:
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.mget(keys.max_uid, keys.max_mod, keys.abort)
        _, _, (max_uid, max_mod, abort) = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        new_uid = int(max_uid or 0) + 1
        new_mod = int(max_mod or 0) + 1
        msg_keys = MessageKeys(keys, new_uid)
        multi = redis.multi_exec()
        multi.set(keys.max_uid, new_uid)
        multi.set(keys.max_mod, new_mod)
        multi.sadd(keys.uids, new_uid)
        multi.zadd(keys.mod_seq, new_mod, new_uid)
        multi.zadd(keys.seq, new_uid, new_uid)
        if recent:
            multi.sadd(keys.recent, new_uid)
        if is_deleted:
            multi.sadd(keys.deleted, new_uid)
        if is_unseen:
            multi.zadd(keys.unseen, new_uid, new_uid)
        if msg_flags:
            multi.sadd(msg_keys.flags, *msg_flags)
        multi.hmset(msg_keys.immutable, b'time', msg_time,
                    b'emailid', email_id.value,
                    b'threadid', thread_id.value)
        multi.persist(ct_keys.data)
        multi.hincrby(ns_keys.content_refs, email_id.value, 1)
        try:
            await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
    return Message(new_uid, when, message.flag_set,
                   email_id=email_id, thread_id=thread_id,
                   redis=redis, ns_keys=ns_keys)
async def _get_updated(self, last_mod_seq: int) \
        -> Tuple[int, Sequence[Message], Sequence[int]]:
    redis = self._redis
    keys = self._keys
    ns_keys = self._ns_keys
    while True:
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.zrangebyscore(keys.mod_seq, last_mod_seq)
        pipe.get(keys.abort)
        _, _, uids, abort = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        multi = redis.multi_exec()
        multi.get(keys.max_mod)
        multi.zrangebyscore(keys.expunged, last_mod_seq)
        for uid in uids:
            msg_keys = MessageKeys(keys, uid)
            multi.echo(uid)
            multi.smembers(msg_keys.flags)
            multi.hmget(msg_keys.immutable, b'time', b'emailid', b'threadid')
        try:
            results = await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
    mod_seq = int(results[0] or 0)
    expunged = [int(uid) for uid in results[1]]
    updated: List[Message] = []
    for i in range(2, len(results), 3):
        msg_uid = int(results[i])
        msg_flags = {Flag(flag) for flag in results[i + 1]}
        time_b, email_id, thread_id = results[i + 2]
        msg_time = datetime.fromisoformat(time_b.decode('ascii'))
        msg = Message(msg_uid, msg_time, msg_flags,
                      email_id=ObjectId(email_id),
                      thread_id=ObjectId(thread_id),
                      redis=redis, ns_keys=ns_keys)
        updated.append(msg)
    return mod_seq, updated, expunged
async def add(self, append_msg: AppendMessage, recent: bool = False) \
        -> Message:
    redis = self._redis
    prefix = self._prefix
    is_deleted = Deleted in append_msg.flag_set
    is_unseen = Seen not in append_msg.flag_set
    msg_content = MessageContent.parse(append_msg.message)
    msg_flags = [flag.value for flag in append_msg.flag_set]
    msg_time = append_msg.when.isoformat().encode('ascii')
    while True:
        await redis.watch(prefix + b':max-mod', self._abort_key)
        max_uid, max_mod, abort = await redis.mget(
            prefix + b':max-uid', prefix + b':max-mod', self._abort_key)
        MailboxAbort.assertFalse(abort)
        new_uid = int(max_uid or 0) + 1
        new_mod = int(max_mod or 0) + 1
        msg_prefix = prefix + b':msg:%d' % new_uid
        multi = redis.multi_exec()
        multi.set(prefix + b':max-uid', new_uid)
        multi.set(prefix + b':max-mod', new_mod)
        multi.sadd(prefix + b':uids', new_uid)
        multi.zadd(prefix + b':mod-sequence', new_mod, new_uid)
        multi.zadd(prefix + b':sequence', new_uid, new_uid)
        if recent:
            multi.sadd(prefix + b':recent', new_uid)
        if is_deleted:
            multi.sadd(prefix + b':deleted', new_uid)
        if is_unseen:
            multi.zadd(prefix + b':unseen', new_uid, new_uid)
        if msg_flags:
            multi.sadd(msg_prefix + b':flags', *msg_flags)
        multi.set(msg_prefix + b':time', msg_time)
        multi.set(msg_prefix + b':header', bytes(msg_content.header))
        multi.set(msg_prefix + b':body', bytes(msg_content.body))
        try:
            await multi.execute()
        except MultiExecError:
            if await _check_errors(multi):
                raise
        else:
            break
    return Message(new_uid, append_msg.flag_set, append_msg.when,
                   recent=recent, content=msg_content)
async def get(self, uid: int, cached_msg: CachedMessage = None,
              requirement: FetchRequirement = FetchRequirement.METADATA) \
        -> Optional[Message]:
    redis = self._redis
    prefix = self._prefix
    msg_prefix = prefix + b':msg:%d' % uid
    multi = redis.multi_exec()
    multi.sismember(prefix + b':uids', uid)
    multi.smembers(msg_prefix + b':flags')
    multi.get(msg_prefix + b':time')
    multi.sismember(prefix + b':recent', uid)
    if requirement & FetchRequirement.BODY:
        multi.get(msg_prefix + b':header')
        multi.get(msg_prefix + b':body')
    elif requirement & FetchRequirement.HEADERS:
        multi.get(msg_prefix + b':header')
        multi.echo(b'')
    else:
        multi.echo(b'')
        multi.echo(b'')
    multi.get(self._abort_key)
    exists, flags, time, recent, header, body, abort = \
        await multi.execute()
    MailboxAbort.assertFalse(abort)
    if not exists:
        if cached_msg is None:
            return None
        else:
            return Message(cached_msg.uid, cached_msg.permanent_flags,
                           cached_msg.internal_date, expunged=True)
    msg_flags = {Flag(flag) for flag in flags}
    msg_time = datetime.fromisoformat(time.decode('ascii'))
    msg_recent = bool(recent)
    if header:
        msg_content = MessageContent.parse_split(header, body)
        return Message(uid, msg_flags, msg_time, recent=msg_recent,
                       content=msg_content)
    else:
        return Message(uid, msg_flags, msg_time, recent=msg_recent)
async def update_flags(self, messages: Sequence[Message],
                       flag_set: FrozenSet[Flag],
                       mode: FlagOp) -> None:
    redis = self._redis
    keys = self._keys
    messages = list(messages)
    if not messages:
        return
    uids = {msg.uid: msg for msg in messages}
    while True:
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.smembers(keys.uids)
        pipe.mget(keys.max_mod, keys.abort)
        _, _, existing_uids, (max_mod, abort) = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        update_uids = uids.keys() & {int(uid) for uid in existing_uids}
        if not update_uids:
            break
        new_mod = int(max_mod or 0) + 1
        new_flags: Dict[int, Awaitable[Sequence[bytes]]] = {}
        multi = redis.multi_exec()
        multi.set(keys.max_mod, new_mod)
        for msg in messages:
            msg_uid = msg.uid
            if msg_uid not in update_uids:
                continue
            msg_keys = MessageKeys(keys, msg_uid)
            flag_vals = (flag.value for flag in flag_set)
            multi.zadd(keys.mod_seq, new_mod, msg_uid)
            if mode == FlagOp.REPLACE:
                multi.delete(msg_keys.flags)
                if flag_set:
                    multi.sadd(msg_keys.flags, *flag_vals)
                if Deleted in flag_set:
                    multi.sadd(keys.deleted, msg_uid)
                else:
                    multi.srem(keys.deleted, msg_uid)
                if Seen not in flag_set:
                    multi.zadd(keys.unseen, msg_uid, msg_uid)
                else:
                    multi.zrem(keys.unseen, msg_uid)
            elif mode == FlagOp.ADD and flag_set:
                multi.sadd(msg_keys.flags, *flag_vals)
                if Deleted in flag_set:
                    multi.sadd(keys.deleted, msg_uid)
                if Seen in flag_set:
                    multi.zrem(keys.unseen, msg_uid)
            elif mode == FlagOp.DELETE and flag_set:
                multi.srem(msg_keys.flags, *flag_vals)
                if Deleted in flag_set:
                    multi.srem(keys.deleted, msg_uid)
                if Seen in flag_set:
                    multi.zadd(keys.unseen, msg_uid, msg_uid)
            new_flags[msg_uid] = multi.smembers(msg_keys.flags)
        try:
            await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            for msg_uid, msg_flags in new_flags.items():
                msg = uids[msg_uid]
                msg.permanent_flags = frozenset(
                    Flag(flag) for flag in await msg_flags)
            break
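# ``new_flags`` above stores the futures returned by ``multi.smembers(...)``
# and only awaits them after ``multi.execute()`` succeeds. A small
# self-contained sketch of that aioredis 1.x behavior; the connection URL and
# key name here are illustrative only:

import asyncio

import aioredis


async def _demo_multi_exec_futures() -> None:
    redis = await aioredis.create_redis('redis://localhost')
    multi = redis.multi_exec()
    flags_fut = multi.smembers(b'demo:flags')  # a future, not a result yet
    await multi.execute()                      # EXEC runs; the future resolves
    print(await flags_fut)                     # now yields the set members
    redis.close()
    await redis.wait_closed()


if __name__ == '__main__':
    asyncio.run(_demo_multi_exec_futures())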
async def update_flags(self, messages: Sequence[Message],
                       flag_set: FrozenSet[Flag],
                       mode: FlagOp) -> None:
    redis = self._redis
    prefix = self._prefix
    messages = list(messages)
    if not messages:
        return
    uids = {msg.uid: msg for msg in messages}
    while True:
        await redis.watch(prefix + b':max-mod', self._abort_key)
        pipe = redis.pipeline()
        pipe.smembers(prefix + b':uids')
        pipe.get(prefix + b':max-mod')
        pipe.get(self._abort_key)
        existing_uids, max_mod, abort = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        update_uids = uids.keys() & {int(uid) for uid in existing_uids}
        if not update_uids:
            break
        new_mod = int(max_mod or 0) + 1
        new_flags: Dict[int, Awaitable[Sequence[bytes]]] = {}
        multi = redis.multi_exec()
        multi.set(prefix + b':max-mod', new_mod)
        for msg in messages:
            msg_uid = msg.uid
            if msg_uid not in update_uids:
                continue
            msg_prefix = prefix + b':msg:%d' % msg_uid
            flag_vals = (flag.value for flag in flag_set)
            multi.zadd(prefix + b':mod-sequence', new_mod, msg_uid)
            if mode == FlagOp.REPLACE:
                multi.unlink(msg_prefix + b':flags')
                if flag_set:
                    multi.sadd(msg_prefix + b':flags', *flag_vals)
                if Deleted in flag_set:
                    multi.sadd(prefix + b':deleted', msg_uid)
                else:
                    multi.srem(prefix + b':deleted', msg_uid)
                if Seen not in flag_set:
                    multi.zadd(prefix + b':unseen', msg_uid, msg_uid)
                else:
                    multi.zrem(prefix + b':unseen', msg_uid)
            elif mode == FlagOp.ADD and flag_set:
                multi.sadd(msg_prefix + b':flags', *flag_vals)
                if Deleted in flag_set:
                    multi.sadd(prefix + b':deleted', msg_uid)
                if Seen in flag_set:
                    multi.zrem(prefix + b':unseen', msg_uid)
            elif mode == FlagOp.DELETE and flag_set:
                multi.srem(msg_prefix + b':flags', *flag_vals)
                if Deleted in flag_set:
                    multi.srem(prefix + b':deleted', msg_uid)
                if Seen in flag_set:
                    multi.zadd(prefix + b':unseen', msg_uid, msg_uid)
            new_flags[msg_uid] = multi.smembers(msg_prefix + b':flags')
        try:
            await multi.execute()
        except MultiExecError:
            if await _check_errors(multi):
                raise
        else:
            for msg_uid, msg_flags in new_flags.items():
                msg = uids[msg_uid]
                msg.permanent_flags = frozenset(
                    Flag(flag) for flag in await msg_flags)
            break