async def get(self, uid: int, cached_msg: Optional[CachedMessage] = None,
              requirement: FetchRequirement = FetchRequirement.METADATA) \
        -> Optional[Message]:
    """Load the message with *uid* from redis.

    Args:
        uid: The message UID within this mailbox.
        cached_msg: A previously cached copy of the message; if the UID
            no longer exists, an expunged copy of this is returned
            instead of None.
        requirement: Fetch requirement hint.
            NOTE(review): accepted but not consulted anywhere in this
            body — confirm whether metadata-only fetching is intended.

    Returns:
        The :class:`Message`, an expunged copy of *cached_msg*, or None.

    Raises:
        MailboxAbort: If the mailbox abort key is set.
        TypeError: If *cached_msg* is not a :class:`Message`.
    """
    redis = self._redis
    keys = self._keys
    ns_keys = self._ns_keys
    msg_keys = MessageKeys(keys, uid)
    # Clear any WATCHed keys before starting the transaction.
    await redis.unwatch()
    multi = redis.multi_exec()
    multi.sismember(keys.uids, uid)
    multi.smembers(msg_keys.flags)
    multi.hmget(msg_keys.immutable, b'time', b'emailid', b'threadid')
    multi.get(keys.abort)
    # Results unpack in the same order the commands were queued above.
    exists, flags, (time, email_id, thread_id), abort = \
        await multi.execute()
    MailboxAbort.assertFalse(abort)
    if not exists:
        # UID has been expunged (or never existed); fall back to the
        # caller-supplied cached copy when one is available.
        if cached_msg is not None:
            if not isinstance(cached_msg, Message):
                raise TypeError(cached_msg)
            return Message.copy_expunged(cached_msg)
        else:
            return None
    msg_flags = {Flag(flag) for flag in flags}
    msg_email_id = ObjectId.maybe(email_id)
    msg_thread_id = ObjectId.maybe(thread_id)
    # Stored time is an ISO-8601 string encoded as ASCII bytes.
    msg_time = datetime.fromisoformat(time.decode('ascii'))
    return Message(uid, msg_time, msg_flags,
                   email_id=msg_email_id, thread_id=msg_thread_id,
                   redis=redis, ns_keys=ns_keys)
def test_random(self):
    """Each random ID factory prefixes its type byte onto a non-empty
    value."""
    factories = ((ObjectId.random_mailbox_id, b'F'),
                 (ObjectId.random_email_id, b'M'),
                 (ObjectId.random_thread_id, b'T'))
    for factory, prefix in factories:
        obj_id = factory()
        self.assertEqual(prefix, obj_id.value[0:1])
        self.assertTrue(len(obj_id.value))
async def rename_mailbox(self, before: str, after: str) -> None:
    """Rename mailbox *before* to *after*, moving any child mailboxes
    along with it.

    Args:
        before: The existing mailbox name.
        after: The new mailbox name.

    Raises:
        MailboxNotFound: If *before* does not exist.
        MailboxConflict: If *after* already exists.
    """
    redis = self._redis
    while True:
        # WATCH the mailboxes hash: a concurrent modification makes the
        # MULTI below fail so the loop can re-read and retry.
        pipe = watch_pipe(redis, self._keys.mailboxes)
        pipe.hgetall(self._keys.mailboxes)
        _, _, all_keys = await pipe.execute()
        all_mbx = {modutf7_decode(key): ns for key, ns in all_keys.items()}
        tree = ListTree(self.delimiter).update('INBOX', *all_mbx.keys())
        before_entry = tree.get(before)
        after_entry = tree.get(after)
        if before_entry is None:
            raise MailboxNotFound(before)
        elif after_entry is not None:
            raise MailboxConflict(after)
        multi = redis.multi_exec()
        # Move every affected mailbox in the subtree: the ID follows the
        # new name, the old name is removed, and the UID validity of the
        # destination name is bumped.
        for before_name, after_name in tree.get_renames(before, after):
            before_id = all_mbx[before_name]
            before_key = modutf7_encode(before_name)
            after_key = modutf7_encode(after_name)
            multi.hset(self._keys.mailboxes, after_key, before_id)
            multi.hdel(self._keys.mailboxes, before_key)
            multi.hincrby(self._keys.uid_validity, after_key)
        if before == 'INBOX':
            # INBOX is recreated with a fresh ID rather than removed,
            # since it must always exist.
            inbox_id = ObjectId.random_mailbox_id()
            multi.hset(self._keys.mailboxes, b'INBOX', inbox_id.value)
            multi.hincrby(self._keys.uid_validity, b'INBOX')
        try:
            await multi.execute()
        except MultiExecError:
            # A failed MULTI from the WATCH means retry; any other error
            # is re-raised.
            if await check_errors(multi):
                raise
        else:
            break
async def add_mailbox(self, name: str) -> ObjectId:
    """Create a new maildir folder for *name* and return its object ID.

    Raises:
        MailboxConflict: If a folder with this name already exists.
    """
    try:
        self._layout.add_folder(name, self.delimiter)
    except FileExistsError:
        raise MailboxConflict(name)
    folder_path = self._layout.get_path(name, self.delimiter)
    async with UidList.with_open(folder_path) as uidl:
        return ObjectId(uidl.global_uid)
async def reset(self) -> MailboxData:
    """Synchronize the UID list with the messages currently on disk.

    Any message key not yet in the UID list gets a new record with a
    fresh UID and random email/thread IDs.  Raises NoChanges when the
    UID list is already up to date.
    """
    new_keys = await self._get_keys()
    async with UidList.with_write(self._path) as uidl:
        # Drop every key that already has a record; what remains is new.
        for record in uidl.records:
            new_keys.pop(record.key, None)
        if not new_keys:
            raise NoChanges()
        for msg_key, msg_info in new_keys.items():
            fields = {'E': str(ObjectId.random_email_id()),
                      'T': str(ObjectId.random_thread_id())}
            new_record = Record(uidl.next_uid, fields,
                                msg_key + ':' + msg_info)
            uidl.next_uid += 1
            uidl.set(new_record)
    # NOTE(review): with_write presumably persists on exit — confirm.
    self._uid_validity = uidl.uid_validity
    self._next_uid = uidl.next_uid
    return self
async def _get_updated(self, last_mod_seq: int) \
        -> Tuple[int, Sequence[Message], Sequence[int]]:
    """Gather all changes recorded after *last_mod_seq*.

    Args:
        last_mod_seq: The modification sequence to read changes since.

    Returns:
        A tuple of (current mod-sequence, updated messages,
        expunged UIDs).

    Raises:
        MailboxAbort: If the mailbox abort key is set.
    """
    redis = self._redis
    keys = self._keys
    ns_keys = self._ns_keys
    while True:
        # WATCH max_mod/abort so a concurrent change fails the MULTI
        # below and the loop retries with fresh data.
        pipe = watch_pipe(redis, keys.max_mod, keys.abort)
        pipe.zrangebyscore(keys.mod_seq, last_mod_seq)
        pipe.get(keys.abort)
        _, _, uids, abort = await pipe.execute()
        MailboxAbort.assertFalse(abort)
        multi = redis.multi_exec()
        multi.get(keys.max_mod)
        multi.zrangebyscore(keys.expunged, last_mod_seq)
        for uid in uids:
            msg_keys = MessageKeys(keys, uid)
            # ECHO tags each result triple with its UID so the results
            # can be matched back to messages below.
            multi.echo(uid)
            multi.smembers(msg_keys.flags)
            multi.hmget(msg_keys.immutable, b'time', b'emailid', b'threadid')
        try:
            results = await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
    mod_seq = int(results[0] or 0)
    expunged = [int(uid) for uid in results[1]]
    updated: List[Message] = []
    # After the two leading results, per-message results arrive in
    # strides of three: echo(uid), smembers(flags), hmget(immutable).
    for i in range(2, len(results), 3):
        msg_uid = int(results[i])
        msg_flags = {Flag(flag) for flag in results[i + 1]}
        time_b, email_id, thread_id = results[i + 2]
        msg_time = datetime.fromisoformat(time_b.decode('ascii'))
        msg = Message(msg_uid, msg_time, msg_flags,
                      email_id=ObjectId(email_id),
                      thread_id=ObjectId(thread_id),
                      redis=redis, ns_keys=ns_keys)
        updated.append(msg)
    return mod_seq, updated, expunged
def add(self, content: MessageContent) -> ObjectId:
    """Store *content*, deduplicated by its SHA-1 digest.

    Returns the email ID already assigned to identical content, or a
    fresh random ID.  A finalizer removes the cache entries once
    *content* is garbage-collected.
    """
    digest = HashStream(hashlib.sha1()).digest(content)
    cached_id = self._email_ids.get(digest)
    if cached_id is not None:
        return cached_id
    new_id = ObjectId.random_email_id()
    self._email_ids[digest] = new_id
    self._hashes[new_id] = digest
    self._content[new_id] = content
    finalize(content, self._remove, digest, new_id)
    return new_id
def __init__(self, redis: Redis, mailbox_id: bytes, uid_validity: int,
             keys: MailboxKeys, ns_keys: NamespaceKeys,
             cleanup: Cleanup) -> None:
    """Initialize the redis-backed mailbox state.

    Args:
        redis: The redis connection object.
        mailbox_id: The raw mailbox object ID bytes.
        uid_validity: The mailbox UID validity value.
        keys: The mailbox key namespace.
        ns_keys: The login namespace keys.
        cleanup: The cleanup configuration.
    """
    super().__init__()
    self._redis = redis
    self._mailbox_id = ObjectId(mailbox_id)
    self._uid_validity = uid_validity
    self._keys = keys
    self._ns_keys = ns_keys
    self._cleanup = cleanup
    self._selected_set = SelectedSet()
def add(self, content: MessageContent) -> ObjectId:
    """Assign a thread ID to *content* based on its threading headers.

    The first thread ID already known for any of the message's thread
    keys is reused; otherwise a new random ID is generated.  Every key
    is then mapped to the chosen ID without overwriting existing
    mappings.
    """
    thread_keys = ThreadKey.get_all(content.header)
    self._ref[content] = thread_keys
    # Lazily probe each key, stopping at the first one with an ID.
    known = (self._thread_ids.get(key) for key in thread_keys)
    thread_id = next((tid for tid in known if tid is not None), None)
    if thread_id is None:
        thread_id = ObjectId.random_thread_id()
    for key in thread_keys:
        self._thread_ids.setdefault(key, thread_id)
    return thread_id
def __init__(self, content_cache: _ContentCache,
             thread_cache: _ThreadCache) -> None:
    """Initialize an in-memory mailbox backed by the shared content and
    thread caches, with a fresh random mailbox ID and UID validity.
    """
    self._content_cache = content_cache
    self._thread_cache = thread_cache
    self._mailbox_id = ObjectId.random_mailbox_id()
    self._uid_validity = MailboxSnapshot.new_uid_validity()
    self._max_uid = 100
    self._readonly = False
    self._messages_lock = subsystem.get().new_rwlock()
    self._selected_set = SelectedSet()
    self._mod_sequences = _ModSequenceMapping()
    self._messages: Dict[int, Message] = OrderedDict()
async def save(self, message: bytes) -> SavedMessage:
    """Parse *message* and store its content in redis.

    Content is deduplicated by SHA-1 hash, and a thread ID is chosen by
    matching the message's threading headers against previously stored
    thread keys.

    Args:
        message: The raw message bytes.

    Returns:
        The saved email ID, thread ID, and no literal content.
    """
    redis = self._redis
    ns_keys = self._ns_keys
    content = MessageContent.parse(message)
    new_email_id = ObjectId.random_email_id()
    msg_hash = HashStream(hashlib.sha1()).digest(content)
    thread_keys = ThreadKey.get_all(content.header)
    thread_key_keys = [
        b'\0'.join(thread_key) for thread_key in thread_keys
    ]
    await redis.unwatch()
    multi = redis.multi_exec()
    # HSETNX keeps the first email ID assigned for this hash; HGET then
    # returns whichever ID won (the existing one or new_email_id).
    multi.hsetnx(ns_keys.email_ids, msg_hash, new_email_id.value)
    multi.hget(ns_keys.email_ids, msg_hash)
    if thread_key_keys:
        multi.hmget(ns_keys.thread_ids, *thread_key_keys)
    else:
        # Dummy HMGET keeps the result tuple arity fixed for unpacking.
        multi.hmget(ns_keys.thread_ids, b'')
    _, email_id, thread_ids = await multi.execute()
    # First existing thread ID wins; otherwise start a new thread.
    thread_id_b = next(
        (thread_id for thread_id in thread_ids
         if thread_id is not None), None)
    if thread_id_b is None:
        thread_id = ObjectId.random_thread_id()
    else:
        thread_id = ObjectId(thread_id_b)
    ct_keys = ContentKeys(ns_keys, email_id)
    multi = redis.multi_exec()
    multi.hset(ct_keys.data, b'full', message)
    multi.hset(ct_keys.data, b'full-json', json.dumps(content.json))
    multi.hset(ct_keys.data, b'header', bytes(content.header))
    multi.hset(ct_keys.data, b'header-json',
               json.dumps(content.header.json))
    multi.expire(ct_keys.data, self._cleanup.content_expire)
    # Record the chosen thread ID for every key, keeping existing ones.
    for thread_key_key in thread_key_keys:
        multi.hsetnx(ns_keys.thread_ids, thread_key_key, thread_id.value)
    await multi.execute()
    return SavedMessage(ObjectId(email_id), thread_id, None)
async def get_mailbox(self, name: str,
                      try_create: bool = False) -> MailboxData:
    """Look up the maildir folder for *name* and return its mailbox
    data, reset against the current on-disk state.

    Raises:
        MailboxNotFound: If no folder exists for *name*.
    """
    if name == 'INBOX':
        maildir = self._inbox_maildir
    else:
        try:
            maildir = self._layout.get_folder(name, self.delimiter)
        except FileNotFoundError:
            raise MailboxNotFound(name, try_create)
    try:
        mbx = self._cache[name]
    except KeyError:
        path = self._layout.get_path(name, self.delimiter)
        async with UidList.with_open(path) as uidl:
            mbx_id = ObjectId(uidl.global_uid)
        mbx = MailboxData(mbx_id, maildir, path)
        self._cache[name] = mbx
    return await mbx.reset()
async def add_mailbox(self, name: str) -> ObjectId:
    """Create a new mailbox *name* in redis and return its object ID.

    Args:
        name: The mailbox name.

    Raises:
        MailboxConflict: If the name already exists.
    """
    redis = self._redis
    name_key = modutf7_encode(name)
    while True:
        mbx_id = ObjectId.random_mailbox_id()
        # WATCH the mailboxes hash; a concurrent change fails the MULTI
        # below and the loop retries with a fresh random ID.
        pipe = watch_pipe(redis, self._keys.mailboxes)
        pipe.incr(self._keys.max_order)
        pipe.hexists(self._keys.mailboxes, name_key)
        _, _, order, exists = await pipe.execute()
        if exists:
            raise MailboxConflict(name)
        multi = redis.multi_exec()
        multi.hset(self._keys.mailboxes, name_key, mbx_id.value)
        # zadd(key, score, member): order the mailbox by creation count.
        multi.zadd(self._keys.order, order, mbx_id.value)
        multi.hincrby(self._keys.uid_validity, name_key)
        try:
            # NOTE(review): uidval is captured but never used — the
            # unpack only asserts the expected result arity.
            _, _, uidval = await multi.execute()
        except MultiExecError:
            if await check_errors(multi):
                raise
        else:
            break
    return mbx_id
def test_bytes(self):
    """bytes() of an ObjectId yields its raw value."""
    obj_id = ObjectId(b'objectid')
    self.assertEqual(b'objectid', bytes(obj_id))
def test_maybe(self):
    """ObjectId.maybe normalizes empty/None input and encodes strings."""
    cases = [
        (None, None),
        (None, b''),
        (None, ''),
        (b'test', b'test'),
        (b'test', 'te\u2026st'),
    ]
    for expected, given in cases:
        self.assertEqual(ObjectId(expected), ObjectId.maybe(given))
async def save(self, message: bytes) -> SavedMessage:
    """Wrap *message* with fresh random email and thread IDs."""
    return SavedMessage(ObjectId.random_email_id(),
                        ObjectId.random_thread_id(), message)
def _get_object_id(cls, rec: Record, field: str) -> Optional[ObjectId]:
    """Read *field* from the record's fields, tolerating a missing or
    empty value via ObjectId.maybe."""
    raw = rec.fields.get(field)
    return ObjectId.maybe(raw)
def test_parens(self):
    """The parens form wraps the raw value in parentheses."""
    obj_id = ObjectId(b'objectid')
    self.assertEqual(b'(objectid)', obj_id.parens)
def test_parse_failure(self):
    """A lone question mark is not a parseable object ID."""
    with self.assertRaises(NotParseable):
        ObjectId.parse(b'?', Params())
def test_parse(self):
    """Parsing skips leading whitespace and stops before the next
    space, returning the remainder of the buffer."""
    parsed, remaining = ObjectId.parse(b' one_2-three four', Params())
    self.assertIsInstance(parsed, ObjectId)
    self.assertEqual(b'one_2-three', parsed.value)
    self.assertEqual(b' four', remaining)
def new_selected(cls, guid: bytes = b'test') -> SelectedMailbox:
    """Build a writable SelectedMailbox fixture for *guid* with the
    standard permanent and session flags used by these tests."""
    permanent = PermanentFlags([Seen, Flagged])
    session = SessionFlags([_Keyword])
    return SelectedMailbox(ObjectId(guid), False, permanent, session)