def _folders_for_labels(g_labels, account, db_session):
    """Given a set of Gmail label strings, return the set of associated
    Folder objects. Creates new (un-added, uncommitted) Folder instances
    if needed."""
    # Elements of g_labels may not have unicode type (in particular, if you
    # have a numeric label, e.g., '42'), so we need to coerce to unicode.
    labels = {unicode(l).lstrip('\\').lower() for l in g_labels}

    # The problem here is that Gmail's attempt to squash labels and
    # IMAP folders into the same abstraction doesn't work perfectly. In
    # particular, there is a '[Gmail]/Sent' folder, but *also* a 'Sent'
    # label, and so on. We handle this by only maintaining one folder
    # object that encapsulates both of these. If a Gmail user does not
    # have these folders enabled via IMAP, we create Folder rows
    # with no 'name' attribute and fill in the 'name' if the account
    # is later reconfigured.
    special_folders = {
        'inbox': account.inbox_folder,
        'sent': account.sent_folder,
        'draft': account.drafts_folder,
        'starred': account.starred_folder,
        'important': account.important_folder,
        'trash': account.trash_folder,
    }
    folders = set()
    for label in labels:
        if label in special_folders:
            folder = special_folders[label]
            if folder is None:
                folder = Folder.find_or_create(db_session, account, None,
                                               label)
            folders.add(folder)
        else:
            folders.add(Folder.find_or_create(db_session, account, label))
    return folders
def handle_raw_folder_change(self, db_session, account, raw_folder):
    # Reconcile the local Folder (and its Category) with the folder reported
    # by the remote, creating it if it doesn't exist yet.
    folder = db_session.query(Folder).filter(
        Folder.account_id == account.id,
        Folder.canonical_name == raw_folder.role).first()
    if folder:
        if folder.name != raw_folder.display_name:
            log.info('Folder name changed on remote',
                     account_id=self.account_id,
                     role=raw_folder.role,
                     new_name=raw_folder.display_name,
                     name=folder.name)
            folder.name = raw_folder.display_name
        if folder.category:
            if folder.category.display_name != raw_folder.display_name:
                folder.category.display_name = raw_folder.display_name  # noqa
        else:
            log.info('Creating category for folder',
                     account_id=self.account_id,
                     folder_name=folder.name)
            folder.category = Category.find_or_create(
                db_session, namespace_id=account.namespace.id,
                name=raw_folder.role,
                display_name=raw_folder.display_name,
                type_='folder')
    else:
        Folder.find_or_create(db_session, account,
                              raw_folder.display_name, raw_folder.role)
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders present on the remote backend for an account.

    * Create Folder objects.
    * Delete Folders that no longer exist on the remote.

    Notes
    -----
    Generic IMAP uses folders (not labels), so canonical folders ('inbox')
    and all other folders are created as Folder objects only.

    We don't canonicalize folder names to lowercase when saving because
    different backends may be case-sensitive or otherwise - code that
    references saved folder names should canonicalize if needed when doing
    comparisons.

    """
    account = db_session.query(Account).get(self.account_id)
    remote_folder_names = {
        f.display_name.rstrip()[:MAX_FOLDER_NAME_LENGTH]
        for f in raw_folders
    }

    assert 'inbox' in {f.role for f in raw_folders}, \
        'Account {} has no detected inbox folder'.format(
            account.email_address)

    local_folders = {
        f.name: f for f in db_session.query(Folder).filter(
            Folder.account_id == self.account_id)
    }

    # Delete folders no longer present on the remote.
    # Note that the folder with canonical_name='inbox' cannot be deleted;
    # remote_folder_names will always contain an entry corresponding to it.
    discard = set(local_folders) - remote_folder_names
    for name in discard:
        log.info('Folder deleted from remote', account_id=self.account_id,
                 name=name)
        cat = db_session.query(Category).get(
            local_folders[name].category_id)
        if cat is not None:
            db_session.delete(cat)
        del local_folders[name]

    # Create new folders
    for raw_folder in raw_folders:
        Folder.find_or_create(db_session, account, raw_folder.display_name,
                              raw_folder.role)

    # Set the should_run bit for existing folders to True (it's True by
    # default for new ones.)
    for f in local_folders.values():
        if f.imapsyncstatus:
            f.imapsyncstatus.sync_should_run = True

    db_session.commit()
def test_deleting_from_a_message_with_multiple_uids(db, default_account,
                                                    message, thread):
    """Check that deleting an imapuid from a message with multiple uids
    doesn't mark the message for deletion."""
    inbox_folder = Folder.find_or_create(db.session, default_account,
                                         "inbox", "inbox")
    sent_folder = Folder.find_or_create(db.session, default_account,
                                        "sent", "sent")

    add_fake_imapuid(db.session, default_account.id, message, sent_folder,
                     1337)
    add_fake_imapuid(db.session, default_account.id, message, inbox_folder,
                     2222)
    assert len(message.imapuids) == 2

    remove_deleted_uids(default_account.id, inbox_folder.id, [2222])
    db.session.expire_all()

    assert (
        message.deleted_at is None
    ), "The associated message should not have been marked for deletion."

    assert len(
        message.imapuids) == 1, "The message should have only one imapuid."
def _folders_for_labels(g_labels, account, db_session):
    """Given a set of Gmail label strings, return the set of associated
    Folder objects. Creates new (un-added, uncommitted) Folder instances
    if needed."""
    # Elements of g_labels may not have unicode type (in particular, if you
    # have a numeric label, e.g., '42'), so we need to coerce to unicode.
    labels = {unicode(l).lstrip('\\').lower() for l in g_labels}

    # The problem here is that Gmail's attempt to squash labels and
    # IMAP folders into the same abstraction doesn't work perfectly. In
    # particular, there is a '[Gmail]/Sent' folder, but *also* a 'Sent'
    # label, and so on. We handle this by only maintaining one folder
    # object that encapsulates both of these. If a Gmail user does not
    # have these folders enabled via IMAP, we create Folder rows
    # with no 'name' attribute and fill in the 'name' if the account
    # is later reconfigured.
    special_folders = {
        'inbox': account.inbox_folder,
        'sent': account.sent_folder,
        'draft': account.drafts_folder,
        'starred': account.starred_folder,
        'important': account.important_folder,
    }
    folders = set()
    for label in labels:
        if label in special_folders:
            folder = special_folders[label]
            if folder is None:
                folder = Folder.find_or_create(db_session, account, None,
                                               label)
            folders.add(folder)
        else:
            folders.add(
                Folder.find_or_create(db_session, account, label))
    return folders
def test_imap_search_unicode(db, imap_api_client, generic_account,
                             patch_crispin_client,
                             sorted_imap_threads):
    Folder.find_or_create(db.session, generic_account, '存档', '存档')
    search_client = get_search_client(generic_account)
    assert isinstance(search_client, IMAPSearchClient)

    threads = imap_api_client.get_data('/threads/search?q=存档')

    for sorted_thread, result_thread in zip(sorted_imap_threads, threads):
        assert sorted_thread.public_id == result_thread['id']
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders present on the remote backend for an account.

    * Create Folder objects.
    * Delete Folders that no longer exist on the remote.

    Notes
    -----
    Generic IMAP uses folders (not labels), so canonical folders ('inbox')
    and all other folders are created as Folder objects only.

    We don't canonicalize folder names to lowercase when saving because
    different backends may be case-sensitive or otherwise - code that
    references saved folder names should canonicalize if needed when doing
    comparisons.

    """
    account = db_session.query(Account).get(self.account_id)
    remote_folder_names = {f.display_name.rstrip()[:MAX_FOLDER_NAME_LENGTH]
                           for f in raw_folders}

    assert 'inbox' in {f.role for f in raw_folders}, \
        'Account {} has no detected inbox folder'.format(
            account.email_address)

    local_folders = {f.name: f for f in db_session.query(Folder).filter(
        Folder.account_id == self.account_id)}

    # Delete folders no longer present on the remote.
    # Note that the folder with canonical_name='inbox' cannot be deleted;
    # remote_folder_names will always contain an entry corresponding to it.
    discard = set(local_folders) - remote_folder_names
    for name in discard:
        log.info('Folder deleted from remote', account_id=self.account_id,
                 name=name)
        if local_folders[name].category_id is not None:
            cat = db_session.query(Category).get(
                local_folders[name].category_id)
            if cat is not None:
                db_session.delete(cat)
        del local_folders[name]

    # Create new folders
    for raw_folder in raw_folders:
        Folder.find_or_create(db_session, account, raw_folder.display_name,
                              raw_folder.role)

    # Set the should_run bit for existing folders to True (it's True by
    # default for new ones.)
    for f in local_folders.values():
        if f.imapsyncstatus:
            f.imapsyncstatus.sync_should_run = True

    db_session.commit()
def test_imap_search_unicode(db, imap_api_client, generic_account,
                             patch_crispin_client,
                             patch_handler_from_provider,
                             sorted_imap_threads):
    Folder.find_or_create(db.session, generic_account, '存档', '存档')
    search_client = get_search_client(generic_account)
    assert search_client.__class__.__name__ == 'IMAPSearchClient'

    threads = imap_api_client.get_data('/threads/search?q=存档')

    for sorted_thread, result_thread in zip(sorted_imap_threads, threads):
        assert sorted_thread.public_id == result_thread['id']
def test_gmail_search_unicode(db, api_client, test_gmail_thread,
                              patch_token_manager,
                              patch_gmail_search_response,
                              default_account,
                              sorted_gmail_threads):
    Folder.find_or_create(db.session, default_account, '存档', '存档')
    search_client = get_search_client(default_account)
    assert isinstance(search_client, GmailSearchClient)

    threads = api_client.get_data('/threads/search?q=存档')

    for sorted_thread, result_thread in zip(sorted_gmail_threads, threads):
        assert sorted_thread.public_id == result_thread['id']
def test_gmail_search_unicode(db, api_client, test_gmail_thread,
                              default_account,
                              patch_crispin_client,
                              patch_handler_from_provider,
                              sorted_gmail_threads):
    Folder.find_or_create(db.session, default_account, '存档', '存档')
    search_client = get_search_client(default_account)
    assert search_client.__class__.__name__ == 'GmailSearchClient'

    threads = api_client.get_data('/threads/search?q=存档')

    for sorted_thread, result_thread in zip(sorted_gmail_threads, threads):
        assert sorted_thread.public_id == result_thread['id']
def save_folder_names(log, account, folder_names, db_session):
    """
    Create Folder objects & map special folder names on Account objects.

    Folders that belong to an account and no longer exist in `folder_names`
    ARE DELETED.
    """
    # NOTE: We don't do anything like canonicalizing to lowercase because
    # different backends may be case-sensitive or not. Code that references
    # saved folder names should canonicalize if needed when doing
    # comparisons.

    assert 'inbox' in folder_names, 'Account {} has no detected inbox folder'\
        .format(account.email_address)

    folders = {f.name.lower(): f for f in
               db_session.query(Folder).filter_by(account=account)}

    for canonical_name in ['inbox', 'drafts', 'sent', 'spam', 'trash',
                           'starred', 'important', 'archive', 'all']:
        if canonical_name in folder_names:
            backend_folder_name = folder_names[canonical_name].lower()
            if backend_folder_name not in folders:
                folder = Folder.create(account, folder_names[canonical_name],
                                       db_session, canonical_name)
                attr_name = '{}_folder'.format(canonical_name)
                setattr(account, attr_name, verify_folder_name(
                    account.id, getattr(account, attr_name), folder))
            else:
                del folders[backend_folder_name]

    # Gmail labels, user-created IMAP/EAS folders, etc.
    if 'extra' in folder_names:
        for name in folder_names['extra']:
            name = name[:MAX_FOLDER_NAME_LENGTH]
            if name.lower() not in folders:
                folder = Folder.create(account, name, db_session)
                db_session.add(folder)
            if name.lower() in folders:
                del folders[name.lower()]

    # This may cascade to FolderItems and ImapUid (ONLY), which is what we
    # want--doing the update here short-circuits us syncing that change
    # later.
    log.info("Folders were deleted from the remote: {}".format(
        folders.keys()))
    for folder in folders.values():
        db_session.delete(folder)
        # TODO(emfree) delete associated tag

    # Create associated tags for any new folders.
    for folder in account.folders:
        folder.get_associated_tag(db_session)

    db_session.commit()
def test_generic_grouping(db, default_account):
    thread = add_fake_thread(db.session, default_account.namespace.id)
    message = add_fake_message(
        db.session,
        default_account.namespace.id,
        thread,
        subject="Golden Gate Park next Sat",
    )
    folder = Folder(account=default_account, name="Inbox",
                    canonical_name="inbox")
    ImapUid(message=message, account_id=default_account.id,
            msg_uid=2222, folder=folder)

    thread = add_fake_thread(db.session, default_account.namespace.id)

    account = add_generic_imap_account(db.session)
    message = add_fake_message(db.session, account.namespace.id, thread,
                               subject="Golden Gate Park next Sat")

    thread = fetch_corresponding_thread(db.session,
                                        default_account.namespace.id,
                                        message)
    assert thread is None, ("fetch_similar_threads should "
                            "heed namespace boundaries")
def sync_engine_stub(db, yahoo_account):
    # Create an Inbox folder for the Yahoo account and return a
    # FolderSyncEngine bound to it.
    db.session.add(Folder(account=yahoo_account, name='Inbox'))
    db.session.commit()
    engine = FolderSyncEngine(yahoo_account.id, yahoo_account.namespace.id,
                              "Inbox", TEST_YAHOO_EMAIL, "yahoo", None)
    return engine
def save_sent_email(account_id, message_id, db_session):
    """
    Create an email on the remote backend. Only used to work
    around providers who don't save sent messages themselves
    (I'm looking at you, iCloud).

    """
    account = db_session.query(Account).get(account_id)
    message = db_session.query(Message).get(message_id)
    if message is None:
        log.info('tried to create nonexistent message',
                 message_id=message_id, account_id=account_id)
        return

    create_backend_sent_folder = False
    if account.sent_folder is None:
        # account has no detected sent folder - create one.
        sent_folder = Folder.find_or_create(db_session, account,
                                            'Sent', 'sent')
        account.sent_folder = sent_folder
        create_backend_sent_folder = True

    mimemsg = _create_email(account, message)
    remote_save_sent = module_registry[account.provider].remote_save_sent
    remote_save_sent(account, account.sent_folder.name,
                     mimemsg, message.created_at,
                     create_backend_sent_folder)
def save_draft(account_id, message_id, db_session, args):
    """ Sync a new/updated draft back to the remote backend. """
    account = db_session.query(Account).get(account_id)
    message = db_session.query(Message).get(message_id)
    version = args.get('version')
    if message is None:
        log.info('tried to save nonexistent message as draft',
                 message_id=message_id, account_id=account_id)
        return
    if not message.is_draft:
        log.warning('tried to save non-draft message as draft',
                    message_id=message_id,
                    account_id=account_id)
        return
    if version != message.version:
        log.warning('tried to save outdated version of draft')
        return

    if account.drafts_folder is None:
        # account has no detected drafts folder - create one.
        drafts_folder = Folder.find_or_create(db_session, account,
                                              'Drafts', 'drafts')
        account.drafts_folder = drafts_folder

    mimemsg = _create_email(account, message)
    remote_save_draft = module_registry[account.provider].remote_save_draft
    remote_save_draft(account, account.drafts_folder.name, mimemsg,
                      db_session, message.created_at)
def add_new_imapuid(db_session, log, gmessage, folder_name, acc):
    """
    Add ImapUid object for this GMessage if we don't already have one.

    Parameters
    ----------
    gmessage : GMessage
        Message to add ImapUid for.
    folder_name : str
        Which folder to add the ImapUid in.
    acc : GmailAccount
        Which account to associate the message with. (Not looking this up
        within this function is a db access optimization.)

    """
    if not db_session.query(ImapUid.msg_uid).join(Folder).filter(
            Folder.name == folder_name,
            ImapUid.msg_uid == gmessage.uid).all():
        message = db_session.query(Message).filter_by(
            g_msgid=gmessage.g_metadata.msgid).one()
        new_imapuid = ImapUid(
            account=acc,
            folder=Folder.find_or_create(db_session, acc, folder_name),
            msg_uid=gmessage.uid, message=message)
        new_imapuid.update_imap_flags(gmessage.flags, gmessage.labels)
        db_session.add(new_imapuid)
        db_session.commit()
    else:
        log.debug('skipping imapuid creation', uid=gmessage.uid)
def add_new_imapuids(crispin_client, remote_g_metadata, syncmanager_lock,
                     uids):
    """
    Add ImapUid entries only for (already-downloaded) messages.

    If a message has already been downloaded via another folder, we only
    need to add `ImapUid` accounting for the current folder. `Message`
    objects etc. have already been created.

    """
    flags = crispin_client.flags(uids)

    with syncmanager_lock:
        with mailsync_session_scope() as db_session:
            # Since we prioritize download for messages in certain threads,
            # we may already have ImapUid entries despite calling this
            # method.
            local_folder_uids = {uid for uid, in
                                 db_session.query(ImapUid.msg_uid)
                                 .join(Folder)
                                 .filter(
                                     ImapUid.account_id ==
                                     crispin_client.account_id,
                                     Folder.name ==
                                     crispin_client.selected_folder_name,
                                     ImapUid.msg_uid.in_(uids))}
            uids = [uid for uid in uids if uid not in local_folder_uids]

            if uids:
                acc = db_session.query(GmailAccount).get(
                    crispin_client.account_id)

                # collate message objects to relate the new imapuids to
                imapuid_for = dict([(metadata.msgid, uid) for (uid, metadata)
                                    in remote_g_metadata.items()
                                    if uid in uids])
                imapuid_g_msgids = [remote_g_metadata[uid].msgid
                                    for uid in uids]
                message_for = dict([(imapuid_for[m.g_msgid], m) for m in
                                    db_session.query(Message)
                                    .join(ImapThread)
                                    .filter(
                                        Message.g_msgid.in_(
                                            imapuid_g_msgids),
                                        ImapThread.namespace_id ==
                                        acc.namespace.id)])

                # Stop Folder.find_or_create()'s query from triggering a
                # flush.
                with db_session.no_autoflush:
                    new_imapuids = [ImapUid(
                        account=acc,
                        folder=Folder.find_or_create(
                            db_session, acc,
                            crispin_client.selected_folder_name),
                        msg_uid=uid, message=message_for[uid])
                        for uid in uids if uid in message_for]
                    for item in new_imapuids:
                        # skip uids which have disappeared in the meantime
                        if item.msg_uid in flags:
                            item.update_flags_and_labels(
                                flags[item.msg_uid].flags,
                                flags[item.msg_uid].labels)
                    db_session.add_all(new_imapuids)
                    db_session.commit()
def create_db_objects(account_id, db_session, log, folder_name, raw_messages,
                      msg_create_fn, canonical_name=None):
    new_uids = []
    # TODO: Detect which namespace to add message to. (shared folders)
    # Look up message thread,
    acc = db_session.query(Account).get(account_id)
    folder = Folder.find_or_create(db_session, acc, folder_name,
                                   canonical_name)
    for msg in raw_messages:
        uid = msg_create_fn(db_session, log, acc, folder, msg)
        # Must ensure message objects are flushed because they reference
        # threads, which may be new, and later messages may need to belong
        # to the same thread. If we don't flush here and disable autoflush
        # within the message creation to avoid flushing incomplete messages,
        # we can't query for the (uncommitted) new thread id.
        #
        # We should probably refactor this later to use provider-specific
        # Message constructors to avoid creating incomplete objects in the
        # first place.
        db_session.add(uid)
        db_session.flush()
        if uid is not None:
            new_uids.append(uid)

    # imapuid, message, thread, labels
    return new_uids
def save_draft(account_id, message_id, db_session):
    """ Sync a new/updated draft back to the remote backend. """
    account = db_session.query(Account).get(account_id)
    message = db_session.query(Message).get(message_id)
    if message is None:
        log.info('tried to save nonexistent message as draft',
                 message_id=message_id, account_id=account_id)
        return
    if not message.is_draft:
        log.warning('tried to save non-draft message as draft',
                    message_id=message_id,
                    account_id=account_id)
        return

    recipients = Recipients(message.to_addr, message.cc_addr,
                            message.bcc_addr)
    blocks = [p.block for p in message.attachments]
    attachments = generate_attachments(blocks)
    mimemsg = create_email(account.sender_name, account.email_address,
                           message.inbox_uid, recipients, message.subject,
                           message.sanitized_body, attachments)

    if account.drafts_folder is None:
        # account has no detected drafts folder - create one.
        drafts_folder = Folder.find_or_create(db_session, account,
                                              'Drafts', 'drafts')
        account.drafts_folder = drafts_folder

    remote_save_draft = module_registry[account.provider].remote_save_draft
    remote_save_draft(account, account.drafts_folder.name,
                      mimemsg.to_string(), message.created_at)
def folder_sync_engine(db, generic_account):
    # Create an Inbox folder for the generic IMAP account and return a
    # FolderSyncEngine bound to it.
    db.session.add(Folder(account=generic_account, name='Inbox'))
    db.session.commit()
    engine = FolderSyncEngine(generic_account.id,
                              generic_account.namespace.id,
                              "Inbox",
                              generic_account.email_address,
                              generic_account.provider,
                              None)
    return engine
def __init__(self, account_id):
    self.account_id = account_id
    self.log = get_logger()
    self.log.bind(account_id=account_id)

    with session_scope() as db_session:
        account = db_session.query(ImapAccount).get(self.account_id)
        self.email_address = account.email_address
        self.provider_name = account.provider
        self.sender_name = account.name
        self.smtp_endpoint = account.smtp_endpoint

        if account.sent_folder is None:
            # account has no detected sent folder - create one.
            sent_folder = Folder.find_or_create(db_session, account,
                                                'sent', 'sent')
            account.sent_folder = sent_folder

        self.sent_folder = account.sent_folder.name

        self.auth_type = provider_info(self.provider_name,
                                       self.email_address)['auth']

        if self.auth_type == 'oauth2':
            try:
                self.auth_token = account.access_token
            except OAuthError:
                raise SendMailException('Error logging in.')
        else:
            assert self.auth_type == 'password'
            self.auth_token = account.password
def download_and_commit_uids(self, crispin_client, uids):
    start = datetime.utcnow()
    raw_messages = crispin_client.uids(uids)
    if not raw_messages:
        return 0

    new_uids = set()
    with self.syncmanager_lock:
        with session_scope(self.namespace_id) as db_session:
            account = Account.get(self.account_id, db_session)
            folder = Folder.get(self.folder_id, db_session)
            for msg in raw_messages:
                uid = self.create_message(db_session, account, folder, msg)
                if uid is not None:
                    db_session.add(uid)
                    db_session.flush()
                    new_uids.add(uid)
            db_session.commit()

    log.debug('Committed new UIDs',
              new_committed_message_count=len(new_uids))
    # If we downloaded uids, record message velocity (#uid / latency)
    if self.state == 'initial' and len(new_uids):
        self._report_message_velocity(datetime.utcnow() - start,
                                      len(new_uids))
    if self.is_first_message:
        self._report_first_message()
        self.is_first_message = False

    return len(new_uids)
def create_db_objects(account_id, db_session, log, folder_name, raw_messages,
                      msg_create_fn, canonical_name=None, identifier=None):
    new_uids = []
    # TODO: Detect which namespace to add message to. (shared folders)
    # Look up message thread,
    acc = db_session.query(Account).get(account_id)
    folder = Folder.find_or_create(db_session, acc, folder_name,
                                   canonical_name, identifier)
    for msg in raw_messages:
        uid = msg_create_fn(db_session, acc, folder, msg)
        # Must ensure message objects are flushed because they reference
        # threads, which may be new, and later messages may need to belong
        # to the same thread. If we don't flush here and disable autoflush
        # within the message creation to avoid flushing incomplete messages,
        # we can't query for the (uncommitted) new thread id.
        #
        # We should probably refactor this later to use provider-specific
        # Message constructors to avoid creating incomplete objects in the
        # first place.
        db_session.add(uid)
        db_session.flush()
        if uid is not None:
            new_uids.append(uid)

    # imapuid, message, thread, labels
    return new_uids
def download_and_commit_uids(self, crispin_client, uids):
    start = datetime.utcnow()
    raw_messages = crispin_client.uids(uids)
    if not raw_messages:
        return

    new_uids = set()
    with self.syncmanager_lock:
        with session_scope() as db_session:
            account = Account.get(self.account_id, db_session)
            folder = Folder.get(self.folder_id, db_session)
            raw_messages = self.__deduplicate_message_object_creation(
                db_session, raw_messages, account)
            if not raw_messages:
                return 0
            for msg in raw_messages:
                uid = self.create_message(db_session, account, folder, msg)
                if uid is not None:
                    db_session.add(uid)
                    db_session.commit()
                    new_uids.add(uid)

    log.info('Committed new UIDs',
             new_committed_message_count=len(new_uids))
    # If we downloaded uids, record message velocity (#uid / latency)
    if self.state == "initial" and len(new_uids):
        self._report_message_velocity(datetime.utcnow() - start,
                                      len(new_uids))
    if self.is_first_message:
        self._report_first_message()
        self.is_first_message = False

    self.saved_uids.update(new_uids)
def save_draft(account_id, message_id, db_session):
    """ Sync a new/updated draft back to the remote backend. """
    account = db_session.query(Account).get(account_id)
    message = db_session.query(Message).get(message_id)
    if message is None:
        log.info('tried to save nonexistent message as draft',
                 message_id=message_id, account_id=account_id)
        return
    if not message.is_draft:
        log.warning('tried to save non-draft message as draft',
                    message_id=message_id,
                    account_id=account_id)
        return

    recipients = Recipients(message.to_addr, message.cc_addr,
                            message.bcc_addr)
    blocks = [p.block for p in message.attachments]
    attachments = generate_attachments(blocks)
    mimemsg = create_email(account.name, account.email_address,
                           message.inbox_uid, recipients, message.subject,
                           message.sanitized_body, attachments)

    if account.drafts_folder is None:
        # account has no detected drafts folder - create one.
        drafts_folder = Folder.find_or_create(db_session, account,
                                              'Drafts', 'drafts')
        account.drafts_folder = drafts_folder

    remote_save_draft = module_registry[account.provider].remote_save_draft
    remote_save_draft(account, account.drafts_folder.name,
                      mimemsg.to_string(), message.created_at)
def download_and_commit_uids(self, crispin_client, uids):
    start = datetime.utcnow()
    raw_messages = crispin_client.uids(uids)
    if not raw_messages:
        return

    new_uids = set()
    with self.syncmanager_lock:
        with session_scope(self.namespace_id) as db_session:
            account = Account.get(self.account_id, db_session)
            folder = Folder.get(self.folder_id, db_session)
            raw_messages = self.__deduplicate_message_object_creation(
                db_session, raw_messages, account)
            if not raw_messages:
                return 0
            for msg in raw_messages:
                uid = self.create_message(db_session, account, folder, msg)
                if uid is not None:
                    db_session.add(uid)
                    db_session.commit()
                    new_uids.add(uid)

    log.info('Committed new UIDs',
             new_committed_message_count=len(new_uids))
    # If we downloaded uids, record message velocity (#uid / latency)
    if self.state == "initial" and len(new_uids):
        self._report_message_velocity(datetime.utcnow() - start,
                                      len(new_uids))
    if self.is_first_message:
        self._report_first_message()
        self.is_first_message = False

    self.saved_uids.update(new_uids)
def test_generic_grouping(db, default_account):
    thread = add_fake_thread(db.session, default_account.namespace.id)
    message = add_fake_message(db.session, default_account.namespace.id,
                               thread, subject="Golden Gate Park next Sat")
    folder = Folder(account=default_account, name='Inbox',
                    canonical_name='inbox')
    ImapUid(message=message, account_id=default_account.id,
            msg_uid=2222, folder=folder)

    thread = add_fake_thread(db.session, default_account.namespace.id)

    new_namespace = Namespace()
    db.session.add(new_namespace)
    db.session.commit()

    message = add_fake_message(db.session, new_namespace.id, thread,
                               subject="Golden Gate Park next Sat")
    thread = fetch_corresponding_thread(db.session,
                                        default_account.namespace.id,
                                        message)
    assert thread is None, ("fetch_similar_threads should "
                            "heed namespace boundaries")
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders, labels present on the remote backend for an account.

    * Create Folder/Label objects.
    * Delete Folders/Labels that no longer exist on the remote.

    Notes
    -----
    Gmail uses IMAP folders and labels.
    Canonical folders ('all', 'trash', 'spam') are therefore mapped to both
    Folder and Label objects, everything else is created as a Label only.

    We don't canonicalize names to lowercase when saving because different
    backends may be case-sensitive or otherwise - code that references
    saved names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(self.account_id)
    remote_label_names = {l.display_name.rstrip()[:MAX_LABEL_NAME_LENGTH]
                          for l in raw_folders}

    assert "all" in {f.role for f in raw_folders}, \
        "Account {} has no detected All Mail folder".format(
            account.email_address)

    local_labels = {l.name: l for l in db_session.query(Label).filter(
        Label.account_id == self.account_id).all()}

    # Delete labels no longer present on the remote.
    # Note that the label with canonical_name='all' cannot be deleted;
    # remote_label_names will always contain an entry corresponding to it.
    discard = set(local_labels) - set(remote_label_names)
    for name in discard:
        log.info("Label deleted from remote", account_id=self.account_id,
                 name=name)
        db_session.delete(local_labels[name])

    # Create new labels, folders
    for raw_folder in raw_folders:
        Label.find_or_create(db_session, account, raw_folder.display_name,
                             raw_folder.role)

        if raw_folder.role in ("all", "spam", "trash"):
            folder = Folder.find_or_create(db_session, account,
                                           raw_folder.display_name,
                                           raw_folder.role)
            if folder.name != raw_folder.display_name:
                log.info(
                    "Folder name changed on remote",
                    account_id=self.account_id,
                    role=raw_folder.role,
                    new_name=raw_folder.display_name,
                    name=folder.name,
                )
                folder.name = raw_folder.display_name

    # Ensure sync_should_run is True for the folders we want to sync (for
    # Gmail, that's just all folders, since we created them above if
    # they didn't exist.)
    for folder in account.folders:
        if folder.imapsyncstatus:
            folder.imapsyncstatus.sync_should_run = True

    db_session.commit()
def __deduplicate_message_object_creation(self, db_session, raw_messages,
                                          account):
    """
    We deduplicate messages based on g_msgid: if we've previously saved a
    Message object for this raw message, we don't create a new one. But we
    do create a new ImapUid, associate it to the message, and update flags
    and categories accordingly.

    Note: we could do this prior to downloading the actual message body,
    but that's really more complicated than it's worth. This operation is
    not super common unless you're regularly moving lots of messages to
    trash or spam, and even then the overhead of just downloading the body
    is generally not that high.

    """
    new_g_msgids = {msg.g_msgid for msg in raw_messages}
    existing_g_msgids = g_msgids(self.namespace_id, db_session,
                                 in_=new_g_msgids)
    brand_new_messages = [
        m for m in raw_messages if m.g_msgid not in existing_g_msgids
    ]
    previously_synced_messages = [
        m for m in raw_messages if m.g_msgid in existing_g_msgids
    ]

    if previously_synced_messages:
        log.info('saving new uids for existing messages',
                 count=len(previously_synced_messages))
        account = Account.get(self.account_id, db_session)
        folder = Folder.get(self.folder_id, db_session)
        for raw_message in previously_synced_messages:
            message_obj = db_session.query(Message).filter(
                Message.namespace_id == self.namespace_id,
                Message.g_msgid == raw_message.g_msgid).first()
            if message_obj is None:
                log.warning('Message disappeared while saving new uid',
                            g_msgid=raw_message.g_msgid,
                            uid=raw_message.uid)
                brand_new_messages.append(raw_message)
                continue
            already_have_uid = (
                (raw_message.uid, self.folder_id) in
                {(u.msg_uid, u.folder_id) for u in message_obj.imapuids}
            )
            if already_have_uid:
                log.warning('Skipping existing UID for message',
                            uid=raw_message.uid, message_id=message_obj.id)
                continue
            uid = ImapUid(account=account, folder=folder,
                          msg_uid=raw_message.uid, message=message_obj)
            uid.update_flags(raw_message.flags)
            uid.update_labels(raw_message.g_labels)
            common.update_message_metadata(db_session, account, message_obj,
                                           uid.is_draft)
            db_session.commit()

    return brand_new_messages
def __deduplicate_message_object_creation(self, db_session, raw_messages,
                                          account):
    """
    We deduplicate messages based on g_msgid: if we've previously saved a
    Message object for this raw message, we don't create a new one. But we
    do create a new ImapUid, associate it to the message, and update flags
    and categories accordingly.

    Note: we could do this prior to downloading the actual message body,
    but that's really more complicated than it's worth. This operation is
    not super common unless you're regularly moving lots of messages to
    trash or spam, and even then the overhead of just downloading the body
    is generally not that high.

    """
    new_g_msgids = {msg.g_msgid for msg in raw_messages}
    existing_g_msgids = g_msgids(self.namespace_id, db_session,
                                 in_=new_g_msgids)
    brand_new_messages = [m for m in raw_messages
                          if m.g_msgid not in existing_g_msgids]
    previously_synced_messages = [m for m in raw_messages
                                  if m.g_msgid in existing_g_msgids]

    if previously_synced_messages:
        log.info('saving new uids for existing messages',
                 count=len(previously_synced_messages))
        account = Account.get(self.account_id, db_session)
        folder = Folder.get(self.folder_id, db_session)
        for raw_message in previously_synced_messages:
            message_obj = db_session.query(Message).filter(
                Message.namespace_id == self.namespace_id,
                Message.g_msgid == raw_message.g_msgid).first()
            if message_obj is None:
                log.warning(
                    'Message disappeared while saving new uid',
                    g_msgid=raw_message.g_msgid,
                    uid=raw_message.uid)
                brand_new_messages.append(raw_message)
                continue
            already_have_uid = (
                (raw_message.uid, self.folder_id) in
                {(u.msg_uid, u.folder_id) for u in message_obj.imapuids}
            )
            if already_have_uid:
                log.warning('Skipping existing UID for message',
                            uid=raw_message.uid, message_id=message_obj.id)
                continue
            uid = ImapUid(account=account, folder=folder,
                          msg_uid=raw_message.uid, message=message_obj)
            uid.update_flags(raw_message.flags)
            uid.update_labels(raw_message.g_labels)
            common.update_message_metadata(
                db_session, account, message_obj, uid.is_draft)
            db_session.commit()

    return brand_new_messages
def test_soft_delete(db, config):
    from inbox.models import Folder, Message
    from inbox.models.backends.imap import ImapUid
    f = Folder(name='DOES NOT EXIST', account_id=ACCOUNT_ID)
    db.session.add(f)
    db.session.flush()

    m = Message()
    m.thread_id = 1
    m.received_date = datetime.datetime.utcnow()
    m.size = 0
    m.sanitized_body = ""
    m.snippet = ""

    u = ImapUid(message=m, account_id=ACCOUNT_ID, folder_id=f.id,
                msg_uid=9999, extra_flags="")
    db.session.add_all([m, u])
    f.mark_deleted()
    u.mark_deleted()
    db.session.commit()
    m_id = m.id

    # bypass custom query method to confirm creation
    db.new_session(ignore_soft_deletes=False)
    f = db.session.query(Folder).filter_by(name='DOES NOT EXIST').one()
    assert f, "Can't find Folder object"
    assert f.deleted_at is not None, "Folder not marked as deleted"

    db.new_session(ignore_soft_deletes=True)
    with pytest.raises(NoResultFound):
        folders = db.session.query(Folder).filter(
            Folder.name == 'DOES NOT EXIST').one()
    count = db.session.query(Folder).filter(
        Folder.name == 'DOES NOT EXIST').count()
    assert count == 0, "Shouldn't find any deleted folders!"

    m = db.session.query(Message).filter_by(id=m_id).one()
    assert not m.imapuids, "imapuid was deleted!"
def test_soft_delete(db, config):
    from inbox.models import Folder, Message
    from inbox.models.backends.imap import ImapUid
    f = Folder(name='DOES NOT EXIST', account_id=ACCOUNT_ID)
    db.session.add(f)
    db.session.flush()

    m = Message()
    m.namespace_id = NAMESPACE_ID
    m.thread_id = 1
    m.received_date = datetime.datetime.utcnow()
    m.size = 0
    m.sanitized_body = ""
    m.snippet = ""

    u = ImapUid(message=m, account_id=ACCOUNT_ID, folder_id=f.id,
                msg_uid=9999, extra_flags="")
    db.session.add_all([m, u])
    f.mark_deleted()
    u.mark_deleted()
    db.session.commit()
    m_id = m.id

    # bypass custom query method to confirm creation
    db.new_session(ignore_soft_deletes=False)
    f = db.session.query(Folder).filter_by(name='DOES NOT EXIST').one()
    assert f, "Can't find Folder object"
    assert f.deleted_at is not None, "Folder not marked as deleted"

    db.new_session(ignore_soft_deletes=True)
    with pytest.raises(NoResultFound):
        db.session.query(Folder).filter(
            Folder.name == 'DOES NOT EXIST').one()
    count = db.session.query(Folder).filter(
        Folder.name == 'DOES NOT EXIST').count()
    assert count == 0, "Shouldn't find any deleted folders!"

    m = db.session.query(Message).filter_by(id=m_id).one()
    assert not m.imapuids, "imapuid was deleted!"
def create_db_objects(account_id, db_session, log, folder_name, raw_messages,
                      msg_create_fn):
    new_uids = []
    # TODO: Detect which namespace to add message to. (shared folders)
    # Look up message thread,
    acc = db_session.query(Account).get(account_id)
    folder = Folder.find_or_create(db_session, acc, folder_name)
    for msg in raw_messages:
        uid = msg_create_fn(db_session, log, acc, folder, msg)
        if uid is not None:
            new_uids.append(uid)

    # imapuid, message, thread, labels
    return new_uids
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders, labels present on the remote backend for an account.

    * Create Folder/Label objects.
    * Delete Folders/Labels that no longer exist on the remote.

    Notes
    -----
    Gmail uses IMAP folders and labels.
    Canonical folders ('all', 'trash', 'spam') are therefore mapped to both
    Folder and Label objects, everything else is created as a Label only.

    We don't canonicalize names to lowercase when saving because different
    backends may be case-sensitive or otherwise - code that references
    saved names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(self.account_id)

    # Create new labels, folders
    for raw_folder in raw_folders:
        if raw_folder.role == 'starred':
            # The starred state of messages is tracked separately
            # (we set Message.is_starred from the '\\Flagged' flag)
            continue

        Label.find_or_create(db_session, account, raw_folder.display_name,
                             raw_folder.role)

        if raw_folder.role in ('all', 'spam', 'trash'):
            folder = Folder.find_or_create(db_session, account,
                                           raw_folder.display_name,
                                           raw_folder.role)
            if folder.name != raw_folder.display_name:
                log.info('Folder name changed on remote',
                         account_id=self.account_id,
                         role=raw_folder.role,
                         new_name=raw_folder.display_name,
                         name=folder.name)
                folder.name = raw_folder.display_name

    # Ensure sync_should_run is True for the folders we want to sync (for
    # Gmail, that's just all folders, since we created them above if
    # they didn't exist.)
    for folder in account.folders:
        if folder.imapsyncstatus:
            folder.imapsyncstatus.sync_should_run = True

    db_session.commit()
def test_deleting_from_a_message_with_multiple_uids(db, default_account,
                                                    message, thread):
    """Check that deleting an imapuid from a message with multiple uids
    doesn't mark the message for deletion."""
    inbox_folder = Folder.find_or_create(db.session, default_account,
                                         'inbox', 'inbox')
    sent_folder = Folder.find_or_create(db.session, default_account,
                                        'sent', 'sent')

    add_fake_imapuid(db.session, default_account.id, message, sent_folder,
                     1337)
    add_fake_imapuid(db.session, default_account.id, message, inbox_folder,
                     2222)
    assert len(message.imapuids) == 2

    remove_deleted_uids(default_account.id, inbox_folder.id, [2222])
    db.session.expire_all()

    assert message.deleted_at is None, \
        "The associated message should not have been marked for deletion."

    assert len(message.imapuids) == 1, \
        "The message should have only one imapuid."
def add_new_imapuids(crispin_client, log, db_session, remote_g_metadata,
                     syncmanager_lock, uids):
    """
    Add ImapUid entries only for (already-downloaded) messages.

    If a message has already been downloaded via another folder, we only
    need to add `ImapUid` accounting for the current folder. `Message`
    objects etc. have already been created.

    """
    flags = crispin_client.flags(uids)

    with syncmanager_lock:
        log.debug('add_new_imapuids acquired syncmanager_lock')
        # Since we prioritize download for messages in certain threads, we
        # may already have ImapUid entries despite calling this method.
        local_folder_uids = {uid for uid, in
                             db_session.query(ImapUid.msg_uid)
                             .join(Folder)
                             .filter(
                                 Folder.name ==
                                 crispin_client.selected_folder_name,
                                 ImapUid.msg_uid.in_(uids))}
        uids = [uid for uid in uids if uid not in local_folder_uids]

        if uids:
            # collate message objects to relate the new imapuids
            imapuid_uid_for = dict([(metadata.msgid, uid)
                                    for (uid, metadata)
                                    in remote_g_metadata.items()
                                    if uid in uids])
            imapuid_g_msgids = [remote_g_metadata[uid].msgid for uid in uids]
            message_for = dict([(imapuid_uid_for[mm.g_msgid], mm) for mm in
                                db_session.query(Message).filter(
                                    Message.g_msgid.in_(imapuid_g_msgids))])

            acc = db_session.query(GmailAccount).get(
                crispin_client.account_id)

            # Folder.find_or_create()'s query will otherwise trigger a
            # flush.
            with db_session.no_autoflush:
                new_imapuids = [ImapUid(
                    account=acc,
                    folder=Folder.find_or_create(
                        db_session, acc,
                        crispin_client.selected_folder_name),
                    msg_uid=uid, message=message_for[uid])
                    for uid in uids]
                for item in new_imapuids:
                    item.update_imap_flags(flags[item.msg_uid].flags,
                                           flags[item.msg_uid].labels)
                db_session.add_all(new_imapuids)
                db_session.commit()
def __init__(self, account_id, folder_name, folder_id, email_address,
             provider_name, syncmanager_lock):
    bind_context(self, 'foldersyncengine', account_id, folder_id)
    self.account_id = account_id
    self.folder_name = folder_name
    self.folder_id = folder_id
    if self.folder_name.lower() == 'inbox':
        self.poll_frequency = INBOX_POLL_FREQUENCY
    else:
        self.poll_frequency = DEFAULT_POLL_FREQUENCY
    self.syncmanager_lock = syncmanager_lock
    self.state = None
    self.provider_name = provider_name
    self.last_fast_refresh = None
    self.conn_pool = connection_pool(self.account_id)

    # Metric flags for sync performance
    self.is_initial_sync = False
    self.is_first_sync = False
    self.is_first_message = False

    with session_scope() as db_session:
        account = Account.get(self.account_id, db_session)
        self.namespace_id = account.namespace.id
        assert self.namespace_id is not None, "namespace_id is None"

        folder = Folder.get(self.folder_id, db_session)
        if folder:
            self.is_initial_sync = folder.initial_sync_end is None
            self.is_first_sync = folder.initial_sync_start is None
            self.is_first_message = self.is_first_sync

    self.state_handlers = {
        'initial': self.initial_sync,
        'initial uidinvalid': self.resync_uids,
        'poll': self.poll,
        'poll uidinvalid': self.resync_uids,
    }

    Greenlet.__init__(self)

    self.heartbeat_status = HeartbeatStatusProxy(self.account_id,
                                                 self.folder_id,
                                                 self.folder_name,
                                                 email_address,
                                                 self.provider_name)
def add_new_imapuid(db_session, gmessage, folder_name, acc):
    """
    Add ImapUid object for this GMessage if we don't already have one.

    Parameters
    ----------
    gmessage : GMessage
        Message to add ImapUid for.
    folder_name : str
        Which folder to add the ImapUid in.
    acc : GmailAccount
        Which account to associate the message with. (Not looking this up
        within this function is a db access optimization.)

    """
    if not db_session.query(ImapUid.msg_uid).join(Folder).filter(
            Folder.name == folder_name,
            ImapUid.account_id == acc.id,
            ImapUid.msg_uid == gmessage.uid).all():
        try:
            message = db_session.query(Message).join(ImapThread).filter(
                ImapThread.g_thrid == gmessage.g_metadata.thrid,
                Message.g_thrid == gmessage.g_metadata.thrid,
                Message.g_msgid == gmessage.g_metadata.msgid,
                ImapThread.namespace_id == acc.namespace.id).one()
        except NoResultFound:
            # this may occur when a thread is expanded and those messages
            # are downloaded and committed, then new messages on that thread
            # arrive and get added to the download queue before this code is
            # run
            log.debug('no Message object found, skipping imapuid creation',
                      uid=gmessage.uid, g_msgid=gmessage.g_metadata.msgid)
            return
        new_imapuid = ImapUid(account=acc,
                              folder=Folder.find_or_create(
                                  db_session, acc, folder_name),
                              msg_uid=gmessage.uid, message=message)
        new_imapuid.update_imap_flags(gmessage.flags, gmessage.labels)
        new_imapuid.g_labels = [label for label in gmessage.labels]
        db_session.add(new_imapuid)
        db_session.commit()
    else:
        log.debug('skipping imapuid creation',
                  uid=gmessage.uid, g_msgid=gmessage.g_metadata.msgid)
def add_new_imapuid(db_session, gmessage, folder_name, acc):
    """
    Add ImapUid object for this GMessage if we don't already have one.

    Parameters
    ----------
    gmessage : GMessage
        Message to add ImapUid for.
    folder_name : str
        Which folder to add the ImapUid in.
    acc : GmailAccount
        Which account to associate the message with. (Not looking this up
        within this function is a db access optimization.)

    """
    if not db_session.query(ImapUid.msg_uid).join(Folder).filter(
            Folder.name == folder_name,
            ImapUid.account_id == acc.id,
            ImapUid.msg_uid == gmessage.uid).all():
        try:
            message = db_session.query(Message).join(ImapThread).filter(
                ImapThread.g_thrid == gmessage.g_metadata.thrid,
                Message.g_thrid == gmessage.g_metadata.thrid,
                Message.g_msgid == gmessage.g_metadata.msgid,
                ImapThread.namespace_id == acc.namespace.id).one()
        except NoResultFound:
            # this may occur when a thread is expanded and those messages
            # are downloaded and committed, then new messages on that thread
            # arrive and get added to the download queue before this code is
            # run
            log.debug('no Message object found, skipping imapuid creation',
                      uid=gmessage.uid, g_msgid=gmessage.g_metadata.msgid)
            return
        new_imapuid = ImapUid(
            account=acc,
            folder=Folder.find_or_create(db_session, acc, folder_name),
            msg_uid=gmessage.uid, message=message)
        new_imapuid.update_imap_flags(gmessage.flags, gmessage.labels)
        new_imapuid.g_labels = [label for label in gmessage.labels]
        db_session.add(new_imapuid)
        db_session.commit()
    else:
        log.debug('skipping imapuid creation',
                  uid=gmessage.uid, g_msgid=gmessage.g_metadata.msgid)
def test_threading_limit(db, folder_sync_engine, monkeypatch):
    """Test that custom threading doesn't produce arbitrarily long threads,
    which eventually break things."""
    from inbox.models import Message, Thread, Account
    # Shorten bound to make test faster
    MAX_THREAD_LENGTH = 10
    monkeypatch.setattr(
        'inbox.mailsync.backends.imap.generic.MAX_THREAD_LENGTH',
        MAX_THREAD_LENGTH)
    namespace_id = folder_sync_engine.namespace_id

    account = db.session.query(Account).get(folder_sync_engine.account_id)
    account.namespace.create_canonical_tags()
    account.inbox_folder = Folder(account=account,
                                  name='Inbox',
                                  canonical_name='inbox')
    folder = account.inbox_folder
    msg = MockRawMessage([])
    for i in range(3 * MAX_THREAD_LENGTH):
        m = Message()
        m.namespace_id = namespace_id
        m.received_date = datetime.datetime.utcnow()
        m.references = []
        m.size = 0
        m.body = ''
        m.from_addr = [("Karim Hamidou", "*****@*****.**")]
        m.to_addr = [("Eben Freeman", "*****@*****.**")]
        m.snippet = ''
        m.subject = 'unique subject'
        uid = ImapUid(message=m, account=account, msg_uid=2222 + i,
                      folder=folder)
        folder_sync_engine.add_message_attrs(db.session, uid, msg)
        db.session.add(m)
        db.session.commit()

    new_threads = db.session.query(Thread). \
        filter(Thread.subject == 'unique subject').all()
    assert len(new_threads) == 3
    assert all(len(thread.messages) == MAX_THREAD_LENGTH
               for thread in new_threads)
def create_folder_with_syncstatus(account, name, canonical_name,
                                  db_session):
    # Create a folder along with an associated ImapFolderSyncStatus row.
    folder = Folder.find_or_create(db_session, account, name, canonical_name)
    folder.imapsyncstatus = ImapFolderSyncStatus(account=account)
    db_session.commit()
    return folder
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders, labels present on the remote backend for an account.

    * Create Folder/Label objects.
    * Delete Folders/Labels that no longer exist on the remote.

    Notes
    -----
    Gmail uses IMAP folders and labels.
    Canonical folders ('all', 'trash', 'spam') are therefore mapped to both
    Folder and Label objects, everything else is created as a Label only.

    We don't canonicalize names to lowercase when saving because different
    backends may be case-sensitive or otherwise - code that references
    saved names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(self.account_id)

    # Create new labels, folders
    for raw_folder in raw_folders:
        if raw_folder.role == 'starred':
            # The starred state of messages is tracked separately
            # (we set Message.is_starred from the '\\Flagged' flag)
            continue

        Label.find_or_create(db_session, account, raw_folder.display_name,
                             raw_folder.role)

        if raw_folder.role in ('all', 'spam', 'trash'):
            folder = db_session.query(Folder).filter(
                Folder.account_id == account.id,
                Folder.canonical_name == raw_folder.role).first()
            if folder:
                if folder.name != raw_folder.display_name:
                    log.info('Folder name changed on remote',
                             account_id=self.account_id,
                             role=raw_folder.role,
                             new_name=raw_folder.display_name,
                             name=folder.name)
                    folder.name = raw_folder.display_name
                if folder.category:
                    if folder.category.display_name != \
                            raw_folder.display_name:
                        folder.category.display_name = \
                            raw_folder.display_name
                else:
                    log.info('Creating category for folder',
                             account_id=self.account_id,
                             folder_name=folder.name)
                    folder.category = Category.find_or_create(
                        db_session, namespace_id=account.namespace.id,
                        name=raw_folder.role,
                        display_name=raw_folder.display_name,
                        type_='folder')
            else:
                Folder.find_or_create(db_session, account,
                                      raw_folder.display_name,
                                      raw_folder.role)

    # Ensure sync_should_run is True for the folders we want to sync (for
    # Gmail, that's just all folders, since we created them above if
    # they didn't exist.)
    for folder in account.folders:
        if folder.imapsyncstatus:
            folder.imapsyncstatus.sync_should_run = True

    db_session.commit()
def save_folder_names(log, account_id, folder_names, db_session):
    """
    Create Folder objects & map special folder names on Account objects.

    Folders that belong to an account and no longer exist in `folder_names`
    ARE DELETED, unless they are "dangling" (do not have a 'name' set).

    We don't canonicalize folder names to lowercase when saving because
    different backends may be case-sensitive or not. Code that references
    saved folder names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(account_id)
    assert "inbox" in folder_names, \
        "Account {} has no detected inbox folder".format(
            account.email_address)

    all_folders = db_session.query(Folder).filter_by(
        account_id=account.id).all()
    # dangled_folders don't map to upstream account folders (may be used for
    # keeping track of e.g. special Gmail labels which are exposed as IMAP
    # flags but not folders)
    folder_for = {f.name: f for f in all_folders if f.name is not None}
    dangled_folder_for = {f.canonical_name: f for f in all_folders
                          if f.name is None}

    canonical_names = {"inbox", "drafts", "sent", "spam", "trash", "starred",
                       "important", "archive", "all"}
    for canonical_name in canonical_names:
        if canonical_name in folder_names:
            backend_folder_name = folder_names[canonical_name]
            if backend_folder_name not in folder_for:
                # Reconcile dangled folders which now exist on the remote
                if canonical_name in dangled_folder_for:
                    folder = dangled_folder_for[canonical_name]
                    folder.name = folder_names[canonical_name]
                    del dangled_folder_for[canonical_name]
                else:
                    folder = Folder.find_or_create(db_session, account, None,
                                                   canonical_name)
                    if folder.name != folder_names[canonical_name]:
                        if folder.name is not None:
                            del folder_for[folder.name]
                        folder.name = folder_names[canonical_name]
                folder.get_associated_tag(db_session)
                attr_name = "{}_folder".format(canonical_name)
                id_attr_name = "{}_folder_id".format(canonical_name)
                if getattr(account, id_attr_name) != folder.id:
                    # NOTE: updating the relationship (i.e., attr_name) also
                    # updates the associated foreign key (i.e., id_attr_name)
                    setattr(account, attr_name, folder)
            else:
                del folder_for[backend_folder_name]

    # Gmail labels, user-created IMAP/EAS folders, etc.
    if "extra" in folder_names:
        for name in folder_names["extra"]:
            name = name[:MAX_FOLDER_NAME_LENGTH]
            if name not in folder_for:
                # Folder.create() takes care of adding to the session
                folder = Folder.create(account, name, db_session)
                folder.get_associated_tag(db_session)
            else:
                del folder_for[name]

    # This may cascade to FolderItems and ImapUid (ONLY), which is what we
    # want--doing the update here short-circuits us syncing that change
    # later.
    if len(folder_for):
        log.info("folders deleted from remote", folders=folder_for.keys())

    for name, folder in folder_for.iteritems():
        db_session.delete(folder)
        # TODO(emfree) delete associated tag

    db_session.commit()
def save_folder_names(log, account_id, folder_names, db_session):
    """
    Create Folder objects & map special folder names on Account objects.

    Folders that belong to an account and no longer exist in `folder_names`
    ARE DELETED, unless they are "dangling" (do not have a 'name' set).

    We don't canonicalize folder names to lowercase when saving because
    different backends may be case-sensitive or not. Code that references
    saved folder names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(account_id)
    assert 'inbox' in folder_names, 'Account {} has no detected inbox folder'\
        .format(account.email_address)

    all_folders = db_session.query(Folder).filter_by(
        account_id=account.id).all()
    # dangled_folders don't map to upstream account folders (may be used for
    # keeping track of e.g. special Gmail labels which are exposed as IMAP
    # flags but not folders)
    folder_for = {f.name.lower(): f for f in all_folders
                  if f.name is not None}
    dangled_folder_for = {f.canonical_name: f for f in all_folders
                          if f.name is None}

    canonical_names = {'inbox', 'drafts', 'sent', 'spam', 'trash',
                       'starred', 'important', 'archive', 'all'}
    for canonical_name in canonical_names:
        if canonical_name in folder_names:
            backend_folder_name = folder_names[canonical_name].lower()
            if backend_folder_name not in folder_for:
                # Reconcile dangled folders which now exist on the remote
                if canonical_name in dangled_folder_for:
                    folder = dangled_folder_for[canonical_name]
                    folder.name = folder_names[canonical_name]
                    del dangled_folder_for[canonical_name]
                else:
                    folder = Folder.create(account,
                                           folder_names[canonical_name],
                                           db_session, canonical_name)
                folder.get_associated_tag(db_session)
                attr_name = '{}_folder'.format(canonical_name)
                setattr(account, attr_name, verify_folder_name(
                    account.id, getattr(account, attr_name), folder))
            else:
                del folder_for[backend_folder_name]

    # Gmail labels, user-created IMAP/EAS folders, etc.
    if 'extra' in folder_names:
        for name in folder_names['extra']:
            name = name[:MAX_FOLDER_NAME_LENGTH]
            if name.lower() not in folder_for:
                # Folder.create() takes care of adding to the session
                folder = Folder.create(account, name, db_session)
                folder.get_associated_tag(db_session)
            if name.lower() in folder_for:
                del folder_for[name.lower()]

    # This may cascade to FolderItems and ImapUid (ONLY), which is what we
    # want--doing the update here short-circuits us syncing that change
    # later.
    log.info("folders deleted from remote", folders=folder_for.keys())
    for name, folder in folder_for.iteritems():
        db_session.delete(folder)
        # TODO(emfree) delete associated tag

    db_session.commit()
def imap_folder(db, generic_account):
    # Create an inbox folder ("Boîte de réception") for the generic IMAP
    # account.
    f = Folder.find_or_create(db.session, generic_account,
                              u"Boîte de réception", "inbox")
    db.session.add(f)
    db.session.commit()
    return f
def folder_names(self):
    # Different providers have different names for folders, here
    # we have a default map for common name mapping, additional
    # mappings can be provided via the provider configuration file
    default_folder_map = {
        'INBOX': 'inbox',
        'DRAFTS': 'drafts',
        'DRAFT': 'drafts',
        'JUNK': 'spam',
        'ARCHIVE': 'archive',
        'SENT': 'sent',
        'TRASH': 'trash',
        'SPAM': 'spam'
    }

    # Some providers also provide flags to determine common folders
    # Here we read these flags and apply the mapping
    flag_to_folder_map = {
        '\\Trash': 'trash',
        '\\Sent': 'sent',
        '\\Drafts': 'drafts',
        '\\Junk': 'spam',
        '\\Inbox': 'inbox',
        '\\Spam': 'spam'
    }

    # Additionally we provide a custom mapping for providers that
    # don't fit into the defaults.
    info = provider_info(self.provider_name)
    folder_map = info.get('folder_map', {})

    if self._folder_names is None:
        folders = self._fetch_folder_list()
        self._folder_names = dict()
        for flags, delimiter, name in folders:
            if u'\\Noselect' in flags:
                # special folders that can't contain messages
                pass
            # TODO: internationalization support
            elif name in folder_map:
                self._folder_names[folder_map[name]] = name
            elif name.upper() in default_folder_map:
                self._folder_names[default_folder_map[name.upper()]] = name
            else:
                matched = False
                for flag in flags:
                    if flag in flag_to_folder_map:
                        self._folder_names[flag_to_folder_map[flag]] = name
                        matched = True
                if not matched:
                    self._folder_names.setdefault('extra',
                                                  list()).append(name)
        # TODO: support subfolders

        # Create any needed folders that don't exist on the backend
        needed_folders = set(
            ['inbox', 'drafts', 'sent', 'spam', 'trash', 'archive'])
        needed_folders -= set(self._folder_names.keys())
        for folder_id in needed_folders:
            name = folder_id.capitalize()
            self.create_folder(name)

            with session_scope() as db_session:
                account = db_session.query(Account).get(self.account_id)
                folder = Folder.find_or_create(db_session, account,
                                               name, folder_id)
                setattr(account, folder_id + '_folder', folder)
                db_session.commit()

            self._folder_names[folder_id] = name

    return self._folder_names
def save_folder_names(self, db_session, raw_folders):
    """
    Save the folders, labels present on the remote backend for an account.

    * Create Folder/Label objects.
    * Delete Folders/Labels that no longer exist on the remote.

    Notes
    -----
    Gmail uses IMAP folders and labels.
    Canonical folders ('all', 'trash', 'spam') are therefore mapped to both
    Folder and Label objects, everything else is created as a Label only.

    We don't canonicalize names to lowercase when saving because different
    backends may be case-sensitive or otherwise - code that references
    saved names should canonicalize if needed when doing comparisons.

    """
    account = db_session.query(Account).get(self.account_id)

    old_labels = {label for label in db_session.query(Label).filter(
        Label.account_id == self.account_id,
        Label.deleted_at == None)}

    new_labels = set()

    # Create new labels, folders
    for raw_folder in raw_folders:
        if raw_folder.role == 'starred':
            # The starred state of messages is tracked separately
            # (we set Message.is_starred from the '\\Flagged' flag)
            continue

        label = Label.find_or_create(db_session, account,
                                     raw_folder.display_name,
                                     raw_folder.role)
        new_labels.add(label)

        if label.deleted_at is not None:
            # This is a label which was previously marked as deleted
            # but which mysteriously reappeared. Unmark it.
            log.info('Deleted label recreated on remote',
                     name=raw_folder.display_name)
            label.deleted_at = None
            label.category.deleted_at = None

        if raw_folder.role in ('all', 'spam', 'trash'):
            folder = db_session.query(Folder).filter(
                Folder.account_id == account.id,
                Folder.canonical_name == raw_folder.role).first()
            if folder:
                if folder.name != raw_folder.display_name:
                    log.info('Folder name changed on remote',
                             account_id=self.account_id,
                             role=raw_folder.role,
                             new_name=raw_folder.display_name,
                             name=folder.name)
                    folder.name = raw_folder.display_name
                if folder.category:
                    if folder.category.display_name != \
                            raw_folder.display_name:
                        folder.category.display_name = raw_folder.display_name  # noqa
                else:
                    log.info('Creating category for folder',
                             account_id=self.account_id,
                             folder_name=folder.name)
                    folder.category = Category.find_or_create(
                        db_session, namespace_id=account.namespace.id,
                        name=raw_folder.role,
                        display_name=raw_folder.display_name,
                        type_='folder')
            else:
                Folder.find_or_create(db_session, account,
                                      raw_folder.display_name,
                                      raw_folder.role)

    # Ensure sync_should_run is True for the folders we want to sync (for
    # Gmail, that's just all folders, since we created them above if
    # they didn't exist.)
    for folder in account.folders:
        if folder.imapsyncstatus:
            folder.imapsyncstatus.sync_should_run = True

    # Go through the labels which have been "deleted" (i.e: they don't
    # show up when running LIST) and mark them as such.
    # We can't delete labels directly because Gmail allows users to hide
    # folders --- we need to check that there's no messages still
    # associated with the label.
    deleted_labels = old_labels - new_labels
    for deleted_label in deleted_labels:
        deleted_label.deleted_at = datetime.now()
        cat = deleted_label.category
        cat.deleted_at = datetime.now()

    db_session.commit()
def different_imap_folder(db, generic_account):
    # Create an archive folder for the generic IMAP account.
    f = Folder.find_or_create(db.session, generic_account,
                              "Archive", "archive")
    db.session.add(f)
    db.session.commit()
    return f
def folder_names(self):
    # Different providers have different names for folders, here
    # we have a default map for common name mapping, additional
    # mappings can be provided via the provider configuration file
    default_folder_map = {'INBOX': 'inbox',
                          'DRAFTS': 'drafts',
                          'DRAFT': 'drafts',
                          'JUNK': 'spam',
                          'ARCHIVE': 'archive',
                          'SENT': 'sent',
                          'TRASH': 'trash',
                          'SPAM': 'spam'}

    # Some providers also provide flags to determine common folders
    # Here we read these flags and apply the mapping
    flag_to_folder_map = {'\\Trash': 'trash',
                          '\\Sent': 'sent',
                          '\\Drafts': 'drafts',
                          '\\Junk': 'spam',
                          '\\Inbox': 'inbox',
                          '\\Spam': 'spam'}

    # Additionally we provide a custom mapping for providers that
    # don't fit into the defaults.
    info = provider_info(self.provider_name, self.email_address)
    folder_map = info.get('folder_map', {})

    if self._folder_names is None:
        folders = self._fetch_folder_list()
        self._folder_names = dict()
        for flags, delimiter, name in folders:
            if u'\\Noselect' in flags:
                # special folders that can't contain messages
                pass
            # TODO: internationalization support
            elif name in folder_map:
                self._folder_names[folder_map[name]] = name
            elif name.upper() in default_folder_map:
                self._folder_names[default_folder_map[name.upper()]] = name
            else:
                matched = False
                for flag in flags:
                    if flag in flag_to_folder_map:
                        self._folder_names[flag_to_folder_map[flag]] = name
                        matched = True
                if not matched:
                    self._folder_names.setdefault(
                        'extra', list()).append(name)
        # TODO: support subfolders

        # Create any needed folders that don't exist on the backend
        needed_folders = set(['inbox', 'drafts', 'sent', 'spam', 'trash',
                              'archive'])
        needed_folders -= set(self._folder_names.keys())
        for folder_id in needed_folders:
            name = folder_id.capitalize()
            self.create_folder(name)

            with session_scope() as db_session:
                account = db_session.query(Account).get(self.account_id)
                folder = Folder.find_or_create(db_session, account,
                                               name, folder_id)
                setattr(account, folder_id + '_folder', folder)
                db_session.commit()

            self._folder_names[folder_id] = name

    return self._folder_names