def update_thread_labels(thread, folder_name, g_labels, db_session):
    """Make sure `thread` has all the right labels.

    Reconciles ``thread.folders`` with the Gmail labels (`g_labels`)
    reported for a message, plus the folder the message was seen in
    (`folder_name`).

    Parameters
    ----------
    thread: Thread
        Thread whose folder set is mutated in place.
    folder_name: str
        Name of the folder the triggering message lives in.
    g_labels: iterable
        Raw Gmail labels; ``None`` entries are skipped.
    db_session:
        SQLAlchemy session used to find or create Folder rows.

    Returns
    -------
    set
        The normalized label set the thread should carry.
    """
    existing_labels = \
        {folder.name.lower() for folder in thread.folders
         if folder.name is not None} | \
        {folder.canonical_name for folder in thread.folders
         if folder.canonical_name is not None}
    # NOTE(review): non-unicode labels are coerced with unicode() but are
    # not lstripped/lowercased like unicode ones -- presumably g_labels
    # are always unicode in practice; confirm upstream.
    new_labels = {l.lstrip('\\').lower() if isinstance(l, unicode)
                  else unicode(l) for l in g_labels if l is not None}
    new_labels.add(folder_name.lower())

    # Remove labels that have been deleted -- note that the \Inbox, \Sent,
    # \Important, \Starred, and \Drafts labels are per-message, not
    # per-thread, but since we always work at the thread level, _we_ apply
    # the label to the whole thread.
    # TODO: properly aggregate \Inbox, \Sent, \Important, and \Drafts
    # per-message so we can detect deletions properly.
    folders_to_discard = []
    for folder in thread.folders:
        if folder.canonical_name not in ('inbox', 'sent', 'drafts',
                                         'important', 'starred', 'all'):
            if folder.lowercase_name not in new_labels:
                folders_to_discard.append(folder)
    for folder in folders_to_discard:
        thread.folders.discard(folder)

    # The problem here is that Gmail's attempt to squash labels and
    # IMAP folders into the same abstraction doesn't work perfectly. In
    # particular, there is a '[Gmail]/Sent' folder, but *also* a 'Sent'
    # label, and so on. We handle this by only maintaining one folder
    # object that encapsulates both of these. If a Gmail user does not
    # have these folders enabled via IMAP, we create Folder rows
    # with no 'name' attribute and fill in the 'name' if the account
    # is later reconfigured.
    # Hoisted out of the loop below: this mapping is loop-invariant and
    # was previously rebuilt on every iteration.
    canonical_labels = {
        'sent': thread.namespace.account.sent_folder,
        'draft': thread.namespace.account.drafts_folder,
        'starred': thread.namespace.account.starred_folder,
        'important': thread.namespace.account.important_folder}

    # add new labels
    for label in new_labels:
        if label.lower() not in existing_labels:
            if label in canonical_labels:
                folder = canonical_labels[label]
                if folder:
                    thread.folders.add(folder)
                else:
                    folder = Folder.find_or_create(
                        db_session, thread.namespace.account, None, label)
                    thread.folders.add(folder)
            else:
                folder = Folder.find_or_create(db_session,
                                               thread.namespace.account,
                                               label)
                thread.folders.add(folder)
    return new_labels
def update_thread_labels(thread, folder_name, g_labels, db_session):
    """Make sure `thread` has all the right labels.

    Reconciles ``thread.folders`` with the Gmail labels (`g_labels`)
    reported for a message, plus the folder the message was seen in
    (`folder_name`). Returns the normalized label set the thread should
    carry.
    """
    existing_labels = \
        {folder.name.lower() for folder in thread.folders
         if folder.name is not None} | \
        {folder.canonical_name for folder in thread.folders
         if folder.canonical_name is not None}
    # NOTE(review): non-unicode labels are coerced with unicode() but are
    # not lstripped/lowercased like unicode ones -- presumably g_labels
    # are always unicode in practice; confirm upstream.
    new_labels = {l.lstrip('\\').lower() if isinstance(l, unicode)
                  else unicode(l) for l in g_labels if l is not None}
    new_labels.add(folder_name.lower())

    # Remove labels that have been deleted -- note that the \Inbox, \Sent,
    # \Important, \Starred, and \Drafts labels are per-message, not
    # per-thread, but since we always work at the thread level, _we_ apply
    # the label to the whole thread.
    # TODO: properly aggregate \Inbox, \Sent, \Important, and \Drafts
    # per-message so we can detect deletions properly.
    folders_to_discard = []
    for folder in thread.folders:
        if folder.canonical_name not in ('inbox', 'sent', 'drafts',
                                         'important', 'starred', 'all'):
            if folder.lowercase_name not in new_labels:
                folders_to_discard.append(folder)
    for folder in folders_to_discard:
        thread.folders.discard(folder)

    # The problem here is that Gmail's attempt to squash labels and
    # IMAP folders into the same abstraction doesn't work perfectly. In
    # particular, there is a '[Gmail]/Sent' folder, but *also* a 'Sent'
    # label, and so on. We handle this by only maintaining one folder
    # object that encapsulates both of these. If a Gmail user does not
    # have these folders enabled via IMAP, we create Folder rows
    # with no 'name' attribute and fill in the 'name' if the account
    # is later reconfigured.
    # Hoisted out of the loop below: this mapping is loop-invariant and
    # was previously rebuilt on every iteration.
    canonical_labels = {
        'sent': thread.namespace.account.sent_folder,
        'draft': thread.namespace.account.drafts_folder,
        'starred': thread.namespace.account.starred_folder,
        'important': thread.namespace.account.important_folder}

    # add new labels
    for label in new_labels:
        if label.lower() not in existing_labels:
            if label in canonical_labels:
                folder = canonical_labels[label]
                if folder:
                    thread.folders.add(folder)
                else:
                    folder = Folder.find_or_create(
                        db_session, thread.namespace.account, None, label)
                    thread.folders.add(folder)
            else:
                folder = Folder.find_or_create(db_session,
                                               thread.namespace.account,
                                               label)
                thread.folders.add(folder)
    return new_labels
def add_fake_folder(db_session, default_account, display_name='All Mail',
                    name='all'):
    """Return a test Folder row for `default_account`, creating it if it
    does not already exist."""
    from inbox.models.folder import Folder
    folder = Folder.find_or_create(db_session, default_account,
                                   display_name, name)
    return folder
def set_remote_trash(account, thread_id, trash, db_session):
    """Move `thread_id` into (or back out of) the account's trash folder
    on the remote backend."""
    thread = db_session.query(Thread).get(thread_id)

    if account.trash_folder is None:
        # account has no detected trash folder - create one.
        account.trash_folder = Folder.find_or_create(db_session, account,
                                                     'Trash', 'trash')

    if not trash:
        # Un-trash: move the thread back into the inbox.
        return remote_move(account, thread_id, account.trash_folder.name,
                           account.inbox_folder.name, db_session)

    # apparently it's not possible to index an association
    # proxy.
    folders = [folder for folder in thread.folders]
    assert len(folders) == 1, "A thread belongs to only one folder"
    # Arbitrarily pick the first folder since there's no support for
    # threads belonging to multiple folders on non-gmail backends.
    return remote_move(account, thread_id, folders[0].name,
                       account.trash_folder.name, db_session,
                       create_destination=True)
def set_remote_spam(account, thread_id, spam, db_session):
    """Mark or unmark `thread_id` as spam on the remote backend."""
    if account.spam_folder is None:
        # account has no detected spam folder - create one.
        account.spam_folder = Folder.find_or_create(db_session, account,
                                                    'Spam', 'spam')
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    source_names = [f.name for f in thread.folders]

    if spam:
        # Move every folder the thread lives in into spam.
        for name in source_names:
            remote_move(account, thread_id, name, account.spam_folder.name,
                        db_session, create_destination=True)
    else:
        remote_move(account, thread_id, account.spam_folder.name,
                    account.inbox_folder.name, db_session)
def set_remote_archived(account, thread_id, archived, db_session):
    """Archive `thread_id` (inbox -> archive) or un-archive it
    (archive -> inbox) on the remote backend."""
    if account.archive_folder is None:
        # account has no detected archive folder - create one.
        account.archive_folder = Folder.find_or_create(db_session, account,
                                                       'Archive', 'archive')

    if archived:
        return remote_move(account, thread_id, account.inbox_folder.name,
                           account.archive_folder.name, db_session,
                           create_destination=True)
    return remote_move(account, thread_id, account.archive_folder.name,
                       account.inbox_folder.name, db_session)
def test_adding_message_to_thread(db):
    """recompute_thread_labels is not invoked when a new message is added
    (only when UID metadata changes, or when a UID is deleted). Test that
    tag changes work when adding messages to a thread."""
    account = db.session.query(Account).get(ACCOUNT_ID)
    account.namespace.create_canonical_tags()
    thread = db.session.query(Thread).get(THREAD_ID)
    account.trash_folder = Folder(name='Trash', account=account)
    # NOTE: constructing the FolderItem associates the thread with the new
    # trash folder as a side effect; the local name is otherwise unused.
    fld_item = FolderItem(thread=thread, folder=account.trash_folder)
    folder_names = [folder.name for folder in thread.folders]
    # Build a new message carrying two uids: one in the inbox, one in
    # trash, each with its own g_labels.
    m = Message(namespace_id=account.namespace.id, subject='test message',
                thread_id=thread.id, received_date=datetime.datetime.now(),
                size=64, sanitized_body="body", snippet="snippet")
    uid = ImapUid(account=account, message=m,
                  g_labels=['\\Inbox', 'test-label'], msg_uid=22L,
                  folder_id=account.inbox_folder.id)
    uid.folder = account.inbox_folder
    uid2 = ImapUid(account=account, message=m, g_labels=['test-2'],
                   msg_uid=24L, folder_id=account.trash_folder.id)
    uid2.folder = account.trash_folder
    thread.messages.append(m)
    # Adding uids should fold their labels into the thread's folder set.
    add_any_new_thread_labels(thread, uid, db.session)
    add_any_new_thread_labels(thread, uid2, db.session)
    folder_names = [folder.name for folder in thread.folders]
    for folder in folder_names:
        assert folder in ['Inbox', 'Trash', 'test-label', 'test-2',
                          '[Gmail]/All Mail', '[Gmail]/Important'],\
            "all folders should be present"
    # Now, remove the message
    m.imapuids.remove(uid2)
    db.session.delete(uid2)
    db.session.flush()
    # Deleting a uid triggers a full label recompute; the deleted uid's
    # label must disappear from the thread.
    recompute_thread_labels(thread, db.session)
    folder_names = [folder.name for folder in thread.folders]
    assert 'test-2' not in folder_names,\
        "test-2 label should have been removed from thread"
def set_remote_archived(account, thread_id, archived, db_session):
    """Move `thread_id` between the inbox and archive folders remotely."""
    if account.archive_folder is None:
        # account has no detected archive folder - create one.
        folder = Folder.find_or_create(db_session, account, 'Archive',
                                       'archive')
        account.archive_folder = folder

    if archived:
        inbox_name = account.inbox_folder.name
        archive_name = account.archive_folder.name
        return remote_move(account, thread_id, inbox_name, archive_name,
                           db_session, create_destination=True)
    else:
        archive_name = account.archive_folder.name
        inbox_name = account.inbox_folder.name
        return remote_move(account, thread_id, archive_name, inbox_name,
                           db_session)
def test_recompute_thread_labels_removes_trash(db):
    """Recomputing labels drops the trash folder when the latest message
    carries the \\Inbox label."""
    account = db.session.query(Account).get(ACCOUNT_ID)
    thread = db.session.query(Thread).get(THREAD_ID)
    account.trash_folder = Folder(name='Trash', account_id=account.id)
    db.session.flush()
    # Check that the we remove the trash folder from a thread
    # if the latest message has the inbox flag.
    # To do this, we manufacture this situation.
    g_labels = thread.messages[-1].imapuids[-1].g_labels
    if '\\Inbox' not in g_labels:
        g_labels.append('\\Inbox')
    thread.folders.add(account.trash_folder)
    recompute_thread_labels(thread, db.session)
    assert account.trash_folder not in thread.folders,\
        "should have removed trash folder from thread"
def test_recompute_thread_labels_removes_trash(db, default_account, thread):
    """Recomputing labels drops the trash folder when the latest message
    carries the \\Inbox label (fixture-based variant)."""
    default_account.trash_folder = Folder(name='Trash',
                                          account_id=default_account.id)
    # Give the thread at least one message with a uid in the inbox so
    # there is a "latest message" whose g_labels we can manipulate.
    message = add_fake_message(db.session, default_account.namespace.id,
                               thread)
    add_fake_imapuid(db.session, default_account.id, message,
                     default_account.inbox_folder, 22)
    db.session.commit()
    # Check that the we remove the trash folder from a thread
    # if the latest message has the inbox flag.
    # To do this, we manufacture this situation.
    g_labels = thread.messages[-1].imapuids[-1].g_labels
    if '\\Inbox' not in g_labels:
        g_labels.append('\\Inbox')
    thread.folders.add(default_account.trash_folder)
    recompute_thread_labels(thread, db.session)
    assert default_account.trash_folder not in thread.folders,\
        "should have removed trash folder from thread"
def set_remote_trash(account, thread_id, trash, db_session):
    """Move every folder of `thread_id` into trash (or restore the thread
    to the inbox) on the remote backend."""
    if account.trash_folder is None:
        # account has no detected trash folder - create one.
        account.trash_folder = Folder.find_or_create(db_session, account,
                                                     'Trash', 'trash')
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    source_names = [f.name for f in thread.folders]

    if not trash:
        remote_move(account, thread_id, account.trash_folder.name,
                    account.inbox_folder.name, db_session)
        return

    for name in source_names:
        remote_move(account, thread_id, name, account.trash_folder.name,
                    db_session, create_destination=True)
def set_remote_trash(account, thread_id, trash, db_session):
    """Remote-sync a thread's trashed state by moving its folders."""
    trash_folder = account.trash_folder
    if trash_folder is None:
        # account has no detected trash folder - create one.
        trash_folder = Folder.find_or_create(db_session, account, 'Trash',
                                             'trash')
        account.trash_folder = trash_folder
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    names = [folder.name for folder in thread.folders]

    if trash:
        for src in names:
            remote_move(account, thread_id, src, trash_folder.name,
                        db_session, create_destination=True)
    else:
        remote_move(account, thread_id, trash_folder.name,
                    account.inbox_folder.name, db_session)
def set_remote_archived(account, thread_id, archived, db_session):
    """Move every folder of `thread_id` into the archive (or restore the
    thread to the inbox) on the remote backend."""
    if account.archive_folder is None:
        # account has no detected archive folder - create one.
        account.archive_folder = Folder.find_or_create(db_session, account,
                                                       'Archive', 'archive')
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    source_names = [f.name for f in thread.folders]

    if not archived:
        remote_move(account, thread_id, account.archive_folder.name,
                    account.inbox_folder.name, db_session)
        return

    for name in source_names:
        remote_move(account, thread_id, name, account.archive_folder.name,
                    db_session, create_destination=True)
def set_remote_archived(account, thread_id, archived, db_session):
    """Remote-sync a thread's archived state by moving its folders."""
    archive = account.archive_folder
    if archive is None:
        # account has no detected archive folder - create one.
        archive = Folder.find_or_create(db_session, account, 'Archive',
                                        'archive')
        account.archive_folder = archive
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    names = [folder.name for folder in thread.folders]

    if archived:
        for src in names:
            remote_move(account, thread_id, src, archive.name, db_session,
                        create_destination=True)
    else:
        remote_move(account, thread_id, archive.name,
                    account.inbox_folder.name, db_session)
def set_remote_spam(account, thread_id, spam, db_session):
    """Remote-sync a thread's spam state by moving its folders."""
    spam_dest = account.spam_folder
    if spam_dest is None:
        # account has no detected spam folder - create one.
        spam_dest = Folder.find_or_create(db_session, account, 'Spam',
                                          'spam')
        account.spam_folder = spam_dest
        db_session.commit()

    thread = db_session.query(Thread).get(thread_id)
    # FIXME @karim: not sure if we should exclude sent or not.
    names = [folder.name for folder in thread.folders]

    if spam:
        for src in names:
            remote_move(account, thread_id, src, spam_dest.name,
                        db_session, create_destination=True)
    else:
        remote_move(account, thread_id, spam_dest.name,
                    account.inbox_folder.name, db_session)
def set_remote_trash(account, thread_id, trash, db_session):
    """Trash or un-trash `thread_id` on a non-gmail remote backend."""
    thread = db_session.query(Thread).get(thread_id)
    if account.trash_folder is None:
        # account has no detected trash folder - create one.
        account.trash_folder = Folder.find_or_create(db_session, account,
                                                     'Trash', 'trash')
    if trash:
        # apparently it's not possible to index an association
        # proxy.
        current_folders = [f for f in thread.folders]
        assert len(current_folders) == 1, \
            "A thread belongs to only one folder"
        # Arbitrarily pick the first folder since there's no support for
        # threads belonging to multiple folders on non-gmail backends.
        source = current_folders[0].name
        return remote_move(account, thread_id, source,
                           account.trash_folder.name, db_session,
                           create_destination=True)
    return remote_move(account, thread_id, account.trash_folder.name,
                       account.inbox_folder.name, db_session)
def update_thread_labels(thread, folder_name, g_labels, db_session):
    # Reconcile `thread.folders` with the Gmail labels seen on a message
    # (`g_labels`) plus the folder it was seen in (`folder_name`).
    # Returns the normalized set of labels the thread should carry.
    #
    # NOTE(review): `folder.name.lower()` assumes every folder has a
    # non-None name; later revisions of this function guard against
    # name being None -- confirm whether that can happen here.
    existing_labels = {folder.name.lower() for folder in thread.folders}
    new_labels = {l.lstrip('\\').lower() for l in g_labels}
    new_labels.add(folder_name.lower())

    # Remove labels that have been deleted -- note that the \Inbox, \Sent,
    # \Important, and \Drafts labels are per-message, not per-thread, but
    # since we always work at the thread level, _we_ apply the label to the
    # whole thread.
    # NOTE(review): 'starred' is handled when *adding* labels below but is
    # not in this keep-list, so a starred folder may be dropped here --
    # confirm whether that is intended.
    thread.folders = {folder for folder in thread.folders
                      if folder.name.lower() in new_labels or
                      folder.name.lower() in ('inbox', 'sent', 'drafts',
                                              'important')}

    # add new labels
    for label in new_labels:
        if label.lower() not in existing_labels:
            # The problem here is that Gmail's attempt to squash labels and
            # IMAP folders into the same abstraction doesn't work
            # perfectly. In particular, there is a '[Gmail]/Sent' folder,
            # but *also* a 'Sent' label, and so on. We handle this by only
            # maintaining one folder object that encapsulates both of
            # these.
            if label == 'sent':
                thread.folders.add(thread.namespace.account.sent_folder)
            elif label == 'draft':
                thread.folders.add(thread.namespace.account.drafts_folder)
            elif label == 'starred':
                thread.folders.add(thread.namespace.account.starred_folder)
            elif label == 'important':
                thread.folders.add(
                    thread.namespace.account.important_folder)
            else:
                folder = Folder.find_or_create(db_session,
                                               thread.namespace.account,
                                               label)
                thread.folders.add(folder)
    return new_labels
def upgrade():
    """Migrate folder-name string columns to folder_id foreign keys.

    Renames foldersync -> imapfoldersyncstatus and uidvalidity ->
    imapfolderinfo, then backfills the new folder_id columns from the
    Folder table before dropping the old folder_name columns. Statement
    order is load-bearing: DDL renames must happen before reflection,
    and data backfill must happen before the old columns are dropped.
    """
    from inbox.models.session import session_scope
    from inbox.models.folder import Folder
    from inbox.sqlalchemy_ext.util import JSON
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)

    ### foldersync => imapfoldersyncstatus
    # note that renaming a table does in fact migrate constraints + indexes
    # too
    op.rename_table('foldersync', 'imapfoldersyncstatus')
    op.alter_column('imapfoldersyncstatus', '_sync_status',
                    existing_type=JSON(), nullable=True,
                    new_column_name='_metrics')
    op.add_column('imapfoldersyncstatus',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### uidvalidity => imapfolderinfo
    op.rename_table('uidvalidity', 'imapfolderinfo')
    op.alter_column('imapfolderinfo', 'uid_validity',
                    existing_type=sa.Integer(), nullable=False,
                    new_column_name='uidvalidity')
    op.alter_column('imapfolderinfo', 'highestmodseq',
                    existing_type=sa.Integer(), nullable=True)
    op.drop_constraint('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                       type_='foreignkey')
    op.alter_column('imapfolderinfo', 'imapaccount_id',
                    existing_type=sa.Integer(), nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])
    op.add_column('imapfolderinfo',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### imapuid
    op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey')
    op.alter_column('imapuid', 'imapaccount_id',
                    existing_type=sa.Integer(), nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapuid_ibfk_1', 'imapuid', 'imapaccount',
                          ['account_id'], ['id'])

    ### migrate data and add new constraints
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    # EAS tables only exist on deployments with the Exchange backend.
    if 'easfoldersync' in Base.metadata.tables:
        op.rename_table('easfoldersync', 'easfoldersyncstatus')
        op.add_column('easfoldersyncstatus',
                      sa.Column('folder_id', sa.Integer(), nullable=False))
        op.alter_column('easfoldersyncstatus', '_sync_status',
                        existing_type=JSON(), nullable=True,
                        new_column_name='_metrics')
        # Re-reflect so the renamed table is visible below.
        Base.metadata.reflect(engine)

        class EASFolderSyncStatus(Base):
            __table__ = Base.metadata.tables['easfoldersyncstatus']

    class ImapFolderSyncStatus(Base):
        __table__ = Base.metadata.tables['imapfoldersyncstatus']

    class ImapFolderInfo(Base):
        __table__ = Base.metadata.tables['imapfolderinfo']

    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:
        # (account_id, lowercased folder name) -> folder id lookup table.
        folder_id_for = dict([((account_id, name.lower()), id_)
                              for id_, account_id, name in db_session.query(
                                  Folder.id, Folder.account_id, Folder.name)])
        for status in db_session.query(ImapFolderSyncStatus):
            print "migrating", status.folder_name
            status.folder_id = folder_id_for[(status.account_id,
                                              status.folder_name.lower())]
        db_session.commit()
        if 'easfoldersyncstatus' in Base.metadata.tables:
            for status in db_session.query(EASFolderSyncStatus):
                print "migrating", status.folder_name
                folder_id = folder_id_for.get(
                    (status.account_id, status.folder_name.lower()))
                if folder_id is not None:
                    status.folder_id = folder_id
                else:
                    # EAS folder rows *may* not exist if have no messages
                    folder = Folder(account_id=status.account_id,
                                    name=status.folder_name)
                    db_session.add(folder)
                    db_session.commit()
                    status.folder_id = folder.id
            db_session.commit()
            # some weird alembic bug? need to drop and recreate this FK
            op.drop_constraint('easfoldersyncstatus_ibfk_1',
                               'easfoldersyncstatus', type_='foreignkey')
            op.drop_column('easfoldersyncstatus', 'folder_name')
            op.create_foreign_key('easfoldersyncstatus_ibfk_1',
                                  'easfoldersyncstatus', 'easaccount',
                                  ['account_id'], ['id'])
            op.create_foreign_key('easfoldersyncstatus_ibfk_2',
                                  'easfoldersyncstatus', 'folder',
                                  ['folder_id'], ['id'])
            op.create_unique_constraint('account_id', 'easfoldersyncstatus',
                                        ['account_id', 'folder_id'])

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfoldersyncstatus_ibfk_1', 'imapfoldersyncstatus',
                       type_='foreignkey')
    op.drop_constraint('account_id', 'imapfoldersyncstatus', type_='unique')
    op.drop_column('imapfoldersyncstatus', 'folder_name')
    op.create_foreign_key('imapfoldersyncstatus_ibfk_1',
                          'imapfoldersyncstatus', 'imapaccount',
                          ['account_id'], ['id'])
    op.create_foreign_key('imapfoldersyncstatus_ibfk_2',
                          'imapfoldersyncstatus', 'folder',
                          ['folder_id'], ['id'])
    op.create_unique_constraint('account_id', 'imapfoldersyncstatus',
                                ['account_id', 'folder_id'])

    # NOTE: reuses `folder_id_for` built in the first session scope.
    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:
        for info in db_session.query(ImapFolderInfo):
            print "migrating", info.folder_name
            info.folder_id = folder_id_for[(info.account_id,
                                            info.folder_name.lower())]
        db_session.commit()

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                       type_='foreignkey')
    op.drop_constraint('imapaccount_id', 'imapfolderinfo', type_='unique')
    op.drop_column('imapfolderinfo', 'folder_name')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])
    op.create_foreign_key('imapfolderinfo_ibfk_2', 'imapfolderinfo',
                          'folder', ['folder_id'], ['id'])
    op.create_unique_constraint('imapaccount_id', 'imapfolderinfo',
                                ['account_id', 'folder_id'])
def folder(db, default_account):
    """Fixture: the Gmail '[Gmail]/All Mail' folder for the default
    account, created on first use."""
    from inbox.models.folder import Folder
    all_mail = Folder.find_or_create(db.session, default_account,
                                     '[Gmail]/All Mail', 'all')
    return all_mail
def folder(db, default_account):
    """Fixture returning the canonical 'all' folder for the default
    account."""
    from inbox.models.folder import Folder
    return Folder.find_or_create(
        db.session, default_account, '[Gmail]/All Mail', 'all')
def add_fake_folder(db_session, default_account,
                    display_name='All Mail', name='all'):
    """Find or create a test folder with the given display and canonical
    names."""
    from inbox.models.folder import Folder
    created = Folder.find_or_create(db_session, default_account,
                                    display_name, name)
    return created
def upgrade():
    """Migrate folder-name string columns to folder_id foreign keys.

    Renames foldersync -> imapfoldersyncstatus and uidvalidity ->
    imapfolderinfo, then backfills the new folder_id columns from the
    Folder table before dropping the old folder_name columns. Statement
    order is load-bearing: DDL renames must happen before reflection,
    and data backfill must happen before the old columns are dropped.
    """
    from inbox.ignition import main_engine
    from inbox.models.folder import Folder
    from inbox.models.session import session_scope
    from inbox.sqlalchemy_ext.util import JSON
    engine = main_engine(pool_size=1, max_overflow=0)

    # foldersync => imapfoldersyncstatus
    # note that renaming a table does in fact migrate constraints + indexes
    # too
    op.rename_table("foldersync", "imapfoldersyncstatus")
    op.alter_column(
        "imapfoldersyncstatus",
        "_sync_status",
        existing_type=JSON(),
        nullable=True,
        new_column_name="_metrics",
    )
    op.add_column("imapfoldersyncstatus",
                  sa.Column("folder_id", sa.Integer(), nullable=False))

    # uidvalidity => imapfolderinfo
    op.rename_table("uidvalidity", "imapfolderinfo")
    op.alter_column(
        "imapfolderinfo",
        "uid_validity",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="uidvalidity",
    )
    op.alter_column("imapfolderinfo", "highestmodseq",
                    existing_type=sa.Integer(), nullable=True)
    op.drop_constraint("imapfolderinfo_ibfk_1", "imapfolderinfo",
                       type_="foreignkey")
    op.alter_column(
        "imapfolderinfo",
        "imapaccount_id",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="account_id",
    )
    op.create_foreign_key("imapfolderinfo_ibfk_1", "imapfolderinfo",
                          "imapaccount", ["account_id"], ["id"])
    op.add_column("imapfolderinfo",
                  sa.Column("folder_id", sa.Integer(), nullable=False))

    # imapuid
    op.drop_constraint("imapuid_ibfk_1", "imapuid", type_="foreignkey")
    op.alter_column(
        "imapuid",
        "imapaccount_id",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="account_id",
    )
    op.create_foreign_key("imapuid_ibfk_1", "imapuid", "imapaccount",
                          ["account_id"], ["id"])

    # migrate data and add new constraints
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    # EAS tables only exist on deployments with the Exchange backend.
    if "easfoldersync" in Base.metadata.tables:
        op.rename_table("easfoldersync", "easfoldersyncstatus")
        op.add_column("easfoldersyncstatus",
                      sa.Column("folder_id", sa.Integer(), nullable=False))
        op.alter_column(
            "easfoldersyncstatus",
            "_sync_status",
            existing_type=JSON(),
            nullable=True,
            new_column_name="_metrics",
        )
        # Re-reflect so the renamed table is visible below.
        Base.metadata.reflect(engine)

        class EASFolderSyncStatus(Base):
            __table__ = Base.metadata.tables["easfoldersyncstatus"]

    class ImapFolderSyncStatus(Base):
        __table__ = Base.metadata.tables["imapfoldersyncstatus"]

    class ImapFolderInfo(Base):
        __table__ = Base.metadata.tables["imapfolderinfo"]

    with session_scope(versioned=False) as db_session:
        # (account_id, lowercased folder name) -> folder id lookup table.
        folder_id_for = dict([((account_id, name.lower()), id_)
                              for id_, account_id, name in db_session.query(
                                  Folder.id, Folder.account_id, Folder.name)])
        for status in db_session.query(ImapFolderSyncStatus):
            print("migrating", status.folder_name)
            status.folder_id = folder_id_for[(status.account_id,
                                              status.folder_name.lower())]
        db_session.commit()
        if "easfoldersyncstatus" in Base.metadata.tables:
            for status in db_session.query(EASFolderSyncStatus):
                print("migrating", status.folder_name)
                folder_id = folder_id_for.get(
                    (status.account_id, status.folder_name.lower()))
                if folder_id is not None:
                    status.folder_id = folder_id
                else:
                    # EAS folder rows *may* not exist if have no messages
                    folder = Folder(account_id=status.account_id,
                                    name=status.folder_name)
                    db_session.add(folder)
                    db_session.commit()
                    status.folder_id = folder.id
            db_session.commit()
            # some weird alembic bug? need to drop and recreate this FK
            op.drop_constraint("easfoldersyncstatus_ibfk_1",
                               "easfoldersyncstatus", type_="foreignkey")
            op.drop_column("easfoldersyncstatus", "folder_name")
            op.create_foreign_key(
                "easfoldersyncstatus_ibfk_1",
                "easfoldersyncstatus",
                "easaccount",
                ["account_id"],
                ["id"],
            )
            op.create_foreign_key(
                "easfoldersyncstatus_ibfk_2",
                "easfoldersyncstatus",
                "folder",
                ["folder_id"],
                ["id"],
            )
            op.create_unique_constraint("account_id", "easfoldersyncstatus",
                                        ["account_id", "folder_id"])

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint("imapfoldersyncstatus_ibfk_1", "imapfoldersyncstatus",
                       type_="foreignkey")
    op.drop_constraint("account_id", "imapfoldersyncstatus", type_="unique")
    op.drop_column("imapfoldersyncstatus", "folder_name")
    op.create_foreign_key(
        "imapfoldersyncstatus_ibfk_1",
        "imapfoldersyncstatus",
        "imapaccount",
        ["account_id"],
        ["id"],
    )
    op.create_foreign_key(
        "imapfoldersyncstatus_ibfk_2",
        "imapfoldersyncstatus",
        "folder",
        ["folder_id"],
        ["id"],
    )
    op.create_unique_constraint("account_id", "imapfoldersyncstatus",
                                ["account_id", "folder_id"])

    # NOTE: reuses `folder_id_for` built in the first session scope.
    with session_scope(versioned=False) as db_session:
        for info in db_session.query(ImapFolderInfo):
            print("migrating", info.folder_name)
            info.folder_id = folder_id_for[(info.account_id,
                                            info.folder_name.lower())]
        db_session.commit()

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint("imapfolderinfo_ibfk_1", "imapfolderinfo",
                       type_="foreignkey")
    op.drop_constraint("imapaccount_id", "imapfolderinfo", type_="unique")
    op.drop_column("imapfolderinfo", "folder_name")
    op.create_foreign_key("imapfolderinfo_ibfk_1", "imapfolderinfo",
                          "imapaccount", ["account_id"], ["id"])
    op.create_foreign_key("imapfolderinfo_ibfk_2", "imapfolderinfo",
                          "folder", ["folder_id"], ["id"])
    op.create_unique_constraint("imapaccount_id", "imapfolderinfo",
                                ["account_id", "folder_id"])
def add_fake_folder(db, default_account):
    """Find or create the fake "All Mail" folder used by tests."""
    from inbox.models.folder import Folder
    folder = Folder.find_or_create(db.session, default_account, "All Mail",
                                   "all")
    return folder
def add_fake_folder(db_session, default_account):
    """Find or create the fake 'All Mail' folder used by tests."""
    from inbox.models.folder import Folder
    all_mail = Folder.find_or_create(db_session, default_account,
                                     'All Mail', 'all')
    return all_mail
def add_fake_folder(db_session, default_account):
    """Return the canonical 'all' folder for `default_account`, creating
    it on first use."""
    from inbox.models.folder import Folder
    return Folder.find_or_create(
        db_session, default_account, 'All Mail', 'all')