def upgrade():
    from inbox.ignition import main_engine
    from inbox.sqlalchemy_ext.util import JSON, MutableDict
    from sqlalchemy.ext.declarative import declarative_base

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    op.add_column('foldersync',
                  sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            nullable=True))

    if 'easfoldersync' in Base.metadata.tables:
        op.add_column('easfoldersync',
                      sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                                nullable=True))
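
Every `upgrade()` collected here assumes the standard module-level preamble of an Alembic migration file; `op`, `sa`, `text`, `context`, `mysql`, and `chunk_size` all come from that scope. A minimal sketch of the assumed preamble (the `chunk_size` value is illustrative, and the revision identifiers are omitted):

from alembic import context, op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.sql import text

chunk_size = 250  # assumed batch size for yield_per(); real value not shown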
def upgrade():
    from bson import json_util
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.sqlalchemy_ext.util import JSON, MutableDict

    engine = main_engine(pool_size=1, max_overflow=0)

    op.add_column(
        "account",
        sa.Column(
            "_sync_status", MutableDict.as_mutable(JSON()), default={}, nullable=True
        ),
    )

    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Account(Base):
        __table__ = Base.metadata.tables["account"]

    with session_scope(versioned=False) as db_session:
        for acct in db_session.query(Account):
            d = dict(
                sync_start_time=str(acct.sync_start_time),
                sync_end_time=str(acct.sync_end_time),
            )
            acct._sync_status = json_util.dumps(d)

        db_session.commit()

    op.drop_column("account", "sync_start_time")
    op.drop_column("account", "sync_end_time")
Example #4
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON
    op.add_column('actionlog',
                  sa.Column('extra_args', JSON(), nullable=True))

    conn = op.get_bind()

    conn.execute(text("""
        ALTER TABLE message ADD COLUMN version BINARY(16),
                            DROP FOREIGN KEY message_ibfk_3
                      """))

    parent_drafts_ids = [id_ for id_, in conn.execute(text(
        """
        SELECT message.parent_draft_id from message
        WHERE message.is_created = 1
            AND message.is_draft = 1
            AND message.parent_draft_id IS NOT NULL
        """))]
    print(parent_drafts_ids)
    if parent_drafts_ids:
        # delete old parent drafts
        conn.execute(text("""
            DELETE FROM message
            WHERE message.is_created = 1 AND message.is_draft = 1
            AND message.id IN :parent_drafts_ids"""),
                     parent_drafts_ids=parent_drafts_ids)

    conn.execute(text("""
        UPDATE message SET message.version = message.public_id
        WHERE message.is_created = 1 AND message.is_draft = 1
        """))

    op.drop_column('message', 'parent_draft_id')
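
One portability note on the DELETE above: binding a Python list straight into `IN :parent_drafts_ids` only works with certain driver/SQLAlchemy combinations. On SQLAlchemy 1.2+ the supported form uses an expanding bind parameter; a sketch of the equivalent call:

from sqlalchemy import bindparam

stmt = text("""
    DELETE FROM message
    WHERE message.is_created = 1 AND message.is_draft = 1
    AND message.id IN :parent_drafts_ids
    """).bindparams(bindparam("parent_drafts_ids", expanding=True))
conn.execute(stmt, {"parent_drafts_ids": parent_drafts_ids})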
Example #5
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    op.add_column("event", sa.Column("participants_by_email", JSON(), nullable=False))
    op.drop_table("eventparticipant")

    conn = op.get_bind()
    conn.execute(text("UPDATE event SET participants_by_email='{}'"))
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    shard_id = int(context.get_x_argument(as_dictionary=True).get("shard_id"))
    namespace_id_type = sa.Integer() if shard_id == 0 else sa.BigInteger()

    op.create_table(
        "metadata",
        sa.Column("public_id", sa.BINARY(length=16), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("app_id", sa.Integer(), nullable=True),
        sa.Column("app_client_id", sa.BINARY(length=16), nullable=False),
        sa.Column("app_type", sa.String(length=20), nullable=False),
        sa.Column("namespace_id", namespace_id_type, nullable=False),
        sa.Column("object_public_id", sa.String(length=191), nullable=False),
        sa.Column("object_type", sa.String(length=20), nullable=False),
        sa.Column("object_id", sa.BigInteger(), nullable=False),
        sa.Column("value", JSON(), nullable=True),
        sa.Column("version", sa.Integer(), server_default="0", nullable=True),
        sa.ForeignKeyConstraint(["namespace_id"], [u"namespace.id"],
                                ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_metadata_created_at"),
                    "metadata", ["created_at"],
                    unique=False)
    op.create_index(op.f("ix_metadata_deleted_at"),
                    "metadata", ["deleted_at"],
                    unique=False)
    op.create_index(op.f("ix_metadata_object_id"),
                    "metadata", ["object_id"],
                    unique=False)
    op.create_index(
        op.f("ix_metadata_object_public_id"),
        "metadata",
        ["object_public_id"],
        unique=False,
    )
    op.create_index(op.f("ix_metadata_public_id"),
                    "metadata", ["public_id"],
                    unique=False)
    op.create_index(op.f("ix_metadata_updated_at"),
                    "metadata", ["updated_at"],
                    unique=False)
    op.create_index(
        "ix_obj_public_id_app_id",
        "metadata",
        ["object_public_id", "app_id"],
        unique=True,
    )

    conn = op.get_bind()
    increment = (shard_id << 48) + 1
    conn.execute("ALTER TABLE metadata AUTO_INCREMENT={}".format(increment))
Example #8
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    op.add_column('actionlog',
                  sa.Column('extra_args', JSON(), nullable=True))

    op.add_column('message',
                  sa.Column('version', mysql.BINARY(16), nullable=True))

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    # Delete old draft versions, set message.version=public_id on the latest
    # one.
    with session_scope(ignore_soft_deletes=False, versioned=False) as \
            db_session:
        q = db_session.query(Message).filter(
            Message.is_created == True,
            Message.is_draft == True)

        for d in page_query(q):
            if d.child_draft is not None:
                db_session.delete(d)
            else:
                d.version = d.public_id
                db_session.add(d)

        db_session.commit()

    op.drop_constraint('message_ibfk_3', 'message', type_='foreignkey')
    op.drop_column('message', 'parent_draft_id')
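
`page_query` is not defined in this example; it is presumably a module-level batching helper in the spirit of the classic SQLAlchemy windowed-query recipe. A minimal stand-in (an assumption, not the project's actual helper):

def page_query(q, page_size=1000):
    # Yield rows of `q` in fixed-size pages to bound memory use.
    offset = 0
    while True:
        page = q.limit(page_size).offset(offset).all()
        if not page:
            break
        for row in page:
            yield row
        offset += page_size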
Example #9
def upgrade():
    from inbox.models.session import session_scope
    from inbox.models.folder import Folder
    from inbox.sqlalchemy_ext.util import JSON
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)

    ### foldersync => imapfoldersyncstatus
    # note that renaming a table does in fact migrate constraints + indexes too
    op.rename_table('foldersync', 'imapfoldersyncstatus')

    op.alter_column('imapfoldersyncstatus',
                    '_sync_status',
                    existing_type=JSON(),
                    nullable=True,
                    new_column_name='_metrics')

    op.add_column('imapfoldersyncstatus',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### uidvalidity => imapfolderinfo
    op.rename_table('uidvalidity', 'imapfolderinfo')
    op.alter_column('imapfolderinfo',
                    'uid_validity',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='uidvalidity')
    op.alter_column('imapfolderinfo',
                    'highestmodseq',
                    existing_type=sa.Integer(),
                    nullable=True)

    op.drop_constraint('imapfolderinfo_ibfk_1',
                       'imapfolderinfo',
                       type_='foreignkey')
    op.alter_column('imapfolderinfo',
                    'imapaccount_id',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])

    op.add_column('imapfolderinfo',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### imapuid
    op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey')
    op.alter_column('imapuid',
                    'imapaccount_id',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapuid_ibfk_1', 'imapuid', 'imapaccount',
                          ['account_id'], ['id'])

    ### migrate data and add new constraints
    from sqlalchemy.ext.declarative import declarative_base
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if 'easfoldersync' in Base.metadata.tables:
        op.rename_table('easfoldersync', 'easfoldersyncstatus')
        op.add_column('easfoldersyncstatus',
                      sa.Column('folder_id', sa.Integer(), nullable=False))
        op.alter_column('easfoldersyncstatus',
                        '_sync_status',
                        existing_type=JSON(),
                        nullable=True,
                        new_column_name='_metrics')
        Base.metadata.reflect(engine)

        class EASFolderSyncStatus(Base):
            __table__ = Base.metadata.tables['easfoldersyncstatus']

    class ImapFolderSyncStatus(Base):
        __table__ = Base.metadata.tables['imapfoldersyncstatus']

    class ImapFolderInfo(Base):
        __table__ = Base.metadata.tables['imapfolderinfo']

    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:
        folder_id_for = dict([((account_id, name.lower()), id_)
                              for id_, account_id, name in db_session.query(
                                  Folder.id, Folder.account_id, Folder.name)])
        for status in db_session.query(ImapFolderSyncStatus):
            print "migrating", status.folder_name
            status.folder_id = folder_id_for[(status.account_id,
                                              status.folder_name.lower())]
        db_session.commit()
        if 'easfoldersyncstatus' in Base.metadata.tables:
            for status in db_session.query(EASFolderSyncStatus):
                print "migrating", status.folder_name
                folder_id = folder_id_for.get(
                    (status.account_id, status.folder_name.lower()))
                if folder_id is not None:
                    status.folder_id = folder_id
                else:
                    # EAS folder rows *may* not exist if they have no messages
                    folder = Folder(account_id=status.account_id,
                                    name=status.folder_name)
                    db_session.add(folder)
                    db_session.commit()
                    status.folder_id = folder.id
            db_session.commit()
            # some weird alembic bug? need to drop and recreate this FK
            op.drop_constraint('easfoldersyncstatus_ibfk_1',
                               'easfoldersyncstatus',
                               type_='foreignkey')
            op.drop_column('easfoldersyncstatus', 'folder_name')
            op.create_foreign_key('easfoldersyncstatus_ibfk_1',
                                  'easfoldersyncstatus', 'easaccount',
                                  ['account_id'], ['id'])
            op.create_foreign_key('easfoldersyncstatus_ibfk_2',
                                  'easfoldersyncstatus', 'folder',
                                  ['folder_id'], ['id'])
            op.create_unique_constraint('account_id', 'easfoldersyncstatus',
                                        ['account_id', 'folder_id'])

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfoldersyncstatus_ibfk_1',
                       'imapfoldersyncstatus',
                       type_='foreignkey')
    op.drop_constraint('account_id', 'imapfoldersyncstatus', type_='unique')
    op.drop_column('imapfoldersyncstatus', 'folder_name')
    op.create_foreign_key('imapfoldersyncstatus_ibfk_1',
                          'imapfoldersyncstatus', 'imapaccount',
                          ['account_id'], ['id'])
    op.create_foreign_key('imapfoldersyncstatus_ibfk_2',
                          'imapfoldersyncstatus', 'folder', ['folder_id'],
                          ['id'])
    op.create_unique_constraint('account_id', 'imapfoldersyncstatus',
                                ['account_id', 'folder_id'])

    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:
        for info in db_session.query(ImapFolderInfo):
            print "migrating", info.folder_name
            info.folder_id = folder_id_for[(info.account_id,
                                            info.folder_name.lower())]
        db_session.commit()

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfolderinfo_ibfk_1',
                       'imapfolderinfo',
                       type_='foreignkey')
    op.drop_constraint('imapaccount_id', 'imapfolderinfo', type_='unique')
    op.drop_column('imapfolderinfo', 'folder_name')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])
    op.create_foreign_key('imapfolderinfo_ibfk_2', 'imapfolderinfo', 'folder',
                          ['folder_id'], ['id'])
    op.create_unique_constraint('imapaccount_id', 'imapfolderinfo',
                                ['account_id', 'folder_id'])
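
The repeated "some weird alembic bug?" comments are more likely MySQL behavior than an Alembic defect: MySQL refuses to drop a column or unique index that a foreign key still depends on, so each block drops the FK, makes the schema change, and recreates the FK. The generic shape of that workaround (table and constraint names illustrative):

op.drop_constraint("tbl_ibfk_1", "tbl", type_="foreignkey")
op.drop_column("tbl", "old_column")
op.create_foreign_key("tbl_ibfk_1", "tbl", "parent", ["parent_id"], ["id"])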
Example #10
def upgrade():
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models import Message, Namespace, Part, Thread
    from inbox.models.session import Session, session_scope
    from inbox.sqlalchemy_ext.util import JSON

    engine = main_engine(pool_size=1, max_overflow=0)

    print('Creating table for parts...')
    op.create_table(
        'part', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('message_id', sa.Integer(), nullable=True),
        sa.Column('walk_index', sa.Integer(), nullable=True),
        sa.Column('content_disposition',
                  sa.Enum('inline', 'attachment'),
                  nullable=True),
        sa.Column('content_id', sa.String(length=255), nullable=True),
        sa.Column('misc_keyval', JSON(), nullable=True),
        sa.Column('is_inboxapp_attachment',
                  sa.Boolean(),
                  server_default=sa.sql.expression.false(),
                  nullable=True),
        sa.ForeignKeyConstraint(['id'], ['block.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['message_id'], ['message.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('message_id', 'walk_index'))

    print('Reflecting old block table schema')
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Block_(Base):  # old schema, reflected from database table
        __table__ = Base.metadata.tables['block']

    print('Adding namespace_id column to blocks ', end=' ')
    op.add_column(u'block',
                  sa.Column('namespace_id', sa.Integer(), nullable=False))

    print('Migrating from blocks to parts')
    new_parts = []
    with session_scope() as db_session:
        for block in db_session.query(Block_).yield_per(chunk_size):

            # Move relevant fields
            p = Part()
            p.size = block.size
            p.data_sha256 = block.data_sha256
            p.message_id = block.message_id
            p.walk_index = block.walk_index
            p.content_disposition = block.content_disposition
            p.content_id = block.content_id
            p.misc_keyval = block.misc_keyval
            p.is_inboxapp_attachment = block.is_inboxapp_attachment

            old_namespace = db_session.query(Namespace) \
                .join(Message.thread, Thread.namespace) \
                .filter(Message.id == block.message_id).one()
            p.namespace_id = old_namespace.id

            # Commit after column modifications
            new_parts.append(p)

        print('Deleting old blocks (now parts)... ', end=' ')
        db_session.query(Block_).delete()
        db_session.commit()
        print('Done!')

    print('Removing `message_id` constraint from block')
    op.drop_constraint('block_ibfk_1', 'block', type_='foreignkey')

    print('Creating foreign key for block -> namespace on block')
    op.create_foreign_key('block_ibfk_1',
                          'block',
                          'namespace', ['namespace_id'], ['id'],
                          ondelete='CASCADE')

    print('Dropping old block columns which are now in part')
    op.drop_column(u'block', u'walk_index')
    op.drop_column(u'block', u'content_disposition')
    op.drop_column(u'block', u'misc_keyval')
    op.drop_column(u'block', u'content_id')
    op.drop_column(u'block', u'is_inboxapp_attachment')
    op.drop_constraint(u'message_id', 'block', type_='unique')
    op.drop_column(u'block', u'message_id')

    # Note: here we use the regular database session, since the transaction
    # log requires the `namespace` property on objects. We've set the
    # `namespace_id` foreign key, but we need to commit the object before the
    # SQLAlchemy reference is valid.
    no_tx_session = Session(autoflush=True, autocommit=False)
    no_tx_session.add_all(new_parts)
    no_tx_session.commit()

    print('Done migrating blocks to parts!')
Example #11
def upgrade():
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models import Message, Namespace, Part, Thread
    from inbox.models.session import Session, session_scope
    from inbox.sqlalchemy_ext.util import JSON

    engine = main_engine(pool_size=1, max_overflow=0)

    print("Creating table for parts...")
    op.create_table(
        "part",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("message_id", sa.Integer(), nullable=True),
        sa.Column("walk_index", sa.Integer(), nullable=True),
        sa.Column("content_disposition",
                  sa.Enum("inline", "attachment"),
                  nullable=True),
        sa.Column("content_id", sa.String(length=255), nullable=True),
        sa.Column("misc_keyval", JSON(), nullable=True),
        sa.Column(
            "is_inboxapp_attachment",
            sa.Boolean(),
            server_default=sa.sql.expression.false(),
            nullable=True,
        ),
        sa.ForeignKeyConstraint(["id"], ["block.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["message_id"], ["message.id"],
                                ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("message_id", "walk_index"),
    )

    print("Reflecting old block table schema")
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Block_(Base):  # old schema, reflected from database table
        __table__ = Base.metadata.tables["block"]

    print("Adding namespace_id column to blocks ", end=" ")
    op.add_column(u"block",
                  sa.Column("namespace_id", sa.Integer(), nullable=False))

    print("Migrating from blocks to parts")
    new_parts = []
    with session_scope() as db_session:
        for block in db_session.query(Block_).yield_per(chunk_size):

            # Move relevant fields
            p = Part()
            p.size = block.size
            p.data_sha256 = block.data_sha256
            p.message_id = block.message_id
            p.walk_index = block.walk_index
            p.content_disposition = block.content_disposition
            p.content_id = block.content_id
            p.misc_keyval = block.misc_keyval
            p.is_inboxapp_attachment = block.is_inboxapp_attachment

            old_namespace = (db_session.query(Namespace).join(
                Message.thread,
                Thread.namespace).filter(Message.id == block.message_id).one())
            p.namespace_id = old_namespace.id

            # Commit after column modifications
            new_parts.append(p)

        print("Deleting old blocks (now parts)... ", end=" ")
        db_session.query(Block_).delete()
        db_session.commit()
        print("Done!")

    print("Removing `message_id` constraint from block")
    op.drop_constraint("block_ibfk_1", "block", type_="foreignkey")

    print("Creating foreign key for block -> namespace on block")
    op.create_foreign_key(
        "block_ibfk_1",
        "block",
        "namespace",
        ["namespace_id"],
        ["id"],
        ondelete="CASCADE",
    )

    print("Dropping old block columns which are now in part")
    op.drop_column(u"block", u"walk_index")
    op.drop_column(u"block", u"content_disposition")
    op.drop_column(u"block", u"misc_keyval")
    op.drop_column(u"block", u"content_id")
    op.drop_column(u"block", u"is_inboxapp_attachment")
    op.drop_constraint(u"message_id", "block", type_="unique")
    op.drop_column(u"block", u"message_id")

    # Note: here we use the regular database session, since the transaction
    # log requires the `namespace` property on objects. We've set the
    # `namespace_id` foreign key, but we need to commit the object before the
    # SQLAlchemy reference is valid.
    no_tx_session = Session(autoflush=True, autocommit=False)
    no_tx_session.add_all(new_parts)
    no_tx_session.commit()

    print("Done migration blocks to parts!")
def upgrade():
    from inbox.ignition import main_engine
    from inbox.models.folder import Folder
    from inbox.models.session import session_scope
    from inbox.sqlalchemy_ext.util import JSON

    engine = main_engine(pool_size=1, max_overflow=0)

    # foldersync => imapfoldersyncstatus
    # note that renaming a table does in fact migrate constraints + indexes too
    op.rename_table("foldersync", "imapfoldersyncstatus")

    op.alter_column(
        "imapfoldersyncstatus",
        "_sync_status",
        existing_type=JSON(),
        nullable=True,
        new_column_name="_metrics",
    )

    op.add_column("imapfoldersyncstatus",
                  sa.Column("folder_id", sa.Integer(), nullable=False))

    # uidvalidity => imapfolderinfo
    op.rename_table("uidvalidity", "imapfolderinfo")
    op.alter_column(
        "imapfolderinfo",
        "uid_validity",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="uidvalidity",
    )
    op.alter_column("imapfolderinfo",
                    "highestmodseq",
                    existing_type=sa.Integer(),
                    nullable=True)

    op.drop_constraint("imapfolderinfo_ibfk_1",
                       "imapfolderinfo",
                       type_="foreignkey")
    op.alter_column(
        "imapfolderinfo",
        "imapaccount_id",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="account_id",
    )
    op.create_foreign_key("imapfolderinfo_ibfk_1", "imapfolderinfo",
                          "imapaccount", ["account_id"], ["id"])

    op.add_column("imapfolderinfo",
                  sa.Column("folder_id", sa.Integer(), nullable=False))

    # imapuid
    op.drop_constraint("imapuid_ibfk_1", "imapuid", type_="foreignkey")
    op.alter_column(
        "imapuid",
        "imapaccount_id",
        existing_type=sa.Integer(),
        nullable=False,
        new_column_name="account_id",
    )
    op.create_foreign_key("imapuid_ibfk_1", "imapuid", "imapaccount",
                          ["account_id"], ["id"])

    # migrate data and add new constraints
    from sqlalchemy.ext.declarative import declarative_base
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if "easfoldersync" in Base.metadata.tables:
        op.rename_table("easfoldersync", "easfoldersyncstatus")
        op.add_column("easfoldersyncstatus",
                      sa.Column("folder_id", sa.Integer(), nullable=False))
        op.alter_column(
            "easfoldersyncstatus",
            "_sync_status",
            existing_type=JSON(),
            nullable=True,
            new_column_name="_metrics",
        )
        Base.metadata.reflect(engine)

        class EASFolderSyncStatus(Base):
            __table__ = Base.metadata.tables["easfoldersyncstatus"]

    class ImapFolderSyncStatus(Base):
        __table__ = Base.metadata.tables["imapfoldersyncstatus"]

    class ImapFolderInfo(Base):
        __table__ = Base.metadata.tables["imapfolderinfo"]

    with session_scope(versioned=False) as db_session:
        folder_id_for = dict([((account_id, name.lower()), id_)
                              for id_, account_id, name in db_session.query(
                                  Folder.id, Folder.account_id, Folder.name)])
        for status in db_session.query(ImapFolderSyncStatus):
            print("migrating", status.folder_name)
            status.folder_id = folder_id_for[(status.account_id,
                                              status.folder_name.lower())]
        db_session.commit()
        if "easfoldersyncstatus" in Base.metadata.tables:
            for status in db_session.query(EASFolderSyncStatus):
                print("migrating", status.folder_name)
                folder_id = folder_id_for.get(
                    (status.account_id, status.folder_name.lower()))
                if folder_id is not None:
                    status.folder_id = folder_id
                else:
                    # EAS folder rows *may* not exist if they have no messages
                    folder = Folder(account_id=status.account_id,
                                    name=status.folder_name)
                    db_session.add(folder)
                    db_session.commit()
                    status.folder_id = folder.id
            db_session.commit()
            # some weird alembic bug? need to drop and recreate this FK
            op.drop_constraint("easfoldersyncstatus_ibfk_1",
                               "easfoldersyncstatus",
                               type_="foreignkey")
            op.drop_column("easfoldersyncstatus", "folder_name")
            op.create_foreign_key(
                "easfoldersyncstatus_ibfk_1",
                "easfoldersyncstatus",
                "easaccount",
                ["account_id"],
                ["id"],
            )
            op.create_foreign_key(
                "easfoldersyncstatus_ibfk_2",
                "easfoldersyncstatus",
                "folder",
                ["folder_id"],
                ["id"],
            )
            op.create_unique_constraint("account_id", "easfoldersyncstatus",
                                        ["account_id", "folder_id"])

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint("imapfoldersyncstatus_ibfk_1",
                       "imapfoldersyncstatus",
                       type_="foreignkey")
    op.drop_constraint("account_id", "imapfoldersyncstatus", type_="unique")
    op.drop_column("imapfoldersyncstatus", "folder_name")
    op.create_foreign_key(
        "imapfoldersyncstatus_ibfk_1",
        "imapfoldersyncstatus",
        "imapaccount",
        ["account_id"],
        ["id"],
    )
    op.create_foreign_key(
        "imapfoldersyncstatus_ibfk_2",
        "imapfoldersyncstatus",
        "folder",
        ["folder_id"],
        ["id"],
    )
    op.create_unique_constraint("account_id", "imapfoldersyncstatus",
                                ["account_id", "folder_id"])

    with session_scope(versioned=False) as db_session:
        for info in db_session.query(ImapFolderInfo):
            print("migrating", info.folder_name)
            info.folder_id = folder_id_for[(info.account_id,
                                            info.folder_name.lower())]
        db_session.commit()

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint("imapfolderinfo_ibfk_1",
                       "imapfolderinfo",
                       type_="foreignkey")
    op.drop_constraint("imapaccount_id", "imapfolderinfo", type_="unique")
    op.drop_column("imapfolderinfo", "folder_name")
    op.create_foreign_key("imapfolderinfo_ibfk_1", "imapfolderinfo",
                          "imapaccount", ["account_id"], ["id"])
    op.create_foreign_key("imapfolderinfo_ibfk_2", "imapfolderinfo", "folder",
                          ["folder_id"], ["id"])
    op.create_unique_constraint("imapaccount_id", "imapfolderinfo",
                                ["account_id", "folder_id"])
def upgrade():
    from inbox.sqlalchemy_ext.util import JSON

    op.add_column("imapuid", sa.Column("g_labels", JSON(), nullable=True))

    conn = op.get_bind()
    conn.execute(text("UPDATE imapuid SET g_labels = '[]'"))