def downgrade():
    """Drop the initial-sync timestamp columns from easfoldersyncstatus.

    No-op on shards where the EAS tables are not installed.
    """
    from inbox.ignition import main_engine

    db_engine = main_engine(pool_size=1, max_overflow=0)
    if db_engine.has_table('easfoldersyncstatus'):
        for column_name in ('initial_sync_start', 'initial_sync_end'):
            op.drop_column('easfoldersyncstatus', column_name)
Beispiel #2
0
def upgrade():
    """Recompute the snippet for every successfully-decoded message.

    Strips HTML from each message's sanitized body and stores the first
    SNIPPET_LENGTH characters of whitespace-collapsed text.

    NOTE(review): `declarative_base` and `page_query` must be provided at
    module scope -- not visible in this chunk.
    """
    from inbox.models.session import session_scope
    from inbox.util.html import strip_tags
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    SNIPPET_LENGTH = 191

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    def calculate_html_snippet(msg, text):
        # Turn explicit <br> variants into spaces before stripping the
        # remaining markup, so words separated only by line breaks
        # don't run together.
        text = text.replace('<br>', ' ').replace('<br/>', ' '). \
            replace('<br />', ' ')
        text = strip_tags(text)
        calculate_plaintext_snippet(msg, text)

    def calculate_plaintext_snippet(msg, text):
        # Collapse all whitespace runs, then truncate.
        msg.snippet = ' '.join(text.split())[:SNIPPET_LENGTH]

    with session_scope(ignore_soft_deletes=False, versioned=False)\
            as db_session:
        for message in page_query(db_session.query(Message)):
            if not message.decode_error:
                calculate_html_snippet(message, message.sanitized_body)
        # BUG FIX: commit while the session is still open. Previously this
        # line was dedented outside the `with` block, so it ran after
        # session_scope had already committed and closed the session.
        db_session.commit()
def upgrade():
    """Add username/eas_auth columns to easaccount and backfill eas_auth."""
    from inbox.ignition import main_engine

    engine = main_engine()

    if not engine.has_table("easaccount"):
        return

    # We allow nullable=True because we don't have usernames for existing accounts.
    # Furthermore, we won't always get a username.
    from inbox.models.constants import MAX_INDEXABLE_LENGTH

    username_col = sa.Column("username", sa.String(255), nullable=True)
    auth_col = sa.Column("eas_auth", sa.String(MAX_INDEXABLE_LENGTH),
                         nullable=True)
    op.add_column("easaccount", username_col)
    op.add_column("easaccount", auth_col)

    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)
    from inbox.models.session import session_scope

    class EASAccount(Base):
        __table__ = Base.metadata.tables["easaccount"]

    # Existing accounts authenticated with their email address.
    with session_scope(ignore_soft_deletes=False, versioned=False) as db_session:
        for account in db_session.query(EASAccount).all():
            account.eas_auth = account.email_address
            db_session.add(account)
        db_session.commit()

    # Every row now has a value, so the column can become mandatory.
    op.alter_column("easaccount", "eas_auth", nullable=False,
                    existing_type=sa.String(MAX_INDEXABLE_LENGTH))
def upgrade():
    """Delete EAS folder-sync state for folder types we no longer sync.

    Removes EASFolderSyncStatus rows (and their folders) whose type is in
    SKIP_FOLDERS, plus any status attached to the 'RecipientInfo' folder.
    No-op when the EAS tables are not installed.
    """
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    # BUG FIX: the original tested `'easfoldersyncstatus' in
    # Base.metadata.tables` before `Base` was created and reflected
    # (NameError unless an unrelated module-level Base existed).
    # Ask the engine directly instead.
    if engine.has_table('easfoldersyncstatus'):
        from inbox.models.session import session_scope
        from sqlalchemy.ext.declarative import declarative_base
        from sqlalchemy.orm.exc import NoResultFound
        Base = declarative_base()
        Base.metadata.reflect(engine)
        from inbox.models.backends.eas import EASFolderSyncStatus
        from inbox.models import Folder
        from inbox.util.eas.constants import SKIP_FOLDERS

        with session_scope(versioned=False, ignore_soft_deletes=False) as \
                db_session:
            statuses = db_session.query(EASFolderSyncStatus).filter(
                EASFolderSyncStatus.eas_folder_type.in_(SKIP_FOLDERS)).all()
            for s in statuses:
                # Delete both the status row and its folder.
                db_session.delete(s)
                db_session.delete(s.folder)

            try:
                for status in db_session.query(EASFolderSyncStatus)\
                        .join(Folder).filter(
                            Folder.name == 'RecipientInfo').all():
                    db_session.delete(status)
                    db_session.delete(status.folder)
            except NoResultFound:
                pass

            db_session.commit()
def downgrade_imapthread():
    """Fold imapthread back into thread: copy g_thrid over, then drop it.

    NOTE(review): assumes `declarative_base`, `op`, `sa`, `table` and
    `column` exist at module scope (standard Alembic migration imports)
    -- confirm against the full file.
    """
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Reflected mapping for the table we are about to drop.
    class ImapThread_(Base):
        __table__ = Base.metadata.tables['imapthread']

    # Get data from table-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapThread_.id, ImapThread_.g_thrid).all()
    to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results]

    # Drop columns, add new columns + insert data
    op.drop_column('thread', 'type')
    op.add_column('thread', sa.Column('g_thrid', sa.BigInteger(),
                                      nullable=True, index=True))
    table_ = table('thread',
                   column('g_thrid', sa.BigInteger),
                   column('id', sa.Integer))

    # One UPDATE statement per saved row; acceptable for a one-off migration.
    for r in to_insert:
        op.execute(
            table_.update().
            where(table_.c.id == r['id']).
            values({'g_thrid': r['g_thrid']})
        )

    # Drop table
    op.drop_table('imapthread')
def upgrade():
    """Encrypt stored secrets in place and classify each as password/token.

    Secrets referenced from genericaccount.password_id (or, when the EAS
    tables exist, easaccount.password_id) get type 'password'; all others
    get type 'token'. With ENCRYPT_SECRETS enabled, the plaintext is
    sealed with a libsodium SecretBox under the configured static key.

    NOTE(review): Python 2 code (`unicode`); `nacl.encoding` is used
    without an explicit import -- presumably loaded as a side effect of
    `import nacl.secret`; confirm.
    """
    from inbox.config import config
    import nacl.secret
    import nacl.utils
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    class GenericAccount(Base):
        __table__ = Base.metadata.tables['genericaccount']

    with session_scope(ignore_soft_deletes=False, versioned=False) as \
            db_session:
        secrets = db_session.query(Secret).filter(
            Secret.secret.isnot(None)).all()

        # Join on the genericaccount and optionally easaccount tables to
        # determine which secrets should have type 'password'.
        generic_query = db_session.query(Secret.id).join(
            GenericAccount).filter(Secret.id == GenericAccount.password_id)
        password_secrets = [id_ for id_, in generic_query]
        if engine.has_table('easaccount'):
            class EASAccount(Base):
                __table__ = Base.metadata.tables['easaccount']

            eas_query = db_session.query(Secret.id).join(
                EASAccount).filter(Secret.id == EASAccount.password_id)
            password_secrets.extend([id_ for id_, in eas_query])

        for s in secrets:
            # Normalize to bytes before encrypting/storing.
            plain = s.secret.encode('utf-8') if isinstance(s.secret, unicode) \
                else s.secret
            if config.get_required('ENCRYPT_SECRETS'):

                s._secret = nacl.secret.SecretBox(
                    key=config.get_required('SECRET_ENCRYPTION_KEY'),
                    encoder=nacl.encoding.HexEncoder
                ).encrypt(
                    plaintext=plain,
                    nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))

                # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
                s.encryption_scheme = 1
            else:
                s._secret = plain

            if s.id in password_secrets:
                s.type = 'password'
            else:
                s.type = 'token'

            db_session.add(s)

        db_session.commit()
def downgrade():
    """Remove the easaccount password foreign key and its column."""
    from inbox.ignition import main_engine

    db_engine = main_engine(pool_size=1, max_overflow=0)
    if db_engine.has_table('easaccount'):
        # The FK constraint must go before the column it references.
        op.drop_constraint('easaccount_ibfk_2', 'easaccount',
                           type_='foreignkey')
        op.drop_column('easaccount', 'password_id')
def upgrade():
    """Replace account.sync_start_time/sync_end_time with a JSON blob.

    Adds a mutable-JSON `_sync_status` column, serializes the two old
    timestamps into it for every account, then drops the old columns.

    NOTE(review): relies on module-level `op`, `sa` and `json_util` --
    confirm `json_util` is imported in the full migration file.
    """
    from inbox.sqlalchemy_ext.util import JSON, MutableDict
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base
    op.add_column('account',
                  sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            default={}, nullable=True))

    # Reflect after add_column so the new column is visible to the mapper.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Account(Base):
        __table__ = Base.metadata.tables['account']

    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:
        for acct in db_session.query(Account):
            # str() also covers NULL timestamps (stored as 'None').
            d = dict(sync_start_time=str(acct.sync_start_time),
                     sync_end_time=str(acct.sync_end_time))
            acct._sync_status = json_util.dumps(d)

        db_session.commit()

    op.drop_column('account', 'sync_start_time')
    op.drop_column('account', 'sync_end_time')
def upgrade():
    """Add username/eas_auth to easaccount (if present) and backfill."""
    from inbox.ignition import main_engine
    engine = main_engine()
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    if 'easaccount' not in Base.metadata.tables:
        return

    # We allow nullable=True because we don't have usernames for existing accounts.
    # Furthermore, we won't always get a username.
    from inbox.models.constants import MAX_INDEXABLE_LENGTH
    from inbox.models.session import session_scope
    from inbox.models.backends.eas import EASAccount

    op.add_column('easaccount',
                  sa.Column('username', sa.String(255), nullable=True))
    op.add_column('easaccount',
                  sa.Column('eas_auth', sa.String(MAX_INDEXABLE_LENGTH),
                            nullable=True))

    # Existing accounts authenticated via their email address.
    with session_scope(ignore_soft_deletes=False, versioned=False) as \
            db_session:
        for account in db_session.query(EASAccount).all():
            account.eas_auth = account.email_address
            db_session.add(account)
        db_session.commit()

    # All rows are populated; tighten the column.
    op.alter_column('easaccount', 'eas_auth', nullable=False,
                    existing_type=sa.String(MAX_INDEXABLE_LENGTH))
def downgrade():
    """Drop the username and eas_auth columns, if EAS is installed."""
    from inbox.ignition import main_engine

    if main_engine().has_table('easaccount'):
        for col in ('username', 'eas_auth'):
            op.drop_column('easaccount', col)
def upgrade():
    """Backfill easuid.easfoldersyncstatus_id from the legacy lookup keys.

    Matches each EASUid to its EASFolderSyncStatus on
    (fld_uid, device_id, easaccount_id) and writes the FK in batches of
    1000. The `easfoldersyncstatus_id IS NULL` term inside the inner-join
    condition excludes already-linked rows, so every committed batch
    shrinks the candidate set until the query returns nothing.
    """
    from sqlalchemy.ext.declarative import declarative_base
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table('easuid'):
        return
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class EASUid(Base):
        __table__ = Base.metadata.tables['easuid']

    class EASFolderSyncStatus(Base):
        __table__ = Base.metadata.tables['easfoldersyncstatus']

    with session_scope(versioned=False) as db_session:
        max_easuid = db_session.query(sa.func.max(EASUid.id)).scalar()
        if max_easuid is None:
            # Empty table; nothing to backfill.
            return
        while True:
            results = db_session.query(EASUid, EASFolderSyncStatus). \
                join(EASFolderSyncStatus, sa.and_(
                    EASUid.fld_uid == EASFolderSyncStatus.eas_folder_id,
                    EASUid.device_id == EASFolderSyncStatus.device_id,
                    EASUid.easaccount_id == EASFolderSyncStatus.account_id,
                    EASUid.easfoldersyncstatus_id.is_(None))). \
                limit(1000).all()
            if not results:
                return
            for easuid, easfoldersyncstatus in results:
                easuid.easfoldersyncstatus_id = easfoldersyncstatus.id
            db_session.commit()
def downgrade():
    """Re-inline gmail refresh tokens and remove the secret indirection.

    Restores gmailaccount.refresh_token, copies each account's secret
    value back into it, then drops refresh_token_id and the secret table.

    NOTE(review): `.one()` raises if an account's refresh_token_id points
    at a missing secret row -- presumably impossible under the FK, but
    confirm before running against dirty data.
    """
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Reflected mappings (Account and ImapAccount are declared but not
    # queried directly below).
    class Account(Base):
        __table__ = Base.metadata.tables['account']

    class ImapAccount(Base):
        __table__ = Base.metadata.tables['imapaccount']

    class GmailAccount(Base):
        __table__ = Base.metadata.tables['gmailaccount']

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    op.add_column('gmailaccount', sa.Column('refresh_token',
                  sa.String(length=512), nullable=True))

    with session_scope(versioned=False) \
            as db_session:
        for acct in db_session.query(GmailAccount):
            secret = db_session.query(Secret) \
                .filter_by(id=acct.refresh_token_id).one()
            acct.refresh_token = secret.secret
            db_session.add(acct)
        db_session.commit()

    op.drop_column('gmailaccount', 'refresh_token_id')
    op.drop_table('secret')
def upgrade():
    """Drop the obsolete eas_state column when the EAS tables exist."""
    from inbox.ignition import main_engine
    engine = main_engine()

    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    if 'easaccount' not in Base.metadata.tables:
        return
    op.drop_column('easaccount', 'eas_state')
Beispiel #14
0
    def __init__(self):
        """Initialize the harness: bind the app engine and seed test data.

        NOTE(review): `self.setup()` is defined elsewhere on this class
        (not visible in this chunk) -- it is expected to populate the
        test database.
        """
        from inbox.ignition import main_engine
        engine = main_engine()
        # Set up test database
        self.engine = engine

        # Populate with test data
        self.setup()
def downgrade():
    """Remove the receivedrecentdate column and its index from thread."""
    from inbox.ignition import main_engine

    db_engine = main_engine(pool_size=1, max_overflow=0)
    if db_engine.has_table('thread'):
        op.drop_column('thread', 'receivedrecentdate')
        op.drop_index('ix_thread_namespace_id_receivedrecentdate',
                      table_name='thread')
def downgrade():
    """Drop the EAS username/auth columns added by the matching upgrade."""
    from inbox.ignition import main_engine

    db_engine = main_engine()

    if not db_engine.has_table("easaccount"):
        return
    for column_name in ("username", "eas_auth"):
        op.drop_column("easaccount", column_name)
def upgrade():
    """Add a sync_should_run flag (server default TRUE) to easfoldersyncstatus."""
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table('easfoldersyncstatus'):
        return
    flag_column = sa.Column('sync_should_run', sa.Boolean(),
                            server_default=sa.sql.expression.true(),
                            nullable=False)
    op.add_column('easfoldersyncstatus', flag_column)
def add_eas_tables():
    """Register EAS table names in `table_names` when they exist in the DB.

    NOTE(review): mutates a `table_names` set that must be defined at
    module scope -- it is not visible in this chunk; confirm before
    reusing this function elsewhere.
    """
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)
    for table_name in ['easuid', 'easfoldersync']:
        if table_name in Base.metadata.tables:
            table_names.add(table_name)
Beispiel #19
0
def downgrade():
    """Drop the eas_thrid index when the EAS thread table exists."""
    from inbox.ignition import main_engine
    engine = main_engine()
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    if 'easthread' not in Base.metadata.tables:
        return
    op.drop_index('ix_easthread_eas_thrid', table_name='easthread')
def upgrade():
    """Copy each gmail account's refresh token into gmailauthcredentials.

    For every (GmailAccount, Secret) pair linked via refresh_token_id,
    create a GmailAuthCredentials row (with a cloned Secret) unless an
    entry with the same account id and secret value already exists.
    """
    import datetime
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine()

    # One shared timestamp for every row created by this migration.
    now = datetime.datetime.now()
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class GmailAccount(Base):
        __table__ = Base.metadata.tables['gmailaccount']

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    class GmailAuthCredentials(Base):
        __table__ = Base.metadata.tables['gmailauthcredentials']
        secret = relationship(Secret)

    with session_scope(versioned=False) as db_session:

        for acc, sec in db_session.query(GmailAccount, Secret) \
                        .filter(GmailAccount.refresh_token_id == Secret.id) \
                        .all():

            # Create a new GmailAuthCredentials entry if
            # we don't have one already
            if db_session.query(GmailAuthCredentials, Secret) \
                    .filter(GmailAuthCredentials.gmailaccount_id == acc.id) \
                    .filter(Secret._secret == sec._secret) \
                    .count() == 0:

                # Create a new secret
                new_sec = Secret()
                new_sec.created_at = now
                new_sec.updated_at = now
                new_sec._secret = sec._secret
                new_sec.type = sec.type  # 'token'
                new_sec.encryption_scheme = sec.encryption_scheme

                # Create a new GmailAuthCredentials entry
                auth_creds = GmailAuthCredentials()
                auth_creds.gmailaccount_id = acc.id
                auth_creds.scopes = acc.scope
                auth_creds.g_id_token = acc.g_id_token
                auth_creds.client_id = acc.client_id
                auth_creds.client_secret = acc.client_secret
                auth_creds.created_at = now
                auth_creds.updated_at = now
                auth_creds.secret = new_sec

                db_session.add(auth_creds)
                db_session.add(new_sec)

        db_session.commit()
Beispiel #21
0
def session_scope(versioned=True, ignore_soft_deletes=True, namespace_id=None):
    """ Provide a transactional scope around a series of operations.

    Takes care of rolling back failed transactions and closing the session
    when it goes out of scope.

    Note that sqlalchemy automatically starts a new database transaction when
    the session is created, and restarts a new transaction after every commit()
    on the session. Your database backend's transaction semantics are important
    here when reasoning about concurrency.

    Parameters
    ----------
    versioned : bool
        Do you want to enable the transaction log?
    ignore_soft_deletes : bool
        Whether or not to ignore soft-deleted objects in query results.
    namespace_id : int
        Namespace to limit query results with.

    Yields
    ------
    InboxSession
        The created session.
    """

    global cached_engine
    # Lazily create one process-wide engine on first use.
    if cached_engine is None:
        cached_engine = main_engine()
        log.info("Don't yet have engine... creating default from ignition",
                 engine=id(cached_engine))

    session = InboxSession(cached_engine,
                           versioned=versioned,
                           ignore_soft_deletes=ignore_soft_deletes,
                           namespace_id=namespace_id)
    try:
        if config.get('LOG_DB_SESSIONS'):
            # start_time/logger are only bound when this flag is set; the
            # finally block re-checks the same flag before using them.
            start_time = time.time()
            # Two frames up: skip this generator frame and, presumably,
            # the contextmanager wrapper -- TODO confirm.
            calling_frame = sys._getframe().f_back.f_back
            call_loc = '{}:{}'.format(calling_frame.f_globals.get('__name__'),
                                      calling_frame.f_lineno)
            logger = log.bind(engine_id=id(cached_engine),
                              session_id=id(session), call_loc=call_loc)
            logger.info('creating db_session',
                        sessions_used=cached_engine.pool.checkedout())
        yield session
        session.commit()
    except:
        # Bare except is deliberate: roll back on *any* exception
        # (including KeyboardInterrupt/GeneratorExit) and re-raise.
        session.rollback()
        raise
    finally:
        if config.get('LOG_DB_SESSIONS'):
            lifetime = time.time() - start_time
            logger.info('closing db_session', lifetime=lifetime,
                        sessions_used=cached_engine.pool.checkedout())
        session.close()
def downgrade():
    """Undo the EAS action-log changes: drop actionlog.type and easactionlog."""
    from inbox.ignition import main_engine

    db_engine = main_engine(pool_size=1, max_overflow=0)
    if db_engine.has_table('easaccount'):
        op.drop_column('actionlog', 'type')
        op.drop_table('easactionlog')
Beispiel #23
0
def upgrade():
    """Create the eas_thrid index (prefix-limited for MySQL TEXT columns)."""
    from inbox.ignition import main_engine
    engine = main_engine()
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    if 'easthread' not in Base.metadata.tables:
        return
    op.create_index('ix_easthread_eas_thrid', 'easthread', ['eas_thrid'],
                    unique=False, mysql_length=256)
Beispiel #24
0
def real_db():
    """A fixture to get access to the real mysql db. We need this
    to log in to providers like gmail to check that events changes
    are synced back.

    Yields an InboxSession bound to the app engine; changes made by the
    test are rolled back when the fixture finalizes.

    NOTE(review): `main_engine` and `InboxSession` must be imported at
    module scope -- not visible in this chunk.
    """
    engine = main_engine()
    session = InboxSession(engine)
    yield session
    session.rollback()
    session.close()
def upgrade():
    """Add a retired flag (server default FALSE) to easdevice."""
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table('easdevice'):
        return
    retired_column = sa.Column('retired', sa.Boolean(),
                               server_default=sa.sql.expression.false(),
                               nullable=False)
    op.add_column('easdevice', retired_column)
def downgrade():
    """Split genericaccount back into provider-specific yahoo/aol tables.

    Recreates aolaccount/yahooaccount, copies each generic account's
    password over, then drops genericaccount via raw SQL.

    NOTE(review): `Secret` is not defined in this function -- it must
    come from module scope; confirm against the full file. Accounts with
    providers other than yahoo/aol are silently dropped here.
    """
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    op.create_table('aolaccount',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'],
                                            ondelete='CASCADE'),
                    sa.Column('password', sa.String(256)),
                    sa.PrimaryKeyConstraint('id')
                    )

    op.create_table('yahooaccount',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['id'], [u'imapaccount.id'],
                                            ondelete='CASCADE'),
                    sa.Column('password', sa.String(256)),
                    sa.PrimaryKeyConstraint('id')
                    )

    # Reflect after creating the new tables so they can be mapped below.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Account/ImapAccount are declared but not queried directly below.
    class Account(Base):
        __table__ = Base.metadata.tables['account']

    class ImapAccount(Base):
        __table__ = Base.metadata.tables['imapaccount']

    class YahooAccount(Base):
        __table__ = Base.metadata.tables['yahooaccount']

    class AOLAccount(Base):
        __table__ = Base.metadata.tables['aolaccount']

    class GenericAccount(Base):
        __table__ = Base.metadata.tables['genericaccount']

    with session_scope(versioned=False) \
            as db_session:
        for acct in db_session.query(GenericAccount):
            secret = db_session.query(Secret) \
                .filter_by(id=acct.password_id).one()

            if acct.provider == 'yahoo':
                new_acct = YahooAccount(namespace=acct.namespace,
                                        password=secret.secret)
                db_session.add(new_acct)
            elif acct.provider == 'aol':
                new_acct = AOLAccount(namespace=acct.namespace,
                                      password=secret.secret)
                db_session.add(new_acct)
        db_session.commit()

    engine.execute('drop table genericaccount')
Beispiel #27
0
def upgrade():
    """Merge spoolmessage columns into message and drop spoolmessage.

    Adds the draft/send-state columns (and two self-referential FKs) to
    message, copies the values from each spoolmessage row with the same
    id, then drops the spoolmessage table.

    NOTE(review): is_created is added but never copied from spoolmessage,
    so it keeps its server default (false) -- presumably intentional;
    confirm.
    """
    from sqlalchemy.ext.declarative import declarative_base
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    op.add_column('message',
                  sa.Column('is_created', sa.Boolean,
                            server_default=sa.sql.expression.false(),
                            nullable=False))
    op.add_column('message',
                  sa.Column('is_sent', sa.Boolean,
                            server_default=sa.sql.expression.false(),
                            nullable=False))
    op.add_column('message',
                  sa.Column('state',
                            sa.Enum('draft', 'sending', 'sending failed',
                                    'sent')))
    op.add_column('message',
                  sa.Column('is_reply', sa.Boolean()))
    op.add_column('message',
                  sa.Column('resolved_message_id', sa.Integer(),
                            nullable=True))
    op.create_foreign_key('message_ibfk_2',
                          'message', 'message',
                          ['resolved_message_id'], ['id'])

    op.add_column('message',
                  sa.Column('parent_draft_id', sa.Integer(), nullable=True))
    op.create_foreign_key('message_ibfk_3',
                          'message', 'message',
                          ['parent_draft_id'], ['id'])

    # Reflect after the DDL so the new columns are mapped.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    class SpoolMessage(Base):
        __table__ = Base.metadata.tables['spoolmessage']

    with session_scope(versioned=False, ignore_soft_deletes=False) \
            as db_session:

        # spoolmessage shares its primary key with message (joined table),
        # so .get(sm.id) fetches the matching base row.
        for sm in db_session.query(SpoolMessage).yield_per(250):
            m = db_session.query(Message).get(sm.id)

            m.is_sent = sm.is_sent
            m.state = sm.state
            m.is_reply = sm.is_reply
            m.resolved_message_id = sm.resolved_message_id
            m.parent_draft_id = sm.parent_draft_id

        db_session.commit()

    op.drop_table('spoolmessage')
def upgrade():
    """Rename the default-calendar FK on account to the conventional name."""
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    inspector = sa.inspect(engine)

    existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('account')]
    if 'default_calendar_ibfk_1' not in existing_fks:
        return
    op.drop_constraint('default_calendar_ibfk_1', 'account',
                       type_='foreignkey')
    op.create_foreign_key('account_ibfk_10', 'account', 'calendar',
                          ['default_calendar_id'], ['id'])
def downgrade():
    """Drop the _sync_status column from foldersync and, when present,
    from easfoldersync."""
    from inbox.ignition import main_engine

    db_engine = main_engine(pool_size=1, max_overflow=0)
    reflected = declarative_base()
    reflected.metadata.reflect(db_engine)

    targets = ['foldersync']
    if 'easfoldersync' in reflected.metadata.tables:
        targets.append('easfoldersync')
    for table_name in targets:
        op.drop_column(table_name, '_sync_status')
Beispiel #30
0
def session_scope(versioned=True, ignore_soft_deletes=False):
    """ Provide a transactional scope around a series of operations.

    Takes care of rolling back failed transactions and closing the session
    when it goes out of scope.

    Note that sqlalchemy automatically starts a new database transaction when
    the session is created, and restarts a new transaction after every commit()
    on the session. Your database backend's transaction semantics are important
    here when reasoning about concurrency.

    Parameters
    ----------
    versioned : bool
        Do you want to enable the transaction log?

    Yields
    ------
    Session
        The created session.
    """

    global cached_engine
    # Lazily create one process-wide engine on first use.
    if cached_engine is None:
        cached_engine = main_engine()
        log.info("Don't yet have engine... creating default from ignition",
                 engine=id(cached_engine))

    session = new_session(cached_engine, versioned)
    try:
        if config.get('LOG_DB_SESSIONS'):
            # start_time/logger are only bound when this flag is set; the
            # finally block re-checks the same flag before using them.
            start_time = time.time()
            # Two frames up: skip this generator frame and, presumably,
            # the contextmanager wrapper -- TODO confirm.
            calling_frame = sys._getframe().f_back.f_back
            call_loc = '{}:{}'.format(calling_frame.f_globals.get('__name__'),
                                      calling_frame.f_lineno)
            logger = log.bind(engine_id=id(cached_engine),
                              session_id=id(session), call_loc=call_loc)
            logger.info('creating db_session',
                        sessions_used=cached_engine.pool.checkedout())
        yield session
        session.commit()
    except BaseException as exc:
        try:
            session.rollback()
            raise
        except OperationalError:
            # rollback() itself can fail (e.g. the connection is gone);
            # log that and surface the *original* exception instead.
            log.warn('Encountered OperationalError on rollback',
                     original_exception=type(exc))
            raise exc
    finally:
        if config.get('LOG_DB_SESSIONS'):
            lifetime = time.time() - start_time
            logger.info('closing db_session', lifetime=lifetime,
                        sessions_used=cached_engine.pool.checkedout())
        session.close()
def upgrade():
    """Add a retired flag (server default FALSE) to easdevice, if present."""
    from inbox.ignition import main_engine

    if not main_engine(pool_size=1, max_overflow=0).has_table("easdevice"):
        return
    retired_column = sa.Column(
        "retired",
        sa.Boolean(),
        server_default=sa.sql.expression.false(),
        nullable=False,
    )
    op.add_column("easdevice", retired_column)
def upgrade():
    """Add a sync_should_run flag (server default TRUE) to easfoldersyncstatus."""
    from inbox.ignition import main_engine

    if not main_engine(pool_size=1, max_overflow=0).has_table(
            "easfoldersyncstatus"):
        return
    flag_column = sa.Column(
        "sync_should_run",
        sa.Boolean(),
        server_default=sa.sql.expression.true(),
        nullable=False,
    )
    op.add_column("easfoldersyncstatus", flag_column)
def upgrade():
    """Add a mutable-JSON _sync_status column to foldersync (and easfoldersync)."""
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    from inbox.sqlalchemy_ext.util import JSON, MutableDict
    reflected = declarative_base()
    reflected.metadata.reflect(engine)

    def add_status_column(table_name):
        # Each table needs its own Column object; they cannot be shared.
        op.add_column(table_name,
                      sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                                nullable=True))

    add_status_column('foldersync')
    if 'easfoldersync' in reflected.metadata.tables:
        add_status_column('easfoldersync')
def upgrade():
    """Widen the category (and easfoldersyncstatus) unique constraints.

    Foreign-key checks are disabled around the constraint swap (MySQL
    refuses to drop an index that backs a foreign key otherwise).
    """
    conn = op.get_bind()
    conn.execute(text("set @@foreign_key_checks = 0;"))
    try:
        op.drop_constraint(u'namespace_id', 'category', type_='unique')
        op.create_unique_constraint(u'namespace_id', 'category',
                                    ['namespace_id', 'name', 'display_name'])

        from inbox.ignition import main_engine
        engine = main_engine(pool_size=1, max_overflow=0)
        if engine.has_table('easfoldersyncstatus'):
            op.drop_constraint(u'account_id_2', 'easfoldersyncstatus',
                               type_='unique')
            op.create_unique_constraint(
                u'account_id_2', 'easfoldersyncstatus',
                ['account_id', 'device_id', 'eas_folder_id'])
    finally:
        # BUG FIX: the original returned early when easfoldersyncstatus was
        # absent, leaving foreign_key_checks disabled on the connection.
        # Always restore the flag.
        conn.execute(text("set @@foreign_key_checks = 1;"))
Beispiel #35
0
def upgrade():
    """Create the eas_thrid index (prefix-limited for MySQL), if EAS exists."""
    from inbox.ignition import main_engine

    engine = main_engine()
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    if "easthread" not in Base.metadata.tables:
        return
    op.create_index("ix_easthread_eas_thrid", "easthread", ["eas_thrid"],
                    unique=False, mysql_length=256)
Beispiel #36
0
def upgrade():
    """Give every folder named 'Inbox' the canonical public/exposed name."""
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Folder(Base):
        __table__ = Base.metadata.tables['folder']

    with session_scope(versioned=False) as db_session:
        inbox_folders = db_session.query(Folder).filter(
            Folder.name == 'Inbox')
        for folder in inbox_folders:
            folder.public_id = 'inbox'
            folder.exposed_name = 'inbox'
        db_session.commit()
def upgrade():
    """Add server_id and easfoldersyncstatus_id (FK + indexes) to easuid.

    Issues one raw ALTER TABLE so the database applies all four schema
    changes in a single statement.
    """
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table("easuid"):
        return
    conn = op.get_bind()
    conn.execute("""ALTER TABLE easuid
        ADD COLUMN server_id VARCHAR(64) DEFAULT NULL,
        ADD COLUMN easfoldersyncstatus_id INT(11) DEFAULT NULL,
        ADD INDEX easfoldersyncstatus_id (easfoldersyncstatus_id),
        ADD CONSTRAINT easuid_ibfk_4 FOREIGN KEY (easfoldersyncstatus_id)
            REFERENCES easfoldersyncstatus (id) ON DELETE CASCADE,
        ADD INDEX ix_easuid_server_id (server_id)
        """)
Beispiel #38
0
def upgrade():
    """Add msg_uid and table_name indexes used by sync lookups."""
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if 'easuid' in Base.metadata.tables:
        op.create_index('ix_easuid_msg_uid', 'easuid', ['msg_uid'],
                        unique=False)

    op.create_index('ix_imapuid_msg_uid', 'imapuid', ['msg_uid'],
                    unique=False)
    op.create_index('ix_transaction_table_name', 'transaction',
                    ['table_name'], unique=False)
def upgrade():
    """Add public_id / object_public_id to `transaction`, backfill public
    ids for all existing rows in batches of 500, then swap the snapshot
    columns for the old `additional_data` column.
    """
    op.add_column('transaction',
                  sa.Column('public_id', mysql.BINARY(16), nullable=True))
    op.add_column(
        'transaction',
        sa.Column('object_public_id', sa.String(length=191), nullable=True))
    op.create_index('ix_transaction_public_id',
                    'transaction', ['public_id'],
                    unique=False)

    from inbox.sqlalchemy_ext.util import generate_public_id, b36_to_bin
    # TODO(emfree) reflect
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Transaction(Base):
        __table__ = Base.metadata.tables['transaction']

    with session_scope(versioned=False) as db_session:
        count = 0
        # max(id) is the upper bound for the batched id-range scan below.
        num_transactions, = db_session.query(sa.func.max(Transaction.id)).one()
        # Fix: max() is None on an empty table; the original crashed on
        # `range(0, None + 1, 500)`.
        if num_transactions is None:
            num_transactions = 0
        # Fix: use the print function (the original py2 print statement is a
        # syntax error on python 3; other migrations here already use it).
        print('Adding public ids to {} transactions'.format(num_transactions))
        for pointer in range(0, num_transactions + 1, 500):
            for entry in db_session.query(Transaction).filter(
                    Transaction.id >= pointer, Transaction.id < pointer + 500):
                entry.public_id = b36_to_bin(generate_public_id())
                count += 1
                # Commit (and GC) every 500 updates to bound memory usage.
                if not count % 500:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                    db_session.commit()
                    garbage_collect()

    op.alter_column('transaction',
                    'public_id',
                    existing_type=mysql.BINARY(16),
                    nullable=False)

    op.add_column(
        'transaction',
        sa.Column('public_snapshot', sa.Text(length=4194304), nullable=True))
    op.add_column(
        'transaction',
        sa.Column('private_snapshot', sa.Text(length=4194304), nullable=True))
    op.drop_column('transaction', u'additional_data')
Beispiel #40
0
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with an engine here for convenience, although
    a bare URL would suffice: offline mode never needs a live DBAPI, and
    context.execute() calls simply emit SQL to the script output.
    """
    engine = main_engine(pool_size=1, max_overflow=0)
    context.configure(engine=engine)

    with context.begin_transaction():
        context.run_migrations()
Beispiel #41
0
def downgrade():
    """Shrink eas_folder_id / eas_parent_id back to VARCHAR(64)."""
    conn = op.get_bind()
    # Fail fast instead of blocking on long-held metadata locks.
    conn.execute(text("set @@lock_wait_timeout = 20;"))

    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Nothing to do on deployments without the EAS tables.
    if "easfoldersyncstatus" not in Base.metadata.tables:
        return

    conn.execute(
        text(
            "ALTER TABLE easfoldersyncstatus MODIFY eas_folder_id VARCHAR(64),"
            "                                MODIFY eas_parent_id VARCHAR(64)"
        ))
Beispiel #42
0
def upgrade():
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Contact_Old(Base):
        __table__ = Base.metadata.tables['contact']

    # Delete the "remote" contacts. This is just a server cache for comparing
    # any changes, now handled by the previous "local" contacts
    with session_scope() as db_session:
        remote_contacts = db_session.query(Contact_Old).filter_by(
            source='remote')
        remote_contacts.delete()

    op.drop_column('contact', 'source')
def downgrade_imapaccount():
    """Collapse the account/imapaccount split back into a single
    `imapaccount` table.

    Saves `imap_host` off the old `imapaccount` table, drops it, renames
    `account` -> `imapaccount`, copies the data back, and re-points the
    dependent foreign keys. Statement order is load-bearing throughout.
    """
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Get data from table-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id,
                                   ImapAccount_.imap_host).all()
    to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Drop columns, add new columns + insert data
    op.drop_column('account', 'type')
    op.add_column('account', sa.Column('imap_host', sa.String(512)))

    # Lightweight table construct so we can emit UPDATEs without the ORM.
    table_ = table('account', column('imap_host', sa.String(512)),
                   column('id', sa.Integer))

    # One UPDATE per saved row; matched by primary key.
    for r in to_insert:
        op.execute(table_.update().where(table_.c.id == r['id']).values(
            {'imap_host': r['imap_host']}))

    # Table switch-over
    # FKs must be dropped before the old table can go away, and recreated
    # only after the rename so they point at the surviving table.
    op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey')
    op.drop_constraint('uidvalidity_ibfk_1', 'uidvalidity', type_='foreignkey')
    op.drop_constraint('foldersync_ibfk_1', 'foldersync', type_='foreignkey')
    op.drop_table('imapaccount')

    op.rename_table('account', 'imapaccount')

    op.create_foreign_key('imapuid_ibfk_1',
                          'imapuid',
                          'imapaccount', ['imapaccount_id'], ['id'],
                          ondelete='CASCADE')
    op.create_foreign_key('uidvalidity_ibfk_1',
                          'uidvalidity',
                          'imapaccount', ['imapaccount_id'], ['id'],
                          ondelete='CASCADE')
    op.create_foreign_key('foldersync_ibfk_1',
                          'foldersync',
                          'imapaccount', ['account_id'], ['id'],
                          ondelete='CASCADE')
def upgrade():
    """Move EAS account passwords into the `secret` table.

    Adds a `password_id` column on easaccount, creates one Secret row per
    account holding its password, then makes the column non-nullable.
    """
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    # Do nothing if the affected table isn't present.
    if not engine.has_table("easaccount"):
        return

    # Do not define foreign key constraint here; that's done for all account
    # tables in the next migration.
    op.add_column(
        "easaccount",
        sa.Column("password_id", sa.Integer(), sa.ForeignKey("secret.id")))
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)
    from inbox.models.session import session_scope

    class EASAccount(Base):
        __table__ = Base.metadata.tables["easaccount"]
        secret = sa.orm.relationship(
            "Secret", primaryjoin="EASAccount.password_id == Secret.id")

    class Secret(Base):
        __table__ = Base.metadata.tables["secret"]

    with session_scope(versioned=False) as db_session:
        accounts = db_session.query(EASAccount).all()
        # Fix: the original used a py2 print statement, which is a syntax
        # error under python 3; other migrations here use the print function.
        print("# EAS accounts: {}".format(len(accounts)))

        for account in accounts:
            secret = Secret()
            # Need to set non-nullable attributes.
            secret.created_at = datetime.utcnow()
            secret.updated_at = datetime.utcnow()
            secret.type = 0
            secret.acl_id = 0

            secret.secret = account.password
            account.secret = secret

        db_session.commit()

    op.alter_column("easaccount",
                    "password_id",
                    existing_type=sa.Integer(),
                    nullable=False)
def upgrade():
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)

    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables["message"]

    class Thread(Base):
        __table__ = Base.metadata.tables["thread"]

    # Truncate every message and thread subject, committing roughly every
    # 500 rows so the transaction stays small.
    with session_scope(versioned=False) as db_session:
        pending = 0
        for msg in (db_session.query(Message)
                    .options(sa.orm.load_only("subject"))
                    .yield_per(500)):
            truncate_subject(msg)
            pending += 1
            if pending > 500:
                db_session.commit()
                pending = 0
        db_session.commit()

        for thread in (db_session.query(Thread)
                       .options(sa.orm.load_only("subject"))
                       .yield_per(500)):
            truncate_subject(thread)
            pending += 1
            if pending > 500:
                db_session.commit()
                pending = 0
        db_session.commit()

    # Now that no value exceeds 255 chars, shrink the columns.
    op.alter_column("message", "subject",
                    type_=sa.String(255), existing_nullable=True)
    op.alter_column("thread", "subject",
                    type_=sa.String(255), existing_nullable=True)
def upgrade():
    """Backfill NULL message address fields with empty lists, then make all
    four address columns non-nullable."""
    import sys

    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    with session_scope(versioned=False) \
            as db_session:
        # Count rows needing a backfill so operators can gauge progress.
        null_field_count = db_session.query(func.count(Message.id)). \
            filter(or_(Message.from_addr.is_(None),
                       Message.to_addr.is_(None),
                       Message.cc_addr.is_(None),
                       Message.bcc_addr.is_(None))).scalar()
        # Fix: the original py2 print statements are syntax errors under
        # python 3; use portable forms (the file already uses print()).
        print('messages to migrate: {}'.format(null_field_count))
        if int(null_field_count):
            for message in db_session.query(Message):
                for attr in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'):
                    if getattr(message, attr) is None:
                        setattr(message, attr, [])
                # Progress dot per message (was `print '.',`).
                sys.stdout.write('.')
        db_session.commit()

    print('making addrs non-nullable')

    op.alter_column('message',
                    'bcc_addr',
                    existing_type=mysql.TEXT(),
                    nullable=False)
    op.alter_column('message',
                    'cc_addr',
                    existing_type=mysql.TEXT(),
                    nullable=False)
    op.alter_column('message',
                    'from_addr',
                    existing_type=mysql.TEXT(),
                    nullable=False)
    op.alter_column('message',
                    'to_addr',
                    existing_type=mysql.TEXT(),
                    nullable=False)
Beispiel #47
0
def migrate_messages(account_id):
    """Recompute metadata for every message in one account's namespace.

    Walks the account's messages in id order in pages of 1000, calling
    Message.update_metadata() on each, committing after every page.
    Returns when a page comes back empty.
    """
    from inbox.ignition import main_engine
    from inbox.models import Message, Namespace
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)

    with session_scope(versioned=False) as db_session:
        namespace = db_session.query(Namespace).filter_by(
            account_id=account_id).one()
        offset = 0
        while True:
            # Only eager-load EAS uids on deployments that have the table.
            if engine.has_table("easuid"):
                additional_options = [subqueryload(Message.easuids)]
            else:
                additional_options = []

            # Eager-load everything update_metadata touches to avoid N+1
            # queries; the index hint keeps MySQL on the namespace index.
            messages = (db_session.query(Message).filter(
                Message.namespace_id == namespace.id).options(
                    load_only(
                        Message.id,
                        Message.is_read,
                        Message.is_starred,
                        Message.is_draft,
                    ),
                    joinedload(Message.namespace).load_only("id"),
                    subqueryload(Message.imapuids),
                    subqueryload(Message.messagecategories),
                    *additional_options).with_hint(
                        Message,
                        "USE INDEX (ix_message_namespace_id)").order_by(
                            asc(Message.id)).limit(1000).offset(offset).all())
            if not messages:
                return
            for message in messages:
                try:
                    message.update_metadata(message.is_draft)
                except IndexError:
                    # Can happen for messages without a folder.
                    pass
                log.info("Updated message",
                         namespace_id=namespace.id,
                         message_id=message.id)
            # Commit each page before moving the offset forward.
            db_session.commit()
            offset += 1000
Beispiel #48
0
def upgrade():
    """Add message.is_read (default false), tighten usertagitem timestamps,
    and backfill is_read from the seen flag on each message's IMAP uids."""
    op.add_column(
        'message',
        sa.Column('is_read',
                  sa.Boolean(),
                  server_default=sa.sql.expression.false(),
                  nullable=False))

    op.alter_column('usertagitem',
                    'created_at',
                    existing_type=mysql.DATETIME(),
                    nullable=False)
    op.alter_column('usertagitem',
                    'updated_at',
                    existing_type=mysql.DATETIME(),
                    nullable=False)

    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    class ImapUid(Base):
        __table__ = Base.metadata.tables['imapuid']
        # Join conditions exclude soft-deleted rows on either side, matching
        # the application's soft-delete semantics at the time.
        message = relationship('Message',
                               backref=backref(
                                   'imapuids',
                                   primaryjoin='and_('
                                   'Message.id == ImapUid.message_id, '
                                   'ImapUid.deleted_at == None)'),
                               primaryjoin='and_('
                               'ImapUid.message_id == Message.id,'
                               'Message.deleted_at == None)')

    # A message is read if any of its uids carries the IMAP \Seen flag.
    with session_scope(versioned=False,
                       ignore_soft_deletes=False) as db_session:
        for uid in db_session.query(ImapUid).yield_per(500):
            if uid.is_seen:
                uid.message.is_read = True

        db_session.commit()
def upgrade():
    # Extend the message state enum with the two action states.
    op.alter_column('message',
                    'state',
                    type_=sa.Enum('draft', 'sending', 'sending failed', 'sent',
                                  'actions_pending', 'actions_committed'),
                    existing_type=sa.Enum('draft', 'sending', 'sending failed',
                                          'sent'))

    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    # The easdevice table only exists on EAS-enabled deployments.
    if engine.has_table('easdevice'):
        op.add_column(
            'easdevice',
            sa.Column('active',
                      sa.Boolean(),
                      server_default=sa.sql.expression.false(),
                      nullable=False))
Beispiel #50
0
def upgrade():
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # The model previously didn't reflect the migration, therefore
    # only drop the uid constraint if it exists (created with creat_db
    # vs a migration).
    inspector = sa.inspect(engine)
    existing = [c["name"] for c in inspector.get_unique_constraints("event")]
    if "uid" in existing:
        op.drop_constraint("uid", "event", type_="unique")

    op.create_unique_constraint(
        "uuid", "event", ["uid", "source", "account_id", "provider_name"])
    op.alter_column("event", "uid",
                    type_=sa.String(767, collation="ascii_general_ci"))
def upgrade():
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # The model previously didn't reflect the migration, therefore
    # only drop the uid constraint if it exists (created with creat_db
    # vs a migration).
    inspector = sa.inspect(engine)
    constraint_names = [
        c['name'] for c in inspector.get_unique_constraints('event')
    ]
    if 'uid' in constraint_names:
        op.drop_constraint('uid', 'event', type_='unique')

    op.create_unique_constraint(
        'uuid', 'event', ['uid', 'source', 'account_id', 'provider_name'])
    op.alter_column('event', 'uid',
                    type_=sa.String(767, collation='ascii_general_ci'))
Beispiel #52
0
def check_db():
    """ Checks the database revision against the known alembic migrations.

    Exits the process if the database doesn't exist, and raises if the
    database is un-stamped or behind the migration head.
    """
    from inbox.ignition import main_engine
    inbox_db_engine = main_engine(pool_size=1, max_overflow=0)

    # top-level, with setup.sh
    alembic_ini_path = os.environ.get("ALEMBIC_INI_PATH",
                                      _absolute_path('../../alembic.ini'))
    alembic_cfg = alembic_config(alembic_ini_path)

    # script_location in alembic.ini is relative to the ini file itself.
    alembic_basedir = os.path.dirname(alembic_ini_path)
    alembic_script_dir = os.path.join(
        alembic_basedir, alembic_cfg.get_main_option("script_location"))

    assert os.path.isdir(alembic_script_dir), \
        'Must have migrations directory at {}'.format(alembic_script_dir)

    # First probe distinguishes "database missing" (OperationalError) from
    # "database present"; the second call below then checks for the table.
    try:
        inbox_db_engine.dialect.has_table(inbox_db_engine, 'alembic_version')
    except sqlalchemy.exc.OperationalError:
        sys.exit("Databases don't exist! Run bin/create-db")

    if inbox_db_engine.dialect.has_table(inbox_db_engine, 'alembic_version'):
        res = inbox_db_engine.execute(
            'SELECT version_num from alembic_version')
        current_revision = [r for r in res][0][0]
        assert current_revision, \
            'Need current revision in alembic_version table...'

        script = ScriptDirectory(alembic_script_dir)
        head_revision = script.get_current_head()
        log.info('Head database revision: {0}'.format(head_revision))
        log.info('Current database revision: {0}'.format(current_revision))
        # clean up a ton (8) of idle database connections
        del script
        gc.collect()

        if current_revision != head_revision:
            raise Exception(
                'Outdated database! Migrate using `alembic upgrade head`')
        else:
            log.info('[OK] Database scheme matches latest')
    else:
        raise Exception('Un-stamped database! Run `bin/create-db`. bailing.')
def upgrade():
    """Widen the category uniqueness key to include display_name, and the
    easfoldersyncstatus key to include device_id."""
    conn = op.get_bind()
    # FK checks must be off while unique keys backing FKs are rebuilt.
    conn.execute(text("set @@foreign_key_checks = 0;"))
    op.drop_constraint(u"namespace_id", "category", type_="unique")
    op.create_unique_constraint(u"namespace_id", "category",
                                ["namespace_id", "name", "display_name"])

    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    # NOTE: on non-EAS deployments we return here with FK checks still
    # disabled for this connection, matching the original behavior.
    if not engine.has_table("easfoldersyncstatus"):
        return
    op.drop_constraint(u"account_id_2", "easfoldersyncstatus", type_="unique")
    op.create_unique_constraint(
        u"account_id_2",
        "easfoldersyncstatus",
        ["account_id", "device_id", "eas_folder_id"],
    )
    conn.execute(text("set @@foreign_key_checks = 1;"))
Beispiel #54
0
def downgrade():
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    tables = Base.metadata.tables

    # Restore the pre-migration EAS folder-sync state enum.
    if 'easfoldersync' in tables:
        op.alter_column('easfoldersync', 'state',
                        type_=sa.Enum('initial', 'initial uidinvalid',
                                      'poll', 'poll uidinvalid', 'finish'),
                        existing_nullable=False)

    # Relax the easuid id columns back to nullable.
    if 'easuid' in tables:
        for column_name in ('message_id', 'fld_uid', 'msg_uid'):
            op.alter_column('easuid', column_name,
                            existing_type=sa.Integer(), nullable=True)
def upgrade():
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Only EAS-enabled deployments have this constraint.
    if "easfoldersyncstatus" not in Base.metadata.tables:
        return

    # Recreate the easuid -> folder FK with ON DELETE CASCADE.
    op.drop_constraint("easuid_ibfk_3", "easuid", type_="foreignkey")
    op.create_foreign_key(
        "easuid_ibfk_3",
        "easuid",
        "folder",
        ["folder_id"],
        ["id"],
        ondelete="CASCADE",
    )
Beispiel #56
0
def upgrade():
    from inbox.ignition import main_engine

    engine = main_engine()

    if not engine.has_table("easaccount"):
        return

    # We allow nullable=True because we don't have usernames for existing accounts.
    # Furthermore, we won't always get a username.
    from inbox.models.constants import MAX_INDEXABLE_LENGTH

    op.add_column("easaccount",
                  sa.Column("username", sa.String(255), nullable=True))
    op.add_column(
        "easaccount",
        sa.Column("eas_auth", sa.String(MAX_INDEXABLE_LENGTH), nullable=True),
    )

    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)
    from inbox.models.session import session_scope

    class EASAccount(Base):
        __table__ = Base.metadata.tables["easaccount"]

    # Backfill: existing accounts authenticate with their email address.
    with session_scope(versioned=False) as db_session:
        for account in db_session.query(EASAccount).all():
            account.eas_auth = account.email_address
            db_session.add(account)
        db_session.commit()

    # Every row now has a value, so the column can be made non-nullable.
    op.alter_column(
        "easaccount",
        "eas_auth",
        nullable=False,
        existing_type=sa.String(MAX_INDEXABLE_LENGTH),
    )
Beispiel #57
0
def upgrade():
    """Backfill easuid.easfoldersyncstatus_id by joining each uid to its
    folder sync status via (folder, device, account), 1000 rows at a time.
    """
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table("easuid"):
        return
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class EASUid(Base):
        __table__ = Base.metadata.tables["easuid"]

    class EASFolderSyncStatus(Base):
        __table__ = Base.metadata.tables["easfoldersyncstatus"]

    with session_scope(versioned=False) as db_session:
        # Nothing to backfill on an empty table.
        max_easuid = db_session.query(sa.func.max(EASUid.id)).scalar()
        if max_easuid is None:
            return
        # Each pass picks up to 1000 uids whose easfoldersyncstatus_id is
        # still NULL (the is_(None) join condition), assigns it, and commits;
        # filled rows stop matching, so the loop terminates when none remain.
        while True:
            results = (
                db_session.query(EASUid, EASFolderSyncStatus)
                .join(
                    EASFolderSyncStatus,
                    sa.and_(
                        EASUid.fld_uid == EASFolderSyncStatus.eas_folder_id,
                        EASUid.device_id == EASFolderSyncStatus.device_id,
                        EASUid.easaccount_id == EASFolderSyncStatus.account_id,
                        EASUid.easfoldersyncstatus_id.is_(None),
                    ),
                )
                .limit(1000)
                .all()
            )
            if not results:
                return
            for easuid, easfoldersyncstatus in results:
                easuid.easfoldersyncstatus_id = easfoldersyncstatus.id
            db_session.commit()
Beispiel #58
0
def upgrade():
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)

    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    # Rebuild folder's account FK and widen its unique key to include
    # canonical_name (the FK must be dropped first on MySQL).
    op.drop_constraint("folder_fk1", "folder", type_="foreignkey")
    op.drop_constraint("account_id", "folder", type_="unique")
    op.create_foreign_key("folder_fk1", "folder", "account",
                          ["account_id"], ["id"])
    op.create_unique_constraint("account_id", "folder",
                                ["account_id", "name", "canonical_name"])

    # EAS-only table; skip on deployments without it.
    if "easfoldersyncstatus" in Base.metadata.tables:
        op.create_unique_constraint("account_id_2", "easfoldersyncstatus",
                                    ["account_id", "eas_folder_id"])
Beispiel #59
0
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine, binds a connection to the migration context, and
    always closes the connection when the migrations finish.
    """
    engine = main_engine(pool_size=1, max_overflow=0)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
Beispiel #60
0
def upgrade():
    from inbox.ignition import main_engine

    op.add_column("actionlog", sa.Column("type", sa.String(16)))

    # Update action_log entries
    from inbox.models import Account, ActionLog, Namespace
    from inbox.models.session import session_scope

    # Tag every pending non-EAS entry with the base discriminator value.
    with session_scope() as db_session:
        pending = (db_session.query(ActionLog)
                   .join(Namespace)
                   .join(Account)
                   .filter(ActionLog.status == "pending",
                           Account.discriminator != "easaccount")
                   .options(contains_eager(ActionLog.namespace,
                                           Namespace.account)))

        print("Updating {} action_log entries".format(pending.count()))

        for entry in pending.all():
            entry.type = "actionlog"

        db_session.commit()

    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table("easaccount"):
        return

    # Joined-table subclass of actionlog for EAS-specific retry state.
    op.create_table(
        "easactionlog",
        sa.Column("id", sa.Integer()),
        sa.Column(
            "secondary_status",
            sa.Enum("pending", "successful", "failed"),
            server_default="pending",
        ),
        sa.Column("secondary_retries",
                  sa.Integer(),
                  nullable=False,
                  server_default="0"),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["id"], ["actionlog.id"], ondelete="CASCADE"),
    )