Пример #1
0
def downgrade():

    from inbox.server.config import load_config
    load_config()

    # These all inherit HasPublicID
    from inbox.server.models.tables.base import (
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession, HasPublicID)

    classes = [
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession]

    for c in classes:
        assert issubclass(c, HasPublicID)
        print '[{0}] Dropping public_id column... '.format(c.__tablename__),
        op.drop_column(c.__tablename__, 'public_id')

        print 'Dropping index... ',
        op.drop_index(
            'ix_{0}_public_id'.format(c.__tablename__),
            table_name=c.__tablename__)

        print 'Done.'
Пример #2
0
def upgrade():

    from inbox.server.config import load_config
    load_config()
    from inbox.sqlalchemy.util import generate_public_id
    from inbox.server.models import session_scope

    # These all inherit HasPublicID
    from inbox.server.models.tables.base import (
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession, HasPublicID)

    classes = [
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession]

    for c in classes:
        assert issubclass(c, HasPublicID)
        print '[{0}] adding public_id column... '.format(c.__tablename__),
        sys.stdout.flush()
        op.add_column(c.__tablename__, sa.Column(
            'public_id', mysql.BINARY(16), nullable=False))

        print 'adding index... ',
        op.create_index(
            'ix_{0}_public_id'.format(c.__tablename__),
            c.__tablename__,
            ['public_id'],
            unique=False)

        print 'Done!'
        sys.stdout.flush()

    print 'Finished adding columns. \nNow generating public_ids'

    with session_scope() as db_session:
        count = 0
        for c in classes:
            garbage_collect()
            print '[{0}] Loading rows. '.format(c.__name__),
            sys.stdout.flush()
            print 'Generating public_ids',
            sys.stdout.flush()
            for r in db_session.query(c).yield_per(chunk_size):
                count += 1
                r.public_id = generate_public_id()
                if not count % chunk_size:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                    db_session.commit()
                    garbage_collect()
            sys.stdout.write(' Saving. '.format(c.__name__)),
            # sys.stdout.flush()
            sys.stdout.flush()
            db_session.commit()
            sys.stdout.write('Done!\n')
            sys.stdout.flush()
        print '\nUpdgraded OK!\n'
def upgrade():
    """Wrap each Message's `from_addr` and `sender_addr` in a one-element
    list (migrating scalar address fields to list-valued ones)."""
    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope
    from inbox.server.models.tables.base import Message

    with session_scope() as db_session:
        for msg in db_session.query(Message).all():
            msg.from_addr = [msg.from_addr]
            msg.sender_addr = [msg.sender_addr]
        db_session.commit()
def upgrade():
    """Wrap each Message's `from_addr` and `sender_addr` in a one-element
    list (migrating scalar address fields to list-valued ones)."""

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope
    from inbox.server.models.tables.base import Message

    with session_scope() as db_session:
        # Loads every Message into memory at once; acceptable only for
        # small tables -- NOTE(review): consider yield_per for large ones.
        results = db_session.query(Message).all()
        for message in results:
            message.from_addr = [message.from_addr]
            message.sender_addr = [message.sender_addr]
        db_session.commit()
Пример #5
0
File: env.py — Project: cenk/inbox
# Alembic migration environment (env.py) module-level setup.
from sqlalchemy import create_engine, pool

from logging.config import fileConfig

from inbox.server.config import load_config

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# NOTE(review): `context` is not imported in this view; presumably
# `from alembic import context` appears elsewhere -- confirm.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# Load Inbox server configuration.
load_config()

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
from inbox.server.models import Base, db_uri
# Target metadata used by Alembic autogenerate to diff models vs. schema.
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.
Пример #6
0
"""

# revision identifiers, used by Alembic.
revision = '1c3f1812f2d9'
down_revision = '482338e7a7d6'

from alembic import op
import sqlalchemy as sa

from sqlalchemy.sql import table, column
from sqlalchemy.ext.declarative import declarative_base

from inbox.server.config import load_config

# Configuration must be loaded before importing the models/engine below.
load_config()
from inbox.server.models import session_scope
from inbox.server.models.ignition import engine

# Reflect the live database schema so migration code can map classes onto
# the tables as they currently exist (not as the models define them).
Base = declarative_base()
Base.metadata.reflect(engine)


def upgrade():
    """Run the three genericization steps of this migration, in order."""
    genericize_imapaccount()
    genericize_thread()
    genericize_namespace_contact_foldersync()


def downgrade():
    """Reverse the migration.

    NOTE(review): only the imapaccount step is reversed, while upgrade()
    performs three steps -- confirm the thread/namespace changes are
    intentionally irreversible.
    """
    downgrade_imapaccount()
def upgrade():
    """Replace string folder_name fields with proper Folder rows + FKs.

    Three phases:
      1. DDL: create `folder` and `internaltag` tables, add *_folder_id
         columns and foreign keys to account/folderitem/imapuid (and easuid
         when present).
      2. Data: backfill Folder rows and re-point every FolderItem, ImapUid,
         EASUid and account at them.
      3. DDL: enforce NOT NULL / unique constraints and drop the old
         *_folder_name columns.

    NOTE(review): relies on module-level CHUNK_SIZE, folder_name_subst_map,
    relationship and backref (defined/imported elsewhere in this file) --
    confirm they are in scope.
    """
    # Set to True below when the reflected schema contains an `easuid`
    # table (i.e. this deployment has Exchange ActiveSync support).
    easupdate = False

    print 'Creating new tables and columns...'
    op.create_table('folder',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('account_id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(
                        length=191, collation='utf8mb4_general_ci'),
                        nullable=True),
                    sa.ForeignKeyConstraint(['account_id'], ['account.id'],
                                            ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('account_id', 'name')
                    )
    op.create_table('internaltag',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('public_id', mysql.BINARY(16), nullable=False),
                    sa.Column('namespace_id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=191), nullable=False),
                    sa.Column('thread_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'],
                                            ondelete='CASCADE'),
                    sa.ForeignKeyConstraint(['thread_id'], ['thread.id'],
                                            ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('namespace_id', 'name')
                    )
    # folder_id stays nullable until the backfill below has populated it
    # for every row; it is made NOT NULL at the end.
    op.add_column('folderitem',
                  sa.Column('folder_id', sa.Integer(), nullable=True))
    op.create_foreign_key("fk_folder_id", "folderitem",
                          "folder", ["folder_id"], ["id"],
                          ondelete='CASCADE')

    op.add_column('account', sa.Column('inbox_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('sent_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('drafts_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('spam_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('trash_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('archive_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('all_folder_id',
                                       sa.Integer, nullable=True))
    op.add_column('account', sa.Column('starred_folder_id',
                                       sa.Integer, nullable=True))
    op.create_foreign_key('account_ibfk_2', 'account', 'folder',
                          ['inbox_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_3', 'account', 'folder',
                          ['sent_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_4', 'account', 'folder',
                          ['drafts_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_5', 'account', 'folder',
                          ['spam_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_6', 'account', 'folder',
                          ['trash_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_7', 'account', 'folder',
                          ['archive_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_8', 'account', 'folder',
                          ['all_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_9', 'account', 'folder',
                          ['starred_folder_id'], ['id'])

    op.add_column('imapuid', sa.Column('folder_id', sa.Integer, nullable=True))
    op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder',
                          ['folder_id'], ['id'])

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine

    # Reflect the current schema to find out whether the EAS tables exist.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if 'easuid' in Base.metadata.tables:
        easupdate = True
        print 'Adding new EASUid columns...'

        op.add_column('easuid',
                      sa.Column('fld_uid', sa.Integer(), nullable=True))

        op.add_column('easuid',
                      sa.Column('folder_id', sa.Integer(), nullable=True))

        op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder',
                              ['folder_id'], ['id'])

        op.create_unique_constraint(
            'uq_easuid_folder_id_msg_uid_easaccount_id',
            'easuid',
            ['folder_id', 'msg_uid', 'easaccount_id'])

        op.create_index('easuid_easaccount_id_folder_id', 'easuid',
                        ['easaccount_id', 'folder_id'])

    # Include our changes to the EASUid table:
    # (re-reflect so the mapped classes below see the columns just added)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Minimal ad-hoc mapped classes over the reflected tables; used only
    # for the data-backfill phase of this migration.
    class Folder(Base):
        __table__ = Base.metadata.tables['folder']
        account = relationship('Account', foreign_keys='Folder.account_id',
                               backref='folders')

    class FolderItem(Base):
        __table__ = Base.metadata.tables['folderitem']
        folder = relationship('Folder', backref='threads', lazy='joined')

    class Thread(Base):
        __table__ = Base.metadata.tables['thread']
        folderitems = relationship('FolderItem', backref="thread",
                                   single_parent=True,
                                   cascade='all, delete, delete-orphan')
        namespace = relationship('Namespace', backref='threads')

    class Namespace(Base):
        __table__ = Base.metadata.tables['namespace']
        account = relationship('Account',
                               backref=backref('namespace', uselist=False))

    class Account(Base):
        __table__ = Base.metadata.tables['account']
        inbox_folder = relationship('Folder',
                                    foreign_keys='Account.inbox_folder_id')
        sent_folder = relationship('Folder',
                                   foreign_keys='Account.sent_folder_id')
        drafts_folder = relationship('Folder',
                                     foreign_keys='Account.drafts_folder_id')
        spam_folder = relationship('Folder',
                                   foreign_keys='Account.spam_folder_id')
        trash_folder = relationship('Folder',
                                    foreign_keys='Account.trash_folder_id')
        starred_folder = relationship('Folder',
                                      foreign_keys='Account.starred_folder_id')
        archive_folder = relationship('Folder',
                                      foreign_keys='Account.archive_folder_id')
        all_folder = relationship('Folder',
                                  foreign_keys='Account.all_folder_id')

    class ImapUid(Base):
        __table__ = Base.metadata.tables['imapuid']
        folder = relationship('Folder', backref='imapuids', lazy='joined')

    if easupdate:
        class EASUid(Base):
            __table__ = Base.metadata.tables['easuid']
            folder = relationship('Folder', foreign_keys='EASUid.folder_id',
                                  backref='easuids', lazy='joined')

    print 'Creating Folder rows and migrating FolderItems...'
    # not many folders per account, so shouldn't grow that big
    with session_scope(versioned=False, ignore_soft_deletes=False) as db_session:
        # (account_id, folder name) -> Folder; acts as a cache so each
        # distinct folder is created exactly once.
        folders = dict([((i.account_id, i.name), i)
                        for i in db_session.query(Folder).all()])
        count = 0
        for folderitem in db_session.query(FolderItem).join(Thread).join(
                Namespace).yield_per(CHUNK_SIZE):
            account_id = folderitem.thread.namespace.account_id
            # NOTE(review): if the provider is neither 'Gmail' nor 'EAS',
            # new_folder_name keeps its value from the *previous* iteration
            # (or is unbound on the first) -- confirm all providers are
            # covered by these two branches.
            if folderitem.thread.namespace.account.provider == 'Gmail':
                if folderitem.folder_name in folder_name_subst_map:
                    new_folder_name = folder_name_subst_map[
                        folderitem.folder_name]
                else:
                    new_folder_name = folderitem.folder_name
            elif folderitem.thread.namespace.account.provider == 'EAS':
                new_folder_name = folderitem.folder_name.title()

            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id,
                           name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            folderitem.folder = f
            count += 1
            # Commit in chunks to bound the session's dirty-object set.
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        print 'Migrating ImapUids to reference Folder rows...'
        for imapuid in db_session.query(ImapUid).yield_per(CHUNK_SIZE):
            account_id = imapuid.imapaccount_id
            if imapuid.folder_name in folder_name_subst_map:
                new_folder_name = folder_name_subst_map[imapuid.folder_name]
            else:
                new_folder_name = imapuid.folder_name
            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id,
                           name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            imapuid.folder = f
            count += 1
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        if easupdate:
            print 'Migrating EASUids to reference Folder rows...'

            # EAS folder names are used as-is (no substitution map).
            for easuid in db_session.query(EASUid).yield_per(CHUNK_SIZE):
                account_id = easuid.easaccount_id
                new_folder_name = easuid.folder_name

                if (account_id, new_folder_name) in folders:
                    f = folders[(account_id, new_folder_name)]
                else:
                    f = Folder(account_id=account_id,
                               name=new_folder_name)
                    folders[(account_id, new_folder_name)] = f
                easuid.folder = f
                count += 1
                if count > CHUNK_SIZE:
                    db_session.commit()
                    count = 0
            db_session.commit()

        print 'Migrating *_folder_name fields to reference Folder rows...'
        for account in db_session.query(Account).filter_by(provider='Gmail'):
            if account.inbox_folder_name:
                # hard replace INBOX with canonicalized caps
                k = (account.id, 'Inbox')
                if k in folders:
                    account.inbox_folder = folders[k]
                else:
                    account.inbox_folder = Folder(
                        account_id=account.id,
                        name=folder_name_subst_map[account.inbox_folder_name])
            if account.sent_folder_name:
                k = (account.id, account.sent_folder_name)
                if k in folders:
                    account.sent_folder = folders[k]
                else:
                    account.sent_folder = Folder(
                        account_id=account.id,
                        name=account.sent_folder_name)
            if account.drafts_folder_name:
                k = (account.id, account.drafts_folder_name)
                if k in folders:
                    account.drafts_folder = folders[k]
                else:
                    account.drafts_folder = Folder(
                        account_id=account.id,
                        name=account.drafts_folder_name)
            # all/archive mismatch is intentional; semantics have changed
            if account.archive_folder_name:
                k = (account.id, account.archive_folder_name)
                if k in folders:
                    account.all_folder = folders[k]
                else:
                    account.all_folder = Folder(
                        account_id=account.id,
                        name=account.archive_folder_name)
        db_session.commit()

        if easupdate:
            print "Migrating EAS accounts' *_folder_name fields to reference "\
                  "Folder rows..."

            for account in db_session.query(Account).filter_by(provider='EAS'):
                if account.inbox_folder_name:
                    k = (account.id, account.inbox_folder_name)
                    if k in folders:
                        account.inbox_folder = folders[k]
                    else:
                        account.inbox_folder = Folder(
                            account_id=account.id,
                            name=account.inbox_folder_name)
                if account.sent_folder_name:
                    k = (account.id, account.sent_folder_name)
                    if k in folders:
                        account.sent_folder = folders[k]
                    else:
                        account.sent_folder = Folder(
                            account_id=account.id,
                            name=account.sent_folder_name)
                if account.drafts_folder_name:
                    k = (account.id, account.drafts_folder_name)
                    if k in folders:
                        account.drafts_folder = folders[k]
                    else:
                        account.drafts_folder = Folder(
                            account_id=account.id,
                            name=account.drafts_folder_name)
                if account.archive_folder_name:
                    k = (account.id, account.archive_folder_name)
                    if k in folders:
                        account.archive_folder = folders[k]
                    else:
                        account.archive_folder = Folder(
                            account_id=account.id,
                            name=account.archive_folder_name)
            db_session.commit()

    # Phase 3: backfill is complete, so enforce the new constraints and
    # drop the now-redundant *_folder_name string columns.
    print 'Final schema tweaks and new constraint enforcement'
    op.alter_column('folderitem', 'folder_id', existing_type=sa.Integer(),
                    nullable=False)
    op.drop_constraint('folder_name', 'folderitem', type_='unique')
    op.drop_constraint('folder_name', 'imapuid', type_='unique')
    op.create_unique_constraint('uq_imapuid_folder_id_msg_uid_imapaccount_id',
                                'imapuid',
                                ['folder_id', 'msg_uid', 'imapaccount_id'])
    op.drop_column('folderitem', 'folder_name')
    op.drop_column('imapuid', 'folder_name')
    op.drop_column('account', 'inbox_folder_name')
    op.drop_column('account', 'drafts_folder_name')
    op.drop_column('account', 'sent_folder_name')
    op.drop_column('account', 'archive_folder_name')

    if easupdate:
        print 'Dropping old EASUid columns...'

        op.drop_constraint('folder_name', 'easuid', type_='unique')
        op.drop_index('easuid_easaccount_id_folder_name', 'easuid')
        op.drop_column('easuid', 'folder_name')
Пример #8
0
def config():
    """Load the test configuration (from module-level TEST_CONFIG) and
    return the populated config object."""
    from inbox.server.config import load_config, config
    load_config(filename=TEST_CONFIG)
    return config
Пример #9
0
def upgrade():

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope, Session
    from inbox.server.models.ignition import engine

    from inbox.server.models.tables.base import (Part, Namespace, Message,
                                                 Thread)
    from inbox.sqlalchemy.util import JSON

    print 'Creating table for parts...'
    op.create_table(
        'part', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('message_id', sa.Integer(), nullable=True),
        sa.Column('walk_index', sa.Integer(), nullable=True),
        sa.Column('content_disposition',
                  sa.Enum('inline', 'attachment'),
                  nullable=True),
        sa.Column('content_id', sa.String(length=255), nullable=True),
        sa.Column('misc_keyval', JSON(), nullable=True),
        sa.Column('is_inboxapp_attachment',
                  sa.Boolean(),
                  server_default=sa.sql.expression.false(),
                  nullable=True),
        sa.ForeignKeyConstraint(['id'], ['block.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['message_id'], ['message.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('message_id', 'walk_index'))

    print 'Reflecting old block table schema'
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Block_(Base):  # old schema, reflected from database table
        __table__ = Base.metadata.tables['block']

    print 'Adding namespace_id column to blocks ',
    op.add_column(u'block',
                  sa.Column('namespace_id', sa.Integer(), nullable=False))

    print 'Migrating from blocks to parts'
    new_parts = []
    with session_scope() as db_session:
        for block in db_session.query(Block_).yield_per(chunk_size):

            # Move relevant fields
            p = Part()
            p.size = block.size
            p.data_sha256 = block.data_sha256
            p.message_id = block.message_id
            p.walk_index = block.walk_index
            p.content_disposition = block.content_disposition
            p.content_id = block.content_id
            p.misc_keyval = block.misc_keyval
            p.is_inboxapp_attachment

            old_namespace = db_session.query(Namespace) \
                .join(Message.thread, Thread.namespace) \
                .filter(Message.id == block.message_id).one()
            p.namespace_id = old_namespace.id

            # Commit after column modifications
            new_parts.append(p)

        print 'Deleting old blocks (now parts)... ',
        db_session.query(Block_).delete()
        db_session.commit()
        print 'Done!'

    print 'Removing `message_id` constraint from block'
    op.drop_constraint('block_ibfk_1', 'block', type_='foreignkey')

    print 'Creating foreign key for block -> namespace on block'
    op.create_foreign_key('block_ibfk_1',
                          'block',
                          'namespace', ['namespace_id'], ['id'],
                          ondelete='CASCADE')

    print 'Dropping old block columns which are now in part'
    op.drop_column(u'block', u'walk_index')
    op.drop_column(u'block', u'content_disposition')
    op.drop_column(u'block', u'misc_keyval')
    op.drop_column(u'block', u'content_id')
    op.drop_column(u'block', u'is_inboxapp_attachment')
    op.drop_constraint(u'message_id', 'block', type_='unique')
    op.drop_column(u'block', u'message_id')

    # Note: here we use the regular database session, since the transaction
    # log requires the `namespace` property on objects. We've set the
    # `namespace_id` foreign key, but need to commit the object before the
    # SQLalchemy reference is valid
    no_tx_session = Session(autoflush=True, autocommit=False)
    no_tx_session.add_all(new_parts)
    no_tx_session.commit()

    print 'Done migration blocks to parts!'
Пример #10
0
File: env.py — Project: caitp/inbox
# Alembic migration environment (env.py) module-level setup.
from logging.config import fileConfig

from inbox.server.config import load_config

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# NOTE(review): `context` is not imported in this view; presumably
# `from alembic import context` appears elsewhere -- confirm.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# If alembic was invoked with --tag=test, load the test Inbox config. Otherwise
# load the default config.
if context.get_tag_argument() == 'test':
    load_config('tests/config.cfg')
else:
    load_config()

# Register provider-specific table modules before the models are imported.
from inbox.server.models.tables.base import register_backends
table_mod_for = register_backends()

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
from inbox.server.models import Base
target_metadata = Base.metadata

from inbox.server.models.ignition import db_uri
Пример #11
0
def upgrade():

    from inbox.server.config import load_config
    load_config()
    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine
    from inbox.server.models.tables.imap import ImapAccount
    import inbox.server.auth.gmail as gmail

    # Assert we have the dump file
    if not os.path.isfile(SQL_DUMP_FILENAME):
        print "Can't find old user SQL dump at {0}...\nMigration no users."\
            .format(SQL_DUMP_FILENAME)
        return

    # Imports to `imapaccount_old` table
    with open(SQL_DUMP_FILENAME, 'r') as f:
        print 'Importing old account data...',
        op.execute(f.read())
        print 'OK!'

    Base = declarative_base()
    Base.metadata.reflect(engine)

    class ImapAccount_Old(Base):
        __table__ = Base.metadata.tables['imapaccount_old']

    with session_scope() as db_session:
        migrated_accounts = []

        for acct in db_session.query(ImapAccount_Old):
            print 'Importing {0}'.format(acct.email_address)

            existing_account = db_session.query(ImapAccount)\
                .filter_by(email_address=acct.email_address)
            if existing_account.count() > 0:
                print 'Already have account for {0}'.format(acct.email_address)
                continue

            # Create a mock OAuth response using data from the old table
            mock_response = dict(email=acct.email_address,
                                 issued_to=acct.o_token_issued_to,
                                 user_id=acct.o_user_id,
                                 access_token=acct.o_access_token,
                                 id_token=acct.o_id_token,
                                 expires_in=acct.o_expires_in,
                                 access_type=acct.o_access_type,
                                 token_type=acct.o_token_type,
                                 audience=acct.o_audience,
                                 scope=acct.o_scope,
                                 refresh_token=acct.o_refresh_token,
                                 verified_email=acct.o_verified_email)

            new_account = gmail.create_account(db_session, acct.email_address,
                                               mock_response)

            # Note that this doesn't verify **anything** about the account.
            # We're just doing the migration now
            db_session.add(new_account)
            db_session.commit()
            migrated_accounts.append(new_account)

        print '\nDone! Imported {0} accounts.'.format(len(migrated_accounts))
        print '\nNow verifying refresh tokens...\n'

        verified_accounts = []
        for acct in migrated_accounts:
            try:
                print 'Verifying {0}... '.format(acct.email_address),
                gmail.verify_account(db_session, acct)
                verified_accounts.append(acct)
                print 'OK!'
            except Exception, e:
                print 'FAILED!', e

        print 'Done! Verified {0} of {1}'.format(len(verified_accounts),
                                                 len(migrated_accounts))
Пример #12
0
def upgrade():
    """Import accounts from an old SQL dump into the new schema via the
    gmail auth module, then verify each migrated account's refresh token.

    NOTE(review): relies on module-level `os`, `op` and SQL_DUMP_FILENAME
    defined elsewhere in this file -- confirm they are in scope.
    """
    from inbox.server.config import load_config
    load_config()
    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine
    from inbox.server.models.tables.imap import ImapAccount
    import inbox.server.auth.gmail as gmail


    # Assert we have the dump file
    if not os.path.isfile(SQL_DUMP_FILENAME):
        print "Can't find old user SQL dump at {0}...\nMigration no users."\
            .format(SQL_DUMP_FILENAME)
        return

    # Imports to `imapaccount_old` table
    with open(SQL_DUMP_FILENAME, 'r') as f:
        print 'Importing old account data...',
        op.execute(f.read())
        print 'OK!'

    # Map a throwaway class onto the freshly imported table.
    Base = declarative_base()
    Base.metadata.reflect(engine)
    class ImapAccount_Old(Base):
        __table__ = Base.metadata.tables['imapaccount_old']


    with session_scope() as db_session:
        migrated_accounts = []

        for acct in db_session.query(ImapAccount_Old):
            print 'Importing {0}'. format(acct.email_address)

            # Skip addresses that already exist in the new schema.
            existing_account = db_session.query(ImapAccount)\
                .filter_by(email_address=acct.email_address)
            if existing_account.count() > 0:
                print 'Already have account for {0}'. format(acct.email_address)
                continue

            # Create a mock OAuth response using data from the old table
            mock_response = dict(
                email=acct.email_address,
                issued_to=acct.o_token_issued_to,
                user_id=acct.o_user_id,
                access_token=acct.o_access_token,
                id_token=acct.o_id_token,
                expires_in=acct.o_expires_in,
                access_type=acct.o_access_type,
                token_type=acct.o_token_type,
                audience=acct.o_audience,
                scope=acct.o_scope,
                refresh_token=acct.o_refresh_token,
                verified_email=acct.o_verified_email
                )

            new_account = gmail.create_account(db_session, acct.email_address, mock_response)

            # Note that this doesn't verify **anything** about the account.
            # We're just doing the migration now
            db_session.add(new_account)
            db_session.commit()
            migrated_accounts.append(new_account)

        print '\nDone! Imported {0} accounts.'.format(len(migrated_accounts))
        print '\nNow verifying refresh tokens...\n'

        verified_accounts = []
        for acct in migrated_accounts:
            try:
                print 'Verifying {0}... '.format(acct.email_address),
                gmail.verify_account(db_session, acct)
                verified_accounts.append(acct)
                print 'OK!'
            except Exception, e:
                # Best-effort: report the failure and keep verifying others.
                print 'FAILED!', e

        print 'Done! Verified {0} of {1}'.format(len(verified_accounts), len(migrated_accounts))
Пример #13
0
def upgrade():
    """Migrate string-valued ``*_folder_name`` columns into a normalized
    ``folder`` table.

    Steps, in order:
      1. Create the ``folder`` and ``internaltag`` tables and add
         ``folder_id`` foreign-key columns to ``folderitem``, ``account``
         and ``imapuid`` (and ``easuid`` when that table exists).
      2. Reflect the live schema into throwaway ORM classes and copy the
         existing folder-name data into ``Folder`` rows, chunk-committing
         every CHUNK_SIZE rows to bound transaction size.
      3. Drop the old ``folder_name`` columns/constraints and enforce the
         new uniqueness constraints.
    """
    # Set to True below if an 'easuid' table is found in the reflected
    # schema (i.e. this deployment has Exchange ActiveSync support).
    easupdate = False

    print 'Creating new tables and columns...'
    op.create_table(
        'folder', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('name',
                  sa.String(length=191, collation='utf8mb4_general_ci'),
                  nullable=True),
        sa.ForeignKeyConstraint(['account_id'], ['account.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('account_id', 'name'))
    op.create_table(
        'internaltag', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('public_id', mysql.BINARY(16), nullable=False),
        sa.Column('namespace_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=191), nullable=False),
        sa.Column('thread_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['namespace_id'], ['namespace.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['thread_id'], ['thread.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('namespace_id', 'name'))
    # Nullable at first so existing rows can be backfilled; tightened to
    # NOT NULL at the end of the migration.
    op.add_column('folderitem',
                  sa.Column('folder_id', sa.Integer(), nullable=True))
    op.create_foreign_key("fk_folder_id",
                          "folderitem",
                          "folder", ["folder_id"], ["id"],
                          ondelete='CASCADE')

    op.add_column('account',
                  sa.Column('inbox_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('sent_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('drafts_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('spam_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('trash_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('archive_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('all_folder_id', sa.Integer, nullable=True))
    op.add_column('account',
                  sa.Column('starred_folder_id', sa.Integer, nullable=True))
    op.create_foreign_key('account_ibfk_2', 'account', 'folder',
                          ['inbox_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_3', 'account', 'folder',
                          ['sent_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_4', 'account', 'folder',
                          ['drafts_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_5', 'account', 'folder',
                          ['spam_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_6', 'account', 'folder',
                          ['trash_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_7', 'account', 'folder',
                          ['archive_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_8', 'account', 'folder',
                          ['all_folder_id'], ['id'])
    op.create_foreign_key('account_ibfk_9', 'account', 'folder',
                          ['starred_folder_id'], ['id'])

    op.add_column('imapuid', sa.Column('folder_id', sa.Integer, nullable=True))
    op.create_foreign_key('imapuid_ibfk_3', 'imapuid', 'folder', ['folder_id'],
                          ['id'])

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine

    # Reflect the schema to detect whether this deployment has EAS tables.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if 'easuid' in Base.metadata.tables:
        easupdate = True
        print 'Adding new EASUid columns...'

        op.add_column('easuid',
                      sa.Column('fld_uid', sa.Integer(), nullable=True))

        op.add_column('easuid',
                      sa.Column('folder_id', sa.Integer(), nullable=True))

        op.create_foreign_key('easuid_ibfk_3', 'easuid', 'folder',
                              ['folder_id'], ['id'])

        op.create_unique_constraint(
            'uq_easuid_folder_id_msg_uid_easaccount_id', 'easuid',
            ['folder_id', 'msg_uid', 'easaccount_id'])

        op.create_index('easuid_easaccount_id_folder_id', 'easuid',
                        ['easaccount_id', 'folder_id'])

    # Include our changes to the EASUid table:
    # (re-reflect so the ORM classes below see the columns just added)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Minimal throwaway ORM mappings over the reflected tables, used only
    # for the data-migration portion of this script.
    class Folder(Base):
        __table__ = Base.metadata.tables['folder']
        account = relationship('Account',
                               foreign_keys='Folder.account_id',
                               backref='folders')

    class FolderItem(Base):
        __table__ = Base.metadata.tables['folderitem']
        folder = relationship('Folder', backref='threads', lazy='joined')

    class Thread(Base):
        __table__ = Base.metadata.tables['thread']
        folderitems = relationship('FolderItem',
                                   backref="thread",
                                   single_parent=True,
                                   cascade='all, delete, delete-orphan')
        namespace = relationship('Namespace', backref='threads')

    class Namespace(Base):
        __table__ = Base.metadata.tables['namespace']
        account = relationship('Account',
                               backref=backref('namespace', uselist=False))

    class Account(Base):
        __table__ = Base.metadata.tables['account']
        inbox_folder = relationship('Folder',
                                    foreign_keys='Account.inbox_folder_id')
        sent_folder = relationship('Folder',
                                   foreign_keys='Account.sent_folder_id')
        drafts_folder = relationship('Folder',
                                     foreign_keys='Account.drafts_folder_id')
        spam_folder = relationship('Folder',
                                   foreign_keys='Account.spam_folder_id')
        trash_folder = relationship('Folder',
                                    foreign_keys='Account.trash_folder_id')
        starred_folder = relationship('Folder',
                                      foreign_keys='Account.starred_folder_id')
        archive_folder = relationship('Folder',
                                      foreign_keys='Account.archive_folder_id')
        all_folder = relationship('Folder',
                                  foreign_keys='Account.all_folder_id')

    class ImapUid(Base):
        __table__ = Base.metadata.tables['imapuid']
        folder = relationship('Folder', backref='imapuids', lazy='joined')

    if easupdate:

        class EASUid(Base):
            __table__ = Base.metadata.tables['easuid']
            folder = relationship('Folder',
                                  foreign_keys='EASUid.folder_id',
                                  backref='easuids',
                                  lazy='joined')

    print 'Creating Folder rows and migrating FolderItems...'
    # not many folders per account, so shouldn't grow that big
    with session_scope(versioned=False,
                       ignore_soft_deletes=False) as db_session:
        # In-memory cache keyed by (account_id, folder_name) so each
        # distinct folder gets exactly one Folder row.
        folders = dict([((i.account_id, i.name), i)
                        for i in db_session.query(Folder).all()])
        count = 0
        for folderitem in db_session.query(FolderItem).join(Thread).join(
                Namespace).yield_per(CHUNK_SIZE):
            account_id = folderitem.thread.namespace.account_id
            # NOTE(review): if provider is neither 'Gmail' nor 'EAS',
            # new_folder_name is never assigned here and the first such row
            # would raise NameError (or silently reuse the previous loop
            # iteration's value) -- presumably only these two providers
            # exist at this migration's point in history; confirm.
            if folderitem.thread.namespace.account.provider == 'Gmail':
                if folderitem.folder_name in folder_name_subst_map:
                    new_folder_name = folder_name_subst_map[
                        folderitem.folder_name]
                else:
                    new_folder_name = folderitem.folder_name
            elif folderitem.thread.namespace.account.provider == 'EAS':
                new_folder_name = folderitem.folder_name.title()

            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id, name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            folderitem.folder = f
            count += 1
            # Chunked commits keep transactions (and memory) bounded.
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        print 'Migrating ImapUids to reference Folder rows...'
        for imapuid in db_session.query(ImapUid).yield_per(CHUNK_SIZE):
            account_id = imapuid.imapaccount_id
            if imapuid.folder_name in folder_name_subst_map:
                new_folder_name = folder_name_subst_map[imapuid.folder_name]
            else:
                new_folder_name = imapuid.folder_name
            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id, name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            imapuid.folder = f
            count += 1
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        if easupdate:
            print 'Migrating EASUids to reference Folder rows...'

            for easuid in db_session.query(EASUid).yield_per(CHUNK_SIZE):
                account_id = easuid.easaccount_id
                new_folder_name = easuid.folder_name

                if (account_id, new_folder_name) in folders:
                    f = folders[(account_id, new_folder_name)]
                else:
                    f = Folder(account_id=account_id, name=new_folder_name)
                    folders[(account_id, new_folder_name)] = f
                easuid.folder = f
                count += 1
                if count > CHUNK_SIZE:
                    db_session.commit()
                    count = 0
            db_session.commit()

        print 'Migrating *_folder_name fields to reference Folder rows...'
        for account in db_session.query(Account).filter_by(provider='Gmail'):
            if account.inbox_folder_name:
                # hard replace INBOX with canonicalized caps
                k = (account.id, 'Inbox')
                if k in folders:
                    account.inbox_folder = folders[k]
                else:
                    account.inbox_folder = Folder(
                        account_id=account.id,
                        name=folder_name_subst_map[account.inbox_folder_name])
            if account.sent_folder_name:
                k = (account.id, account.sent_folder_name)
                if k in folders:
                    account.sent_folder = folders[k]
                else:
                    account.sent_folder = Folder(account_id=account.id,
                                                 name=account.sent_folder_name)
            if account.drafts_folder_name:
                k = (account.id, account.drafts_folder_name)
                if k in folders:
                    account.drafts_folder = folders[k]
                else:
                    account.drafts_folder = Folder(
                        account_id=account.id, name=account.drafts_folder_name)
            # all/archive mismatch is intentional; semantics have changed
            if account.archive_folder_name:
                k = (account.id, account.archive_folder_name)
                if k in folders:
                    account.all_folder = folders[k]
                else:
                    account.all_folder = Folder(
                        account_id=account.id,
                        name=account.archive_folder_name)
        db_session.commit()

        if easupdate:
            print "Migrating EAS accounts' *_folder_name fields to reference "\
                  "Folder rows..."

            for account in db_session.query(Account).filter_by(provider='EAS'):
                if account.inbox_folder_name:
                    k = (account.id, account.inbox_folder_name)
                    if k in folders:
                        account.inbox_folder = folders[k]
                    else:
                        account.inbox_folder = Folder(
                            account_id=account.id,
                            name=account.inbox_folder_name)
                if account.sent_folder_name:
                    k = (account.id, account.sent_folder_name)
                    if k in folders:
                        account.sent_folder = folders[k]
                    else:
                        account.sent_folder = Folder(
                            account_id=account.id,
                            name=account.sent_folder_name)
                if account.drafts_folder_name:
                    k = (account.id, account.drafts_folder_name)
                    if k in folders:
                        account.drafts_folder = folders[k]
                    else:
                        account.drafts_folder = Folder(
                            account_id=account.id,
                            name=account.drafts_folder_name)
                if account.archive_folder_name:
                    k = (account.id, account.archive_folder_name)
                    if k in folders:
                        account.archive_folder = folders[k]
                    else:
                        account.archive_folder = Folder(
                            account_id=account.id,
                            name=account.archive_folder_name)
            db_session.commit()

    print 'Final schema tweaks and new constraint enforcement'
    # Data is backfilled; now make folder_id mandatory and drop the old
    # string-based columns/constraints.
    op.alter_column('folderitem',
                    'folder_id',
                    existing_type=sa.Integer(),
                    nullable=False)
    op.drop_constraint('folder_name', 'folderitem', type_='unique')
    op.drop_constraint('folder_name', 'imapuid', type_='unique')
    op.create_unique_constraint('uq_imapuid_folder_id_msg_uid_imapaccount_id',
                                'imapuid',
                                ['folder_id', 'msg_uid', 'imapaccount_id'])
    op.drop_column('folderitem', 'folder_name')
    op.drop_column('imapuid', 'folder_name')
    op.drop_column('account', 'inbox_folder_name')
    op.drop_column('account', 'drafts_folder_name')
    op.drop_column('account', 'sent_folder_name')
    op.drop_column('account', 'archive_folder_name')

    if easupdate:
        print 'Dropping old EASUid columns...'

        op.drop_constraint('folder_name', 'easuid', type_='unique')
        op.drop_index('easuid_easaccount_id_folder_name', 'easuid')
        op.drop_column('easuid', 'folder_name')
Example #14
0
def config():
    """Load the test configuration (TEST_CONFIG) into the global config."""
    # Bind both names with a single import statement; load_config mutates
    # the module-level config dict in place, so nothing needs returning.
    from inbox.server.config import config as confdict, load_config

    load_config(filename=TEST_CONFIG)
Example #15
0
File: env.py  Project: jre21/inbox
from logging.config import fileConfig

from inbox.server.config import load_config

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# NOTE(review): `context` is not imported in this snippet; in a standard
# Alembic env.py it comes from `from alembic import context` -- confirm.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# If alembic was invoked with --tag=test, load the test Inbox config. Otherwise
# load the default config.
if context.get_tag_argument() == 'test':
    load_config('tests/config.cfg')
else:
    load_config()

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# NOTE: this import must happen *after* load_config(), since the models
# module reads the loaded configuration at import time -- hence it is not
# at the top of the file.
from inbox.server.models import Base, db_uri
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
Example #16
0
def upgrade():

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope, Session
    from inbox.server.models.ignition import engine

    from inbox.server.models.tables.base import (Part, Namespace,
                                                 Message, Thread)
    from inbox.sqlalchemy.util import JSON

    print 'Creating table for parts...'
    op.create_table('part',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('message_id', sa.Integer(), nullable=True),
                    sa.Column('walk_index', sa.Integer(), nullable=True),
                    sa.Column('content_disposition', sa.Enum(
                        'inline', 'attachment'), nullable=True),
                    sa.Column(
                        'content_id', sa.String(length=255), nullable=True),
                    sa.Column('misc_keyval', JSON(), nullable=True),
                    sa.Column('is_inboxapp_attachment', sa.Boolean(),
                              server_default=sa.sql.expression.false(),
                              nullable=True),
                    sa.ForeignKeyConstraint(
                        ['id'], ['block.id'], ondelete='CASCADE'),
                    sa.ForeignKeyConstraint(
                        ['message_id'], ['message.id'], ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('message_id', 'walk_index')
                    )

    print 'Reflecting old block table schema'
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Block_(Base):  # old schema, reflected from database table
        __table__ = Base.metadata.tables['block']

    print 'Adding namespace_id column to blocks ',
    op.add_column(
        u'block', sa.Column('namespace_id', sa.Integer(), nullable=False))

    print 'Migrating from blocks to parts'
    new_parts = []
    with session_scope() as db_session:
        for block in db_session.query(Block_).yield_per(chunk_size):

            # Move relevant fields
            p = Part()
            p.size = block.size
            p.data_sha256 = block.data_sha256
            p.message_id = block.message_id
            p.walk_index = block.walk_index
            p.content_disposition = block.content_disposition
            p.content_id = block.content_id
            p.misc_keyval = block.misc_keyval
            p.is_inboxapp_attachment

            old_namespace = db_session.query(Namespace) \
                .join(Message.thread, Thread.namespace) \
                .filter(Message.id == block.message_id).one()
            p.namespace_id = old_namespace.id

            # Commit after column modifications
            new_parts.append(p)

        print 'Deleting old blocks (now parts)... ',
        db_session.query(Block_).delete()
        db_session.commit()
        print 'Done!'

    print 'Removing `message_id` constraint from block'
    op.drop_constraint('block_ibfk_1', 'block', type_='foreignkey')

    print 'Creating foreign key for block -> namespace on block'
    op.create_foreign_key('block_ibfk_1', 'block', 'namespace',
                          ['namespace_id'], ['id'], ondelete='CASCADE')

    print 'Dropping old block columns which are now in part'
    op.drop_column(u'block', u'walk_index')
    op.drop_column(u'block', u'content_disposition')
    op.drop_column(u'block', u'misc_keyval')
    op.drop_column(u'block', u'content_id')
    op.drop_column(u'block', u'is_inboxapp_attachment')
    op.drop_constraint(u'message_id', 'block', type_='unique')
    op.drop_column(u'block', u'message_id')

    # Note: here we use the regular database session, since the transaction
    # log requires the `namespace` property on objects. We've set the
    # `namespace_id` foreign key, but need to commit the object before the
    # SQLalchemy reference is valid
    no_tx_session = Session(autoflush=True, autocommit=False)
    no_tx_session.add_all(new_parts)
    no_tx_session.commit()

    print 'Done migration blocks to parts!'