Example #1
def genericize_thread():
    class Thread_(Base):
        __table__ = Base.metadata.tables['thread']

    # Get data from columns-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(Thread_.id, Thread_.g_thrid).all()

    to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results]

    # Add new columns
    op.add_column('thread', sa.Column('type', sa.String(16)))

    # Create new table, insert data
    # The table
    op.create_table(
        'imapthread',
        sa.Column('g_thrid', sa.BigInteger(), nullable=True, index=True),
        sa.Column('id', sa.Integer()),
        sa.ForeignKeyConstraint(['id'], ['thread.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))

    # The ad-hoc table for insert
    table_ = table('imapthread', column('g_thrid', sa.BigInteger),
                   column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(table_, to_insert)

    # Drop columns now
    op.drop_column('thread', 'g_thrid')
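All of these examples lean on the session_scope() helper imported from inbox.server.models, whose implementation is not shown on this page. As a rough sketch, it follows the conventional SQLAlchemy pattern of a context manager that commits on success, rolls back on error, and always closes the session; the engine URL below is an assumption, and the project's real helper also accepts flags such as versioned and ignore_soft_deletes (seen in later examples), which are omitted here.

# Hedged sketch of the conventional SQLAlchemy session-scope pattern;
# the connection URL is an assumption, not the project's actual config.
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('mysql://localhost/inbox')
Session = sessionmaker(bind=engine)


@contextmanager
def session_scope():
    """Provide a transactional scope around a series of operations."""
    session = Session()
    try:
        yield session
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()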
Example #3
 def _process_log(self):
     """Scan the transaction log `self.chunk_size` entries at a time,
     publishing matching events to registered hooks."""
     with session_scope() as db_session:
         self.log.info('Scanning tx log from id: {}'.format(
             self.minimum_id))
         unprocessed_txn_count = db_session.query(
             func.count(Transaction.id)).filter(
                 Transaction.table_name == 'message',
                 Transaction.id > self.minimum_id).scalar()
         if unprocessed_txn_count:
             self.log.debug('Total of {0} transactions to process'.format(
                 unprocessed_txn_count))
         for transaction in db_session.query(Transaction) \
                 .filter(Transaction.table_name == 'message',
                         Transaction.id > self.minimum_id)\
                 .order_by(asc(Transaction.id)).yield_per(self.chunk_size):
             namespace_id = transaction.namespace_id
             event_data = EventData(transaction)
             for worker in self.workers[namespace_id]:
                 if worker.match(event_data):
                     # It's important to put a separate class instance on
                     # each queue.
                     worker.enqueue(copy.copy(event_data))
             self.minimum_id = transaction.id
         self.log.debug('Processed tx. setting min id to {0}'.format(
             self.minimum_id))
Example #4
    def __init__(self):
        self.monitor_cls_for = register_backends()

        self.log = get_logger()
        # { account_id: MailSyncMonitor() }
        self.monitors = dict()
        # READ ONLY from API calls, writes happen from callbacks from monitor
        # greenlets.
        # { 'account_id': { 'state': 'initial sync', 'status': '0'} }
        # 'state' can be ['initial sync', 'poll']
        # 'status' is the percent-done for initial sync, polling start time
        # otherwise
        # all data in here ought to be msgpack-serializable!
        self.statuses = defaultdict(dict)

        self.contact_sync_monitors = dict()

        # Restart existing active syncs.
        # (Later we will want to partition these across different machines!)
        with session_scope() as db_session:
            # XXX: I think we can do some sqlalchemy magic to make it so we
            # can query on the attribute sync_active.
            for account_id, in db_session.query(Account.id)\
                    .filter(~Account.sync_host.is_(None)):
                self.start_sync(account_id)
Example #6
File: api.py Project: jre21/inbox
    def sync_status(self):
        """ Returns data representing the status of all syncing users, like:

            user_id: {
                state: 'initial sync',
                stored_data: '12127227',
                stored_messages: '50000',
                status: '56%',
            }
            user_id: {
                state: 'poll',
                stored_data: '1000000000',
                stored_messages: '200000',
                status: '2013-06-08 14:00',
            }
        """
        if not self._sync:
            self._sync = zerorpc.Client(config.get('CRISPIN_SERVER_LOC', None))
        status = self._sync.status()
        user_ids = status.keys()
        with session_scope() as db_session:
            users = db_session.query(User).filter(User.id.in_(user_ids))
            for user in users:
                status[user.id]['stored_data'] = 0
                status[user.id]['stored_messages'] = 0
                for account in user.accounts:
                    status[user.id]['stored_data'] += \
                        total_stored_data(account.id, db_session)
                    status[user.id]['stored_messages'] += \
                        total_stored_messages(account.id, db_session)
            return status
Example #7
File: api.py Project: jre21/inbox
 def headers_for_message(self, message_id):
     # TODO[kavya]: Take namespace into account, currently doesn't matter
     # since one namespace only.
     with session_scope() as db_session:
         message = db_session.query(Message).filter(
             Message.id == message_id).one()
         return message.headers
Example #8
File: api.py Project: jre21/inbox
 def update_contact(self, contact_id, contact_data):
     """Update data for an existing contact."""
     with session_scope() as db_session:
         contact = db_session.query(Contact).filter_by(id=contact_id).one()
         contact.from_cereal(contact_data)
         log.info("Updated contact {0}".format(contact.id))
         return 'OK'
Example #9
File: api.py Project: jre21/inbox
 def body_for_message(self, message_id):
     # TODO: Take namespace into account, currently doesn't matter since
     # one namespace only.
     with session_scope() as db_session:
         message = db_session.query(Message).join(Message.parts) \
             .filter(Message.id == message_id).one()
         return {'data': message.prettified_body}
Example #10
def genericize_imapaccount():
    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Get data from columns-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id,
                                   ImapAccount_.imap_host).all()

    to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Rename table, add new columns.
    op.rename_table('imapaccount', 'account')
    op.add_column('account', sa.Column('type', sa.String(16)))

    # Create new table, insert data
    # The table
    op.create_table(
        'imapaccount', sa.Column('imap_host', sa.String(512)),
        sa.Column('id', sa.Integer()),
        sa.ForeignKeyConstraint(['id'], ['account.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))

    # The ad-hoc table for insert
    table_ = table('imapaccount', column('imap_host', sa.String()),
                   column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(table_, to_insert)

    # Drop columns now
    op.drop_column('account', 'imap_host')
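The "ad-hoc table for insert" seen above is the standard Alembic recipe: during a migration the ORM models may not match the live schema, so a lightweight sqlalchemy.sql.table() stub describing only the columns being written is handed to op.bulk_insert(). A minimal standalone sketch, where the account table and its columns are illustrative rather than taken from this project:

# Minimal sketch of Alembic's ad-hoc-table + bulk_insert recipe.
# The 'account' table and its columns are made up for illustration.
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import table, column


def upgrade():
    # Describe only the columns we intend to write; no ORM mapping needed.
    account = table('account',
                    column('id', sa.Integer),
                    column('email', sa.String))
    op.bulk_insert(account, [
        {'id': 1, 'email': 'a@example.com'},
        {'id': 2, 'email': 'b@example.com'},
    ])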
Example #12
    def start_sync(self, account_id=None):
        """ Starts all syncs if account_id not specified.
            If account_id doesn't exist, does nothing.
        """
        results = {}
        if account_id:
            account_id = int(account_id)
        with session_scope() as db_session:
            query = db_session.query(Account)
            if account_id is not None:
                query = query.filter_by(id=account_id)
            fqdn = socket.getfqdn()
            for acc in query:
                if acc.provider not in self.monitor_cls_for:
                    self.log.info('Inbox does not currently support {0}'
                                  .format(acc.provider))
                    continue
                self.log.info('Starting sync for account {0}'.format(
                    acc.email_address))
                if acc.sync_host is not None and acc.sync_host != fqdn:
                    results[acc.id] = \
                        'acc {0} is syncing on host {1}'.format(
                            acc.email_address, acc.sync_host)
                elif acc.id not in self.monitors:
                    try:
                        acc.sync_lock()

                        def update_status(account_id, state, status):
                            """ I really really wish I were a lambda """
                            folder, progress = status
                            self.statuses[account_id][folder] \
                                = (state, progress)
                            notify(account_id, state, status)

                        monitor = self.monitor_cls_for[acc.provider](
                            acc.id, acc.namespace.id, acc.email_address,
                            acc.provider, update_status)
                        self.monitors[acc.id] = monitor
                        monitor.start()
                        # For Gmail accounts, also start contacts sync
                        if acc.provider == 'Gmail':
                            contact_sync = ContactSync(acc.id)
                            self.contact_sync_monitors[acc.id] = contact_sync
                            contact_sync.start()
                        acc.sync_host = fqdn
                        db_session.add(acc)
                        db_session.commit()
                        results[acc.id] = 'OK sync started'
                    except Exception as e:
                        self.log.error(e.message)
                        results[acc.id] = \
                            'ERROR error encountered: {0}'.format(e)
                else:
                    results[acc.id] = 'OK sync already started'
        if account_id:
            if account_id in results:
                return results[account_id]
            else:
                return "OK no such user"
        return results
Example #13
 def stop_sync(self, account_id=None):
     """ Stops all syncs if account_id not specified.
         If account_id doesn't exist, does nothing.
     """
     results = {}
     if account_id:
         account_id = int(account_id)
     with session_scope() as db_session:
         query = db_session.query(ImapAccount)
         if account_id is not None:
             query = query.filter_by(id=account_id)
         fqdn = socket.getfqdn()
         for acc in query:
              if acc.id not in self.monitors or not acc.sync_active:
                  results[acc.id] = "OK sync stopped already"
                  continue
             try:
                 assert acc.sync_host == fqdn, "sync host FQDN doesn't match"
                 # XXX Can processing this command fail in some way?
                 self.monitors[acc.id].inbox.put_nowait("shutdown")
                 acc.sync_host = None
                 db_session.add(acc)
                 db_session.commit()
                 acc.sync_unlock()
                 del self.monitors[acc.id]
                 results[acc.id] = "OK sync stopped"
              except Exception as e:
                  results[acc.id] = "ERROR error encountered: {0}".format(e)
     if account_id:
         if account_id in results:
             return results[account_id]
         else:
             return "OK no such user"
     return results
Example #14
File: imap.py Project: cenk/inbox
 def _run(self):
     with session_scope() as db_session:
         try:
             foldersync = db_session.query(FolderSync).filter_by(
                     imapaccount_id=self.crispin_client.account_id,
                     folder_name=self.folder_name).one()
         except NoResultFound:
             foldersync = FolderSync(
                     imapaccount_id=self.crispin_client.account_id,
                     folder_name=self.folder_name)
             db_session.add(foldersync)
             db_session.commit()
         self.state = foldersync.state
         # NOTE: The parent ImapSyncMonitor handler could kill us at any
         # time if it receives a shutdown command. The shutdown command is
         # equivalent to ctrl-c.
         while True:
             try:
                 self.state = foldersync.state = \
                         self.state_handlers[foldersync.state](
                                 self.crispin_client, db_session, self.log,
                                 self.folder_name, self.shared_state)
             except UIDInvalid:
                 self.state = foldersync.state = self.state + ' uidinvalid'
             # State handlers are idempotent, so it's okay if we're killed
             # between the end of the handler and the commit.
             db_session.commit()
             if self.state == 'finish':
                 return
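The while loop above drives a small persistent state machine: each entry in state_handlers maps a state name to a function that performs that phase and returns the name of the next state, and the new state is committed after every transition so a restart resumes where it left off. A toy sketch of the same dispatch shape, with made-up states and handlers:

# Toy version of the state-handler dispatch above; the states, handlers,
# and ctx dict are made up for illustration.
def initial_sync(ctx):
    ctx['synced'] = True                  # ...download everything...
    return 'poll'


def poll(ctx):
    ctx['polls'] = ctx.get('polls', 0) + 1
    return 'poll' if ctx['polls'] < 3 else 'finish'

state_handlers = {'initial': initial_sync, 'poll': poll}

ctx = {}
state = 'initial'
while state != 'finish':
    state = state_handlers[state](ctx)
    # the real loop commits foldersync.state here, so handlers must be
    # idempotent up to this point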
Example #15
    def start_sync(self, account_id=None):
        """ Starts all syncs if account_id not specified.
            If account_id doesn't exist, does nothing.
        """
        results = {}
        if account_id:
            account_id = int(account_id)
        with session_scope() as db_session:
            query = db_session.query(Account)
            if account_id is not None:
                query = query.filter_by(id=account_id)
            fqdn = socket.getfqdn()
            for acc in query:
                if acc.provider not in self.monitor_cls_for:
                    self.log.info('Inbox does not currently support {0}'
                                  .format(acc.provider))
                    continue
                self.log.info('Starting sync for account {0}'
                              .format(acc.email_address))
                if acc.sync_host is not None and acc.sync_host != fqdn:
                    results[acc.id] = \
                        'acc {0} is syncing on host {1}'.format(
                            acc.email_address, acc.sync_host)
                elif acc.id not in self.monitors:
                    try:
                        acc.sync_lock()

                        def update_status(account_id, state, status):
                            """ I really really wish I were a lambda """
                            folder, progress = status
                            self.statuses.setdefault(account_id,
                                                     dict())[folder] \
                                = (state, progress)
                            notify(account_id, state, status)

                        monitor = self.monitor_cls_for[acc.provider](
                            acc.id, acc.namespace.id, acc.email_address,
                            acc.provider, update_status)
                        self.monitors[acc.id] = monitor
                        monitor.start()
                        # For Gmail accounts, also start contacts sync
                        if acc.provider == 'Gmail':
                            contact_sync = ContactSync(acc.id)
                            self.contact_sync_monitors[acc.id] = contact_sync
                            contact_sync.start()
                        acc.sync_host = fqdn
                        db_session.add(acc)
                        db_session.commit()
                        results[acc.id] = 'OK sync started'
                    except Exception as e:
                        self.log.error(e.message)
                        results[acc.id] = 'ERROR error encountered: {0}'.format(e)
                else:
                    results[acc.id] = 'OK sync already started'
        if account_id:
            if account_id in results:
                return results[account_id]
            else:
                return "OK no such user"
        return results
def upgrade():
    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine

    Base = declarative_base()
    Base.metadata.reflect(engine)

    class ImapUid(Base):
        __table__ = Base.metadata.tables['imapuid']

    print 'Deleting imapuid objects with NULL message_id...'

    with session_scope(versioned=False, ignore_soft_deletes=False) as session:
        session.query(ImapUid).filter_by(message_id=None).delete()
        session.commit()

    print 'Tightening NULL constraints...'

    op.alter_column('imapuid',
                    'message_id',
                    existing_type=sa.Integer(),
                    nullable=False)
    # unrelated to current bugs, but no reason this should be NULLable either
    op.alter_column('imapuid',
                    'msg_uid',
                    existing_type=sa.BigInteger(),
                    nullable=False)
Example #17
def user_console(user_email_address):
    with session_scope() as db_session:
        account = db_session.query(Account).filter_by(
            email_address=user_email_address).one()

        crispin_client = new_crispin(account.id, account.provider,
                                     conn_pool_size=1)
        with crispin_client.pool.get() as c:
            crispin_client.select_folder(crispin_client.folder_names(c)['all'],
                                         uidvalidity_cb(db_session, account),
                                         c)

            # Stay inside the pool block: `c` is only valid while checked out.
            server_uids = crispin_client.all_uids(c)

        banner = """
        You can access the crispin instance with the 'crispin_client' variable.
        You can access the IMAPClient connection with the 'c' variable.
        AllMail message UIDs are in 'server_uids'.
        You can refresh the session with 'refresh_crispin()'.

        IMAPClient docs are at:

            http://imapclient.readthedocs.org/en/latest/#imapclient-class-reference
        """

        IPython.embed(banner1=banner)
Example #20
File: imap.py Project: jre21/inbox
def check_new_uids(account_id, provider, folder_name, log, uid_download_stack,
                   poll_frequency, syncmanager_lock):
    """ Check for new UIDs and add them to the download stack.

    We do this by comparing local UID lists to remote UID lists, maintaining
    the invariant that (stack uids)+(local uids) == (remote uids).

    We also remove local messages that have disappeared from the remote, since
    it's totally probable that users will be archiving mail as the initial
    sync goes on.

    We grab a new IMAP connection from the pool for this to isolate its
    actions from whatever the main greenlet may be doing.

    Runs until killed. (Intended to be run in a greenlet.)
    """
    log.info("Spinning up new UID-check poller for {}".format(folder_name))
    # can't mix and match crispin clients when playing with different folders
    crispin_client = new_crispin(account_id, provider, conn_pool_size=1)
    with crispin_client.pool.get() as c:
        with session_scope() as db_session:
            crispin_client.select_folder(
                folder_name,
                uidvalidity_cb(db_session, crispin_client.account_id), c)
        while True:
            remote_uids = set(crispin_client.all_uids(c))
            # We lock this section to make sure no messages are being
            # downloaded while we make sure the queue is in a good state.
            with syncmanager_lock:
                with session_scope() as db_session:
                    local_uids = set(account.all_uids(
                        account_id, db_session, folder_name))
                    stack_uids = set(uid_download_stack.queue)
                    local_with_pending_uids = local_uids | stack_uids
                    deleted_uids = remove_deleted_uids(
                        account_id, db_session, log, folder_name,
                        local_uids, remote_uids, syncmanager_lock, c)
                    # XXX This double-grabs syncmanager_lock, does that cause
                    # a deadlock?
                    log.info("Removed {} deleted UIDs from {}".format(
                        len(deleted_uids), folder_name))

                # filter out messages that have disappeared on the remote side
                new_uid_download_stack = {u for u in uid_download_stack.queue
                                          if u in remote_uids}

                # add in any new uids from the remote
                for uid in remote_uids:
                    if uid not in local_with_pending_uids:
                        new_uid_download_stack.add(uid)

                uid_download_stack.queue = sorted(new_uid_download_stack,
                                                  key=int)
            sleep(poll_frequency)
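The reconciliation that maintains the docstring's invariant is plain set algebra: drop queued UIDs that have vanished from the remote, then queue remote UIDs that are neither stored locally nor already queued. Isolated from IMAP, with made-up UID values:

# The set logic behind check_new_uids(), with illustrative values.
remote_uids = {1, 2, 3, 5, 8}
local_uids = {1, 2}      # already downloaded
stack_uids = {3, 4}      # queued for download; 4 has vanished remotely

local_with_pending = local_uids | stack_uids
new_stack = {u for u in stack_uids if u in remote_uids}  # drops 4
new_stack |= remote_uids - local_with_pending            # adds 5 and 8

assert new_stack | local_uids == remote_uids             # the invariant
print sorted(new_stack, key=int)                         # [3, 5, 8]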
Example #21
def _syncback_action(fn, imapaccount_id, folder_name):
    """ `folder_name` is a Gmail folder name. """
    with session_scope() as db_session:
        account = db_session.query(ImapAccount).join(Namespace).filter_by(
            id=imapaccount_id).one()
        with writable_connection_pool(imapaccount_id).get() as crispin_client:
            crispin_client.select_folder(folder_name, uidvalidity_cb)
            fn(account, db_session, crispin_client)
Example #22
 def ns_all():
     """ Return all namespaces """
     # We do this outside the blueprint to support the case of an empty
     # public_id. However, this means the before_request isn't run, so we
     # need to make our own session.
     with session_scope() as db_session:
         namespaces = db_session.query(Namespace).all()
         result = jsonify(namespaces)
     return result
Example #26
def upgrade():

    from inbox.server.config import load_config
    load_config()
    from inbox.sqlalchemy.util import generate_public_id
    from inbox.server.models import session_scope

    # These all inherit HasPublicID
    from inbox.server.models.tables.base import (
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession, HasPublicID)

    classes = [
        Account, Block, Contact, Message, Namespace,
        SharedFolder, Thread, User, UserSession]

    for c in classes:
        assert issubclass(c, HasPublicID)
        print '[{0}] adding public_id column... '.format(c.__tablename__),
        sys.stdout.flush()
        op.add_column(c.__tablename__, sa.Column(
            'public_id', mysql.BINARY(16), nullable=False))

        print 'adding index... ',
        op.create_index(
            'ix_{0}_public_id'.format(c.__tablename__),
            c.__tablename__,
            ['public_id'],
            unique=False)

        print 'Done!'
        sys.stdout.flush()

    print 'Finished adding columns. \nNow generating public_ids'

    with session_scope() as db_session:
        count = 0
        for c in classes:
            garbage_collect()
            print '[{0}] Loading rows. '.format(c.__name__),
            sys.stdout.flush()
            print 'Generating public_ids',
            sys.stdout.flush()
            for r in db_session.query(c).yield_per(chunk_size):
                count += 1
                r.public_id = generate_public_id()
                if not count % chunk_size:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                    db_session.commit()
                    garbage_collect()
            sys.stdout.write(' Saving. ')
            sys.stdout.flush()
            db_session.commit()
            sys.stdout.write('Done!\n')
            sys.stdout.flush()
        print '\nUpgraded OK!\n'
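The detail worth copying from this migration is the commit cadence: yield_per() streams rows instead of loading them all at once, and the session is committed every chunk_size updates so memory stays bounded. The control flow, distilled with a stub session so it runs on its own:

# Chunked-commit cadence from the migration above, reduced to a runnable
# sketch; StubSession and the range() loop stand in for the real session
# and query(...).yield_per(chunk_size).
chunk_size = 3


class StubSession(object):
    def commit(self):
        print 'commit'

db_session = StubSession()
count = 0
for row in range(10):
    # ...mutate the row here...
    count += 1
    if count % chunk_size == 0:
        db_session.commit()   # flush a full chunk, keeping memory bounded
db_session.commit()           # commit the final partial chunk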
Example #27
 def set_min_processed_id(self, new_id):
     if new_id <= self.min_processed_id:
         return
     self.min_processed_id = new_id
     with session_scope() as db_session:
         stored_params = db_session.query(Webhook). \
             filter_by(id=self.id).one()
         stored_params.min_processed_id = self.min_processed_id
         db_session.commit()
Example #29
def set_remote_unread(account_id, thread_id, unread):
    def fn(account, db_session, crispin_client):
        g_thrid = _get_g_thrid(account.namespace.id, thread_id, db_session)
        crispin_client.set_unread(g_thrid, unread)

    with session_scope() as db_session:
        all_mail_folder_name = db_session.query(Account).filter(
            Account.id == account_id).one().all_folder.name
    return _syncback_action(fn, account_id, all_mail_folder_name)
Example #31
    def start_sync(self, account_id=None):
        """ Starts all syncs if account_id not specified.
            If account_id doesn't exist, does nothing.
        """
        results = {}
        if account_id:
            account_id = int(account_id)
        with session_scope() as db_session:
            query = db_session.query(ImapAccount)
            if account_id is not None:
                query = query.filter_by(id=account_id)
            fqdn = socket.getfqdn()
            for acc in query:
                # ONLY GMAIL CURRENTLY
                if acc.provider != 'Gmail':
                    self.log.info('Inbox currently supports Gmail only!')
                    continue

                self.log.info("Starting sync for account {0}" \
                        .format(acc.email_address))
                if acc.sync_host is not None and acc.sync_host != fqdn:
                    results[acc.id] = \
                            'acc {0} is syncing on host {1}'.format(
                                acc.email_address, acc.sync_host)
                elif acc.id not in self.monitors:
                    try:
                        acc.sync_lock()
                        def update_status(account_id, state, status):
                            """ I really really wish I were a lambda """
                            folder, progress = status
                            self.statuses.setdefault(account_id,
                                    dict())[folder] = (state, progress)
                            notify(account_id, state, status)

                        monitor = self.monitor_cls_for[acc.provider](acc.id,
                                acc.namespace.id, acc.email_address,
                                acc.provider, update_status)
                        self.monitors[acc.id] = monitor
                        monitor.start()
                        acc.sync_host = fqdn
                        db_session.add(acc)
                        db_session.commit()
                        results[acc.id] = "OK sync started"
                    except Exception as e:
                        self.log.error(e.message)
                        results[acc.id] = "ERROR error encountered"
                else:
                    results[acc.id] = "OK sync already started"
        if account_id:
            if account_id in results:
                return results[account_id]
            else:
                return "OK no such user"
        return results
Example #32
 def stop_hook(self, hook_public_id):
     self.log.info('Stopping hook with public id {}'.format(hook_public_id))
     with session_scope() as db_session:
         hook = db_session.query(Webhook). \
             filter_by(public_id=hook_public_id).one()
         hook.active = False
         for worker in self.workers[hook.namespace_id]:
             if worker.public_id == hook_public_id:
                 self.workers[hook.namespace_id].remove(worker)
                 del worker
                 return 'OK hook stopped'
Example #33
 def _load_hooks(self):
     """Load stored hook parameters from the database. Run once on
     startup."""
     with session_scope() as db_session:
         all_hooks = db_session.query(Webhook).filter_by(active=True).all()
         for hook in all_hooks:
             namespace_id = hook.namespace_id
             self.workers[namespace_id].add(WebhookWorker(hook))
         if all_hooks:
             self.minimum_id = min(hook.min_processed_id for hook in
                                   all_hooks)
Example #34
    def _set_account_info(self):
        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(self.account_id)

            # Refresh token if need be, for OAuthed accounts
            if AUTH_TYPES.get(account.provider) == 'OAuth':
                account = verify_imap_account(db_session, account)
                self.o_access_token = account.o_access_token

            self.email_address = account.email_address
            self.provider = account.provider
Example #36
def downgrade():
    # MOVE:

    with session_scope() as db_session:
        results = db_session.query(Message).all()
        for message in results:
            if message.from_addr:
                message.from_addr = message.from_addr[0]
            if message.sender_addr:
                message.sender_addr = message.sender_addr[0]
        db_session.commit()
Example #39
File: gmail.py Project: jre21/inbox
def _syncback_action(fn, imapaccount_id, folder_name):
    """ `folder_name` is an Inbox folder name, not a Gmail folder name. """
    with session_scope() as db_session:
        account = db_session.query(ImapAccount).join(Namespace).filter_by(
                id=imapaccount_id).one()
        crispin_client = new_crispin(account.id, account.provider,
                conn_pool_size=1, readonly=False)
        with crispin_client.pool.get() as c:
            crispin_client.select_folder(
                    _translate_folder_name(folder_name, crispin_client, c),
                    uidvalidity_cb, c)
            fn(account, db_session, crispin_client, c)
Example #40
File: api.py Project: jre21/inbox
 def add_contact(self, account_id, contact_info):
     """Add a new contact to the specified IMAP account. Returns the ID of
     the added contact."""
     with session_scope() as db_session:
         contact = Contact(account_id=account_id, source='local',
                           provider_name=INBOX_PROVIDER_NAME,
                           uid=uuid.uuid4())
         contact.from_cereal(contact_info)
         db_session.add(contact)
         db_session.commit()
         log.info("Added contact {0}".format(contact.id))
         return contact.id
Example #41
File: api.py Project: jre21/inbox
    def threads_for_folder(self, folder_name):
        """ Returns all threads in a given folder, together with associated
            messages. Supports shared folders and TODO namespaces as well, if
            caller auths with that namespace.

            Note that this may be more messages than included in the IMAP
            folder, since we fetch the full thread if one of the messages is in
            the requested folder.
        """
        with session_scope() as db_session:
            return [t.cereal() for t in threads_for_folder(
                self.namespace.id, db_session, folder_name)]
Example #42
def remote_archive(imapaccount_id, thread_id):
    def fn(account, db_session, crispin_client):
        g_thrid = _get_g_thrid(account.namespace.id, thread_id, db_session)
        return _archive(g_thrid, crispin_client)

    with session_scope() as db_session:
        inbox_folder = db_session.query(ImapAccount)\
            .join(ImapAccount.inbox_folder).filter(
                ImapAccount.id == imapaccount_id).one().inbox_folder
        assert inbox_folder is not None
        inbox_folder_name = inbox_folder.name

    return _syncback_action(fn, imapaccount_id, inbox_folder_name)
Example #44
def rerank_contacts():
    with session_scope() as db_session:
        # Delete existing signals.
        signals = db_session.query(SearchSignal).all()
        for signal in signals:
            db_session.delete(signal)
        db_session.commit()
        messages = db_session.query(Message).all()
        for message in messages:
            account_id = message.namespace.account_id
            update_contacts(db_session, account_id, message)

        db_session.commit()
def upgrade():

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope
    from inbox.server.models.tables.base import Message

    with session_scope() as db_session:
        results = db_session.query(Message).all()
        for message in results:
            message.from_addr = [message.from_addr]
            message.sender_addr = [message.sender_addr]
        db_session.commit()
Example #46
    def _new_connection(self):
        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(self.account_id)

            if account.provider == 'Gmail':
                conn = verify_gmail_account(account)

            elif account.provider == 'Yahoo':
                conn = verify_yahoo_account(account)

            # Reads from db, therefore shouldn't get here
            else:
                raise NotImplementedError(
                    'Unsupported provider: {0}'.format(account.provider))

        return new_crispin(self.account_id, self.provider, conn, self.readonly)
Example #47
def upgrade():
    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Folder(Base):
        __table__ = Base.metadata.tables['folder']

    with session_scope(ignore_soft_deletes=False,
                       versioned=False) as db_session:
        for folder in db_session.query(Folder).filter(Folder.name == 'Inbox'):
            folder.public_id = 'inbox'
            folder.exposed_name = 'inbox'
        db_session.commit()
Example #48
 def start_hook(self, hook_public_id):
     self.log.info('Starting hook with public id {}'.format(hook_public_id))
     with session_scope() as db_session:
         hook = db_session.query(Webhook). \
             filter_by(public_id=hook_public_id).one()
         if hook.active:
             # Hook is already running
             return 'OK hook already running'
         hook.min_processed_id = self.minimum_id
         hook.active = True
         namespace_id = hook.namespace_id
         worker = WebhookWorker(hook)
         self.workers[namespace_id].add(worker)
         if not worker.started:
             worker.start()
         db_session.commit()
         return 'OK hook started'
Example #49
File: gmail.py Project: caitp/inbox
def verify_gmail_account(account):
    try:
        conn = IMAPClient(IMAP_HOST, use_uid=True, ssl=True)
    except IMAPClient.Error as e:
        raise socket.error(str(e))

    conn.debug = False
    try:
        conn.oauth2_login(account.email_address, account.o_access_token)
    except IMAPClient.Error as e:
        if str(e) == '[ALERT] Invalid credentials (Failure)':
            # maybe refresh the access token
            with session_scope() as db_session:
                account = verify_imap_account(db_session, account)
                conn.oauth2_login(account.email_address,
                                  account.o_access_token)
        else:
            # don't silently return an unauthenticated connection
            raise

    return conn
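The control flow here is retry-once-after-refresh: the first oauth2_login() uses the stored access token, and only the specific invalid-credentials error triggers a token refresh and a single second attempt. The same shape as a self-contained toy, with every name made up:

# Toy retry-once-after-refresh flow; AuthError, login(), and
# refresh_credentials() are all made up for illustration.
class AuthError(Exception):
    pass


def login(token):
    if token != 'fresh':
        raise AuthError('invalid credentials')


def refresh_credentials():
    return 'fresh'

token = 'stale'
try:
    login(token)
except AuthError:
    token = refresh_credentials()   # refresh once, then retry
    login(token)                    # a second failure propagates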
Example #50
    def register_hook(self, namespace_id, parameters):
        """Register a new webhook.

        Parameters
        ----------
        namespace_id: int
            ID for the namespace to apply the webhook on.
        parameters: dictionary
            Dictionary of the hook parameters.
        """
        with session_scope() as db_session:
            lens = Lens(
                namespace_id=namespace_id,
                subject=parameters.get('subject'),
                thread_public_id=parameters.get('thread'),
                to_addr=parameters.get('to'),
                from_addr=parameters.get('from'),
                cc_addr=parameters.get('cc'),
                bcc_addr=parameters.get('bcc'),
                any_email=parameters.get('any_email'),
                started_before=parameters.get('started_before'),
                started_after=parameters.get('started_after'),
                last_message_before=parameters.get('last_message_before'),
                last_message_after=parameters.get('last_message_after'),
                filename=parameters.get('filename'))

            hook = Webhook(
                namespace_id=namespace_id,
                lens=lens,
                callback_url=parameters.get('callback_url'),
                failure_notify_url=parameters.get('failure_notify_url'),
                include_body=parameters.get('include_body', False),
                active=parameters.get('active', True),
                min_processed_id=self.minimum_id)

            db_session.add(hook)
            db_session.add(lens)
            db_session.commit()
            if hook.active:
                worker = WebhookWorker(hook)
                self.workers[namespace_id].add(worker)
                if not worker.started:
                    worker.start()
            return cereal(hook, pretty=True)
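Going by the docstring, callers pass the lens criteria and the delivery options in one flat dictionary; any key left out simply defaults to None through parameters.get(). A hypothetical invocation, where the service instance, namespace id, and every value are made up:

# Hypothetical register_hook() call; all values are illustrative.
parameters = {
    'callback_url': 'https://example.com/hook',
    'from': 'alice@example.com',
    'subject': 'invoice',
    'include_body': True,
}
# hook_service.register_hook(1, parameters)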
Example #51
def upgrade():
    op.add_column(
        'message',
        sa.Column('is_read',
                  sa.Boolean(),
                  server_default=sa.sql.expression.false(),
                  nullable=False))

    op.alter_column('usertagitem',
                    'created_at',
                    existing_type=mysql.DATETIME(),
                    nullable=False)
    op.alter_column('usertagitem',
                    'updated_at',
                    existing_type=mysql.DATETIME(),
                    nullable=False)

    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    class ImapUid(Base):
        __table__ = Base.metadata.tables['imapuid']
        message = relationship('Message',
                               backref=backref(
                                   'imapuids',
                                   primaryjoin='and_('
                                   'Message.id == ImapUid.message_id, '
                                   'ImapUid.deleted_at == None)'),
                               primaryjoin='and_('
                               'ImapUid.message_id == Message.id,'
                               'Message.deleted_at == None)')

    with session_scope(versioned=False,
                       ignore_soft_deletes=False) as db_session:
        for uid in db_session.query(ImapUid).yield_per(500):
            if uid.is_seen:
                uid.message.is_read = True

        db_session.commit()
Example #52
    def _set_account_info(self):
        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(self.account_id)

            self.email_address = account.email_address
            self.provider = account.provider
            self.full_name = account.full_name if account.provider == 'Gmail'\
                else ''
            self.sent_folder = account.sent_folder.name

            self.auth_type = AUTH_TYPES.get(account.provider)

            if self.auth_type == 'OAuth':
                # Refresh OAuth token if need be
                account = verify_imap_account(db_session, account)
                self.o_access_token = account.o_access_token
            else:
                assert self.auth_type == 'Password'
                self.password = account.password
Example #53
 def stop_sync(self, account_id=None):
     """ Stops all syncs if account_id not specified.
         If account_id doesn't exist, does nothing.
     """
     results = {}
     if account_id:
         account_id = int(account_id)
     with session_scope() as db_session:
         query = db_session.query(Account)
         if account_id is not None:
             query = query.filter_by(id=account_id)
         fqdn = socket.getfqdn()
         for acc in query:
              if acc.id not in self.monitors or not acc.sync_active:
                  results[acc.id] = "OK sync stopped already"
                  continue
             try:
                 if acc.sync_host is None:
                     results[acc.id] = 'Sync not running'
                     continue
                 assert acc.sync_host == fqdn, \
                     "sync host FQDN doesn't match: {0} <--> {1}" \
                     .format(acc.sync_host, fqdn)
                 # XXX Can processing this command fail in some way?
                 self.monitors[acc.id].inbox.put_nowait("shutdown")
                 acc.sync_host = None
                 db_session.add(acc)
                 db_session.commit()
                 acc.sync_unlock()
                 del self.monitors[acc.id]
                 # Also stop contacts sync (only relevant for Gmail
                 # accounts)
                 if acc.id in self.contact_sync_monitors:
                     del self.contact_sync_monitors[acc.id]
                 results[acc.id] = "OK sync stopped"
             except Exception as e:
                 results[acc.id] = 'ERROR error encountered: {0}'.format(e)
     if account_id:
         if account_id in results:
             return results[account_id]
         else:
             return "OK no such user"
     return results
Example #54
def downgrade_imapaccount():
    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Get data from table-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id,
                                   ImapAccount_.imap_host).all()
    to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Drop columns, add new columns + insert data
    op.drop_column('account', 'type')
    op.add_column('account', sa.Column('imap_host', sa.String(512)))

    table_ = table('account', column('imap_host', sa.String(512)),
                   column('id', sa.Integer))

    for r in to_insert:
        op.execute(table_.update().where(table_.c.id == r['id']).values(
            {'imap_host': r['imap_host']}))

    # Table switch-over
    op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey')
    op.drop_constraint('uidvalidity_ibfk_1', 'uidvalidity', type_='foreignkey')
    op.drop_constraint('foldersync_ibfk_1', 'foldersync', type_='foreignkey')
    op.drop_table('imapaccount')

    op.rename_table('account', 'imapaccount')

    op.create_foreign_key('imapuid_ibfk_1',
                          'imapuid',
                          'imapaccount', ['imapaccount_id'], ['id'],
                          ondelete='CASCADE')
    op.create_foreign_key('uidvalidity_ibfk_1',
                          'uidvalidity',
                          'imapaccount', ['imapaccount_id'], ['id'],
                          ondelete='CASCADE')
    op.create_foreign_key('foldersync_ibfk_1',
                          'foldersync',
                          'imapaccount', ['account_id'], ['id'],
                          ondelete='CASCADE')
Example #55
 def _get_google_client(self):
     """Return the Google API client."""
     # TODO(emfree) figure out a better strategy for refreshing OAuth
     # credentials as needed
     with session_scope() as db_session:
         try:
             account = db_session.query(ImapAccount).get(self.account_id)
             account = verify_imap_account(db_session, account)
             two_legged_oauth_token = gdata.gauth.OAuth2Token(
                 client_id=GOOGLE_OAUTH_CLIENT_ID,
                 client_secret=GOOGLE_OAUTH_CLIENT_SECRET,
                 scope=OAUTH_SCOPE,
                 user_agent=SOURCE_APP_NAME,
                 access_token=account.o_access_token,
                 refresh_token=account.o_refresh_token)
             google_client = gdata.contacts.client.ContactsClient(
                 source=SOURCE_APP_NAME)
             google_client.auth_token = two_legged_oauth_token
             return google_client
         except gdata.client.BadAuthentication:
             self.log.error('Invalid user credentials given')
             return None
Example #56
def test_queue_running(db):
    """ Just the very minimal basics for now: makes sure that the methods run
        without raising an exception. You can use rq-dashboard and a Gmail
        browser window to look in more depth. We'll want to add some
        automatic verification of the behaviour here eventually (see the
        previous tests), but for now I'm leaving it lean and fast.
    """
    from inbox.server.actions.base import (archive, move, copy, delete,
                                           rqworker, register_backends)
    from inbox.server.models.tables.imap import ImapAccount
    from inbox.server.models import session_scope
    register_backends()

    account = db.session.query(ImapAccount).get(ACCOUNT_ID)

    with session_scope() as db_session:
        # "Tips for using Gmail" thread (avoiding all the "Postel lives!" ones)
        archive(db_session, ACCOUNT_ID, 8)
        move(db_session, ACCOUNT_ID, 8, account.all_folder.name,
             account.inbox_folder.name)
    # process actions queue
    rqworker(burst=True)
Example #57
def downgrade_imapthread():
    class ImapThread_(Base):
        __table__ = Base.metadata.tables['imapthread']

    # Get data from table-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapThread_.id, ImapThread_.g_thrid).all()
    to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results]

    # Drop columns, add new columns + insert data
    op.drop_column('thread', 'type')
    op.add_column(
        'thread',
        sa.Column('g_thrid', sa.BigInteger(), nullable=True, index=True))
    table_ = table('thread', column('g_thrid', sa.BigInteger),
                   column('id', sa.Integer))

    for r in to_insert:
        op.execute(table_.update().where(table_.c.id == r['id']).values(
            {'g_thrid': r['g_thrid']}))

    # Drop table
    op.drop_table('imapthread')
Example #58
def upgrade():

    from inbox.server.config import load_config
    load_config()

    from inbox.server.models import session_scope, Session
    from inbox.server.models.ignition import engine

    from inbox.server.models.tables.base import (Part, Namespace, Message,
                                                 Thread)
    from inbox.sqlalchemy.util import JSON

    print 'Creating table for parts...'
    op.create_table(
        'part', sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('message_id', sa.Integer(), nullable=True),
        sa.Column('walk_index', sa.Integer(), nullable=True),
        sa.Column('content_disposition',
                  sa.Enum('inline', 'attachment'),
                  nullable=True),
        sa.Column('content_id', sa.String(length=255), nullable=True),
        sa.Column('misc_keyval', JSON(), nullable=True),
        sa.Column('is_inboxapp_attachment',
                  sa.Boolean(),
                  server_default=sa.sql.expression.false(),
                  nullable=True),
        sa.ForeignKeyConstraint(['id'], ['block.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['message_id'], ['message.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('message_id', 'walk_index'))

    print 'Reflecting old block table schema'
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Block_(Base):  # old schema, reflected from database table
        __table__ = Base.metadata.tables['block']

    print 'Adding namespace_id column to blocks ',
    op.add_column(u'block',
                  sa.Column('namespace_id', sa.Integer(), nullable=False))

    print 'Migrating from blocks to parts'
    new_parts = []
    with session_scope() as db_session:
        for block in db_session.query(Block_).yield_per(chunk_size):

            # Move relevant fields
            p = Part()
            p.size = block.size
            p.data_sha256 = block.data_sha256
            p.message_id = block.message_id
            p.walk_index = block.walk_index
            p.content_disposition = block.content_disposition
            p.content_id = block.content_id
            p.misc_keyval = block.misc_keyval
            p.is_inboxapp_attachment = block.is_inboxapp_attachment

            old_namespace = db_session.query(Namespace) \
                .join(Message.thread, Thread.namespace) \
                .filter(Message.id == block.message_id).one()
            p.namespace_id = old_namespace.id

            # Commit after column modifications
            new_parts.append(p)

        print 'Deleting old blocks (now parts)... ',
        db_session.query(Block_).delete()
        db_session.commit()
        print 'Done!'

    print 'Removing `message_id` constraint from block'
    op.drop_constraint('block_ibfk_1', 'block', type_='foreignkey')

    print 'Creating foreign key for block -> namespace on block'
    op.create_foreign_key('block_ibfk_1',
                          'block',
                          'namespace', ['namespace_id'], ['id'],
                          ondelete='CASCADE')

    print 'Dropping old block columns which are now in part'
    op.drop_column(u'block', u'walk_index')
    op.drop_column(u'block', u'content_disposition')
    op.drop_column(u'block', u'misc_keyval')
    op.drop_column(u'block', u'content_id')
    op.drop_column(u'block', u'is_inboxapp_attachment')
    op.drop_constraint(u'message_id', 'block', type_='unique')
    op.drop_column(u'block', u'message_id')

    # Note: here we use the regular database session, since the transaction
    # log requires the `namespace` property on objects. We've set the
    # `namespace_id` foreign key, but need to commit the object before the
    # SQLAlchemy reference is valid.
    no_tx_session = Session(autoflush=True, autocommit=False)
    no_tx_session.add_all(new_parts)
    no_tx_session.commit()

    print 'Done migrating blocks to parts!'
Example #59
def upgrade():

    from inbox.server.config import load_config
    load_config()
    from inbox.server.models import session_scope
    from inbox.server.models.ignition import engine
    from inbox.server.models.tables.imap import ImapAccount
    import inbox.server.auth.gmail as gmail

    # Assert we have the dump file
    if not os.path.isfile(SQL_DUMP_FILENAME):
        print "Can't find old user SQL dump at {0}...\nMigration no users."\
            .format(SQL_DUMP_FILENAME)
        return

    # Import into the `imapaccount_old` table
    with open(SQL_DUMP_FILENAME, 'r') as f:
        print 'Importing old account data...',
        op.execute(f.read())
        print 'OK!'

    Base = declarative_base()
    Base.metadata.reflect(engine)

    class ImapAccount_Old(Base):
        __table__ = Base.metadata.tables['imapaccount_old']

    with session_scope() as db_session:
        migrated_accounts = []

        for acct in db_session.query(ImapAccount_Old):
            print 'Importing {0}'.format(acct.email_address)

            existing_account = db_session.query(ImapAccount)\
                .filter_by(email_address=acct.email_address)
            if existing_account.count() > 0:
                print 'Already have account for {0}'.format(acct.email_address)
                continue

            # Create a mock OAuth response using data from the old table
            mock_response = dict(email=acct.email_address,
                                 issued_to=acct.o_token_issued_to,
                                 user_id=acct.o_user_id,
                                 access_token=acct.o_access_token,
                                 id_token=acct.o_id_token,
                                 expires_in=acct.o_expires_in,
                                 access_type=acct.o_access_type,
                                 token_type=acct.o_token_type,
                                 audience=acct.o_audience,
                                 scope=acct.o_scope,
                                 refresh_token=acct.o_refresh_token,
                                 verified_email=acct.o_verified_email)

            new_account = gmail.create_account(db_session, acct.email_address,
                                               mock_response)

            # Note that this doesn't verify **anything** about the account.
            # We're just doing the migration now
            db_session.add(new_account)
            db_session.commit()
            migrated_accounts.append(new_account)

        print '\nDone! Imported {0} accounts.'.format(len(migrated_accounts))
        print '\nNow verifying refresh tokens...\n'

        verified_accounts = []
        for acct in migrated_accounts:
            try:
                print 'Verifying {0}... '.format(acct.email_address),
                gmail.verify_account(db_session, acct)
                verified_accounts.append(acct)
                print 'OK!'
            except Exception as e:
                print 'FAILED!', e

        print 'Done! Verified {0} of {1}'.format(len(verified_accounts),
                                                 len(migrated_accounts))