def upgrade():
    """Add a nullable JSON ``_sync_status`` column to the folder-sync tables."""
    json_column = sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            nullable=True)
    op.add_column('foldersync', json_column)

    # The EAS table only exists in some deployments, so guard on the
    # reflected metadata before altering it.
    if 'easfoldersync' in Base.metadata.tables:
        op.add_column('easfoldersync',
                      sa.Column('_sync_status',
                                MutableDict.as_mutable(JSON()),
                                nullable=True))
def upgrade():
    """Add the nullable ``_sync_status`` JSON column to folder-sync tables."""
    for table in ('foldersync', 'easfoldersync'):
        # 'easfoldersync' is only present in EAS-enabled deployments.
        if table == 'easfoldersync' and table not in Base.metadata.tables:
            continue
        op.add_column(
            table,
            sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                      nullable=True))
def upgrade():
    """Add ``_sync_status`` JSON columns, reflecting the live schema first."""
    from inbox.ignition import main_engine
    from inbox.sqlalchemy_ext.util import JSON, MutableDict

    # Reflect the current database so we can detect optional tables.
    engine = main_engine(pool_size=1, max_overflow=0)
    reflected = declarative_base()
    reflected.metadata.reflect(engine)

    op.add_column(
        'foldersync',
        sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                  nullable=True))

    # Only EAS-enabled deployments have this table.
    if 'easfoldersync' in reflected.metadata.tables:
        op.add_column(
            'easfoldersync',
            sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                      nullable=True))
def upgrade():
    """Move per-account sync timestamps into a JSON ``_sync_status`` column.

    Adds ``account._sync_status``, backfills it from the legacy
    ``sync_start_time`` / ``sync_end_time`` columns, then drops those
    columns.
    """
    # Local imports keep the migration self-contained; grouped here instead
    # of being scattered through the function body as before.
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.sqlalchemy_ext.util import JSON, MutableDict

    engine = main_engine(pool_size=1, max_overflow=0)

    op.add_column(
        "account",
        sa.Column(
            "_sync_status", MutableDict.as_mutable(JSON()), default={}, nullable=True
        ),
    )

    # Reflect the live schema so we can query 'account' without the ORM model.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Account(Base):
        __table__ = Base.metadata.tables["account"]

    # Backfill the new JSON column from the legacy timestamp columns.
    with session_scope(versioned=False) as db_session:
        for acct in db_session.query(Account):
            d = dict(
                sync_start_time=str(acct.sync_start_time),
                sync_end_time=str(acct.sync_end_time),
            )
            acct._sync_status = json_util.dumps(d)

        db_session.commit()

    op.drop_column("account", "sync_start_time")
    op.drop_column("account", "sync_end_time")
def upgrade():
    """Fold sync start/end timestamps into the new JSON ``_sync_status``."""
    from sqlalchemy.ext.declarative import declarative_base

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.sqlalchemy_ext.util import JSON, MutableDict

    engine = main_engine(pool_size=1, max_overflow=0)

    op.add_column('account',
                  sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            default={}, nullable=True))

    reflected_base = declarative_base()
    reflected_base.metadata.reflect(engine)

    class Account(reflected_base):
        __table__ = reflected_base.metadata.tables['account']

    # Copy the legacy timestamp columns into the JSON blob before they are
    # dropped below.
    with session_scope(versioned=False,
                       ignore_soft_deletes=False) as db_session:
        for acct in db_session.query(Account):
            acct._sync_status = json_util.dumps({
                'sync_start_time': str(acct.sync_start_time),
                'sync_end_time': str(acct.sync_end_time),
            })

        db_session.commit()

    op.drop_column('account', 'sync_start_time')
    op.drop_column('account', 'sync_end_time')
def upgrade():
    """Backfill ``_sync_status`` from legacy sync timestamps, then drop them."""
    op.add_column('account',
                  sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            default={}, nullable=True))

    base = declarative_base()
    base.metadata.reflect(engine)

    class Account(base):
        __table__ = base.metadata.tables['account']

    with session_scope(versioned=False,
                       ignore_soft_deletes=False) as db_session:
        for acct in db_session.query(Account):
            status = {'sync_start_time': str(acct.sync_start_time),
                      'sync_end_time': str(acct.sync_end_time)}
            acct._sync_status = json_util.dumps(status)

        db_session.commit()

    # The timestamps now live inside _sync_status; drop the old columns.
    for legacy_column in ('sync_start_time', 'sync_end_time'):
        op.drop_column('account', legacy_column)
# Example no. 7 (score: 0)
class ImapFolderSyncStatus(MailSyncBase, HasRunState, UpdatedAtMixin,
                           DeletedAtMixin):
    """ Per-folder status state saving for IMAP folders. """
    account_id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'),
                        nullable=False)
    account = relationship(ImapAccount,
                           backref=backref('foldersyncstatuses',
                                           passive_deletes=True))

    folder_id = Column(ForeignKey('folder.id', ondelete='CASCADE'),
                       nullable=False)
    # We almost always need the folder name too, so eager load by default.
    folder = relationship('Folder', lazy='joined', backref=backref(
        'imapsyncstatus', uselist=False, passive_deletes=True))

    # see state machine in mailsync/backends/imap/imap.py
    state = Column(Enum('initial', 'initial uidinvalid',
                        'poll', 'poll uidinvalid', 'finish'),
                   server_default='initial', nullable=False)

    # stats on messages downloaded etc.
    _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True)

    @property
    def metrics(self):
        """Return the folder name/state merged with the stored sync metrics."""
        status = dict(name=self.folder.name, state=self.state)
        status.update(self._metrics or {})

        return status

    def start_sync(self):
        """Reset metrics for a fresh run, discarding any previous stats."""
        self._metrics = dict(run_state='running',
                             sync_start_time=datetime.utcnow())

    def stop_sync(self):
        """Mark the sync stopped and stamp the end time."""
        self._metrics['run_state'] = 'stopped'
        self._metrics['sync_end_time'] = datetime.utcnow()

    @property
    def is_killed(self):
        return self._metrics.get('run_state') == 'killed'

    def update_metrics(self, metrics):
        """Merge `metrics` (a dict restricted to known keys) into `_metrics`.

        Raises AssertionError if `metrics` is not a dict or contains an
        unexpected key.
        """
        sync_status_metrics = ['remote_uid_count', 'delete_uid_count',
                               'update_uid_count', 'download_uid_count',
                               'uid_checked_timestamp',
                               'num_downloaded_since_timestamp',
                               'queue_checked_at', 'percent']

        assert isinstance(metrics, dict)
        # Iterate the dict directly instead of the Python-2-only
        # `iterkeys()`, so this runs under Python 3 as well.
        for k in metrics:
            assert k in sync_status_metrics, k

        if self._metrics is not None:
            self._metrics.update(metrics)
        else:
            self._metrics = metrics

    @property
    def sync_enabled(self):
        # sync is enabled if the folder's run bit is set, and the account's
        # run bit is set. (this saves us needing to reproduce account-state
        # transition logic on the folder level, and gives us a comparison bit
        # against folder heartbeats.)
        return self.sync_should_run and self.account.sync_should_run

    __table_args__ = (UniqueConstraint('account_id', 'folder_id'),)
# Example no. 8 (score: 0)
class Account(MailSyncBase, HasPublicID, HasEmailAddress, HasRunState,
              HasRevisions):
    """An email account: provider identity, sync flags and live sync status."""
    API_OBJECT_NAME = 'account'

    @property
    def provider(self):
        """
        A constant, unique lowercase identifier for the account provider
        (e.g., 'gmail', 'eas'). Subclasses should override this.

        """
        raise NotImplementedError

    @property
    def verbose_provider(self):
        """
        A detailed identifier for the account provider
        (e.g., 'gmail', 'office365', 'outlook').
        Subclasses may override this.

        """
        return self.provider

    @property
    def category_type(self):
        """
        Whether the account is organized by folders or labels
        ('folder'/ 'label'), depending on the provider.
        Subclasses should override this.

        """
        raise NotImplementedError

    @property
    def auth_handler(self):
        from inbox.auth.base import handler_from_provider
        return handler_from_provider(self.provider)

    @property
    def provider_info(self):
        return provider_info(self.provider, self.email_address)

    @property
    def thread_cls(self):
        # Imported lazily to avoid a circular model import.
        from inbox.models.thread import Thread
        return Thread

    # The default phrase used when sending mail from this account.
    name = Column(String(256), nullable=False, server_default='')

    # If True, throttle initial sync to reduce resource load
    throttled = Column(Boolean, server_default=false())

    # if True we sync contacts/events/email
    # NOTE: these columns are meaningless for EAS accounts
    sync_email = Column(Boolean, nullable=False, default=True)
    sync_contacts = Column(Boolean, nullable=False, default=False)
    sync_events = Column(Boolean, nullable=False, default=False)

    last_synced_contacts = Column(DateTime, nullable=True)

    # DEPRECATED
    last_synced_events = Column(DateTime, nullable=True)

    emailed_events_calendar_id = Column(BigInteger,
                                        ForeignKey('calendar.id',
                                                   ondelete='SET NULL',
                                                   use_alter=True,
                                                   name='emailed_events_cal'),
                                        nullable=True)

    _emailed_events_calendar = relationship(
        'Calendar',
        post_update=True,
        foreign_keys=[emailed_events_calendar_id])

    def create_emailed_events_calendar(self):
        """Lazily create the read-only 'Emailed events' calendar."""
        if not self._emailed_events_calendar:
            calname = "Emailed events"
            cal = Calendar(namespace=self.namespace,
                           description=calname,
                           uid='inbox',
                           name=calname,
                           read_only=True)
            self._emailed_events_calendar = cal

    @property
    def emailed_events_calendar(self):
        self.create_emailed_events_calendar()
        return self._emailed_events_calendar

    @emailed_events_calendar.setter
    def emailed_events_calendar(self, cal):
        self._emailed_events_calendar = cal

    sync_host = Column(String(255), nullable=True)

    # current state of this account
    state = Column(Enum('live', 'down', 'invalid'), nullable=True)

    # Based on account status, should the sync be running?
    # (Note, this is stored via a mixin.)
    # This is set to false if:
    #  - Account credentials are invalid (see mark_invalid())
    #  - External factors no longer require this account to sync
    # The value of this bit should always equal the AND value of all its
    # folders and heartbeats.

    @property
    def sync_enabled(self):
        return self.sync_should_run

    sync_state = Column(Enum('running', 'stopped', 'killed', 'invalid',
                             'connerror'),
                        nullable=True)

    _sync_status = Column(MutableDict.as_mutable(JSON),
                          default={},
                          nullable=True)

    @property
    def sync_status(self):
        """Identity plus sync state, merged with the stored status dict."""
        d = dict(id=self.id,
                 email=self.email_address,
                 provider=self.provider,
                 is_enabled=self.sync_enabled,
                 state=self.sync_state,
                 sync_host=self.sync_host)
        d.update(self._sync_status or {})

        return d

    @property
    def sync_error(self):
        return self._sync_status.get('sync_error')

    @property
    def initial_sync_start(self):
        """Earliest folder initial-sync start, or None if any is unset."""
        if len(self.folders) == 0 or \
           any([f.initial_sync_start is None for f in self.folders]):
            return None
        return min([f.initial_sync_start for f in self.folders])

    @property
    def initial_sync_end(self):
        """Latest folder initial-sync end, or None if any is unset."""
        if len(self.folders) == 0 \
           or any([f.initial_sync_end is None for f in self.folders]):
            return None
        return max([f.initial_sync_end for f in self.folders])

    @property
    def initial_sync_duration(self):
        """Seconds the initial sync took, or None if it hasn't finished."""
        if not self.initial_sync_start or not self.initial_sync_end:
            return None
        # Fixed: previously computed end - end, which always yielded 0.0.
        return (self.initial_sync_end -
                self.initial_sync_start).total_seconds()

    def update_sync_error(self, error=None):
        """Record `error` (or clear it with None) in the status dict."""
        self._sync_status['sync_error'] = error

    def sync_started(self):
        """
        Record transition to started state. Should be called after the
        sync is actually started, not when the request to start it is made.

        """
        current_time = datetime.utcnow()

        # Never run before (vs restarting stopped/killed)
        if self.sync_state is None and (
                not self._sync_status
                or self._sync_status.get('sync_end_time') is None):
            self._sync_status['original_start_time'] = current_time

        self._sync_status['sync_start_time'] = current_time
        self._sync_status['sync_end_time'] = None
        self._sync_status['sync_error'] = None

        self.sync_state = 'running'

    def enable_sync(self, sync_host=None):
        """ Tell the monitor that this account should be syncing. """
        self.sync_should_run = True
        if sync_host is not None:
            self.sync_host = sync_host

    def disable_sync(self, reason):
        """ Tell the monitor that this account should stop syncing. """
        self.sync_should_run = False
        self._sync_status['sync_disabled_reason'] = reason
        self._sync_status['sync_disabled_on'] = datetime.utcnow()
        # Record which system user flipped the switch, for auditability.
        self._sync_status['sync_disabled_by'] = os.environ.get(
            'USER', 'unknown')

    def mark_invalid(self, reason='invalid credentials', scope='mail'):
        """
        In the event that the credentials for this account are invalid,
        update the status and sync flag accordingly. Should only be called
        after trying to re-authorize / get new token.

        """
        if scope == 'calendar':
            self.sync_events = False
        elif scope == 'contacts':
            self.sync_contacts = False
        else:
            self.disable_sync(reason)
            self.sync_state = 'invalid'

    def mark_deleted(self):
        """
        Soft-delete the account.
        """
        self.disable_sync('account deleted')

    def sync_stopped(self, reason=None):
        """
        Record transition to stopped state. Should be called after the
        sync is actually stopped, not when the request to stop it is made.

        """
        if self.sync_state == 'running':
            self.sync_state = 'stopped'
        self.sync_host = None
        self._sync_status['sync_end_time'] = datetime.utcnow()

    def kill_sync(self, error=None):
        # Don't disable sync: syncs are not killed on purpose.
        self.sync_state = 'killed'
        self._sync_status['sync_end_time'] = datetime.utcnow()
        self._sync_status['sync_error'] = error

    @classmethod
    def _get_lock_object(cls, account_id, lock_for=dict()):
        """
        Make sure we only create one lock per account per process.

        (Default args are initialized at import time, so `lock_for` acts as a
        module-level memory cache.)

        """
        return lock_for.setdefault(
            account_id, Lock(cls._sync_lockfile_name(account_id), block=False))

    @classmethod
    def get(cls, id_, session):
        # Baked query: compiled once, then cached across calls.
        q = bakery(lambda session: session.query(cls))
        q += lambda q: q.filter(cls.id == bindparam('id_'))
        return q(session).params(id_=id_).first()

    @classmethod
    def _sync_lockfile_name(cls, account_id):
        return "/var/lock/inbox_sync/{}.lock".format(account_id)

    @property
    def _sync_lock(self):
        return self._get_lock_object(self.id)

    def sync_lock(self):
        """ Prevent mailsync for this account from running more than once. """
        self._sync_lock.acquire()

    def sync_unlock(self):
        self._sync_lock.release()

    @property
    def is_killed(self):
        return self.sync_state == 'killed'

    @property
    def is_running(self):
        return self.sync_state == 'running'

    @property
    def is_deleted(self):
        return self.sync_state in ('stopped', 'killed', 'invalid') and \
            self.sync_should_run is False and \
            self._sync_status.get('sync_disabled_reason') == 'account deleted'

    @property
    def is_sync_locked(self):
        return self._sync_lock.locked()

    @property
    def should_suppress_transaction_creation(self):
        # Only version if new or the `sync_state` has changed.
        obj_state = inspect(self)
        return not (obj_state.pending
                    or inspect(self).attrs.sync_state.history.has_changes())

    discriminator = Column('type', String(16))
    __mapper_args__ = {
        'polymorphic_identity': 'account',
        'polymorphic_on': discriminator
    }
# Example no. 9 (score: 0)
class Event(MailSyncBase, HasRevisions, HasPublicID):
    """Data for events."""
    API_OBJECT_NAME = 'event'

    # Don't surface 'remote' events in the transaction log since
    # they're an implementation detail we don't want our customers
    # to worry about.
    @property
    def should_suppress_transaction_creation(self):
        return self.source == 'remote'

    namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'),
                          nullable=False)

    namespace = relationship(Namespace, load_on_pending=True)

    calendar_id = Column(ForeignKey(Calendar.id, ondelete='CASCADE'),
                         nullable=False)
    calendar = relationship(Calendar,
                            backref=backref('events', passive_deletes=True),
                            load_on_pending=True)

    # A server-provided unique ID.
    uid = Column(String(767, collation='ascii_general_ci'), nullable=False)

    # A constant, unique identifier for the remote backend this event came
    # from. E.g., 'google', 'eas', 'inbox'
    provider_name = Column(String(64), nullable=False)

    raw_data = Column(Text, nullable=False)

    title = Column(String(TITLE_MAX_LEN), nullable=True)
    owner = Column(String(OWNER_MAX_LEN), nullable=True)
    description = Column(Text, nullable=True)
    location = Column(String(LOCATION_MAX_LEN), nullable=True)
    busy = Column(Boolean, nullable=False, default=True)
    read_only = Column(Boolean, nullable=False)
    reminders = Column(String(REMINDER_MAX_LEN), nullable=True)
    recurrence = Column(String(RECURRENCE_MAX_LEN), nullable=True)
    start = Column(DateTime, nullable=False)
    end = Column(DateTime, nullable=True)
    all_day = Column(Boolean, nullable=False)
    is_owner = Column(Boolean, nullable=False, default=True)
    source = Column('source', Enum('local', 'remote'))

    # Flag to set if the event is deleted in a remote backend.
    # (This is an unmapped attribute, i.e., it does not correspond to a
    # database column.)
    deleted = False

    __table_args__ = (UniqueConstraint('uid', 'source', 'namespace_id',
                                       'provider_name', name='uuid'),)

    _participant_cascade = "save-update, merge, delete, delete-orphan"
    participants_by_email = Column(MutableDict.as_mutable(JSON), default={},
                                   nullable=True)

    def __init__(self, *args, **kwargs):
        MailSyncBase.__init__(self, *args, **kwargs)
        if self.participants_by_email is None:
            self.participants_by_email = {}

    @validates('reminders', 'recurrence', 'owner', 'location', 'title', 'raw_data')
    def validate_length(self, key, value):
        """Truncate string columns to their declared maximum length."""
        max_len = _LENGTHS[key]
        return value if value is None else value[:max_len]

    @property
    def participants(self):
        return self.participants_by_email.values()

    @participants.setter
    def participants(self, participants):
        # We need to do this because the codes which creates event often
        # does it by calling something like event = Event(..., participants=[])
        # in this case self.participants_by_email is None since the constructor
        # hasn't run yet.
        if self.participants_by_email is None:
            self.participants_by_email = {}

        for p in participants:
            self.participants_by_email[p['email_address']] = p

    # Use a list when dumping to JSON so the original order is preserved.
    @property
    def participant_list(self):
        return [{'name': p['name'],
                 'email': p['email_address'],
                 'status': p['status'],
                 'notes': p['notes'],
                 'id': p['public_id']}
                for p in self.participants_by_email.values()]

    @participant_list.setter
    def participant_list(self, p_list):
        """ Updates the participant list based off of a list so that order can
        be preserved from creation time. (Doesn't allow re-ordering)"""

        # First add or update the ones we don't have yet
        all_emails = []

        for p in p_list:
            all_emails.append(p['email'])
            existing = self.participants_by_email.get(p['email'])
            if existing:
                existing['name'] = p.get('name')
                existing['notes'] = p.get('notes')
                existing['status'] = p.get('status')
            else:
                new_p = {"name": p.get('name'),
                         "email_address": p['email'],
                         "notes": p.get('notes'),
                         "status": p.get('status')}
                self.participants_by_email[p['email']] = new_p

        # Now remove the ones we have stored that are not in the list
        remove = list(set(self.participants_by_email.keys()) - set(all_emails))
        for email in remove:
            del self.participants_by_email[email]

    def merge_participant(self, p_email, base, remote):
        """Three-way merge of a single participant keyed by email."""
        if p_email not in self.participants_by_email:
            # Removed locally, so don't add
            if base and remote:
                return
            new_p = {"email_address": p_email}
            self.participants_by_email[p_email] = new_p
        else:
            # Removed by remote, don't add
            if base and not remote:
                del self.participants_by_email[p_email]
                return

        dest = self.participants_by_email.get(p_email)

        merge_attrs = ['name', 'status', 'notes']

        for attr in merge_attrs:
            merge_attr(base, remote, dest, attr)

    def merge_participants(self, base, remote):
        """Merge all participants seen in base, remote or locally."""
        all_participants = list(set(base.keys()) |
                                set(remote.keys()) |
                                set(self.participants_by_email.keys()))

        for p_email in all_participants:
            base_value = base.get(p_email)
            remote_value = remote.get(p_email)
            self.merge_participant(p_email, base_value, remote_value)

    def merge_from(self, base, remote):
        """Three-way merge of scalar fields and participants."""
        # This must be updated when new fields are added to the class.
        merge_attrs = ['title', 'description', 'start', 'end', 'all_day',
                       'read_only', 'location', 'reminders', 'recurrence',
                       'busy', 'raw_data', 'owner', 'is_owner', 'calendar_id']

        for attr in merge_attrs:
            merge_attr(base, remote, self, attr)

        self.merge_participants(base.participants_by_email,
                                remote.participants_by_email)

    def _copy_participant(self, copy, src):
        # NOTE(review): this branch also mutates `src` in place — looks
        # intentional (normalizing missing status), but confirm.
        if src['status'] is None:
            src['status'] = 'noreply'
            copy['status'] = 'noreply'

        if 'email_address' in src:
            copy['email_address'] = src['email_address']

        if 'status' in src:
            copy['status'] = src['status']

        if 'name' in src:
            copy['name'] = src['name']

        if 'notes' in src:
            copy['notes'] = src['notes']

    def copy_from(self, src):
        """ Copy fields from src."""
        self.namespace_id = src.namespace_id
        self.namespace = src.namespace
        self.uid = src.uid
        self.provider_name = src.provider_name
        self.raw_data = src.raw_data
        self.title = src.title
        self.description = src.description
        self.busy = src.busy
        self.read_only = src.read_only
        self.is_owner = src.is_owner
        # Fixed: was `self.owner = self.owner` (a no-op self-assignment),
        # which silently failed to copy the owner from src.
        self.owner = src.owner
        self.location = src.location
        self.reminders = src.reminders
        self.recurrence = src.recurrence
        self.start = src.start
        self.end = src.end
        self.all_day = src.all_day
        self.calendar_id = src.calendar_id

        # .items() instead of the Python-2-only .iteritems().
        for p_email, p in src.participants_by_email.items():
            if p_email not in self.participants_by_email:
                self.participants_by_email[p_email] = p
            else:
                old_p = self.participants_by_email[p_email]
                self._copy_participant(old_p, p)

        # For some reason sqlalchemy doesn't like iterating and modifying
        # a collection at the same time. Materialize the keys so deletions
        # below are safe (also required on Python 3, where keys() is a view).
        emails = list(self.participants_by_email.keys())
        for p_email in emails:
            if p_email not in src.participants_by_email:
                del self.participants_by_email[p_email]

    @property
    def when(self):
        """Return a Date/DateSpan or Time/TimeSpan depending on all_day."""
        if self.all_day:
            start = self.start.date()
            end = self.end.date()
            return Date(start) if start == end else DateSpan(start, end)
        else:
            start = self.start
            end = self.end
            return Time(start) if start == end else TimeSpan(start, end)

    @when.setter
    def when(self, when):
        if 'time' in when:
            self.start = self.end = time_parse(when['time'])
            self.all_day = False
        elif 'start_time' in when:
            self.start = time_parse(when['start_time'])
            self.end = time_parse(when['end_time'])
            self.all_day = False
        elif 'date' in when:
            self.start = self.end = date_parse(when['date'])
            self.all_day = True
        elif 'start_date' in when:
            self.start = date_parse(when['start_date'])
            self.end = date_parse(when['end_date'])
            self.all_day = True

    @property
    def versioned_relationships(self):
        return ['participants_by_email']
# Example no. 10 (score: 0)
class Account(MailSyncBase, HasPublicID, HasEmailAddress, HasRunState,
              HasRevisions, UpdatedAtMixin, DeletedAtMixin):
    API_OBJECT_NAME = 'account'

    @property
    def provider(self):
        """
        A constant, unique lowercase identifier for the account provider
        (e.g., 'gmail', 'eas'). Subclasses should override this.

        """
        raise NotImplementedError

    @property
    def verbose_provider(self):
        """
        A detailed identifier for the account provider
        (e.g., 'gmail', 'office365', 'outlook').
        Subclasses may override this.

        """
        return self.provider

    @property
    def category_type(self):
        """
        Whether the account is organized by folders or labels
        ('folder'/ 'label'), depending on the provider.
        Subclasses should override this.

        """
        raise NotImplementedError

    @property
    def auth_handler(self):
        from inbox.auth.base import handler_from_provider
        return handler_from_provider(self.provider)

    @property
    def provider_info(self):
        return provider_info(self.provider)

    @property
    def thread_cls(self):
        from inbox.models.thread import Thread
        return Thread

    # The default phrase used when sending mail from this account.
    name = Column(String(256), nullable=False, server_default='')

    # If True, throttle initial sync to reduce resource load
    throttled = Column(Boolean, server_default=false())

    # if True we sync contacts/events/email
    # NOTE: these columns are meaningless for EAS accounts
    sync_email = Column(Boolean, nullable=False, default=True)
    sync_contacts = Column(Boolean, nullable=False, default=False)
    sync_events = Column(Boolean, nullable=False, default=False)

    last_synced_contacts = Column(DateTime, nullable=True)

    # DEPRECATED
    last_synced_events = Column(DateTime, nullable=True)

    emailed_events_calendar_id = Column(BigInteger,
                                        ForeignKey('calendar.id',
                                                   ondelete='SET NULL',
                                                   use_alter=True,
                                                   name='emailed_events_cal'),
                                        nullable=True)

    _emailed_events_calendar = relationship(
        'Calendar',
        post_update=True,
        foreign_keys=[emailed_events_calendar_id])

    def create_emailed_events_calendar(self):
        if not self._emailed_events_calendar:
            calname = "Emailed events"
            cal = Calendar(namespace=self.namespace,
                           description=calname,
                           uid='inbox',
                           name=calname,
                           read_only=True)
            self._emailed_events_calendar = cal

    @property
    def emailed_events_calendar(self):
        self.create_emailed_events_calendar()
        return self._emailed_events_calendar

    @emailed_events_calendar.setter
    def emailed_events_calendar(self, cal):
        self._emailed_events_calendar = cal

    sync_host = Column(String(255), nullable=True)
    desired_sync_host = Column(String(255), nullable=True)

    # current state of this account
    state = Column(Enum('live', 'down', 'invalid'), nullable=True)

    # Based on account status, should the sync be running?
    # (Note, this is stored via a mixin.)
    # This is set to false if:
    #  - Account credentials are invalid (see mark_invalid())
    #  - External factors no longer require this account to sync
    # The value of this bit should always equal the AND value of all its
    # folders and heartbeats.

    @property
    def sync_enabled(self):
        return self.sync_should_run

    sync_state = Column(Enum('running', 'stopped', 'killed', 'invalid',
                             'connerror'),
                        nullable=True)

    _sync_status = Column(MutableDict.as_mutable(JSON),
                          default={},
                          nullable=True)

    @property
    def sync_status(self):
        """Aggregate identity and sync state, merged with the stored status."""
        status = {
            'id': self.id,
            'email': self.email_address,
            'provider': self.provider,
            'is_enabled': self.sync_enabled,
            'state': self.sync_state,
            'sync_host': self.sync_host,
            'desired_sync_host': self.desired_sync_host,
        }
        status.update(self._sync_status or {})
        return status

    @property
    def sync_error(self):
        # The last error recorded by update_sync_error(), or None.
        return self._sync_status.get('sync_error')

    @property
    def initial_sync_start(self):
        """Earliest folder initial-sync start, or None if any is unset."""
        starts = [f.initial_sync_start for f in self.folders]
        if not starts or any(s is None for s in starts):
            return None
        return min(starts)

    @property
    def initial_sync_end(self):
        """Latest folder initial-sync end, or None if any is unset."""
        ends = [f.initial_sync_end for f in self.folders]
        if not ends or any(e is None for e in ends):
            return None
        return max(ends)

    @property
    def initial_sync_duration(self):
        """Seconds the initial sync took, or None if it hasn't finished."""
        if not self.initial_sync_start or not self.initial_sync_end:
            return None
        # Fixed: previously computed end - end, which always yielded 0.0.
        return (self.initial_sync_end -
                self.initial_sync_start).total_seconds()

    def update_sync_error(self, error=None):
        """Store a serializable summary of `error` in `_sync_status`.

        Calling with no argument clears any previously recorded error.
        Each field is truncated so the JSON blob stays a bounded size.
        """
        if error is None:
            self._sync_status['sync_error'] = None
        else:
            # NOTE(review): `error.message` is a Python-2-ism; most Python 3
            # exceptions have no `.message` attribute — confirm the runtime
            # before porting.
            error_obj = {
                'message':
                str(error.message)[:3000],
                'exception':
                "".join(traceback.format_exception_only(type(error),
                                                        error))[:500],
                'traceback':
                traceback.format_exc(20)[:3000]
            }

            self._sync_status['sync_error'] = error_obj

    def sync_started(self):
        """
        Record transition to started state. Should be called after the
        sync is actually started, not when the request to start it is made.

        """
        now = datetime.utcnow()
        status = self._sync_status

        # First-ever run (as opposed to restarting a stopped/killed sync):
        # no prior state and no recorded end time.
        if self.sync_state is None and (
                not status or status.get('sync_end_time') is None):
            status['original_start_time'] = now

        status['sync_start_time'] = now
        # Clear anything left over from a previous run or a disable.
        for stale_key in ('sync_end_time', 'sync_error',
                          'sync_disabled_reason', 'sync_disabled_on',
                          'sync_disabled_by'):
            status[stale_key] = None

        self.sync_state = 'running'

    def enable_sync(self):
        """ Tell the monitor that this account should be syncing. """
        # Counterpart of disable_sync(); only flips the persisted run bit.
        self.sync_should_run = True

    def disable_sync(self, reason):
        """ Tell the monitor that this account should stop syncing. """
        self.sync_should_run = False
        # Keep an audit trail: why, when and by whom sync was disabled.
        status = self._sync_status
        status['sync_disabled_reason'] = reason
        status['sync_disabled_on'] = datetime.utcnow()
        status['sync_disabled_by'] = os.environ.get('USER', 'unknown')

    def mark_invalid(self, reason='invalid credentials', scope='mail'):
        """
        In the event that the credentials for this account are invalid,
        update the status and sync flag accordingly. Should only be called
        after trying to re-authorize / get new token.

        """
        # Calendar/contacts auth failures only turn off that feature;
        # anything else invalidates the whole mail sync.
        if scope == 'calendar':
            self.sync_events = False
            return
        if scope == 'contacts':
            self.sync_contacts = False
            return
        self.disable_sync(reason)
        self.sync_state = 'invalid'

    def mark_deleted(self):
        """
        Mark account for deletion
        """
        # 'account deleted' is the sentinel reason the is_deleted property
        # checks for in _sync_status.
        self.disable_sync('account deleted')
        self.sync_state = 'stopped'

    def sync_stopped(self, requesting_host):
        """
        Record transition to stopped state. Should be called after the
        sync is actually stopped, not when the request to stop it is made.

        Returns True if the stop was recorded, False if *requesting_host*
        no longer owns this account's sync.
        """
        # Compare-and-swap: only the host that currently owns the sync
        # (account.sync_host) may clear it and record the stop.
        if requesting_host != self.sync_host:
            return False

        self.sync_host = None
        if self.sync_state == 'running':
            self.sync_state = 'stopped'
        self._sync_status['sync_end_time'] = datetime.utcnow()
        return True

    @classmethod
    def get(cls, id_, session):
        """Fetch the account with primary key *id_*, or None if it does
        not exist. Uses a baked query so the statement is compiled once
        and cached across calls."""
        q = bakery(lambda session: session.query(cls))
        q += lambda q: q.filter(cls.id == bindparam('id_'))
        return q(session).params(id_=id_).first()

    @property
    def is_killed(self):
        """True if the last recorded sync state is 'killed'."""
        return self.sync_state == 'killed'

    @property
    def is_running(self):
        """True if the last recorded sync state is 'running'."""
        return self.sync_state == 'running'

    @property
    def is_deleted(self):
        """True iff the sync is fully disabled because the account was
        marked deleted (see mark_deleted)."""
        if self.sync_state not in ('stopped', 'killed', 'invalid'):
            return False
        if self.sync_should_run is not False:
            return False
        return self._sync_status.get('sync_disabled_reason') == \
            'account deleted'

    @property
    def should_suppress_transaction_creation(self):
        """Suppress revision/transaction creation unless the object is
        new or its `sync_state` changed in this flush."""
        # Only version if new or the `sync_state` has changed.
        # Reuse the inspection state instead of calling inspect(self) a
        # second time (the previous code inspected twice redundantly).
        obj_state = inspect(self)
        return not (obj_state.pending
                    or obj_state.attrs.sync_state.history.has_changes())

    @property
    def server_settings(self):
        # Subclasses may override to expose backend-specific server
        # settings; the base account has none.
        return None

    def get_raw_message_contents(self, message):
        # Get the raw contents of a message. We do this differently
        # for every backend (Gmail, IMAP, EAS), and the best way
        # to do this across repos is to make it a method of the
        # account class.
        raise NotImplementedError

    # SQLAlchemy polymorphic-inheritance discriminator: subclasses set
    # their own polymorphic_identity, stored in this 'type' column.
    discriminator = Column('type', String(16))
    __mapper_args__ = {
        'polymorphic_identity': 'account',
        'polymorphic_on': discriminator
    }
Exemplo n.º 11
0
class ImapFolderSyncStatus(MailSyncBase):
    """ Per-folder status state saving for IMAP folders. """
    account_id = Column(ForeignKey(ImapAccount.id, ondelete='CASCADE'),
                        nullable=False)
    account = relationship(
        ImapAccount,
        backref=backref('foldersyncstatuses',
                        cascade='delete',
                        primaryjoin='and_('
                        'ImapFolderSyncStatus.account_id == ImapAccount.id, '
                        'ImapFolderSyncStatus.deleted_at.is_(None))'),
        primaryjoin='and_('
        'ImapFolderSyncStatus.account_id == ImapAccount.id, '
        'ImapAccount.deleted_at.is_(None))')

    folder_id = Column(Integer, ForeignKey('folder.id'), nullable=False)
    # We almost always need the folder name too, so eager load by default.
    folder = relationship(
        'Folder',
        lazy='joined',
        backref=backref('imapsyncstatus',
                        primaryjoin='and_('
                        'Folder.id == ImapFolderSyncStatus.folder_id, '
                        'ImapFolderSyncStatus.deleted_at == None)'),
        primaryjoin='and_(ImapFolderSyncStatus.folder_id == Folder.id, '
        'Folder.deleted_at == None)')

    # see state machine in mailsync/backends/imap/imap.py
    state = Column(Enum('initial', 'initial uidinvalid', 'poll',
                        'poll uidinvalid', 'finish'),
                   server_default='initial',
                   nullable=False)

    # stats on messages downloaded etc.
    _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True)

    @property
    def metrics(self):
        """Folder name and state merged with the raw ``_metrics`` blob."""
        status = dict(name=self.folder.name, state=self.state)
        status.update(self._metrics or {})

        return status

    def start_sync(self):
        """Reset metrics and record that the folder sync is running."""
        self._metrics = dict(run_state='running',
                             sync_start_time=datetime.utcnow())

    def stop_sync(self):
        """Record that the folder sync has stopped."""
        self._metrics['run_state'] = 'stopped'
        self._metrics['sync_end_time'] = datetime.utcnow()

    def kill_sync(self, error=None):
        """Record that the folder sync was killed, with an optional error."""
        self._metrics['run_state'] = 'killed'
        self._metrics['sync_end_time'] = datetime.utcnow()
        self._metrics['sync_error'] = error

    @property
    def is_killed(self):
        """True if the last recorded run state is 'killed'."""
        return self._metrics.get('run_state') == 'killed'

    def update_metrics(self, metrics):
        """Merge a dict of whitelisted metric keys into ``_metrics``."""
        sync_status_metrics = [
            'remote_uid_count', 'delete_uid_count', 'update_uid_count',
            'download_uid_count', 'uid_checked_timestamp',
            'num_downloaded_since_timestamp', 'queue_checked_at', 'percent'
        ]

        assert isinstance(metrics, dict)
        # Iterate the dict directly instead of the Python-2-only
        # dict.iterkeys(); this yields keys on both Python 2 and 3 and
        # matches the other ImapFolderSyncStatus version in this file.
        for k in metrics:
            assert k in sync_status_metrics, k

        if self._metrics is not None:
            self._metrics.update(metrics)
        else:
            self._metrics = metrics

    __table_args__ = (UniqueConstraint('account_id', 'folder_id'), )
Exemplo n.º 12
0
class Account(MailSyncBase, HasPublicID, HasEmailAddress):
    """Declarative model for a synced mail account.

    Provider-specific subclasses are distinguished via the polymorphic
    ``type`` discriminator column (see ``__mapper_args__`` at the bottom).
    """

    @property
    def provider(self):
        """ A constant, unique lowercase identifier for the account provider
        (e.g., 'gmail', 'eas'). Subclasses should override this.

        We prefix provider folders with this string when we expose them as
        tags through the API. E.g., a 'jobs' folder/label on a Gmail
        backend is exposed as 'gmail-jobs'. Any value returned here
        should also be in Tag.RESERVED_PROVIDER_NAMES.

        """
        raise NotImplementedError

    @property
    def auth_handler(self):
        # Auth handler resolved lazily by provider name; imported here to
        # avoid a module-level import cycle.
        from inbox.auth import handler_from_provider
        return handler_from_provider(self.provider)

    @property
    def provider_info(self):
        return provider_info(self.provider, self.email_address)

    def verify(self):
        """ Verify that the account is still valid."""
        raise NotImplementedError

    @property
    def thread_cls(self):
        from inbox.models.thread import Thread
        return Thread

    # The default phrase used when sending mail from this account.
    name = Column(String(256), nullable=False, server_default='')

    # If True, throttle initial sync to reduce resource load
    throttled = Column(Boolean, server_default=false())

    # local flags & data
    save_raw_messages = Column(Boolean, server_default=true())

    last_synced_contacts = Column(DateTime, nullable=True)
    last_synced_events = Column(DateTime, nullable=True)

    # Folder mappings for the data we sync back to the account backend.  All
    # account backends will not provide all of these. This may mean that Inbox
    # creates some folders on the remote backend, for example to provide
    # "archive" functionality on non-Gmail remotes.
    inbox_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    inbox_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.inbox_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')
    sent_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    sent_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.sent_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    drafts_folder_id = Column(Integer,
                              ForeignKey(Folder.id, ondelete='SET NULL'),
                              nullable=True)
    drafts_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.drafts_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    spam_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    spam_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.spam_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    trash_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    trash_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.trash_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    archive_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    archive_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.archive_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    all_folder_id = Column(Integer,
                           ForeignKey(Folder.id, ondelete='SET NULL'),
                           nullable=True)
    all_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.all_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    starred_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    starred_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.starred_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    important_folder_id = Column(Integer,
                                 ForeignKey(Folder.id, ondelete='SET NULL'),
                                 nullable=True)
    important_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.important_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    default_calendar_id = Column(Integer,
                                 ForeignKey('calendar.id',
                                            ondelete='SET NULL',
                                            use_alter=True,
                                            name='account_ibfk_10'),
                                 nullable=True)

    _default_calendar = relationship(
        'Calendar',
        post_update=True,
        primaryjoin='and_(Account.default_calendar_id == Calendar.id, '
        'Calendar.deleted_at.is_(None))')

    @property
    def default_calendar(self):
        # Lazily create a writable 'default' calendar (provider 'inbox')
        # on first access when the account has none.
        if not self._default_calendar:
            public_id = generate_public_id()
            new_cal = Calendar()
            new_cal.public_id = public_id
            new_cal.namespace = self.namespace
            new_cal.uid = public_id
            new_cal.read_only = False
            new_cal.name = 'default'
            new_cal.provider_name = 'inbox'
            self._default_calendar = new_cal
        return self._default_calendar

    @default_calendar.setter
    def default_calendar(self, cal):
        self._default_calendar = cal

    sync_host = Column(String(255), nullable=True)

    # current state of this account
    state = Column(Enum('live', 'down', 'invalid'), nullable=True)

    @property
    def sync_enabled(self):
        # In this version, having a sync host assigned means sync is on.
        return self.sync_host is not None

    sync_state = Column(Enum('running', 'stopped', 'killed', 'invalid',
                             'connerror'),
                        nullable=True)

    # JSON blob of sync timing/error bookkeeping, mutated in place by the
    # lifecycle methods below.
    _sync_status = Column(MutableDict.as_mutable(JSON),
                          default={},
                          nullable=True)

    @property
    def sync_status(self):
        """Fixed account fields merged with the mutable ``_sync_status``
        blob (blob keys win on collision)."""
        d = dict(id=self.id,
                 email=self.email_address,
                 provider=self.provider,
                 is_enabled=self.sync_enabled,
                 state=self.sync_state,
                 sync_host=self.sync_host)
        d.update(self._sync_status or {})

        return d

    def start_sync(self, sync_host=None):
        if sync_host:
            self.sync_started(sync_host)
        else:
            # If a host isn't provided then start it as a new sync.
            # Setting sync_state = None makes the start condition in service.py
            # hold true, ensuring this sync is picked up and started.
            self.sync_state = None

    def sync_started(self, sync_host):
        """Record transition to the running state on *sync_host*; called
        after the sync has actually started."""
        self.sync_host = sync_host

        current_time = datetime.utcnow()

        # Never run before (vs restarting stopped/killed)
        if self.sync_state is None and (
                not self._sync_status
                or self._sync_status.get('sync_end_time') is None):
            self._sync_status['original_start_time'] = current_time

        self._sync_status['sync_start_time'] = current_time
        self._sync_status['sync_end_time'] = None
        self._sync_status['sync_error'] = None

        self.sync_state = 'running'

    def stop_sync(self):
        """ Set a flag for the monitor to stop the sync. """

        # Don't overwrite state if Invalid credentials/Connection error/
        # Killed because foldersyncs were killed.
        if not self.sync_state or self.sync_state == 'running':
            self.sync_state = 'stopped'

    def sync_stopped(self):
        """ Called when the sync has actually been stopped. """
        self.sync_host = None
        self._sync_status['sync_end_time'] = datetime.utcnow()

    def kill_sync(self, error=None):
        """Record transition to the killed state, with an optional error."""
        # Don't change sync_host if moving to state 'killed'

        self.sync_state = 'killed'

        self._sync_status['sync_end_time'] = datetime.utcnow()
        self._sync_status['sync_error'] = error

    @classmethod
    def _get_lock_object(cls, account_id, lock_for=dict()):
        """ Make sure we only create one lock per account per process.

        (Default args are initialized at import time, so `lock_for` acts as a
        module-level memory cache.)
        """
        return lock_for.setdefault(
            account_id, Lock(cls._sync_lockfile_name(account_id), block=False))

    @classmethod
    def _sync_lockfile_name(cls, account_id):
        return "/var/lock/inbox_sync/{}.lock".format(account_id)

    @property
    def _sync_lock(self):
        return self._get_lock_object(self.id)

    def sync_lock(self):
        """ Prevent mailsync for this account from running more than once. """
        self._sync_lock.acquire()

    def sync_unlock(self):
        """Release the per-account sync lock."""
        self._sync_lock.release()

    @property
    def is_killed(self):
        """True if the last recorded sync state is 'killed'."""
        return self.sync_state == 'killed'

    @property
    def is_sync_locked(self):
        """True if the per-account sync lock is currently held."""
        return self._sync_lock.locked()

    # SQLAlchemy polymorphic-inheritance discriminator column.
    discriminator = Column('type', String(16))
    __mapper_args__ = {
        'polymorphic_identity': 'account',
        'polymorphic_on': discriminator
    }
Exemplo n.º 13
0
class Account(MailSyncBase, HasPublicID, HasEmailAddress):
    """Declarative model for a synced mail account.

    Provider-specific subclasses are distinguished via the polymorphic
    ``type`` discriminator column (see ``__mapper_args__`` at the bottom).

    Fixes applied: removed a duplicated ``state`` column definition, and
    corrected ``stop_sync`` to compare against the actual enum value
    ``'connerror'`` (it previously checked ``'connerr'``, which could
    never match).
    """

    @property
    def provider(self):
        """ A constant, unique lowercase identifier for the account provider
        (e.g., 'gmail', 'eas'). Subclasses should override this.

        We prefix provider folders with this string when we expose them as
        tags through the API. E.g., a 'jobs' folder/label on a Gmail
        backend is exposed as 'gmail-jobs'. Any value returned here
        should also be in Tag.RESERVED_PROVIDER_NAMES.

        """
        raise NotImplementedError

    def verify(self):
        """ Verify that the account is still valid."""
        raise NotImplementedError

    # local flags & data
    save_raw_messages = Column(Boolean, server_default=true())

    last_synced_contacts = Column(DateTime, nullable=True)
    last_synced_events = Column(DateTime, nullable=True)

    # Folder mappings for the data we sync back to the account backend.  All
    # account backends will not provide all of these. This may mean that Inbox
    # creates some folders on the remote backend, for example to provide
    # "archive" functionality on non-Gmail remotes.
    inbox_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    inbox_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.inbox_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')
    sent_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    sent_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.sent_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    drafts_folder_id = Column(Integer,
                              ForeignKey(Folder.id, ondelete='SET NULL'),
                              nullable=True)
    drafts_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.drafts_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    spam_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    spam_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.spam_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    trash_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    trash_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.trash_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    archive_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    archive_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.archive_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    all_folder_id = Column(Integer,
                           ForeignKey(Folder.id, ondelete='SET NULL'),
                           nullable=True)
    all_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.all_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    starred_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    starred_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.starred_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    important_folder_id = Column(Integer,
                                 ForeignKey(Folder.id, ondelete='SET NULL'),
                                 nullable=True)
    important_folder = relationship(
        'Folder',
        post_update=True,
        primaryjoin='and_(Account.important_folder_id == Folder.id, '
        'Folder.deleted_at.is_(None))')

    sync_host = Column(String(255), nullable=True)

    # current state of this account
    # (a duplicated definition of this column was removed)
    state = Column(Enum('live', 'down', 'invalid'), nullable=True)

    @property
    def sync_enabled(self):
        # In this version, having a sync host assigned means sync is on.
        return self.sync_host is not None

    sync_state = Column(Enum('running', 'stopped', 'killed', 'invalid',
                             'connerror'),
                        nullable=True)

    # JSON blob of sync timing/error bookkeeping, mutated in place by the
    # lifecycle methods below.
    _sync_status = Column(MutableDict.as_mutable(JSON),
                          default={},
                          nullable=True)

    @property
    def sync_status(self):
        """Fixed account fields merged with the mutable ``_sync_status``
        blob (blob keys win on collision)."""
        d = dict(id=self.id,
                 email=self.email_address,
                 provider=self.provider,
                 is_enabled=self.sync_enabled,
                 state=self.sync_state)
        d.update(self._sync_status or {})

        return d

    def start_sync(self, sync_host=None):
        # If a host isn't provided then start it as a new sync
        if sync_host:
            self.sync_started(sync_host)
        else:
            self.sync_state = None

    def sync_started(self, sync_host):
        """Record transition to the running state on *sync_host*; called
        after the sync has actually started."""
        self.sync_host = sync_host

        # Never run before
        if self.sync_state is None and \
                self._sync_status.get('sync_end_time') is None:
            self._sync_status['sync_type'] = 'new'
            self._sync_status['sync_start_time'] = datetime.utcnow()
        # Restarting stopped/killed
        else:
            self._sync_status['sync_type'] = 'resumed'
            self._sync_status['sync_restart_time'] = datetime.utcnow()

        self.sync_state = 'running'

        self._sync_status['sync_end_time'] = None
        self._sync_status['sync_error'] = None

    def stop_sync(self):
        """ Set a flag for the monitor to stop the sync. """

        # Invalid credentials/Connection error, don't overwrite state.
        # Fixed: the enum value is 'connerror' (see sync_state above);
        # this previously compared against 'connerr', which never matched.
        if self.sync_state == 'invalid' or self.sync_state == 'connerror':
            return
        self.sync_state = 'stopped'

    def sync_stopped(self):
        """ Called when the sync has actually been stopped. """
        self.sync_host = None
        self._sync_status['sync_end_time'] = datetime.utcnow()

    def kill_sync(self, error=None):
        """Record transition to the killed state, with an optional error."""
        # Don't change sync_host if moving to state 'killed'

        self.sync_state = 'killed'

        self._sync_status['sync_end_time'] = datetime.utcnow()
        self._sync_status['sync_error'] = error

    @property
    def sender_name(self):
        # Used for setting sender information when we send a message.
        # Can be overridden by subclasses that store account name information.
        return ''

    @classmethod
    def _get_lock_object(cls, account_id, lock_for=dict()):
        """ Make sure we only create one lock per account per process.

        (Default args are initialized at import time, so `lock_for` acts as a
        module-level memory cache.)
        """
        return lock_for.setdefault(
            account_id, Lock(cls._sync_lockfile_name(account_id), block=False))

    @classmethod
    def _sync_lockfile_name(cls, account_id):
        return "/var/lock/inbox_sync/{}.lock".format(account_id)

    @property
    def _sync_lock(self):
        return self._get_lock_object(self.id)

    def sync_lock(self):
        """ Prevent mailsync for this account from running more than once. """
        self._sync_lock.acquire()

    def sync_unlock(self):
        """Release the per-account sync lock."""
        self._sync_lock.release()

    @property
    def is_killed(self):
        """True if the last recorded sync state is 'killed'."""
        return self.sync_state == 'killed'

    @property
    def is_sync_locked(self):
        """True if the per-account sync lock is currently held."""
        return self._sync_lock.locked()

    # SQLAlchemy polymorphic-inheritance discriminator column.
    discriminator = Column('type', String(16))
    __mapper_args__ = {
        'polymorphic_identity': 'account',
        'polymorphic_on': discriminator
    }
Exemplo n.º 14
0
class ImapFolderSyncStatus(MailSyncBase, HasRunState, UpdatedAtMixin,
                           DeletedAtMixin):
    """ Per-folder status state saving for IMAP folders. """

    account_id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"),
                        nullable=False)
    account = relationship(ImapAccount,
                           backref=backref("foldersyncstatuses",
                                           passive_deletes=True))

    folder_id = Column(ForeignKey("folder.id", ondelete="CASCADE"),
                       nullable=False)
    # We almost always need the folder name too, so eager load by default.
    folder = relationship(
        "Folder",
        lazy="joined",
        backref=backref("imapsyncstatus", uselist=False, passive_deletes=True),
    )

    # see state machine in mailsync/backends/imap/imap.py
    state = Column(
        Enum("initial", "initial uidinvalid", "poll", "poll uidinvalid",
             "finish"),
        server_default="initial",
        nullable=False,
    )

    # stats on messages downloaded etc.
    _metrics = Column(MutableDict.as_mutable(JSON), default={}, nullable=True)

    @property
    def metrics(self):
        """Folder name and state merged with the raw ``_metrics`` blob."""
        status = dict(name=self.folder.name, state=self.state)
        status.update(self._metrics or {})

        return status

    def start_sync(self):
        """Reset metrics and record that the folder sync is running."""
        self._metrics = dict(run_state="running",
                             sync_start_time=datetime.utcnow())

    def stop_sync(self):
        """Record that the folder sync has stopped."""
        self._metrics["run_state"] = "stopped"
        self._metrics["sync_end_time"] = datetime.utcnow()

    @property
    def is_killed(self):
        """True if the last recorded run state is 'killed'."""
        return self._metrics.get("run_state") == "killed"

    def update_metrics(self, metrics):
        """Merge a dict of whitelisted metric keys into ``_metrics``."""
        sync_status_metrics = [
            "remote_uid_count",
            "delete_uid_count",
            "update_uid_count",
            "download_uid_count",
            "uid_checked_timestamp",
            "num_downloaded_since_timestamp",
            "queue_checked_at",
            "percent",
        ]

        assert isinstance(metrics, dict)
        for k in metrics:
            assert k in sync_status_metrics, k

        if self._metrics is not None:
            self._metrics.update(metrics)
        else:
            self._metrics = metrics

    @property
    def sync_enabled(self):
        # sync is enabled if the folder's run bit is set, and the account's
        # run bit is set. (this saves us needing to reproduce account-state
        # transition logic on the folder level, and gives us a comparison bit
        # against folder heartbeats.)
        return self.sync_should_run and self.account.sync_should_run

    __table_args__ = (UniqueConstraint("account_id", "folder_id"), )
Exemplo n.º 15
0
class Account(MailSyncBase, HasPublicID, HasEmailAddress, HasRunState):
    """Base model for a synced mail account.

    Provider-specific subclasses share this table; rows are distinguished
    by the `type` discriminator column (see `__mapper_args__` below).
    """

    @property
    def provider(self):
        """ A constant, unique lowercase identifier for the account provider
        (e.g., 'gmail', 'eas'). Subclasses should override this.

        We prefix provider folders with this string when we expose them as
        tags through the API. E.g., a 'jobs' folder/label on a Gmail
        backend is exposed as 'gmail-jobs'. Any value returned here
        should also be in Tag.RESERVED_PROVIDER_NAMES.

        """
        # Abstract on the base class; concrete subclasses must override.
        raise NotImplementedError

    @property
    def auth_handler(self):
        """The auth handler object registered for this account's provider."""
        from inbox.auth.base import handler_from_provider

        return handler_from_provider(self.provider)

    @property
    def provider_info(self):
        """Provider metadata looked up by provider name and email address."""
        lookup_args = (self.provider, self.email_address)
        return provider_info(*lookup_args)

    @property
    def thread_cls(self):
        """The Thread model class used for this account's threads."""
        from inbox.models.thread import Thread

        return Thread

    # The default phrase used when sending mail from this account.
    name = Column(String(256), nullable=False, server_default='')

    # If True, throttle initial sync to reduce resource load
    throttled = Column(Boolean, server_default=false())

    # local flags & data
    save_raw_messages = Column(Boolean, server_default=true())

    # if True we sync contacts/events
    # NOTE: these columns are meaningless for EAS accounts
    sync_contacts = Column(Boolean, nullable=False, default=False)
    sync_events = Column(Boolean, nullable=False, default=False)

    last_synced_contacts = Column(DateTime, nullable=True)

    # DEPRECATED -- retained for schema compatibility; no longer updated.
    last_synced_events = Column(DateTime, nullable=True)

    # Folder mappings for the data we sync back to the account backend.  All
    # account backends will not provide all of these. This may mean that Inbox
    # creates some folders on the remote backend, for example to provide
    # "archive" functionality on non-Gmail remotes.
    # Each mapping is a nullable FK to Folder with ondelete='SET NULL', and a
    # post_update relationship to break the circular dependency between
    # account and folder rows at flush time.
    inbox_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    inbox_folder = relationship('Folder', post_update=True,
                                foreign_keys=[inbox_folder_id])
    sent_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    sent_folder = relationship('Folder', post_update=True,
                               foreign_keys=[sent_folder_id])

    drafts_folder_id = Column(Integer,
                              ForeignKey(Folder.id, ondelete='SET NULL'),
                              nullable=True)
    drafts_folder = relationship('Folder', post_update=True,
                                 foreign_keys=[drafts_folder_id])

    spam_folder_id = Column(Integer,
                            ForeignKey(Folder.id, ondelete='SET NULL'),
                            nullable=True)
    spam_folder = relationship('Folder', post_update=True,
                               foreign_keys=[spam_folder_id])

    trash_folder_id = Column(Integer,
                             ForeignKey(Folder.id, ondelete='SET NULL'),
                             nullable=True)
    trash_folder = relationship('Folder', post_update=True,
                                foreign_keys=[trash_folder_id])

    archive_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    archive_folder = relationship('Folder', post_update=True,
                                  foreign_keys=[archive_folder_id])

    all_folder_id = Column(Integer,
                           ForeignKey(Folder.id, ondelete='SET NULL'),
                           nullable=True)
    all_folder = relationship('Folder', post_update=True,
                              foreign_keys=[all_folder_id])

    starred_folder_id = Column(Integer,
                               ForeignKey(Folder.id, ondelete='SET NULL'),
                               nullable=True)
    starred_folder = relationship('Folder', post_update=True,
                                  foreign_keys=[starred_folder_id])

    important_folder_id = Column(Integer,
                                 ForeignKey(Folder.id, ondelete='SET NULL'),
                                 nullable=True)
    important_folder = relationship('Folder', post_update=True,
                                    foreign_keys=[important_folder_id])

    # Calendar that collects events received by email; use_alter defers FK
    # creation to break the account <-> calendar creation cycle.
    emailed_events_calendar_id = Column(Integer,
                                        ForeignKey('calendar.id',
                                                   ondelete='SET NULL',
                                                   use_alter=True,
                                                   name='emailed_events_cal'),
                                        nullable=True)

    _emailed_events_calendar = relationship(
        'Calendar', post_update=True,
        foreign_keys=[emailed_events_calendar_id])

    def create_emailed_events_calendar(self):
        """Lazily create the read-only "Emailed events" calendar if absent."""
        if self._emailed_events_calendar:
            return
        display_name = "Emailed events"
        self._emailed_events_calendar = Calendar(
            namespace=self.namespace,
            description=display_name,
            uid='inbox',
            name=display_name,
            read_only=True)

    @property
    def emailed_events_calendar(self):
        """The "Emailed events" calendar, created on first access."""
        self.create_emailed_events_calendar()
        return self._emailed_events_calendar

    @emailed_events_calendar.setter
    def emailed_events_calendar(self, calendar):
        # Direct assignment bypasses lazy creation on purpose.
        self._emailed_events_calendar = calendar

    # Hostname of the process currently syncing this account, or NULL when
    # no sync is assigned (cleared in sync_stopped below).
    sync_host = Column(String(255), nullable=True)

    # current state of this account
    state = Column(Enum('live', 'down', 'invalid'), nullable=True)

    # Based on account status, should the sync be running?
    # (Note, this is stored via a mixin.)
    # This is set to false if:
    #  - Account credentials are invalid (see mark_invalid())
    #  - External factors no longer require this account to sync
    # The value of this bit should always equal the AND value of all its
    # folders and heartbeats.

    @property
    def sync_enabled(self):
        return self.sync_should_run

    # Last observed state of the sync process for this account; written by
    # the sync_started/sync_stopped/kill_sync/mark_invalid transitions below.
    sync_state = Column(Enum('running', 'stopped', 'killed',
                             'invalid', 'connerror'),
                        nullable=True)

    # Free-form status dict (timestamps, error info); may be NULL, so
    # readers guard with `or {}`.
    _sync_status = Column(MutableDict.as_mutable(JSON), default={},
                          nullable=True)

    @property
    def sync_status(self):
        """Summary dict of sync state, merged with any stored _sync_status
        entries (stored entries win on key collision)."""
        summary = {
            "id": self.id,
            "email": self.email_address,
            "provider": self.provider,
            "is_enabled": self.sync_enabled,
            "state": self.sync_state,
            "sync_host": self.sync_host,
        }
        summary.update(self._sync_status or {})
        return summary

    @property
    def sync_error(self):
        """The most recently recorded sync error, or None."""
        status = self._sync_status
        return status.get('sync_error')

    def update_sync_error(self, error=None):
        """Record `error` in the sync status dict; None clears it."""
        self._sync_status['sync_error'] = error

    def sync_started(self):
        """ Record transition to started state. Should be called after the
            sync is actually started, not when the request to start it is made.
        """
        now = datetime.utcnow()

        # First-ever start (as opposed to restarting a stopped/killed
        # sync): stamp the original start time exactly once.
        never_ran = self.sync_state is None and (
            not self._sync_status or
            self._sync_status.get('sync_end_time') is None)
        if never_ran:
            self._sync_status['original_start_time'] = now

        self._sync_status['sync_start_time'] = now
        self._sync_status['sync_end_time'] = None
        self._sync_status['sync_error'] = None

        self.sync_state = 'running'

    def enable_sync(self, sync_host=None):
        """ Tell the monitor that this account should be syncing. """
        self.sync_should_run = True
        if sync_host is None:
            return
        self.sync_host = sync_host

    def disable_sync(self, reason=None):
        """ Tell the monitor that this account should stop syncing. """
        self.sync_should_run = False
        # Only a truthy reason gets recorded, matching the status schema.
        if not reason:
            return
        self._sync_status['sync_disabled_reason'] = reason

    def mark_invalid(self, reason='invalid credentials'):
        """Flag this account's credentials as bad: disable sync (recording
        `reason`) and move the state machine to 'invalid'. Should only be
        called after trying to re-authorize / get a new token.
        """
        self.disable_sync(reason)
        self.sync_state = 'invalid'

    def sync_stopped(self, reason=None):
        """ Record transition to stopped state. Should be called after the
            sync is actually stopped, not when the request to stop it is made.

        NOTE(review): `reason` is accepted for interface compatibility but
        is not used in this body.
        """
        self.sync_host = None
        # Only a running sync transitions to 'stopped'; killed/invalid
        # states are preserved as-is.
        if self.sync_state == 'running':
            self.sync_state = 'stopped'
        self._sync_status['sync_end_time'] = datetime.utcnow()

    def kill_sync(self, error=None):
        """Mark the sync as killed, recording end time and the error.

        Deliberately leaves sync_should_run untouched: syncs are not
        killed on purpose.
        """
        self.sync_state = 'killed'
        self._sync_status.update(
            sync_end_time=datetime.utcnow(), sync_error=error)

    @classmethod
    def _get_lock_object(cls, account_id, lock_for=dict()):
        """ Make sure we only create one lock per account per process.

        (Default args are initialized at import time, so `lock_for` acts as a
        module-level memory cache.)
        """
        # NOTE: the mutable default argument is intentional here -- it IS
        # the per-process cache. setdefault constructs a new non-blocking
        # Lock only the first time an account_id is seen.
        return lock_for.setdefault(account_id,
                                   Lock(cls._sync_lockfile_name(account_id),
                                        block=False))

    @classmethod
    def _sync_lockfile_name(cls, account_id):
        """Filesystem path of the per-account mailsync lockfile."""
        return "/var/lock/inbox_sync/{}.lock".format(account_id)

    @property
    def _sync_lock(self):
        # Per-process singleton lock keyed on this account's id.
        return self._get_lock_object(self.id)

    def sync_lock(self):
        """ Prevent mailsync for this account from running more than once. """
        lock = self._sync_lock
        lock.acquire()

    def sync_unlock(self):
        """Release the per-account mailsync lock."""
        lock = self._sync_lock
        lock.release()

    @property
    def is_killed(self):
        """True when the last recorded sync_state is 'killed'."""
        return self.sync_state == 'killed'

    @property
    def is_running(self):
        """True when the last recorded sync_state is 'running'."""
        return self.sync_state == 'running'

    @property
    def is_deleted(self):
        """Stopped, flagged not to run, and disabled specifically because
        the account was deleted."""
        if self.sync_state != 'stopped':
            return False
        # Identity check on False matches the original semantics exactly
        # (None does not count as disabled).
        if self.sync_should_run is not False:
            return False
        return self._sync_status.get('sync_disabled_reason') == \
            'account deleted'

    @property
    def is_sync_locked(self):
        """Whether the per-account sync lock is currently held."""
        lock = self._sync_lock
        return lock.locked()

    # Inheritance discriminator: subclass rows are told apart by this
    # 'type' column (polymorphic_on below).
    discriminator = Column('type', String(16))
    __mapper_args__ = {'polymorphic_identity': 'account',
                       'polymorphic_on': discriminator}