Exemple #1
0
def strip_tags(html):
    """Return *html* with its markup stripped via HTMLTagStripper.

    If the parser chokes on the input, the error is logged (with the raw
    HTML attached) and whatever text was extracted so far is returned.
    """
    stripper = HTMLTagStripper()
    try:
        stripper.feed(html)
    except HTMLParseError:
        get_logger().error('error stripping tags', raw_html=html)
    return stripper.get_data()
def test_name_collision_folders(db, default_account, folder_name_mapping):
    """A user folder named 'spam' must get its own tag, distinct from the
    canonical spam tag, and deleting it must not delete the canonical tag."""
    # test that when a user-created folder called 'spam' is created, we don't
    # associate it with the canonical spam tag, but instead give it its own
    # tag

    folder_name_mapping["extra"] = ["spam"]

    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping, db_session)
        spam_tags = db_session.query(Tag).filter_by(namespace_id=default_account.namespace.id, name="spam")
        # There should be one 'Gmail/Spam' canonical tag
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id == "spam"
        # and one 'imap/spam' non-canonical tag with public_id != 'spam'
        spam_tags = db_session.query(Tag).filter_by(namespace_id=default_account.namespace.id, name="imap/spam")
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id != "spam"

    # test that when a folder called 'spam' is deleted, we don't delete
    # the canonical 'spam' tag
    folder_name_mapping["extra"] = []
    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping, db_session)
        spam_tags = db_session.query(Tag).filter_by(namespace_id=default_account.namespace.id, name="spam")
        # The 'Gmail/Spam' canonical tag should still remain.
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id == "spam"
        # The 'imap/spam' non-canonical tag shouldn't
        spam_tags = db_session.query(Tag).filter_by(namespace_id=default_account.namespace.id, name="imap/spam")
        assert spam_tags.count() == 0
Exemple #3
0
def strip_tags(html):
    """Strip markup from *html* using MLStripper.

    Parse errors are logged (with the offending HTML) rather than raised;
    the partially extracted text is still returned.
    """
    stripper = MLStripper()
    try:
        stripper.feed(html)
    except HTMLParseError:
        get_logger().error('error stripping tags', raw_html=html)
    return stripper.get_data()
def test_handle_trailing_whitespace(db, default_account, folder_name_mapping):
    """Folder names differing only by trailing whitespace map to one tag."""
    folder_name_mapping["extra"] = ["label", "label "]
    save_folder_names(get_logger(), default_account.id, folder_name_mapping,
                      db.session)

    # .one() raises if the tag for 'label' was never committed.
    db.session.query(Tag).filter_by(
        namespace_id=default_account.namespace.id, name="label").one()
Exemple #5
0
def test_reconcile_message(db, config):
    """A locally created Message with a draft's inbox_uid should be linked
    to that draft by reconcile_message, without sharing its public_id."""
    from inbox.models.util import reconcile_message
    from inbox.sendmail.base import create_draft
    from inbox.models.account import Account
    from inbox.models.message import Message
    from inbox.log import get_logger
    log = get_logger()

    account = db.session.query(Account).get(ACCOUNT_ID)
    draft = create_draft(db.session, account)

    assert draft.inbox_uid == draft.public_id, 'draft has incorrect inbox_uid'
    inbox_uid = draft.inbox_uid

    # Build a minimal synced-message stand-in carrying the draft's inbox_uid.
    message = Message()
    message.thread_id = THREAD_ID
    message.received_date = datetime.utcnow()
    message.size = len('')
    message.is_draft = True
    message.is_read = True
    message.sanitized_body = ''
    message.snippet = ''
    message.inbox_uid = draft.inbox_uid
    db.session.add(message)
    db.session.commit()

    reconcile_message(db.session, log, inbox_uid, message)

    assert draft.resolved_message and draft.resolved_message.id == message.id,\
        'draft not reconciled correctly'

    assert message.public_id != draft.public_id, \
        'message has incorrect public_id'
Exemple #6
0
    def __init__(self, account_id):
        """Load the sending configuration for *account_id* from the database.

        Creates a 'sent' folder for the account if none was detected.

        Raises:
            SendMailException: if an oauth2 access token cannot be obtained.
        """
        self.account_id = account_id
        self.log = get_logger()
        self.log.bind(account_id=account_id)

        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(self.account_id)

            self.email_address = account.email_address
            self.provider_name = account.provider
            self.sender_name = account.name
            self.smtp_endpoint = account.smtp_endpoint

            if account.sent_folder is None:
                # account has no detected sent folder - create one.
                sent_folder = Folder.find_or_create(db_session, account,
                                                    'sent', 'sent')
                account.sent_folder = sent_folder

            self.sent_folder = account.sent_folder.name

            self.auth_type = provider_info(self.provider_name,
                                           self.email_address)['auth']

            if self.auth_type == 'oauth2':
                try:
                    # NOTE(review): presumably account.access_token can raise
                    # OAuthError while refreshing — confirm in ImapAccount.
                    self.auth_token = account.access_token
                except OAuthError:
                    raise SendMailException('Error logging in.')
            else:
                assert self.auth_type == 'password'
                self.auth_token = account.password
def test_parallel_folder_syncs(db, folder_name_mapping, monkeypatch):
    """Running save_folder_names concurrently must create only one tag
    per folder (no duplicate-tag race)."""
    # test that when we run save_folder_names in parallel, we only create one
    # tag for that folder. this happens when the CondstoreFolderSyncEngine
    # checks for UID changes.

    # patching the heartbeat clear means that we force the first greenlet to
    # wait around (there is a deleted folder in folder_name_mapping), thereby
    # assuring that the second greenlet will overtake it and force any
    # potential race condition around tag creation.
    def clear_heartbeat_patch(w, x, y, z):
        gevent.sleep(1)

    monkeypatch.setattr('inbox.heartbeat.store.HeartbeatStore.remove_folders',
                        clear_heartbeat_patch)

    log = get_logger()
    group = Group()
    with mailsync_session_scope() as db_session:
        group.spawn(save_folder_names, log, ACCOUNT_ID,
                    folder_name_mapping, db_session)
    with mailsync_session_scope() as db_session:
        group.spawn(save_folder_names, log, ACCOUNT_ID,
                    folder_name_mapping, db_session)
    group.join()

    with mailsync_session_scope() as db_session:
        account = db_session.query(Account).get(ACCOUNT_ID)
        random_tags = db_session.query(Tag).filter_by(
            namespace_id=account.namespace.id,
            name='random')
        assert random_tags.count() == 1
Exemple #8
0
def default_json_error(ex):
    """ Exception -> flask JSON responder """
    get_logger().error('Uncaught error thrown by Flask/Werkzeug', exc_info=ex)
    response = jsonify(message=str(ex), type='api_error')
    # Preserve the HTTP status for Werkzeug exceptions; everything else
    # is an internal server error.
    if isinstance(ex, HTTPException):
        response.status_code = ex.code
    else:
        response.status_code = 500
    return response
Exemple #9
0
    def publish(self, **kwargs):
        """Merge *kwargs* into this heartbeat's value and write it to the store.

        A 'heartbeat_at' datetime in *kwargs* overrides the timestamp;
        otherwise the current time is used.  Failures are logged, not raised.
        """
        def check_schema(**kwargs):
            # Reject keys that are not declared in the heartbeat schema.
            for kw in kwargs:
                assert kw in self.schema

        try:
            check_schema(**kwargs)
            self.value.update(kwargs or {})
            # If we got a 'heartbeat_at' datetime argument, publish this
            # heartbeat with that timestamp.
            if 'heartbeat_at' in kwargs and \
                    isinstance(kwargs['heartbeat_at'], datetime):
                epoch = time.mktime(kwargs.get('heartbeat_at').timetuple())
                self.heartbeat_at = epoch
                self.value['heartbeat_at'] = str(kwargs['heartbeat_at'])
            else:
                self.heartbeat_at = time.time()
                self.value['heartbeat_at'] = str(
                    datetime.fromtimestamp(self.heartbeat_at))
            self.store.publish(self.key, self.device_id,
                               json.dumps(self.value), self.heartbeat_at)
            # Drop 'action' so it is not re-published on the next beat —
            # presumably a one-shot field; confirm with store consumers.
            if 'action' in self.value:
                del self.value['action']
        except Exception:
            log = get_logger()
            log.error('Error while writing the heartbeat status',
                      account_id=self.key.account_id,
                      folder_id=self.key.folder_id,
                      device_id=self.device_id,
                      exc_info=True)
Exemple #10
0
    def __init__(self, account_id):
        """Load the sending configuration for *account_id* from the database.

        Creates a 'sent' folder for the account if none was detected.

        Raises:
            SendMailException: if an oauth2 access token cannot be obtained.
        """
        self.account_id = account_id
        self.log = get_logger()
        self.log.bind(account_id=account_id)

        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(self.account_id)

            self.email_address = account.email_address
            self.provider_name = account.provider
            self.sender_name = account.name
            self.smtp_endpoint = account.smtp_endpoint

            if account.sent_folder is None:
                # account has no detected sent folder - create one.
                sent_folder = Folder.find_or_create(db_session, account,
                                                    'sent', 'sent')
                account.sent_folder = sent_folder

            self.sent_folder = account.sent_folder.name

            self.auth_type = provider_info(self.provider_name,
                                           self.email_address)['auth']

            if self.auth_type == 'oauth2':
                try:
                    # NOTE(review): presumably account.access_token can raise
                    # OAuthError while refreshing — confirm in ImapAccount.
                    self.auth_token = account.access_token
                except OAuthError:
                    raise SendMailException('Error logging in.')
            else:
                assert self.auth_type == 'password'
                self.auth_token = account.password
Exemple #11
0
    def publish(self, **kwargs):
        """Merge *kwargs* into this heartbeat's value and write it to the store.

        A 'heartbeat_at' datetime in *kwargs* overrides the timestamp;
        otherwise the current time is used.  Failures are logged, not raised.
        """
        def check_schema(**kwargs):
            # Reject keys that are not declared in the heartbeat schema.
            for kw in kwargs:
                assert kw in self.schema

        try:
            check_schema(**kwargs)
            self.value.update(kwargs or {})
            # If we got a 'heartbeat_at' datetime argument, publish this
            # heartbeat with that timestamp.
            if 'heartbeat_at' in kwargs and \
                    isinstance(kwargs['heartbeat_at'], datetime):
                epoch = time.mktime(kwargs.get('heartbeat_at').timetuple())
                self.heartbeat_at = epoch
                self.value['heartbeat_at'] = str(kwargs['heartbeat_at'])
            else:
                self.heartbeat_at = time.time()
                self.value['heartbeat_at'] = str(datetime.fromtimestamp(
                    self.heartbeat_at))
            self.store.publish(
                self.key, self.device_id, json.dumps(self.value),
                self.heartbeat_at)
            # Drop 'action' so it is not re-published on the next beat —
            # presumably a one-shot field; confirm with store consumers.
            if 'action' in self.value:
                del self.value['action']
        except Exception:
            log = get_logger()
            log.error('Error while writing the heartbeat status',
                      account_id=self.key.account_id,
                      folder_id=self.key.folder_id,
                      device_id=self.device_id,
                      exc_info=True)
def test_sync_folder_deletes(db, default_account, folder_name_mapping):
    """Test that folder deletions properly cascade to deletions of
       ImapFolderSyncStatus and ImapFolderInfo.
    """
    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping, db_session)
        folders = db_session.query(Folder).filter_by(account_id=default_account.id)
        for folder in folders:
            # Presumably creates one ImapFolderSyncStatus and one
            # ImapFolderInfo row per folder — confirm in the helper.
            add_imap_status_info_rows(folder.id, default_account.id, db_session)
        db_session.commit()
        assert db_session.query(ImapFolderInfo).filter_by(account_id=default_account.id).count() == 7
        assert db_session.query(ImapFolderSyncStatus).filter_by(account_id=default_account.id).count() == 7

        # Re-save with one folder removed from 'extra'; the deleted folder's
        # status/info rows should cascade away (7 -> 6).
        folder_name_mapping["extra"] = ["Jobslist"]
        save_folder_names(log, default_account.id, folder_name_mapping, db_session)
        saved_folder_names = {
            name for name, in db_session.query(Folder.name).filter(Folder.account_id == default_account.id)
        }
        assert saved_folder_names == {
            "Inbox",
            "[Gmail]/Spam",
            "[Gmail]/All Mail",
            "[Gmail]/Sent Mail",
            "[Gmail]/Drafts",
            "Jobslist",
        }
        assert db_session.query(ImapFolderInfo).filter_by(account_id=default_account.id).count() == 6
        assert db_session.query(ImapFolderSyncStatus).filter_by(account_id=default_account.id).count() == 6
Exemple #13
0
    def publish(self, **kwargs):
        """Merge *kwargs* into this heartbeat's value and write it directly
        to redis, stamping it with the current UTC time.

        Failures are logged, never raised.
        """
        # Allowed heartbeat fields; anything else trips check_schema.
        schema = {'provider_name', 'folder_name', 'heartbeat_at', 'state',
                  'action'}

        def check_schema(**kwargs):
            for kw in kwargs:
                assert kw in schema

        try:
            client = get_redis_client(STATUS_DATABASE)
            check_schema(**kwargs)
            now = datetime.utcnow()
            self.value['heartbeat_at'] = str(now)
            self.value.update(kwargs or {})
            client.hset(self.key, self.device_id, json.dumps(self.value))
            self.heartbeat_at = now
            # Drop 'action' so it is not re-published on the next beat —
            # presumably a one-shot field; confirm with consumers.
            if 'action' in self.value:
                del self.value['action']
        except Exception:
            log = get_logger()
            log.error('Error while writing the heartbeat status',
                      account_id=self.key.account_id,
                      folder_id=self.key.folder_id,
                      device_id=self.device_id,
                      exc_info=True)
def test_parallel_folder_syncs(db, folder_name_mapping, default_account,
                               monkeypatch):
    """Running save_folder_names concurrently must create only one tag
    per folder (no duplicate-tag race)."""
    # test that when we run save_folder_names in parallel, we only create one
    # tag for that folder. this happens when the CondstoreFolderSyncEngine
    # checks for UID changes.

    # patching the heartbeat clear means that we force the first greenlet to
    # wait around (there is a deleted folder in folder_name_mapping), thereby
    # assuring that the second greenlet will overtake it and force any
    # potential race condition around tag creation.
    def clear_heartbeat_patch(w, x, y, z):
        gevent.sleep(1)

    monkeypatch.setattr('inbox.heartbeat.store.HeartbeatStore.remove_folders',
                        clear_heartbeat_patch)

    log = get_logger()
    group = Group()
    with mailsync_session_scope() as db_session:
        group.spawn(save_folder_names, log, default_account.id,
                    folder_name_mapping, db_session)
    with mailsync_session_scope() as db_session:
        group.spawn(save_folder_names, log, default_account.id,
                    folder_name_mapping, db_session)
    group.join()

    with mailsync_session_scope() as db_session:
        account = db_session.query(Account).get(default_account.id)
        random_tags = db_session.query(Tag).filter_by(
            namespace_id=account.namespace.id, name='random')
        assert random_tags.count() == 1
def test_sync_folder_deletes(db, default_account, folder_name_mapping):
    """Test that folder deletions properly cascade to deletions of
       ImapFolderSyncStatus and ImapFolderInfo.
    """
    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping,
                          db_session)
        folders = db_session.query(Folder).filter_by(
            account_id=default_account.id)
        for folder in folders:
            # Presumably creates one ImapFolderSyncStatus and one
            # ImapFolderInfo row per folder — confirm in the helper.
            add_imap_status_info_rows(folder.id, default_account.id,
                                      db_session)
        db_session.commit()
        assert db_session.query(ImapFolderInfo).filter_by(
            account_id=default_account.id).count() == 7
        assert db_session.query(ImapFolderSyncStatus).filter_by(
            account_id=default_account.id).count() == 7

        # Re-save with one folder removed from 'extra'; the deleted folder's
        # status/info rows should cascade away (7 -> 6).
        folder_name_mapping['extra'] = ['Jobslist']
        save_folder_names(log, default_account.id, folder_name_mapping,
                          db_session)
        saved_folder_names = {
            name
            for name, in db_session.query(Folder.name).filter(
                Folder.account_id == default_account.id)
        }
        assert saved_folder_names == {
            'Inbox', '[Gmail]/Spam', '[Gmail]/All Mail', '[Gmail]/Sent Mail',
            '[Gmail]/Drafts', 'Jobslist'
        }
        assert db_session.query(ImapFolderInfo).filter_by(
            account_id=default_account.id).count() == 6
        assert db_session.query(ImapFolderSyncStatus).filter_by(
            account_id=default_account.id).count() == 6
Exemple #16
0
 def __init__(self, max_blocking_time=MAX_BLOCKING_TIME):
     """Initialize greenlet-switch tracking state.

     max_blocking_time: presumably the threshold (seconds) above which a
     greenlet is considered to have blocked too long — confirm in the
     switch-handling code.
     """
     self.max_blocking_time = max_blocking_time
     self.time_spent_by_id = collections.defaultdict(float)
     self.time_spent_by_context = collections.defaultdict(float)
     self.total_switches = 0
     self._hub = gevent.hub.get_hub()
     self._last_switch_time = None
     self.log = get_logger()
Exemple #17
0
 def __init__(self, max_blocking_time=MAX_BLOCKING_TIME):
     """Initialize greenlet-switch tracking state.

     max_blocking_time: presumably the threshold (seconds) above which a
     greenlet is considered to have blocked too long — confirm in the
     switch-handling code.
     """
     self.max_blocking_time = max_blocking_time
     self.time_spent_by_id = collections.defaultdict(float)
     self.time_spent_by_context = collections.defaultdict(float)
     self.total_switches = 0
     self._hub = gevent.hub.get_hub()
     self._last_switch_time = None
     self.log = get_logger()
def test_handle_trailing_whitespace(db, default_account, folder_name_mapping):
    """Folder names differing only by trailing whitespace map to one tag."""
    folder_name_mapping['extra'] = ['label', 'label ']
    save_folder_names(get_logger(), default_account.id, folder_name_mapping,
                      db.session)

    # .one() raises if the tag for 'label' was never committed.
    db.session.query(Tag).filter_by(
        namespace_id=default_account.namespace.id, name='label').one()
Exemple #19
0
 def __init__(self, account_id, conn, readonly=True):
     """Wrap an existing IMAP connection *conn* for *account_id*.

     readonly: stored and presumably used when selecting folders —
     confirm at the select call sites.
     """
     self.log = get_logger(account_id)
     self.account_id = account_id
     # IMAP isn't stateless :(
     self.selected_folder = None
     self._folder_names = None
     self.conn = conn
     self.readonly = readonly
def test_use_starttls():
    """Port 587 should yield a plain SMTP (STARTTLS-capable) connection."""
    connection = SMTPConnection(account_id=1,
                                email_address='*****@*****.**',
                                auth_type='password',
                                auth_token='secret_password',
                                smtp_endpoint=('smtp.gmail.com', 587),
                                log=get_logger())
    assert isinstance(connection.connection, smtplib.SMTP)
Exemple #21
0
 def default_json_error(ex):
     """ Exception -> flask JSON responder """
     get_logger().error('Uncaught error thrown by Flask/Werkzeug', exc_info=ex)
     response = flask_jsonify(message=str(ex), type='api_error')
     # Preserve the HTTP status for Werkzeug exceptions; everything else
     # is an internal server error.
     if isinstance(ex, HTTPException):
         response.status_code = ex.code
     else:
         response.status_code = 500
     return response
Exemple #22
0
    def __init__(self, account_id, account_namespace):
        """Cache per-account sendmail settings from the SMTP connection pool."""
        self.account_id = account_id
        self.namespace = account_namespace
        self.pool = get_smtp_connection_pool(self.account_id)
        # Mirror pool-level account info locally for convenient access.
        self.sender_name = self.pool.sender_name
        self.email_address = self.pool.email_address
        self.sent_folder = self.pool.sent_folder

        self.log = get_logger(account_id, 'sendmail')
Exemple #23
0
    def __init__(self, account_id, account_namespace):
        """Cache per-account sendmail settings from the SMTP connection pool."""
        self.account_id = account_id
        self.namespace = account_namespace
        self.pool = get_smtp_connection_pool(self.account_id)
        # Mirror pool-level account info locally for convenient access.
        self.sender_name = self.pool.sender_name
        self.email_address = self.pool.email_address
        self.sent_folder = self.pool.sent_folder

        self.log = get_logger()
def test_use_smtp_over_ssl():
    """Port 465 should yield an implicit-TLS SMTP_SSL connection."""
    # Auth won't actually work but we just want to test connection
    # initialization here and below.
    connection = SMTPConnection(account_id=1,
                                email_address='*****@*****.**',
                                auth_type='password',
                                auth_token='secret_password',
                                smtp_endpoint=('smtp.gmail.com', 465),
                                log=get_logger())
    assert isinstance(connection.connection, smtplib.SMTP_SSL)
Exemple #25
0
def test_root_filelogger(config, log):
    """Messages at INFO/WARNING/ERROR all end up in the root log file."""
    logger = get_logger()
    logger.info('INFO')
    logger.warning('WARNING')
    logger.error('ERROR')
    # NOTE: This slurps the whole logfile. Hope it's not big.
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).read() leaked the handle).
    with open(config.get_required('TEST_LOGFILE'), 'r') as logfile:
        log_contents = logfile.read()

    assert all(phrase in log_contents
               for phrase in ('INFO', 'WARNING', 'ERROR'))
Exemple #26
0
 def __init__(self, poll_interval=1, chunk_size=22):
     """Initialize delta-stream worker state and run startup hooks.

     poll_interval: seconds between polls (presumably — confirm in the
     polling loop).  chunk_size: batch size per poll.
     """
     self.workers = defaultdict(set)
     self.log = get_logger()
     self.poll_interval = poll_interval
     self.chunk_size = chunk_size
     # Start before any valid transaction id; advanced later by polling.
     self.minimum_id = -1
     self.poller = None
     self.polling = False
     self.encoder = APIEncoder()
     self._on_startup()
def test_root_filelogger(config, log):
    """Messages at INFO/WARNING/ERROR all end up in the root log file."""
    logger = get_logger()
    logger.info('INFO')
    logger.warning('WARNING')
    logger.error('ERROR')
    # NOTE: This slurps the whole logfile. Hope it's not big.
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).read() leaked the handle).
    with open(config.get_required('TEST_LOGFILE'), 'r') as logfile:
        log_contents = logfile.read()

    assert all(phrase in log_contents
               for phrase in ('INFO', 'WARNING', 'ERROR'))
Exemple #28
0
 def __init__(self, poll_interval=1, chunk_size=22):
     """Initialize delta-stream worker state and run startup hooks.

     poll_interval: seconds between polls (presumably — confirm in the
     polling loop).  chunk_size: batch size per poll.
     """
     self.workers = defaultdict(set)
     self.log = get_logger()
     self.poll_interval = poll_interval
     self.chunk_size = chunk_size
     # Initial transaction-log cursor; advanced later by polling.
     self.minimum_id = 0
     self.poller = None
     self.polling = False
     self.encoder = APIEncoder()
     self._on_startup()
def test_save_folder_names(db, folder_name_mapping):
    """Every folder in the mapping is persisted for the account."""
    with mailsync_session_scope() as db_session:
        save_folder_names(get_logger(), ACCOUNT_ID, folder_name_mapping,
                          db_session)
        rows = db_session.query(Folder.name).filter(
            Folder.account_id == ACCOUNT_ID)
        saved_folder_names = {name for name, in rows}
        expected = {'Inbox', '[Gmail]/Spam', '[Gmail]/All Mail',
                    '[Gmail]/Sent Mail', '[Gmail]/Drafts', 'Jobslist',
                    'Random'}
        assert saved_folder_names == expected
Exemple #30
0
def del_device(account_id, device_id):
    """Remove *device_id* from every sync-status hash of *account_id*.

    Best-effort: any failure is logged and swallowed.
    """
    try:
        redis = get_redis_client()
        pattern = SyncStatusKey.all_folders(account_id)
        for key in redis.scan_iter(match=pattern):
            redis.hdel(key, device_id)
    except Exception:
        get_logger().error('Error while deleting from the sync status',
                           account_id=account_id,
                           device_id=device_id,
                           exc_info=True)
Exemple #31
0
 def __init__(self, poll_interval=1, chunk_size=22, max_pool_size=22):
     """Set up the syncback worker pool and the transaction-log cursor."""
     self.log = get_logger()
     self.actions = ActionRegistry()
     self.worker_pool = gevent.pool.Pool(max_pool_size)
     self.poll_interval = poll_interval
     self.chunk_size = chunk_size
     with session_scope() as db_session:
         # Just start working from the head of the log.
         # TODO(emfree): once we can do retry, persist a pointer into the
         # transaction log and advance it only on syncback success.
         self.minimum_id = db_session.query(func.max(Transaction.id)).one()[0] or -1
     gevent.Greenlet.__init__(self)
 def __init__(self, gather_stats=False,
              max_blocking_time=MAX_BLOCKING_TIME):
     """Initialize greenlet profiler state.

     gather_stats: when True, presumably enables per-context timing
     collection — confirm in the switch handler.
     max_blocking_time: threshold (seconds) for reporting long blocks.
     """
     self.gather_stats = gather_stats
     self.max_blocking_time = max_blocking_time
     self.time_spent_by_context = collections.defaultdict(float)
     self.total_switches = 0
     self._last_switch_time = None
     self._switch_flag = False
     self._active_greenlet = None
     # Identify the main thread so switches can be attributed correctly.
     self._main_thread_id = gevent._threading.get_ident()
     self._hub = gevent.hub.get_hub()
     self.log = get_logger()
Exemple #33
0
def del_device(account_id, device_id):
    """Remove *device_id* from every sync-status hash of *account_id*.

    Best-effort: any failure is logged and swallowed.
    """
    try:
        redis = get_redis_client()
        pattern = SyncStatusKey.all_folders(account_id)
        for key in redis.scan_iter(match=pattern):
            redis.hdel(key, device_id)
    except Exception:
        get_logger().error('Error while deleting from the sync status',
                           account_id=account_id,
                           device_id=device_id,
                           exc_info=True)
Exemple #34
0
    def timed_fn(self, *args, **kwargs):
        """Call the wrapped *fn* (closure from the enclosing decorator) and
        log its wall-clock duration via the instance logger, falling back to
        the global logger when the instance has none."""
        start_time = time.time()
        ret = fn(self, *args, **kwargs)

        # TODO some modules like gmail.py don't have self.logger
        try:
            if self.log:
                fn_logger = self.log
        except AttributeError:
            fn_logger = get_logger()
            # out = None
        fn_logger.info("[timer] {0} took {1:.3f} seconds.".format(str(fn), float(time.time() - start_time)))
        return ret
Exemple #35
0
 def __init__(self,
              gather_stats=False,
              max_blocking_time=MAX_BLOCKING_TIME):
     """Initialize greenlet profiler state.

     gather_stats: when True, presumably enables per-context timing
     collection — confirm in the switch handler.
     max_blocking_time: threshold (seconds) for reporting long blocks.
     """
     self.gather_stats = gather_stats
     self.max_blocking_time = max_blocking_time
     self.time_spent_by_context = collections.defaultdict(float)
     self.total_switches = 0
     self._last_switch_time = None
     self._switch_flag = False
     self._active_greenlet = None
     # Identify the main thread so switches can be attributed correctly.
     self._main_thread_id = gevent._threading.get_ident()
     self._hub = gevent.hub.get_hub()
     self.log = get_logger()
Exemple #36
0
    def __init__(self, account_id, num_connections, debug=False):
        """Create a pool of *num_connections* SMTP connections for the account."""
        self.log = get_logger(account_id, 'sendmail: connection_pool')
        self.log.info('Creating SMTP connection pool for account {0} with {1} '
                      'connections'.format(account_id, num_connections))

        self.account_id = account_id
        self._set_account_info()

        self.debug = debug

        # 1200s == 20min
        geventconnpool.ConnectionPool.__init__(
            self, num_connections, keepalive=1200)
def test_save_folder_names(db, folder_name_mapping):
    """Every folder in the mapping is persisted for the account."""
    with mailsync_session_scope() as db_session:
        save_folder_names(get_logger(), ACCOUNT_ID, folder_name_mapping,
                          db_session)
        rows = db_session.query(Folder.name).filter(
            Folder.account_id == ACCOUNT_ID)
        saved_folder_names = {name for name, in rows}
        expected = {'Inbox', '[Gmail]/Spam', '[Gmail]/All Mail',
                    '[Gmail]/Sent Mail', '[Gmail]/Drafts', 'Jobslist',
                    'Random'}
        assert saved_folder_names == expected
def test_name_collision_folders(db, default_account, folder_name_mapping):
    """A user folder named 'spam' must get its own tag, distinct from the
    canonical spam tag, and deleting it must not delete the canonical tag."""
    # test that when a user-created folder called 'spam' is created, we don't
    # associate it with the canonical spam tag, but instead give it its own
    # tag

    folder_name_mapping['extra'] = ['spam']

    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping,
                          db_session)
        spam_tags = db_session.query(Tag).filter_by(
            namespace_id=default_account.namespace.id, name='spam')
        # There should be one 'Gmail/Spam' canonical tag
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id == 'spam'
        # and one 'imap/spam' non-canonical tag with public_id != 'spam'
        spam_tags = db_session.query(Tag).filter_by(
            namespace_id=default_account.namespace.id, name='imap/spam')
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id != 'spam'

    # test that when a folder called 'spam' is deleted, we don't delete
    # the canonical 'spam' tag
    folder_name_mapping['extra'] = []
    with mailsync_session_scope() as db_session:
        log = get_logger()
        save_folder_names(log, default_account.id, folder_name_mapping,
                          db_session)
        spam_tags = db_session.query(Tag).filter_by(
            namespace_id=default_account.namespace.id, name='spam')
        # The 'Gmail/Spam' canonical tag should still remain.
        assert spam_tags.count() == 1
        assert spam_tags.first().public_id == 'spam'
        # The 'imap/spam' non-canonical tag shouldn't
        spam_tags = db_session.query(Tag).filter_by(
            namespace_id=default_account.namespace.id, name='imap/spam')
        assert spam_tags.count() == 0
Exemple #39
0
    def timed_fn(self, *args, **kwargs):
        """Call the wrapped *fn* (closure from the enclosing decorator) and
        log its wall-clock duration via the instance logger, falling back to
        the global logger when the instance has none."""
        start_time = time.time()
        ret = fn(self, *args, **kwargs)

        # TODO some modules like gmail.py don't have self.logger
        try:
            if self.log:
                fn_logger = self.log
        except AttributeError:
            fn_logger = get_logger()
            # out = None
        fn_logger.info('[timer] {0} took {1:.3f} seconds.'.format(
            str(fn), float(time.time() - start_time)))
        return ret
Exemple #40
0
    def __init__(self, account_id, num_connections, debug=False):
        """Create a pool of *num_connections* SMTP connections for the account."""
        self.log = get_logger()
        self.log.info('Creating SMTP connection pool for account {0} with {1} '
                      'connections'.format(account_id, num_connections))

        self.account_id = account_id
        self._set_account_info()

        self.debug = debug

        # 1200s == 20min
        geventconnpool.ConnectionPool.__init__(self,
                                               num_connections,
                                               keepalive=1200)
Exemple #41
0
def has_contacts_and_events(account_id):
    """Return (has_contacts, has_events) booleans for *account_id*.

    Reads both heartbeat keys in one pipelined round trip; on any error,
    logs it and reports (False, False).
    """
    try:
        redis = get_redis_client(STATUS_DATABASE)
        pipeline = redis.pipeline()
        pipeline.keys(HeartbeatStatusKey.contacts(account_id))
        pipeline.keys(HeartbeatStatusKey.events(account_id))
        contact_keys, event_keys = pipeline.execute()
        return (len(contact_keys) == 1, len(event_keys) == 1)
    except Exception:
        get_logger().error('Error while reading the heartbeat status',
                           account_id=account_id,
                           exc_info=True)
        return (False, False)
Exemple #42
0
def start():
    """Per-request setup (presumably a Flask before-request hook — confirm
    where it is registered): bind a DB session, logger, namespace, encoder
    and argument parser onto flask.g.

    Returns a 404 error response when the namespace public id is malformed
    or unknown.
    """
    g.db_session = InboxSession(engine)

    g.log = get_logger()
    try:
        valid_public_id(g.namespace_public_id)
        g.namespace = g.db_session.query(Namespace).filter(Namespace.public_id == g.namespace_public_id).one()

        g.encoder = APIEncoder(g.namespace.public_id)
    except (NoResultFound, InputError):
        return err(404, "Couldn't find namespace with id `{0}` ".format(g.namespace_public_id))

    g.parser = reqparse.RequestParser(argument_class=ValidatableArgument)
    g.parser.add_argument("limit", default=DEFAULT_LIMIT, type=limit, location="args")
    g.parser.add_argument("offset", default=0, type=int, location="args")
Exemple #43
0
    def __init__(self, account_id, folder_name, folder_id,
                 email_address, provider, shared_state, state_handlers):
        """Set up a per-folder mail-sync greenlet.

        state_handlers: mapping driving the sync state machine (state is
        initialized to None here).
        """
        self.account_id = account_id
        self.folder_name = folder_name
        self.folder_id = folder_id
        self.shared_state = shared_state
        self.state_handlers = state_handlers
        self.state = None
        self.conn_pool = connection_pool(self.account_id)

        self.log = get_logger(account_id, 'mailsync')

        Greenlet.__init__(self)
        # Report the folder sync as stopped whenever this greenlet exits.
        self.link_value(lambda _: report_stopped(account_id=self.account_id,
                                                 folder_name=self.folder_name))
Exemple #44
0
def send_draft(account_id, draft_id):
    """
    Send the draft with id = `draft_id`.

    Looks up the draft, sends it through the account's sendmail client,
    marks it sent, updates the thread's sent/drafts tags, commits, and
    finally deletes the draft copy.  Raises SendMailException when the
    draft is missing or its id is ambiguous.
    """
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)

        log = get_logger()
        sendmail_client = get_sendmail_client(account)
        try:
            draft = db_session.query(SpoolMessage).filter(
                SpoolMessage.id == draft_id).one()

        except NoResultFound:
            log.info('NoResultFound for draft_id {0}'.format(draft_id))
            raise SendMailException('No draft with id {0}'.format(draft_id))

        except MultipleResultsFound:
            log.info('MultipleResultsFound for draft_id {0}'.format(draft_id))
            raise SendMailException('Multiple drafts with id {0}'.format(
                draft_id))

        assert draft.is_draft and not draft.is_sent

        recipients = Recipients(draft.to_addr, draft.cc_addr, draft.bcc_addr)
        if not draft.is_reply:
            sendmail_client.send_new(db_session, draft, recipients)
        else:
            sendmail_client.send_reply(db_session, draft, recipients)

        # Update SpoolMessage
        draft.is_sent = True
        draft.is_draft = False
        draft.state = 'sent'

        # Update thread
        sent_tag = account.namespace.tags['sent']
        draft_tag = account.namespace.tags['drafts']
        draft.thread.apply_tag(sent_tag)
        # Remove the drafts tag from the thread if there are no more drafts.
        if not draft.thread.latest_drafts:
            draft.thread.remove_tag(draft_tag)

        db_session.commit()

        delete_draft(account_id, draft.id)

        return draft
def test_save_folder_names(db, default_account, folder_name_mapping):
    """save_folder_names persists exactly the mapped folders for the account."""
    expected = {
        "Inbox",
        "[Gmail]/Spam",
        "[Gmail]/All Mail",
        "[Gmail]/Sent Mail",
        "[Gmail]/Drafts",
        "Jobslist",
        "Random",
    }
    with mailsync_session_scope() as db_session:
        save_folder_names(get_logger(), default_account.id,
                          folder_name_mapping, db_session)
        rows = db_session.query(Folder.name).filter(
            Folder.account_id == default_account.id)
        assert {name for name, in rows} == expected
Exemple #46
0
def start():
    """Per-request setup: bind a DB session, logger, namespace, encoder,
    pagination bounds and message/thread Filter onto flask.g.

    Returns an `err(...)` response early when the namespace is unknown or
    the query parameters are invalid; returns None on success.
    """
    g.db_session = InboxSession(engine)
    g.log = get_logger()

    try:
        g.namespace = g.db_session.query(Namespace) \
            .filter(Namespace.public_id == g.namespace_public_id).one()
        g.encoder = APIEncoder(g.namespace.public_id)
    except NoResultFound:
        return err(404, "Couldn't find namespace with id `{0}` ".format(
            g.namespace_public_id))

    # Pagination: both parameters must parse as nonnegative integers and
    # limit must not exceed the server-wide cap.
    try:
        g.limit = int(request.args.get('limit', DEFAULT_LIMIT))
        g.offset = int(request.args.get('offset', 0))
    except ValueError:
        return err(400, 'limit and offset parameters must be integers')
    if g.limit < 0 or g.offset < 0:
        return err(400, 'limit and offset parameters must be nonnegative '
                        'integers')
    if g.limit > MAX_LIMIT:
        return err(400, 'cannot request more than {} resources at once.'.
                   format(MAX_LIMIT))

    # (Filter kwarg name, query-string parameter name) pairs; the two differ
    # for address fields ('to' -> to_addr) and the thread id.
    param_map = [
        ('subject', 'subject'),
        ('thread_public_id', 'thread'),
        ('to_addr', 'to'),
        ('from_addr', 'from'),
        ('cc_addr', 'cc'),
        ('bcc_addr', 'bcc'),
        ('any_email', 'any_email'),
        ('started_before', 'started_before'),
        ('started_after', 'started_after'),
        ('last_message_before', 'last_message_before'),
        ('last_message_after', 'last_message_after'),
        ('filename', 'filename'),
        ('tag', 'tag'),
        ('order_by', 'order_by'),
    ]
    try:
        filter_kwargs = {kw: request.args.get(arg) for kw, arg in param_map}
        g.api_filter = Filter(namespace_id=g.namespace.id,
                              limit=g.limit,
                              offset=g.offset,
                              db_session=g.db_session,
                              **filter_kwargs)
    except ValueError as e:
        return err(400, e.message)
Exemple #47
0
def start():
    """Per-request setup: bind a DB session, logger, namespace, encoder
    and a pagination RequestParser onto flask.g.

    Returns an `err(404, ...)` response when the namespace is unknown;
    returns None on success.
    """
    g.db_session = InboxSession(engine)
    g.log = get_logger()

    try:
        g.namespace = g.db_session.query(Namespace) \
            .filter(Namespace.public_id == g.namespace_public_id).one()
        g.encoder = APIEncoder(g.namespace.public_id)
    except NoResultFound:
        return err(404, "Couldn't find namespace with id `{0}` ".format(
            g.namespace_public_id))

    # Pagination arguments are validated lazily via the parser; note that
    # `limit` here is a validator callable defined elsewhere in the module.
    parser = reqparse.RequestParser(argument_class=ValidatableArgument)
    parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit,
                        location='args')
    parser.add_argument('offset', default=0, type=int, location='args')
    g.parser = parser
Exemple #48
0
def clear_heartbeat_status(account_id, device_id=None):
    """Remove heartbeat entries for every folder of `account_id`.

    With a `device_id`, only that device's hash field is removed from each
    key; otherwise the whole keys are deleted. All deletions are batched
    through one pipeline. Errors are logged, never raised (best-effort).
    """
    try:
        redis = get_redis_client(STATUS_DATABASE)
        pipeline = redis.pipeline()
        pattern = HeartbeatStatusKey.all_folders(account_id)
        for key in redis.scan_iter(pattern, 100):
            if device_id:
                pipeline.hdel(key, device_id)
            else:
                pipeline.delete(key)
        pipeline.execute()
    except Exception:
        get_logger().error('Error while deleting from the heartbeat status',
                           account_id=account_id,
                           device_id=(device_id or 'all'),
                           exc_info=True)
Exemple #49
0
 def publish(self, **kwargs):
     """Publish this heartbeat's status to Redis.

     Merges `kwargs` into the cached `self.value` dict, stamps
     `heartbeat_at` with the current UTC time, and stores the JSON blob
     in the hash at `self.key` under `self.device_id`. Errors are
     logged, never raised (best-effort publishing).
     """
     try:
         client = get_redis_client()
         _check_redis_schema(**kwargs)
         now = datetime.utcnow()
         # Stamp first so callers may still override via kwargs.
         self.value['heartbeat_at'] = str(now)
         self.value.update(kwargs or {})
         client.hset(self.key, self.device_id, json.dumps(self.value))
         self.heartbeat_at = now
         # NOTE(review): 'action' looks like a one-shot field — it is
         # dropped after publishing so it isn't re-sent next heartbeat;
         # confirm against the consumers of this status blob.
         if 'action' in self.value:
             del self.value['action']
     except Exception:
         log = get_logger()
         log.error('Error while publishing the sync status',
                   account_id=self.key.account_id,
                   folder_id=self.key.folder_id,
                   device_id=self.device_id,
                   exc_info=True)
Exemple #50
0
    def __init__(self, account_id):
        """Snapshot the account's sending settings and credentials.

        Opens a short-lived DB session to copy what's needed so the
        instance doesn't keep a live ORM object around.
        """
        self.account_id = account_id

        with session_scope() as db_session:
            account = db_session.query(ImapAccount).get(account_id)
            self.email_address = account.email_address
            self.provider_name = account.provider
            self.sender_name = account.sender_name
            self.sent_folder = account.sent_folder.name

            # Credential source depends on the provider's auth scheme.
            self.auth_type = provider_info(self.provider_name)['auth']
            if self.auth_type == 'oauth2':
                self.auth_token = account.access_token
            else:
                assert self.auth_type == 'password'
                self.auth_token = account.password

        logger = get_logger()
        logger.bind(account_id=account_id)
        self.log = logger
Exemple #51
0
    def __init__(self, account):
        """Capture SMTP endpoint, identity and credentials from `account`.

        Raises SendMailException(403) when an OAuth token cannot be
        obtained for an oauth2 account.
        """
        self.account_id = account.id

        self.log = get_logger()
        self.log.bind(account_id=account.id)

        self.email_address = account.email_address
        self.provider_name = account.provider
        self.sender_name = account.name
        self.smtp_endpoint = account.smtp_endpoint

        info = provider_info(self.provider_name, self.email_address)
        self.auth_type = info['auth']
        if self.auth_type == 'oauth2':
            try:
                self.auth_token = token_manager.get_token(account)
            except OAuthError:
                raise SendMailException(
                    'Could not authenticate with the SMTP server.', 403)
        else:
            assert self.auth_type == 'password'
            self.auth_token = account.password
Exemple #52
0
    def __init__(self, cpu_id, total_cpus, poll_interval=1):
        """Set up the sync shard for one CPU.

        Builds the provider-name -> monitor-class dispatch table from the
        registered sync modules, falling back to the generic monitor for
        providers that don't declare their own.
        """
        self.keep_running = True
        self.cpu_id = cpu_id
        self.total_cpus = total_cpus
        self.poll_interval = poll_interval

        # Modules that declare SYNC_MONITOR_CLS provide a dedicated monitor.
        monitor_map = {}
        for mod in module_registry.values():
            if hasattr(mod, 'SYNC_MONITOR_CLS'):
                monitor_map[mod.PROVIDER] = getattr(mod, mod.SYNC_MONITOR_CLS)
        # Everything else uses the generic monitor ("generic" is only looked
        # up when actually needed, as in the original).
        for p_name, p in providers.iteritems():
            if p_name not in monitor_map:
                monitor_map[p_name] = monitor_map["generic"]
        self.monitor_cls_for = monitor_map

        self.log = get_logger()
        self.log.bind(cpu_id=cpu_id)
        self.log.info('starting mail sync process',
                      supported_providers=module_registry.keys())

        self.monitors = {}
        self.contact_sync_monitors = {}
        self.event_sync_monitors = {}
Exemple #53
0
def _send(account_id, draft_id, db_session):
    """Send the draft with id = `draft_id`."""
    account = db_session.query(Account).get(account_id)
    log = get_logger()
    client = get_sendmail_client(account)

    # Resolve the draft; lookup failures become SendMailExceptions.
    try:
        draft = db_session.query(Message).filter(Message.id == draft_id).one()
    except NoResultFound:
        log.info('NoResultFound for draft_id {0}'.format(draft_id))
        raise SendMailException('No draft with id {0}'.format(draft_id))
    except MultipleResultsFound:
        log.info('MultipleResultsFound for draft_id {0}'.format(draft_id))
        raise SendMailException('Multiple drafts with id {0}'.format(draft_id))

    # Already sent, or no longer a draft: nothing to do (returns None).
    if not draft.is_draft or draft.is_sent:
        return

    rcpts = Recipients(draft.to_addr, draft.cc_addr, draft.bcc_addr)
    if draft.is_reply:
        client.send_reply(db_session, draft, rcpts)
    else:
        client.send_new(db_session, draft, rcpts)

    # Flip the message from draft state to sent state.
    draft.is_sent = True
    draft.is_draft = False
    draft.state = 'sent'

    # Thread tag bookkeeping: mark sent, drop the drafts tag when no
    # drafts remain on the thread.
    namespace_tags = account.namespace.tags
    draft.thread.apply_tag(namespace_tags['sent'])
    # Remove the drafts tag from the thread if there are no more drafts.
    if not draft.thread.drafts:
        draft.thread.remove_tag(namespace_tags['drafts'])

    return draft
Exemple #54
0
    def __init__(self, hook, max_queue_size=22):
        """Initialize a webhook worker greenlet from a `hook` record.

        Copies the hook's configuration onto the instance so the worker
        doesn't need a live DB object, and sets up bounded delivery and
        retry queues of `max_queue_size` each.
        """
        self.id = hook.id
        self.public_id = hook.public_id
        self.lens = hook.lens
        self.min_processed_id = hook.min_processed_id
        self.include_body = hook.include_body
        self.callback_url = hook.callback_url
        self.failure_notify_url = hook.failure_notify_url
        self.max_retries = hook.max_retries
        self.retry_interval = hook.retry_interval
        self.hook_updated_at = hook.updated_at

        # 'frozen' means that the worker has accumulated too large of a failure
        # backlog, and that we aren't enqueueing new events.
        # This is not to be confused with the 'Webhook.active' attribute: an
        # inactive webhook is one that has been manually suspended, and has no
        # associated worker.
        self.frozen = False

        # Bounded queues: puts block (or fail) once max_queue_size pending
        # events accumulate.
        self.retry_queue = gevent.queue.Queue(max_queue_size)
        self.queue = gevent.queue.Queue(max_queue_size)
        self.log = get_logger()
        gevent.Greenlet.__init__(self)