示例#1
0
文件: gmail.py 项目: rbs-pli/inbox
    def __fetch_g_metadata(self, crispin_client, uids):
        """Fetch Gmail metadata for this folder's messages.

        Reuses metadata saved on a previous run when the recorded
        HIGHESTMODSEQ indicates it is still usable; otherwise refetches
        everything from the server and refreshes the on-disk cache.

        Returns a (metadata, update_uid_count) pair.
        """
        assert self.folder_name == crispin_client.selected_folder_name, \
            "crispin selected folder isn't as expected"

        # Look up the HIGHESTMODSEQ recorded for this folder on a previous
        # run, if any.
        with mailsync_session_scope() as db_session:
            folder_info = common.get_folder_info(
                self.account_id, db_session, self.folder_name)
            cached_highestmodseq = or_none(
                folder_info, lambda info: info.highestmodseq)

        metadata = None
        changed_uid_count = 0
        if cached_highestmodseq is not None:
            # A cached validity exists, so try to reuse the saved metadata.
            metadata, changed_uid_count = self.__retrieve_saved_g_metadata(
                crispin_client, uids, cached_highestmodseq)

        if metadata is None:
            # No usable cache (or first run): pull metadata for every UID
            # from the server and persist it for next time.
            metadata = crispin_client.g_metadata(crispin_client.all_uids())
            set_cache(
                remote_g_metadata_cache_file(self.account_id,
                                             self.folder_name),
                metadata)

        # Record the UIDVALIDITY/HIGHESTMODSEQ that correspond to the saved
        # g_metadata.
        with mailsync_session_scope() as db_session:
            common.update_folder_info(self.account_id, db_session,
                                      self.folder_name,
                                      crispin_client.selected_uidvalidity,
                                      crispin_client.selected_highestmodseq)
            db_session.commit()

        return metadata, changed_uid_count
示例#2
0
文件: gmail.py 项目: mdeng1024/inbox
    def __fetch_g_metadata(self, crispin_client, uids):
        """Return Gmail metadata for this folder's messages.

        Saved metadata is reused when the recorded HIGHESTMODSEQ shows it
        can still be trusted; otherwise all metadata is refetched from the
        server and cached for the next run.
        """
        assert self.folder_name == crispin_client.selected_folder_name, \
            "crispin selected folder isn't as expected"

        # Pull the HIGHESTMODSEQ we stored for this folder last time, if any.
        with mailsync_session_scope() as db_session:
            folder_info = common.get_folder_info(
                self.account_id, db_session, self.folder_name)
            cached_highestmodseq = or_none(
                folder_info, lambda info: info.highestmodseq)

        metadata = None
        if cached_highestmodseq is not None:
            # A cached validity exists, so try to reuse the saved metadata.
            metadata = self.__retrieve_saved_g_metadata(
                crispin_client, uids, cached_highestmodseq)

        if metadata is None:
            # Nothing usable was saved: fetch metadata for all UIDs and
            # cache it for the next run.
            metadata = crispin_client.g_metadata(crispin_client.all_uids())
            set_cache(remote_g_metadata_cache_file(self.account_id,
                                                   self.folder_name),
                      metadata)

        # Persist the UIDVALIDITY/HIGHESTMODSEQ corresponding to the saved
        # g_metadata.
        with mailsync_session_scope() as db_session:
            common.update_folder_info(self.account_id, db_session,
                                      self.folder_name,
                                      crispin_client.selected_uidvalidity,
                                      crispin_client.selected_highestmodseq)
            db_session.commit()

        return metadata
示例#3
0
def get_g_metadata(crispin_client, log, folder_name, uids, syncmanager_lock):
    """Return (metadata, update_uid_count) for `folder_name`.

    Saved metadata is reused when the recorded HIGHESTMODSEQ is still
    current; otherwise all metadata is refetched from the server and the
    on-disk cache is refreshed.
    """
    assert folder_name == crispin_client.selected_folder_name, \
        "crispin selected folder isn't as expected"
    account_id = crispin_client.account_id

    # Load the HIGHESTMODSEQ recorded on a previous sync, if there was one.
    with session_scope(ignore_soft_deletes=False) as db_session:
        folder_info = account.get_folder_info(
            account_id, db_session, folder_name)
        cached_highestmodseq = or_none(
            folder_info, lambda info: info.highestmodseq)

    metadata = None
    changed_uid_count = 0
    if cached_highestmodseq is not None:
        # A cached validity exists, so try to reuse the saved metadata.
        metadata, changed_uid_count = retrieve_saved_g_metadata(
            crispin_client, log, folder_name, uids,
            cached_highestmodseq, syncmanager_lock)

    if metadata is None:
        # No reusable cache: fetch metadata for every UID and save it.
        metadata = crispin_client.g_metadata(crispin_client.all_uids())
        set_cache(remote_g_metadata_cache_file(account_id, folder_name),
                  metadata)

    # Record the UIDVALIDITY/HIGHESTMODSEQ that correspond to the saved
    # g_metadata.
    with session_scope(ignore_soft_deletes=False) as db_session:
        account.update_folder_info(account_id, db_session, folder_name,
                                   crispin_client.selected_uidvalidity,
                                   crispin_client.selected_highestmodseq)
        db_session.commit()

    return metadata, changed_uid_count
示例#4
0
def uidvalidity_cb(account_id, folder_name, select_info):
    """Post-SELECT callback: verify that the server's UIDVALIDITY for the
    folder has not moved past the value we have cached.

    Returns select_info unchanged on success; raises UidInvalid otherwise.
    """
    assert folder_name is not None and select_info is not None, \
        "must start IMAP session before verifying UIDVALIDITY"
    with session_scope(account_id) as db_session:
        cached_info = common.get_folder_info(account_id, db_session,
                                             folder_name)
        cached_uidvalidity = or_none(cached_info,
                                     lambda info: info.uidvalidity)
    remote_uidvalidity = select_info["UIDVALIDITY"]
    # Invalid only when we have a cached value and the remote one grew
    # past it; a missing cached value is treated as valid.
    if cached_info and cached_uidvalidity is not None \
            and remote_uidvalidity > cached_uidvalidity:
        raise UidInvalid(
            "folder: {}, remote uidvalidity: {}, "
            "cached uidvalidity: {}".format(
                folder_name.encode("utf-8"),
                remote_uidvalidity, cached_uidvalidity)
        )
    return select_info
示例#5
0
def uidvalidity_cb(account_id, folder_name, select_info):
    """Callback run after SELECT: reject the session if the server's
    UIDVALIDITY no longer matches what we have cached."""
    assert folder_name is not None and select_info is not None, \
        "must start IMAP session before verifying UIDVALIDITY"
    with mailsync_session_scope() as db_session:
        cached_info = common.get_folder_info(account_id, db_session,
                                             folder_name)
        cached_uidvalidity = or_none(cached_info,
                                     lambda info: info.uidvalidity)
    remote_uidvalidity = select_info['UIDVALIDITY']
    # Only enforce validity when we actually have cached folder state.
    if cached_info and not common.uidvalidity_valid(
            account_id, remote_uidvalidity, folder_name, cached_uidvalidity):
        raise UidInvalid('folder: {}, remote uidvalidity: {}, '
                         'cached uidvalidity: {}'.format(
                             folder_name.encode('utf-8'),
                             remote_uidvalidity, cached_uidvalidity))
    return select_info
示例#6
0
def uidvalidity_cb(account_id, folder_name, select_info):
    """Callback run after SELECT: raise UidInvalid when the server reports
    a UIDVALIDITY greater than the cached one."""
    assert folder_name is not None and select_info is not None, \
        "must start IMAP session before verifying UIDVALIDITY"
    with session_scope(account_id) as db_session:
        cached_info = common.get_folder_info(account_id, db_session,
                                             folder_name)
        cached_uidvalidity = or_none(cached_info,
                                     lambda info: info.uidvalidity)
    remote_uidvalidity = select_info["UIDVALIDITY"]
    if cached_info:
        # Valid when no UIDVALIDITY was ever recorded, or when the remote
        # value has not grown past the recorded one.
        if cached_uidvalidity is not None and \
                remote_uidvalidity > cached_uidvalidity:
            raise UidInvalid("folder: {}, remote uidvalidity: {}, "
                             "cached uidvalidity: {}".format(
                                 folder_name.encode("utf-8"),
                                 remote_uidvalidity, cached_uidvalidity))
    return select_info
示例#7
0
def uidvalidity_cb(account_id, folder_name, select_info):
    """Post-SELECT callback: raise UidInvalid when the cached UIDVALIDITY
    is no longer valid for this folder."""
    assert folder_name is not None and select_info is not None, \
        "must start IMAP session before verifying UIDVALIDITY"
    with mailsync_session_scope() as db_session:
        cached_info = common.get_folder_info(account_id, db_session,
                                             folder_name)
        cached_uidvalidity = or_none(cached_info,
                                     lambda info: info.uidvalidity)
    remote_uidvalidity = select_info['UIDVALIDITY']
    # Only enforce validity when we actually have cached folder state.
    if cached_info and not common.uidvalidity_valid(
            account_id, remote_uidvalidity, folder_name, cached_uidvalidity):
        raise UidInvalid(
            'folder: {}, remote uidvalidity: {}, '
            'cached uidvalidity: {}'.format(
                folder_name, remote_uidvalidity, cached_uidvalidity))
    return select_info
示例#8
0
    def fn(folder_name, select_info):
        """Post-SELECT callback (closes over account_id): raise UidInvalid
        when the cached UIDVALIDITY is no longer valid for the folder."""
        assert folder_name is not None and select_info is not None, \
            "must start IMAP session before verifying UIDVALIDITY"
        with session_scope(ignore_soft_deletes=False) as db_session:
            cached_info = account.get_folder_info(
                account_id, db_session, folder_name)
            cached_uidvalidity = or_none(cached_info,
                                         lambda info: info.uidvalidity)
        remote_uidvalidity = select_info['UIDVALIDITY']

        # Only enforce validity when cached folder state exists.
        if cached_info and not account.uidvalidity_valid(
                account_id, remote_uidvalidity,
                folder_name, cached_uidvalidity):
            raise UidInvalid('folder: {}, remote uidvalidity: {}, '
                             'cached uidvalidity: {}'.format(
                                 folder_name, remote_uidvalidity,
                                 cached_uidvalidity))
        return select_info
示例#9
0
def poll(account_id, provider):
    """Query a remote contacts provider for updates and persist them to the
    database.

    For each remote contact we keep two DB rows: a 'remote' snapshot (state
    as of the last sync) and a 'local' row (which may carry local edits).
    Remote deletions purge both rows; remote updates are three-way merged
    into the local row.

    Parameters
    ----------
    account_id: int
        ID for the account whose contacts should be queried.
    provider: Interface to the remote contact data provider.
        Must have a PROVIDER_NAME attribute and implement the get_contacts()
        method.
    """
    log = logger.new(account_id=account_id)
    provider_name = provider.PROVIDER_NAME
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)
        change_counter = Counter()
        # Hand the provider an ISO-8601 timestamp of the last sync (None on
        # the first run) so it can return only contacts changed since then.
        last_sync = or_none(account.last_synced_contacts,
                            datetime.datetime.isoformat)
        to_commit = []
        for remote_contact in provider.get_contacts(last_sync):
            remote_contact.account = account
            assert remote_contact.uid is not None, \
                'Got remote contact with null uid'
            assert isinstance(remote_contact.uid, str)
            matching_contacts = db_session.query(Contact).filter(
                Contact.account == account,
                Contact.provider_name == provider_name,
                Contact.uid == remote_contact.uid)
            # Snapshot of contact data from immediately after last sync:
            cached_contact = matching_contacts. \
                filter(Contact.source == 'remote').first()
            # Contact data reflecting any local modifications since the last
            # sync with the remote provider:
            local_contact = matching_contacts. \
                filter(Contact.source == 'local').first()
            # If the remote contact was deleted, purge the corresponding
            # database entries.
            if remote_contact.deleted:
                if cached_contact is not None:
                    db_session.delete(cached_contact)
                    change_counter['deleted'] += 1
                if local_contact is not None:
                    db_session.delete(local_contact)
                continue
            # Otherwise, update the database.
            if cached_contact is not None:
                # The provider gave an update to a contact we already have.
                if local_contact is not None:
                    try:
                        # Attempt to merge remote updates into local_contact
                        merge(cached_contact, remote_contact, local_contact)
                        # And update cached_contact to reflect both local and
                        # remote updates
                        cached_contact.copy_from(local_contact)
                    except MergeError:
                        log.error('Conflicting local and remote updates to '
                                  'contact.',
                                  local=local_contact, cached=cached_contact,
                                  remote=remote_contact)
                        # TODO(emfree): Come up with a strategy for handling
                        # merge conflicts. For now, just don't update if there
                        # is a conflict.
                        continue
                else:
                    # A remote snapshot without a local counterpart is an
                    # inconsistent state; resync the snapshot from provider
                    # data and warn.
                    log.warning('Contact already present as remote but not '
                                'local contact', cached_contact=cached_contact)
                    cached_contact.copy_from(remote_contact)
                change_counter['updated'] += 1
            else:
                # This is a new contact, create both local and remote DB
                # entries.
                local_contact = Contact()
                local_contact.copy_from(remote_contact)
                local_contact.source = 'local'
                to_commit.append(local_contact)
                to_commit.append(remote_contact)
                change_counter['added'] += 1

        account.last_synced_contacts = datetime.datetime.now()

        log.info('added contacts', count=change_counter['added'])
        log.info('updated contacts', count=change_counter['updated'])
        log.info('deleted contacts', count=change_counter['deleted'])

        db_session.add_all(to_commit)
        db_session.commit()
示例#10
0
文件: crispin.py 项目: wmv/inbox
 def selected_uidvalidity(self):
     """UIDVALIDITY of the currently selected folder, or None when no
     folder is selected."""
     folder_info = self.selected_folder_info
     return or_none(folder_info, lambda info: info['UIDVALIDITY'])
示例#11
0
文件: crispin.py 项目: wmv/inbox
 def selected_highestmodseq(self):
     """HIGHESTMODSEQ of the currently selected folder, or None when no
     folder is selected."""
     folder_info = self.selected_folder_info
     return or_none(folder_info, lambda info: info['HIGHESTMODSEQ'])
示例#12
0
文件: crispin.py 项目: wmv/inbox
 def selected_folder_name(self):
     """Name of the currently selected folder (first element of the
     selected_folder pair), or None when no folder is selected."""
     folder = self.selected_folder
     return or_none(folder, lambda pair: pair[0])
示例#13
0
文件: crispin.py 项目: wmv/inbox
 def selected_folder_info(self):
     """Info mapping of the currently selected folder (second element of
     the selected_folder pair), or None when no folder is selected."""
     folder = self.selected_folder
     return or_none(folder, lambda pair: pair[1])
示例#14
0
文件: message.py 项目: jre21/inbox
def parse_email_address_list(email_addresses):
    """Parse an address-list string into (display_name, address) tuples.

    Display names have surrounding quotes stripped. Entries for which
    address.parse_list yields None stay None in the result.
    """
    def to_pair(parsed_addr):
        return (strip_quotes(parsed_addr.display_name), parsed_addr.address)

    return [or_none(parsed_addr, to_pair)
            for parsed_addr in address.parse_list(email_addresses)]
示例#15
0
def poll(account_id, provider):
    """Query a remote contacts provider for updates and persist them to the
    database.

    For each remote contact we keep two DB rows: a 'remote' snapshot (state
    as of the last sync) and a 'local' row (which may carry local edits).
    Remote deletions purge both rows; remote updates are three-way merged
    into the local row.

    Parameters
    ----------
    account_id: int
        ID for the account whose contacts should be queried.
    provider: Interface to the remote contact data provider.
        Must have a PROVIDER_NAME attribute and implement the get_contacts()
        method.

    NOTE(review): `log` is referenced below but never defined in this
    function -- presumably a module-level logger; confirm it exists at
    import scope, otherwise the error/warning paths raise NameError.
    """
    provider_name = provider.PROVIDER_NAME
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)
        change_counter = Counter()
        # Hand the provider an ISO-8601 timestamp of the last sync (None on
        # the first run) so it can return only contacts changed since then.
        last_sync = or_none(account.last_synced_contacts,
                            datetime.datetime.isoformat)
        to_commit = []
        for remote_contact in provider.get_contacts(last_sync):
            remote_contact.account = account
            assert remote_contact.uid is not None, \
                'Got remote contact with null uid'
            assert isinstance(remote_contact.uid, str)
            matching_contacts = db_session.query(Contact).filter(
                Contact.account == account,
                Contact.provider_name == provider_name,
                Contact.uid == remote_contact.uid)
            # Snapshot of contact data from immediately after last sync:
            cached_contact = matching_contacts. \
                filter(Contact.source == 'remote').first()
            # Contact data reflecting any local modifications since the last
            # sync with the remote provider:
            local_contact = matching_contacts. \
                filter(Contact.source == 'local').first()
            # If the remote contact was deleted, purge the corresponding
            # database entries.
            if remote_contact.deleted:
                if cached_contact is not None:
                    db_session.delete(cached_contact)
                    change_counter['deleted'] += 1
                if local_contact is not None:
                    db_session.delete(local_contact)
                continue
            # Otherwise, update the database.
            if cached_contact is not None:
                # The provider gave an update to a contact we already have.
                if local_contact is not None:
                    try:
                        # Attempt to merge remote updates into local_contact
                        merge(cached_contact, remote_contact, local_contact)
                        # And update cached_contact to reflect both local and
                        # remote updates
                        cached_contact.copy_from(local_contact)
                    except MergeError:
                        log.error('Conflicting local and remote updates to '
                                  'contact.\nLocal: {0}\ncached: {1}\n '
                                  'remote: {2}'.format(local_contact,
                                                       cached_contact,
                                                       remote_contact))
                        # TODO(emfree): Come up with a strategy for handling
                        # merge conflicts. For now, just don't update if there
                        # is a conflict.
                        continue
                else:
                    # A remote snapshot without a local counterpart is an
                    # inconsistent state; resync the snapshot from provider
                    # data and warn.
                    log.warning('Contact {0} already present as remote but '
                                'not local contact'.format(cached_contact))
                    cached_contact.copy_from(remote_contact)
                change_counter['updated'] += 1
            else:
                # This is a new contact, create both local and remote DB
                # entries.
                local_contact = Contact()
                local_contact.copy_from(remote_contact)
                local_contact.source = 'local'
                to_commit.append(local_contact)
                to_commit.append(remote_contact)
                change_counter['added'] += 1

        account.last_synced_contacts = datetime.datetime.now()

        log.info('Added {0} contacts.'.format(change_counter['added']))
        log.info('Updated {0} contacts.'.format(change_counter['updated']))
        log.info('Deleted {0} contacts.'.format(change_counter['deleted']))

        db_session.add_all(to_commit)
        db_session.commit()
示例#16
0
 def selected_uidvalidity(self):
     """Return the selected folder's UIDVALIDITY, or None if nothing is
     selected."""
     info = self.selected_folder_info
     return or_none(info, lambda folder_info: folder_info['UIDVALIDITY'])
示例#17
0
 def selected_folder_name(self):
     """Return the selected folder's name, or None if nothing is
     selected."""
     selected = self.selected_folder
     return or_none(selected, lambda folder: folder[0])
示例#18
0
def base_poll(account_id, provider_instance, last_sync_fn, target_obj,
              set_last_sync_fn):
    """Query a remote provider for updates and persist them to the
    database.

    For each remote item we keep two DB rows: a 'remote' snapshot (state as
    of the last sync) and a 'local' row (which may carry local edits).
    Remote deletions purge both rows; remote updates are merged into the
    local row.

    Parameters
    ----------
    account_id: int
        ID for the account whose items should be queried.
    provider_instance: Interface to the remote item data provider.
        Must have a PROVIDER_NAME attribute and implement the get_items()
        method.
    last_sync_fn: callable
        Given the account, returns the datetime of the last sync (or None
        on the first run).
    target_obj: type
        Mapped class of the synced items (must expose namespace,
        provider_name, uid and source attributes).
    set_last_sync_fn: callable
        Given the account, records that a sync just completed.
    """

    log = logger.new(account_id=account_id)
    provider_name = provider_instance.PROVIDER_NAME
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)
        # ISO-8601 timestamp of the last sync (None on the first run) so
        # the provider can return only items changed since then.
        last_sync = or_none(last_sync_fn(account),
                            datetime.datetime.isoformat)

    items = provider_instance.get_items(last_sync)
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)
        change_counter = Counter()
        to_commit = []
        for item in items:
            item.namespace = account.namespace
            assert item.uid is not None, \
                'Got remote item with null uid'
            assert isinstance(item.uid, str)

            matching_items = db_session.query(target_obj).filter(
                target_obj.namespace == account.namespace,
                target_obj.provider_name == provider_name,
                target_obj.uid == item.uid)
            # Snapshot of item data from immediately after last sync:
            cached_item = matching_items. \
                filter(target_obj.source == 'remote').first()

            # Item data reflecting any local modifications since the last
            # sync with the remote provider:
            local_item = matching_items. \
                filter(target_obj.source == 'local').first()
            # If the remote item was deleted, purge the corresponding
            # database entries.
            if item.deleted:
                if cached_item is not None:
                    db_session.delete(cached_item)
                    change_counter['deleted'] += 1
                if local_item is not None:
                    db_session.delete(local_item)
                continue
            # Otherwise, update the database.
            if cached_item is not None:
                # The provider gave an update to an item we already have.
                if local_item is not None:
                    try:
                        # Attempt to merge remote updates into local_item
                        local_item.merge_from(cached_item, item)
                        # And update cached_item to reflect both local and
                        # remote updates
                        cached_item.copy_from(local_item)

                    except MergeError:
                        # Fixed: the two string fragments previously
                        # concatenated to "updatesto item."
                        log.error('Conflicting local and remote updates '
                                  'to item.',
                                  local=local_item, cached=cached_item,
                                  remote=item)
                        # For now, just don't update if conflicting.
                        continue
                else:
                    # A remote snapshot without a local counterpart is an
                    # inconsistent state; resync it from provider data.
                    log.warning('Item already present as remote but not '
                                'local item', cached_item=cached_item)
                    cached_item.copy_from(item)
                change_counter['updated'] += 1
            else:
                # This is a new item, create both local and remote DB
                # entries.
                local_item = target_obj()
                local_item.copy_from(item)
                local_item.source = 'local'
                to_commit.append(item)
                to_commit.append(local_item)
                change_counter['added'] += 1

        set_last_sync_fn(account)

        log.info('sync', added=change_counter['added'],
                 updated=change_counter['updated'],
                 deleted=change_counter['deleted'])

        db_session.add_all(to_commit)
        db_session.commit()
示例#19
0
文件: message.py 项目: cenk/inbox
def create_message(db_session, log, account, mid, folder_name, received_date,
        flags, raw_message):
    """ Parses message data, creates metadata database entries, and writes mail
        parts to disk.

        Returns the new Message, which links to the new Block objects through
        relationships. All new objects are uncommitted. Returns None if the
        raw message could not be decoded.

        Threads are not computed here; you gotta do that separately.

        Parameters
        ----------
        mid : int
            The account backend-specific message identifier; it's only used for
            logging errors.

        raw_message : str
            The full message including headers.

        NOTE(review): `db_session` and `flags` are accepted but never used in
        this body -- confirm against callers before removing.
    """
    # trickle-down bugs
    assert account is not None and account.namespace is not None
    try:
        parsed = mime.from_string(raw_message)

        mime_version = parsed.headers.get('Mime-Version')
        # NOTE: sometimes MIME-Version is set to "1.0 (1.0)", hence the .startswith
        if mime_version is not None and not mime_version.startswith('1.0'):
            log.error("Unexpected MIME-Version: %s" % mime_version)

        new_msg = Message()
        new_msg.data_sha256 = sha256(raw_message).hexdigest()

        # clean_subject strips re:, fwd: etc.
        new_msg.subject = parsed.clean_subject
        new_msg.from_addr = parse_email_address(parsed.headers.get('From'))
        new_msg.sender_addr = parse_email_address(parsed.headers.get('Sender'))
        new_msg.reply_to = parse_email_address(parsed.headers.get('Reply-To'))
        # Drop To/Cc/Bcc entries for which parse_email_address returned None.
        new_msg.to_addr = or_none(parsed.headers.getall('To'),
                lambda tos: filter(lambda p: p is not None,
                    [parse_email_address(t) for t in tos]))
        new_msg.cc_addr = or_none(parsed.headers.getall('Cc'),
                lambda ccs: filter(lambda p: p is not None,
                    [parse_email_address(c) for c in ccs]))
        new_msg.bcc_addr = or_none(parsed.headers.getall('Bcc'),
                lambda bccs: filter(lambda p: p is not None,
                    [parse_email_address(c) for c in bccs]))
        new_msg.in_reply_to = parsed.headers.get('In-Reply-To')
        new_msg.message_id_header = parsed.headers.get('Message-Id')

        new_msg.received_date = received_date

        # Optional mailing list headers
        new_msg.mailing_list_headers = parse_ml_headers(parsed.headers)

        new_msg.size = len(raw_message)  # includes headers text

        i = 0  # for walk_index

        # Store all message headers as object with index 0
        headers_part = Block()
        headers_part.message = new_msg
        headers_part.walk_index = i
        headers_part._data = json.dumps(parsed.headers.items())
        headers_part.size = len(headers_part._data)
        headers_part.data_sha256 = sha256(headers_part._data).hexdigest()
        new_msg.parts.append(headers_part)

        # Walk all leaf MIME parts; a singlepart message yields itself.
        for mimepart in parsed.walk(
                with_self=parsed.content_type.is_singlepart()):
            i += 1
            if mimepart.content_type.is_multipart():
                log.warning("multipart sub-part found! on {0}".format(new_msg.g_msgid))
                continue  # TODO should we store relations?

            new_part = Block()
            new_part.message = new_msg
            new_part.walk_index = i
            new_part.misc_keyval = mimepart.headers.items()  # everything
            new_part.content_type = mimepart.content_type.value
            new_part.filename = mimepart.content_type.params.get('name')

            # Content-Disposition attachment; filename="floorplan.gif"
            if mimepart.content_disposition[0] is not None:
                value, params = mimepart.content_disposition
                if value not in ['inline', 'attachment']:
                    # Fixed: the template references {3} but only three
                    # arguments were passed, so this branch raised
                    # IndexError instead of logging; pass `value` too.
                    errmsg = """
    Unknown Content-Disposition on message {0} found in {1}.
    Bad Content-Disposition was: '{2}'
    Parsed Content-Disposition was: '{3}'""".format(mid, folder_name, value,
        mimepart.content_disposition)
                    log.error(errmsg)
                    continue
                else:
                    new_part.content_disposition = value
                    if value == 'attachment':
                        new_part.filename = params.get('filename')

            if mimepart.body is None:
                data_to_write = ''
            elif new_part.content_type.startswith('text'):
                data_to_write = mimepart.body.encode('utf-8', 'strict')
            else:
                data_to_write = mimepart.body
            if data_to_write is None:
                data_to_write = ''
            # normalize mac/win/unix newlines
            data_to_write = data_to_write \
                    .replace('\r\n', '\n').replace('\r', '\n')

            new_part.content_id = mimepart.headers.get('Content-Id')

            new_part._data = data_to_write
            new_part.size = len(data_to_write)
            new_part.data_sha256 = sha256(data_to_write).hexdigest()
            new_msg.parts.append(new_part)
    except (mime.DecodingError, RuntimeError):
        # occasionally iconv will fail via maximum recursion depth
        log_decode_error(account.id, folder_name, mid, raw_message)
        log.error("DecodeError encountered, unparseable message logged to {0}" \
                .format(get_errfilename(account.id, folder_name, mid)))
        return
    new_msg.calculate_sanitized_body()

    return new_msg
示例#20
0
def create_message(db_session, log, account, folder_name, uid, internaldate,
        flags, body):
    """ Parses message data, creates metadata database entries, and writes mail
        parts to disk.

        Returns the new ImapUid, which links to new Message and Block
        objects through relationships. All new objects are uncommitted.
        Returns None if the raw message is unparseable; in that case the
        raw data is logged to an error file for later inspection.

        Threads are not computed here; you gotta do that separately.

        This is the one function in this file that gets to take an account
        object instead of an account_id, because we need to relate the
        account to ImapUids for versioning to work, since it needs to look
        up the namespace.
    """
    # trickle-down bugs
    assert account is not None and account.namespace is not None
    try:
        parsed = mime.from_string(body)

        mime_version = parsed.headers.get('Mime-Version')
        # NOTE: sometimes MIME-Version is set to "1.0 (1.0)", hence the .startswith
        if mime_version is not None and not mime_version.startswith('1.0'):
            log.error("Unexpected MIME-Version: %s" % mime_version)

        new_msg = Message()
        new_msg.data_sha256 = sha256(body).hexdigest()

        # clean_subject strips re:, fwd: etc.
        new_msg.subject = parsed.clean_subject
        new_msg.from_addr = parse_email_address(parsed.headers.get('From'))
        new_msg.sender_addr = parse_email_address(parsed.headers.get('Sender'))
        new_msg.reply_to = parse_email_address(parsed.headers.get('Reply-To'))
        # Recipient headers may occur multiple times; drop any entries for
        # which parse_email_address returned None.
        new_msg.to_addr = or_none(parsed.headers.getall('To'),
                lambda tos: filter(lambda p: p is not None,
                    [parse_email_address(t) for t in tos]))
        new_msg.cc_addr = or_none(parsed.headers.getall('Cc'),
                lambda ccs: filter(lambda p: p is not None,
                    [parse_email_address(c) for c in ccs]))
        new_msg.bcc_addr = or_none(parsed.headers.getall('Bcc'),
                lambda bccs: filter(lambda p: p is not None,
                    [parse_email_address(c) for c in bccs]))
        new_msg.in_reply_to = parsed.headers.get('In-Reply-To')
        new_msg.message_id = parsed.headers.get('Message-Id')

        new_msg.internaldate = internaldate

        # Optional mailing list headers
        new_msg.mailing_list_headers = parse_ml_headers(parsed.headers)

        imapuid = ImapUid(imapaccount=account, folder_name=folder_name,
                msg_uid=uid, message=new_msg)
        imapuid.update_flags(flags)

        new_msg.size = len(body)  # includes headers text

        i = 0  # for walk_index

        # Store all message headers as object with index 0
        headers_part = Block()
        headers_part.message = new_msg
        headers_part.walk_index = i
        headers_part._data = json.dumps(parsed.headers.items())
        headers_part.size = len(headers_part._data)
        headers_part.data_sha256 = sha256(headers_part._data).hexdigest()
        new_msg.parts.append(headers_part)

        for mimepart in parsed.walk(
                with_self=parsed.content_type.is_singlepart()):
            i += 1
            if mimepart.content_type.is_multipart():
                # NOTE(review): g_msgid is never assigned in this function,
                # so this likely logs None -- confirm against callers.
                log.warning("multipart sub-part found! on {0}".format(new_msg.g_msgid))
                continue  # TODO should we store relations?

            new_part = Block()
            new_part.message = new_msg
            new_part.walk_index = i
            new_part.misc_keyval = mimepart.headers.items()  # everything
            new_part.content_type = mimepart.content_type.value
            new_part.filename = mimepart.content_type.params.get('name')

            # Content-Disposition attachment; filename="floorplan.gif"
            if mimepart.content_disposition[0] is not None:
                value, params = mimepart.content_disposition
                if value not in ['inline', 'attachment']:
                    # Skip parts whose disposition we don't understand.
                    # (Fixed: the template previously referenced {3} with
                    # only three .format() args, raising IndexError here.)
                    errmsg = """
    Unknown Content-Disposition on message {0} found in {1}.
    Parsed Content-Disposition was: '{2}'""".format(uid, folder_name,
        mimepart.content_disposition)
                    log.error(errmsg)
                    continue
                else:
                    new_part.content_disposition = value
                    if value == 'attachment':
                        new_part.filename = params.get('filename')

            if mimepart.body is None:
                data_to_write = ''
            elif new_part.content_type.startswith('text'):
                data_to_write = mimepart.body.encode('utf-8', 'strict')
            else:
                data_to_write = mimepart.body
            if data_to_write is None:
                data_to_write = ''
            # normalize mac/win/unix newlines
            data_to_write = data_to_write \
                    .replace('\r\n', '\n').replace('\r', '\n')

            new_part.content_id = mimepart.headers.get('Content-Id')

            new_part._data = data_to_write
            new_part.size = len(data_to_write)
            new_part.data_sha256 = sha256(data_to_write).hexdigest()
            new_msg.parts.append(new_part)
    except (mime.DecodingError, RuntimeError):
        # RuntimeError included because iconv occasionally fails via
        # maximum recursion depth (see the raw-message variant of this
        # routine elsewhere in the codebase).
        log_decode_error(account.id, folder_name, uid, body)
        log.error("DecodeError encountered, unparseable message logged to {0}" \
                .format(get_errfilename(account.id, folder_name, uid)))
        return
    new_msg.calculate_sanitized_body()

    return imapuid
示例#21
0
 def selected_folder_info(self):
     """Metadata for the folder currently selected on this connection.

     selected_folder appears to be a (name, info) pair; this returns the
     info element, routed through or_none to cover the no-selection case.
     """
     folder = self.selected_folder
     return or_none(folder, lambda pair: pair[1])
示例#22
0
def base_poll(account_id, provider_instance, last_sync_fn, target_obj,
              set_last_sync_fn):
    """Query a remote provider for updates and persist them to the
    database.

    Parameters
    ----------
    account_id: int
        ID for the account whose items should be queried.
    provider_instance:
        Interface to the remote item data provider. Must have a
        PROVIDER_NAME attribute and implement the get_items() method.
    last_sync_fn: callable
        Given the account, returns the datetime of the last successful
        sync, or None if there has been none.
    target_obj: type
        Mapped class (the item model) to query and instantiate.
    set_last_sync_fn: callable
        Given the account, records that a sync has just completed.
    """
    log = logger.new(account_id=account_id)
    provider_name = provider_instance.PROVIDER_NAME
    with session_scope() as db_session:
        account = db_session.query(Account).get(account_id)
        last_sync = or_none(last_sync_fn(account),
                            datetime.datetime.isoformat)

    items = provider_instance.get_items(last_sync)
    with session_scope() as db_session:
        # Re-fetch the account: the previous session is closed.
        account = db_session.query(Account).get(account_id)
        change_counter = Counter()
        to_commit = []
        for item in items:
            item.namespace = account.namespace
            assert item.uid is not None, \
                'Got remote item with null uid'
            assert isinstance(item.uid, str)

            matching_items = db_session.query(target_obj).filter(
                target_obj.namespace == account.namespace,
                target_obj.provider_name == provider_name,
                target_obj.uid == item.uid)
            # Snapshot of item data from immediately after last sync:
            cached_item = matching_items. \
                filter(target_obj.source == 'remote').first()

            # Item data reflecting any local modifications since the last
            # sync with the remote provider:
            local_item = matching_items. \
                filter(target_obj.source == 'local').first()
            # If the remote item was deleted, purge the corresponding
            # database entries.
            if item.deleted:
                if cached_item is not None:
                    db_session.delete(cached_item)
                    change_counter['deleted'] += 1
                if local_item is not None:
                    db_session.delete(local_item)
                continue
            # Otherwise, update the database.
            if cached_item is not None:
                # The provider gave an update to an item we already have.
                if local_item is not None:
                    try:
                        # Attempt to merge remote updates into local_item
                        local_item.merge_from(cached_item, item)
                        # And update cached_item to reflect both local and
                        # remote updates
                        cached_item.copy_from(local_item)

                    except MergeError:
                        log.error('Conflicting local and remote updates '
                                  'to item.',
                                  local=local_item, cached=cached_item,
                                  remote=item)
                        # For now, just don't update if conflicting
                        continue
                else:
                    log.warning('Item already present as remote but not '
                                'local item', cached_item=cached_item)
                    cached_item.copy_from(item)
                change_counter['updated'] += 1
            else:
                # This is a new item, create both local and remote DB
                # entries.
                local_item = target_obj()
                local_item.copy_from(item)
                local_item.source = 'local'
                to_commit.append(item)
                to_commit.append(local_item)
                change_counter['added'] += 1

        set_last_sync_fn(account)

        log.info('sync', added=change_counter['added'],
                 updated=change_counter['updated'],
                 deleted=change_counter['deleted'])

        db_session.add_all(to_commit)
        db_session.commit()
示例#23
0
 def selected_uidnext(self):
     """UIDNEXT value from the selected folder's info dict.

     Routed through or_none so the no-selection case is handled the same
     way as the other selected_* accessors.
     """
     info = self.selected_folder_info
     return or_none(info, lambda folder_info: folder_info.get('UIDNEXT'))
示例#24
0
 def selected_uidnext(self):
     """UIDNEXT value from the selected folder's info dict.

     Routed through or_none so the no-selection case is handled the same
     way as the other selected_* accessors.
     """
     info = self.selected_folder_info
     return or_none(info, lambda folder_info: folder_info.get('UIDNEXT'))
示例#25
0
 def selected_highestmodseq(self):
     """HIGHESTMODSEQ value from the selected folder's info dict.

     Uses direct subscripting (not .get), so a selected folder whose info
     lacks the key raises KeyError -- same as the original.
     """
     info = self.selected_folder_info
     return or_none(info, lambda folder_info: folder_info['HIGHESTMODSEQ'])
示例#26
0
File: addr.py Project: wmv/inbox
def parse_email_address_list(email_addresses):
    """Parse a header value containing zero or more email addresses.

    Each parsed entry is mapped through or_none: non-None entries become a
    (display_name, address) pair with quotes stripped from the display
    name; None entries pass through as None.
    """
    entries = address.parse_list(email_addresses)
    result = []
    for entry in entries:
        result.append(or_none(entry, lambda p:
                (strip_quotes(p.display_name), p.address)))
    return result