Example #1
class ConfigManager(ConfigDict):
    """
    This class manages the live global mailpile configuration. This includes
    the settings themselves, as well as global objects like the index and
    references to any background worker threads.
    """
    DEFAULT_WORKDIR = os.environ.get('MAILPILE_HOME',
                                     os.path.expanduser('~/.mailpile'))

    def __init__(self, workdir=None, rules={}):
        ConfigDict.__init__(self, _rules=rules, _magic=False)

        self.workdir = workdir or self.DEFAULT_WORKDIR
        self.conffile = os.path.join(self.workdir, 'mailpile.cfg')

        self.plugins = None
        self.background = None
        self.cron_worker = None
        self.http_worker = None
        self.dumb_worker = self.slow_worker = DumbWorker('Dumb worker', None)
        self.other_workers = []

        self.jinja_env = None

        self.event_log = None
        self.index = None
        self.vcards = {}
        self._mbox_cache = {}
        self._running = {}

        self._magic = True  # Enable the getattr/getitem magic

    def _mkworkdir(self, session):
        if not os.path.exists(self.workdir):
            if session:
                session.ui.notify(_('Creating: %s') % self.workdir)
            os.mkdir(self.workdir)

    def parse_config(self, session, data, source='internal'):
        """
        Parse a config file fragment. Invalid data will be ignored, but will
        generate warnings in the session UI. Returns True on a clean parse,
        False if any of the settings were bogus.

        >>> cfg.parse_config(session, '[config/sys]\\nfd_cache_size = 123\\n')
        True
        >>> cfg.sys.fd_cache_size
        123

        >>> cfg.parse_config(session, '[config/bogus]\\nblabla = bla\\n')
        False
        >>> [l[1] for l in session.ui.log_buffer if 'bogus' in l[1]][0]
        'Invalid (internal): section config/bogus does not exist'

        >>> cfg.parse_config(session, '[config/sys]\\nhistory_length = 321\\n'
        ...                                          'bogus_variable = 456\\n')
        False
        >>> cfg.sys.history_length
        321
        >>> [l[1] for l in session.ui.log_buffer if 'bogus_var' in l[1]][0]
        u'Invalid (internal): section config/sys, ...

        >>> cfg.parse_config(session, '[config/tags/a]\\nname = TagName\\n')
        True
        >>> cfg.tags['a']._key
        'a'
        >>> cfg.tags['a'].name
        u'TagName'
        """
        parser = CommentedEscapedConfigParser()
        parser.readfp(io.BytesIO(str(data)))

        def item_sorter(i):
            try:
                return (int(i[0], 36), i[1])
            except:
                return i

        all_okay = True
        for section in parser.sections():
            okay = True
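            # Section names address the config tree by path: "[config/sys]"
            # walks to cfg.sys and "[config/tags/a]" to cfg.tags['a']; any
            # ":suffix" on the section name is treated as a comment and dropped.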
            cfgpath = section.split(':')[0].split('/')[1:]
            cfg = self
            added_parts = []
            for part in cfgpath:
                if cfg.fmt_key(part) in cfg.keys():
                    cfg = cfg[part]
                elif '_any' in cfg.rules:
                    cfg[part] = {}
                    cfg = cfg[part]
                else:
                    if session:
                        msg = _('Invalid (%s): section %s does not '
                                'exist') % (source, section)
                        session.ui.warning(msg)
                    all_okay = okay = False
            items = parser.items(section) if okay else []
            items.sort(key=item_sorter)
            for var, val in items:
                try:
                    cfg[var] = val
                except (ValueError, KeyError, IndexError):
                    if session:
                        msg = _(u'Invalid (%s): section %s, variable %s=%s'
                                ) % (source, section, var, val)
                        session.ui.warning(msg)
                    all_okay = okay = False
        return all_okay

    def load(self, session, filename=None):
        self._mkworkdir(session)
        self.index = None
        self.reset(rules=False, data=True)

        filename = filename or self.conffile
        lines = []
        try:
            with open(filename, 'rb') as fd:
                decrypt_and_parse_lines(fd, lambda l: lines.append(l), None)
        except ValueError:
            pass
        except IOError:
            pass

        # Discover plugins and update the config rule to match
        from mailpile.plugins import PluginManager
        self.plugins = PluginManager(config=self, builtin=True).discover([
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         '..', 'plugins'),
            os.path.join(self.workdir, 'plugins')
        ])
        self.sys.plugins.rules['_any'][self.RULE_CHECKER
                                       ] = [None] + self.plugins.available()

        # Parse once (silently), to figure out which plugins to load...
        self.parse_config(None, '\n'.join(lines), source=filename)

        if len(self.sys.plugins) == 0:
            self.sys.plugins.extend(self.plugins.DEFAULT)
        self.load_plugins(session)

        # Now all the plugins are loaded, reset and parse again!
        self.reset_rules_from_source()
        self.parse_config(session, '\n'.join(lines), source=filename)

        # Open event log
        self.event_log = EventLog(self.data_directory('event_log',
                                                      mode='rw', mkdir=True),
                                  # FIXME: Disabled encryption for now
                                  lambda: False and self.prefs.obfuscate_index
                                  ).load()

        # Enable translations
        translation = self.get_i18n_translation(session)

        # Configure jinja2
        self.jinja_env = Environment(
            loader=MailpileJinjaLoader(self),
            autoescape=True,
            trim_blocks=True,
            extensions=['jinja2.ext.i18n', 'jinja2.ext.with_',
                        'jinja2.ext.do', 'jinja2.ext.autoescape',
                        'mailpile.jinjaextensions.MailpileCommand']
        )
        self.jinja_env.install_gettext_translations(translation,
                                                    newstyle=True)

        # Load VCards
        self.vcards = VCardStore(self, self.data_directory('vcards',
                                                           mode='rw',
                                                           mkdir=True))

    def reset_rules_from_source(self):
        self.set_rules(self._rules_source)
        self.sys.plugins.rules['_any'][self.RULE_CHECKER
                                       ] = [None] + self.plugins.available()

    def load_plugins(self, session):
        from mailpile.plugins import PluginManager
        plugin_list = set(PluginManager.REQUIRED + self.sys.plugins)
        for plugin in plugin_list:
            if plugin is not None:
                session.ui.mark(_('Loading plugin: %s') % plugin)
                self.plugins.load(plugin)
        session.ui.mark(_('Processing manifests'))
        self.plugins.process_manifests()
        self.prepare_workers(session)

    def save(self):
        self._mkworkdir(None)
        newfile = '%s.new' % self.conffile
        fd = gpg_open(newfile, self.prefs.get('gpg_recipient'), 'wb')
        fd.write(self.as_config_bytes(private=True))
        fd.close()

        # Keep the last 5 config files around... just in case.
        backup_file(self.conffile, backups=5, min_age_delta=10)
        os.rename(newfile, self.conffile)

        self.get_i18n_translation()
        self.prepare_workers()

    def clear_mbox_cache(self):
        self._mbox_cache = {}

    def get_mailboxes(self):
        def fmt_mbxid(k):
            k = b36(int(k, 36))
            if len(k) > MBX_ID_LEN:
                raise ValueError(_('Mailbox ID too large: %s') % k)
            return (('0' * MBX_ID_LEN) + k)[-MBX_ID_LEN:]
        mailboxes = [fmt_mbxid(k) for k in self.sys.mailbox.keys()]
        mailboxes.sort()
        return [(k, self.sys.mailbox[k]) for k in mailboxes]

    def is_editable_message(self, msg_info):
        for ptr in msg_info[MailIndex.MSG_PTRS].split(','):
            if not self.is_editable_mailbox(ptr[: MBX_ID_LEN]):
                return False
        editable = False
        for tid in msg_info[MailIndex.MSG_TAGS].split(','):
            try:
                if self.tags and self.tags[tid].flag_editable:
                    editable = True
            except (KeyError, AttributeError):
                pass
        return editable

    def is_editable_mailbox(self, mailbox_id):
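        # None or '' means "no mailbox" and maps to -1 so the comparison below
        # fails; any other value is decoded as a base-36 mailbox ID.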
        mailbox_id = ((mailbox_id is None and -1) or
                      (mailbox_id == '' and -1) or
                      int(mailbox_id, 36))
        local_mailbox_id = int(self.sys.get('local_mailbox_id', 'ZZZZZ'), 36)
        return (mailbox_id == local_mailbox_id)

    def load_pickle(self, pfn):
        with open(os.path.join(self.workdir, pfn), 'rb') as fd:
            if self.prefs.obfuscate_index:
                from mailpile.crypto.streamer import DecryptingStreamer
                with DecryptingStreamer(self.prefs.obfuscate_index,
                                        fd) as streamer:
                    return cPickle.loads(streamer.read())
            else:
                return cPickle.loads(fd.read())

    def save_pickle(self, obj, pfn):
        try:
            if self.prefs.obfuscate_index:
                from mailpile.crypto.streamer import EncryptingStreamer
                fd = EncryptingStreamer(self.prefs.obfuscate_index,
                                        dir=self.workdir)
                cPickle.dump(obj, fd, protocol=0)
                fd.save(os.path.join(self.workdir, pfn))
            else:
                fd = open(os.path.join(self.workdir, pfn), 'wb')
                cPickle.dump(obj, fd, protocol=0)
        finally:
            fd.close()

    def open_mailbox(self, session, mailbox_id):
        try:
            mbx_id = mailbox_id.lower()
            mfn = self.sys.mailbox[mbx_id]
            pfn = 'pickled-mailbox.%s' % mbx_id
        except KeyError:
            raise NoSuchMailboxError(_('No such mailbox: %s') % mbx_id)

        try:
            if mbx_id in self._mbox_cache:
                self._mbox_cache[mbx_id].update_toc()
            else:
                if session:
                    session.ui.mark(_('%s: Updating: %s') % (mbx_id, mfn))
                self._mbox_cache[mbx_id] = self.load_pickle(pfn)
        except:
            if self.sys.debug:
                import traceback
                traceback.print_exc()
            if session:
                session.ui.mark(_('%s: Opening: %s (may take a while)'
                                  ) % (mbx_id, mfn))
            editable = self.is_editable_mailbox(mbx_id)
            mbox = OpenMailbox(mfn, self, create=editable)
            mbox.editable = editable
            mbox.save(session,
                      to=pfn,
                      pickler=lambda o, f: self.save_pickle(o, f))
            self._mbox_cache[mbx_id] = mbox

        # Always set this, it can't be pickled
        self._mbox_cache[mbx_id]._encryption_key_func = \
            lambda: self.prefs.obfuscate_index

        return self._mbox_cache[mbx_id]

    def open_local_mailbox(self, session):
        local_id = self.sys.get('local_mailbox_id', None)
        if not local_id:
            mailbox = os.path.join(self.workdir, 'mail')
            mbx = wervd.MailpileMailbox(mailbox)
            mbx._encryption_key_func = lambda: self.prefs.obfuscate_index
            local_id = self.sys.mailbox.append(mailbox)
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
            self.sys.local_mailbox_id = local_id
        else:
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
        return local_id, self.open_mailbox(session, local_id)

    def get_profile(self, email=None):
        find = email or self.prefs.get('default_email', None)
        default_profile = {
            'name': None,
            'email': find,
            'signature': None,
            'messageroute': self.prefs.default_messageroute
        }
        for profile in self.profiles:
            if profile.email == find or not find:
                if not email:
                    self.prefs.default_email = profile.email
                return dict_merge(default_profile, profile)
        return default_profile

    def get_sendmail(self, frm, rcpts=['-t']):
        if len(rcpts) == 1:
            if rcpts[0].lower().endswith('.onion'):
                return {"protocol": "smtorp",
                        "host": rcpts[0].split('@')[-1],
                        "port": 25,
                        "username": "",
                        "password": ""}
        routeid = self.get_profile(frm)['messageroute']
        if self.routes[routeid] is not None:
            return self.routes[routeid]
        else:
            print "Migration notice: Try running 'setup/migrate'."
            raise ValueError(_("Route %s does not exist.") % routeid)

    def data_directory(self, ftype, mode='rb', mkdir=False):
        """
        Return the path to a data directory for a particular type of file
        data, optionally creating the directory if it is missing.

        >>> p = cfg.data_directory('html_theme', mode='r', mkdir=False)
        >>> p == os.path.abspath('static/default')
        True
        """
        # This should raise a KeyError if the ftype is unrecognized
        bpath = self.sys.path.get(ftype)
        if not bpath.startswith('/'):
            cpath = os.path.join(self.workdir, bpath)
            if os.path.exists(cpath) or 'w' in mode:
                bpath = cpath
                if mkdir and not os.path.exists(cpath):
                    os.mkdir(cpath)
            else:
                bpath = os.path.join(os.path.dirname(__file__), '..', bpath)
        return os.path.abspath(bpath)

    def data_file_and_mimetype(self, ftype, fpath, *args, **kwargs):
        # The theme gets precedence
        core_path = self.data_directory(ftype, *args, **kwargs)
        path, mimetype = os.path.join(core_path, fpath), None

        # If there's nothing there, check our plugins
        if not os.path.exists(path):
            from mailpile.plugins import PluginManager
            path, mimetype = PluginManager().get_web_asset(fpath, path)

        if os.path.exists(path):
            return path, mimetype
        else:
            return None, None

    def history_file(self):
        return os.path.join(self.workdir, 'history')

    def mailindex_file(self):
        return os.path.join(self.workdir, 'mailpile.idx')

    def postinglist_dir(self, prefix):
        d = os.path.join(self.workdir, 'search')
        if not os.path.exists(d):
            os.mkdir(d)
        d = os.path.join(d, prefix and prefix[0] or '_')
        if not os.path.exists(d):
            os.mkdir(d)
        return d

    def get_index(self, session):
        if self.index:
            return self.index
        idx = MailIndex(self)
        idx.load(session)
        self.index = idx
        return idx

    def get_tor_socket(self):
        if socks:
            socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5,
                                  'localhost', 9050, True)
        return socks.socksocket

    def get_i18n_translation(self, session=None):
        language = self.prefs.language
        trans = None
        if language != "":
            try:
                trans = translation("mailpile", getLocaleDirectory(),
                                    [language], codeset="utf-8")
            except IOError:
                if session:
                    session.ui.warning('Failed to load language %s' % language)
        if not trans:
            trans = translation("mailpile", getLocaleDirectory(),
                                codeset='utf-8', fallback=True)
            if session and isinstance(trans, NullTranslations):
                session.ui.warning('Failed to configure i18n. Using fallback.')

        if trans:
            trans.set_output_charset("utf-8")
            trans.install(unicode=True)
        return trans

    def open_file(self, ftype, fpath, mode='rb', mkdir=False):
        if '..' in fpath:
            raise ValueError(_('Parent paths are not allowed'))
        fpath, mt = self.data_file_and_mimetype(ftype, fpath,
                                                mode=mode, mkdir=mkdir)
        if not fpath:
            raise IOError(2, 'Not Found')
        return fpath, open(fpath, mode), mt

    def prepare_workers(config, session=None, daemons=False):
        # Set globals from config first...
        import mailpile.util

        # Make sure we have a silent background session
        if not config.background:
            config.background = Session(config)
            config.background.ui = BackgroundInteraction(config)
            config.background.ui.block()

        # Start the workers
        if daemons:
            if config.slow_worker == config.dumb_worker:
                config.slow_worker = Worker('Slow worker', session)
                config.slow_worker.start()
            if not config.cron_worker:
                config.cron_worker = Cron('Cron worker', session)
                config.cron_worker.start()
            if not config.http_worker:
                # Start the HTTP worker if requested
                sspec = (config.sys.http_host, config.sys.http_port)
                if sspec[0].lower() != 'disabled' and sspec[1] >= 0:
                    config.http_worker = HttpWorker(session, sspec)
                    config.http_worker.start()
            if not config.other_workers:
                from mailpile.plugins import PluginManager
                for worker in PluginManager.WORKERS:
                    w = worker(session)
                    w.start()
                    config.other_workers.append(w)

        # Update the cron jobs, if necessary
        if config.cron_worker:
            session = session or config.background

            # Schedule periodic rescanning, if requested.
            rescan_interval = config.prefs.rescan_interval
            if rescan_interval:
                def rescan():
                    if 'rescan' not in config._running:
                        rsc = Rescan(session, 'rescan')
                        rsc.serialize = False
                        config.slow_worker.add_task(session, 'Rescan', rsc.run)
                config.cron_worker.add_task('rescan', rescan_interval, rescan)

            # Schedule plugin jobs
            from mailpile.plugins import PluginManager

            def interval(i):
                if isinstance(i, (str, unicode)):
                    i = config.walk(i)
                return int(i)

            def wrap_fast(func):
                def wrapped():
                    return func(session)
                return wrapped

            def wrap_slow(func):
                def wrapped():
                    config.slow_worker.add_task(session, job,
                                                lambda: func(session))
                return wrapped
            for job, (i, f) in PluginManager.FAST_PERIODIC_JOBS.iteritems():
                config.cron_worker.add_task(job, interval(i), wrap_fast(f))
            for job, (i, f) in PluginManager.SLOW_PERIODIC_JOBS.iteritems():
                config.cron_worker.add_task(job, interval(i), wrap_slow(f))

    def stop_workers(config):
        for wait in (False, True):
            for w in [config.http_worker,
                      config.slow_worker,
                      config.cron_worker] + config.other_workers:
                if w:
                    w.quit(join=wait)
        config.other_workers = []
        config.http_worker = config.cron_worker = None
        config.slow_worker = config.dumb_worker
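A minimal usage sketch for the class above, assuming Python 2 (the code relies on cPickle, unicode and print statements) and that the class and a default rule set are importable as mailpile.config.ConfigManager and mailpile.defaults.CONFIG_RULES; those import paths, the temporary workdir and the printed values are illustrative assumptions, not something the example itself specifies.

# Hypothetical driver: exercise ConfigManager with an in-memory config fragment.
from mailpile.config import ConfigManager    # assumed import path
from mailpile.defaults import CONFIG_RULES   # assumed default rule set

config = ConfigManager(workdir='/tmp/mailpile-test', rules=CONFIG_RULES)

# parse_config() returns True only on a clean parse; session=None keeps it
# silent, so invalid settings are dropped without UI warnings.
ok = config.parse_config(None, '[config/sys]\nfd_cache_size = 123\n')
print ok, config.sys.fd_cache_size           # -> True 123 (per the doctest above)

# data_directory() resolves a named path under the workdir, creating it here
# because mkdir=True and the mode includes 'w'.
print config.data_directory('event_log', mode='rw', mkdir=True)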
Example #2
class ConfigManager(ConfigDict):
    """
    This class manages the live global mailpile configuration. This includes
    the settings themselves, as well as global objects like the index and
    references to any background worker threads.
    """
    DEFAULT_WORKDIR = os.environ.get('MAILPILE_HOME',
                                     os.path.expanduser('~/.mailpile'))

    def __init__(self, workdir=None, rules={}):
        ConfigDict.__init__(self, _rules=rules, _magic=False)

        self.workdir = workdir or self.DEFAULT_WORKDIR
        self.conffile = os.path.join(self.workdir, 'mailpile.cfg')

        self.plugins = None
        self.background = None
        self.cron_worker = None
        self.http_worker = None
        self.dumb_worker = self.slow_worker = DumbWorker('Dumb worker', None)
        self.other_workers = []

        self.jinja_env = None

        self.event_log = None
        self.index = None
        self.vcards = {}
        self._mbox_cache = {}
        self._running = {}

        self._magic = True  # Enable the getattr/getitem magic

    def _mkworkdir(self, session):
        if not os.path.exists(self.workdir):
            if session:
                session.ui.notify(_('Creating: %s') % self.workdir)
            os.mkdir(self.workdir)

    def parse_config(self, session, data, source='internal'):
        """
        Parse a config file fragment. Invalid data will be ignored, but will
        generate warnings in the session UI. Returns True on a clean parse,
        False if any of the settings were bogus.

        >>> cfg.parse_config(session, '[config/sys]\\nfd_cache_size = 123\\n')
        True
        >>> cfg.sys.fd_cache_size
        123

        >>> cfg.parse_config(session, '[config/bogus]\\nblabla = bla\\n')
        False
        >>> [l[1] for l in session.ui.log_buffer if 'bogus' in l[1]][0]
        'Invalid (internal): section config/bogus does not exist'

        >>> cfg.parse_config(session, '[config/sys]\\nhistory_length = 321\\n'
        ...                                          'bogus_variable = 456\\n')
        False
        >>> cfg.sys.history_length
        321
        >>> [l[1] for l in session.ui.log_buffer if 'bogus_var' in l[1]][0]
        u'Invalid (internal): section config/sys, ...

        >>> cfg.parse_config(session, '[config/tags/a]\\nname = TagName\\n')
        True
        >>> cfg.tags['a']._key
        'a'
        >>> cfg.tags['a'].name
        u'TagName'
        """
        parser = CommentedEscapedConfigParser()
        parser.readfp(io.BytesIO(str(data)))

        def item_sorter(i):
            try:
                return (int(i[0], 36), i[1])
            except:
                return i

        all_okay = True
        for section in parser.sections():
            okay = True
            cfgpath = section.split(':')[0].split('/')[1:]
            cfg = self
            added_parts = []
            for part in cfgpath:
                if part in cfg.keys():
                    cfg = cfg[part]
                elif '_any' in cfg.rules:
                    cfg[part] = {}
                    cfg = cfg[part]
                else:
                    if session:
                        msg = _('Invalid (%s): section %s does not '
                                'exist') % (source, section)
                        session.ui.warning(msg)
                    all_okay = okay = False
            items = okay and parser.items(section) or []
            items.sort(key=item_sorter)
            for var, val in items:
                try:
                    cfg[var] = val
                except (ValueError, KeyError, IndexError):
                    if session:
                        msg = _(u'Invalid (%s): section %s, variable %s') % (
                            source, section, var)
                        session.ui.warning(msg)
                    all_okay = okay = False
        return all_okay

    def load(self, session, filename=None):
        self._mkworkdir(session)
        self.index = None
        self.reset(rules=False, data=True)

        filename = filename or self.conffile
        lines = []
        try:
            with open(filename, 'rb') as fd:
                decrypt_and_parse_lines(fd, lambda l: lines.append(l), None)
        except ValueError:
            pass
        except IOError:
            pass

        # Discover plugins and update the config rule to match
        from mailpile.plugins import PluginManager
        self.plugins = PluginManager(config=self, builtin=True).discover([
            os.path.join(os.path.dirname(os.path.realpath(__file__)), '..',
                         'plugins'),
            os.path.join(self.workdir, 'plugins')
        ])
        self.sys.plugins.rules['_any'][1] = self.plugins.available()

        # Parse once (silently), to figure out which plugins to load...
        self.parse_config(None, '\n'.join(lines), source=filename)

        if len(self.sys.plugins) == 0:
            self.sys.plugins.extend(self.plugins.DEFAULT)
        self.load_plugins(session)

        # Now all the plugins are loaded, reset and parse again!
        self.reset_rules_from_source()
        self.parse_config(session, '\n'.join(lines), source=filename)

        # Open event log
        self.event_log = EventLog(
            self.data_directory('event_log', mode='rw', mkdir=True),
            # FIXME: Disabled encryption for now
            lambda: False and self.prefs.obfuscate_index).load()

        # Enable translations
        translation = self.get_i18n_translation(session)

        # Configure jinja2
        self.jinja_env = Environment(
            loader=MailpileJinjaLoader(self),
            autoescape=True,
            trim_blocks=True,
            extensions=[
                'jinja2.ext.i18n', 'jinja2.ext.with_', 'jinja2.ext.do',
                'jinja2.ext.autoescape',
                'mailpile.jinjaextensions.MailpileCommand'
            ])
        self.jinja_env.install_gettext_translations(translation, newstyle=True)

        # Load VCards
        self.vcards = VCardStore(
            self, self.data_directory('vcards', mode='rw', mkdir=True))

    def reset_rules_from_source(self):
        self.set_rules(self._rules_source)
        self.sys.plugins.rules['_any'][1] = self.plugins.available()

    def load_plugins(self, session):
        from mailpile.plugins import PluginManager
        plugin_list = set(PluginManager.REQUIRED + self.sys.plugins)
        for plugin in plugin_list:
            session.ui.mark(_('Loading plugin: %s') % plugin)
            self.plugins.load(plugin)
        session.ui.mark(_('Processing manifests'))
        self.plugins.process_manifests()
        self.prepare_workers(session)

    def save(self):
        self._mkworkdir(None)
        newfile = '%s.new' % self.conffile
        fd = gpg_open(newfile, self.prefs.get('gpg_recipient'), 'wb')
        fd.write(self.as_config_bytes(private=True))
        fd.close()

        # Keep the last 5 config files around... just in case.
        backup_file(self.conffile, backups=5, min_age_delta=10)
        os.rename(newfile, self.conffile)

        self.get_i18n_translation()
        self.prepare_workers()

    def clear_mbox_cache(self):
        self._mbox_cache = {}

    def get_mailboxes(self):
        def fmt_mbxid(k):
            k = b36(int(k, 36))
            if len(k) > MBX_ID_LEN:
                raise ValueError(_('Mailbox ID too large: %s') % k)
            return (('0' * MBX_ID_LEN) + k)[-MBX_ID_LEN:]

        mailboxes = [fmt_mbxid(k) for k in self.sys.mailbox.keys()]
        mailboxes.sort()
        return [(k, self.sys.mailbox[k]) for k in mailboxes]

    def is_editable_message(self, msg_info):
        for ptr in msg_info[MailIndex.MSG_PTRS].split(','):
            if not self.is_editable_mailbox(ptr[:MBX_ID_LEN]):
                return False
        editable = False
        for tid in msg_info[MailIndex.MSG_TAGS].split(','):
            try:
                if self.tags and self.tags[tid].flag_editable:
                    editable = True
            except (KeyError, AttributeError):
                pass
        return editable

    def is_editable_mailbox(self, mailbox_id):
        mailbox_id = ((mailbox_id is None and -1) or (mailbox_id == '' and -1)
                      or int(mailbox_id, 36))
        local_mailbox_id = int(self.sys.get('local_mailbox_id', 'ZZZZZ'), 36)
        return (mailbox_id == local_mailbox_id)

    def load_pickle(self, pfn):
        with open(os.path.join(self.workdir, pfn), 'rb') as fd:
            if self.prefs.obfuscate_index:
                from mailpile.crypto.streamer import DecryptingStreamer
                with DecryptingStreamer(self.prefs.obfuscate_index,
                                        fd) as streamer:
                    return cPickle.loads(streamer.read())
            else:
                return cPickle.loads(fd.read())

    def save_pickle(self, obj, pfn):
        try:
            if self.prefs.obfuscate_index:
                from mailpile.crypto.streamer import EncryptingStreamer
                fd = EncryptingStreamer(self.prefs.obfuscate_index,
                                        dir=self.workdir)
                cPickle.dump(obj, fd, protocol=0)
                fd.save(os.path.join(self.workdir, pfn))
            else:
                fd = open(os.path.join(self.workdir, pfn), 'wb')
                cPickle.dump(obj, fd, protocol=0)
        finally:
            fd.close()

    def open_mailbox(self, session, mailbox_id):
        try:
            mbx_id = mailbox_id.lower()
            mfn = self.sys.mailbox[mbx_id]
            pfn = 'pickled-mailbox.%s' % mbx_id
        except KeyError:
            raise NoSuchMailboxError(_('No such mailbox: %s') % mbx_id)

        try:
            if mbx_id in self._mbox_cache:
                self._mbox_cache[mbx_id].update_toc()
            else:
                if session:
                    session.ui.mark(_('%s: Updating: %s') % (mbx_id, mfn))
                self._mbox_cache[mbx_id] = self.load_pickle(pfn)
        except:
            if self.sys.debug:
                import traceback
                traceback.print_exc()
            if session:
                session.ui.mark(
                    _('%s: Opening: %s (may take a while)') % (mbx_id, mfn))
            editable = self.is_editable_mailbox(mbx_id)
            mbox = OpenMailbox(mfn, self, create=editable)
            mbox.editable = editable
            mbox.save(session,
                      to=pfn,
                      pickler=lambda o, f: self.save_pickle(o, f))
            self._mbox_cache[mbx_id] = mbox

        # Always set this, it can't be pickled
        self._mbox_cache[mbx_id]._encryption_key_func = \
            lambda: self.prefs.obfuscate_index

        return self._mbox_cache[mbx_id]

    def open_local_mailbox(self, session):
        local_id = self.sys.get('local_mailbox_id', None)
        if not local_id:
            mailbox = os.path.join(self.workdir, 'mail')
            mbx = wervd.MailpileMailbox(mailbox)
            mbx._encryption_key_func = lambda: self.prefs.obfuscate_index
            local_id = self.sys.mailbox.append(mailbox)
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
            self.sys.local_mailbox_id = local_id
        else:
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
        return local_id, self.open_mailbox(session, local_id)

    def get_profile(self, email=None):
        find = email or self.prefs.get('default_email', None)
        default_profile = {
            'name': None,
            'email': find,
            'signature': None,
            'messageroute': self.prefs.default_messageroute
        }
        for profile in self.profiles:
            if profile.email == find or not find:
                if not email:
                    self.prefs.default_email = profile.email
                return dict_merge(default_profile, profile)
        return default_profile

    def get_sendmail(self, frm, rcpts=['-t']):
        if len(rcpts) == 1:
            if rcpts[0].lower().endswith('.onion'):
                return {
                    "protocol": "smtorp",
                    "host": rcpts[0].split('@')[-1],
                    "port": 25,
                    "username": "",
                    "password": ""
                }
        routeid = self.get_profile(frm)['messageroute']
        if self.routes[routeid] is not None:
            return self.routes[routeid]
        else:
            print "Migration notice: Try running 'setup/migrate'."
            raise ValueError(_("Route %s does not exist.") % routeid)

    def data_directory(self, ftype, mode='rb', mkdir=False):
        """
        Return the path to a data directory for a particular type of file
        data, optionally creating the directory if it is missing.

        >>> p = cfg.data_directory('html_theme', mode='r', mkdir=False)
        >>> p == os.path.abspath('static/default')
        True
        """
        # This should raise a KeyError if the ftype is unrecognized
        bpath = self.sys.path.get(ftype)
        if not bpath.startswith('/'):
            cpath = os.path.join(self.workdir, bpath)
            if os.path.exists(cpath) or 'w' in mode:
                bpath = cpath
                if mkdir and not os.path.exists(cpath):
                    os.mkdir(cpath)
            else:
                bpath = os.path.join(os.path.dirname(__file__), '..', bpath)
        return os.path.abspath(bpath)

    def data_file_and_mimetype(self, ftype, fpath, *args, **kwargs):
        # The theme gets precedence
        core_path = self.data_directory(ftype, *args, **kwargs)
        path, mimetype = os.path.join(core_path, fpath), None

        # If there's nothing there, check our plugins
        if not os.path.exists(path):
            from mailpile.plugins import PluginManager
            path, mimetype = PluginManager().get_web_asset(fpath, path)

        if os.path.exists(path):
            return path, mimetype
        else:
            return None, None

    def history_file(self):
        return os.path.join(self.workdir, 'history')

    def mailindex_file(self):
        return os.path.join(self.workdir, 'mailpile.idx')

    def postinglist_dir(self, prefix):
        d = os.path.join(self.workdir, 'search')
        if not os.path.exists(d):
            os.mkdir(d)
        d = os.path.join(d, prefix and prefix[0] or '_')
        if not os.path.exists(d):
            os.mkdir(d)
        return d

    def get_index(self, session):
        if self.index:
            return self.index
        idx = MailIndex(self)
        idx.load(session)
        self.index = idx
        return idx

    def get_tor_socket(self):
        if socks:
            socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, 'localhost', 9050,
                                  True)
        return socks.socksocket

    def get_i18n_translation(self, session=None):
        language = self.prefs.language
        trans = None
        if language != "":
            try:
                trans = translation("mailpile",
                                    getLocaleDirectory(), [language],
                                    codeset="utf-8")
            except IOError:
                if session:
                    session.ui.warning('Failed to load language %s' % language)
        if not trans:
            trans = translation("mailpile",
                                getLocaleDirectory(),
                                codeset='utf-8',
                                fallback=True)
            if session and isinstance(trans, NullTranslations):
                session.ui.warning('Failed to configure i18n. Using fallback.')

        if trans:
            trans.set_output_charset("utf-8")
            trans.install(unicode=True)
        return trans

    def open_file(self, ftype, fpath, mode='rb', mkdir=False):
        if '..' in fpath:
            raise ValueError(_('Parent paths are not allowed'))
        fpath, mt = self.data_file_and_mimetype(ftype,
                                                fpath,
                                                mode=mode,
                                                mkdir=mkdir)
        if not fpath:
            raise IOError(2, 'Not Found')
        return fpath, open(fpath, mode), mt

    def prepare_workers(config, session=None, daemons=False):
        # Set globals from config first...
        import mailpile.util

        # Make sure we have a silent background session
        if not config.background:
            config.background = Session(config)
            config.background.ui = BackgroundInteraction(config)
            config.background.ui.block()

        # Start the workers
        if daemons:
            if config.slow_worker == config.dumb_worker:
                config.slow_worker = Worker('Slow worker', session)
                config.slow_worker.start()
            if not config.cron_worker:
                config.cron_worker = Cron('Cron worker', session)
                config.cron_worker.start()
            if not config.http_worker:
                # Start the HTTP worker if requested
                sspec = (config.sys.http_host, config.sys.http_port)
                if sspec[0].lower() != 'disabled' and sspec[1] >= 0:
                    config.http_worker = HttpWorker(session, sspec)
                    config.http_worker.start()
            if not config.other_workers:
                from mailpile.plugins import PluginManager
                for worker in PluginManager.WORKERS:
                    w = worker(session)
                    w.start()
                    config.other_workers.append(w)

        # Update the cron jobs, if necessary
        if config.cron_worker:
            session = session or config.background

            # Schedule periodic rescanning, if requested.
            rescan_interval = config.prefs.rescan_interval
            if rescan_interval:

                def rescan():
                    if 'rescan' not in config._running:
                        rsc = Rescan(session, 'rescan')
                        rsc.serialize = False
                        config.slow_worker.add_task(session, 'Rescan', rsc.run)

                config.cron_worker.add_task('rescan', rescan_interval, rescan)

            # Schedule plugin jobs
            from mailpile.plugins import PluginManager

            def interval(i):
                if isinstance(i, (str, unicode)):
                    i = config.walk(i)
                return int(i)

            def wrap_fast(func):
                def wrapped():
                    return func(session)

                return wrapped

            def wrap_slow(func):
                def wrapped():
                    config.slow_worker.add_task(session, job,
                                                lambda: func(session))

                return wrapped

            for job, (i, f) in PluginManager.FAST_PERIODIC_JOBS.iteritems():
                config.cron_worker.add_task(job, interval(i), wrap_fast(f))
            for job, (i, f) in PluginManager.SLOW_PERIODIC_JOBS.iteritems():
                config.cron_worker.add_task(job, interval(i), wrap_slow(f))

    def stop_workers(config):
        for wait in (False, True):
            for w in [
                    config.http_worker, config.slow_worker, config.cron_worker
            ] + config.other_workers:
                if w:
                    w.quit(join=wait)
        config.other_workers = []
        config.http_worker = config.cron_worker = None
        config.slow_worker = config.dumb_worker
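And a hedged sketch of the worker lifecycle implied by prepare_workers() and stop_workers(); the mailpile.ui.Session import path and the idea of driving everything from one entry point are assumptions for illustration, not part of the example above.

# Hypothetical startup/shutdown around a loaded ConfigManager instance.
from mailpile.ui import Session                # assumed import path

session = Session(config)                      # config: a ConfigManager
config.load(session)                           # discover plugins, parse mailpile.cfg
config.prepare_workers(session, daemons=True)  # start slow/cron/http workers
try:
    pass                                       # run commands, serve HTTP, etc.
finally:
    config.stop_workers()                      # ask each worker to quit, then join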