def load(self, session, filename=None):
    """Reset this config object and reload it from disk.

    Drops all data (but keeps the rules), attempts a one-time import of
    any legacy ``config.rc``, then parses the main config file and loads
    the VCard store.

    Args:
        session: active Session (may be None); used for UI warnings.
        filename: config file to read; defaults to self.conffile.
    """
    self._mkworkdir(session)
    self.index = None
    self.reset(rules=False, data=True)

    # Best-effort import of the legacy config format; failures are
    # printed but never fatal.
    try:
        ocfg = os.path.join(self.workdir, 'config.rc')
        ocl = OldConfigLoader(filename=ocfg)
        if ocl.export(self) and session:
            # FIX: wrap user-visible warnings in _() for translation,
            # matching the other warnings in this module.
            session.ui.warning(_('WARNING: Imported old config from %s')
                               % ocfg)
        elif ocl.lines and session:
            session.ui.warning(_('WARNING: Failed to import config from %s')
                               % ocfg)
    except:
        import traceback
        traceback.print_exc()

    filename = filename or self.conffile
    lines = []
    try:
        fd = open(filename, 'rb')
        try:
            decrypt_and_parse_lines(fd, lambda l: lines.append(l))
        except ValueError:
            pass
        fd.close()
    except IOError:
        pass  # A missing config file is fine on first run.

    self.parse_config(session, '\n'.join(lines), source=filename)

    self.vcards = VCardStore(self, self.data_directory('vcards'))
    self.vcards.load_vcards(session)
def load(self, session, filename=None):
    """Reset this config object and reload it from disk.

    The config is parsed twice: once silently to discover which plugins
    the configuration enables, and again (with all plugins loaded and
    the rules reset) so plugin-defined settings are understood.  Also
    opens the event log, sets up translations and the Jinja2 template
    environment, and creates the VCard store.
    """
    self._mkworkdir(session)
    self.index = None
    self.reset(rules=False, data=True)

    filename = filename or self.conffile
    lines = []
    try:
        fd = open(filename, 'rb')
        try:
            # NOTE(review): third argument here is None; other variants
            # of this method omit it — confirm the callback signature of
            # decrypt_and_parse_lines.
            decrypt_and_parse_lines(fd, lambda l: lines.append(l), None)
        except ValueError:
            pass
        fd.close()
    except IOError:
        pass  # Missing config file is fine on first run.

    # Discover plugins and update the config rule to match
    from mailpile.plugins import PluginManager
    self.plugins = PluginManager(config=self, builtin=True).discover([
        os.path.join(os.path.dirname(os.path.realpath(__file__)),
                     '..', 'plugins'),
        os.path.join(self.workdir, 'plugins')
    ])
    self.sys.plugins.rules['_any'][1] = self.plugins.available()

    # Parse once (silently), to figure out which plugins to load...
    self.parse_config(None, '\n'.join(lines), source=filename)
    if len(self.sys.plugins) == 0:
        self.sys.plugins.extend(self.plugins.DEFAULT)
    self.load_plugins(session)

    # Now all the plugins are loaded, reset and parse again!
    self.reset_rules_from_source()
    self.parse_config(session, '\n'.join(lines), source=filename)

    # Open event log
    self.event_log = EventLog(self.data_directory('event_log',
                                                  mode='rw', mkdir=True),
                              # FIXME: Disabled encryption for now
                              lambda: False and self.prefs.obfuscate_index
                              ).load()

    # Enable translations
    translation = self.get_i18n_translation(session)

    # Configure jinja2
    self.jinja_env = Environment(
        loader=MailpileJinjaLoader(self),
        autoescape=True,
        trim_blocks=True,
        extensions=['jinja2.ext.i18n', 'jinja2.ext.with_', 'jinja2.ext.do',
                    'mailpile.jinjaextensions.MailpileCommand'])
    self.jinja_env.install_gettext_translations(translation, newstyle=True)

    # Load VCards (note: this variant does not call load_vcards here;
    # presumably the caller does — verify against the call site).
    self.vcards = VCardStore(self, self.data_directory('vcards',
                                                       mode='rw',
                                                       mkdir=True))
def load(self, session, filename=None):
    """Reset this config and reload it from disk.

    Imports a legacy config.rc on a best-effort basis, parses the main
    config file twice (first to learn which plugins to enable, then
    again with all plugins loaded), enables translations and loads the
    VCard store.
    """
    self._mkworkdir(session)
    self.index = None
    self.reset(rules=False, data=True)

    # Best-effort import of the old-style configuration; any failure is
    # printed but never fatal.
    try:
        old_file = os.path.join(self.workdir, 'config.rc')
        old_loader = OldConfigLoader(filename=old_file)
        if old_loader.export(self) and session:
            session.ui.warning(_('WARNING: Imported old config from %s')
                               % old_file)
        elif old_loader.lines and session:
            session.ui.warning(_('WARNING: Failed to import config from %s')
                               % old_file)
    except:
        import traceback
        traceback.print_exc()

    filename = filename or self.conffile
    collected = []
    try:
        config_fd = open(filename, 'rb')
        try:
            decrypt_and_parse_lines(config_fd, collected.append)
        except ValueError:
            pass
        config_fd.close()
    except IOError:
        pass

    # Discover plugins and update the config rule
    import mailpile.plugins
    plugin_dirs = [os.path.join(self.workdir, 'plugins')]
    self.sys.plugins.rules['_any'][1] = mailpile.plugins.Discover(plugin_dirs)

    # Parse the config twice, first to figure out which plugins to load,
    # and again after loading all the plugins.
    config_text = '\n'.join(collected)
    self.parse_config(session, config_text, source=filename)
    if len(self.sys.plugins) == 0:
        self.sys.plugins.extend(mailpile.plugins.BUILTIN)
    self.load_plugins(session)
    self.parse_config(session, config_text, source=filename)

    # Enable translations
    self.get_i18n_translation(session)

    # Load VCards
    vcard_dir = self.data_directory('vcards', mode='rw', mkdir=True)
    self.vcards = VCardStore(self, vcard_dir)
    self.vcards.load_vcards(session)
def load(self, session, filename=None):
    """Reset this config object and reload it from disk.

    The config is parsed twice: once silently to discover which plugins
    to enable, and again after the plugins are loaded and the rules have
    been reset.  Also opens the event log and sets up translations and
    the VCard store.
    """
    self._mkworkdir(session)
    self.index = None
    self.reset(rules=False, data=True)

    filename = filename or self.conffile
    lines = []
    try:
        fd = open(filename, 'rb')
        try:
            # NOTE(review): third argument here is None; other variants
            # of this method omit it — confirm the callback signature of
            # decrypt_and_parse_lines.
            decrypt_and_parse_lines(fd, lambda l: lines.append(l), None)
        except ValueError:
            pass
        fd.close()
    except IOError:
        pass  # Missing config file is fine on first run.

    # Discover plugins and update the config rule
    import mailpile.plugins
    pds = [os.path.join(self.workdir, 'plugins')]
    pds = mailpile.plugins.Discover(pds)
    self.sys.plugins.rules['_any'][1] = pds

    # Parse once (silently), to figure out which plugins to load...
    self.parse_config(None, '\n'.join(lines), source=filename)
    if len(self.sys.plugins) == 0:
        self.sys.plugins.extend(mailpile.plugins.__all__)
    self.load_plugins(session)

    # Now all the plugins are loaded, reset and parse again!
    # (set_rules clobbers the plugin rule, so it is reinstated before
    # the second parse.)
    self.set_rules(self._rules_source)
    self.sys.plugins.rules['_any'][1] = pds
    self.parse_config(session, '\n'.join(lines), source=filename)

    # Open event log
    self.event_log = EventLog(self.data_directory('event_log',
                                                  mode='rw', mkdir=True),
                              # FIXME: Disabled encryption for now
                              lambda: False and self.prefs.obfuscate_index
                              ).load()

    # Enable translations
    self.get_i18n_translation(session)

    # Load VCards (note: this variant does not call load_vcards here;
    # presumably the caller does — verify against the call site).
    self.vcards = VCardStore(self, self.data_directory('vcards',
                                                       mode='rw',
                                                       mkdir=True))
def load(self, session, filename=None):
    """Reset this configuration and reload it from disk.

    Parses the config once (silently) to learn which plugins to load,
    loads them, restores the rules and parses again; then enables
    translations and loads the VCard store.
    """
    self._mkworkdir(session)
    self.index = None
    self.reset(rules=False, data=True)

    filename = filename or self.conffile
    collected = []
    try:
        config_fd = open(filename, 'rb')
        try:
            decrypt_and_parse_lines(config_fd, collected.append)
        except ValueError:
            pass
        config_fd.close()
    except IOError:
        pass
    config_text = '\n'.join(collected)

    # Discover plugins and update the config rule
    import mailpile.plugins
    discovered = mailpile.plugins.Discover([os.path.join(self.workdir,
                                                         'plugins')])
    self.sys.plugins.rules['_any'][1] = discovered

    # Parse once (silently), to figure out which plugins to load...
    self.parse_config(None, config_text, source=filename)
    if len(self.sys.plugins) == 0:
        self.sys.plugins.extend(mailpile.plugins.__all__)
    self.load_plugins(session)

    # All plugins loaded: restore the rules (which clobbers the plugin
    # rule, so it is reinstated) and parse for real this time.
    self.set_rules(self._rules_source)
    self.sys.plugins.rules['_any'][1] = discovered
    self.parse_config(session, config_text, source=filename)

    # Enable translations
    self.get_i18n_translation(session)

    # Load VCards
    self.vcards = VCardStore(self, self.data_directory('vcards', mode='rw',
                                                       mkdir=True))
    self.vcards.load_vcards(session)
class ConfigManager(ConfigDict):
    """
    This class manages the live global mailpile configuration. This includes
    the settings themselves, as well as global objects like the index and
    references to any background worker threads.
    """
    # Working directory; overridable via the MAILPILE_HOME env variable.
    DEFAULT_WORKDIR = os.environ.get('MAILPILE_HOME',
                                     os.path.expanduser('~/.mailpile'))

    def __init__(self, workdir=None, rules={}):
        # NOTE(review): mutable default `rules={}` — safe only as long as
        # it is never mutated; ConfigDict.__init__ receives it directly.
        ConfigDict.__init__(self, _rules=rules)
        self.workdir = workdir or self.DEFAULT_WORKDIR
        self.conffile = os.path.join(self.workdir, 'mailpile.cfg')
        self.background = None
        self.cron_worker = None
        self.http_worker = None
        # Both workers start as the same do-nothing placeholder; the real
        # slow worker is created by prepare_workers().
        self.dumb_worker = self.slow_worker = DumbWorker('Dumb worker', None)
        self.index = None
        self.vcards = {}
        self._mbox_cache = {}
        self._running = {}

    def _mkworkdir(self, session):
        """Create the working directory if it does not exist yet."""
        if not os.path.exists(self.workdir):
            if session:
                session.ui.notify(_('Creating: %s') % self.workdir)
            os.mkdir(self.workdir)

    def parse_config(self, session, data, source='internal'):
        """
        Parse a config file fragment. Invalid data will be ignored, but
        will generate warnings in the session UI. Returns True on a clean
        parse, False if any of the settings were bogus.

        >>> cfg.parse_config(session, '[config/sys]\\nfd_cache_size = 123\\n')
        True
        >>> cfg.sys.fd_cache_size
        123

        >>> cfg.parse_config(session, '[config/bogus]\\nblabla = bla\\n')
        False
        >>> [l[1] for l in session.ui.log_buffer if 'bogus' in l[1]][0]
        u'Invalid (internal): section config/bogus does not exist'

        >>> cfg.parse_config(session, '[config/sys]\\nhistory_length = 321\\n'
        ...                           'bogus_variable = 456\\n')
        False
        >>> cfg.sys.history_length
        321
        >>> [l[1] for l in session.ui.log_buffer if 'bogus_var' in l[1]][0]
        u'Invalid (internal): section config/sys, ...

        >>> cfg.parse_config(session, '[config/tags/a]\\nname = TagName\\n')
        True
        >>> cfg.tags['a']._key
        'a'
        >>> cfg.tags['a'].name
        u'TagName'
        """
        parser = CommentedEscapedConfigParser()
        parser.readfp(io.BytesIO(str(data)))
        okay = True

        def item_sorter(i):
            # Sort base-36 keys numerically when possible, lexically
            # otherwise.
            try:
                return (int(i[0], 36), i[1])
            except:
                return i

        for section in parser.sections():
            # Section names look like config/path/parts[:tag]; walk the
            # config tree, creating '_any' containers on demand.
            cfgpath = section.split(':')[0].split('/')[1:]
            cfg = self
            for part in cfgpath:
                if part in cfg:
                    cfg = cfg[part]
                elif '_any' in cfg.rules:
                    if isinstance(cfg, list):
                        cfg.append({})
                    else:
                        cfg[part] = {}
                    cfg = cfg[part]
                else:
                    if session:
                        msg = gettext(u'Invalid (%s): section %s does not '
                                      'exist') % (source, section)
                        session.ui.warning(msg)
                    okay = False
            # Skip the items entirely once the section path failed.
            items = okay and parser.items(section) or []
            items.sort(key=item_sorter)
            for var, val in items:
                try:
                    cfg[var] = val
                except (ValueError, KeyError):
                    if session:
                        msg = gettext(u'Invalid (%s): section %s, variable %s'
                                      ) % (source, section, var)
                        session.ui.warning(msg)
                    okay = False
        return okay

    def load(self, session, filename=None):
        """Reset this config object and reload it from disk.

        Attempts a one-time import of a legacy config.rc, then parses
        the main config file, enables translations and loads VCards.
        """
        self._mkworkdir(session)
        self.index = None
        self.reset(rules=False, data=True)

        # Best-effort import of the legacy config format; never fatal.
        try:
            ocfg = os.path.join(self.workdir, 'config.rc')
            ocl = OldConfigLoader(filename=ocfg)
            if ocl.export(self) and session:
                session.ui.warning(_('WARNING: Imported old config from %s')
                                   % ocfg)
            elif ocl.lines and session:
                session.ui.warning(_('WARNING: Failed to import config '
                                     'from %s') % ocfg)
        except:
            import traceback
            traceback.print_exc()

        filename = filename or self.conffile
        lines = []
        try:
            fd = open(filename, 'rb')
            try:
                decrypt_and_parse_lines(fd, lambda l: lines.append(l))
            except ValueError:
                pass
            fd.close()
        except IOError:
            pass  # Missing config file is fine on first run.

        self.parse_config(session, '\n'.join(lines), source=filename)
        self.get_i18n_translation(session)
        self.vcards = VCardStore(self, self.data_directory('vcards',
                                                           mode='rw',
                                                           mkdir=True))
        self.vcards.load_vcards(session)

    def save(self):
        """Write the private config to disk, GPG-encrypting if configured."""
        self._mkworkdir(None)
        fd = gpg_open(self.conffile, self.prefs.get('gpg_recipient'), 'wb')
        fd.write(self.as_config_bytes(private=True))
        fd.close()
        self.get_i18n_translation()

    def clear_mbox_cache(self):
        """Drop all cached open-mailbox objects."""
        self._mbox_cache = {}

    def get_mailboxes(self):
        """Return a sorted list of (mailbox_id, path) pairs."""
        def fmt_mbxid(k):
            # Normalize to a zero-padded, fixed-width base-36 ID.
            k = b36(int(k, 36))
            if len(k) > MBX_ID_LEN:
                raise ValueError(_('Mailbox ID too large: %s') % k)
            return (('0' * MBX_ID_LEN) + k)[-MBX_ID_LEN:]
        mailboxes = [fmt_mbxid(k) for k in self.sys.mailbox.keys()]
        mailboxes.sort()
        return [(k, self.sys.mailbox[k]) for k in mailboxes]

    def is_editable_message(self, msg_info):
        """Return True if the message lives only in editable mailboxes and
        carries at least one tag flagged as editable."""
        for ptr in msg_info[MailIndex.MSG_PTRS].split(', '):
            if not self.is_editable_mailbox(ptr[:MBX_ID_LEN]):
                return False
        editable = False
        for tid in msg_info[MailIndex.MSG_TAGS].split(', '):
            try:
                if self.tags and self.tags[tid].flag_editable:
                    editable = True
            except (KeyError, AttributeError):
                pass
        return editable

    def is_editable_mailbox(self, mailbox_id):
        """Return True only for the local (drafts) mailbox."""
        # None maps to -1, which can never equal a valid base-36 ID.
        mailbox_id = (mailbox_id is None and -1) or int(mailbox_id, 36)
        local_mailbox_id = int(self.sys.get('local_mailbox_id', 'ZZZZZ'), 36)
        return (mailbox_id == local_mailbox_id)

    def open_mailbox(self, session, mailbox_id):
        """Return a (possibly cached) mailbox object for the given ID.

        Raises NoSuchMailboxError if the ID is unknown.
        """
        try:
            mbx_id = mailbox_id.lower()
            mfn = self.sys.mailbox[mbx_id]
            pfn = os.path.join(self.workdir, 'pickled-mailbox.%s' % mbx_id)
        except KeyError:
            raise NoSuchMailboxError(_('No such mailbox: %s') % mbx_id)
        try:
            if mbx_id in self._mbox_cache:
                self._mbox_cache[mbx_id].update_toc()
            else:
                if session:
                    session.ui.mark(_('%s: Updating: %s') % (mbx_id, mfn))
                # NOTE(review): unpickling a local cache file; only safe
                # because the workdir is trusted.
                self._mbox_cache[mbx_id] = cPickle.load(open(pfn, 'r'))
        except:
            # Any failure (missing/corrupt pickle) falls back to opening
            # the mailbox from scratch and re-caching it.
            if self.sys.debug:
                import traceback
                traceback.print_exc()
            if session:
                session.ui.mark(_('%s: Opening: %s (may take a while)')
                                % (mbx_id, mfn))
            mbox = OpenMailbox(mfn)
            mbox.editable = self.is_editable_mailbox(mbx_id)
            mbox.save(session, to=pfn)
            self._mbox_cache[mbx_id] = mbox
        return self._mbox_cache[mbx_id]

    def open_local_mailbox(self, session):
        """Open (creating on first use) the local Maildir; return
        (local_id, mailbox)."""
        local_id = self.sys.get('local_mailbox_id', None)
        if not local_id:
            mailbox = os.path.join(self.workdir, 'mail')
            mbx = IncrementalMaildir(mailbox)
            local_id = self.sys.mailbox.append(mailbox)
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
            self.sys.local_mailbox_id = local_id
        else:
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
        return local_id, self.open_mailbox(session, local_id)

    def get_profile(self, email=None):
        """Return the profile dict matching `email` (or the default one),
        merged over a default profile skeleton."""
        find = email or self.prefs.get('default_email', None)
        default_profile = {
            'name': None,
            'email': find,
            'signature': None,
            'route': self.prefs.default_route
        }
        for profile in self.profiles:
            if profile.email == find or not find:
                if not email:
                    # Remember the first match as the default address.
                    self.prefs.default_email = profile.email
                return dict_merge(default_profile, profile)
        return default_profile

    def get_sendmail(self, frm, rcpts='-t'):
        """Return the sendmail command for `frm`, with recipients filled in."""
        return self.get_profile(frm)['route'] % {'rcpt': ', '.join(rcpts)}

    def data_directory(self, ftype, mode='rb', mkdir=False):
        """
        Return the path to a data directory for a particular type of file
        data, optionally creating the directory if it is missing.

        >>> p = cfg.data_directory('html_theme', mode='r', mkdir=False)
        >>> p == os.path.abspath('static/default')
        True
        """
        # This should raise a KeyError if the ftype is unrecognized
        bpath = self.sys.path.get(ftype)
        if not bpath.startswith('/'):
            cpath = os.path.join(self.workdir, bpath)
            if os.path.exists(cpath) or 'w' in mode:
                bpath = cpath
                if mkdir and not os.path.exists(cpath):
                    os.mkdir(cpath)
            else:
                # Fall back to the directory shipped with the source tree.
                bpath = os.path.join(os.path.dirname(__file__), '..', bpath)
        return os.path.abspath(bpath)

    def history_file(self):
        """Path of the CLI history file."""
        return os.path.join(self.workdir, 'history')

    def mailindex_file(self):
        """Path of the mail index file."""
        return os.path.join(self.workdir, 'mailpile.idx')

    def postinglist_dir(self, prefix):
        """Return (creating as needed) the search-posting-list directory
        bucketed by the first character of `prefix`."""
        d = os.path.join(self.workdir, 'search')
        if not os.path.exists(d):
            os.mkdir(d)
        d = os.path.join(d, prefix and prefix[0] or '_')
        if not os.path.exists(d):
            os.mkdir(d)
        return d

    def get_index(self, session):
        """Return the mail index, loading it from disk on first use."""
        if self.index:
            return self.index
        idx = MailIndex(self)
        idx.load(session)
        self.index = idx
        return idx

    def get_i18n_translation(self, session=None):
        """Install and return the gettext translation for prefs.language,
        falling back to the default catalog on failure."""
        language = self.prefs.language
        trans = None
        if language != "":
            try:
                trans = translation("mailpile", "locale", [language],
                                    codeset="utf-8")
            except IOError:
                if session:
                    # NOTE(review): warning text is not wrapped in _();
                    # inconsistent with other UI strings in this class.
                    session.ui.warning('Failed to load language %s'
                                       % language)
        if not trans:
            try:
                trans = translation("mailpile", "locale", codeset='utf-8')
            except IOError:
                if session:
                    session.ui.warning('Failed to configure i18n')
        if trans:
            trans.set_output_charset("utf-8")
            trans.install(unicode=True)
        return trans

    def open_file(self, ftype, fpath, mode='rb', mkdir=False):
        """Open `fpath` inside the data directory for `ftype`; returns
        (full_path, file_object).  Rejects parent-path traversal."""
        if '..' in fpath:
            raise ValueError(_('Parent paths are not allowed'))
        bpath = self.data_directory(ftype, mode=mode, mkdir=mkdir)
        fpath = os.path.join(bpath, fpath)
        return fpath, open(fpath, mode)


def prepare_workers(config, session, daemons=False):
    """Create the background session and start worker threads.

    With daemons=True also starts the cron worker (scheduling periodic
    rescans if configured) and the HTTP worker.
    """
    # Set globals from config first...
    mailpile.util.APPEND_FD_CACHE_SIZE = config.sys.fd_cache_size

    if not config.background:
        # Create a silent background session
        config.background = Session(config)
        config.background.ui = BackgroundInteraction(config)
        config.background.ui.block()

    # Start the workers
    if config.slow_worker == config.dumb_worker:
        config.slow_worker = Worker('Slow worker', session)
        config.slow_worker.start()
    if daemons and not config.cron_worker:
        config.cron_worker = Cron('Cron worker', session)
        config.cron_worker.start()

        # Schedule periodic rescanning, if requested.
        rescan_interval = config.prefs.rescan_interval
        if rescan_interval:
            def rescan():
                # Skip if a rescan is already in flight.
                if 'rescan' not in config._running:
                    rsc = Rescan(session, 'rescan')
                    rsc.serialize = False
                    config.slow_worker.add_task(None, 'Rescan', rsc.run)
            config.cron_worker.add_task('rescan', rescan_interval, rescan)

    if daemons and not config.http_worker:
        # Start the HTTP worker if requested
        sspec = (config.sys.http_host, config.sys.http_port)
        if sspec[0].lower() != 'disabled' and sspec[1] >= 0:
            config.http_worker = HttpWorker(session, sspec)
            config.http_worker.start()


def stop_workers(config):
    """Ask every running worker thread to quit."""
    for w in (config.http_worker, config.slow_worker, config.cron_worker):
        if w:
            w.quit()
class ConfigManager(ConfigDict):
    """
    This class manages the live global mailpile configuration. This includes
    the settings themselves, as well as global objects like the index and
    references to any background worker threads.
    """
    # Working directory; overridable with the MAILPILE_HOME env variable.
    DEFAULT_WORKDIR = os.environ.get('MAILPILE_HOME',
                                     os.path.expanduser('~/.mailpile'))

    def __init__(self, workdir=None, rules={}):
        # NOTE(review): mutable default `rules={}` — harmless only while
        # it is never mutated; it is handed straight to ConfigDict.
        ConfigDict.__init__(self, _rules=rules)
        self.workdir = workdir or self.DEFAULT_WORKDIR
        self.conffile = os.path.join(self.workdir, 'mailpile.cfg')
        self.background = None
        self.cron_worker = None
        self.http_worker = None
        # Placeholder worker; replaced by a real one in prepare_workers().
        self.dumb_worker = self.slow_worker = DumbWorker('Dumb worker', None)
        self.index = None
        self.vcards = {}
        self._mbox_cache = {}
        self._running = {}

    def _mkworkdir(self, session):
        """Create the working directory if it does not exist yet."""
        if not os.path.exists(self.workdir):
            if session:
                session.ui.notify(_('Creating: %s') % self.workdir)
            os.mkdir(self.workdir)

    def parse_config(self, session, data, source='internal'):
        """
        Parse a config file fragment. Invalid data will be ignored, but
        will generate warnings in the session UI. Returns True on a clean
        parse, False if any of the settings were bogus.

        >>> cfg.parse_config(session, '[config/sys]\\nfd_cache_size = 123\\n')
        True
        >>> cfg.sys.fd_cache_size
        123

        >>> cfg.parse_config(session, '[config/bogus]\\nblabla = bla\\n')
        False
        >>> [l[1] for l in session.ui.log_buffer if 'bogus' in l[1]][0]
        u'Invalid (internal): section config/bogus does not exist'

        >>> cfg.parse_config(session, '[config/sys]\\nhistory_length = 321\\n'
        ...                           'bogus_variable = 456\\n')
        False
        >>> cfg.sys.history_length
        321
        >>> [l[1] for l in session.ui.log_buffer if 'bogus_var' in l[1]][0]
        u'Invalid (internal): section config/sys, ...

        >>> cfg.parse_config(session, '[config/tags/a]\\nname = TagName\\n')
        True
        >>> cfg.tags['a']._key
        'a'
        >>> cfg.tags['a'].name
        u'TagName'
        """
        parser = CommentedEscapedConfigParser()
        parser.readfp(io.BytesIO(str(data)))
        okay = True

        def item_sorter(i):
            # Base-36 keys sort numerically when possible.
            try:
                return (int(i[0], 36), i[1])
            except:
                return i

        for section in parser.sections():
            # Sections are config/path/parts[:tag]; walk the config tree,
            # creating '_any' containers on demand.
            cfgpath = section.split(':')[0].split('/')[1:]
            cfg = self
            for part in cfgpath:
                if part in cfg:
                    cfg = cfg[part]
                elif '_any' in cfg.rules:
                    if isinstance(cfg, list):
                        cfg.append({})
                    else:
                        cfg[part] = {}
                    cfg = cfg[part]
                else:
                    if session:
                        msg = gettext(u'Invalid (%s): section %s does not '
                                      'exist') % (source, section)
                        session.ui.warning(msg)
                    okay = False
            # If resolving the section path failed, skip its items.
            items = okay and parser.items(section) or []
            items.sort(key=item_sorter)
            for var, val in items:
                try:
                    cfg[var] = val
                except (ValueError, KeyError):
                    if session:
                        msg = gettext(u'Invalid (%s): section %s, variable %s'
                                      ) % (source, section, var)
                        session.ui.warning(msg)
                    okay = False
        return okay

    def load(self, session, filename=None):
        """Reset this config object and reload it from disk.

        Attempts a one-time import of a legacy config.rc, then parses
        the main config file, enables translations and loads VCards.
        """
        self._mkworkdir(session)
        self.index = None
        self.reset(rules=False, data=True)

        # Best-effort import of the legacy config format; never fatal.
        try:
            ocfg = os.path.join(self.workdir, 'config.rc')
            ocl = OldConfigLoader(filename=ocfg)
            if ocl.export(self) and session:
                session.ui.warning(
                    _('WARNING: Imported old config from %s') % ocfg)
            elif ocl.lines and session:
                session.ui.warning(
                    _('WARNING: Failed to import config from %s') % ocfg)
        except:
            import traceback
            traceback.print_exc()

        filename = filename or self.conffile
        lines = []
        try:
            fd = open(filename, 'rb')
            try:
                decrypt_and_parse_lines(fd, lambda l: lines.append(l))
            except ValueError:
                pass
            fd.close()
        except IOError:
            pass  # Missing config file is fine on first run.

        self.parse_config(session, '\n'.join(lines), source=filename)
        self.get_i18n_translation(session)
        self.vcards = VCardStore(
            self, self.data_directory('vcards', mode='rw', mkdir=True))
        self.vcards.load_vcards(session)

    def save(self):
        """Write the private config to disk, GPG-encrypting if configured."""
        self._mkworkdir(None)
        fd = gpg_open(self.conffile, self.prefs.get('gpg_recipient'), 'wb')
        fd.write(self.as_config_bytes(private=True))
        fd.close()
        self.get_i18n_translation()

    def clear_mbox_cache(self):
        """Drop all cached open-mailbox objects."""
        self._mbox_cache = {}

    def get_mailboxes(self):
        """Return a sorted list of (mailbox_id, path) pairs."""
        def fmt_mbxid(k):
            # Normalize to a zero-padded, fixed-width base-36 ID.
            k = b36(int(k, 36))
            if len(k) > MBX_ID_LEN:
                raise ValueError(_('Mailbox ID too large: %s') % k)
            return (('0' * MBX_ID_LEN) + k)[-MBX_ID_LEN:]
        mailboxes = [fmt_mbxid(k) for k in self.sys.mailbox.keys()]
        mailboxes.sort()
        return [(k, self.sys.mailbox[k]) for k in mailboxes]

    def is_editable_message(self, msg_info):
        """Return True if the message lives only in editable mailboxes and
        carries at least one tag flagged as editable."""
        for ptr in msg_info[MailIndex.MSG_PTRS].split(', '):
            if not self.is_editable_mailbox(ptr[:MBX_ID_LEN]):
                return False
        editable = False
        for tid in msg_info[MailIndex.MSG_TAGS].split(', '):
            try:
                if self.tags and self.tags[tid].flag_editable:
                    editable = True
            except (KeyError, AttributeError):
                pass
        return editable

    def is_editable_mailbox(self, mailbox_id):
        """Return True only for the local (drafts) mailbox."""
        # None maps to -1, which can never equal a valid base-36 ID.
        mailbox_id = (mailbox_id is None and -1) or int(mailbox_id, 36)
        local_mailbox_id = int(self.sys.get('local_mailbox_id', 'ZZZZZ'), 36)
        return (mailbox_id == local_mailbox_id)

    def open_mailbox(self, session, mailbox_id):
        """Return a (possibly cached) mailbox object for the given ID.

        Raises NoSuchMailboxError if the ID is unknown.
        """
        try:
            mbx_id = mailbox_id.lower()
            mfn = self.sys.mailbox[mbx_id]
            pfn = os.path.join(self.workdir, 'pickled-mailbox.%s' % mbx_id)
        except KeyError:
            raise NoSuchMailboxError(_('No such mailbox: %s') % mbx_id)
        try:
            if mbx_id in self._mbox_cache:
                self._mbox_cache[mbx_id].update_toc()
            else:
                if session:
                    session.ui.mark(_('%s: Updating: %s') % (mbx_id, mfn))
                # NOTE(review): unpickling a local cache file; only safe
                # because the workdir is trusted.
                self._mbox_cache[mbx_id] = cPickle.load(open(pfn, 'r'))
        except:
            # Any failure (missing/corrupt pickle) falls back to opening
            # the mailbox from scratch and re-caching it.
            if self.sys.debug:
                import traceback
                traceback.print_exc()
            if session:
                session.ui.mark(
                    _('%s: Opening: %s (may take a while)') % (mbx_id, mfn))
            mbox = OpenMailbox(mfn)
            mbox.editable = self.is_editable_mailbox(mbx_id)
            mbox.save(session, to=pfn)
            self._mbox_cache[mbx_id] = mbox
        return self._mbox_cache[mbx_id]

    def open_local_mailbox(self, session):
        """Open (creating on first use) the local Maildir; return
        (local_id, mailbox)."""
        local_id = self.sys.get('local_mailbox_id', None)
        if not local_id:
            mailbox = os.path.join(self.workdir, 'mail')
            mbx = IncrementalMaildir(mailbox)
            local_id = self.sys.mailbox.append(mailbox)
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
            self.sys.local_mailbox_id = local_id
        else:
            local_id = (('0' * MBX_ID_LEN) + local_id)[-MBX_ID_LEN:]
        return local_id, self.open_mailbox(session, local_id)

    def get_profile(self, email=None):
        """Return the profile dict matching `email` (or the default one),
        merged over a default profile skeleton."""
        find = email or self.prefs.get('default_email', None)
        default_profile = {
            'name': None,
            'email': find,
            'signature': None,
            'route': self.prefs.default_route
        }
        for profile in self.profiles:
            if profile.email == find or not find:
                if not email:
                    # Remember the first match as the default address.
                    self.prefs.default_email = profile.email
                return dict_merge(default_profile, profile)
        return default_profile

    def get_sendmail(self, frm, rcpts='-t'):
        """Return the sendmail command for `frm`, with recipients filled in."""
        return self.get_profile(frm)['route'] % {'rcpt': ', '.join(rcpts)}

    def data_directory(self, ftype, mode='rb', mkdir=False):
        """
        Return the path to a data directory for a particular type of file
        data, optionally creating the directory if it is missing.

        >>> p = cfg.data_directory('html_theme', mode='r', mkdir=False)
        >>> p == os.path.abspath('static/default')
        True
        """
        # This should raise a KeyError if the ftype is unrecognized
        bpath = self.sys.path.get(ftype)
        if not bpath.startswith('/'):
            cpath = os.path.join(self.workdir, bpath)
            if os.path.exists(cpath) or 'w' in mode:
                bpath = cpath
                if mkdir and not os.path.exists(cpath):
                    os.mkdir(cpath)
            else:
                # Fall back to the directory shipped with the source tree.
                bpath = os.path.join(os.path.dirname(__file__), '..', bpath)
        return os.path.abspath(bpath)

    def history_file(self):
        """Path of the CLI history file."""
        return os.path.join(self.workdir, 'history')

    def mailindex_file(self):
        """Path of the mail index file."""
        return os.path.join(self.workdir, 'mailpile.idx')

    def postinglist_dir(self, prefix):
        """Return (creating as needed) the search-posting-list directory
        bucketed by the first character of `prefix`."""
        d = os.path.join(self.workdir, 'search')
        if not os.path.exists(d):
            os.mkdir(d)
        d = os.path.join(d, prefix and prefix[0] or '_')
        if not os.path.exists(d):
            os.mkdir(d)
        return d

    def get_index(self, session):
        """Return the mail index, loading it from disk on first use."""
        if self.index:
            return self.index
        idx = MailIndex(self)
        idx.load(session)
        self.index = idx
        return idx

    def get_i18n_translation(self, session=None):
        """Install and return the gettext translation for prefs.language,
        using a NullTranslations fallback when no catalog is found."""
        language = self.prefs.language
        trans = None
        if language != "":
            try:
                trans = translation("mailpile", "locale",
                                    [language], codeset="utf-8")
            except IOError:
                if session:
                    # NOTE(review): warning text is not wrapped in _();
                    # inconsistent with other UI strings in this class.
                    session.ui.warning('Failed to load language %s'
                                       % language)
        if not trans:
            # fallback=True guarantees a translation object (possibly a
            # NullTranslations) instead of raising IOError.
            trans = translation("mailpile", "locale",
                                codeset='utf-8', fallback=True)
            if session and isinstance(trans, NullTranslations):
                session.ui.warning('Failed to configure i18n. Using fallback.')
        if trans:
            trans.set_output_charset("utf-8")
            trans.install(unicode=True)
        return trans

    def open_file(self, ftype, fpath, mode='rb', mkdir=False):
        """Open `fpath` inside the data directory for `ftype`; returns
        (full_path, file_object).  Rejects parent-path traversal."""
        if '..' in fpath:
            raise ValueError(_('Parent paths are not allowed'))
        bpath = self.data_directory(ftype, mode=mode, mkdir=mkdir)
        fpath = os.path.join(bpath, fpath)
        return fpath, open(fpath, mode)


def prepare_workers(config, session, daemons=False):
    """Create the background session and start worker threads.

    With daemons=True also starts the cron worker (scheduling periodic
    rescans if configured) and the HTTP worker.
    """
    # Set globals from config first...
    mailpile.util.APPEND_FD_CACHE_SIZE = config.sys.fd_cache_size

    if not config.background:
        # Create a silent background session
        config.background = Session(config)
        config.background.ui = BackgroundInteraction(config)
        config.background.ui.block()

    # Start the workers
    if config.slow_worker == config.dumb_worker:
        config.slow_worker = Worker('Slow worker', session)
        config.slow_worker.start()
    if daemons and not config.cron_worker:
        config.cron_worker = Cron('Cron worker', session)
        config.cron_worker.start()

        # Schedule periodic rescanning, if requested.
        rescan_interval = config.prefs.rescan_interval
        if rescan_interval:
            def rescan():
                # Skip if a rescan is already in flight.
                if 'rescan' not in config._running:
                    rsc = Rescan(session, 'rescan')
                    rsc.serialize = False
                    config.slow_worker.add_task(None, 'Rescan', rsc.run)
            config.cron_worker.add_task('rescan', rescan_interval, rescan)

    if daemons and not config.http_worker:
        # Start the HTTP worker if requested
        sspec = (config.sys.http_host, config.sys.http_port)
        if sspec[0].lower() != 'disabled' and sspec[1] >= 0:
            config.http_worker = HttpWorker(session, sspec)
            config.http_worker.start()


def stop_workers(config):
    """Ask every running worker thread to quit."""
    for w in (config.http_worker, config.slow_worker, config.cron_worker):
        if w:
            w.quit()