def getmaxage(self):
    """Return the account's maxage option as a time_struct, or None.

    'maxage' may be either a positive integer (a number of days) or a
    date string of the form YYYY-mm-dd (year 1900 or later, not in the
    future).

    :returns: a time.struct_time, or None when the option is unset.
    :raises OfflineImapError: on an invalid or out-of-range value."""
    raw = self.config.getdefault(
        "Account %s" % self.accountname, "maxage", None)
    if raw is None:
        return None
    # First interpretation: an integer number of days back from now.
    try:
        days = int(raw)
    except ValueError:
        days = None  # Not a number; fall through to the date form.
    if days is not None:
        if days < 1:
            raise OfflineImapError("invalid maxage value %d" % days,
                                   OfflineImapError.ERROR.MESSAGE)
        return time.gmtime(time.time() - 60 * 60 * 24 * days)
    # Second interpretation: a YYYY-mm-dd date string.
    try:
        parsed = time.strptime(raw, "%Y-%m-%d")
        if parsed[0] < 1900:
            # Years before 1900 break strftime-based code downstream.
            raise OfflineImapError("maxage led to year %d. "
                                   "Abort syncing." % parsed[0],
                                   OfflineImapError.ERROR.MESSAGE)
        if (time.mktime(parsed) - time.mktime(time.localtime())) > 0:
            raise OfflineImapError("maxage led to future date %s. "
                                   "Abort syncing." % raw,
                                   OfflineImapError.ERROR.MESSAGE)
        return parsed
    except ValueError:
        raise OfflineImapError("invalid maxage value %s" % raw,
                               OfflineImapError.ERROR.MESSAGE)
def __new__(cls, account, reqtype):
    """Factory: return the repository instance matching *reqtype*.

    :param account: :class:`Account`
    :param reqtype: 'remote', 'local', or 'status'
    :raises OfflineImapError: for an unknown request or repository type."""
    if reqtype == 'status':
        # Status repositories have a fixed implementation; no config
        # 'type' lookup is needed.
        return LocalStatusRepository(
            account.getconf('localrepository'), account)
    if reqtype == 'remote':
        # We don't support Maildirs on the remote side.
        name = account.getconf('remoterepository')
        implementations = {
            'IMAP': IMAPRepository,
            'Gmail': GmailRepository,
        }
    elif reqtype == 'local':
        name = account.getconf('localrepository')
        implementations = {
            'IMAP': MappedIMAPRepository,
            'Maildir': MaildirRepository,
            'GmailMaildir': GmailMaildirRepository,
        }
    else:
        raise OfflineImapError(
            "Repository type %s not supported" % reqtype,
            OfflineImapError.ERROR.REPO)

    # Read the declared type of the repository section.
    config = account.getconfig()
    try:
        declared_type = config.get('Repository ' + name, 'type').strip()
    except NoSectionError:
        errstr = ("Could not find section '%s' in configuration. Required "
                  "for account '%s'." % ('Repository %s' % name, account))
        # Re-raise as OfflineImapError while keeping the traceback.
        six.reraise(OfflineImapError,
                    OfflineImapError(errstr, OfflineImapError.ERROR.REPO),
                    exc_info()[2])

    try:
        chosen = implementations[declared_type]
    except KeyError:
        errstr = "'%s' repository not supported for '%s' repositories." % \
            (declared_type, reqtype)
        six.reraise(OfflineImapError,
                    OfflineImapError(errstr, OfflineImapError.ERROR.REPO),
                    exc_info()[2])
    return chosen(name, account)
def __init__(self, reposname, account):
    """Initialize the local-status repository for *account*.

    Only the sqlite backend is supported; the legacy 'plain' backend
    remains registered but is never selected, and setting the removed
    'status_backend' option aborts with an error."""
    BaseRepository.__init__(self, reposname, account)

    meta = account.getaccountmeta()
    # Folder class and filesystem root for every known backend.
    self.backends = {
        'sqlite': {
            'class': LocalStatusSQLiteFolder,
            'root': os.path.join(meta, 'LocalStatus-sqlite'),
        },
        'plain': {
            'class': LocalStatusFolder,
            'root': os.path.join(meta, 'LocalStatus'),
        },
    }

    if self.account.getconf('status_backend', None) is not None:
        raise OfflineImapError(
            "the 'status_backend' configuration option is not supported"
            " anymore; please, remove this configuration option.",
            OfflineImapError.ERROR.REPO)

    # Select the sqlite backend (presumably sets self.root from the
    # table above -- see setup_backend).
    self.setup_backend('sqlite')
    if not os.path.exists(self.root):
        os.mkdir(self.root, 0o700)

    # Cache of foldername -> LocalStatusFolder instances.
    self._folders = {}
def getstartdate(self):
    """Return the repository's 'startdate' option as a time_struct.

    The option must be a date string of the form YYYY-mm-dd with a
    year of 1900 or later.

    :returns: a time.struct_time, or None when the option is unset
        or empty.
    :raises OfflineImapError: if the value cannot be parsed or the
        year is before 1900."""
    datestr = self.config.getdefault("Repository " + self.repository.name,
                                     'startdate', None)
    if not datestr:
        return None
    try:
        date = time.strptime(datestr, "%Y-%m-%d")
    except ValueError:
        # BUGFIX: the message previously contained a literal,
        # unformatted '%s' (the '% datestr' was missing).
        raise OfflineImapError("invalid startdate value %s" % datestr,
                               OfflineImapError.ERROR.MESSAGE)
    if date[0] < 1900:
        # Years before 1900 break strftime-based code downstream.
        raise OfflineImapError("startdate led to year %d. "
                               "Abort syncing." % date[0],
                               OfflineImapError.ERROR.MESSAGE)
    return date
def getfolder(self, foldername):
    """Return the Folder instance of this Maildir named *foldername*.

    getfolders() scans and caches all folder names if necessary, so
    only existing folders are returned and two calls with the same
    name yield the same object.

    :raises OfflineImapError: if no folder with that name exists."""
    # getfolders() will scan and cache the values *if* necessary.
    found = next(
        (candidate for candidate in self.getfolders()
         if candidate.name == foldername),
        None)
    if found is not None:
        return found
    raise OfflineImapError(
        "getfolder() asked for a nonexisting "
        "folder '%s'." % foldername,
        OfflineImapError.ERROR.FOLDER)
def copymessageto(self, uid, dstfolder, statusfolder, register=1):
    """Copies a message from self to dst if needed, updating the status.

    Note that this function does not check against dryrun settings,
    so you need to ensure that it is never called in a dryrun mode.

    :param uid: uid of the message to be copied.
    :param dstfolder: A BaseFolder-derived instance
    :param statusfolder: A LocalStatusFolder instance
    :param register: whether we should register a new thread (truthy flag).
    :returns: Nothing on success, or raises an Exception."""
    # Sometimes, it could be the case that if a sync takes awhile,
    # a message might be deleted from the maildir before it can be
    # synced to the status cache. This is only a problem with
    # self.getmessage(). So, don't call self.getmessage unless
    # really needed.
    if register:
        # Output that we start a new thread.
        self.ui.registerthread(self.repository.account)
    try:
        message = None
        flags = self.getmessageflags(uid)
        rtime = self.getmessagetime(uid)
        # If any of the destinations actually stores the message body,
        # load it up.
        if dstfolder.storesmessages():
            message = self.getmessage(uid)
        # Succeeded? -> IMAP actually assigned a UID. If newid
        # remained negative, no server was willing to assign us an
        # UID. If newid is 0, saving succeeded, but we could not
        # retrieve the new UID. Ignore message in this case.
        new_uid = dstfolder.savemessage(uid, message, flags, rtime)
        if new_uid > 0:
            if new_uid != uid:
                # Got new UID, change the local uid to match the new one.
                self.change_message_uid(uid, new_uid)
                # Drop the status entry for the old uid; a fresh one is
                # saved below under the new uid.
                statusfolder.deletemessage(uid)
            # Save uploaded status in the statusfolder.
            statusfolder.savemessage(new_uid, message, flags, rtime)
            # Check whether the mail has been seen.
            if 'S' not in flags:
                self.have_newmail = True
        elif new_uid == 0:
            # Message was stored to dstfolder, but we can't find it's UID.
            # This means we can't link current message to the one created
            # in IMAP. So we just delete local message and on next run
            # we'll sync it back.
            # XXX This could cause infinite loop on syncing between two
            # IMAP servers ...
            self.deletemessage(uid)
        else:
            # Negative uid: the server refused to assign one at all.
            raise OfflineImapError("Trying to save msg (uid %d) on folder "
                                   "%s returned invalid uid %d" %
                                   (uid, dstfolder.getvisiblename(), new_uid),
                                   OfflineImapError.ERROR.MESSAGE)
    except KeyboardInterrupt:
        # Bubble up CTRL-C.
        raise
    except OfflineImapError as e:
        if e.severity > OfflineImapError.ERROR.MESSAGE:
            raise  # Bubble severe errors up.
        # Message-level errors are logged and the sync continues.
        self.ui.error(e, exc_info()[2])
    except Exception as e:
        self.ui.error(e, exc_info()[2],
                      msg="Copying message %s [acc: %s]" %
                      (uid, self.accountname))
        raise  # Raise on unknown errors, so we can fix those.
def sync_folder_structure(self, dst_repo, status_repo):
    """Syncs the folders in this repository to those in dest.

    It does NOT sync the contents of those folders. nametrans rules
    in both directions will be honored, but there are NO checks yet
    that forward and backward nametrans actually match up!
    Configuring nametrans on BOTH repositories therefore could lead
    to infinite folder creation cycles."""
    if not self.get_create_folders() and not dst_repo.get_create_folders():
        # Quick exit if no folder creation is enabled on either side.
        return
    src_repo = self
    src_folders = src_repo.getfolders()
    dst_folders = dst_repo.getfolders()
    # Do we need to refresh the folder list afterwards?
    src_haschanged, dst_haschanged = False, False
    # Create hashes with the names, but convert the source folders
    # to the dest folder's sep.
    src_hash = {}
    for folder in src_folders:
        src_hash[folder.getvisiblename().replace(
            src_repo.getsep(), dst_repo.getsep())] = folder
    dst_hash = {}
    for folder in dst_folders:
        dst_hash[folder.getvisiblename().replace(
            dst_repo.getsep(), src_repo.getsep())] = folder
    # Find new folders on src_repo.
    for src_name_t, src_folder in src_hash.iteritems():
        # Don't create on dst_repo, if it is readonly.
        if not dst_repo.get_create_folders():
            break
        # NOTE(review): membership is tested against the *list* of folder
        # objects, not dst_hash -- presumably this relies on the folder
        # class defining __eq__ against plain strings; confirm.
        if src_folder.sync_this and not src_name_t in dst_folders:
            try:
                dst_repo.makefolder(src_name_t)
                dst_haschanged = True  # Need to refresh list
            except OfflineImapError as e:
                self.ui.error(
                    e, exc_info()[2],
                    "Creating folder %s on repository %s" %
                    (src_name_t, dst_repo))
                raise
            status_repo.makefolder(
                src_name_t.replace(dst_repo.getsep(), status_repo.getsep()))
    # Find new folders on dst_repo.
    for dst_name_t, dst_folder in dst_hash.iteritems():
        if not src_repo.get_create_folders():
            # Don't create missing folder on readonly repo.
            break
        # NOTE(review): same string-vs-folder-list membership as above.
        if dst_folder.sync_this and not dst_name_t in src_folders:
            # nametrans sanity check!
            # Does nametrans back&forth lead to identical names?
            # 1) would src repo filter out the new folder name? In this
            # case don't create it on it:
            if not self.should_sync_folder(dst_name_t):
                self.ui.debug(
                    '', "Not creating folder '%s' (repository '%s"
                    "') as it would be filtered out on that repository." %
                    (dst_name_t, self))
                continue
            # Get IMAPFolder and see if the reverse nametrans
            # works fine. TODO: getfolder() works only because we
            # succeed in getting inexisting folders which I would
            # like to change. Take care!
            folder = self.getfolder(dst_name_t)
            # Apply reverse nametrans to see if we end up with the same name.
            newdst_name = folder.getvisiblename().replace(
                src_repo.getsep(), dst_repo.getsep())
            if dst_folder.name != newdst_name:
                raise OfflineImapError(
                    "INFINITE FOLDER CREATION DETECTED! "
                    "Folder '%s' (repository '%s') would be created as fold"
                    "er '%s' (repository '%s'). The latter becomes '%s' in "
                    "return, leading to infinite folder creation cycles.\n "
                    "SOLUTION: 1) Do set your nametrans rules on both repos"
                    "itories so they lead to identical names if applied bac"
                    "k and forth. 2) Use folderfilter settings on a reposit"
                    "ory to prevent some folders from being created on the "
                    "other side." %
                    (dst_folder.name, dst_repo, dst_name_t, src_repo,
                     newdst_name),
                    OfflineImapError.ERROR.REPO)
            # End sanity check, actually create the folder.
            try:
                src_repo.makefolder(dst_name_t)
                src_haschanged = True  # Need to refresh list
            except OfflineImapError as e:
                self.ui.error(
                    e, exc_info()[2], "Creating folder %s on "
                    "repository %s" % (dst_name_t, src_repo))
                raise
            status_repo.makefolder(
                dst_name_t.replace(src_repo.getsep(), status_repo.getsep()))
    # Find deleted folders.
    # TODO: We don't delete folders right now.

    # Forget old list of cached folders so we get new ones if needed.
    if src_haschanged:
        self.forgetfolders()
    if dst_haschanged:
        dst_repo.forgetfolders()
def sync_folder_structure(self, local_repo, status_repo):
    """Sync the folders structure.

    It does NOT sync the contents of those folders. nametrans rules
    in both directions will be honored.
    Configuring nametrans on BOTH repositories could lead to infinite
    folder creation cycles."""
    if not self.should_create_folders(
            ) and not local_repo.should_create_folders():
        # Quick exit if no folder creation is enabled on either side.
        return

    remote_repo = self
    remote_hash, local_hash = {}, {}

    # Index both sides by their (untranslated) folder names.
    for folder in remote_repo.getfolders():
        remote_hash[folder.getname()] = folder

    for folder in local_repo.getfolders():
        local_hash[folder.getname()] = folder

    # Create new folders from remote to local.
    for remote_name, remote_folder in remote_hash.items():
        # Don't create on local_repo, if it is readonly.
        if not local_repo.should_create_folders():
            break

        # Apply remote nametrans and fix separator.
        local_name = remote_folder.getvisiblename().replace(
            remote_repo.getsep(), local_repo.getsep())
        if remote_folder.sync_this and not local_name in local_hash.keys():
            try:
                local_repo.makefolder(local_name)
                # Need to refresh list.
                local_repo.forgetfolders()
            except OfflineImapError as e:
                self.ui.error(
                    e, exc_info()[2],
                    "Creating folder %s on repository %s" %
                    (local_name, local_repo))
                raise
            status_repo.makefolder(
                local_name.replace(local_repo.getsep(),
                                   status_repo.getsep()))

    # Create new folders from local to remote.
    for local_name, local_folder in local_hash.items():
        if not remote_repo.should_create_folders():
            # Don't create missing folder on readonly repo.
            break

        # Apply reverse nametrans and fix separator.
        remote_name = local_folder.getvisiblename().replace(
            local_repo.getsep(), remote_repo.getsep())
        if local_folder.sync_this and not remote_name in remote_hash.keys():
            # Would the remote filter out the new folder name? In this case
            # don't create it.
            if not remote_repo.should_sync_folder(remote_name):
                self.ui.debug(
                    '', "Not creating folder '%s' (repository '%s"
                    "') as it would be filtered out on that repository." %
                    (remote_name, self))
                continue

            # nametrans sanity check! Does remote nametrans lead to the
            # original local name?
            #
            # Apply remote nametrans to see if we end up with the same
            # name. We have:
            # - remote_name: local_name -> reverse nametrans + separator
            # We want local_name == loop_name from:
            # - remote_name -> remote (nametrans + separator) -> loop_name
            #
            # Get IMAPFolder and see if the reverse nametrans works fine.
            # TODO: getfolder() works only because we succeed in getting
            # inexisting folders which I would like to change. Take care!
            tmp_remotefolder = remote_repo.getfolder(remote_name)
            loop_name = tmp_remotefolder.getvisiblename().replace(
                remote_repo.getsep(), local_repo.getsep())
            if local_name != loop_name:
                raise OfflineImapError(
                    "INFINITE FOLDER CREATION DETECTED! "
                    "Folder '%s' (repository '%s') would be created as fold"
                    "er '%s' (repository '%s'). The latter becomes '%s' in "
                    "return, leading to infinite folder creation cycles.\n "
                    "SOLUTION: 1) Do set your nametrans rules on both repos"
                    "itories so they lead to identical names if applied bac"
                    "k and forth. 2) Use folderfilter settings on a reposit"
                    "ory to prevent some folders from being created on the "
                    "other side." %
                    (local_folder.getname(), local_repo, remote_name,
                     remote_repo, loop_name),
                    OfflineImapError.ERROR.REPO)
            # End sanity check, actually create the folder.
            try:
                remote_repo.makefolder(remote_name)
                # Need to refresh list.
                self.forgetfolders()
            except OfflineImapError as e:
                self.ui.error(
                    e, exc_info()[2], "Creating folder %s on "
                    "repository %s" % (remote_name, remote_repo))
                raise
            status_repo.makefolder(
                local_name.replace(local_repo.getsep(),
                                   status_repo.getsep()))
    # Find deleted folders.
    # TODO: We don't delete folders right now.
    return None
def run(self):
    """Parse the commandline and invoke everything.

    Builds the option parser, reads the configuration file, applies
    command-line overrides, selects the UI, installs signal handlers,
    writes the pid file and takes the global lock, then runs the sync
    either single- or multi-threaded. (NOTE(review): Python 2 code --
    `except Exception, e` below.)"""
    parser = OptionParser(
        version=offlineimap.__version__,
        description="%s.\n\n%s" % (offlineimap.__copyright__,
                                   offlineimap.__license__))
    parser.add_option(
        "-1",
        action="store_true",
        dest="singlethreading",
        default=False,
        help="Disable all multithreading operations and use "
        "solely a single-thread sync. This effectively sets the "
        "maxsyncaccounts and all maxconnections configuration file "
        "variables to 1.")
    parser.add_option(
        "-P",
        dest="profiledir",
        metavar="DIR",
        help="Sets OfflineIMAP into profile mode. The program "
        "will create DIR (it must not already exist). "
        "As it runs, Python profiling information about each "
        "thread is logged into profiledir. Please note: "
        "This option is present for debugging and optimization "
        "only, and should NOT be used unless you have a "
        "specific reason to do so. It will significantly "
        "decrease program performance, may reduce reliability, "
        "and can generate huge amounts of data. This option "
        "implies the -1 option.")
    parser.add_option(
        "-a",
        dest="accounts",
        metavar="ACCOUNTS",
        help="""Overrides the accounts section in the config file.
    Lets you specify a particular account or set of
    accounts to sync without having to edit the config
    file. You might use this to exclude certain accounts,
    or to sync some accounts that you normally prefer not to.""")
    parser.add_option(
        "-c",
        dest="configfile",
        metavar="FILE",
        default="~/.offlineimaprc",
        help="Specifies a configuration file to use in lieu of "
        "%default.")
    parser.add_option(
        "-d",
        dest="debugtype",
        metavar="type1,[type2...]",
        help="""Enables debugging for OfflineIMAP. This is useful
    if you are to track down a malfunction or figure out what is
    going on under the hood. This option requires one or more
    debugtypes, separated by commas. These define what exactly
    will be debugged, and so far include two options: imap, thread,
    maildir or ALL. The imap option will enable IMAP protocol
    stream and parsing debugging. Note that the output may contain
    passwords, so take care to remove that from the debugging
    output before sending it to anyone else. The maildir option
    will enable debugging for certain Maildir operations.
    The use of any debug option (unless 'thread' is included),
    implies the single-thread option -1.""")
    parser.add_option("-l", dest="logfile", metavar="FILE",
                      help="Log to FILE")
    parser.add_option(
        "-f",
        dest="folders",
        metavar="folder1,[folder2...]",
        help="Only sync the specified folders. The folder names "
        "are the *untranslated* foldernames. This "
        "command-line option overrides any 'folderfilter' "
        "and 'folderincludes' options in the configuration "
        "file.")
    parser.add_option(
        "-k",
        dest="configoverride",
        action="append",
        metavar="[section:]option=value",
        help="""Override configuration file option. If"section" is
    omitted, it defaults to "general". Any underscores
    in the section name are replaced with spaces:
    for instance, to override option "autorefresh" in
    the "[Account Personal]" section in the config file
    one would use "-k Account_Personal:autorefresh=30".""")
    parser.add_option(
        "-o",
        action="store_true",
        dest="runonce",
        default=False,
        help="Run only once, ignoring any autorefresh setting "
        "in the configuration file.")
    parser.add_option(
        "-q",
        action="store_true",
        dest="quick",
        default=False,
        help="Run only quick synchronizations. Ignore any "
        "flag updates on IMAP servers (if a flag on the remote IMAP "
        "changes, and we have the message locally, it will be left "
        "untouched in a quick run.")
    parser.add_option(
        "-u",
        dest="interface",
        help="Specifies an alternative user interface to "
        "use. This overrides the default specified in the "
        "configuration file. The UI specified with -u will "
        "be forced to be used, even if checks determine that it is "
        "not usable. Possible interface choices are: %s " %
        ", ".join(UI_LIST.keys()))
    (options, args) = parser.parse_args()

    # Read in configuration file.
    configfilename = os.path.expanduser(options.configfile)
    config = CustomConfigParser()
    if not os.path.exists(configfilename):
        logging.error(" *** Config file '%s' does not exist; aborting!" %
                      configfilename)
        sys.exit(1)
    config.read(configfilename)

    # Profile mode chosen?
    if options.profiledir:
        if not options.singlethreading:
            # Profiling requires a deterministic single thread.
            logging.warn("Profile mode: Forcing to singlethreaded.")
            options.singlethreading = True
        if os.path.exists(options.profiledir):
            logging.warn("Profile mode: Directory '%s' already exists!" %
                         options.profiledir)
        else:
            os.mkdir(options.profiledir)
        threadutil.ExitNotifyThread.set_profiledir(options.profiledir)
        logging.warn("Profile mode: Potentially large data will be "
                     "created in '%s'" % options.profiledir)

    # Override a config value.
    if options.configoverride:
        for option in options.configoverride:
            (key, value) = option.split('=', 1)
            if ':' in key:
                (secname, key) = key.split(':', 1)
                section = secname.replace("_", " ")
            else:
                section = "general"
            config.set(section, key, value)

    # Which ui to use? cmd line option overrides config file.
    ui_type = config.getdefault('general', 'ui', 'ttyui')
    if options.interface != None:
        ui_type = options.interface
    if '.' in ui_type:
        # Transform Curses.Blinkenlights -> Blinkenlights.
        ui_type = ui_type.split('.')[-1]
        logging.warning('Using old interface name, consider using one '
                        'of %s' % ', '.join(UI_LIST.keys()))
    try:
        # Create the ui class.
        ui = UI_LIST[ui_type.lower()](config)
    except KeyError:
        logging.error("UI '%s' does not exist, choose one of: %s" % \
                      (ui_type, ', '.join(UI_LIST.keys())))
        sys.exit(1)
    setglobalui(ui)

    # Set up additional log files.
    if options.logfile:
        ui.setlogfd(open(options.logfile, 'wt'))

    # Welcome blurb.
    ui.init_banner()

    if options.debugtype:
        if options.debugtype.lower() == 'all':
            options.debugtype = 'imap,maildir,thread'
        # Force single threading?
        if not ('thread' in options.debugtype.split(',') \
                and not options.singlethreading):
            ui._msg("Debug mode: Forcing to singlethreaded.")
            options.singlethreading = True
        debugtypes = options.debugtype.split(',') + ['']
        for type in debugtypes:
            type = type.strip()
            ui.add_debug(type)
            if type.lower() == 'imap':
                imaplib.Debug = 5

    if options.runonce:
        # FIXME: maybe need a better way to do this.
        for section in accounts.getaccountlist(config):
            config.remove_option('Account ' + section, "autorefresh")

    if options.quick:
        for section in accounts.getaccountlist(config):
            config.set('Account ' + section, "quick", '-1')

    # Custom folder list specified?
    if options.folders:
        foldernames = options.folders.split(",")
        folderfilter = "lambda f: f in %s" % foldernames
        folderincludes = "[]"
        for accountname in accounts.getaccountlist(config):
            account_section = 'Account ' + accountname
            remote_repo_section = 'Repository ' + \
                config.get(account_section, 'remoterepository')
            local_repo_section = 'Repository ' + \
                config.get(account_section, 'localrepository')
            for section in [remote_repo_section, local_repo_section]:
                config.set(section, "folderfilter", folderfilter)
                config.set(section, "folderincludes", folderincludes)

    self.config = config

    def sigterm_handler(signum, frame):
        # Die immediately.
        ui = getglobalui()
        ui.terminate(errormsg="terminating...")
    signal.signal(signal.SIGTERM, sigterm_handler)

    # Write the pid file; best-effort, failures are ignored.
    try:
        pidfd = open(config.getmetadatadir() + "/pid", "w")
        pidfd.write(str(os.getpid()) + "\n")
        pidfd.close()
    except:
        pass

    try:
        if options.logfile:
            sys.stderr = ui.logfile
        socktimeout = config.getdefaultint("general", "socktimeout", 0)
        if socktimeout > 0:
            socket.setdefaulttimeout(socktimeout)
        activeaccounts = config.get("general", "accounts")
        if options.accounts:
            activeaccounts = options.accounts
        activeaccounts = activeaccounts.replace(" ", "")
        activeaccounts = activeaccounts.split(",")
        allaccounts = accounts.AccountHashGenerator(config)
        syncaccounts = []
        for account in activeaccounts:
            if account not in allaccounts:
                if len(allaccounts) == 0:
                    errormsg = 'The account "%s" does not exist because no accounts are defined!' % account
                else:
                    errormsg = 'The account "%s" does not exist. Valid accounts are:' % account
                    for name in allaccounts.keys():
                        errormsg += '\n%s' % name
                ui.terminate(1,
                             errortitle='Unknown Account "%s"' % account,
                             errormsg=errormsg)
            if account not in syncaccounts:
                syncaccounts.append(account)

        server = None
        remoterepos = None
        localrepos = None

        # Bound the number of concurrently syncing accounts / connections.
        threadutil.initInstanceLimit(
            'ACCOUNTLIMIT',
            config.getdefaultint('general', 'maxsyncaccounts', 1))
        for reposname in config.getsectionlist('Repository'):
            for instancename in [
                    "FOLDER_" + reposname, "MSGCOPY_" + reposname
            ]:
                if options.singlethreading:
                    threadutil.initInstanceLimit(instancename, 1)
                else:
                    threadutil.initInstanceLimit(
                        instancename,
                        config.getdefaultint('Repository ' + reposname,
                                             'maxconnections', 2))

        def sig_handler(sig, frame):
            if sig == signal.SIGUSR1 or sig == signal.SIGHUP:
                # Tell each account to stop sleeping.
                accounts.Account.set_abort_event(self.config, 1)
            elif sig == signal.SIGUSR2:
                # Tell each account to stop looping.
                accounts.Account.set_abort_event(self.config, 2)
        signal.signal(signal.SIGHUP, sig_handler)
        signal.signal(signal.SIGUSR1, sig_handler)
        signal.signal(signal.SIGUSR2, sig_handler)

        # Various initializations that need to be performed:
        offlineimap.mbnames.init(config, syncaccounts)

        # TODO: keep legacy lock for a few versions, then remove.
        self._legacy_lock = open(self.config.getmetadatadir() + "/lock",
                                 'w')
        try:
            fcntl.lockf(self._legacy_lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except NameError:
            # fcntl not available (Windows), disable file locking... :(
            pass
        except IOError:
            raise OfflineImapError("Could not take global lock.",
                                   OfflineImapError.ERROR.REPO)

        if options.singlethreading:
            # Singlethreaded.
            self.sync_singlethreaded(syncaccounts, config)
        else:
            # Multithreaded.
            t = threadutil.ExitNotifyThread(target=syncmaster.syncitall,
                                            name='Sync Runner',
                                            kwargs={
                                                'accounts': syncaccounts,
                                                'config': config
                                            })
            t.setDaemon(1)
            t.start()
            threadutil.exitnotifymonitorloop(threadutil.threadexited)
        ui.terminate()
    except KeyboardInterrupt:
        ui.terminate(1, errormsg='CTRL-C pressed, aborting...')
        return
    except (SystemExit):
        raise
    except Exception, e:
        ui.error(e)
        ui.terminate()
def copymessageto(self, uid, dstfolder, statusfolder, register = 1):
    """Copies a message from self to dst if needed, updating the status.

    :param uid: uid of the message to be copied.
    :param dstfolder: A BaseFolder-derived instance
    :param statusfolder: A LocalStatusFolder instance
    :param register: whether we should register a new thread (truthy flag).
    :returns: Nothing on success, or raises an Exception."""
    # Sometimes, it could be the case that if a sync takes awhile,
    # a message might be deleted from the maildir before it can be
    # synced to the status cache. This is only a problem with
    # self.getmessage(). So, don't call self.getmessage unless
    # really needed.
    if register:
        # Output that we start a new thread.
        self.ui.registerthread(self.accountname)
    try:
        message = None
        flags = self.getmessageflags(uid)
        rtime = self.getmessagetime(uid)
        if uid > 0 and dstfolder.uidexists(uid):
            # dst has message with that UID already, only update status.
            statusfolder.savemessage(uid, None, flags, rtime)
            return
        # If any of the destinations actually stores the message body,
        # load it up.
        if dstfolder.storesmessages():
            message = self.getmessage(uid)
        # Succeeded? -> IMAP actually assigned a UID. If newid
        # remained negative, no server was willing to assign us an
        # UID. If newid is 0, saving succeeded, but we could not
        # retrieve the new UID. Ignore message in this case.
        newuid = dstfolder.savemessage(uid, message, flags, rtime)
        if newuid > 0:
            if newuid != uid:
                # Got new UID, change the local uid.
                # TODO: Maildir could do this with a rename rather than
                # load/save/del operation, IMPLEMENT a changeuid()
                # function or so.
                self.savemessage(newuid, message, flags, rtime)
                self.deletemessage(uid)
                uid = newuid
            # Save uploaded status in the statusfolder.
            statusfolder.savemessage(uid, message, flags, rtime)
        elif newuid == 0:
            # Message was stored to dstfolder, but we can't find it's UID.
            # This means we can't link current message to the one created
            # in IMAP. So we just delete local message and on next run
            # we'll sync it back.
            # XXX This could cause infinite loop on syncing between two
            # IMAP servers ...
            self.deletemessage(uid)
        else:
            # Negative uid: the server refused to assign one at all.
            raise OfflineImapError("Trying to save msg (uid %d) on folder "
                                   "%s returned invalid uid %d" % \
                                   (uid, dstfolder.getvisiblename(), newuid),
                                   OfflineImapError.ERROR.MESSAGE)
    except OfflineImapError, e:
        if e.severity > OfflineImapError.ERROR.MESSAGE:
            raise # bubble severe errors up
        # Message-level errors are logged and the sync continues.
        self.ui.error(e, exc_info()[2])
class BaseRepository(object, CustomConfig.ConfigHelperMixin):
    """Abstract base class for all repository types.

    Holds the per-repository configuration (nametrans, folderfilter,
    folderincludes, foldersort) and the on-disk metadata directories.
    NOTE(review): Python 2 code (octal literals like 0700, `cmp`,
    dict.iteritems(), `except X, e` syntax)."""

    def __init__(self, reposname, account):
        self.ui = getglobalui()
        self.account = account
        self.config = account.getconfig()
        self.name = reposname
        self.localeval = account.getlocaleval()
        self._accountname = self.account.getname()
        # Per-repository metadata directories: UID mapping and folder
        # UIDVALIDITY caches.
        self.uiddir = os.path.join(self.config.getmetadatadir(),
                                   'Repository-' + self.name)
        if not os.path.exists(self.uiddir):
            os.mkdir(self.uiddir, 0700)
        self.mapdir = os.path.join(self.uiddir, 'UIDMapping')
        if not os.path.exists(self.mapdir):
            os.mkdir(self.mapdir, 0700)
        # Note: self.uiddir is re-pointed at the FolderValidity subdir.
        self.uiddir = os.path.join(self.uiddir, 'FolderValidity')
        if not os.path.exists(self.uiddir):
            os.mkdir(self.uiddir, 0700)

        # Defaults: identity nametrans, accept-all filter, no includes,
        # builtin cmp() sort order.
        self.nametrans = lambda foldername: foldername
        self.folderfilter = lambda foldername: 1
        self.folderincludes = []
        self.foldersort = cmp
        # User-supplied overrides are evaluated with 're' in scope.
        if self.config.has_option(self.getsection(), 'nametrans'):
            self.nametrans = self.localeval.eval(self.getconf('nametrans'),
                                                 {'re': re})
        if self.config.has_option(self.getsection(), 'folderfilter'):
            self.folderfilter = self.localeval.eval(
                self.getconf('folderfilter'), {'re': re})
        if self.config.has_option(self.getsection(), 'folderincludes'):
            self.folderincludes = self.localeval.eval(
                self.getconf('folderincludes'), {'re': re})
        if self.config.has_option(self.getsection(), 'foldersort'):
            self.foldersort = self.localeval.eval(self.getconf('foldersort'),
                                                  {'re': re})

    def restore_atime(self):
        """Sets folders' atime back to their values after a sync

        Controlled by the 'restoreatime' config parameter (default
        False), applies only to local Maildir mailboxes and does
        nothing on all other repository types."""
        pass

    def connect(self):
        """Establish a connection to the remote, if necessary.

        This exists so that IMAP connections can all be established
        up front, gathering passwords as needed. It was added in
        order to support the error recovery -- we need to connect
        first outside of the error trap in order to validate the
        password, and that's the point of this function."""
        pass

    def holdordropconnections(self):
        # No-op in the base class; connection-pooling backends override.
        pass

    def dropconnections(self):
        # No-op in the base class; connection-pooling backends override.
        pass

    def getaccount(self):
        return self.account

    def getname(self):
        return self.name

    def __str__(self):
        return self.name

    @property
    def accountname(self):
        """Account name as string"""
        return self._accountname

    def getuiddir(self):
        # Directory holding the per-folder UIDVALIDITY files.
        return self.uiddir

    def getmapdir(self):
        return self.mapdir

    def getsection(self):
        # Name of this repository's section in the configuration file.
        return 'Repository ' + self.name

    def getconfig(self):
        return self.config

    def getlocaleval(self):
        return self.account.getlocaleval()

    def getfolders(self):
        """Returns a list of ALL folders on this server."""
        return []

    def forgetfolders(self):
        """Forgets the cached list of folders, if any.  Useful to run
        after a sync run."""
        pass

    def getsep(self):
        # Folder hierarchy separator; subclasses must provide it.
        raise NotImplementedError

    def makefolder(self, foldername):
        raise NotImplementedError

    def deletefolder(self, foldername):
        raise NotImplementedError

    def getfolder(self, foldername):
        raise NotImplementedError

    def syncfoldersto(self, dst_repo, status_repo):
        """Syncs the folders in this repository to those in dest.

        It does NOT sync the contents of those folders. nametrans rules
        in both directions will be honored, but there are NO checks yet
        that forward and backward nametrans actually match up!
        Configuring nametrans on BOTH repositories therefore could lead
        to infinite folder creation cycles."""
        src_repo = self
        src_folders = src_repo.getfolders()
        dst_folders = dst_repo.getfolders()
        # Do we need to refresh the folder list afterwards?
        src_haschanged, dst_haschanged = False, False
        # Create hashes with the names, but convert the source folders
        # to the dest folder's sep.
        src_hash = {}
        for folder in src_folders:
            src_hash[folder.getvisiblename().replace(
                src_repo.getsep(), dst_repo.getsep())] = folder
        dst_hash = {}
        for folder in dst_folders:
            dst_hash[folder.name] = folder

        # Find new folders on src_repo.
        for src_name, src_folder in src_hash.iteritems():
            if src_folder.sync_this and not src_name in dst_hash:
                try:
                    dst_repo.makefolder(src_name)
                    dst_haschanged = True # Need to refresh list
                except OfflineImapError, e:
                    self.ui.error(e, exc_info()[2],
                                  "Creating folder %s on repository %s" %\
                                  (src_name, dst_repo))
                    raise
                status_repo.makefolder(src_name.replace(
                    dst_repo.getsep(), status_repo.getsep()))
        # Find new folders on dst_repo.
        for dst_name, dst_folder in dst_hash.iteritems():
            if dst_folder.sync_this and not dst_name in src_hash:
                # nametrans sanity check!
                # Does nametrans back&forth lead to identical names?
                # newsrc_name is the unmodified full src name that would
                # be created.
                newsrc_name = dst_folder.getvisiblename().replace(
                    dst_repo.getsep(), src_repo.getsep())
                folder = self.getfolder(newsrc_name)
                # Would src repo filter out the new folder name? In this
                # case don't create it on it:
                if not self.folderfilter(newsrc_name):
                    self.ui.debug(
                        '', "Not creating folder '%s' (repository '%s"
                        "') as it would be filtered out on that repository." %
                        (newsrc_name, self))
                    continue
                # Apply reverse nametrans to see if we end up with the
                # same name.
                newdst_name = folder.getvisiblename().replace(
                    src_repo.getsep(), dst_repo.getsep())
                if dst_name != newdst_name:
                    raise OfflineImapError(
                        "INFINITE FOLDER CREATION DETECTED! "
                        "Folder '%s' (repository '%s') would be created as fold"
                        "er '%s' (repository '%s'). The latter becomes '%s' in "
                        "return, leading to infinite folder creation cycles.\n "
                        "SOLUTION: 1) Do set your nametrans rules on both repos"
                        "itories so they lead to identical names if applied bac"
                        "k and forth. 2) Use folderfilter settings on a reposit"
                        "ory to prevent some folders from being created on the "
                        "other side."
                        % (dst_name, dst_repo, newsrc_name, src_repo,
                           newdst_name),
                        OfflineImapError.ERROR.REPO)
                # End sanity check, actually create the folder.
                try:
                    src_repo.makefolder(newsrc_name)
                    src_haschanged = True # Need to refresh list
                except OfflineImapError, e:
                    # NOTE(review): this error message interpolates
                    # src_name (a stale variable left over from the
                    # previous loop) and dst_repo, while the failed
                    # operation actually created newsrc_name on
                    # src_repo -- looks like a copy/paste bug; confirm.
                    self.ui.error(e, exc_info()[2],
                                  "Creating folder %s on repository %s" %\
                                  (src_name, dst_repo))
                    raise
                status_repo.makefolder(newsrc_name.replace(
                    src_repo.getsep(), status_repo.getsep()))