def process_w_branch(self, index, branch, args):
    """Run start_process on latest branch"""
    # Dispatch start_process through the shared error guard so common
    # errors are routed to branch.on_error rather than propagating.
    robust.check_common_error(branch.on_error, branch.start_process, args)
    if branch.caught_exception:
        return
    # Start completed cleanly: record success and the index this
    # branch is rooted at.
    branch.start_successful = 1
    branch.base_index = index
def process_w_branch(self, index, branch, args):
    u"""Run start_process on latest branch"""
    # Run start_process under the common-error guard; errors are
    # handed to branch.on_error instead of propagating here.
    robust.check_common_error(branch.on_error, branch.start_process, args)
    if not branch.caught_exception:
        # No error was caught during start: mark the branch as
        # successfully started and remember the index it is rooted at.
        branch.start_successful = 1
        branch.base_index = index
def call_end_proc(self):
    u"""Runs the end_process on self, checking for errors"""
    # A branch that is already finished, or that never started
    # successfully, is treated as having caught an exception.
    if self.finished or not self.start_successful:
        self.caught_exception = 1
    # Since all end_process does is copy over attributes, might as
    # well run it even if we did get errors earlier.
    robust.check_common_error(self.on_error, self.end_process)
    self.finished = 1
def call_end_proc(self):
    """Runs the end_process on self, checking for errors"""
    # Equivalent to: self.finished or not self.start_successful.
    started_cleanly = self.start_successful and not self.finished
    if not started_cleanly:
        self.caught_exception = 1
    # end_process merely copies attributes over, so it is safe (and
    # useful) to run it even after earlier errors.
    robust.check_common_error(self.on_error, self.end_process)
    self.finished = 1
def diryield(path): u"""Generate relevant files in directory path Returns (path, num) where num == 0 means path should be generated normally, num == 1 means the path is a directory and should be included iff something inside is included. """ # Only called by Iterate. Internal. # todo: get around circular dependency issue by importing here from duplicity import robust for filename in robust.listpath(path): new_path = robust.check_common_error(error_handler, Path.append, (path, filename)) if new_path: s = self.Select(new_path) if (new_path.type in [u"reg", u"dir"] and not os.access(new_path.name, os.R_OK)) \ and (s == 1 or s == 2): # Path is a file or folder that cannot be read, but # should be included or scanned. log.Warn( _(u"Error accessing possibly locked file %s") % new_path.uc_name, log.WarningCode.cannot_read, util.escape(new_path.name)) if diffdir.stats: diffdir.stats.Errors += 1 elif s == 1: # Should be included yield (new_path, 0) elif s == 2 and new_path.isdir(): # Is a directory that should be scanned yield (new_path, 1)
def diryield(path):
    """Generate relevant files in directory path

    Returns (path, num) where num == 0 means path should be generated
    normally, num == 1 means the path is a directory and should be
    included iff something inside is included.
    """
    # todo: get around circular dependency issue by importing here
    from duplicity import robust  # @Reimport
    for filename in robust.listpath(path):
        new_path = robust.check_common_error(
            error_handler, Path.append, (path, filename))
        if not new_path:
            # Path.append failed (error already handled); skip entry.
            continue
        # Make sure the file is read accessible before selecting it.
        unreadable = (new_path.type in ["reg", "dir"]
                      and not os.access(new_path.name, os.R_OK))
        if unreadable:
            log.Warn(_("Error accessing possibly locked file %s") %
                     util.ufn(new_path.name),
                     log.WarningCode.cannot_read,
                     util.escape(new_path.name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
            continue
        selection = self.Select(new_path)
        if selection == 1:
            # Selected for inclusion: generate normally.
            yield (new_path, 0)
        elif selection == 2 and new_path.isdir():
            # Directory to scan; included iff something inside is.
            yield (new_path, 1)
def diryield(path): """Generate relevant files in directory path Returns (path, num) where num == 0 means path should be generated normally, num == 1 means the path is a directory and should be included iff something inside is included. """ # todo: get around circular dependency issue by importing here from duplicity import robust #@Reimport for filename in robust.listpath(path): new_path = robust.check_common_error(error_handler, Path.append, (path, filename)) # make sure file is read accessible if (new_path and new_path.type in ["reg", "dir"] and not os.access(new_path.name, os.R_OK)): log.Warn( _("Error accessing possibly locked file %s") % util.ufn(new_path.name), log.WarningCode.cannot_read, util.escape(new_path.name)) if diffdir.stats: diffdir.stats.Errors += 1 new_path = None elif new_path: s = self.Select(new_path) if s == 1: yield (new_path, 0) elif s == 2 and new_path.isdir(): yield (new_path, 1)
def diryield(path):
    u"""Generate relevant files in directory path

    Returns (path, num) where num == 0 means path should be generated
    normally, num == 1 means the path is a directory and should be
    included iff something inside is included.
    """
    # Only called by Iterate. Internal.
    # todo: get around circular dependency issue by importing here
    from duplicity import robust  # @Reimport
    for filename in robust.listpath(path):
        new_path = robust.check_common_error(
            error_handler, Path.append, (path, filename))
        if not new_path:
            # Path.append failed (error already handled); skip entry.
            continue
        wanted = self.Select(new_path)
        locked = (new_path.type in [u"reg", u"dir"]
                  and not os.access(new_path.name, os.R_OK))
        if locked and wanted in (1, 2):
            # Path is a file or folder that cannot be read, but
            # should be included or scanned.
            log.Warn(_(u"Error accessing possibly locked file %s") %
                     new_path.uc_name,
                     log.WarningCode.cannot_read,
                     util.escape(new_path.name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
        elif wanted == 1:
            # Should be included
            yield (new_path, 0)
        elif wanted == 2 and new_path.isdir():
            # Is a directory that should be scanned
            yield (new_path, 1)
def __call__(self, *args):
    u"""Process args, where args[0] is current position in iterator

    Returns true if args successfully processed, false if index is
    not in the current tree and thus the final result is available.

    Also note below we set self.index after doing the necessary
    start processing, in case there is a crash in the middle.
    """
    index = args[0]
    if self.index is None:
        # First element seen: start processing on the root branch.
        self.process_w_branch(index, self.root_branch, args)
        self.index = index
        return 1
    if index <= self.index:
        # Non-increasing index: warn and skip rather than process
        # out of order.
        log.Warn(_(u"Warning: oldindex %s >= newindex %s") %
                 (util.uindex(self.index), util.uindex(index)))
        return 1
    if self.finish_branches(index) is None:
        return None  # We are no longer in the main tree
    last_branch = self.branches[-1]
    if last_branch.start_successful:
        if last_branch.can_fast_process(*args):
            # Fast path, still guarded by the common error handler.
            robust.check_common_error(last_branch.on_error,
                                      last_branch.fast_process, args)
        else:
            # Open a new sub-branch for this element.
            branch = self.add_branch()
            self.process_w_branch(index, branch, args)
    else:
        # The branch failed to start earlier; just log against it.
        last_branch.log_prev_error(index)
    self.index = index
    return 1
def __call__(self, *args):
    """Process args, where args[0] is current position in iterator

    Returns true if args successfully processed, false if index is
    not in the current tree and thus the final result is available.

    self.index is only advanced after the start processing has been
    done, so a crash in the middle does not record the index as done.
    """
    index = args[0]
    if self.index is None:
        # Very first element: everything hangs off the root branch.
        self.process_w_branch(index, self.root_branch, args)
        self.index = index
        return 1
    if index <= self.index:
        log.Warn(_("Warning: oldindex %s >= newindex %s") %
                 (util.uindex(self.index), util.uindex(index)))
        return 1
    if self.finish_branches(index) is None:
        # Left the main tree; the final result is now available.
        return None
    tail = self.branches[-1]
    if not tail.start_successful:
        # Branch failed to start earlier; just log against it.
        tail.log_prev_error(index)
    elif tail.can_fast_process(*args):
        # Fast path, still guarded by the common error handler.
        robust.check_common_error(tail.on_error, tail.fast_process, args)
    else:
        # Open a new sub-branch for this element.
        self.process_w_branch(index, self.add_branch(), args)
    self.index = index
    return 1
def exclude_sel_func(path):
    """Exclude path (return 0) if it is a dir containing the marker file.

    Returns None (no opinion) otherwise, deferring to the remaining
    selection functions.  NOTE(review): `filename` is a closure
    variable from an enclosing scope not visible here.
    """
    # do not follow symbolic links when checking for file existence!
    if path.isdir():
        def error_handler(_exc, _filename):
            # Path is not read accessible
            # ToDo: Ideally this error would only show if the folder
            # was ultimately included by the full set of selection
            # functions. Currently this will give an error for any
            # locked directory within the folder being backed up.
            log.Warn(_(
                u"Error accessing possibly locked file %s") %
                path.uc_name,
                log.WarningCode.cannot_read,
                util.escape(path.uc_name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
            return False
        # check_common_error routes read failures to error_handler
        # above instead of propagating them.
        if check_common_error(error_handler, path.contains, [filename]):
            # Directory contains the marker file: exclude it.
            return 0
        else:
            # No marker: no opinion; later selection functions decide.
            return None