def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):
    """Patch given Path object using delta tarfile (as in tarfile.TarFile)

    If restrict_index is set, ignore any deltas in diff_tarfile
    that don't start with restrict_index.
    """
    # Walk the existing tree if present; a full restore has nothing to walk.
    if base_path.exists():
        path_iter = selection.Select(base_path).set_iter()
    else:
        path_iter = empty_iter()  # probably untarring full backup

    diff_path_iter = difftar2path_iter(diff_tarfile)
    if restrict_index:
        diff_path_iter = filter_path_iter(diff_path_iter, restrict_index)

    merged = diffdir.collate2iters(path_iter, diff_path_iter)
    reducer = IterTreeReducer(PathPatcher, [base_path])
    for basis_path, diff_ropath in merged:
        # Log against whichever side exists; the reducer gets both.
        present = basis_path if basis_path else diff_ropath
        log.Info(_("Patching %s") % (present.get_relative_path(),),
                 log.InfoCode.patch_file_patching,
                 util.escape(present.get_relative_path()))
        reducer(present.index, basis_path, diff_ropath)
    reducer.Finish()
    base_path.setdata()
def can_fast_process(self, index, ropath):
    """Can fast process (no recursion) if ropath isn't a directory"""
    rel = ropath.get_relative_path()
    log.Info(_("Writing %s of type %s") % (rel, ropath.type),
             log.InfoCode.patch_file_writing,
             "%s %s" % (util.escape(rel), ropath.type))
    return not ropath.isdir()
def error_handler(exc, path, filename):  # pylint: disable=unused-argument
    """Log why a directory entry could not be initialized; always returns None."""
    fullpath = os.path.join(path.name, filename)
    try:
        file_mode = os.stat(fullpath)[stat.ST_MODE]
        if stat.S_ISSOCK(file_mode):
            # Sockets cannot be archived; skipping them is expected.
            log.Info(_(u"Skipping socket %s") % util.fsdecode(fullpath),
                     log.InfoCode.skipping_socket,
                     util.escape(fullpath))
        else:
            log.Warn(_(u"Error initializing file %s") % util.fsdecode(fullpath),
                     log.WarningCode.cannot_iterate,
                     util.escape(fullpath))
    except OSError:
        # stat itself failed -- most likely a locked or vanished file.
        log.Warn(_(u"Error accessing possibly locked file %s") % util.fsdecode(fullpath),
                 log.WarningCode.cannot_stat,
                 util.escape(fullpath))
    return None
def diryield(path): """Generate relevant files in directory path Returns (path, num) where num == 0 means path should be generated normally, num == 1 means the path is a directory and should be included iff something inside is included. """ # todo: get around circular dependency issue by importing here from duplicity import robust #@Reimport for filename in robust.listpath(path): new_path = robust.check_common_error(error_handler, Path.append, (path, filename)) # make sure file is read accessible if (new_path and new_path.type in ["reg", "dir"] and not os.access(new_path.name, os.R_OK)): log.Warn( _("Error accessing possibly locked file %s") % util.ufn(new_path.name), log.WarningCode.cannot_read, util.escape(new_path.name)) if diffdir.stats: diffdir.stats.Errors += 1 new_path = None elif new_path: s = self.Select(new_path) if s == 1: yield (new_path, 0) elif s == 2 and new_path.isdir(): yield (new_path, 1)
def diryield(path):
    """Generate relevant files in directory path

    Returns (path, num) where num == 0 means path should be
    generated normally, num == 1 means the path is a directory
    and should be included iff something inside is included.
    """
    # todo: get around circular dependency issue by importing here
    from duplicity import robust  # @Reimport
    for filename in robust.listpath(path):
        new_path = robust.check_common_error(
            error_handler, Path.append, (path, filename))
        if not new_path:
            continue
        # make sure file is read accessible
        if (new_path.type in ["reg", "dir"]
                and not os.access(new_path.name, os.R_OK)):
            log.Warn(_("Error accessing possibly locked file %s") % util.ufn(new_path.name),
                     log.WarningCode.cannot_read,
                     util.escape(new_path.name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
            continue
        verdict = self.Select(new_path)
        if verdict == 1:
            yield (new_path, 0)
        elif verdict == 2 and new_path.isdir():
            yield (new_path, 1)
def diryield(path): u"""Generate relevant files in directory path Returns (path, num) where num == 0 means path should be generated normally, num == 1 means the path is a directory and should be included iff something inside is included. """ # Only called by Iterate. Internal. # todo: get around circular dependency issue by importing here from duplicity import robust # @Reimport for filename in robust.listpath(path): new_path = robust.check_common_error( error_handler, Path.append, (path, filename)) if new_path: s = self.Select(new_path) if (new_path.type in [u"reg", u"dir"] and not os.access(new_path.name, os.R_OK)) \ and (s == 1 or s == 2): # Path is a file or folder that cannot be read, but # should be included or scanned. log.Warn(_(u"Error accessing possibly locked file %s") % new_path.uc_name, log.WarningCode.cannot_read, util.escape(new_path.name)) if diffdir.stats: diffdir.stats.Errors += 1 elif s == 1: # Should be included yield (new_path, 0) elif s == 2 and new_path.isdir(): # Is a directory that should be scanned yield (new_path, 1)
def diryield(path):
    u"""Generate relevant files in directory path

    Returns (path, num) where num == 0 means path should be
    generated normally, num == 1 means the path is a directory
    and should be included iff something inside is included.
    """
    # Only called by Iterate. Internal.
    # todo: get around circular dependency issue by importing here
    from duplicity import robust
    for filename in robust.listpath(path):
        new_path = robust.check_common_error(error_handler, Path.append,
                                             (path, filename))
        if not new_path:
            continue
        verdict = self.Select(new_path)
        unreadable = (new_path.type in [u"reg", u"dir"]
                      and not os.access(new_path.name, os.R_OK))
        if unreadable and (verdict == 1 or verdict == 2):
            # Path is a file or folder that cannot be read, but
            # should be included or scanned.
            log.Warn(
                _(u"Error accessing possibly locked file %s") % new_path.uc_name,
                log.WarningCode.cannot_read,
                util.escape(new_path.name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
        elif verdict == 1:
            # Should be included
            yield (new_path, 0)
        elif verdict == 2 and new_path.isdir():
            # Is a directory that should be scanned
            yield (new_path, 1)
def error_handler(exc, path, filename):
    """Report a directory entry that failed to initialize; returns None."""
    fullpath = os.path.join(path.name, filename)
    try:
        if stat.S_ISSOCK(os.stat(fullpath)[stat.ST_MODE]):
            # Sockets are deliberately skipped, not errors.
            log.Info(_(u"Skipping socket %s") % util.fsdecode(fullpath),
                     log.InfoCode.skipping_socket,
                     util.escape(fullpath))
        else:
            log.Warn(_(u"Error initializing file %s") % util.fsdecode(fullpath),
                     log.WarningCode.cannot_iterate,
                     util.escape(fullpath))
    except OSError:
        # Could not even stat the entry.
        log.Warn(_(u"Error accessing possibly locked file %s") % util.fsdecode(fullpath),
                 log.WarningCode.cannot_stat,
                 util.escape(fullpath))
    return None
def can_fast_process(self, index, ropath):  # pylint: disable=unused-argument
    u"""Can fast process (no recursion) if ropath isn't a directory"""
    relpath = ropath.get_relative_path()
    log.Info(
        _(u"Writing %s of type %s") % (util.fsdecode(relpath), ropath.type),
        log.InfoCode.patch_file_writing,
        u"%s %s" % (util.escape(relpath), ropath.type))
    return not ropath.isdir()
def error_handler(exc, path, filename):
    """Log a file that could not be initialized; always returns None."""
    fullpath = os.path.join(path.name, filename)
    try:
        is_socket = stat.S_ISSOCK(os.stat(fullpath)[stat.ST_MODE])
        if is_socket:
            log.Log(_("Skipping socket %s") % fullpath,
                    log.DEBUG,
                    log.InfoCode.skipping_socket,
                    util.escape(fullpath))
        else:
            log.Warn(_("Error initializing file %s") % fullpath,
                     log.WarningCode.cannot_iterate,
                     util.escape(fullpath))
    except OSError:
        # stat failed: likely a locked or removed file.
        log.Warn(_("Error accessing possibly locked file %s") % fullpath,
                 log.WarningCode.cannot_stat,
                 util.escape(fullpath))
    return None
def check_dirinfo(self):
    u"""
    Return None if dirinfo is the same, otherwise error message

    Does not raise an error message if hostname or local_dirname
    are not available.

    @rtype: string
    @return: None or error message
    """
    if config.allow_source_mismatch:
        # User explicitly disabled this safety check.
        return
    # Check both hostname and fqdn (we used to write the fqdn into the
    # manifest, so we want to keep comparing against that)
    if (self.hostname and self.hostname != config.hostname and
            self.hostname != config.fqdn):
        message = _(
            u"Fatal Error: Backup source host has changed.\n"
            u"Current hostname: %s\n"
            u"Previous hostname: %s") % (config.hostname, self.hostname)
        error_code = log.ErrorCode.hostname_mismatch
        extra = u"%s %s" % (util.escape(config.hostname),
                            util.escape(self.hostname))
    elif (self.local_dirname and
          self.local_dirname != config.local_path.name):
        message = _(u"Fatal Error: Backup source directory has changed.\n"
                    u"Current directory: %s\n"
                    u"Previous directory: %s") % (config.local_path.name,
                                                  self.local_dirname)
        error_code = log.ErrorCode.source_dir_mismatch
        extra = u"%s %s" % (util.escape(config.local_path.name),
                            util.escape(self.local_dirname))
    else:
        # Nothing changed (or no prior info was recorded).
        return
    log.FatalError(
        message + u"\n\n" +
        _(u"Aborting because you may have accidentally tried to "
          u"backup two different data sets to the same remote "
          u"location, or using the same archive directory. If "
          u"this is not a mistake, use the "
          u"--allow-source-mismatch switch to avoid seeing this "
          u"message"), error_code, extra)
def log_delta_path(delta_path, new_path=None, stats=None):
    u"""
    Look at delta path and log delta.  Add stats if new_path is set
    """
    relpath = delta_path.get_relative_path()
    if delta_path.difftype == u"snapshot":
        # "A" = added (full snapshot of the file).
        if new_path and stats:
            stats.add_new_file(new_path)
        log.Info(_(u"A %s") % (util.fsdecode(relpath)),
                 log.InfoCode.diff_file_new,
                 util.escape(relpath))
    else:
        # "M" = modified (delta against previous version).
        if new_path and stats:
            stats.add_changed_file(new_path)
        log.Info(_(u"M %s") % (util.fsdecode(relpath)),
                 log.InfoCode.diff_file_changed,
                 util.escape(relpath))
def log_delta_path(delta_path, new_path=None, stats=None):
    """
    Look at delta path and log delta.  Add stats if new_path is set
    """
    relative = delta_path.get_relative_path()
    if delta_path.difftype == "snapshot":
        if new_path and stats:
            stats.add_new_file(new_path)
        log.Info(_("A %s") % (relative,),
                 log.InfoCode.diff_file_new,
                 util.escape(relative))
    else:
        if new_path and stats:
            stats.add_changed_file(new_path)
        log.Info(_("M %s") % (relative,),
                 log.InfoCode.diff_file_changed,
                 util.escape(relative))
def log_prev_error(self, index):
    """Call function if no pending exception"""
    # An empty index means the base directory itself.
    index_str = os.path.join(*index) if index else "."
    log.Warn(_("Skipping %s because of previous error") % util.fsdecode(index_str),
             log.WarningCode.process_skipped,
             util.escape(index_str))
def log_prev_error(self, index):
    u"""Call function if no pending exception"""
    if index:
        index_str = os.path.join(*index)
    else:
        # Empty index refers to the base directory.
        index_str = u"."
    log.Warn(
        _(u"Skipping %s because of previous error") % util.fsdecode(index_str),
        log.WarningCode.process_skipped,
        util.escape(index_str))
def check_dirinfo(self):
    """
    Return None if dirinfo is the same, otherwise error message

    Does not raise an error message if hostname or local_dirname
    are not available.

    @rtype: string
    @return: None or error message
    """
    # User explicitly opted out of the source-mismatch check.
    if globals.allow_source_mismatch:
        return
    if self.hostname and self.hostname != globals.hostname:
        errmsg = _(
            "Fatal Error: Backup source host has changed.\n"
            "Current hostname: %s\n"
            "Previous hostname: %s") % (globals.hostname, self.hostname)
        code = log.ErrorCode.hostname_mismatch
        code_extra = "%s %s" % (util.escape(
            globals.hostname), util.escape(self.hostname))
    elif (self.local_dirname and
          self.local_dirname != globals.local_path.name):  # @UndefinedVariable
        errmsg = _("Fatal Error: Backup source directory has changed.\n"
                   "Current directory: %s\n"
                   "Previous directory: %s") % (
            globals.local_path.name, self.local_dirname)  # @UndefinedVariable
        code = log.ErrorCode.source_dir_mismatch
        code_extra = "%s %s" % (util.escape(
            globals.local_path.name), util.escape(self.local_dirname))  # @UndefinedVariable
    else:
        # Nothing changed (or no prior info recorded): success.
        return
    log.FatalError(
        errmsg + "\n\n" +
        _("Aborting because you may have accidentally tried to "
          "backup two different data sets to the same remote "
          "location, or using the same archive directory. If "
          "this is not a mistake, use the "
          "--allow-source-mismatch switch to avoid seeing this "
          "message"), code, code_extra)
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
    u"""
    Generate delta iter from new Path iter and sig Path iter.

    For each delta path of regular file type, path.difftype
    with be set to "snapshot", "diff".  sig_iter will probably
    iterate ROPaths instead of Paths.

    If sig_fileobj is not None, will also write signatures to sig_fileobj.
    """
    collated = collate2iters(new_iter, sig_iter)
    if sig_fileobj:
        sigTarFile = util.make_tarfile(u"w", sig_fileobj)
    else:
        sigTarFile = None
    for new_path, sig_path in collated:
        log.Debug(
            _(u"Comparing %s and %s") % (new_path and util.uindex(new_path.index),
                                         sig_path and util.uindex(sig_path.index)))
        if not new_path or not new_path.type:
            # File doesn't exist (but ignore attempts to delete base dir;
            # old versions of duplicity could have written out the sigtar in
            # such a way as to fool us; LP: #929067)
            if sig_path and sig_path.exists() and sig_path.index != ():
                # but signature says it did
                log.Info(_(u"D %s") % (util.fsdecode(sig_path.get_relative_path())),
                         log.InfoCode.diff_file_deleted,
                         util.escape(sig_path.get_relative_path()))
                if sigTarFile:
                    # Record the deletion marker in the signature tarfile;
                    # tar member names are str on py3, bytes on py2.
                    ti = ROPath(sig_path.index).get_tarinfo()
                    if sys.version_info.major >= 3:
                        ti.name = u"deleted/" + util.uindex(sig_path.index)
                    else:
                        ti.name = b"deleted/" + b"/".join(sig_path.index)
                    sigTarFile.addfile(ti)
                stats.add_deleted_file(sig_path)
                yield ROPath(sig_path.index)
        elif not sig_path or new_path != sig_path:
            # Must calculate new signature and create delta
            delta_path = robust.check_common_error(
                delta_iter_error_handler, get_delta_path,
                (new_path, sig_path, sigTarFile))
            if delta_path:
                # log and collect stats
                log_delta_path(delta_path, new_path, stats)
                yield delta_path
            else:
                # if not, an error must have occurred
                stats.Errors += 1
        else:
            # Unchanged since the previous signature.
            stats.add_unchanged_file(new_path)
    stats.close()
    if sigTarFile:
        sigTarFile.close()
def error_handler(_exc, _filename):
    """Count and report an unreadable path; always returns False (exclude)."""
    # Path is not read accessible
    # ToDo: Ideally this error would only show if the folder
    # was ultimately included by the full set of selection
    # functions. Currently this will give an error for any
    # locked directory within the folder being backed up.
    log.Warn(_(u"Error accessing possibly locked file %s") % path.uc_name,
             log.WarningCode.cannot_read,
             util.escape(path.uc_name))
    if diffdir.stats:
        diffdir.stats.Errors += 1
    return False
def on_error(self, exc, *args):
    """This is run on any exception in start/end-process"""
    self.caught_exception = 1
    # Prefer the index passed with the call, fall back to our own,
    # and use "." when neither names a path.
    if args and args[0] and isinstance(args[0], tuple):
        filename = os.path.join(*args[0])
    else:
        # pylint: disable=not-an-iterable
        filename = os.path.join(*self.index) if self.index else "."
    log.Warn(_("Error '%s' processing %s") % (exc, util.fsdecode(filename)),
             log.WarningCode.cannot_process,
             util.escape(filename))
def on_error(self, exc, *args):
    u"""This is run on any exception in start/end-process"""
    self.caught_exception = 1
    filename = u"."
    if args and args[0] and isinstance(args[0], tuple):
        filename = os.path.join(*args[0])
    elif self.index:
        filename = os.path.join(*self.index)  # pylint: disable=not-an-iterable
    log.Warn(_(u"Error '%s' processing %s") % (exc, util.fsdecode(filename)),
             log.WarningCode.cannot_process,
             util.escape(filename))
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
    """
    Generate delta iter from new Path iter and sig Path iter.

    For each delta path of regular file type, path.difftype
    with be set to "snapshot", "diff".  sig_iter will probably
    iterate ROPaths instead of Paths.

    If sig_fileobj is not None, will also write signatures to sig_fileobj.
    """
    collated = collate2iters(new_iter, sig_iter)
    if sig_fileobj:
        sigTarFile = util.make_tarfile("w", sig_fileobj)
    else:
        sigTarFile = None
    for new_path, sig_path in collated:
        log.Debug(_("Comparing %s and %s") % (new_path and new_path.index,
                                              sig_path and sig_path.index))
        if not new_path or not new_path.type:
            # File doesn't exist (but ignore attempts to delete base dir;
            # old versions of duplicity could have written out the sigtar in
            # such a way as to fool us; LP: #929067)
            if sig_path and sig_path.exists() and sig_path.index != ():
                # but signature says it did
                log.Info(_("D %s") % (sig_path.get_relative_path(),),
                         log.InfoCode.diff_file_deleted,
                         util.escape(sig_path.get_relative_path()))
                if sigTarFile:
                    # Record the deletion marker in the signature tarfile.
                    ti = ROPath(sig_path.index).get_tarinfo()
                    ti.name = "deleted/" + "/".join(sig_path.index)
                    sigTarFile.addfile(ti)
                stats.add_deleted_file()
                yield ROPath(sig_path.index)
        elif not sig_path or new_path != sig_path:
            # Must calculate new signature and create delta
            delta_path = robust.check_common_error(delta_iter_error_handler,
                                                   get_delta_path,
                                                   (new_path, sig_path, sigTarFile))
            if delta_path:
                # log and collect stats
                log_delta_path(delta_path, new_path, stats)
                yield delta_path
            else:
                # if not, an error must have occurred
                stats.Errors += 1
        else:
            # Unchanged since the previous signature.
            stats.add_unchanged_file(new_path)
    stats.close()
    if sigTarFile:
        sigTarFile.close()
def handle_error(self, raise_error, e, op, file1=None, file2=None):
    """Re-raise, or abort with a backend error code derived from the GLib error."""
    if raise_error:
        raise e
    code = log.ErrorCode.backend_error
    if isinstance(e, GLib.GError):
        # Translate well-known Gio error codes to duplicity error codes.
        mapping = {
            Gio.IOErrorEnum.PERMISSION_DENIED: log.ErrorCode.backend_permission_denied,
            Gio.IOErrorEnum.NOT_FOUND: log.ErrorCode.backend_not_found,
            Gio.IOErrorEnum.NO_SPACE: log.ErrorCode.backend_no_space,
        }
        code = mapping.get(e.code, code)
    extra = ' '.join([util.escape(x) for x in [file1, file2] if x])
    extra = ' '.join([op, extra])
    log.FatalError(str(e), code, extra)
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
    """
    Generate delta iter from new Path iter and sig Path iter.

    For each delta path of regular file type, path.difftype
    with be set to "snapshot", "diff".  sig_iter will probably
    iterate ROPaths instead of Paths.

    If sig_fileobj is not None, will also write signatures to sig_fileobj.
    """
    collated = collate2iters(new_iter, sig_iter)
    if sig_fileobj:
        sigTarFile = util.make_tarfile("w", sig_fileobj)
    else:
        sigTarFile = None
    for new_path, sig_path in collated:
        log.Debug(
            _("Comparing %s and %s") % (new_path and new_path.index,
                                        sig_path and sig_path.index))
        if not new_path or not new_path.type:
            # file doesn't exist
            if sig_path and sig_path.exists():
                # but signature says it did
                log.Info(
                    _("D %s") % (sig_path.get_relative_path(), ),
                    log.InfoCode.diff_file_deleted,
                    util.escape(sig_path.get_relative_path()))
                if sigTarFile:
                    # Record the deletion marker in the signature tarfile.
                    ti = ROPath(sig_path.index).get_tarinfo()
                    ti.name = "deleted/" + "/".join(sig_path.index)
                    sigTarFile.addfile(ti)
                stats.add_deleted_file()
                yield ROPath(sig_path.index)
        elif not sig_path or new_path != sig_path:
            # Must calculate new signature and create delta
            delta_path = robust.check_common_error(
                delta_iter_error_handler, get_delta_path,
                (new_path, sig_path, sigTarFile))
            if delta_path:
                # log and collect stats
                log_delta_path(delta_path, new_path, stats)
                yield delta_path
            else:
                # if not, an error must have occurred
                stats.Errors += 1
        else:
            # Unchanged since the previous signature.
            stats.add_unchanged_file(new_path)
    stats.close()
    if sigTarFile:
        sigTarFile.close()
def handle_error(self, raise_error, e, op, file1=None, file2=None):
    """Re-raise, or fail with a categorized error code derived from a gio error."""
    if raise_error:
        raise e
    code = log.ErrorCode.backend_error
    if isinstance(e, gio.Error):
        # Translate recognized gio error codes to backend codes.
        for gio_code, backend_code in (
                (gio.ERROR_PERMISSION_DENIED, log.ErrorCode.backend_permission_denied),
                (gio.ERROR_NOT_FOUND, log.ErrorCode.backend_not_found),
                (gio.ERROR_NO_SPACE, log.ErrorCode.backend_no_space)):
            if e.code == gio_code:
                code = backend_code
                break
    extra = " ".join([util.escape(x) for x in [file1, file2] if x])
    extra = " ".join([op, extra])
    log.FatalError(str(e), code, extra)
def check_dirinfo(self):
    """
    Return None if dirinfo is the same, otherwise error message

    Does not raise an error message if hostname or local_dirname
    are not available.

    @rtype: string
    @return: None or error message
    """
    if globals.allow_source_mismatch:
        # The user explicitly disabled this check.
        return
    if self.hostname and self.hostname != globals.hostname:
        message = _("Fatal Error: Backup source host has changed.\n"
                    "Current hostname: %s\n"
                    "Previous hostname: %s") % (globals.hostname,
                                                self.hostname)
        error_code = log.ErrorCode.hostname_mismatch
        extra = "%s %s" % (util.escape(globals.hostname),
                           util.escape(self.hostname))
    elif (self.local_dirname and
          self.local_dirname != globals.local_path.name):  # @UndefinedVariable
        message = _("Fatal Error: Backup source directory has changed.\n"
                    "Current directory: %s\n"
                    "Previous directory: %s") % (globals.local_path.name,
                                                 self.local_dirname)  # @UndefinedVariable
        error_code = log.ErrorCode.source_dir_mismatch
        extra = "%s %s" % (util.escape(globals.local_path.name),
                           util.escape(self.local_dirname))  # @UndefinedVariable
    else:
        return
    log.FatalError(message + "\n\n" +
                   _("Aborting because you may have accidentally tried to "
                     "backup two different data sets to the same remote "
                     "location, or using the same archive directory. If "
                     "this is not a mistake, use the "
                     "--allow-source-mismatch switch to avoid seeing this "
                     "message"),
                   error_code, extra)
def handle_error(self, e, op, file1=None, file2=None):
    """Log a backend failure; fatal except for delete/query operations."""
    # Map the OS errno, if any, onto a duplicity backend error code.
    errno_map = {errno.EACCES: log.ErrorCode.backend_permission_denied,
                 errno.ENOENT: log.ErrorCode.backend_not_found,
                 errno.ENOSPC: log.ErrorCode.backend_no_space}
    code = log.ErrorCode.backend_error
    if hasattr(e, 'errno'):
        code = errno_map.get(e.errno, code)
    extra = ' '.join([util.escape(x) for x in [file1, file2] if x])
    extra = ' '.join([op, extra])
    if op != 'delete' and op != 'query':
        log.FatalError(str(e), code, extra)
    else:
        log.Warn(str(e), code, extra)
def handle_error(self, e, op, file1=None, file2=None):
    """Report a backend exception: warn for delete/query ops, fatal otherwise."""
    code = log.ErrorCode.backend_error
    if hasattr(e, 'errno'):
        if e.errno == errno.EACCES:
            code = log.ErrorCode.backend_permission_denied
        elif e.errno == errno.ENOENT:
            code = log.ErrorCode.backend_not_found
        elif e.errno == errno.ENOSPC:
            code = log.ErrorCode.backend_no_space
    escaped = [util.escape(x) for x in [file1, file2] if x]
    extra = ' '.join([op, ' '.join(escaped)])
    if op in ('delete', 'query'):
        # Best-effort operations only warn.
        log.Warn(util.uexc(e), code, extra)
    else:
        log.FatalError(util.uexc(e), code, extra)
def handle_error(self, raise_error, op, headers, file1=None, file2=None, ignore=None):
    """Inspect an HTTP response and raise or fatally log on failure.

    headers is a (header-dict, body) pair; ignore is an optional
    collection of status codes to treat as success.  No-op on 2xx.
    """
    # Imported locally -- presumably to avoid import cycles at load time.
    from duplicity import log
    from duplicity import util
    import json
    status = int(headers[0].get('status'))
    if status >= 200 and status < 300:
        # 2xx: success, nothing to report.
        return
    if ignore and status in ignore:
        # Caller asked to tolerate this status.
        return
    # Map well-known HTTP statuses onto duplicity backend error codes.
    if status == 400:
        code = log.ErrorCode.backend_permission_denied
    elif status == 404:
        code = log.ErrorCode.backend_not_found
    elif status == 507:
        code = log.ErrorCode.backend_no_space
    else:
        code = log.ErrorCode.backend_error
    if file1:
        file1 = file1.encode("utf8")
    else:
        file1 = None
    if file2:
        file2 = file2.encode("utf8")
    else:
        file2 = None
    extra = ' '.join([util.escape(x) for x in [file1, file2] if x])
    extra = ' '.join([op, extra])
    msg = _("Got status code %s") % status
    if headers[0].get('x-oops-id') is not None:
        msg += '\nOops-ID: %s' % headers[0].get('x-oops-id')
    if headers[0].get('content-type') == 'application/json':
        # Prefer the server-supplied error message when one is present.
        node = json.loads(headers[1])
        if node.get('error'):
            msg = node.get('error')
    if raise_error:
        if status == 503:
            # 503 is transient; signal the retry machinery instead.
            raise TemporaryLoadException(msg)
        else:
            raise BackendException(msg)
    else:
        log.FatalError(msg, code, extra)
def exclude_sel_func(path):
    """Return 0 (exclude) if the readable dir `path` contains `filename`, else None."""
    # do not follow symbolic links when checking for file existence!
    if not path.isdir():
        return None
    # First check path is read accessible
    if not os.access(path.name, os.R_OK):
        # Path is not read accessible
        # ToDo: Ideally this error would only show if the folder
        # was ultimately included by the full set of selection
        # functions. Currently this will give an error for any
        # locked directory within the folder being backed up.
        log.Warn(_(u"Error accessing possibly locked file %s") % path.uc_name,
                 log.WarningCode.cannot_read,
                 util.escape(path.uc_name))
        if diffdir.stats:
            diffdir.stats.Errors += 1
        return None
    if path.append(filename).exists():
        return 0
    return None
def exclude_sel_func(path):
    # Returns 0 (exclude) when `path` is a readable directory containing
    # `filename` (free variable from the enclosing scope); otherwise None.
    # do not follow symbolic links when checking for file existence!
    if path.isdir():
        # First check path is read accessible
        if not (os.access(path.name, os.R_OK)):
            # Path is not read accessible
            # ToDo: Ideally this error would only show if the folder
            # was ultimately included by the full set of selection
            # functions. Currently this will give an error for any
            # locked directory within the folder being backed up.
            log.Warn(
                _(u"Error accessing possibly locked file %s") % path.uc_name,
                log.WarningCode.cannot_read,
                util.escape(path.uc_name))
            if diffdir.stats:
                diffdir.stats.Errors += 1
            # Falls through: unreadable dirs yield an implicit None.
        elif path.append(filename).exists():
            return 0
    else:
        return None
def integrate_patch_iters(iter_list):
    """Combine a list of iterators of ropath patches

    The iter_list should be sorted in patch order, and the elements in
    each iter_list need to be orderd by index.  The output will be an
    iterator of the final ROPaths in index order.
    """
    collated = collate_iters(iter_list)
    for patch_seq in collated:
        normalized = normalize_ps(patch_seq)
        try:
            final_ropath = patch_seq2ropath(normalized)
            if final_ropath.exists():
                # otherwise final patch was delete
                yield final_ropath
        # Fixed: "except Exception, e" is Python-2-only syntax (a
        # SyntaxError on Python 3); "as" works on py2.6+ and py3.
        except Exception as e:
            # A broken patch sequence only skips this file; keep going.
            filename = normalized[-1].get_ropath().get_relative_path()
            log.Warn(_("Error '%s' patching %s") % (str(e), filename),
                     log.WarningCode.cannot_process,
                     util.escape(filename))
def integrate_patch_iters(iter_list):
    """Combine a list of iterators of ropath patches

    The iter_list should be sorted in patch order, and the elements in
    each iter_list need to be orderd by index.  The output will be an
    iterator of the final ROPaths in index order.
    """
    for patch_seq in collate_iters(iter_list):
        normalized = normalize_ps(patch_seq)
        try:
            final_ropath = patch_seq2ropath(normalized)
            if final_ropath.exists():
                # otherwise final patch was delete
                yield final_ropath
        except Exception as e:
            # A failed patch only skips this one file.
            filename = normalized[-1].get_ropath().get_relative_path()
            log.Warn(_("Error '%s' patching %s") % (util.uexc(e),
                                                    util.fsdecode(filename)),
                     log.WarningCode.cannot_process,
                     util.escape(filename))
def handle_error(self, raise_error, op, headers, file1=None, file2=None, ignore=None):
    """Raise or fatally log a backend HTTP failure; no-op when parse_error accepts it."""
    from duplicity import log
    from duplicity import util
    import json
    code = self.parse_error(headers, ignore)
    if code is None:
        # parse_error found nothing wrong (or the status is ignored).
        return
    status = int(headers[0].get('status'))
    file1 = file1.encode("utf8") if file1 else None
    file2 = file2.encode("utf8") if file2 else None
    extra = ' '.join([util.escape(x) for x in [file1, file2] if x])
    extra = ' '.join([op, extra])
    msg = _("Got status code %s") % status
    oops_id = headers[0].get('x-oops-id')
    if oops_id is not None:
        msg += '\nOops-ID: %s' % oops_id
    if headers[0].get('content-type') == 'application/json':
        # Prefer a server-provided error message when available.
        body = json.loads(headers[1])
        if body.get('error'):
            msg = body.get('error')
    if not raise_error:
        log.FatalError(msg, code, extra)
    elif status == 503:
        raise TemporaryLoadException(msg)
    else:
        raise BackendException(msg)
def make_filename(f):
    """Return the escaped name of f (an ROPath or a plain name)."""
    target = f.name if isinstance(f, path.ROPath) else f
    return util.escape(target)
def error():
    """Signal an unparsable interval string via TimeException."""
    message = bad_interval_string % util.escape(interval_string)
    raise TimeException(message)
def error():
    """Signal an unparsable time string via TimeException."""
    message = bad_time_string % util.escape(timestr)
    raise TimeException(message)