            index = src_rorp and src_rorp.index or mir_rorp.index
            if src_rorp and mir_rorp:
                if not src_rorp.isreg() and src_rorp == mir_rorp:
                    log_success(src_rorp, mir_rorp)
                    continue  # They must be equal, nothing else to check
                if (src_rorp.isreg() and mir_rorp.isreg() and
                        src_rorp.getsize() == mir_rorp.getsize()):
                    fp = cls.rf_cache.get_fp(base_index + index, mir_rorp)
                    mir_rorp.setfile(fp)
                    mir_rorp.set_attached_filetype('snapshot')
            if mir_rorp:
                yield mir_rorp
            else:
                yield rpath.RORPath(index)  # indicate deleted mir_rorp

static.MakeClass(RepoSide)


class DataSide(backup.SourceStruct):
    """On the side that has the current data, compare is like backing up"""

    def compare_fast(cls, repo_iter):
        """Compare rorps (metadata only) quickly, return report iter"""
        src_iter = cls.get_source_select()
        for src_rorp, mir_rorp in rorpiter.Collate2Iters(src_iter, repo_iter):
            report = get_basic_report(src_rorp, mir_rorp)
            if report:
                yield report
            else:
                log_success(src_rorp, mir_rorp)

    def compare_hash(cls, repo_iter):
        """Like above, but also compare sha1 sums of any regular files"""
        def hashes_changed(src_rp, mir_rorp):
        cls.line_buffer.append(line)
        if len(cls.line_buffer) >= 100:
            cls.write_buffer()

    def get_size(cls, rorp):
        """Return the size of rorp as a string, or "NA" if not a regular file"""
        if not rorp:
            return "NA"
        if rorp.isreg():
            return str(rorp.getsize())
        else:
            return "0"

    def write_buffer(cls):
        """Write buffer to file because buffer is full

        The buffering is necessary because the GzipFile.write() method
        seems fairly slow.

        """
        assert cls.line_buffer and cls._fileobj
        cls.line_buffer.append('')  # have join add _line_sep to end also
        cls._fileobj.write(cls._line_sep.join(cls.line_buffer))
        cls.line_buffer = []

    def close(cls):
        """Close file stats file"""
        assert cls._fileobj, cls._fileobj
        if cls.line_buffer:
            cls.write_buffer()
        assert not cls._fileobj.close()  # close() returns None on success
        cls._fileobj = cls._rp = None

static.MakeClass(FileStats)
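# The batching above (collect lines, flush every 100) exists because many small
# GzipFile.write() calls are slow.  Below is a minimal standalone sketch of the
# same pattern; the names BufferedGzipLines and flush_threshold are illustrative
# assumptions and not part of rdiff-backup.
import gzip

class BufferedGzipLines:
    """Collect lines and write them to a gzip file in batches."""

    def __init__(self, path, flush_threshold=100):
        self._fileobj = gzip.open(path, 'ab')
        self._buffer = []
        self._threshold = flush_threshold

    def add(self, line):
        """Queue one line; flush when the buffer reaches the threshold."""
        self._buffer.append(line)
        if len(self._buffer) >= self._threshold:
            self.flush()

    def flush(self):
        """Write all buffered lines as one joined string."""
        if not self._buffer:
            return
        self._buffer.append(b'')  # so join() also ends the last line with a newline
        self._fileobj.write(b'\n'.join(self._buffer))
        self._buffer = []

    def close(self):
        """Flush any remaining lines and close the underlying file."""
        self.flush()
        self._fileobj.close()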
            src_rp = (source_rps.get(dest_sig.index) or
                      rpath.RORPath(dest_sig.index))
            diff_rorp = src_rp.getRORPath()
            if dest_sig.isflaglinked():
                diff_rorp.flaglinked(dest_sig.get_link_flag())
            elif src_rp.isreg():
                if dest_sig.isreg():
                    attach_diff(diff_rorp, src_rp, dest_sig)
                else:
                    attach_snapshot(diff_rorp, src_rp)
                if reset_perms:
                    src_rp.chmod(src_rp.getperms() & ~0400)
            else:
                dest_sig.close_if_necessary()
                diff_rorp.set_attached_filetype('snapshot')
            yield diff_rorp

static.MakeClass(SourceStruct)


class DestinationStruct:
    """Hold info used by destination side when backing up"""

    def get_dest_select(cls, rpath, use_metadata=1):
        """Return destination select rorpath iterator

        If metadata file doesn't exist, select all files on destination
        except rdiff-backup-data directory.

        """
        def get_iter_from_fs():
            """Get the combined iterator from the filesystem"""
            sel = selection.Select(rpath)
            sel.parse_rbdir_exclude()
    def get_indexpath(cls, obj):
        """Return filename for logging.  obj is an rpath, string, or tuple"""
        try:
            return obj.get_indexpath()
        except AttributeError:
            if type(obj) is types.TupleType:
                return "/".join(obj)
            else:
                return str(obj)

    def write_if_open(cls, error_type, rp, exc):
        """Call cls.write(...) if error log open, only log otherwise"""
        if not Globals.isbackup_writer and Globals.backup_writer:
            return Globals.backup_writer.log.ErrorLog.write_if_open(
                error_type, rp, str(exc))  # convert exc because of exception pickling problem
        if cls.isopen():
            cls.write(error_type, rp, exc)
        else:
            Log(cls.get_log_string(error_type, rp, exc), 2)

    def get_log_string(cls, error_type, rp, exc):
        """Return log string to put in error log"""
        assert (error_type == "ListError" or error_type == "UpdateError" or
                error_type == "SpecialFileError"), "Unknown type " + error_type
        return "%s %s %s" % (error_type, cls.get_indexpath(rp), str(exc))

    def close(cls):
        """Close the error log file"""
        if not Globals.isbackup_writer:
            return Globals.backup_writer.log.ErrorLog.close()
        assert not cls._log_fileobj.close()  # close() returns None on success
        cls._log_fileobj = None

static.MakeClass(ErrorLog)
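# Hedged usage sketch: per-file failures are typically funneled through
# ErrorLog.write_if_open() with one of the three error types checked in
# get_log_string() above.  The wrapper name copy_with_logging and its arguments
# are illustrative assumptions, not rdiff-backup functions.
def copy_with_logging(rp, copy_action):
    """Run copy_action(rp); on failure, record an UpdateError and skip the file."""
    try:
        return copy_action(rp)
    except EnvironmentError as exc:
        ErrorLog.write_if_open("UpdateError", rp, exc)
        return None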
class SignalException(Exception):
    """SignalException(signum) means signal signum has been received"""
    pass


class TracebackArchive:
    """Save last 10 caught exceptions, so they can be printed if fatal"""
    _traceback_strings = []

    def add(cls, extra_args=[]):
        """Add most recent exception to archived list

        If extra_args are present, convert to strings and add them as
        extra information to the same traceback archive.

        """
        cls._traceback_strings.append(log.Log.exception_to_string(extra_args))
        if len(cls._traceback_strings) > 10:
            # keep the most recent 10 entries ([:10] would keep the oldest)
            cls._traceback_strings = cls._traceback_strings[-10:]

    def log(cls):
        """Print all exception information to log file"""
        if cls._traceback_strings:
            log.Log(
                "------------ Old traceback info -----------\n%s\n"
                "-------------------------------------------"
                % ("\n".join(cls._traceback_strings),), 3)

static.MakeClass(TracebackArchive)
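# A hedged aside: the "keep only the most recent N entries" behavior of
# TracebackArchive can also be expressed with collections.deque and a maxlen,
# which discards the oldest entries automatically.  The names RecentTracebacks,
# record, and dump below are illustrative assumptions only.
import collections
import traceback

class RecentTracebacks:
    """Keep the formatted text of the last 10 exceptions."""
    _entries = collections.deque(maxlen=10)

    @classmethod
    def record(cls):
        """Archive the traceback of the exception currently being handled."""
        cls._entries.append(traceback.format_exc())

    @classmethod
    def dump(cls):
        """Return all archived tracebacks as one string for logging."""
        return "\n".join(cls._entries)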
                Hardlink.del_rorp(mir_rorp)
            if diff:
                yield diff

    def get_diff(cls, mir_rorp, target_rorp):
        """Get a diff for mir_rorp at time"""
        if not mir_rorp:
            mir_rorp = rpath.RORPath(target_rorp.index)
        elif Globals.preserve_hardlinks and Hardlink.islinked(mir_rorp):
            mir_rorp.flaglinked(Hardlink.get_link_index(mir_rorp))
        elif mir_rorp.isreg():
            expanded_index = cls.mirror_base.index + mir_rorp.index
            file_fp = cls.rf_cache.get_fp(expanded_index, mir_rorp)
            mir_rorp.setfile(hash.FileWrapper(file_fp))
            mir_rorp.set_attached_filetype('snapshot')
        return mir_rorp

static.MakeClass(MirrorStruct)


class TargetStruct:
    """Hold functions to be run on the target side when restoring"""
    _select = None

    def set_target_select(cls, target, select_opts, *filelists):
        """Set the selection object iterating the rorpaths in target"""
        cls._select = selection.Select(target)
        cls._select.ParseArgs(select_opts, filelists)
        cls._select.set_iter()

    def get_initial_iter(cls, target):
        """Return selector previously set with set_target_select"""
        return cls._select or selection.Select(target).set_iter()