def post_process(self, source_rorp, dest_rorp, changed, success, inc):
    """Write statistics and metadata for one processed file pair.

    changed is true when the source and destination files differed;
    success is true when the files were successfully updated (it is
    always false for unchanged files).
    """
    if Globals.preserve_hardlinks and source_rorp:
        Hardlink.del_rorp(source_rorp)

    # Count the pair in the per-side totals unless it changed but the
    # update failed.
    if success or not changed:
        if source_rorp:
            self.statfileobj.add_source_file(source_rorp)
        if dest_rorp:
            self.statfileobj.add_dest_file(dest_rorp)

    # Choose whose metadata to record, keyed on the success code.
    if success == 1:
        metadata_rorp = source_rorp
    elif success == 0:
        metadata_rorp = dest_rorp
    else:
        metadata_rorp = None  # in case deleted because of ListError

    if success == 1 or success == 2:
        self.statfileobj.add_changed(source_rorp, dest_rorp)
    if metadata_rorp and metadata_rorp.lstat():
        self.metawriter.write_object(metadata_rorp)
    if Globals.file_statistics:
        statistics.FileStats.update(source_rorp, dest_rorp, changed, inc)
def pre_process(self, source_rorp, dest_rorp):
    """Do initial processing on source_rorp and dest_rorp

    It will not be clear whether source_rorp and dest_rorp have errors
    at this point, so don't do anything which assumes they will be
    backed up correctly.
    """
    if Globals.preserve_hardlinks and source_rorp:
        Hardlink.add_rorp(source_rorp, dest_rorp)
    # FIX: 0o1000/0o700 replace the Python-2-only octal literals
    # 01000/0700, which are a syntax error under Python 3 (the 0o form
    # works on Python 2.6+ as well).  "% 0o1000" keeps only the low
    # nine permission bits; "< 0o700" means the owner lacks full rwx,
    # which matters when running unprivileged (process_uid != 0).
    if (dest_rorp and dest_rorp.isdir() and Globals.process_uid != 0
            and dest_rorp.getperms() % 0o1000 < 0o700):
        self.unreadable_dir_init(source_rorp, dest_rorp)
def get_one_sig(cls, dest_base_rpath, index, src_rorp, dest_rorp):
    """Return a signature rorp for one source/destination pair.

    Returns None when a signature file object could not be obtained
    for a regular destination file.
    """
    # Hardlinked sources just flag the link; no data signature needed.
    if (Globals.preserve_hardlinks and src_rorp
            and Hardlink.islinked(src_rorp)):
        dest_sig = rpath.RORPath(index)
        dest_sig.flaglinked(Hardlink.get_link_index(src_rorp))
        return dest_sig

    # No destination entry: emit a bare rorp carrying only the index.
    if not dest_rorp:
        return rpath.RORPath(index)

    dest_sig = dest_rorp.getRORPath()
    if dest_rorp.isreg():
        dest_rp = longname.get_mirror_rp(dest_base_rpath, dest_rorp)
        sig_fp = cls.get_one_sig_fp(dest_rp)
        if sig_fp is None:
            return None
        dest_sig.setfile(sig_fp)
    return dest_sig
def get_sigs(cls, dest_base_rpath):
    """Yield signatures of any changed destination files

    If we are backing up across a pipe, we must flush the pipeline
    every so often so it doesn't get congested on destination end.
    """
    flush_threshold = Globals.pipeline_max_length - 2
    num_rorps_seen = 0
    for src_rorp, dest_rorp in cls.CCPP:
        if Globals.backup_reader is not Globals.backup_writer:
            num_rorps_seen += 1
            if num_rorps_seen > flush_threshold:
                num_rorps_seen = 0
                yield iterfile.MiscIterFlushRepeat
        # Skip pairs that are identical (including hardlink identity
        # when hardlinks are preserved); everything else gets a sig.
        if not (src_rorp and dest_rorp and src_rorp == dest_rorp
                and (not Globals.preserve_hardlinks
                     or Hardlink.rorp_eq(src_rorp, dest_rorp))):
            # FIX: the old "src_rorp and src_rorp.index or dest_rorp.index"
            # idiom fell through to dest_rorp.index whenever
            # src_rorp.index was falsy (the root index ()), raising
            # AttributeError if dest_rorp was None.  A real conditional
            # expression keys only on src_rorp's presence.
            index = src_rorp.index if src_rorp else dest_rorp.index
            sig = cls.get_one_sig(dest_base_rpath, index,
                                  src_rorp, dest_rorp)
            if sig:
                cls.CCPP.flag_changed(index)
                yield sig
def get_hash(repo_rorp):
    """Return a sha1 digest from the repository, or None if unavailable.

    If hardlinks are saved in the metadata, the digest comes from the
    first hardlink in the group.
    """
    Hardlink.add_rorp(repo_rorp)
    if Hardlink.islinked(repo_rorp):
        digest = Hardlink.get_sha1(repo_rorp)
    elif repo_rorp.has_sha1():
        digest = repo_rorp.get_sha1()
    else:
        digest = None
    Hardlink.del_rorp(repo_rorp)
    return digest
def patch_hardlink_to_temp(self, diff_rorp, new):
    """Hardlink diff_rorp to temp, update hash if necessary

    The link is created relative to self.basis_root_rp; afterwards the
    cached source rorp is tagged with any sha1 carried by diff_rorp
    (see CCPP.update_hardlink_hash).
    """
    Hardlink.link_rp(diff_rorp, new, self.basis_root_rp)
    self.CCPP.update_hardlink_hash(diff_rorp)
def update_hardlink_hash(self, diff_rorp):
    """Copy the hash diff_rorp points to onto its cached source rorp.

    Does nothing when diff_rorp carries no sha1 digest.
    """
    sha1sum = Hardlink.get_sha1(diff_rorp)
    if sha1sum:
        source_rorp = self.get_source_rorp(diff_rorp.index)
        source_rorp.set_sha1(sha1sum)