def hashes_changed(src_rp, mir_rorp):
    """Compare a source file's data hash against the mirror's metadata.

    Returns 0 when the hashes (and sizes) agree or no recorded digest
    exists to compare against, 1 when they differ.
    """
    recorded_sha1 = Hardlink.get_hash(mir_rorp)
    if not recorded_sha1:
        # No digest recorded in the metadata: nothing to compare against,
        # so warn and treat the pair as unchanged (best effort).
        log.Log("Metadata file has no digest for mirror file {mf}, "
                "unable to compare.".format(mf=mir_rorp), log.WARNING)
        return 0
    # Cheap size check first; only hash the source when sizes agree.
    if (src_rp.getsize() == mir_rorp.getsize()
            and hash.compute_sha1(src_rp) == recorded_sha1):
        return 0
    return 1
def Verify(mirror_rp, inc_rp, verify_time):
    """Check every regular file's SHA1 digest against recorded metadata.

    Walks the repository as of verify_time, recomputes each regular
    file's SHA1 and compares it with the digest stored in the metadata.
    Returns 2 when any mismatch is found, 0 otherwise.
    """
    # Verification must run on the mirror's own side, never remotely.
    assert mirror_rp.conn is Globals.local_connection, (
        "Only verify mirror locally, not remotely over '{conn}'.".format(
            conn=mirror_rp.conn))
    repo_iter = RepoSide.init_and_get_iter(mirror_rp, inc_rp, verify_time)
    base_index = RepoSide.mirror_base.index
    corrupt_count = 0       # files whose computed digest mismatched
    missing_hash_count = 0  # files with no recorded digest to check
    for repo_rorp in repo_iter:
        # Only regular files carry data hashes; skip everything else.
        if not repo_rorp.isreg():
            continue
        recorded_digest = Hardlink.get_hash(repo_rorp)
        if not recorded_digest:
            log.Log("Cannot find SHA1 digest for file {fi}, "
                    "perhaps because this feature was added in v1.1.1".format(
                        fi=repo_rorp), log.WARNING)
            missing_hash_count += 1
            continue
        fp = RepoSide.rf_cache.get_fp(base_index + repo_rorp.index, repo_rorp)
        actual_digest = hash.compute_sha1_fp(fp)
        if actual_digest != recorded_digest:
            corrupt_count += 1
            log.Log("Computed SHA1 digest of file {fi} '{cd}' "
                    "doesn't match recorded digest of '{rd}'. "
                    "Your backup repository may be corrupted!".format(
                        fi=repo_rorp, cd=actual_digest, rd=recorded_digest),
                    log.WARNING)
        else:
            log.Log("Verified SHA1 digest of file {fi}".format(fi=repo_rorp),
                    log.INFO)
    RepoSide.close_rf_cache()
    if corrupt_count:
        log.Log("Verification found {cf} potentially corrupted files".format(
            cf=corrupt_count), log.ERROR)
        return 2
    if missing_hash_count:
        log.Log("Verification found {fi} files without hash, all others "
                "could be verified successfully".format(
                    fi=missing_hash_count), log.NOTE)
    else:
        log.Log("All files verified successfully", log.NOTE)
    return 0