def reload_and_compare_pdiff_indices(index):
    """Round-trip *index* through disk and verify the reloaded copy matches.

    Builds a fresh PDiffIndex from the same directory and settings, asserts
    that the persisted state agrees with the in-memory one, and returns the
    reloaded instance so the caller can continue using it.
    """
    reloaded = PDiffIndex(index.patches_dir, index.max, index.wants_merged_pdiffs)

    def assert_same_projection(hist_attr, order_attr, project):
        # Compare one derived attribute of every history entry, in history
        # order, between the in-memory index and the reloaded one.
        original = getattr(index, hist_attr)
        original_order = getattr(index, order_attr)
        restored = getattr(reloaded, hist_attr)
        restored_order = getattr(reloaded, order_attr)
        assert [project(original[key]) for key in original_order] == \
            [project(restored[key]) for key in restored_order]

    assert index._history_order == reloaded._history_order
    assert index._history == reloaded._history
    assert index._old_merged_patches_prefix == reloaded._old_merged_patches_prefix
    assert index._unmerged_history_order == reloaded._unmerged_history_order

    # Only part of the history is carried over.  Ignore the missing bits and
    # compare the surviving fields (name, size, sha256) entry by entry.
    assert_same_projection('_history', '_history_order', lambda entry: entry[0])
    assert_same_projection('_history', '_history_order', lambda entry: entry[2].size)
    assert_same_projection('_history', '_history_order', lambda entry: entry[2].sha256)

    if index.wants_merged_pdiffs:
        assert_same_projection('_unmerged_history', '_unmerged_history_order',
                               lambda entry: entry[0])
        assert_same_projection('_unmerged_history', '_unmerged_history_order',
                               lambda entry: entry[2].size)
        assert_same_projection('_unmerged_history', '_unmerged_history_order',
                               lambda entry: entry[2].sha256)
        assert len(index._history_order) == len(reloaded._unmerged_history_order)

        # Naming convention: unmerged patches are "patch-*", merged ones
        # carry a "T-patch-*" prefix.
        for unmerged_patch in index._unmerged_history_order:
            assert unmerged_patch.startswith('patch')
        for merged_patch in index._history_order:
            assert merged_patch.startswith('T-patch')

    assert index.filesizehashes == reloaded.filesizehashes
    assert index.can_path == reloaded.can_path
    assert index.has_merged_pdiffs == reloaded.has_merged_pdiffs

    return reloaded
async def test_corrupt_pdiff_index(self):
    """A misaligned Index history is restored when possible, else discarded."""
    with tempdir() as tmpdir:
        pdiff_dir = os.path.join(tmpdir, "pdiffs")
        index_file = os.path.join(pdiff_dir, 'Index')
        os.mkdir(pdiff_dir)

        def load_index_from(content):
            # Overwrite the on-disk Index file and parse it from scratch.
            with open(index_file, 'w') as out:
                out.write(content)
            return PDiffIndex(pdiff_dir, 3, False)

        # Recoverable misalignment: the consistent part of the history
        # survives the reload.
        index = load_index_from(MISALIGNED_HISTORY_RESTORABLE)
        assert index._history_order == [
            "T-2020-12-12-0800.11-F-2020-11-26-0822.42",
            "T-2020-12-12-0800.11-F-2020-12-12-0800.11",
        ]
        assert index._unmerged_history_order == ["2020-12-12-0800.11"]

        # Unrecoverable misalignment: the whole history is dropped.
        index = load_index_from(MISALIGNED_HISTORY_BROKEN)
        assert index._history_order == []
        assert index._unmerged_history_order == []
async def test_pdiff_index_merged(self):
    """End-to-end exercise of a PDiffIndex with merged pdiffs enabled.

    Generates a sequence of patches, pruning the history after each step and
    asserting exactly which (merged and unmerged) patches become obsolete.
    Also flips the merged-pdiffs setting off, back on, and off again to
    verify the conversion logic in both directions.
    """
    with tempdir() as tmpdir:
        pdiff_dir = os.path.join(tmpdir, "pdiffs")
        index_file = os.path.join(pdiff_dir, 'Index')
        # Keep at most 3 patches; merged pdiffs enabled.
        index = PDiffIndex(pdiff_dir, 3, True)

        data = [
            'Version 0',
            'Some',
            'data',
            'across',
            '6',
            'lines',
        ]

        # The pdiff system assumes we start from a non-empty file
        generate_orig(tmpdir, data)
        data[0] = 'Version 1'

        # Non-existing directory => empty history
        prune_history(index,
                      known_patch_count_before=0,
                      known_patch_count_after=0,
                      detected_obsolete_patches=[]
                      )
        # Update should be possible but do nothing
        # (dak generate-index-diffs relies on this behaviour)
        index.update_index()
        # It should not create the directory
        assert not os.path.isdir(pdiff_dir)

        # Adding a patch should "just work(tm)"
        await generate_patch(index, "patch-1", tmpdir, data)
        assert os.path.isdir(pdiff_dir)
        assert index.filesizehashes is not None
        assert index.filesizehashes.size > 0
        prune_history(index,
                      known_patch_count_before=1,
                      known_patch_count_after=1,
                      detected_obsolete_patches=[]
                      )
        # The Index file only appears once update_index() is called.
        assert not os.path.isfile(index_file)
        index.update_index()
        assert os.path.isfile(index_file)

        reload_and_compare_pdiff_indices(index)

        index.can_path = "/some/where"

        data[0] = 'Version 2'
        data[3] = 'over'
        await generate_patch(index, "patch-2", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=2,
                      known_patch_count_after=2,
                      detected_obsolete_patches=[]
                      )

        data[2] = 'Text'
        await generate_patch(index, "patch-3", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=3,
                      known_patch_count_after=3,
                      detected_obsolete_patches=[]
                      )

        data[0] = 'Version 3'
        await generate_patch(index, "patch-4", tmpdir, data)

        # A 4th patch exceeds the limit of 3: the oldest patch (and its
        # merged counterpart) becomes obsolete.
        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['T-patch-1-F-patch-1.gz',
                                                 'patch-1.gz']
                      )

        data[0] = 'Version 4'
        data[-1] = 'lines.'
        await generate_patch(index, "patch-5", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['T-patch-1-F-patch-1.gz',
                                                 'T-patch-2-F-patch-1.gz',
                                                 'T-patch-2-F-patch-2.gz',
                                                 'patch-1.gz',
                                                 'patch-2.gz'
                                                 ]
                      )

        index.update_index()

        # Swap to the reloaded index. Assuming everything works as intended
        # this should not matter.
        reload_and_compare_pdiff_indices(index)

        data[0] = 'Version 5'
        await generate_patch(index, "patch-6", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['T-patch-1-F-patch-1.gz',
                                                 'T-patch-2-F-patch-1.gz',
                                                 'T-patch-2-F-patch-2.gz',
                                                 'T-patch-3-F-patch-1.gz',
                                                 'T-patch-3-F-patch-2.gz',
                                                 'T-patch-3-F-patch-3.gz',
                                                 'patch-1.gz',
                                                 'patch-2.gz',
                                                 'patch-3.gz',
                                                 ]
                      )

        delete_obsolete_patches(index)

        data[0] = 'Version 6'
        await generate_patch(index, "patch-7", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=[
                          'T-patch-4-F-patch-1.gz',
                          'T-patch-4-F-patch-2.gz',
                          'T-patch-4-F-patch-3.gz',
                          'T-patch-4-F-patch-4.gz',
                          'patch-4.gz',
                      ]
                      )

        delete_obsolete_patches(index)
        index.update_index()

        reload_and_compare_pdiff_indices(index)

        # CHANGING TO NON-MERGED INDEX
        index = PDiffIndex(pdiff_dir, 3, False)
        data[0] = 'Version 7'
        # We need to add a patch to trigger the conversion
        await generate_patch(index, "patch-8", tmpdir, data)

        # All previously generated merged ("T-patch-*") patches become
        # obsolete along with the oldest unmerged one.
        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=[
                          'T-patch-5-F-patch-2.gz',
                          'T-patch-5-F-patch-3.gz',
                          'T-patch-5-F-patch-4.gz',
                          'T-patch-5-F-patch-5.gz',
                          'T-patch-6-F-patch-3.gz',
                          'T-patch-6-F-patch-4.gz',
                          'T-patch-6-F-patch-5.gz',
                          'T-patch-6-F-patch-6.gz',
                          'T-patch-7-F-patch-4.gz',
                          'T-patch-7-F-patch-5.gz',
                          'T-patch-7-F-patch-6.gz',
                          'T-patch-7-F-patch-7.gz',
                          'patch-5.gz',
                      ]
                      )

        delete_obsolete_patches(index)
        index.update_index()

        # CHANGING BACK TO MERGED
        index = PDiffIndex(pdiff_dir, 3, True)
        data[0] = 'Version 8'
        # We need to add a patch to trigger the conversion
        await generate_patch(index, "patch-9", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['patch-6.gz']
                      )

        delete_obsolete_patches(index)
        index.update_index()

        # CHANGING TO NON-MERGED INDEX (AGAIN)
        # This will trip the removal of all the merged patches, proving they
        # were generated in the first place.
        index = PDiffIndex(pdiff_dir, 3, False)
        data[0] = 'Version 9'
        # We need to add a patch to trigger the conversion
        await generate_patch(index, "patch-A", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=[
                          'T-patch-9-F-patch-6.gz',
                          'T-patch-9-F-patch-7.gz',
                          'T-patch-9-F-patch-8.gz',
                          'T-patch-9-F-patch-9.gz',
                          'patch-7.gz',
                      ]
                      )

        delete_obsolete_patches(index)
        index.update_index()
async def test_pdiff_index_unmerged(self):
    """End-to-end exercise of a PDiffIndex with merged pdiffs disabled.

    Mirrors the merged-pdiffs scenario but only plain "patch-*" files are
    ever generated, so pruning must never report "T-patch-*" entries.  Also
    verifies that a stray file in the pdiff directory is flagged obsolete.
    """
    with tempdir() as tmpdir:
        pdiff_dir = os.path.join(tmpdir, "pdiffs")
        index_file = os.path.join(pdiff_dir, 'Index')
        # Keep at most 3 patches; merged pdiffs disabled.
        index = PDiffIndex(pdiff_dir, 3, False)

        data = [
            'Version 0',
            'Some',
            'data',
            'across',
            '6',
            'lines',
        ]

        # The pdiff system assumes we start from a non-empty file
        generate_orig(tmpdir, data)
        data[0] = 'Version 1'

        # Non-existing directory => empty history
        prune_history(index,
                      known_patch_count_before=0,
                      known_patch_count_after=0,
                      detected_obsolete_patches=[]
                      )
        # Update should be possible but do nothing
        # (dak generate-index-diffs relies on this behaviour)
        index.update_index()
        # It should not create the directory
        assert not os.path.isdir(pdiff_dir)

        # Adding a patch should "just work(tm)"
        await generate_patch(index, "patch-1", tmpdir, data)
        assert os.path.isdir(pdiff_dir)
        assert index.filesizehashes is not None
        assert index.filesizehashes.size > 0
        prune_history(index,
                      known_patch_count_before=1,
                      known_patch_count_after=1,
                      detected_obsolete_patches=[]
                      )
        # The Index file only appears once update_index() is called.
        assert not os.path.isfile(index_file)
        index.update_index()
        assert os.path.isfile(index_file)

        reload_and_compare_pdiff_indices(index)

        index.can_path = "/some/where"

        # We should detect obsolete files that are not part of the
        # history.
        with open(os.path.join(pdiff_dir, "random-patch"), "w"):
            pass

        prune_history(index,
                      known_patch_count_before=1,
                      known_patch_count_after=1,
                      detected_obsolete_patches=['random-patch']
                      )

        delete_obsolete_patches(index)

        data[0] = 'Version 2'
        data[3] = 'over'
        await generate_patch(index, "patch-2", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=2,
                      known_patch_count_after=2,
                      detected_obsolete_patches=[]
                      )

        data[2] = 'Text'
        await generate_patch(index, "patch-3", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=3,
                      known_patch_count_after=3,
                      detected_obsolete_patches=[]
                      )

        data[0] = 'Version 3'
        await generate_patch(index, "patch-4", tmpdir, data)

        # A 4th patch exceeds the limit of 3: the oldest becomes obsolete.
        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['patch-1.gz']
                      )

        data[0] = 'Version 4'
        data[-1] = 'lines.'
        await generate_patch(index, "patch-5", tmpdir, data)

        prune_history(index,
                      known_patch_count_before=4,
                      known_patch_count_after=3,
                      detected_obsolete_patches=['patch-1.gz', 'patch-2.gz']
                      )

        index.update_index()
        reload_and_compare_pdiff_indices(index)
        delete_obsolete_patches(index)
async def genchanges(Options, outdir, oldfile, origfile, maxdiffs=56, merged_pdiffs=False):
    """Update the pdiff index in *outdir* with a patch from *oldfile* to *origfile*.

    Options       -- dak option dict; honours "NoAct" (dry run), "PatchName"
                     (name of the patch to generate) and optionally
                     "CanonicalPath" (stored on the index).
    outdir        -- directory holding the pdiffs and their Index file.
    oldfile       -- path (without compression extension) of the previously
                     published version of the index file.
    origfile      -- path (without compression extension) of the current
                     version to diff against.
    maxdiffs      -- maximum number of patches to keep in the history.
    merged_pdiffs -- whether to also maintain merged ("T-patch") pdiffs.

    On success *oldfile* is replaced by a hard link to the resolved current
    file, so the next run diffs against today's content.
    """
    if "NoAct" in Options:
        print("Not acting on: od: %s, oldf: %s, origf: %s, md: %s" % (outdir, oldfile, origfile, maxdiffs))
        return

    patchname = Options["PatchName"]

    # origfile = /path/to/Packages
    # oldfile  = ./Packages
    # newfile  = ./Packages.tmp
    # (outdir, oldfile, origfile) = argv

    # smartstat returns a (compression-extension, stat-result) pair;
    # a falsy stat means the file does not exist in any compressed form.
    (oldext, oldstat) = smartstat(oldfile)
    (origext, origstat) = smartstat(origfile)
    if not origstat:
        print("%s: doesn't exist" % (origfile))
        return

    # orig file with the (new) compression extension in case it changed
    old_full_path = oldfile + origext
    resolved_orig_path = os.path.realpath(origfile + origext)

    if not oldstat:
        # First run for this index: nothing to diff against yet; just
        # remember the current file for next time.
        print("%s: initial run" % origfile)
        # The target file might have been copying over the symlink as an accident
        # in a previous run.
        if os.path.islink(old_full_path):
            os.unlink(old_full_path)
        os.link(resolved_orig_path, old_full_path)
        return

    # NOTE(review): presumably this stat-tuple slice detects "unchanged
    # since last run" (e.g. inode/size) — confirm against smartstat.
    if oldstat[1:3] == origstat[1:3]:
        return

    upd = PDiffIndex(outdir, int(maxdiffs), merged_pdiffs)

    if "CanonicalPath" in Options:
        upd.can_path = Options["CanonicalPath"]

    # generate_and_add_patch_file needs an uncompressed file
    # The `newfile` variable is our uncompressed copy of `oldfile` thanks to
    # smartlink
    newfile = oldfile + ".new"
    if os.path.exists(newfile):
        os.unlink(newfile)
    await smartlink(origfile, newfile)

    try:
        await upd.generate_and_add_patch_file(oldfile, newfile, patchname)
    finally:
        # Always remove the temporary uncompressed copy, even on failure.
        os.unlink(newfile)

    upd.prune_patch_history()

    # Best-effort removal of patches that fell out of the history.
    for obsolete_patch in upd.find_obsolete_patches():
        tryunlink(obsolete_patch)

    upd.update_index()

    if oldfile + oldext != old_full_path and os.path.islink(old_full_path):
        # The target file might have been copying over the symlink as an accident
        # in a previous run.
        os.unlink(old_full_path)

    # Replace the remembered old file with a hard link to the current one.
    os.unlink(oldfile + oldext)
    os.link(resolved_orig_path, old_full_path)