def hard_link_in_cwd(filename):
    """Create a hard-linked pair inside the current working directory,
    archive the whole directory, and verify the link survives extraction.
    """
    with util.directory_created("hardlink_scratch") as scratch:
        with util.chdir(scratch):
            util.touch("a")
            os.link("a", "b")
            archive_name = os.path.join("..", "hardlink_archive.xar")
            with util.archive_created(archive_name, ".") as archive:
                dest_name = os.path.join("..", "hardlink_extracted")
                with util.directory_created(dest_name) as dest:
                    subprocess.check_call(
                        ["xar", "-x", "-C", dest, "-f", archive])
                    # Both names must resolve to the same inode after the
                    # round trip through xar.
                    _assert_same_inodes(os.path.join(dest, "a"),
                                        os.path.join(dest, "b"))
def _data_test(filename, *args, **kwargs):
    """Generator helper for data tests.

    Yields a scratch directory for the caller to populate, then (on resume)
    archives it with the given xar options, validates the TOC, extracts the
    archive, and compares the extracted tree against the original.
    """
    with util.directory_created("data_scratch") as scratch:
        yield scratch  # Files are now created by the caller
        with util.archive_created("data.xar", "./data_scratch",
                                  *args, **kwargs) as archive:
            _process_toc(archive)
            with util.directory_created("data_extracted") as dest:
                subprocess.check_call(
                    ["xar", "-x", "-f", archive, "-C", dest])
                util.assert_identical_directories(
                    scratch, os.path.join(dest, "data_scratch"))
def hard_link_identical_files(filename):
    """Archive two separate files with identical contents using --link-same
    and verify xar coalesces them into hard links on extraction.
    """
    with util.directory_created("hardlink_scratch") as scratch:
        # Two distinct files, byte-identical contents.
        for name in ("a", "b"):
            with open(os.path.join(scratch, name), "w") as f:
                f.write("1234samecontent")
        with util.archive_created("hardlink_archive.xar", "hardlink_scratch",
                                  "--link-same") as archive:
            with util.directory_created("hardlink_extracted") as dest:
                subprocess.check_call(
                    ["xar", "-x", "-C", dest, "-f", archive])
                _assert_same_inodes(
                    os.path.join(dest, "hardlink_scratch", "a"),
                    os.path.join(dest, "hardlink_scratch", "b"))
def hard_link_in_cwd(filename):
    """Hard-link round-trip test: link "b" to "a" in the working directory,
    archive ".", extract, and check the two entries still share an inode.
    """
    with util.directory_created("hardlink_scratch") as directory:
        with util.chdir(directory):
            util.touch("a")
            os.link("a", "b")
            with util.archive_created(
                    os.path.join("..", "hardlink_archive.xar"), ".") as path:
                extracted_dir = os.path.join("..", "hardlink_extracted")
                with util.directory_created(extracted_dir) as extracted:
                    cmd = ["xar", "-x", "-C", extracted, "-f", path]
                    subprocess.check_call(cmd)
                    a_path = os.path.join(extracted, "a")
                    b_path = os.path.join(extracted, "b")
                    _assert_same_inodes(a_path, b_path)
def coalesce_heap(filename):
    """Build an archive with --coalesce-heap from two identical binaries
    and confirm the result still extracts cleanly.
    """
    with util.directory_created("scratch") as scratch:
        shutil.copy("/bin/ls", os.path.join(scratch, "ls"))
        shutil.copy(os.path.join(scratch, "ls"), os.path.join(scratch, "foo"))
        with util.chdir(scratch):
            with util.archive_created(os.path.join("..", "heap.xar"), ".",
                                      "--coalesce-heap") as archive:
                # Verify file offsets are as we expect; the indexing also
                # asserts at least two entries made it into the archive.
                offsets = _file_offsets_for_archive(
                    archive, os.path.join("..", "heap1.xsl"))
                (f1, o1) = offsets[0]
                (f2, o2) = offsets[1]
                # Make sure extraction goes all right
                with util.directory_created("extracted") as dest:
                    subprocess.check_call(
                        ["xar", "-x", "-f", archive, "-C", dest])
def hard_link_identical_files(filename):
    """--link-same test: two files written separately with the same bytes
    must come back as hard links of one another after extraction.
    """
    with util.directory_created("hardlink_scratch") as directory:
        shared_content = "1234samecontent"
        with open(os.path.join(directory, "a"), "w") as first:
            first.write(shared_content)
        with open(os.path.join(directory, "b"), "w") as second:
            second.write(shared_content)
        with util.archive_created("hardlink_archive.xar", "hardlink_scratch",
                                  "--link-same") as path:
            with util.directory_created("hardlink_extracted") as extracted:
                subprocess.check_call(
                    ["xar", "-x", "-C", extracted, "-f", path])
                _assert_same_inodes(
                    os.path.join(extracted, "hardlink_scratch", "a"),
                    os.path.join(extracted, "hardlink_scratch", "b"))
def normal_heap(filename):
    """Without --coalesce-heap, two identical files must occupy distinct,
    ordered heap offsets; the archive must also extract cleanly.
    """
    with util.directory_created("scratch") as scratch:
        shutil.copy("/bin/ls", os.path.join(scratch, "ls"))
        shutil.copy(os.path.join(scratch, "ls"), os.path.join(scratch, "foo"))
        with util.chdir(scratch):
            with util.archive_created(os.path.join("..", "heap.xar"),
                                      ".") as archive:
                # Verify file offsets are as we expect
                offsets = _file_offsets_for_archive(
                    archive, os.path.join("..", "heap1.xsl"))
                (f1, o1) = offsets[0]
                (f2, o2) = offsets[1]
                assert o1 < o2, "offset for first file \"{f1}\" ({o1}) greater than or equal to offset for last file \"{f2}\" ({o2})".format(f1=f1, o1=o1, f2=f2, o2=o2)
                # Make sure extraction goes all right
                with util.directory_created("extracted") as dest:
                    subprocess.check_call(
                        ["xar", "-x", "-f", archive, "-C", dest])
def _test_xattr_on_file_with_contents(filename, file_contents, xattrs=None,
                                      xar_create_flags=None,
                                      xar_extract_flags=None):
    """Write a file with the given contents and extended attributes, archive
    it with xar, extract it, and verify both xattrs and contents survive.

    Args:
        filename: path of the file to create (always removed on exit).
        file_contents: text written to the file before archiving.
        xattrs: iterable of (key, value) extended-attribute pairs.
        xar_create_flags: extra command-line flags for archive creation.
        xar_extract_flags: extra command-line flags for extraction.

    Raises:
        MissingExtendedAttributeError: if an extended attribute is missing
            or the extracted file contents are wrong.
    """
    # Fix: the original used mutable default arguments ([]); use None
    # sentinels so no list object is shared across calls.
    xattrs = [] if xattrs is None else xattrs
    xar_create_flags = [] if xar_create_flags is None else xar_create_flags
    xar_extract_flags = [] if xar_extract_flags is None else xar_extract_flags
    try:
        # Write file out
        with open(filename, "w") as f:
            f.write(file_contents)
            for (key, value) in xattrs:
                # NOTE(review): passes the open file object, not the path;
                # the xattr module accepts file-like objects — confirm.
                xattr.setxattr(f, key, value)
        # Store it into a xarchive
        archive_name = "{f}.xar".format(f=filename)
        with util.archive_created(archive_name, filename,
                                  *xar_create_flags) as path:
            # Extract xarchive
            with util.directory_created("extracted") as directory:
                # Validate resulting xattrs
                subprocess.check_call(
                    ["xar", "-x", "-C", directory, "-f", path]
                    + xar_extract_flags)
                for (key, value) in xattrs:
                    try:
                        assert xattr.getxattr(os.path.join(directory, filename), key) == value, "extended attribute \"{n}\" has incorrect contents after extraction".format(n=key)
                    except KeyError:
                        raise MissingExtendedAttributeError("extended attribute \"{n}\" missing after extraction".format(n=key))
                # Validate file contents
                with open(os.path.join(directory, filename), "r") as f:
                    if f.read() != file_contents:
                        # Fix: dropped duplicated word ("has has") from the
                        # original error message.
                        raise MissingExtendedAttributeError("archived file \"{f}\" has incorrect contents after extraction".format(f=filename))
    finally:
        os.unlink(filename)
def _verify_extraction_failed(filename):
    """Assert that xar exits non-zero when extracting the given archive."""
    with util.directory_created("extracted") as directory:
        try:
            with open("/dev/null", "w") as devnull:
                status = subprocess.call(
                    ["xar", "-x", "-C", directory, "-f", filename],
                    stdout=devnull, stderr=devnull)
            assert status != 0, "xar reported success extracting an archive with a broken TOC"
        finally:
            # Clean up even if the assertion fires.
            if os.path.exists(directory):
                shutil.rmtree(directory)
def _verify_extraction_failed(filename):
    """Extraction of `filename` is expected to fail; assert xar says so."""
    with util.directory_created("extracted") as target:
        try:
            extract_cmd = ["xar", "-x", "-C", target, "-f", filename]
            with open("/dev/null", "w") as sink:
                rc = subprocess.call(extract_cmd, stdout=sink, stderr=sink)
                assert rc != 0, "xar reported success extracting an archive with a broken TOC"
        finally:
            # Remove the extraction directory regardless of outcome.
            if os.path.exists(target):
                shutil.rmtree(target)
def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):
    """Archive a path, then repeatedly chop bytes off the end of the archive
    and assert that each truncated copy fails to extract.

    Args:
        filename: name of the archive file to create.
        path_to_be_archived: filesystem path to store in the archive.
        bytes_to_chop: number of bytes removed per iteration.
        *args: unused; kept for call-signature compatibility.
    """
    with util.archive_created(filename, path_to_be_archived) as path:
        with open("/dev/null", "w") as bitbucket:
            size = os.stat(path).st_size
            while size > 0:
                size = max(0, size - bytes_to_chop)
                # BUG FIX: the original opened with mode "w+", which zeroes
                # the file on open, so truncate() produced an empty/all-zero
                # file instead of a truncated archive. "rb+" preserves the
                # existing bytes and truncate() chops only the tail.
                # (Also dropped the unused last_size variable.)
                with open(path, "rb+") as f:
                    f.truncate(size)
                with util.directory_created("scratch") as directory:
                    returncode = subprocess.call(
                        ["xar", "-x", "-f", path, "-C", directory],
                        stderr=bitbucket)
                    assert returncode != 0, "xar claimed to succeed when extracting a truncated archive"
def _check_compression(filename, *args, **kwargs):
    """Archive /bin with the given xar options, extract it, and verify the
    extracted tree is identical to the original.
    """
    with util.archive_created(filename, "/bin", *args, **kwargs) as archive:
        with util.directory_created("extracted") as dest:
            extract_cmd = ["xar", "-x", "-f", archive, "-C", dest]
            subprocess.check_call(extract_cmd)
            util.assert_identical_directories(
                "/bin", os.path.join(dest, "bin"))