コード例 #1
0
ファイル: checksums.py プロジェクト: packagesdev/xar
def md5_file_checksum_failure(filename):
	"""Creating an archive with --file-cksum md5 must fail.

	The xar invocation is expected to exit non-zero, which
	util.archive_created surfaces as subprocess.CalledProcessError.
	"""
	try:
		with open("/dev/null", "a") as sink, util.archive_created(filename, "/bin", "--file-cksum", "md5", stderr=sink) as path:
			raise AssertionError("xar succeeded when it should have failed")
	except subprocess.CalledProcessError:
		pass  # expected: md5 is rejected as a file checksum algorithm
コード例 #2
0
ファイル: attr.py プロジェクト: P79N6A/apple-opensource
def _test_xattr_on_file_with_contents(filename, file_contents, xattrs=None, xar_create_flags=None, xar_extract_flags=None):
	"""Round-trip a file's contents and extended attributes through a xar archive.

	Writes `file_contents` to `filename`, applies each (key, value) pair in
	`xattrs` as an extended attribute, archives the file with
	`xar_create_flags`, extracts with `xar_extract_flags`, then asserts the
	xattrs and contents survived. Always removes `filename` afterwards.

	Raises:
		MissingExtendedAttributeError: an xattr is missing after extraction,
			or the extracted file's contents are wrong.
		AssertionError: an xattr is present but has the wrong value.
	"""
	# Mutable default arguments ([]) are shared across calls; use None
	# sentinels and create fresh lists instead.
	xattrs = [] if xattrs is None else xattrs
	xar_create_flags = [] if xar_create_flags is None else xar_create_flags
	xar_extract_flags = [] if xar_extract_flags is None else xar_extract_flags
	try:
		# Write file out
		with open(filename, "w") as f:
			f.write(file_contents)
			for (key, value) in xattrs:
				# NOTE(review): the open file object (not the path) is passed
				# here — presumably the xattr module accepts file objects /
				# descriptors; confirm against the xattr API.
				xattr.setxattr(f, key, value)
		
		# Store it into a xarchive
		archive_name = "{f}.xar".format(f=filename)
		with util.archive_created(archive_name, filename, *xar_create_flags) as path:
			# Extract xarchive
			with util.directory_created("extracted") as directory:
				# Validate resulting xattrs
				subprocess.check_call(["xar", "-x", "-C", directory, "-f", path] + xar_extract_flags)
				for (key, value) in xattrs:
					try:
						assert xattr.getxattr(os.path.join(directory, filename), key) == value, "extended attribute \"{n}\" has incorrect contents after extraction".format(n=key)
					except KeyError:
						raise MissingExtendedAttributeError("extended attribute \"{n}\" missing after extraction".format(n=key))
				
				# Validate file contents
				# NOTE(review): MissingExtendedAttributeError is an odd type
				# for a contents mismatch, but callers may depend on it.
				with open(os.path.join(directory, filename), "r") as f:
					if f.read() != file_contents:
						# BUGFIX: message previously read "has has incorrect".
						raise MissingExtendedAttributeError("archived file \"{f}\" has incorrect contents after extraction".format(f=filename))
	finally:
		os.unlink(filename)
コード例 #3
0
ファイル: checksums.py プロジェクト: 010001111/darling
def broken_toc_default_checksum(filename):
	"""Corrupting the TOC of a default-checksum archive must make extraction fail."""
	with util.archive_created(filename, "/bin") as path:
		# Stomp on 100 bytes of the table of contents, skipping the first
		# 4 bytes past the header (the original test did the same; reason unknown).
		damage_start = _get_header_size(path) + 4
		_clobber_bytes_at(range(damage_start, damage_start + 100), path)
		
		# Extraction of the damaged archive must not succeed.
		_verify_extraction_failed(filename)
コード例 #4
0
ファイル: hardlink.py プロジェクト: 010001111/darling
def hard_link_in_cwd(filename):
	"""Two hard-linked names archived from '.' must share an inode after extraction."""
	with util.directory_created("hardlink_scratch") as scratch:
		with util.chdir(scratch):
			util.touch("a")
			os.link("a", "b")
			archive_path = os.path.join("..", "hardlink_archive.xar")
			extract_dir = os.path.join("..", "hardlink_extracted")
			with util.archive_created(archive_path, ".") as path:
				with util.directory_created(extract_dir) as extracted:
					subprocess.check_call(["xar", "-x", "-C", extracted, "-f", path])
					_assert_same_inodes(os.path.join(extracted, "a"),
					                    os.path.join(extracted, "b"))
コード例 #5
0
ファイル: data.py プロジェクト: P79N6A/apple-opensource
def _data_test(filename, *args, **kwargs):
	"""Generator helper for data tests.

	Yields a scratch directory for the caller to populate; on resumption,
	archives it, extracts it, and verifies the extracted tree matches.
	"""
	with util.directory_created("data_scratch") as scratch:
		# Hand control to the caller so it can create the test files.
		yield scratch
		with util.archive_created("data.xar", "./data_scratch", *args, **kwargs) as path:
			_process_toc(path)
			with util.directory_created("data_extracted") as extracted:
				subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])
				util.assert_identical_directories(scratch, os.path.join(extracted, "data_scratch"))
コード例 #6
0
ファイル: checksums.py プロジェクト: 010001111/darling
def broken_heap_default_checksum(filename):
	"""Corrupting the heap of a default-checksum archive must make extraction fail."""
	with util.archive_created(filename, "/bin") as path:
		# The heap begins after the header and TOC; the first 32 bytes of it
		# hold the default checksum (sha256-sized, per the original comment),
		# so start the damage just past that and clobber 100 bytes.
		heap_start = _get_header_size(path) + _get_toc_size(path) + 32
		_clobber_bytes_at(range(heap_start, heap_start + 100), path)
		
		# Extraction of the damaged archive must not succeed.
		_verify_extraction_failed(filename)
コード例 #7
0
def broken_toc_sha1_checksum(filename):
    """Corrupting the TOC of a sha1-checksummed archive must make extraction fail."""
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha1") as path:
        # Skip 4 bytes past the header before clobbering 100 bytes of TOC
        # (mirrors the original test; the reason for the 4 is unknown).
        damage_start = _get_header_size(path) + 4
        _clobber_bytes_at(range(damage_start, damage_start + 100), path)

        # Extraction of the damaged archive must not succeed.
        _verify_extraction_failed(filename)
コード例 #8
0
ファイル: hardlink.py プロジェクト: 010001111/darling
def hard_link_identical_files(filename):
	"""--link-same must hard-link two separate files with identical contents."""
	with util.directory_created("hardlink_scratch") as scratch:
		# Create two independent files with byte-identical contents.
		for name in ("a", "b"):
			with open(os.path.join(scratch, name), "w") as f:
				f.write("1234samecontent")
		with util.archive_created("hardlink_archive.xar", "hardlink_scratch", "--link-same") as path:
			with util.directory_created("hardlink_extracted") as extracted:
				subprocess.check_call(["xar", "-x", "-C", extracted, "-f", path])
				_assert_same_inodes(os.path.join(extracted, "hardlink_scratch", "a"),
				                    os.path.join(extracted, "hardlink_scratch", "b"))
コード例 #9
0
def md5_file_checksum_failure(filename):
    """xar must refuse to build an archive with md5 file checksums.

    The failing invocation surfaces as subprocess.CalledProcessError from
    util.archive_created; reaching the body of the with means xar
    unexpectedly succeeded.
    """
    try:
        with open("/dev/null", "a") as sink:
            with util.archive_created(filename, "/bin", "--file-cksum",
                                      "md5", stderr=sink) as path:
                raise AssertionError(
                    "xar succeeded when it should have failed")
    except subprocess.CalledProcessError:
        pass  # expected failure path
コード例 #10
0
def broken_heap_default_checksum(filename):
    """Corrupting heap data of a default-checksum archive must break extraction."""
    with util.archive_created(filename, "/bin") as path:
        # Damage starts past the header, the TOC, and the 32-byte checksum
        # stored at the front of the heap (sha256-sized, per the original
        # comment); clobber the following 100 bytes.
        damage_start = _get_header_size(path) + _get_toc_size(path) + 32
        _clobber_bytes_at(range(damage_start, damage_start + 100), path)

        # Extraction of the damaged archive must not succeed.
        _verify_extraction_failed(filename)
コード例 #11
0
ファイル: checksums.py プロジェクト: 010001111/darling
def default_checksum_algorithm(filename):
	"""The default TOC checksum algorithm reported by `xar --dump-header` is SHA1.

	Scans the header dump for the "Checksum algorithm" line and asserts the
	reported name is SHA1; fails if no such line appears at all.
	"""
	with util.archive_created(filename, "/bin") as path:
		header = subprocess.check_output(["xar", "--dump-header", "-f", path])
		# BUGFIX: the pattern was a non-raw string relying on invalid escape
		# sequences ("\s", "\d", "\w"), which modern Python flags; use a raw
		# string and compile once outside the loop.
		pattern = re.compile(r"^Checksum algorithm:\s+(\d+)\s+\((\w+)\)$")
		# NOTE(review): check_output returns bytes on Python 3 while this
		# matches a str pattern — presumably Python 2 code; confirm.
		found = False
		for line in header.splitlines():
			matchdata = pattern.match(line)
			if not matchdata:
				continue
			found = True
			algorithm = matchdata.group(2)
			assert algorithm == "SHA1", "unexpected checksum algorithm default: received {a}, expected SHA1".format(a=algorithm)
		assert found, "unexpected output from `xar --dump-header`:\n{h}".format(h=header)
コード例 #12
0
ファイル: integrity.py プロジェクト: Jean-Daniel/xar
def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):
    """Extraction must fail for every truncated prefix of the archive.

    Repeatedly chops `bytes_to_chop` bytes off the end of the archive and
    asserts `xar -x` exits non-zero each time, until the file is empty.
    `*args` is accepted for interface compatibility and unused.
    """
    with util.archive_created(filename, path_to_be_archived) as path:
        with open("/dev/null", "w") as bitbucket:
            size = os.stat(path).st_size
            while size > 0:
                size = max(0, size - bytes_to_chop)
                # BUGFIX: mode "w+" truncated the archive to zero bytes the
                # moment it was opened (and treated binary data as text), so
                # every iteration tested a zero-filled file rather than a
                # truncated archive. "rb+" keeps the remaining bytes so
                # truncate() genuinely shortens the archive.
                with open(path, "rb+") as f:
                    f.truncate(size)

                with util.directory_created("scratch") as directory:
                    returncode = subprocess.call(["xar", "-x", "-f", path, "-C", directory], stderr=bitbucket)
                    assert returncode != 0, "xar claimed to succeed when extracting a truncated archive"
コード例 #13
0
def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):
	"""Chop the archive down in `bytes_to_chop` steps; extraction must fail each time.

	`*args` is accepted for interface compatibility and unused.
	"""
	with util.archive_created(filename, path_to_be_archived) as path:
		with open("/dev/null", "w") as bitbucket:
			size = os.stat(path).st_size
			while size > 0:
				size = max(0, size - bytes_to_chop)
				# BUGFIX: "w+" zeroed the archive on open (text mode, too);
				# "rb+" preserves the remaining bytes so truncate() actually
				# produces a truncated archive.
				with open(path, "rb+") as f:
					f.truncate(size)
			
				with util.directory_created("scratch") as directory:
					returncode = subprocess.call(["xar", "-x", "-f", path, "-C", directory], stderr=bitbucket)
					assert returncode != 0, "xar claimed to succeed when extracting a truncated archive"
コード例 #14
0
def hard_link_in_cwd(filename):
    """Hard links archived from the current directory survive extraction.

    Creates "a" and a hard link "b" inside a scratch directory, archives
    '.', extracts, and asserts both names share one inode.
    """
    with util.directory_created("hardlink_scratch") as scratch:
        with util.chdir(scratch):
            util.touch("a")
            os.link("a", "b")
            archive_path = os.path.join("..", "hardlink_archive.xar")
            extract_dir = os.path.join("..", "hardlink_extracted")
            with util.archive_created(archive_path, ".") as path:
                with util.directory_created(extract_dir) as extracted:
                    subprocess.check_call(
                        ["xar", "-x", "-C", extracted, "-f", path])
                    _assert_same_inodes(os.path.join(extracted, "a"),
                                        os.path.join(extracted, "b"))
コード例 #15
0
def coalesce_heap(filename):
	"""Archive two identical copies of /bin/ls with --coalesce-heap, then extract.

	NOTE(review): the two heap offsets are unpacked but never asserted on —
	presumably a check that coalescing gave both files the same offset was
	intended; confirm against project history before adding it.
	"""
	with util.directory_created("scratch") as scratch:
		shutil.copy("/bin/ls", os.path.join(scratch, "ls"))
		shutil.copy(os.path.join(scratch, "ls"), os.path.join(scratch, "foo"))
		with util.chdir(scratch):
			with util.archive_created(os.path.join("..", "heap.xar"), ".", "--coalesce-heap") as path:
				# Look up the heap offsets of both archived files; this also
				# requires the archive to list at least two entries.
				offsets = _file_offsets_for_archive(path, os.path.join("..", "heap1.xsl"))
				(first_name, first_offset) = offsets[0]
				(second_name, second_offset) = offsets[1]
				
				# Extraction should complete without error.
				with util.directory_created("extracted") as extracted:
					subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])
コード例 #16
0
def hard_link_identical_files(filename):
    """--link-same must turn two content-identical files into hard links."""
    with util.directory_created("hardlink_scratch") as scratch:
        # Two independent files, byte-identical contents.
        for name in ("a", "b"):
            with open(os.path.join(scratch, name), "w") as f:
                f.write("1234samecontent")
        with util.archive_created("hardlink_archive.xar", "hardlink_scratch",
                                  "--link-same") as path:
            with util.directory_created("hardlink_extracted") as extracted:
                subprocess.check_call(
                    ["xar", "-x", "-C", extracted, "-f", path])
                _assert_same_inodes(
                    os.path.join(extracted, "hardlink_scratch", "a"),
                    os.path.join(extracted, "hardlink_scratch", "b"))
コード例 #17
0
def default_checksum_algorithm(filename):
    """The default TOC checksum algorithm shown by `xar --dump-header` is SHA1.

    Scans the header dump for the "Checksum algorithm" line and asserts the
    reported name is SHA1; fails if no such line is present.
    """
    with util.archive_created(filename, "/bin") as path:
        header = subprocess.check_output(["xar", "--dump-header", "-f", path])
        # BUGFIX: the pattern was a non-raw string relying on invalid escape
        # sequences ("\s", "\d", "\w"); use a raw string, compiled once.
        pattern = re.compile(r"^Checksum algorithm:\s+(\d+)\s+\((\w+)\)$")
        # NOTE(review): check_output returns bytes on Python 3 while this
        # matches a str pattern — presumably Python 2 code; confirm.
        found = False
        for line in header.splitlines():
            matchdata = pattern.match(line)
            if not matchdata:
                continue
            found = True
            algorithm = matchdata.group(2)
            assert algorithm == "SHA1", "unexpected checksum algorithm default: received {a}, expected SHA1".format(
                a=algorithm)
        assert found, "unexpected output from `xar --dump-header`:\n{h}".format(
            h=header)
コード例 #18
0
ファイル: checksums.py プロジェクト: 010001111/darling
def _verify_checksum_algorithm(filename, algorithm):
	"""Verify the TOC records file checksums with the requested algorithm.

	A falsy `algorithm` exercises xar's default, which is expected to be
	sha1. Raises AssertionError when no checksum element with the expected
	style attribute appears in `xar --dump-toc=-` output.
	"""
	additional_args = []
	if algorithm:
		additional_args = ["--file-cksum", algorithm]
	else:
		algorithm = "sha1"
	
	with util.archive_created(filename, "/bin", *additional_args) as path:
		toc = subprocess.check_output(["xar", "--dump-toc=-", "-f", path])
		# The for/else handles the not-found case; the old `found` flag was
		# dead code and has been removed.
		for line in toc.splitlines():
			if '<unarchived-checksum style="{a}">'.format(a=algorithm) in line or '<archived-checksum style="{a}">'.format(a=algorithm) in line:
				break
		else:
			raise AssertionError("unexpected output from `xar --dump-toc=-`:\n{t}".format(t=toc))
コード例 #19
0
def normal_heap(filename):
	"""Without --coalesce-heap, two copies of a file occupy ordered, distinct heap offsets."""
	with util.directory_created("scratch") as scratch:
		shutil.copy("/bin/ls", os.path.join(scratch, "ls"))
		shutil.copy(os.path.join(scratch, "ls"), os.path.join(scratch, "foo"))
		with util.chdir(scratch):
			with util.archive_created(os.path.join("..", "heap.xar"), ".") as path:
				# The first archived file's heap offset must precede the second's.
				offsets = _file_offsets_for_archive(path, os.path.join("..", "heap1.xsl"))
				(first_name, first_offset) = offsets[0]
				(last_name, last_offset) = offsets[1]
				
				assert first_offset < last_offset, "offset for first file \"{f1}\" ({o1}) greater than or equal to offset for last file \"{f2}\" ({o2})".format(f1=first_name, o1=first_offset, f2=last_name, o2=last_offset)
				
				# Extraction should complete without error.
				with util.directory_created("extracted") as extracted:
					subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])
コード例 #20
0
def _verify_checksum_algorithm(filename, algorithm):
    """Verify file checksums in the TOC use the requested algorithm.

    A falsy `algorithm` exercises xar's default, expected to be sha1.
    Raises AssertionError when no checksum element with the expected style
    attribute appears in `xar --dump-toc=-` output.
    """
    additional_args = []
    if algorithm:
        additional_args = ["--file-cksum", algorithm]
    else:
        algorithm = "sha1"

    with util.archive_created(filename, "/bin", *additional_args) as path:
        toc = subprocess.check_output(["xar", "--dump-toc=-", "-f", path])
        # Build the loop-invariant needles once; the for/else covers the
        # not-found case (the old `found` flag was dead code).
        unarchived = '<unarchived-checksum style="{a}">'.format(a=algorithm)
        archived = '<archived-checksum style="{a}">'.format(a=algorithm)
        for line in toc.splitlines():
            if unarchived in line or archived in line:
                break
        else:
            raise AssertionError(
                "unexpected output from `xar --dump-toc=-`:\n{t}".format(
                    t=toc))
コード例 #21
0
ファイル: compression.py プロジェクト: 010001111/darling
def _check_compression(filename, *args, **kwargs):
	"""Archive /bin with the given compression flags, extract, and compare trees."""
	with util.archive_created(filename, "/bin", *args, **kwargs) as path:
		with util.directory_created("extracted") as outdir:
			subprocess.check_call(["xar", "-x", "-f", path, "-C", outdir])
			util.assert_identical_directories("/bin", os.path.join(outdir, "bin"))
コード例 #22
0
def default_toc_checksum_validity(filename):
    """The default TOC checksum in the archive header must verify as sha1."""
    with util.archive_created(filename, "/bin") as archive_path:
        _verify_header_checksum(archive_path, "sha1")
コード例 #23
0
def sha512_toc_checksum_validity(filename):
    """--toc-cksum sha512 must yield a header checksum that verifies as sha512."""
    with util.archive_created(filename, "/bin", "--toc-cksum",
                              "sha512") as archive_path:
        _verify_header_checksum(archive_path, "sha512")
コード例 #24
0
ファイル: checksums.py プロジェクト: 010001111/darling
def sha512_toc_checksum_validity(filename):
	"""Header checksum of a --toc-cksum sha512 archive must verify as sha512."""
	with util.archive_created(filename, "/bin", "--toc-cksum", "sha512") as archive_path:
		_verify_header_checksum(archive_path, "sha512")
コード例 #25
0
ファイル: checksums.py プロジェクト: 010001111/darling
def default_toc_checksum_validity(filename):
	"""Header checksum of a default-options archive must verify as sha1."""
	with util.archive_created(filename, "/bin") as archive_path:
		_verify_header_checksum(archive_path, "sha1")