def setUp(self):
    pkg5unittest.SingleDepotTestCase.setUp(self)
    self.make_misc_files(self.misc_files)

    # We want at least one ELF file here to check for ELF action
    # verification.
    portable.copyfile("/usr/bin/ls",
        os.path.join(self.test_root, "ls"))

    self.pkgsend_bulk(self.rurl, self.foo10)

def seed_ta_dir(self, certs, dest_dir=None):
    if isinstance(certs, six.string_types):
        certs = [certs]
    if not dest_dir:
        dest_dir = self.ta_dir
    for c in certs:
        name = "{0}_cert.pem".format(c)
        portable.copyfile(
            os.path.join(self.raw_trust_anchor_dir, name),
            os.path.join(dest_dir, name))
        DebugValues["ssl_ca_file"] = os.path.join(dest_dir, name)

def seed_ta_dir(self, certs, dest_dir=None):
    if isinstance(certs, six.string_types):
        certs = [certs]
    if not dest_dir:
        dest_dir = self.ta_dir
    self.assertTrue(dest_dir)
    self.assertTrue(self.raw_trust_anchor_dir)
    for c in certs:
        name = "{0}_cert.pem".format(c)
        portable.copyfile(
            os.path.join(self.raw_trust_anchor_dir, name),
            os.path.join(dest_dir, name))

def __seed_ta_dir(self, certs, dest_dir=None):
    if isinstance(certs, basestring):
        certs = [certs]
    if not dest_dir:
        dest_dir = self.ta_dir
    self.assert_(dest_dir)
    self.assert_(self.raw_trust_anchor_dir)
    for c in certs:
        name = "%s_cert.pem" % c
        portable.copyfile(
            os.path.join(self.raw_trust_anchor_dir, name),
            os.path.join(dest_dir, name))

def seed_ta_dir(self, certs, dest_dir=None):
    if isinstance(certs, basestring):
        certs = [certs]
    if not dest_dir:
        dest_dir = self.ta_dir
    for c in certs:
        name = "%s_cert.pem" % c
        portable.copyfile(
            os.path.join(self.raw_trust_anchor_dir, name),
            os.path.join(dest_dir, name))
        DebugValues["ssl_ca_file"] = os.path.join(dest_dir, name)

def setUp(self):
    pkg5unittest.SingleDepotTestCase.setUp(self, start_depot=True)
    self.make_misc_files(self.misc_files)
    self.make_misc_files(self.misc_files2)
    self.make_misc_files(self.misc_ftpfile)

    # Grab a well known ELF file so it can be used for ELF tests.
    portable.copyfile("/usr/bin/ls",
        os.path.join(self.test_root, "ls"))

    self.plist = {}
    for p in self.pkgsend_bulk(self.rurl, (
            self.amber10, self.licensed13, self.dir10, self.file10,
            self.preserve10, self.preserve11, self.preserve12,
            self.driver10, self.driver_prep10, self.sysattr,
            self.sysattr_no_overlay, self.sysattr_o, self.gss,
            self.krb, self.pkg_dupfile, self.pkg_duplink,
            self.mismatched_attr, self.ftpd)):
        pfmri = fmri.PkgFmri(p)
        old_publisher = pfmri.publisher
        pfmri.publisher = None
        sfmri = pfmri.get_short_fmri().replace("pkg:/", "")
        self.plist[sfmri] = pfmri
        pfmri.publisher = old_publisher

def test_sysattrs(self):
    """Test that system attributes are verified correctly."""

    if portable.osname != "sunos":
        raise pkg5unittest.TestSkippedException(
            "System attributes unsupported on this platform.")

    self.pkgsend_bulk(self.rurl, [self.sysattr, self.sysattr2])

    # Need to create an image in /var/tmp since sysattrs don't work
    # in tmpfs.
    old_img_path = self.img_path()
    self.set_img_path(tempfile.mkdtemp(prefix="test-suite",
        dir="/var/tmp"))

    self.image_create(self.rurl)
    self.pkg("install sysattr sysattr2")
    self.pkg("verify")
    fpath = os.path.join(self.img_path(), "p1/bobcat")

    # Need to get creative here to remove the system attributes
    # since you need the sys_linkdir privilege which we don't have:
    # see run.py:393
    # So we re-create the file with correct owner and mode and the
    # only thing missing are the sysattrs.
    portable.remove(fpath)
    portable.copyfile(os.path.join(self.test_root, "bobcat"), fpath)
    os.chmod(fpath, 0o555)
    os.chown(fpath, -1, 2)

    self.pkg("verify", exit=1)
    for sattr in ('H', 'S'):
        expected = "System attribute '{0}' not set".format(sattr)
        self.assertTrue(expected in self.output,
            "Missing in verify output: {0}".format(expected))

    shutil.rmtree(self.img_path())
    self.set_img_path(old_img_path)

def __test_offline_fix(self, configure_cb, offline_cb, online_cb):
    """Private helper function for ensuring that offline operation
    is supported for 'pkg fix' when no package data retrieval is
    required."""

    # If only the attributes are wrong and the file content has not
    # been modified locally, fix doesn't need to download the file
    # data.

    # Test the system attribute.
    # Need to create an image in /var/tmp since sysattrs don't work
    # in tmpfs.
    old_img_path = self.img_path()
    self.set_img_path(tempfile.mkdtemp(prefix="test-suite",
        dir="/var/tmp"))
    self.image_create(self.durl)
    configure_cb()

    self.pkg("install sysattr")
    self.pkg("verify")
    fpath = os.path.join(self.img_path(), "amber1")

    # Need to get creative here to remove the system attributes
    # since you need the sys_linkdir privilege which we don't have:
    # see run.py:393
    # So we re-create the file with correct owner and mode and the
    # only thing missing are the sysattrs.
    portable.remove(fpath)
    portable.copyfile(os.path.join(self.test_root, "amber1"), fpath)
    os.chmod(fpath, 0o555)
    os.chown(fpath, -1, 2)
    self.pkg("verify", exit=1)

    # Make the repository offline.
    offline_cb()

    # If only attributes on a file are wrong, pkg fix still
    # succeeds even if the repository is offline.
    self.pkg("fix sysattr")
    self.pkg("verify")
    online_cb()
    self.image_destroy()

    # Test other attributes: mode, owner, group and timestamp.
    self.image_create(self.durl)
    configure_cb()
    for p in ("[email protected]", "[email protected]", "[email protected]",
            "[email protected]", "[email protected]", "[email protected]"):
        pfmri = self.plist[p]
        self.pkg("install {0}".format(pfmri))
        offline_cb()
        self.__do_alter_verify(pfmri, parsable=True)
        self.pkg("verify --parsable=0 {0}".format(pfmri))
        self.pkg("uninstall {0}".format(pfmri))
        online_cb()

    # If we modify both the file content and its attributes locally,
    # fix doesn't need to download the file data for an editable
    # file delivered with preserve=true.
    pfmri = self.plist["[email protected]"]
    self.pkg("install {0}".format(pfmri))
    self.file_append("amber1", "junk")
    offline_cb()
    self.__do_alter_verify(pfmri, verbose=True)
    self.pkg("uninstall {0}".format(pfmri))
    online_cb()

    # For editable files delivered with preserve=renamenew or
    # preserve=renameold, and for non-editable files, fix needs to
    # download the file data.
    for p in ("[email protected]", "[email protected]", "[email protected]"):
        pfmri = self.plist[p]
        self.pkg("install {0}".format(pfmri))
        self.file_append("amber1", "junk")
        offline_cb()
        self.__do_alter_verify(pfmri, verbose=True, exit=1)
        self.pkg("uninstall {0}".format(pfmri))
        online_cb()

    # Prepare for next test iteration.
    self.image_destroy()

def test_05_invalid(self): """Verify that pkg(7) archive class handles broken archives and items that aren't archives as expected.""" arc_path = os.path.join(self.test_root, "nosucharchive.p5p") # # Check that no archive is handled. # self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") # # Check that empty archive file is handled. # arc_path = os.path.join(self.test_root, "retrieve.p5p") open(arc_path, "wb").close() self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path) # # Check that invalid archive file is handled. # with open(arc_path, "w") as f: f.write("not_a_valid_archive") self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path) # # Check that a truncated archive is handled. # repo = self.get_repo(self.dc.get_repodir()) arc = pkg.p5p.Archive(arc_path, mode="w") arc.add_repo_package(self.foo, repo) arc.add_repo_package(self.signed, repo) arc.add_repo_package(self.quux, repo) arc.close() # # Check that truncated archives, or archives with invalid # indexes are handled as expected. # # Determine where to truncate archive by looking for specific # package file and then setting truncate location to halfway # through data for file. arc = ptf.PkgTarFile(name=arc_path, mode="r") idx_data_offset = 0 src_offset = 0 src_bytes = 0 dest_offset = 0 trunc_sz = 0 src_fhash = "b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e" dest_fhash = "801eebbfe8c526bf092d98741d4228e4d0fc99ae" for m in arc.getmembers(): if m.name.endswith("/" + dest_fhash): dest_offset = m.offset trunc_sz = m.offset_data + int(m.size // 2) elif m.name.endswith("pkg5.index.0.gz"): idx_data_offset = m.offset_data elif m.name.endswith("/" + src_fhash): # Calculate size of source entry. src_bytes = m.offset_data - m.offset blocks, rem = divmod(m.size, tf.BLOCKSIZE) if rem > 0: blocks += 1 src_bytes += blocks * tf.BLOCKSIZE src_offset = m.offset arc.close() # Test truncated archive case. bad_arc_path = os.path.join(self.test_root, "bad_arc.p5p") portable.copyfile(arc_path, bad_arc_path) self.debug("{0} size: {1:d} truncate: {2:d}".format( arc_path, os.stat(arc_path).st_size, trunc_sz)) with open(bad_arc_path, "ab+") as f: f.truncate(trunc_sz) ext_dir = os.path.join(self.test_root, "extracted") shutil.rmtree(ext_dir, True) arc = pkg.p5p.Archive(bad_arc_path, mode="r") self.assertRaisesStringify(pkg.p5p.InvalidArchive, arc.extract_package_files, [dest_fhash], ext_dir, pub="test2") arc.close() # Test archive with invalid index; do this by writing some bogus # bytes into the data area for the index. portable.copyfile(arc_path, bad_arc_path) with open(bad_arc_path, "ab+") as dest: dest.seek(idx_data_offset) dest.truncate() with open(arc_path, "rb") as src: bogus_data = b"invalid_index_data" dest.write(bogus_data) src.seek(idx_data_offset + len(bogus_data)) dest.write(src.read()) shutil.rmtree(ext_dir, True) self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, bad_arc_path, mode="r") # Test archive with invalid index offsets; do this by truncating # an existing archive at the offset of one of its files and then # appending the data for a different archive member in its # place. 
portable.copyfile(arc_path, bad_arc_path) with open(bad_arc_path, "ab+") as dest: dest.seek(dest_offset) dest.truncate() with open(arc_path, "rb") as src: src.seek(src_offset) dest.write(src.read(src_bytes)) shutil.rmtree(ext_dir, True) arc = pkg.p5p.Archive(bad_arc_path, mode="r") self.assertRaisesStringify(pkg.p5p.InvalidArchive, arc.extract_package_files, [dest_fhash], ext_dir, pub="test2") arc.close() os.unlink(arc_path) os.unlink(bad_arc_path) # # Check that directory where archive expected is handled. # os.mkdir(arc_path) self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.rmdir(arc_path) # Temporarily change the current archive version and create a # a new archive, and then verify that the expected exception is # raised when an attempt to read it is made. orig_ver = pkg.p5p.Archive.CURRENT_VERSION try: pkg.p5p.Archive.CURRENT_VERSION = 99 # EVIL arc = pkg.p5p.Archive(arc_path, mode="w") arc.close() finally: # Ensure this is reset to the right value. pkg.p5p.Archive.CURRENT_VERSION = orig_ver self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path)
def test_02_add_package(self): """Verify that pkg(7) archive creation using add_package() works as expected. """ # Get repository. repo = self.get_repo(self.dc.get_repodir()) # Create a directory and copy package files from repository to # it (this is how pkgrecv stores content during republication # or when using --raw). dfroot = os.path.join(self.test_root, "pfiles") os.mkdir(dfroot, pkg.misc.PKG_DIR_MODE) foo_path = os.path.join(dfroot, "foo.p5m") portable.copyfile(repo.manifest(self.foo), foo_path) signed_path = os.path.join(dfroot, "signed.p5m") portable.copyfile(repo.manifest(self.signed), signed_path) quux_path = os.path.join(dfroot, "quux.p5m") portable.copyfile(repo.manifest(self.quux), quux_path) for rstore in repo.rstores: for dirpath, dirnames, filenames in os.walk(rstore.file_root): if not filenames: continue for f in filenames: portable.copyfile(os.path.join(dirpath, f), os.path.join(dfroot, f)) # Prep the archive. progtrack = pkg.client.progress.QuietProgressTracker() arc_path = os.path.join(self.test_root, "add_package.p5p") arc = pkg.p5p.Archive(arc_path, mode="w") # Create an archive with just one package. arc.add_package(self.foo, foo_path, dfroot) arc.close(progtrack=progtrack) # Verify the result. arc = ptf.PkgTarFile(name=arc_path, mode="r") expected = self.foo_expected actual = [m.name for m in arc.getmembers()] self.assertEqualDiff(expected, actual) # Prep a new archive. os.unlink(arc_path) arc = pkg.p5p.Archive(arc_path, mode="w") # Create an archive with multiple packages. # (Don't use progtrack this time.) arc.add_package(self.foo, foo_path, dfroot) arc.add_package(self.signed, signed_path, dfroot) arc.add_package(self.quux, quux_path, dfroot) arc.close() # Verify the result. arc = ptf.PkgTarFile(name=arc_path, mode="r") expected = self.multi_expected[:] action_certs = [ self.calc_pem_hash(t) for t in ( os.path.join(self.cs_dir, "cs1_ch5_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch1_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch2_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch3_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch4_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch5_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch1_ta3_cert.pem"), ) ] for hsh in action_certs: d = "publisher/test/file/{0}".format(hsh[0:2]) f = "{0}/{1}".format(d, hsh) expected.append(d) expected.append(f) actual = sorted(m.name for m in arc.getmembers()) self.assertEqualDiff(sorted(set(expected)), actual) os.unlink(arc_path) os.unlink(foo_path) os.unlink(quux_path) os.unlink(signed_path)
def setUp(self): pkg5unittest.ManyDepotTestCase.setUp( self, ["test", "test", "empty", "test2"]) self.make_misc_files(self.misc_files) # First repository will contain all packages. self.all_rurl = self.dcs[1].get_repo_url() # Second repository will contain only foo. self.foo_rurl = self.dcs[2].get_repo_url() # Third will be empty. self.empty_rurl = self.dcs[3].get_repo_url() # Fourth will be for license packages only. self.licensed_rurl = self.dcs[4].get_repo_url() # Setup base test paths. self.path_to_certs = os.path.join(self.ro_data_root, "signing_certs", "produced") self.keys_dir = os.path.join(self.path_to_certs, "keys") self.cs_dir = os.path.join(self.path_to_certs, "code_signing_certs") self.chain_certs_dir = os.path.join(self.path_to_certs, "chain_certs") self.raw_trust_anchor_dir = os.path.join(self.path_to_certs, "trust_anchors") self.crl_dir = os.path.join(self.path_to_certs, "crl") # Publish packages. plist = self.__publish_packages(self.all_rurl) # Copy foo to second repository. self.pkgrecv(self.all_rurl, "-d {0} foo".format(self.foo_rurl)) # Now create a package archive containing all packages, and # then one for each. repo = self.dcs[1].get_repo() self.all_arc = self.__archive_packages("all_pkgs.p5p", repo, plist) for alist in ([plist[0]], [plist[1], plist[2]], [plist[3]], [plist[4], plist[5]]): arc_path = self.__archive_packages( "{0}.p5p".format(alist[0].pkg_name), repo, alist) setattr(self, "{0}_arc".format(alist[0].pkg_name), arc_path) self.ta_dir = None # Copy an archive and set its permissions to 0000 for testing # unprivileged user access attempts. self.perm_arc = os.path.join(self.test_root, "noaccess.p5p") portable.copyfile(self.foo_arc, self.perm_arc) os.chmod(self.perm_arc, 0) # Create an empty archive. arc_path = os.path.join(self.test_root, "empty.p5p") arc = pkg.p5p.Archive(arc_path, mode="w") arc.close() self.empty_arc = arc_path # Store FMRIs for later use. self.foo10 = plist[0] self.incorp10 = plist[1] self.incorp20 = plist[2] self.signed10 = plist[3] self.quux01 = plist[4] self.quux10 = plist[5] # Handle license package specially. self.licensed10 = self.pkgsend_bulk(self.licensed_rurl, self.licensed_pkg)[0] self.licensed20 = self.pkgsend_bulk(self.licensed_rurl, self.licensed_pkg_2)[0]
def test_fix_overlay(self): """Test that pkg verify / fix should tell the users to look at the overlaying package in the error message if fix won't repair the overlaid package.""" file_path = "etc/gss/mech" file_path_1 = "etc/gss/mech_1" self.image_create(self.rurl) pfmri_gss = self.plist["[email protected]"] pfmri_krb = self.plist["[email protected]"] pfmri_sysattr = self.plist["[email protected]"] pfmri_sysattr_o = self.plist["[email protected]"] # First, only install the package that has a file with # attribute overlay=allow. self.pkg("install gss") self.file_exists(file_path) self.file_remove(file_path) self.file_doesnt_exist(file_path) # Verify should report an error if the file is missing. self.pkg("verify -v gss", exit=1) # Fix should be able to repair the file. self.pkg("fix -v gss") self.file_exists(file_path) self.__do_alter_verify(pfmri_gss) # Install the overlaying package. self.pkg("install krb5") self.file_exists(file_path) self.file_remove(file_path) self.file_doesnt_exist(file_path) # Now pkg verify should still report an error on the overlaid # package and tell the users to verify the overlaying package. self.pkg("verify gss", exit=1) self.assertTrue("package: {0}".format( pfmri_krb.get_pkg_stem(anarchy=True)) in self.output) # Verify should report an error on the overlaying package. self.pkg("verify krb5", exit=1) # Fix won't repair the overlaid package but will tell the users # to fix the overlaying package in the verbose mode. self.pkg("fix gss", exit=4) self.pkg("fix -v gss", exit=4) self.assertTrue("Could not repair: {0}".format(pfmri_gss) in self.output) self.assertTrue("package: {0}".format( pfmri_krb.get_pkg_stem(anarchy=True)) in self.output) self.file_doesnt_exist(file_path) # Fix should be able to repair the file by fixing the overlaying # package. self.pkg("fix -v pkg:/krb5") self.pkg("verify gss") self.file_exists(file_path) # Test that multiple overlaid files are missing. self.file_remove(file_path) self.file_remove(file_path_1) self.pkg("verify gss", exit=1) # Test that the overlay warning only emits once for each # package. self.pkg("verify gss | grep 'verify or fix' | wc -l | grep 1") self.pkg("fix krb5") # Test the owner, group and mode change. self.__do_alter_verify(pfmri_gss, verbose=True, exit=4) self.assertTrue("Could not repair: {0}".format(pfmri_gss) in self.output) self.assertTrue("package: {0}".format( pfmri_krb.get_pkg_stem(anarchy=True)) in self.output) self.__do_alter_verify(pfmri_krb, verbose=True) # Test that verify / fix on system wide could report / fix the # error on the overlaid and overlaying packges. self.file_remove(file_path) self.pkg("verify", exit=1) # Test that verify / fix on all packages should not emit the # overlaying warning. self.assertTrue("verify or fix" not in self.output) self.pkg("fix") self.assertTrue("verify or fix" not in self.output) self.pkg("verify") self.file_exists(file_path) # Test different file types install. Since fix will repair the # overlaid package in this case, we don't need to tell the users # to look at the overlaying package. self.pkg("-D broken-conflicting-action-handling=1 install " "dupfile duplink") self.pkg("verify dupfile", exit=1) self.pkg("fix dupfile") self.pkg("verify dupfile") # Test overlaid package that contains system attribute error. self.set_img_path(tempfile.mkdtemp(prefix="test-suite", dir="/var/tmp")) self.image_create(self.rurl) self.pkg("install sysattr") fpath = os.path.join(self.img_path(), "amber1") # Install the overlaying package. 
self.pkg("install sysattr_overlay") portable.remove(fpath) portable.copyfile(os.path.join(self.test_root, "amber1"), fpath) os.chmod(fpath, 0o555) os.chown(fpath, -1, 2) self.pkg("verify sysattr", exit=1) self.pkg("fix -v sysattr", exit=4) self.assertTrue("Could not repair: {0}".format(pfmri_sysattr) in self.output, self.plist) self.assertTrue("package: {0}".format( pfmri_sysattr_o.get_pkg_stem(anarchy=True)) in self.output) self.pkg("fix sysattr_overlay") self.pkg("verify sysattr") self.image_destroy()
def add_file(self, f, basename=None, size=None):
    """Adds the file to the Transaction."""

    # If basename provided, just store the file as-is with the
    # basename.
    if basename:
        fileneeded = True
        try:
            dst_path = self.rstore.file(basename)
            fileneeded = False
        except Exception as e:
            dst_path = os.path.join(self.dir, basename)
        if not fileneeded:
            return

        if isinstance(f, six.string_types):
            portable.copyfile(f, dst_path)
            return

        bufsz = 128 * 1024
        if bufsz > size:
            bufsz = size
        with open(dst_path, "wb") as wf:
            while True:
                data = f.read(bufsz)
                # data is bytes
                if data == b"":
                    break
                wf.write(data)
        return

    hashes, data = misc.get_data_digest(f, length=size,
        return_content=True, hash_attrs=digest.DEFAULT_HASH_ATTRS,
        hash_algs=digest.HASH_ALGS)

    if size is None:
        size = len(data)

    fname = None
    try:
        # We don't have an Action yet, so passing None is fine.
        default_hash_attr = digest.get_least_preferred_hash(None)[0]
        fname = hashes[default_hash_attr]
        dst_path = self.rstore.file(fname)
    except Exception as e:
        # The specific exception can't be named here due
        # to the cyclic dependency between this class
        # and the repository class.
        if getattr(e, "data", "") != fname:
            raise
        dst_path = None

    misc.compute_compressed_attrs(fname, dst_path, data, size,
        self.dir, chash_attrs=digest.DEFAULT_CHASH_ATTRS,
        chash_algs=digest.CHASH_ALGS)
    self.remaining_payload_cnt -= 1

def append(self, rstore, client_release, pfmri):
    self.rstore = rstore
    self.append_trans = True

    if client_release is None:
        raise TransactionOperationError(client_release=None,
            pfmri=pfmri)
    if pfmri is None:
        raise TransactionOperationError(pfmri=None)

    if not isinstance(pfmri, six.string_types):
        pfmri = str(pfmri)

    self.client_release = client_release
    self.pkg_name = pfmri
    self.esc_pkg_name = quote(pfmri, "")

    # attempt to construct an FMRI object
    try:
        self.fmri = fmri.PkgFmri(self.pkg_name, self.client_release)
    except fmri.FmriError as e:
        raise TransactionOperationError(e)

    # Version and timestamp are required for appending.
    if self.fmri.version is None or not self.fmri.get_timestamp():
        raise TransactionOperationError(fmri_version=None,
            pfmri=pfmri)

    # Ensure that the FMRI has been fully qualified with publisher
    # information or apply the default if appropriate.
    if not self.fmri.publisher:
        default_pub = rstore.publisher
        if not default_pub:
            # A publisher is required.
            raise TransactionOperationError(
                publisher_required=True, pfmri=pfmri)

        self.fmri.publisher = default_pub
        pkg_name = self.pkg_name
        pub_string = "pkg://{0}/".format(default_pub)
        if not pkg_name.startswith("pkg:/"):
            pkg_name = pub_string + pkg_name
        else:
            pkg_name = pkg_name.replace("pkg:/", pub_string)

        self.pkg_name = pkg_name
        self.esc_pkg_name = quote(pkg_name, "")

    # record transaction metadata: opening_time, package, user
    self.open_time = self.fmri.get_timestamp()

    # Strip the timestamp information for consistency with
    # the case where it was not specified.
    self.pkg_name = ":".join(pfmri.split(":")[:-1])
    self.esc_pkg_name = quote(self.pkg_name, "")

    if not rstore.valid_append_fmri(self.fmri):
        raise TransactionOperationError(missing_fmri=True,
            pfmri=self.fmri)

    trans_basename = self.get_basename()
    self.dir = os.path.join(rstore.trans_root, trans_basename)

    try:
        os.makedirs(self.dir, misc.PKG_DIR_MODE)
    except EnvironmentError as e:
        if e.errno == errno.EEXIST:
            raise TransactionAlreadyOpenError(trans_basename)
        raise TransactionOperationError(e)

    # Record that this is an append operation so that it can be
    # reopened correctly.
    open(os.path.join(self.dir, "append"), "wb").close()

    # copy in existing manifest, then open it for appending.
    portable.copyfile(rstore.manifest(self.fmri),
        os.path.join(self.dir, "manifest"))

def test_06_download(self):
    """Test that pkg fix won't try to download all data for files
    that fail verification when the data is not going to be used."""

    # If only the attributes are wrong and the file content has not
    # been modified locally, fix doesn't need to download the file
    # data.

    # Test the system attribute.
    # Need to create an image in /var/tmp since sysattrs don't work
    # in tmpfs.
    old_img_path = self.img_path()
    self.set_img_path(tempfile.mkdtemp(prefix="test-suite",
        dir="/var/tmp"))
    self.image_create(self.durl)

    self.pkg("install sysattr")
    self.pkg("verify")
    fpath = os.path.join(self.img_path(), "amber1")

    # Need to get creative here to remove the system attributes
    # since you need the sys_linkdir privilege which we don't have:
    # see run.py:393
    # So we re-create the file with correct owner and mode and the
    # only thing missing are the sysattrs.
    portable.remove(fpath)
    portable.copyfile(os.path.join(self.test_root, "amber1"), fpath)
    os.chmod(fpath, 0o555)
    os.chown(fpath, -1, 2)
    self.pkg("verify", exit=1)

    # Make the repository offline.
    self.dc.stop()

    # If only attributes on a file are wrong, pkg fix still
    # succeeds even if the repository is offline.
    self.pkg("fix sysattr")
    self.pkg("verify")
    self.dc.start()
    shutil.rmtree(self.img_path())

    # Test other attributes: mode, owner, group and timestamp.
    self.image_create(self.durl)
    for p in ("[email protected]", "[email protected]", "[email protected]",
            "[email protected]", "[email protected]", "[email protected]"):
        pfmri = self.plist[p]
        self.pkg("install {0}".format(pfmri))
        self.dc.stop()
        self.__do_alter_verify(pfmri, parsable=True)
        self.pkg("verify --parsable=0 {0}".format(pfmri))
        self.pkg("uninstall {0}".format(pfmri))
        self.dc.start()

    # If we modify both the file content and its attributes locally,
    # fix doesn't need to download the file data for an editable
    # file delivered with preserve=true.
    pfmri = self.plist["[email protected]"]
    self.pkg("install {0}".format(pfmri))
    self.file_append("amber1", "junk")
    self.dc.stop()
    self.__do_alter_verify(pfmri, verbose=True)
    self.pkg("uninstall {0}".format(pfmri))
    self.dc.start()

    # For editable files delivered with preserve=renamenew or
    # preserve=renameold, and for non-editable files, fix needs to
    # download the file data.
    for p in ("[email protected]", "[email protected]", "[email protected]"):
        pfmri = self.plist[p]
        self.pkg("install {0}".format(pfmri))
        self.file_append("amber1", "junk")
        self.dc.stop()
        self.__do_alter_verify(pfmri, verbose=True, exit=1)
        self.pkg("uninstall {0}".format(pfmri))
        self.dc.start()

def test_fix_overlay(self): """Test that pkg verify / fix should tell the users to look at the overlaying package in the error message if fix won't repair the overlaid package.""" file_path = "etc/gss/mech" file_path_1 = "etc/gss/mech_1" self.image_create(self.rurl) pfmri_gss = self.plist["[email protected]"] pfmri_krb = self.plist["[email protected]"] pfmri_sysattr = self.plist["[email protected]"] pfmri_sysattr_o = self.plist["[email protected]"] # First, only install the package that has a file with # attribute overlay=allow. self.pkg("install gss") # Path verification should report ok. self.pkg("verify -v -p {0}".format(file_path)) self.assertTrue("OK" in self.output and file_path not in self.output and pfmri_gss.get_pkg_stem() in self.output) self.file_exists(file_path) self.file_remove(file_path) self.file_doesnt_exist(file_path) # Verify should report an error if the file is missing. self.pkg("verify -v gss", exit=1) # Path verification should report error. self.pkg("verify -v -p {0}".format(file_path), exit=1) self.assertTrue("OK" not in self.output and "ERROR" in self.output) self.assertTrue(file_path in self.output and \ pfmri_gss.get_pkg_stem() in self.output) # Fix should be able to repair the file. self.pkg("fix -v gss") self.file_exists(file_path) # On-disk action attributes should be changed and should be # fixed. self.__do_alter_verify(pfmri_gss, exit=0) # Install the overlaying package. self.pkg("install krb5") # Path verification should report ok for both the overlaid package # and the overlaying package. self.pkg("verify -v -p {0}".format(file_path)) self.assertTrue( self.output.count("OK") == 2 and "ERROR" not in self.output) self.assertTrue(pfmri_krb.get_pkg_stem() in self.output and pfmri_gss.get_pkg_stem() in self.output) self.pkg("verify -v -p {0} gss".format(file_path)) self.assertTrue( self.output.count("OK") == 2 and "ERROR" not in self.output) self.assertTrue(pfmri_krb.get_pkg_stem() in self.output and pfmri_gss.get_pkg_stem() in self.output) self.file_exists(file_path) self.file_remove(file_path) self.file_doesnt_exist(file_path) # Path verification should report error for both the overlaid package # and the overlaying package. self.pkg("verify -v -p {0}".format(file_path), exit=1) self.assertTrue("OK" not in self.output and self.output.count("ERROR") == 4) self.assertTrue(pfmri_krb.get_pkg_stem() in self.output and pfmri_gss.get_pkg_stem() in self.output) self.pkg("verify -v -p {0} gss".format(file_path), exit=1) self.assertTrue("OK" not in self.output and self.output.count("ERROR") == 4) self.assertTrue(pfmri_krb.get_pkg_stem() in self.output and pfmri_gss.get_pkg_stem() in self.output) # Now pkg verify should still report an error on the overlaid # package and tell the users it is from the overlaying package. self.pkg("verify gss", exit=1) self.assertTrue("from {0}".format(pfmri_krb.get_pkg_stem( anarchy=True)) in self.output) # Verify should report an error on the overlaying package. self.pkg("verify krb5", exit=1) # Fix will fix the overlaying action (from krb5) for gss # directly. self.pkg("fix gss") self.file_exists(file_path) # Fix has fixed that one before. self.pkg("fix -v pkg:/krb5", exit=4) self.pkg("verify gss") self.file_exists(file_path) # Test that multiple overlaid files are missing. self.file_remove(file_path) self.file_remove(file_path_1) self.pkg("verify gss", exit=1) self.pkg("fix krb5") # Test the owner, group and mode change. 
    self.__do_alter_verify(pfmri_gss, verbose=True)
    self.__do_alter_verify(pfmri_krb, verbose=True)

    # Test that a system-wide verify / fix reports / fixes the
    # error on both the overlaid and overlaying packages.
    self.file_remove(file_path)
    self.pkg("verify", exit=1)
    self.assertTrue("(from " in self.output)
    self.pkg("fix")
    self.assertTrue("(from " in self.output)
    self.pkg("verify")
    self.file_exists(file_path)

    # Test different file types install. Since fix will repair the
    # overlaid package in this case, we don't need to tell the users
    # to look at the overlaying package.
    self.pkg("-D broken-conflicting-action-handling=1 install "
        "dupfile duplink")
    self.pkg("verify dupfile", exit=1)
    self.pkg("fix dupfile")
    self.pkg("verify dupfile")

    # Test overlaid package that contains system attribute error.
    self.set_img_path(tempfile.mkdtemp(prefix="test-suite",
        dir="/var/tmp"))
    self.image_create(self.rurl)
    self.pkg("install sysattr")
    fpath = os.path.join(self.img_path(), "amber1")

    # Install the overlaying package.
    self.pkg("install sysattr_overlay")
    portable.remove(fpath)
    portable.copyfile(os.path.join(self.test_root, "amber1"), fpath)
    os.chmod(fpath, 0o555)
    os.chown(fpath, -1, 2)
    self.pkg("verify sysattr", exit=1)
    self.pkg("fix -v sysattr")
    self.assertTrue("from {0}".format(pfmri_sysattr_o.get_pkg_stem(
        anarchy=True)) in self.output)
    self.pkg("fix sysattr_overlay", exit=4)
    self.pkg("verify sysattr")
    self.image_destroy()

def setUp(self): pkg5unittest.ManyDepotTestCase.setUp(self, ["test", "test", "empty", "test2"]) self.make_misc_files(self.misc_files) # First repository will contain all packages. self.all_rurl = self.dcs[1].get_repo_url() # Second repository will contain only foo. self.foo_rurl = self.dcs[2].get_repo_url() # Third will be empty. self.empty_rurl = self.dcs[3].get_repo_url() # Fourth will be for license packages only. self.licensed_rurl = self.dcs[4].get_repo_url() # Setup base test paths. self.path_to_certs = os.path.join(self.ro_data_root, "signing_certs", "produced") self.keys_dir = os.path.join(self.path_to_certs, "keys") self.cs_dir = os.path.join(self.path_to_certs, "code_signing_certs") self.chain_certs_dir = os.path.join(self.path_to_certs, "chain_certs") self.raw_trust_anchor_dir = os.path.join(self.path_to_certs, "trust_anchors") self.crl_dir = os.path.join(self.path_to_certs, "crl") # Publish packages. plist = self.__publish_packages(self.all_rurl) # Copy foo to second repository. self.pkgrecv(self.all_rurl, "-d %s foo" % self.foo_rurl) # Now create a package archive containing all packages, and # then one for each. repo = self.dcs[1].get_repo() self.all_arc = self.__archive_packages("all_pkgs.p5p", repo, plist) for alist in ([plist[0]], [plist[1], plist[2]], [plist[3]], [plist[4], plist[5]]): arc_path = self.__archive_packages( "%s.p5p" % alist[0].pkg_name, repo, alist) setattr(self, "%s_arc" % alist[0].pkg_name, arc_path) self.ta_dir = None # Copy an archive and set its permissions to 0000 for testing # unprivileged user access attempts. self.perm_arc = os.path.join(self.test_root, "noaccess.p5p") portable.copyfile(self.foo_arc, self.perm_arc) os.chmod(self.perm_arc, 0) # Create an empty archive. arc_path = os.path.join(self.test_root, "empty.p5p") arc = pkg.p5p.Archive(arc_path, mode="w") arc.close() self.empty_arc = arc_path # Store FMRIs for later use. self.foo10 = plist[0] self.incorp10 = plist[1] self.incorp20 = plist[2] self.signed10 = plist[3] self.quux01 = plist[4] self.quux10 = plist[5] # Handle license package specially. self.licensed10 = self.pkgsend_bulk(self.licensed_rurl, self.licensed_pkg)[0] self.licensed20 = self.pkgsend_bulk(self.licensed_rurl, self.licensed_pkg_2)[0]
    try:
        os.makedirs(self.dir, misc.PKG_DIR_MODE)
    except EnvironmentError, e:
        if e.errno == errno.EEXIST:
            raise TransactionAlreadyOpenError(trans_basename)
        raise TransactionOperationError(e)

    # Record that this is an append operation so that it can be
    # reopened correctly.
    with open(os.path.join(self.dir, "append"), "wb") as fh:
        pass

    # copy in existing manifest, then open it for appending.
    portable.copyfile(rstore.manifest(self.fmri),
        os.path.join(self.dir, "manifest"))

def reopen(self, rstore, trans_dir):
    """The reopen() method is invoked by the repository as needed to
    load Transaction data."""

    self.rstore = rstore
    try:
        open_time_str, self.esc_pkg_name = \
            os.path.basename(trans_dir).split("_", 1)
    except ValueError:
        raise TransactionUnknownIDError(os.path.basename(trans_dir))

    self.open_time = \
        datetime.datetime.utcfromtimestamp(int(open_time_str))

def test_05_invalid(self): """Verify that pkg(5) archive class handles broken archives and items that aren't archives as expected.""" arc_path = os.path.join(self.test_root, "nosucharchive.p5p") # # Check that no archive is handled. # self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") # # Check that empty archive file is handled. # arc_path = os.path.join(self.test_root, "retrieve.p5p") open(arc_path, "wb").close() self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path) # # Check that invalid archive file is handled. # with open(arc_path, "wb") as f: f.write("not_a_valid_archive") self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path) # # Check that a truncated archive is handled. # repo = self.get_repo(self.dc.get_repodir()) arc = pkg.p5p.Archive(arc_path, mode="w") arc.add_repo_package(self.foo, repo) arc.add_repo_package(self.signed, repo) arc.add_repo_package(self.quux, repo) arc.close() # # Check that truncated archives, or archives with invalid # indexes are handled as expected. # # Determine where to truncate archive by looking for specific # package file and then setting truncate location to halfway # through data for file. arc = ptf.PkgTarFile(name=arc_path, mode="r") idx_data_offset = 0 src_offset = 0 src_bytes = 0 dest_offset = 0 trunc_sz = 0 src_fhash = "b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e" dest_fhash = "801eebbfe8c526bf092d98741d4228e4d0fc99ae" for m in arc.getmembers(): if m.name.endswith("/" + dest_fhash): dest_offset = m.offset trunc_sz = m.offset_data + int(m.size / 2) elif m.name.endswith("pkg5.index.0.gz"): idx_data_offset = m.offset_data elif m.name.endswith("/" + src_fhash): # Calculate size of source entry. src_bytes = m.offset_data - m.offset blocks, rem = divmod(m.size, tf.BLOCKSIZE) if rem > 0: blocks += 1 src_bytes += blocks * tf.BLOCKSIZE src_offset = m.offset arc.close() # Test truncated archive case. bad_arc_path = os.path.join(self.test_root, "bad_arc.p5p") portable.copyfile(arc_path, bad_arc_path) self.debug("%s size: %d truncate: %d" % (arc_path, os.stat(arc_path).st_size, trunc_sz)) with open(bad_arc_path, "ab+") as f: f.truncate(trunc_sz) ext_dir = os.path.join(self.test_root, "extracted") shutil.rmtree(ext_dir, True) arc = pkg.p5p.Archive(bad_arc_path, mode="r") self.assertRaisesStringify(pkg.p5p.CorruptArchiveFiles, arc.extract_package_files, [dest_fhash], ext_dir, pub="test2") arc.close() # Test archive with invalid index; do this by writing some bogus # bytes into the data area for the index. portable.copyfile(arc_path, bad_arc_path) with open(bad_arc_path, "ab+") as dest: dest.seek(idx_data_offset) dest.truncate() with open(arc_path, "rb") as src: bogus_data = "invalid_index_data" dest.write(bogus_data) src.seek(idx_data_offset + len(bogus_data)) dest.write(src.read()) shutil.rmtree(ext_dir, True) self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, bad_arc_path, mode="r") # Test archive with invalid index offsets; do this by truncating # an existing archive at the offset of one of its files and then # appending the data for a different archive member in its # place. 
portable.copyfile(arc_path, bad_arc_path) with open(bad_arc_path, "ab+") as dest: dest.seek(dest_offset) dest.truncate() with open(arc_path, "rb") as src: src.seek(src_offset) dest.write(src.read(src_bytes)) shutil.rmtree(ext_dir, True) arc = pkg.p5p.Archive(bad_arc_path, mode="r") self.assertRaisesStringify(pkg.p5p.InvalidArchive, arc.extract_package_files, [dest_fhash], ext_dir, pub="test2") arc.close() os.unlink(arc_path) os.unlink(bad_arc_path) # # Check that directory where archive expected is handled. # os.mkdir(arc_path) self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.rmdir(arc_path) # Temporarily change the current archive version and create a # a new archive, and then verify that the expected exception is # raised when an attempt to read it is made. orig_ver = pkg.p5p.Archive.CURRENT_VERSION try: pkg.p5p.Archive.CURRENT_VERSION = 99 # EVIL arc = pkg.p5p.Archive(arc_path, mode="w") arc.close() finally: # Ensure this is reset to the right value. pkg.p5p.Archive.CURRENT_VERSION = orig_ver self.assertRaisesStringify(pkg.p5p.InvalidArchive, pkg.p5p.Archive, arc_path, mode="r") os.unlink(arc_path)
def test_02_add_package(self): """Verify that pkg(5) archive creation using add_package() works as expected. """ # Get repository. repo = self.get_repo(self.dc.get_repodir()) # Create a directory and copy package files from repository to # it (this is how pkgrecv stores content during republication # or when using --raw). dfroot = os.path.join(self.test_root, "pfiles") os.mkdir(dfroot, pkg.misc.PKG_DIR_MODE) foo_path = os.path.join(dfroot, "foo.p5m") portable.copyfile(repo.manifest(self.foo), foo_path) signed_path = os.path.join(dfroot, "signed.p5m") portable.copyfile(repo.manifest(self.signed), signed_path) quux_path = os.path.join(dfroot, "quux.p5m") portable.copyfile(repo.manifest(self.quux), quux_path) for rstore in repo.rstores: for dirpath, dirnames, filenames in os.walk( rstore.file_root): if not filenames: continue for f in filenames: portable.copyfile( os.path.join(dirpath, f), os.path.join(dfroot, f)) # Prep the archive. progtrack = pkg.client.progress.QuietProgressTracker() arc_path = os.path.join(self.test_root, "add_package.p5p") arc = pkg.p5p.Archive(arc_path, mode="w") # Create an archive with just one package. arc.add_package(self.foo, foo_path, dfroot) arc.close(progtrack=progtrack) # Verify the result. arc = ptf.PkgTarFile(name=arc_path, mode="r") expected = self.foo_expected actual = [m.name for m in arc.getmembers()] self.assertEqualDiff(expected, actual) # Prep a new archive. os.unlink(arc_path) arc = pkg.p5p.Archive(arc_path, mode="w") # Create an archive with multiple packages. # (Don't use progtrack this time.) arc.add_package(self.foo, foo_path, dfroot) arc.add_package(self.signed, signed_path, dfroot) arc.add_package(self.quux, quux_path, dfroot) arc.close() # Verify the result. arc = ptf.PkgTarFile(name=arc_path, mode="r") expected = self.multi_expected[:] action_certs = [self.calc_pem_hash(t) for t in ( os.path.join(self.cs_dir, "cs1_ch5_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch1_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch2_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch3_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch4_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch5_ta1_cert.pem"), os.path.join(self.chain_certs_dir, "ch1_ta3_cert.pem"), )] for hsh in action_certs: d = "publisher/test/file/%s" % hsh[0:2] f = "%s/%s" % (d, hsh) expected.append(d) expected.append(f) actual = sorted(m.name for m in arc.getmembers()) self.assertEqualDiff(sorted(set(expected)), actual) os.unlink(arc_path) os.unlink(foo_path) os.unlink(quux_path) os.unlink(signed_path)