def test_store_to_disk(self):
    """Verify that a FactoredManifest is force-loaded before being
    stored to disk, so the stored copy's digest matches the digest
    of the original manifest file."""
    src = manifest.FactoredManifest("[email protected]", self.cache_dir,
        pathname=self.foo_content_p5m)
    scratch = tempfile.mkdtemp(dir=self.test_root)
    dest = os.path.join(scratch, "manifest.p5m")
    src.store(dest)
    # Byte-for-byte equivalence is checked via content digests.
    self.assertEqual(
        misc.get_data_digest(dest,
            hash_func=digest.DEFAULT_HASH_FUNC),
        misc.get_data_digest(self.foo_content_p5m,
            hash_func=digest.DEFAULT_HASH_FUNC))
def verify(self, img, pfmri, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image."""
    errors = []
    warnings = []
    info = []

    # License payloads live in the image's per-package license
    # directory; the license attribute is URL-quoted so it is a
    # safe file name component.
    path = os.path.join(img.get_license_dir(pfmri),
        "license." + quote(self.attrs["license"], ""))

    hash_attr, hash_val, hash_func = \
        digest.get_preferred_hash(self)
    # Content is only checked during a full verification pass
    # (the "forever" flag).
    if args["forever"] == True:
        try:
            chash, cdata = misc.get_data_digest(path,
                hash_func=hash_func)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                errors.append(
                    _("License file {0} does "
                    "not exist.").format(path))
                return errors, warnings, info
            raise

        if chash != hash_val:
            errors.append(
                _("Hash: '{found}' should be "
                "'{expected}'").format(found=chash,
                expected=hash_val))
    return errors, warnings, info
def install(self, pkgplan, orig):
    """Client-side method that installs a file."""
    # NOTE: historic Python 2 code (octal literal 0755 and the
    # "except X, e" syntax); left unmodernized intentionally.
    path = self.attrs["path"]
    mode = int(self.attrs["mode"], 8)
    owner, group = self.get_uid_gid(pkgplan.image)

    final_path = os.path.normpath(os.path.sep.join(
        (pkgplan.image.get_root(), path)))

    if not os.path.exists(os.path.dirname(final_path)):
        self.makedirs(os.path.dirname(final_path), mode=0755)

    # XXX If we're upgrading, do we need to preserve file perms from
    # exisiting file?

    # check if we have a save_file active; if so, simulate file
    # being already present rather than installed from scratch
    if "save_file" in self.attrs:
        orig = self.restore_file(pkgplan.image)

    # If the action has been marked with a preserve attribute, and
    # the file exists and has a contents hash different from what
    # the system expected it to be, then we preserve the original
    # file in some way, depending on the value of preserve.
    #
    # XXX What happens when we transition from preserve to
    # non-preserve or vice versa? Do we want to treat a preserve
    # attribute as turning the action into a critical action?
    if "preserve" in self.attrs and os.path.isfile(final_path):
        chash, cdata = misc.get_data_digest(final_path)

        # XXX We should save the originally installed file. It
        # can be used as an ancestor for a three-way merge, for
        # example. Where should it be stored?
        if not orig or chash != orig.hash:
            pres_type = self.attrs["preserve"]
            if pres_type == "renameold":
                old_path = final_path + ".old"
            elif pres_type == "renamenew":
                final_path = final_path + ".new"
            else:
                # Plain "preserve": leave the user's file alone.
                return

    # If it is a directory (and not empty) then we should
    # salvage the contents.
    if os.path.exists(final_path) and \
        not os.path.islink(final_path) and \
        os.path.isdir(final_path):
        try:
            os.rmdir(final_path)
        except OSError, e:
            if e.errno == errno.ENOENT:
                pass
            elif e.errno == errno.EEXIST or \
                e.errno == errno.ENOTEMPTY:
                # Directory not empty: move contents aside
                # for the admin rather than destroying them.
                pkgplan.image.salvagedir(final_path)
            elif e.errno != errno.EACCES:
                # this happens on Windows
                raise
def test_correct_cert_validation(self):
    """ Test that an expired cert for one publisher doesn't prevent
    making changes to other publishers due to certificate checks on
    all configured publishers. (Bug 17018362)"""

    bad_cert_path = os.path.join(self.cs_dir, "cs3_ch1_ta3_cert.pem")
    good_cert_path = os.path.join(self.cs_dir,
        self.get_cli_cert("test"))
    self.ac.start()
    self.image_create()

    # Set https-based publisher with correct cert.
    self.seed_ta_dir("ta7")
    self.pkg("set-publisher -k {key} -c {cert} -p {url}".format(
        url=self.acurl1, cert=good_cert_path,
        key=os.path.join(self.keys_dir, self.get_cli_key("test")),
    ))

    # Set a second publisher
    self.pkg("set-publisher -p {url}".format(url=self.rurl2))

    # Replace cert of first publisher with one that is expired.
    # It doesn't need to match the key because we just want to
    # test if the cert validation code works correctly so we are not
    # actually using the cert.

    # Cert is stored by content hash in the pkg config of the image,
    # which must be a SHA-1 hash for backwards compatibility.
    ch = misc.get_data_digest(good_cert_path,
        hash_func=hashlib.sha1)[0]
    pkg_cert_path = os.path.join(self.get_img_path(), "var",
        "pkg", "ssl", ch)
    shutil.copy(bad_cert_path, pkg_cert_path)

    # Refreshing the second publisher should not try to validate
    # the cert for the first publisher.
    self.pkg("refresh {0}".format(self.tmppub))
def get_text(self, img, pfmri, alt_pub=None):
    """Retrieves and returns the payload of the license (which
    should be text).  This may require remote retrieval of
    resources and so this could raise a TransportError or other
    ApiException.

    'alt_pub' is an optional alternate Publisher to use for
    any required transport operations.
    """
    path = self.get_local_path(img, pfmri)
    hash_attr, hash_attr_val, hash_func = \
        digest.get_least_preferred_hash(self)
    try:
        with open(path, "rb") as fh:
            length = os.stat(path).st_size
            # Verify the cached copy against the action's
            # expected hash before trusting it.
            chash, txt = misc.get_data_digest(fh,
                length=length, return_content=True,
                hash_func=hash_func)
            if chash == hash_attr_val:
                return misc.force_str(txt)
    except EnvironmentError as e:
        # Missing cache file is expected; anything else is real.
        if e.errno != errno.ENOENT:
            raise

    # If we get here, either the license file wasn't on disk, or the
    # hash didn't match.  In either case, go retrieve it from the
    # publisher.
    try:
        if not alt_pub:
            alt_pub = img.get_publisher(pfmri.publisher)
        assert pfmri.publisher == alt_pub.prefix
        return img.transport.get_content(alt_pub, hash_attr_val,
            fmri=pfmri, hash_func=hash_func)
    finally:
        # Always clear temporary download state, even on error.
        img.cleanup_downloads()
def verify(self, img, pkg_fmri, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image."""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    errors = []
    warnings = []
    info = []

    path = os.path.normpath(os.path.join(img.imgdir, "pkg",
        pkg_fmri.get_dir_path(),
        "license." + self.attrs["license"]))

    # Content is only checked during a full verification pass.
    if args["forever"] == True:
        try:
            chash, cdata = misc.get_data_digest(path)
        except EnvironmentError, e:
            if e.errno == errno.ENOENT:
                errors.append(_("License file %s does "
                    "not exist.") % path)
                return errors, warnings, info
            raise

        if chash != self.hash:
            errors.append(_("Hash: '%(found)s' should be "
                "'%(expected)s'") % {
                "found": chash,
                "expected": self.hash})
    # NOTE(review): no terminal "return errors, warnings, info" is
    # visible in this chunk; presumably it follows — confirm
    # against the full source.
def install(self, pkgplan, orig):
    """Client-side method that installs a file."""
    # NOTE: historic Python 2 code (octal literal 0755 and the
    # "except X, e" syntax); near-duplicate of another install()
    # version in this file with different line wrapping.
    path = self.attrs["path"]
    mode = int(self.attrs["mode"], 8)
    owner, group = self.get_uid_gid(pkgplan.image)

    final_path = os.path.normpath(
        os.path.sep.join((pkgplan.image.get_root(), path)))

    if not os.path.exists(os.path.dirname(final_path)):
        self.makedirs(os.path.dirname(final_path), mode=0755)

    # XXX If we're upgrading, do we need to preserve file perms from
    # exisiting file?

    # check if we have a save_file active; if so, simulate file
    # being already present rather than installed from scratch
    if "save_file" in self.attrs:
        orig = self.restore_file(pkgplan.image)

    # If the action has been marked with a preserve attribute, and
    # the file exists and has a contents hash different from what
    # the system expected it to be, then we preserve the original
    # file in some way, depending on the value of preserve.
    #
    # XXX What happens when we transition from preserve to
    # non-preserve or vice versa? Do we want to treat a preserve
    # attribute as turning the action into a critical action?
    if "preserve" in self.attrs and os.path.isfile(final_path):
        chash, cdata = misc.get_data_digest(final_path)

        # XXX We should save the originally installed file. It
        # can be used as an ancestor for a three-way merge, for
        # example. Where should it be stored?
        if not orig or chash != orig.hash:
            pres_type = self.attrs["preserve"]
            if pres_type == "renameold":
                old_path = final_path + ".old"
            elif pres_type == "renamenew":
                final_path = final_path + ".new"
            else:
                # Plain "preserve": leave the user's file alone.
                return

    # If it is a directory (and not empty) then we should
    # salvage the contents.
    if os.path.exists(final_path) and \
        not os.path.islink(final_path) and \
        os.path.isdir(final_path):
        try:
            os.rmdir(final_path)
        except OSError, e:
            if e.errno == errno.ENOENT:
                pass
            elif e.errno == errno.EEXIST or \
                e.errno == errno.ENOTEMPTY:
                # Non-empty directory: preserve contents
                # for the admin instead of destroying them.
                pkgplan.image.salvagedir(final_path)
            elif e.errno != errno.EACCES:
                # this happens on Windows
                raise
def verify(self, img, pkg_fmri, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image."""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    errors = []
    warnings = []
    info = []

    path = os.path.normpath(
        os.path.join(img.imgdir, "pkg",
        pkg_fmri.get_dir_path(),
        "license." + self.attrs["license"]))

    # Content is only checked during a full verification pass.
    if args["forever"] == True:
        try:
            chash, cdata = misc.get_data_digest(path)
        except EnvironmentError, e:
            if e.errno == errno.ENOENT:
                errors.append(
                    _("License file %s does "
                    "not exist.") % path)
                return errors, warnings, info
            raise

        if chash != self.hash:
            errors.append(
                _("Hash: '%(found)s' should be "
                "'%(expected)s'") % {
                    "found": chash,
                    "expected": self.hash
                })
def __check_preserve(self, orig, pkgplan):
    """Return the type of preservation needed for this action.

    Returns None if preservation is not defined by the action.
    Returns False if it is, but no preservation is necessary.
    Returns True for the normal preservation form.  Returns one
    of the strings 'renameold' or 'renamenew' for each of the
    respective forms of preservation.
    """
    # Guard: actions without a preserve attribute never preserve.
    if "preserve" not in self.attrs:
        return None

    target = os.path.normpath(os.path.sep.join(
        (pkgplan.image.get_root(), self.attrs["path"])))

    # Preservation only applies when a regular file already exists
    # on disk and its content differs from what was last installed.
    if not os.path.isfile(target):
        return False

    fhash, fdata = misc.get_data_digest(target)
    if orig and fhash == orig.hash:
        return False

    kind = self.attrs["preserve"]
    return kind if kind in ("renameold", "renamenew") else True
def __check_preserve(self, orig, pkgplan):
    """Return the type of preservation needed for this action.

    Returns None if preservation is not defined by the action.
    Returns False if it is, but no preservation is necessary.
    Returns True for the normal preservation form.  Returns one of
    the strings 'renameold' or 'renamenew' for each of the
    respective forms of preservation.
    """
    if not "preserve" in self.attrs:
        return None

    final_path = os.path.normpath(
        os.path.sep.join((pkgplan.image.get_root(),
        self.attrs["path"])))

    pres_type = False
    # If the action has been marked with a preserve attribute, and
    # the file exists and has a content hash different from what the
    # system expected it to be, then we preserve the original file
    # in some way, depending on the value of preserve.
    if os.path.isfile(final_path):
        chash, cdata = misc.get_data_digest(final_path)
        if not orig or chash != orig.hash:
            pres_type = self.attrs["preserve"]
            if pres_type in ("renameold", "renamenew"):
                return pres_type
            else:
                # Any other value means ordinary preservation.
                return True
    return pres_type
def get_test_sum(fname=None):
    """ Helper to get sha256 sum of installed test file."""
    target = fname if fname is not None else os.path.join(
        self.get_img_path(), "bin/true")
    # Only the digest is needed; discard the returned content.
    return misc.get_data_digest(target, hash_func=hashlib.sha256)[0]
def test_11_ssl_key_cert_set(self):
    """Verify that pkg image create will still pass if repo_uri
    doesn't have ssl_scheme but one of the origins or mirrors
    have schemes"""

    self.image_create(self.rurl1)
    # Set the first publisher to a https URL
    key_path = os.path.join(self.keys_dir, "cs1_ch1_ta3_key.pem")
    cert_path = os.path.join(self.cs_dir, "cs1_ch1_ta3_cert.pem")

    # NOTE(review): img_key_path and img_cert_path are computed
    # here but never used below — confirm whether they can be
    # removed or were meant to be asserted against.
    img_key_path = os.path.join(
        self.img_path(), "var", "pkg", "ssl",
        misc.get_data_digest(key_path,
        hash_func=hashlib.sha1)[0])
    img_cert_path = os.path.join(
        self.img_path(), "var", "pkg", "ssl",
        misc.get_data_digest(cert_path,
        hash_func=hashlib.sha1)[0])
    img_path = os.path.join(self.test_root, "img")

    # Test image create will fail if repo_uri
    # does not have https
    self.pkg(("image-create --no-refresh -p foo=http://{0}"
        " -k {1} -c {2} {3}").format(self.bogus_url, key_path,
        cert_path, img_path), exit=1)

    # Test image create will fail if there are no https url
    self.pkg(("image-create --no-refresh -p foo=http://{0}"
        " -k {1} -c {2} -g http://{0} {3}").format(
        self.bogus_url, key_path, cert_path, img_path), exit=1)

    # Test image create will succeed if one origin as https
    self.pkg(("image-create --no-refresh -p foo=http://{0}"
        " -k {1} -c {2} -g https://{0} {3}").format(
        self.bogus_url, key_path, cert_path, img_path), exit=0)
    shutil.rmtree(img_path)

    # Test image create will succeed if one mirror has https
    self.pkg(("image-create --no-refresh -p foo=http://{0}"
        " -k {1} -c {2} -m https://{0} {3}").format(
        self.bogus_url, key_path, cert_path, img_path), exit=0)
    shutil.rmtree(img_path)
def test_expired_certs(self):
    """ Test that certificate validation needs to validate all
    certificates before raising an exception. (Bug 15507548)"""

    bad_cert_path = os.path.join(self.cs_dir, "cs3_ch1_ta3_cert.pem")
    good_cert_path_1 = os.path.join(self.cs_dir,
        self.get_cli_cert("test"))
    good_cert_path_2 = os.path.join(self.cs_dir,
        self.get_cli_cert("tmp"))
    self.ac.start()
    self.image_create()

    # Set https-based publisher with correct cert.
    self.seed_ta_dir("ta7")
    self.pkg("set-publisher -k {key} -c {cert} -p {url}".format(
        url=self.acurl1, cert=good_cert_path_1,
        key=os.path.join(self.keys_dir, self.get_cli_key("test")),
    ))

    # Set a second publisher
    self.pkg("set-publisher -k {key} -c {cert} -p {url}".format(
        url=self.acurl2, cert=good_cert_path_2,
        key=os.path.join(self.keys_dir, self.get_cli_key("tmp")),
    ))

    # Replace cert of first publisher with one that is expired.
    # Cert is stored by content hash in the pkg config of the image,
    # which must be a SHA-1 hash for backwards compatibility.
    ch = misc.get_data_digest(good_cert_path_1,
        hash_func=hashlib.sha1)[0]
    pkg_cert_path = os.path.join(self.get_img_path(), "var",
        "pkg", "ssl", ch)
    shutil.copy(bad_cert_path, pkg_cert_path)

    # Replace the second certificate with one that is expired.
    ch = misc.get_data_digest(good_cert_path_2,
        hash_func=hashlib.sha1)[0]
    pkg_cert_path = os.path.join(self.get_img_path(), "var",
        "pkg", "ssl", ch)
    shutil.copy(bad_cert_path, pkg_cert_path)

    # Refresh all publishers should try to validate all certs.
    self.pkg("refresh", exit=1)
    # Both publishers must be reported, proving validation did not
    # stop at the first failure.
    self.assertTrue("Publisher: tmp" in self.errout, self.errout)
    self.assertTrue("Publisher: test" in self.errout, self.errout)
def remove(self, pkgplan): path = self.get_installed_path(pkgplan.image.get_root()) # Are we supposed to save this file to restore it elsewhere # or in another pkg? 'save_file' is set by the imageplan. save_file = self.attrs.get("save_file") if save_file: # 'save_file' contains a tuple of (orig_name, # remove_file). remove = save_file[1] self.save_file(pkgplan.image, path) if remove != "true": # File must be left in place (this file is # likely overlaid and is moving). return if self.attrs.get("preserve") == "abandon": return try: # Make file writable so it can be deleted. os.chmod(path, stat.S_IWRITE | stat.S_IREAD) except OSError as e: if e.errno == errno.ENOENT: # Already gone; don't care. return raise if not pkgplan.destination_fmri and \ self.attrs.get("preserve", "false").lower() != "false": # Preserved files are salvaged if they have been # modified since they were installed and this is # not an upgrade. try: hash_attr, hash_val, hash_func = \ digest.get_preferred_hash(self) ihash, cdata = misc.get_data_digest(path, hash_func=hash_func) if ihash != hash_val: pkgplan.salvage(path) # Nothing more to do. return except EnvironmentError as e: if e.errno == errno.ENOENT: # Already gone; don't care. return raise # Attempt to remove the file. self.remove_fsobj(pkgplan, path)
def verify(self, img, pkg_fmri, **args):
    """Verify the installed license file; returns a list of error
    strings on mismatch (or falls through when nothing is
    checked)."""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    path = os.path.normpath(os.path.join(img.imgdir, "pkg",
        pkg_fmri.get_dir_path(),
        "license." + self.attrs["license"]))

    # Content is only checked during a full verification pass.
    if args["forever"] == True:
        try:
            chash, cdata = misc.get_data_digest(path)
        except EnvironmentError, e:
            if e.errno == errno.ENOENT:
                return [_("License file %s does not "
                    "exist.") % path]
            raise

        if chash != self.hash:
            return [_("Hash: '%(found)s' should be "
                "'%(expected)s'") % {
                "found": chash,
                "expected": self.hash}]
def add_file(self, f, size=None):
    """Adds the file to the Transaction."""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    # 'fname' is the file's content digest, which is also the name
    # it is stored under in the repository.
    fname, data = misc.get_data_digest(f, length=size,
        return_content=True)

    if size is None:
        size = len(data)

    try:
        dst_path = self.rstore.file(fname)
    except Exception, e:
        # The specific exception can't be named here due
        # to the cyclic dependency between this class
        # and the repository class.
        if getattr(e, "data", "") != fname:
            raise
        # File isn't in the repository yet.
        dst_path = None
def verify(self, img, pkg_fmri, **args):
    """Verify the installed license file; returns a list of error
    strings on mismatch (or falls through when nothing is
    checked)."""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    path = os.path.normpath(
        os.path.join(img.imgdir, "pkg",
        pkg_fmri.get_dir_path(),
        "license." + self.attrs["license"]))

    # Content is only checked during a full verification pass.
    if args["forever"] == True:
        try:
            chash, cdata = misc.get_data_digest(path)
        except EnvironmentError, e:
            if e.errno == errno.ENOENT:
                return [_("License file %s does not "
                    "exist.") % path]
            raise

        if chash != self.hash:
            return [
                _("Hash: '%(found)s' should be "
                "'%(expected)s'") % {
                    "found": chash,
                    "expected": self.hash
                }
            ]
def get_text(self, img, pfmri, alt_pub=None):
    """Retrieves and returns the payload of the license (which
    should be text).  This may require remote retrieval of
    resources and so this could raise a TransportError or other
    ApiException.

    'alt_pub' is an optional alternate Publisher to use for
    any required transport operations.
    """
    # NOTE: historic Python 2 code ("except X, e" syntax).
    path = self.get_local_path(img, pfmri)
    try:
        with open(path, "rb") as fh:
            length = os.stat(path).st_size
            # Verify the cached copy against the expected hash
            # before trusting it.
            chash, txt = misc.get_data_digest(fh,
                length=length, return_content=True)
            if chash == self.hash:
                return txt
    except EnvironmentError, e:
        # Missing cache file is expected; anything else is real.
        if e.errno != errno.ENOENT:
            raise
    # NOTE(review): no fallback retrieval is visible in this chunk;
    # the transport-based fetch presumably follows — confirm
    # against the full source.
def get_text(self, img, pfmri, alt_pub=None):
    """Retrieves and returns the payload of the license (which
    should be text).  This may require remote retrieval of
    resources and so this could raise a TransportError or other
    ApiException.

    If there are UTF-8 encoding errors in the text replace them
    so that we still have a license to show rather than failing
    the entire operation.  The copy saved on disk is left as is.

    'alt_pub' is an optional alternate Publisher to use for
    any required transport operations.
    """
    path = self.get_local_path(img, pfmri)
    hash_attr, hash_attr_val, hash_func = \
        digest.get_least_preferred_hash(self)
    try:
        with open(path, "rb") as fh:
            length = os.stat(path).st_size
            # Verify the cached copy against the action's
            # expected hash before trusting it.
            chash, txt = misc.get_data_digest(fh,
                length=length, return_content=True,
                hash_func=hash_func)
            if chash == hash_attr_val:
                return misc.force_str(txt, errors='replace')
    except EnvironmentError as e:
        # Missing cache file is expected; anything else is real.
        if e.errno != errno.ENOENT:
            raise

    # Cache miss or hash mismatch: fetch from the publisher.
    try:
        if not alt_pub:
            alt_pub = img.get_publisher(pfmri.publisher)
        assert pfmri.publisher == alt_pub.prefix
        return img.transport.get_content(alt_pub, hash_attr_val,
            fmri=pfmri, hash_func=hash_func,
            errors="replace")
    finally:
        # Always clear temporary download state, even on error.
        img.cleanup_downloads()
def __set_chain_certs_data(self, chain_certs, chash_dir):
    """Store the information about the certs needed to validate
    this signature in the signature.

    The 'chain_certs' parameter is a list of paths to certificates.
    """
    # NOTE: historic Python 2 code ("except X, e" syntax).

    self.chain_cert_openers = []

    # Parallel lists collecting, per certificate: payload hash,
    # payload size, compressed hash, compressed size.
    hshes = []
    sizes = []
    chshes = []
    csizes = []
    for pth in chain_certs:
        if not os.path.exists(pth):
            raise pkg.actions.ActionDataError(
                _("No such file: '%s'.") % pth, path=pth)
        elif os.path.isdir(pth):
            raise pkg.actions.ActionDataError(
                _("'%s' is not a file.") % pth, path=pth)
        file_opener = self.make_opener(pth)
        self.chain_cert_openers.append(file_opener)
        self.attrs.setdefault("chain.sizes", [])
        try:
            fs = os.stat(pth)
            sizes.append(str(fs.st_size))
        except EnvironmentError, e:
            raise pkg.actions.ActionDataError(e, path=pth)
        # misc.get_data_digest takes care of closing the file
        # that's opened below.
        with file_opener() as fh:
            hsh, data = misc.get_data_digest(fh,
                length=fs.st_size, return_content=True)
        hshes.append(hsh)
        csize, chash = misc.compute_compressed_attrs(hsh,
            None, data, fs.st_size, chash_dir)
        csizes.append(csize)
        chshes.append(chash.hexdigest())
def add_file(self, f, size=None):
    """Adds the file to the Transaction.

    'f' is the payload to add; 'size' is its length in bytes,
    computed from the payload content when not supplied.
    """
    hashes, data = misc.get_data_digest(
        f, length=size, return_content=True,
        hash_attrs=digest.DEFAULT_HASH_ATTRS,
        hash_algs=digest.HASH_ALGS)

    if size is None:
        size = len(data)

    # BUGFIX: initialize before the try so the except clause can't
    # raise NameError when get_least_preferred_hash() or the dict
    # lookup itself fails (matches the later add_file() variant in
    # this file).
    fname = None
    try:
        # We don't have an Action yet, so passing None is fine.
        default_hash_attr = digest.get_least_preferred_hash(
            None)[0]
        fname = hashes[default_hash_attr]
        dst_path = self.rstore.file(fname)
    except Exception as e:
        # The specific exception can't be named here due
        # to the cyclic dependency between this class
        # and the repository class.
        if getattr(e, "data", "") != fname:
            raise
        # File isn't in the repository yet.
        dst_path = None

    csize, chashes = misc.compute_compressed_attrs(
        fname, dst_path, data, size, self.dir,
        chash_attrs=digest.DEFAULT_CHASH_ATTRS,
        chash_algs=digest.CHASH_ALGS)
    # Drop references to the (possibly large) payload promptly.
    chashes = None
    data = None

    self.remaining_payload_cnt -= 1
def add_file(self, f, basename=None, size=None):
    """Adds the file to the Transaction.

    'f' is either a path to the file to add or an open file
    object.  If 'basename' is given, the file is stored as-is
    under that name; otherwise it is hashed and stored by its
    digest.  'size' is the payload length in bytes when known.
    """

    # If basename provided, just store the file as-is with the
    # basename.
    if basename:
        fileneeded = True
        try:
            dst_path = self.rstore.file(basename)
            fileneeded = False
        except Exception:
            dst_path = os.path.join(self.dir, basename)

        if not fileneeded:
            # Repository already has this file; nothing to do.
            return

        if isinstance(f, six.string_types):
            portable.copyfile(f, dst_path)
            return

        bufsz = 128 * 1024
        # BUGFIX: guard against size=None (the documented default);
        # "int > None" raises TypeError on Python 3.
        if size is not None and bufsz > size:
            bufsz = size

        with open(dst_path, "wb") as wf:
            while True:
                data = f.read(bufsz)
                # data is bytes
                if data == b"":
                    break
                wf.write(data)
        return

    hashes, data = misc.get_data_digest(f, length=size,
        return_content=True,
        hash_attrs=digest.DEFAULT_HASH_ATTRS,
        hash_algs=digest.HASH_ALGS)

    if size is None:
        size = len(data)

    # Initialized before the try so the except clause can't raise
    # NameError if the hash lookup itself fails.
    fname = None
    try:
        # We don't have an Action yet, so passing None is fine.
        default_hash_attr = digest.get_least_preferred_hash(
            None)[0]
        fname = hashes[default_hash_attr]
        dst_path = self.rstore.file(fname)
    except Exception as e:
        # The specific exception can't be named here due
        # to the cyclic dependency between this class
        # and the repository class.
        if getattr(e, "data", "") != fname:
            raise
        # File isn't in the repository yet.
        dst_path = None

    misc.compute_compressed_attrs(fname, dst_path, data, size,
        self.dir, chash_attrs=digest.DEFAULT_CHASH_ATTRS,
        chash_algs=digest.CHASH_ALGS)

    self.remaining_payload_cnt -= 1
def add_content(self, action):
    """Adds the content of the provided action (if applicable) to
    the Transaction."""

    # Perform additional publication-time validation of actions
    # before further processing is done.
    try:
        action.validate()
    except actions.ActionError as e:
        raise TransactionOperationError(e)

    # Only signature actions may be added to an append transaction.
    if self.append_trans and action.name != "signature":
        raise TransactionOperationError(non_sig=True)

    size = int(action.attrs.get("pkg.size", 0))

    if action.has_payload and size <= 0:
        # XXX hack for empty files
        action.data = lambda: open(os.devnull, "rb")

    if action.data is not None:
        # get all hashes for this action
        hashes, data = misc.get_data_digest(action.data(),
            length=size, return_content=True,
            hash_attrs=digest.LEGACY_HASH_ATTRS,
            hash_algs=digest.HASH_ALGS)
        # set the hash member for backwards compatibility and
        # remove it from the dictionary
        action.hash = hashes.pop("hash", None)
        action.attrs.update(hashes)

        # now set the hash value that will be used for storing
        # the file in the repository.
        hash_attr, hash_val, hash_func = \
            digest.get_least_preferred_hash(action)
        fname = hash_val

        # Extract ELF information if not already provided.
        # XXX This needs to be modularized.
        if haveelf and data[:4] == b"\x7fELF" and (
            "elfarch" not in action.attrs or
            "elfbits" not in action.attrs or
            "elfhash" not in action.attrs):
            # ELF inspection needs a real file on disk, so
            # write the payload to a temporary file first.
            elf_name = os.path.join(self.dir,
                ".temp-{0}".format(fname))
            elf_file = open(elf_name, "wb")
            elf_file.write(data)
            elf_file.close()

            try:
                elf_info = elf.get_info(elf_name)
            except elf.ElfError as e:
                raise TransactionContentError(e)

            try:
                # Check which content checksums to
                # compute and add to the action
                elf1 = "elfhash"

                if elf1 in \
                    digest.LEGACY_CONTENT_HASH_ATTRS:
                    get_sha1 = True
                else:
                    get_sha1 = False

                hashes = elf.get_hashes(elf_name,
                    elfhash=get_sha1)

                if get_sha1:
                    action.attrs[elf1] = hashes[elf1]
            except elf.ElfError:
                # Hash extraction is best-effort only.
                pass
            action.attrs["elfbits"] = str(elf_info["bits"])
            action.attrs["elfarch"] = elf_info["arch"]
            os.unlink(elf_name)

        try:
            dst_path = self.rstore.file(fname)
        except Exception as e:
            # The specific exception can't be named here due
            # to the cyclic dependency between this class
            # and the repository class.
            if getattr(e, "data", "") != fname:
                raise
            # File isn't in the repository yet.
            dst_path = None

        csize, chashes = misc.compute_compressed_attrs(
            fname, dst_path, data, size, self.dir)
        for attr in chashes:
            action.attrs[attr] = chashes[attr]
        action.attrs["pkg.csize"] = csize

    self.remaining_payload_cnt = \
        len(action.attrs.get("chain.sizes", "").split())

    # Do some sanity checking on packages marked or being marked
    # obsolete or renamed.
    if action.name == "set" and \
        action.attrs["name"] == "pkg.obsolete" and \
        action.attrs["value"] == "true":
        self.obsolete = True
        if self.types_found.difference(
            set(("set", "signature"))):
            raise TransactionOperationError(_("An obsolete "
                "package cannot contain actions other than "
                "'set' and 'signature'."))
    elif action.name == "set" and \
        action.attrs["name"] == "pkg.renamed" and \
        action.attrs["value"] == "true":
        self.renamed = True
        if self.types_found.difference(
            set(("depend", "set", "signature"))):
            raise TransactionOperationError(_("A renamed "
                "package cannot contain actions other than "
                "'set', 'depend', and 'signature'."))

    if not self.has_reqdeps and action.name == "depend" and \
        action.attrs["type"] == "require":
        self.has_reqdeps = True

    if self.obsolete and self.renamed:
        # Reset either obsolete or renamed, depending on which
        # action this was.
        if action.attrs["name"] == "pkg.obsolete":
            self.obsolete = False
        else:
            self.renamed = False
        raise TransactionOperationError(_("A package may not "
            " be marked for both obsoletion and renaming."))
    elif self.obsolete and action.name not in ("set", "signature"):
        raise TransactionOperationError(_("A '{type}' action "
            "cannot be present in an obsolete package: "
            "{action}").format(
            type=action.name, action=action))
    elif self.renamed and action.name not in \
        ("depend", "set", "signature"):
        raise TransactionOperationError(_("A '{type}' action "
            "cannot be present in a renamed package: "
            "{action}").format(
            type=action.name, action=action))

    # Now that the action is known to be sane, we can add it to the
    # manifest.
    tfpath = os.path.join(self.dir, "manifest")
    tfile = open(tfpath, "a+")
    print(action, file=tfile)
    tfile.close()

    self.types_found.add(action.name)
# NOTE: historic Python 2 code (has_key, iteritems; map() returns a
# list, which is what allows the append-while-iterating below).
excludedirs = map(os.path.normpath, excludedirs)
# Walk each excluded directory and add everything beneath it to the
# exclusion lists.  Appending to excludedirs while iterating it is
# deliberate: newly found subdirectories are walked in later passes.
for d in excludedirs:
    #Include the files and sub-dir in excludedir
    #to excludefiles and excludedir
    for root, dirs, files in os.walk(os.path.join(base, d)):
        reldir = root[len(base)+1:]
        for name in files:
            excludefiles.append(os.path.normpath(
                os.path.join(reldir, name)))
        for name in dirs:
            excludedirs.append(os.path.normpath(
                os.path.join(reldir, name)))

if p.has_key("attributes"):
    for an, av in p["attributes"].iteritems():
        al = ['name=' + an, 'value=' + av]
        # For the special attribute named by pkg_attributes[0],
        # the value is a file path: publish its digest instead
        # and upload the file itself.
        if (an == pkg_attributes[0]):
            hash, cdata = get_data_digest(
                os.path.join(base, av))
            al = ['name=' + an, 'value=' + hash]
        action = pkg.actions.fromlist("set", al)
        t.add(action)
        if (an == pkg_attributes[0]):
            addfile(av, t, base, None, opsys)

if p.has_key("depends"):
    for fmri, attributes in p["depends"].iteritems():
        dl = ['type=' + attributes["type"], 'fmri=' + fmri]
        action = pkg.actions.fromlist("depend", dl)
        t.add(action)

if p.has_key("files"):
    for path, attributes in p["files"].iteritems():
        addfile(path, t, base, attributes, opsys)
def verify(self, img, **args):
    """ verify that file is present and if preserve attribute
    not present, that hashes match"""
    # NOTE: historic Python 2 code ("except X, e" syntax).
    path = os.path.normpath(os.path.sep.join(
        (img.get_root(), self.attrs["path"])))

    lstat, errors, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True

    if abort:
        assert errors
        return errors

    if path.lower().endswith("/cat") and args["verbose"] == True:
        errors.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    if "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append("Timestamp: %s should be %s" %
            (misc.time_to_timestamp(lstat.st_mtime),
            self.attrs["timestamp"]))

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append("Size: %d bytes should be %d" % \
            (lstat.st_size, int(self.attrs["pkg.size"])))

    # Preserved files may legitimately differ from the packaged
    # content, so stop before content checks.
    if "preserve" in self.attrs:
        return errors

    if args["forever"] != True:
        return errors

    #
    # Check file contents
    #
    try:
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = "Elfhash: %s should be %s" % \
                    (elfhash, self.attrs["elfhash"])

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if elfhash is None or elferror:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if elferror:
                    errors.append(elferror)
                else:
                    errors.append("Hash: %s should be %s" % \
                        (hashvalue, self.hash))
                self.replace_required = True
    # NOTE(review): the except clause of this outer try is not
    # visible in this chunk — confirm against the full source.
def remove(self, pkgplan):
    # Client-side removal of an installed file, with fallback
    # handling for read-only files and unwritable parent
    # directories.
    path = self.get_installed_path(pkgplan.image.get_root())

    # Are we supposed to save this file to restore it elsewhere
    # or in another pkg? 'save_file' is set by the imageplan.
    save_file = self.attrs.get("save_file")
    if save_file:
        # 'save_file' contains a tuple of (orig_name,
        # remove_file).
        remove = save_file[1]
        self.save_file(pkgplan.image, path)
        if remove != "true":
            # File must be left in place (this file is
            # likely overlaid and is moving).
            return

    if self.attrs.get("preserve") in ("abandon", "install-only"):
        # These preservation modes never remove the file.
        return

    if not pkgplan.destination_fmri and \
        self.attrs.get("preserve", "false").lower() != "false":
        # Preserved files are salvaged if they have been
        # modified since they were installed and this is
        # not an upgrade.
        try:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            ihash, cdata = misc.get_data_digest(path,
                hash_func=hash_func)
            if ihash != hash_val:
                pkgplan.salvage(path)
                # Nothing more to do.
                return
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # Already gone; don't care.
                return
            raise

    # Attempt to remove the file.
    rm_exc = None
    try:
        self.remove_fsobj(pkgplan, path)
        return
    except Exception as e:
        # NOTE(review): a non-OSError exception here would lack
        # .errno and turn into AttributeError — confirm intended.
        if e.errno != errno.EACCES:
            raise
        rm_exc = e

    # There are only two likely reasons we couldn't remove the file;
    # either because the parent directory isn't writable, or
    # because the file is read-only and the OS isn't allowing its
    # removal.  Assume both and try making both the parent directory
    # and the file writable, removing the file, and finally
    # resetting the directory to its original mode.
    pdir = os.path.dirname(path)
    pmode = None
    try:
        if pdir != pkgplan.image.get_root():
            # Parent directory is not image root (e.g. '/').
            ps = os.lstat(pdir)
            pmode = ps.st_mode
            os.chmod(pdir, misc.PKG_DIR_MODE)

        # Make file writable and try removing it again; required
        # on some operating systems or potentially for some
        # filesystems?
        os.chmod(path, stat.S_IWRITE | stat.S_IREAD)
        self.remove_fsobj(pkgplan, path)
    except Exception as e:
        # Raise new exception chained to old.
        six.raise_from(e, rm_exc)
    finally:
        # If parent directory wasn't image root, then assume
        # mode needs reset.
        if pmode is not None:
            try:
                os.chmod(pdir, pmode)
            except Exception as e:
                # Ignore failure to reset parent mode.
                pass
def _process_action(self, action, exact=False, path=None):
    """Adds all expected attributes to the provided action and
    upload the file for the action if needed.

    If 'exact' is True and 'path' is 'None', the action won't be
    modified and no file will be uploaded.

    If 'exact' is True and a 'path' is provided, the file of that
    path will be uploaded as-is (it is assumed that the file is
    already in repository format).
    """

    if self._append_mode and action.name != "signature":
        # Only signature actions may be added in append mode.
        raise TransactionOperationError(non_sig=True)

    size = int(action.attrs.get("pkg.size", 0))

    if action.has_payload and size <= 0:
        # XXX hack for empty files
        action.data = lambda: open(os.devnull, "rb")

    if action.data is None:
        # Nothing to hash or upload.
        return

    if exact:
        if path:
            self.add_file(path, basename=action.hash,
                progtrack=self.progtrack)
        return

    # Get all hashes for this action.
    hashes, dummy = misc.get_data_digest(action.data(),
        length=size, hash_attrs=digest.DEFAULT_HASH_ATTRS,
        hash_algs=digest.HASH_ALGS)
    # Set the hash member for backwards compatibility and
    # remove it from the dictionary.
    action.hash = hashes.pop("hash", None)
    action.attrs.update(hashes)

    # Add file content-hash when preferred_hash is SHA2 or higher.
    if action.name != "signature" and \
        digest.PREFERRED_HASH != "sha1":
        hash_attr = "{0}:{1}".format(digest.EXTRACT_FILE,
            digest.PREFERRED_HASH)
        file_content_hash, dummy = misc.get_data_digest(
            action.data(), length=size, return_content=False,
            hash_attrs=[hash_attr], hash_algs=digest.HASH_ALGS)
        action.attrs["pkg.content-hash"] = "{0}:{1}".format(
            hash_attr, file_content_hash[hash_attr])

    # Now set the hash value that will be used for storing the file
    # in the repository.
    hash_attr, hash_val, hash_func = \
        digest.get_least_preferred_hash(action)
    fname = hash_val

    # Consult the per-transaction cache so each unique payload is
    # examined (and possibly uploaded) only once.
    hdata = self.__uploads.get(fname)
    if hdata is not None:
        elf_attrs, csize, chashes = hdata
    else:
        # We haven't processed this file before, determine if
        # it needs to be uploaded and what information the
        # repository knows about it.
        elf_attrs = self.__get_elf_attrs(action, fname, size)
        csize, chashes = self.__get_compressed_attrs(fname)

        # A 'csize' of None indicates the repository doesn't
        # already have this file, so it must be uploaded.
        fileneeded = csize is None
        if fileneeded:
            fpath = os.path.join(self._tmpdir, fname)
            csize, chashes = misc.compute_compressed_attrs(
                fname, data=action.data(), size=size,
                compress_dir=self._tmpdir)
            # Upload the compressed file for each action.
            self.add_file(fpath, basename=fname,
                progtrack=self.progtrack)
            os.unlink(fpath)
            self.__uploaded += 1
        elif not chashes:
            # If not fileneeded, and repository can't
            # provide desired hashes, call
            # compute_compressed_attrs() in a way that
            # avoids writing the file to get the attributes
            # we need.
            csize, chashes = misc.compute_compressed_attrs(
                fname, data=action.data(), size=size)
        self.__uploads[fname] = (elf_attrs, csize, chashes)

    # Merge the computed attributes back onto the action.
    for k, v in six.iteritems(elf_attrs):
        if isinstance(v, list):
            action.attrs[k] = v + action.attrlist(k)
        else:
            action.attrs[k] = v
    for k, v in six.iteritems(chashes):
        if k == "pkg.content-hash":
            action.attrs[k] = action.attrlist(k) + [v]
        else:
            action.attrs[k] = v
    action.attrs["pkg.csize"] = csize
def main_func():
    """Entry point for pkgsign.

    Parses the command line, locates packages matching the given
    patterns in the target repository, appends a signature action
    to each matching manifest, and republishes it (unless -n /
    dry-run was given).  Returns an EXIT_* status code."""
    global_settings.client_name = "pkgsign"

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:D:",
            ["help", "no-index", "no-catalog"])
    except getopt.GetoptError as e:
        usage(_("illegal global option -- {0}").format(e.opt))

    show_usage = False
    sig_alg = "rsa-sha256"
    cert_path = None
    key_path = None
    chain_certs = []
    add_to_catalog = True
    set_alg = False
    dry_run = False

    repo_uri = os.getenv("PKG_REPO", None)

    for opt, arg in opts:
        if opt == "-a":
            sig_alg = arg
            set_alg = True
        elif opt == "-c":
            cert_path = os.path.abspath(arg)
            if not os.path.isfile(cert_path):
                usage(_("{0} was expected to be a certificate "
                    "but isn't a file.").format(cert_path))
        elif opt == "-i":
            p = os.path.abspath(arg)
            if not os.path.isfile(p):
                usage(_("{0} was expected to be a certificate "
                    "but isn't a file.").format(p))
            chain_certs.append(p)
        elif opt == "-k":
            key_path = os.path.abspath(arg)
            if not os.path.isfile(key_path):
                usage(_("{0} was expected to be a key file "
                    "but isn't a file.").format(key_path))
        elif opt == "-n":
            dry_run = True
        elif opt == "-s":
            repo_uri = misc.parse_uri(arg)
        elif opt == "--help":
            show_usage = True
        elif opt == "--no-catalog":
            add_to_catalog = False
        elif opt == "-D":
            try:
                key, value = arg.split("=", 1)
                DebugValues.set_value(key, value)
            except (AttributeError, ValueError):
                error(_("{opt} takes argument of form "
                    "name=value, not {arg}").format(
                    opt=opt, arg=arg))

    if show_usage:
        usage(retcode=EXIT_OK)

    # Argument sanity checks: a repository is required, a key and
    # certificate must be supplied as a pair, and at least one fmri
    # pattern is needed.
    if not repo_uri:
        usage(_("a repository must be provided"))

    if key_path and not cert_path:
        usage(_("If a key is given to sign with, its associated "
            "certificate must be given."))

    if cert_path and not key_path:
        usage(_("If a certificate is given, its associated key must be "
            "given."))

    if chain_certs and not cert_path:
        usage(_("Intermediate certificates are only valid if a key "
            "and certificate are also provided."))

    if not pargs:
        usage(_("At least one fmri or pattern must be provided to "
            "sign."))

    if not set_alg and not key_path:
        # No key supplied: default to a plain hash signature.
        sig_alg = "sha256"

    s, h = actions.signature.SignatureAction.decompose_sig_alg(sig_alg)
    if h is None:
        usage(_("{0} is not a recognized signature algorithm.").format(
            sig_alg))
    if s and not key_path:
        usage(_("Using {0} as the signature algorithm requires that a "
            "key and certificate pair be presented using the -k and -c "
            "options.").format(sig_alg))
    if not s and key_path:
        usage(_("The {0} hash algorithm does not use a key or "
            "certificate. Do not use the -k or -c options with this "
            "algorithm.").format(sig_alg))

    if DebugValues:
        # Reload so any -D debug overrides take effect in the
        # digest module.
        reload(digest)

    errors = []

    # All scratch state lives under one temp root so the finally
    # clause can clean everything up with a single rmtree.
    t = misc.config_temp_root()
    temp_root = tempfile.mkdtemp(dir=t)
    del t

    cache_dir = tempfile.mkdtemp(dir=temp_root)
    incoming_dir = tempfile.mkdtemp(dir=temp_root)
    chash_dir = tempfile.mkdtemp(dir=temp_root)
    cert_dir = tempfile.mkdtemp(dir=temp_root)

    try:
        chain_certs = [
            __make_tmp_cert(cert_dir, c) for c in chain_certs
        ]
        if cert_path is not None:
            cert_path = __make_tmp_cert(cert_dir, cert_path)

        xport, xport_cfg = transport.setup_transport()
        xport_cfg.add_cache(cache_dir, readonly=False)
        xport_cfg.incoming_root = incoming_dir

        # Configure publisher(s)
        transport.setup_publisher(repo_uri, "source", xport,
            xport_cfg, remote_prefix=True)
        pats = pargs

        successful_publish = False
        concrete_fmris = []
        unmatched_pats = set(pats)
        all_pats = frozenset(pats)
        get_all_pubs = False
        pub_prefs = set()
        # Gather the publishers whose catalogs will be needed.
        for pat in pats:
            try:
                p_obj = fmri.MatchingPkgFmri(pat)
            except fmri.IllegalMatchingFmri as e:
                errors.append(e)
                continue
            pub_prefix = p_obj.get_publisher()
            if pub_prefix:
                pub_prefs.add(pub_prefix)
            else:
                # A pattern without a publisher forces us
                # to consult every publisher's catalog.
                get_all_pubs = True
        # Check each publisher for matches to our patterns.
        for p in xport_cfg.gen_publishers():
            if not get_all_pubs and p.prefix not in pub_prefs:
                continue
            cat = fetch_catalog(p, xport, temp_root)
            ms, tmp1, u = cat.get_matching_fmris(pats)
            # Find which patterns matched.
            matched_pats = all_pats - u
            # Remove those patterns from the unmatched set.
            unmatched_pats -= matched_pats
            for v_list in ms.values():
                concrete_fmris.extend([(v, p)
                    for v in v_list])

        if unmatched_pats:
            raise api_errors.PackageMatchErrors(
                unmatched_fmris=unmatched_pats)

        for pfmri, src_pub in sorted(set(concrete_fmris)):
            try:
                # Get the existing manifest for the package to
                # be signed.
                m_str = xport.get_manifest(pfmri,
                    content_only=True, pub=src_pub)
                m = manifest.Manifest()
                m.set_content(content=m_str)

                # Construct the base signature action.
                attrs = { "algorithm": sig_alg }
                a = actions.signature.SignatureAction(cert_path,
                    **attrs)
                a.hash = cert_path

                # Add the action to the manifest to be signed
                # since the action signs itself.
                m.add_action(a, misc.EmptyI)

                # Set the signature value and certificate
                # information for the signature action.
                a.set_signature(m.gen_actions(),
                    key_path=key_path, chain_paths=chain_certs,
                    chash_dir=chash_dir)

                # The hash of 'a' is currently a path, we need
                # to find the hash of that file to allow
                # comparison to existing signatures.
                hsh = None
                if cert_path:
                    # Action identity still uses the 'hash'
                    # member of the action, so we need to
                    # stay with the sha1 hash.
                    hsh, _dummy = \
                        misc.get_data_digest(cert_path,
                        hash_func=hashlib.sha1)

                # Check whether the signature about to be added
                # is identical, or almost identical, to existing
                # signatures on the package.  Because 'a' has
                # already been added to the manifest, it is
                # generated by gen_actions_by_type, so the cnt
                # must be 2 or higher to be an issue.
                cnt = 0
                almost_identical = False
                for a2 in m.gen_actions_by_type("signature"):
                    try:
                        if a.identical(a2, hsh):
                            cnt += 1
                    except api_errors.AlmostIdentical as e:
                        e.pkg = pfmri
                        errors.append(e)
                        almost_identical = True
                if almost_identical:
                    continue
                if cnt == 2:
                    # Exact duplicate already published;
                    # nothing to do for this package.
                    continue
                elif cnt > 2:
                    raise api_errors.DuplicateSignaturesAlreadyExist(pfmri)
                assert cnt == 1, "Cnt was:{0}".format(cnt)

                if not dry_run:
                    # Append the finished signature action
                    # to the published manifest.
                    t = trans.Transaction(repo_uri,
                        pkg_name=str(pfmri), xport=xport,
                        pub=src_pub)
                    try:
                        t.append()
                        t.add(a)
                        for c in chain_certs:
                            t.add_file(c)
                        t.close(add_to_catalog=
                            add_to_catalog)
                    except:
                        # Abandon the partially-built
                        # transaction before re-raising.
                        if t.trans_id:
                            t.close(abandon=True)
                        raise
                msg(_("Signed {0}").format(pfmri.get_fmri(
                    include_build=False)))
                successful_publish = True
            except (api_errors.ApiException, fmri.FmriError,
                trans.TransactionError) as e:
                errors.append(e)
        if errors:
            error("\n".join([str(e) for e in errors]))
            if successful_publish:
                return EXIT_PARTIAL
            else:
                return EXIT_OOPS
        return EXIT_OK
    except api_errors.ApiException as e:
        error(e)
        return EXIT_OOPS
    finally:
        shutil.rmtree(temp_root)
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors,
    warnings, info).  The error list will be empty if the
    action has been correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match."""

    if self.attrs.get("preserve") == "abandon":
        # Abandoned files are never verified.
        return [], [], []

    path = self.get_installed_path(img.get_root())

    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            # Exists but is not a regular file.
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    preserve = self.attrs.get("preserve")

    if (preserve is None and
        "timestamp" in self.attrs and lstat.st_mtime !=
        misc.timestamp_to_time(self.attrs["timestamp"])):
        errors.append(_("Timestamp: {found} should be "
            "{expected}").format(found=misc.time_to_timestamp(
            lstat.st_mtime), expected=self.attrs["timestamp"]))

    # avoid checking pkg.size if we have any content-hashes present;
    # different size files may have the same content-hash
    pkg_size = int(self.attrs.get("pkg.size", 0))
    if preserve is None and pkg_size > 0 and \
        not set(digest.DEFAULT_GELF_HASH_ATTRS).intersection(
        set(self.attrs.keys())) and \
        lstat.st_size != pkg_size:
        errors.append(_("Size: {found:d} bytes should be "
            "{expected:d}").format(found=lstat.st_size,
            expected=pkg_size))

    if (preserve is not None and args["verbose"] == False or
        lstat is None):
        return errors, warnings, info

    if args["forever"] != True:
        # Content checks below are only done for full verify.
        return errors, warnings, info

    #
    # Check file contents.
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        elf_hash_attr, elf_hash_val, \
            elf_hash_func = \
            digest.get_preferred_hash(self,
                hash_type=pkg.digest.HASH_GELF)
        if elf_hash_attr and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                # On path, only calculate the
                # content hash that matches
                # the preferred one on the
                # action
                get_elfhash = \
                    elf_hash_attr == "elfhash"
                get_sha256 = (not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha256"])
                get_sha512t_256 = (
                    not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha512t_256"])
                elfhash = elf.get_hashes(
                    path, elfhash=get_elfhash,
                    sha256=get_sha256,
                    sha512t_256=get_sha512t_256)[elf_hash_attr]

                if get_elfhash:
                    elfhash = [elfhash]
                else:
                    elfhash = list(digest.ContentHash(elfhash).values())
            except elf.ElfError as e:
                # Any ELF error means there is something bad
                # with the file, mark as needing to be replaced.
                elferror = _("ELF failure: {0}").format(e)

        if (elfhash is not None and
            elf_hash_val != elfhash[0]):
            elferror = _("ELF content hash: "
                "{found} "
                "should be {expected}").format(
                found=elfhash[0],
                expected=elf_hash_val)

        # Always check on the file hash because the ELF hash
        # check only checks on the ELF parts and does not
        # check for some other file integrity issues.
        if not is_mtpt:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            sha_hash, data = misc.get_data_digest(path,
                hash_func=hash_func)
            if sha_hash != hash_val:
                # Prefer the ELF content hash error message.
                if preserve is not None:
                    # Editable file changes are
                    # informational only.
                    info.append(_("editable file has "
                        "been changed"))
                elif elferror:
                    errors.append(elferror)
                    self.replace_required = True
                else:
                    errors.append(_("Hash: "
                        "{found} should be "
                        "{expected}").format(found=sha_hash,
                        expected=hash_val))
                    self.replace_required = True

        # Check system attributes.
        # Since some attributes like 'archive' or 'av_modified'
        # are set automatically by the FS, it makes no sense to
        # check for 1:1 matches. So we only check that the
        # system attributes specified in the action are still
        # set on the file.
        sattr = self.attrs.get("sysattr", None)
        if sattr:
            if isinstance(sattr, list):
                sattr = ",".join(sattr)
            sattrs = sattr.split(",")
            if len(sattrs) == 1 and \
                sattrs[0] not in portable.get_sysattr_dict():
                # not a verbose attr, try as a compact
                set_attrs = portable.fgetattr(path,
                    compact=True)
                sattrs = sattrs[0]
            else:
                set_attrs = portable.fgetattr(path)

            for a in sattrs:
                if a not in set_attrs:
                    errors.append(
                        _("System attribute '{0}' "
                        "not set").format(a))

    except EnvironmentError as e:
        if e.errno == errno.EACCES:
            errors.append(_("Skipping: Permission Denied"))
        else:
            errors.append(_("Unexpected Error: {0}").format(e))
    except Exception as e:
        errors.append(_("Unexpected Exception: {0}").format(e))

    return errors, warnings, info
def test_1_basics(self):
    """Test basic resurfacing operation.

    Covers error handling for missing/empty/package-less repos,
    then verifies reversioned manifests against an expected repo
    and exercises the -i (ignore attribute) option."""

    # Copy target repo to tmp repo
    self.copy_repository(self.dpath2, self.dpath_tmp,
        {"selachii": "selachii"})
    # The new repository won't have a catalog, so rebuild it.
    self.dcs[4].get_repo(auto_create=True).rebuild()

    # Check that empty repos get handled correctly
    tempdir = tempfile.mkdtemp(dir=self.test_root)
    # No repo at all
    self.pkgsurf("-s {0} -r {1}".format(tempdir, self.dpath1),
        exit=1)
    self.pkgsurf("-s {0} -r {1}".format(self.dpath1, tempdir),
        exit=1)

    # Repo empty
    self.pkgrepo("create -s {0}".format(tempdir))
    self.pkgsurf("-s {0} -r {1}".format(tempdir, self.dpath1),
        exit=1)
    self.pkgsurf("-s {0} -r {1}".format(self.dpath1, tempdir),
        exit=1)

    # No packages
    self.pkgrepo("add-publisher -s {0} selachii".format(tempdir))
    self.pkgsurf("-s {0} -r {1}".format(tempdir, self.dpath1))
    self.assertTrue("No packages to reversion." in self.output)
    self.pkgsurf("-s {0} -r {1}".format(self.dpath1, tempdir))
    self.assertTrue("No packages to reversion." in self.output)
    shutil.rmtree(tempdir)

    # Now check if it actually works.
    self.pkgsurf("-s {0} -r {1}".format(self.dpath_tmp,
        self.dpath1))

    ref_repo = self.get_repo(self.dpath1)
    targ_repo = self.get_repo(self.dpath_tmp)
    exp_repo = self.get_repo(self.dpath3)
    for s in self.published_exp:
        f = fmri.PkgFmri(s, None)
        targ = targ_repo.manifest(f)

        # Load target manifest
        targm = manifest.Manifest()
        targm.set_content(pathname=targ)

        # Load expected manifest
        exp = exp_repo.manifest(f)
        expm = manifest.Manifest()
        expm.set_content(pathname=exp)

        # Compare manifests; ta/ra are actions only present in
        # the target/expected manifest respectively.
        ta, ra, ca = manifest.Manifest.comm([targm, expm])
        self.debug("{0}: {1:d} {2:d}".format(str(s), len(ta),
            len(ra)))

        self.assertEqual(0, len(ta), "{0} had unexpected actions:"
            " \n{1}".format(s, "\n".join([str(x) for x in ta])))
        self.assertEqual(0, len(ra), "{0} had missing actions: "
            "\n{1}".format(s, "\n".join([str(x) for x in ra])))

    # Check that pkgsurf informed the user that there is a newer
    # version of a pkg in the ref repo.
    self.assertTrue("Packages with successors" in self.output)

    # Check that ignore option works.
    # Just run again and see if goblin pkg now gets reversioned.
    self.pkgsurf("-s {0} -r {1} -i info.home".format(
        self.dpath_tmp, self.dpath1))

    # Find goblin package
    # NOTE(review): relies on a "goblin" package being present in
    # published_ref; otherwise 's' is just the last entry.
    for s in self.published_ref:
        if "goblin" in s:
            break
    f = fmri.PkgFmri(s, None)
    targ = targ_repo.manifest(f)
    ref = ref_repo.manifest(f)

    self.assertEqual(
        misc.get_data_digest(targ,
            hash_func=digest.DEFAULT_HASH_FUNC),
        misc.get_data_digest(ref,
            hash_func=digest.DEFAULT_HASH_FUNC))

    # Check that running the tool again doesn't find any pkgs
    # to reversion. Use http for accessing reference repo this time.
    self.pkgsurf("-s {0} -r {1}".format(self.dpath_tmp,
        self.durl1))
    self.assertTrue("No packages to reversion." in self.output)
raise tx.InvalidContentException( path, "zlib.error:%s" % (" ".join([str(a) for a in e.args])), size=s.st_size ) ifile.close() ofile.close() if action.hash != fhash: s = os.stat(filepath) os.remove(filepath) raise tx.InvalidContentException( action.path, "hash failure: expected: %s" "computed: %s" % (action.hash, fhash), size=s.st_size ) return newhash = misc.get_data_digest(filepath)[0] if chash != newhash: s = os.stat(filepath) os.remove(filepath) raise tx.InvalidContentException( path, "chash failure: expected: %s computed: %s" % (chash, newhash), size=s.st_size ) class MultiFile(object): """A transport object for performing multi-file requests using pkg actions. This takes care of matching the publisher with the actions, and performs the download and content verification necessary to assure correct content installation.""" def __init__(self, pub, xport, progtrack, ccancel):
def verify(self, img, **args): """Returns a tuple of lists of the form (errors, warnings, info). The error list will be empty if the action has been correctly installed in the given image. In detail, this verifies that the file is present, and if the preserve attribute is not present, that the hashes and other attributes of the file match.""" path = os.path.normpath( os.path.sep.join((img.get_root(), self.attrs["path"]))) lstat, errors, warnings, info, abort = \ self.verify_fsobj_common(img, stat.S_IFREG) if lstat: if not stat.S_ISREG(lstat.st_mode): self.replace_required = True if abort: assert errors return errors, warnings, info if path.lower().endswith("/bobcat") and args["verbose"] == True: # Returned as a purely informational (untranslated) # message so that no client should interpret it as a # reason to fail verification. info.append("Warning: package may contain bobcat! " "(http://xkcd.com/325/)") if "preserve" not in self.attrs and \ "timestamp" in self.attrs and lstat.st_mtime != \ misc.timestamp_to_time(self.attrs["timestamp"]): errors.append( _("Timestamp: %(found)s should be " "%(expected)s") % { "found": misc.time_to_timestamp(lstat.st_mtime), "expected": self.attrs["timestamp"] }) # avoid checking pkg.size if elfhash present; # different size files may have the same elfhash if "preserve" not in self.attrs and \ "pkg.size" in self.attrs and \ "elfhash" not in self.attrs and \ lstat.st_size != int(self.attrs["pkg.size"]): errors.append( _("Size: %(found)d bytes should be " "%(expected)d") % { "found": lstat.st_size, "expected": int(self.attrs["pkg.size"]) }) if "preserve" in self.attrs: return errors, warnings, info if args["forever"] != True: return errors, warnings, info # # Check file contents # try: elfhash = None elferror = None if "elfhash" in self.attrs and haveelf: # # It's possible for the elf module to # throw while computing the hash, # especially if the file is badly # corrupted or truncated. 
# try: elfhash = elf.get_dynamic(path)["hash"] except RuntimeError, e: errors.append("Elfhash: %s" % e) if elfhash is not None and \ elfhash != self.attrs["elfhash"]: elferror = _("Elfhash: %(found)s " "should be %(expected)s") % { "found": elfhash, "expected": self.attrs["elfhash"] } # If we failed to compute the content hash, or the # content hash failed to verify, try the file hash. # If the content hash fails to match but the file hash # matches, it indicates that the content hash algorithm # changed, since obviously the file hash is a superset # of the content hash. if elfhash is None or elferror: hashvalue, data = misc.get_data_digest(path) if hashvalue != self.hash: # Prefer the content hash error message. if elferror: errors.append(elferror) else: errors.append( _("Hash: " "%(found)s should be " "%(expected)s") % { "found": hashvalue, "expected": self.hash }) self.replace_required = True
#Include the files and sub-dir in excludedir #to excludefiles and excludedir for root, dirs, files in os.walk(os.path.join(base, d)): reldir = root[len(base) + 1:] for name in files: excludefiles.append( os.path.normpath(os.path.join(reldir, name))) for name in dirs: excludedirs.append( os.path.normpath(os.path.join(reldir, name))) if p.has_key("attributes"): for an, av in p["attributes"].iteritems(): al = ['name=' + an, 'value=' + av] if (an == pkg_attributes[0]): hash, cdata = get_data_digest(os.path.join(base, av)) al = ['name=' + an, 'value=' + hash] action = pkg.actions.fromlist("set", al) t.add(action) if (an == pkg_attributes[0]): addfile(av, t, base, None, opsys) if p.has_key("depends"): for fmri, attributes in p["depends"].iteritems(): dl = ['type=' + attributes["type"], 'fmri=' + fmri] action = pkg.actions.fromlist("depend", dl) t.add(action) if p.has_key("files"): for path, attributes in p["files"].iteritems(): addfile(path, t, base, attributes, opsys)
try: action.validate() except actions.ActionError, e: raise TransactionOperationError(e) if self.append_trans and action.name != "signature": raise TransactionOperationError(non_sig=True) size = int(action.attrs.get("pkg.size", 0)) if action.has_payload and size <= 0: # XXX hack for empty files action.data = lambda: open(os.devnull, "rb") if action.data is not None: fname, data = misc.get_data_digest(action.data(), length=size, return_content=True) action.hash = fname # Extract ELF information # XXX This needs to be modularized. if haveelf and data[:4] == "\x7fELF": elf_name = os.path.join(self.dir, ".temp-%s" % fname) elf_file = open(elf_name, "wb") elf_file.write(data) elf_file.close() try: elf_info = elf.get_info(elf_name) except elf.ElfError, e:
def verify(self, img, **args): """ verify that file is present and if preserve attribute not present, that hashes match""" path = os.path.normpath( os.path.sep.join((img.get_root(), self.attrs["path"]))) lstat, errors, abort = \ self.verify_fsobj_common(img, stat.S_IFREG) if lstat: if not stat.S_ISREG(lstat.st_mode): self.replace_required = True if abort: assert errors return errors if path.lower().endswith("/cat") and args["verbose"] == True: errors.append("Warning: package may contain bobcat! " "(http://xkcd.com/325/)") if "timestamp" in self.attrs and lstat.st_mtime != \ misc.timestamp_to_time(self.attrs["timestamp"]): errors.append("Timestamp: %s should be %s" % (misc.time_to_timestamp( lstat.st_mtime), self.attrs["timestamp"])) # avoid checking pkg.size if elfhash present; # different size files may have the same elfhash if "preserve" not in self.attrs and \ "pkg.size" in self.attrs and \ "elfhash" not in self.attrs and \ lstat.st_size != int(self.attrs["pkg.size"]): errors.append("Size: %d bytes should be %d" % \ (lstat.st_size, int(self.attrs["pkg.size"]))) if "preserve" in self.attrs: return errors if args["forever"] != True: return errors # # Check file contents # try: elfhash = None elferror = None if "elfhash" in self.attrs and haveelf: # # It's possible for the elf module to # throw while computing the hash, # especially if the file is badly # corrupted or truncated. # try: elfhash = elf.get_dynamic(path)["hash"] except RuntimeError, e: errors.append("Elfhash: %s" % e) if elfhash is not None and \ elfhash != self.attrs["elfhash"]: elferror = "Elfhash: %s should be %s" % \ (elfhash, self.attrs["elfhash"]) # If we failed to compute the content hash, or the # content hash failed to verify, try the file hash. # If the content hash fails to match but the file hash # matches, it indicates that the content hash algorithm # changed, since obviously the file hash is a superset # of the content hash. 
if elfhash is None or elferror: hashvalue, data = misc.get_data_digest(path) if hashvalue != self.hash: # Prefer the content hash error message. if elferror: errors.append(elferror) else: errors.append("Hash: %s should be %s" % \ (hashvalue, self.hash)) self.replace_required = True
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors,
    warnings, info).  The error list will be empty if the
    action has been correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match."""

    if self.attrs.get("preserve") == "abandon":
        # Abandoned files are never verified.
        return [], [], []

    path = self.get_installed_path(img.get_root())

    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            # Exists but is not a regular file.
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    if "preserve" not in self.attrs and \
        "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append(_("Timestamp: {found} should be "
            "{expected}").format(
            found=misc.time_to_timestamp(lstat.st_mtime),
            expected=self.attrs["timestamp"]))

    # avoid checking pkg.size if we have any content-hashes present;
    # different size files may have the same content-hash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        not set(digest.RANKED_CONTENT_HASH_ATTRS).intersection(
        set(self.attrs.keys())) and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append(_("Size: {found:d} bytes should be "
            "{expected:d}").format(found=lstat.st_size,
            expected=int(self.attrs["pkg.size"])))

    if "preserve" in self.attrs:
        if args["verbose"] == False or lstat is None:
            return errors, warnings, info

    if args["forever"] != True:
        # Content checks below are only done for full verify.
        return errors, warnings, info

    #
    # Check file contents.  At the moment, the only content-hash
    # supported in pkg(5) is for ELF files, so this will need work
    # when additional content-hashes are added.
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        ehash_attr, elfhash_val, hash_func = \
            digest.get_preferred_hash(self,
                hash_type=pkg.digest.CONTENT_HASH)
        if ehash_attr and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                # Annoying that we have to hardcode this
                if ehash_attr == \
                    "pkg.content-hash.sha256":
                    get_sha256 = True
                    get_sha1 = False
                else:
                    get_sha256 = False
                    get_sha1 = True
                elfhash = elf.get_dynamic(path,
                    sha1=get_sha1,
                    sha256=get_sha256)[ehash_attr]
            except RuntimeError as e:
                errors.append(
                    "ELF content hash: {0}".format(e))

        if elfhash is not None and \
            elfhash != elfhash_val:
            elferror = _("ELF content hash: "
                "{found} "
                "should be {expected}").format(
                found=elfhash,
                expected=elfhash_val)

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if (elfhash is None or elferror) and not is_mtpt:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            sha_hash, data = misc.get_data_digest(path,
                hash_func=hash_func)
            if sha_hash != hash_val:
                # Prefer the content hash error message.
                if "preserve" in self.attrs:
                    # Editable file changes are
                    # informational only.
                    info.append(_(
                        "editable file has "
                        "been changed"))
                elif elferror:
                    errors.append(elferror)
                    self.replace_required = True
                else:
                    errors.append(_("Hash: "
                        "{found} should be "
                        "{expected}").format(
                        found=sha_hash,
                        expected=hash_val))
                    self.replace_required = True

        # Check system attributes.
        # Since some attributes like 'archive' or 'av_modified'
        # are set automatically by the FS, it makes no sense to
        # check for 1:1 matches. So we only check that the
        # system attributes specified in the action are still
        # set on the file.
        sattr = self.attrs.get("sysattr", None)
        if sattr:
            sattrs = sattr.split(",")
            if len(sattrs) == 1 and \
                sattrs[0] not in portable.get_sysattr_dict():
                # not a verbose attr, try as a compact
                set_attrs = portable.fgetattr(path,
                    compact=True)
                sattrs = sattrs[0]
            else:
                set_attrs = portable.fgetattr(path)

            for a in sattrs:
                if a not in set_attrs:
                    errors.append(
                        _("System attribute '{0}' "
                        "not set").format(a))

    except EnvironmentError as e:
        if e.errno == errno.EACCES:
            errors.append(_("Skipping: Permission Denied"))
        else:
            errors.append(_("Unexpected Error: {0}").format(
                e))
    except Exception as e:
        errors.append(_("Unexpected Exception: {0}").format(e))

    return errors, warnings, info
def test_2_recv_compare(self):
    """Verify that a received package is identical to the
    original source.

    The package is received three ways (--raw, via a file
    repository, via an http repository); after each hop, the
    manifest and every payload-bearing action are compared to
    the source repository's internal copies.

    Fix: manifest files are now read via 'with open(...)' so the
    handles are closed deterministically instead of leaking until
    garbage collection."""

    f = fmri.PkgFmri(self.published[4], None)

    # First, pkgrecv the pkg to a directory.  The files are
    # kept compressed so they can be compared directly to the
    # repository's internal copy.
    self.pkgrecv(self.durl1, "--raw -k -d %s %s" % (self.tempdir,
        f))

    # Next, compare the manifests.
    orepo = self.get_repo(self.dpath1)
    old = orepo.manifest(f)
    new = os.path.join(self.tempdir, f.get_dir_path(), "manifest")

    self.assertEqual(misc.get_data_digest(old),
        misc.get_data_digest(new))

    # Next, load the manifest.
    m = manifest.Manifest()
    with open(new, "rb") as mfile:
        raw = mfile.read()
    m.set_content(raw)

    # Next, compare the package actions that have data.
    for atype in ("file", "license"):
        for a in m.gen_actions_by_type(atype):
            if not hasattr(a, "hash"):
                continue

            old = orepo.file(a.hash)
            new = os.path.join(self.tempdir,
                f.get_dir_path(), a.hash)
            self.assertNotEqual(old, new)
            self.assertEqual(misc.get_data_digest(old),
                misc.get_data_digest(new))

    # Second, pkgrecv to the pkg to a file repository.
    npath = tempfile.mkdtemp(dir=self.test_root)
    self.pkgsend("file://%s" % npath,
        "create-repository --set-property publisher.prefix=test1")
    self.pkgrecv(self.durl1, "-d file://%s %s" % (npath, f))

    # Next, compare the manifests (this will also only succeed if
    # the fmris are exactly the same including timestamp).
    nrepo = self.get_repo(npath)
    old = orepo.manifest(f)
    new = nrepo.manifest(f)

    self.debug(old)
    self.debug(new)
    self.assertEqual(misc.get_data_digest(old),
        misc.get_data_digest(new))

    # Next, load the manifest.
    m = manifest.Manifest()
    with open(new, "rb") as mfile:
        raw = mfile.read()
    m.set_content(raw)

    # Next, compare the package actions that have data.
    for atype in ("file", "license"):
        for a in m.gen_actions_by_type(atype):
            if not hasattr(a, "hash"):
                continue

            old = orepo.file(a.hash)
            new = nrepo.file(a.hash)
            self.assertNotEqual(old, new)
            self.assertEqual(misc.get_data_digest(old),
                misc.get_data_digest(new))

    # Third, pkgrecv to the pkg to a http repository from the
    # file repository from the last test.
    self.pkgrecv("file://%s" % npath, "-d %s %s" % (self.durl2, f))
    orepo = nrepo

    # Next, compare the manifests (this will also only succeed if
    # the fmris are exactly the same including timestamp).
    nrepo = self.get_repo(self.dpath2)
    old = orepo.manifest(f)
    new = nrepo.manifest(f)

    self.assertEqual(misc.get_data_digest(old),
        misc.get_data_digest(new))

    # Next, load the manifest.
    m = manifest.Manifest()
    with open(new, "rb") as mfile:
        raw = mfile.read()
    m.set_content(raw)

    # Next, compare the package actions that have data.
    for atype in ("file", "license"):
        for a in m.gen_actions_by_type(atype):
            if not hasattr(a, "hash"):
                continue

            old = orepo.file(a.hash)
            new = nrepo.file(a.hash)
            self.assertNotEqual(old, new)
            self.assertEqual(misc.get_data_digest(old),
                misc.get_data_digest(new))

    # Fourth, create an image and verify that the sent package is
    # seen by the client.
    self.wait_repo(self.dpath2)
    self.image_create(self.durl2, prefix="test1")
    self.pkg("info -r [email protected]")

    # Fifth, pkgrecv the pkg to a file repository and compare the
    # manifest of a package published with the scheme (pkg:/) given.
    f = fmri.PkgFmri(self.published[6], None)
    npath = tempfile.mkdtemp(dir=self.test_root)
    self.pkgsend("file://%s" % npath,
        "create-repository --set-property publisher.prefix=test1")
    self.pkgrecv(self.durl1, "-d file://%s %s" % (npath, f))

    # Next, compare the manifests (this will also only succeed if
    # the fmris are exactly the same including timestamp).
    orepo = self.get_repo(self.dpath1)
    nrepo = self.get_repo(npath)
    old = orepo.manifest(f)
    new = nrepo.manifest(f)

    self.assertEqual(misc.get_data_digest(old),
        misc.get_data_digest(new))
try: # Make file writable so it can be deleted. os.chmod(path, stat.S_IWRITE|stat.S_IREAD) except OSError, e: if e.errno == errno.ENOENT: # Already gone; don't care. return raise if not pkgplan.destination_fmri and \ self.attrs.get("preserve", "false").lower() != "false": # Preserved files are salvaged if they have been # modified since they were installed and this is # not an upgrade. try: ihash, cdata = misc.get_data_digest(path) if ihash != self.hash: pkgplan.salvage(path) # Nothing more to do. return except EnvironmentError, e: if e.errno == errno.ENOENT: # Already gone; don't care. return raise # Attempt to remove the file. self.remove_fsobj(pkgplan, path) def different(self, other, cmp_hash=True): # Override the generic different() method to ignore the file
def _check_preserve(self, orig, pkgplan, orig_path=None): """Return the type of preservation needed for this action. Returns None if preservation is not defined by the action. Returns False if it is, but no preservation is necessary. Returns True for the normal preservation form. Returns one of the strings 'renameold', 'renameold.update', 'renamenew', 'legacy', or 'abandon' for each of the respective forms of preservation. """ # If the logic in this function ever changes, all callers will # need to be updated to reflect how they interpret return # values. try: pres_type = self.attrs["preserve"] except KeyError: return # Should ultimately be conditioned on file type if "elfhash" in self.attrs: # Don't allow preserve logic to be applied to elf files; # if we ever stop tagging elf binaries with this # attribute, this will need to be updated. return if pres_type == "abandon": return pres_type final_path = self.get_installed_path(pkgplan.image.get_root()) # 'legacy' preservation is very different than other forms of # preservation as it doesn't account for the on-disk state of # the action's payload. if pres_type == "legacy": if not orig: # This is an initial install or a repair, so # there's nothing to deliver. return True return pres_type # If action has been marked with a preserve attribute, the # hash of the preserved file has changed between versions, # and the package being installed is older than the package # that was installed, and the version on disk is different # than the installed package's original version, then preserve # the installed file by renaming it. # # If pkgplan.origin_fmri isn't set, but there is an orig action, # then this file is moving between packages and it can't be # a downgrade since that isn't allowed across rename or obsolete # boundaries. 
is_file = os.path.isfile(final_path) # 'install-only' preservation has very specific semantics as # well; if there's an 'orig' or this is an initial install and # the file exists, we should not modify the file content. if pres_type == "install-only": if orig or is_file: return True return False changed_hash = False if orig: # We must use the same hash algorithm when comparing old # and new actions. Look for the most-preferred common # hash between old and new. Since the two actions may # not share a common hash (in which case, we get a tuple # of 'None' objects) we also need to know the preferred # hash to use when examining the old action on its own. common_hash_attr, common_hash_val, \ common_orig_hash_val, common_hash_func = \ digest.get_common_preferred_hash(self, orig) hattr, orig_hash_val, orig_hash_func = \ digest.get_preferred_hash(orig) if common_orig_hash_val and common_hash_val: changed_hash = common_hash_val != common_orig_hash_val else: # we don't have a common hash, so we must treat # this as a changed action changed_hash = True if pkgplan.destination_fmri and \ changed_hash and \ pkgplan.origin_fmri and \ pkgplan.destination_fmri.version < pkgplan.origin_fmri.version: # Installed, preserved file is for a package # newer than what will be installed. So check if # the version on disk is different than what # was originally delivered, and if so, preserve # it. if not is_file: return False preserve_version = self.__check_preserve_version(orig) if not preserve_version: return False ihash, cdata = misc.get_data_digest(final_path, hash_func=orig_hash_func) if ihash != orig_hash_val: return preserve_version return True if (orig and orig_path): # Comparison will be based on a file being moved. 
is_file = os.path.isfile(orig_path) # If the action has been marked with a preserve attribute, and # the file exists and has a content hash different from what the # system expected it to be, then we preserve the original file # in some way, depending on the value of preserve. if is_file: # if we had an action installed, then we know what hash # function was used to compute it's hash attribute. if orig: if not orig_path: orig_path = final_path chash, cdata = misc.get_data_digest(orig_path, hash_func=orig_hash_func) if not orig or chash != orig_hash_val: if pres_type in ("renameold", "renamenew"): return pres_type return True elif not changed_hash and chash == orig_hash_val: # If packaged content has not changed since last # version and on-disk content matches the last # version, preserve on-disk file. return True return False
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match.

    Keyword args read here: 'verbose' and 'forever' (booleans
    controlling how deep the check goes).

    NOTE(review): this fragment ends inside the trailing 'try'
    block; its 'except' handler lies beyond this chunk.
    """

    path = os.path.normpath(os.path.sep.join(
        (img.get_root(), self.attrs["path"])))

    # Shared stat/ownership/mode checks for filesystem objects;
    # 'abort' signals that further verification is pointless.
    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            # Wrong object type on disk; must be re-delivered.
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    # Editable ('preserve') files legitimately drift from their
    # packaged timestamp, so skip the timestamp check for them.
    if "preserve" not in self.attrs and \
        "timestamp" in self.attrs and lstat.st_mtime != \
        misc.timestamp_to_time(self.attrs["timestamp"]):
        errors.append(_("Timestamp: %(found)s should be "
            "%(expected)s") % {
            "found": misc.time_to_timestamp(lstat.st_mtime),
            "expected": self.attrs["timestamp"] })

    # avoid checking pkg.size if elfhash present;
    # different size files may have the same elfhash
    if "preserve" not in self.attrs and \
        "pkg.size" in self.attrs and \
        "elfhash" not in self.attrs and \
        lstat.st_size != int(self.attrs["pkg.size"]):
        errors.append(_("Size: %(found)d bytes should be "
            "%(expected)d") % {
            "found": lstat.st_size,
            "expected": int(self.attrs["pkg.size"]) })

    if "preserve" in self.attrs:
        # Preserved files are only reported on when explicitly
        # requested, and only if something exists on disk.
        if args["verbose"] == False or lstat is None:
            return errors, warnings, info

    if args["forever"] != True:
        # Content checks below are expensive; only run them for
        # full ('forever') verification.
        return errors, warnings, info

    #
    # Check file contents
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        if "elfhash" in self.attrs and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                elfhash = elf.get_dynamic(path)["hash"]
            except RuntimeError, e:
                errors.append("Elfhash: %s" % e)

            if elfhash is not None and \
                elfhash != self.attrs["elfhash"]:
                elferror = _("Elfhash: %(found)s "
                    "should be %(expected)s") % {
                    "found": elfhash,
                    "expected": self.attrs["elfhash"] }

        # If we failed to compute the content hash, or the
        # content hash failed to verify, try the file hash.
        # If the content hash fails to match but the file hash
        # matches, it indicates that the content hash algorithm
        # changed, since obviously the file hash is a superset
        # of the content hash.
        if (elfhash is None or elferror) and not is_mtpt:
            hashvalue, data = misc.get_data_digest(path)
            if hashvalue != self.hash:
                # Prefer the content hash error message.
                if "preserve" in self.attrs:
                    info.append(_(
                        "editable file has"
                        " been changed"))
                elif elferror:
                    errors.append(elferror)
                else:
                    errors.append(_("Hash: "
                        "%(found)s should be "
                        "%(expected)s") % {
                        "found": hashvalue,
                        "expected": self.hash })
                self.replace_required = True
try: action.validate() except actions.ActionError, e: raise TransactionOperationError(e) size = int(action.attrs.get("pkg.size", 0)) if action.name in ("file", "license") and size <= 0: # XXX hack for empty files action.data = lambda: open(os.devnull, "rb") if action.data is not None: bufsz = 64 * 1024 fname, data = misc.get_data_digest(action.data(), length=size, return_content=True) action.hash = fname # Extract ELF information # XXX This needs to be modularized. if haveelf and data[:4] == "\x7fELF": elf_name = "%s/.temp" % self.dir elf_file = open(elf_name, "wb") elf_file.write(data) elf_file.close() try: elf_info = elf.get_info(elf_name) except elf.ElfError, e:
def __check_preserve(self, orig, pkgplan):
    """Work out which preservation treatment this action needs.

    Returns None when the action carries no 'preserve' attribute.
    Returns False when preservation is defined but nothing needs
    preserving.  Returns True for the ordinary preservation form,
    and one of the strings 'renameold', 'renameold.update',
    'renamenew', or 'legacy' for the named variants.
    """

    if "preserve" not in self.attrs:
        return None
    pres_type = self.attrs["preserve"]

    final_path = os.path.normpath(os.path.sep.join(
        (pkgplan.image.get_root(), self.attrs["path"])))

    # 'legacy' preservation ignores the on-disk state of the
    # payload entirely: deliver on an initial install or repair,
    # otherwise just report the preservation form.
    if pres_type == "legacy":
        return pres_type if orig else True

    on_disk = os.path.isfile(final_path)

    # Downgrade case: the preserved file's hash changed between
    # versions and the package being installed is older than the
    # one installed.  (If pkgplan.origin_fmri is unset but 'orig'
    # exists, the file is moving between packages and cannot be a
    # downgrade, since downgrades aren't allowed across rename or
    # obsolete boundaries.)
    if orig and pkgplan.destination_fmri and \
        self.hash != orig.hash and \
        pkgplan.origin_fmri and \
        pkgplan.destination_fmri.version < pkgplan.origin_fmri.version:
        if on_disk:
            ihash, cdata = misc.get_data_digest(final_path)
            if ihash != orig.hash:
                # The on-disk copy differs from what was
                # originally delivered, so keep it.  '.old'
                # is intentionally avoided here to prevent
                # accidental collisions with the normal
                # install process.
                return "renameold.update"
        return False

    # Otherwise, preserve the existing file when its content no
    # longer matches what the system expected.  Overlay actions
    # always overwrite and never trigger preservation.
    if on_disk and self.attrs.get("overlay") != "true":
        chash, cdata = misc.get_data_digest(final_path)
        if not orig or chash != orig.hash:
            if pres_type in ("renameold", "renamenew"):
                return pres_type
            return True

    return False
def sig_str(self, a, version):
    """Create a stable string representation of an action that
    is deterministic in its creation.

    If creating a string from an action is non-deterministic, then
    manifest signing cannot work.

    The parameter 'a' is the signature action that's going to use
    the string produced.  It's needed for the signature string
    action, and is here to keep the method signature the same.
    """

    # Any changes to this function mean Action.sig_version must be
    # incremented.

    if version != generic.Action.sig_version:
        raise apx.UnsupportedSignatureVersion(version, sig=self)
    # Signature actions don't sign other signature actions.  So if
    # the action that's doing the signing isn't ourself, return
    # nothing.
    if str(a) != str(self):
        return None
    # It's necessary to sign the action as the client will see it,
    # post publication.  To do that, it's necessary to simulate the
    # publication process on a copy of the action, converting
    # paths to hashes and adding size information.
    tmp_a = SignatureAction(None, **self.attrs)
    # The signature action can't sign the value of the value
    # attribute, but it can sign that attribute's name.
    tmp_a.attrs["value"] = ""

    if hasattr(self.data, "__call__"):
        # The payload is still a callable opener (i.e. this is a
        # pre-publication action), so hash it the way publication
        # would.
        size = int(self.attrs.get("pkg.size", 0))
        tmp_dir = tempfile.mkdtemp()
        with self.data() as fh:
            hashes, data = misc.get_data_digest(
                fh, size, return_content=True,
                hash_attrs=digest.DEFAULT_HASH_ATTRS,
                hash_algs=digest.HASH_ALGS)
            tmp_a.attrs.update(hashes)
            # "hash" is special since it shouldn't appear in
            # the action attributes, it gets set as a member
            # instead.
            if "hash" in tmp_a.attrs:
                tmp_a.hash = tmp_a.attrs["hash"]
                del tmp_a.attrs["hash"]

        # The use of self.hash here is just to point to a
        # filename, the type of hash used for self.hash is
        # irrelevant.  Note that our use of self.hash for the
        # basename will need to be modified when we finally move
        # off SHA-1 hashes.
        csize, chashes = misc.compute_compressed_attrs(
            os.path.basename(self.hash), self.hash, data, size,
            tmp_dir)
        shutil.rmtree(tmp_dir)
        tmp_a.attrs["pkg.csize"] = csize
        for attr in chashes:
            tmp_a.attrs[attr] = chashes[attr]
    elif self.hash:
        # Already-published action: copy hash member and known
        # hash attributes straight across.
        tmp_a.hash = self.hash
        for attr in digest.DEFAULT_HASH_ATTRS:
            if attr in self.attrs:
                tmp_a.attrs[attr] = self.attrs[attr]

    # Simulate publication of the certificate chain as well.
    # NOTE(review): 'csizes' and 'chain_chashes' are accumulated
    # below but never folded into tmp_a here; presumably the
    # corresponding values already exist in self.attrs (copied into
    # tmp_a at construction) — confirm before relying on this.
    csizes = []
    chain_hashes = {}
    chain_chashes = {}
    for attr in digest.DEFAULT_CHAIN_ATTRS:
        chain_hashes[attr] = []
    for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
        chain_chashes[attr] = []

    sizes = self.attrs.get("chain.sizes", "").split()
    for i, c in enumerate(self.chain_cert_openers):
        size = int(sizes[i])
        tmp_dir = tempfile.mkdtemp()
        hshes, data = misc.get_data_digest(
            c(), size, return_content=True,
            hash_attrs=digest.DEFAULT_CHAIN_ATTRS,
            hash_algs=digest.CHAIN_ALGS)
        for attr in hshes:
            chain_hashes[attr].append(hshes[attr])
        csize, chashes = misc.compute_compressed_attrs(
            "tmp", None, data, size, tmp_dir,
            chash_attrs=digest.DEFAULT_CHAIN_CHASH_ATTRS,
            chash_algs=digest.CHAIN_CHASH_ALGS)
        shutil.rmtree(tmp_dir)
        csizes.append(csize)
        for attr in chashes:
            chain_chashes[attr].append(chashes[attr])

    if chain_hashes:
        for attr in digest.DEFAULT_CHAIN_ATTRS:
            if chain_hashes[attr]:
                tmp_a.attrs[attr] = " ".join(chain_hashes[attr])

    # Now that tmp_a looks like the post-published action, transform
    # it into a string using the generic sig_str method.
    return generic.Action.sig_str(tmp_a, tmp_a, version)
size=s.st_size) ifile.close() ofile.close() if action.hash != fhash: s = os.stat(filepath) os.remove(filepath) raise tx.InvalidContentException(action.path, "hash failure: expected: %s" "computed: %s" % (action.hash, fhash), size=s.st_size) return newhash = misc.get_data_digest(filepath)[0] if chash != newhash: s = os.stat(filepath) os.remove(filepath) raise tx.InvalidContentException(path, "chash failure: expected: %s computed: %s" % \ (chash, newhash), size=s.st_size) class MultiFile(object): """A transport object for performing multi-file requests using pkg actions. This takes care of matching the publisher with the actions, and performs the download and content verification necessary to assure correct content installation.""" def __init__(self, pub, xport, progtrack, ccancel): """Supply the destination publisher in the pub argument.
# since the action signs itself. m.add_action(a, misc.EmptyI) # Set the signature value and certificate # information for the signature action. a.set_signature(m.gen_actions(), key_path=key_path, chain_paths=chain_certs, chash_dir=chash_dir) # The hash of 'a' is currently a path, we need # to find the hash of that file to allow # comparison to existing signatures. hsh = None if cert_path: hsh, _dummy = \ misc.get_data_digest(cert_path) # Check whether the signature about to be added # is identical, or almost identical, to existing # signatures on the package. Because 'a' has # already been added to the manifest, it is # generated by gen_actions_by_type, so the cnt # must be 2 or higher to be an issue. cnt = 0 almost_identical = False for a2 in m.gen_actions_by_type("signature"): try: if a.identical(a2, hsh): cnt += 1 except api_errors.AlmostIdentical, e: e.pkg = pfmri
def add_content(self, action):
    """Adds the content of the provided action (if applicable) to
    the Transaction.

    Computes the payload hash (which becomes action.hash), extracts
    ELF metadata for ELF payloads, writes a deterministic gzip copy
    of the content into the transaction directory (unless an
    identical file already exists in the file store), and records
    'pkg.csize' and 'chash' attributes on the action.

    NOTE(review): Python 2 code ('except ..., e' syntax, 'sha'
    module, integer '/'); this fragment also appears to end before
    the method does — confirm against the full file.
    """

    size = int(action.attrs.get("pkg.size", 0))

    if action.name in ("file", "license") and size <= 0:
        # XXX hack for empty files
        action.data = lambda: open(os.devnull, "rb")

    if action.data is not None:
        bufsz = 64 * 1024

        fname, data = misc.get_data_digest(action.data(),
            length=size, return_content=True)

        action.hash = fname

        # Extract ELF information
        # XXX This needs to be modularized.
        if haveelf and data[:4] == "\x7fELF":
            elf_name = "%s/.temp" % self.dir
            elf_file = open(elf_name, "wb")
            elf_file.write(data)
            elf_file.close()

            try:
                elf_info = elf.get_info(elf_name)
            except elf.ElfError, e:
                raise TransactionContentError(e)

            try:
                elf_hash = elf.get_dynamic(
                    elf_name)["hash"]
                action.attrs["elfhash"] = elf_hash
            except elf.ElfError:
                # Not all ELF objects have a hashable
                # dynamic section; skip silently.
                pass
            action.attrs["elfbits"] = str(elf_info["bits"])
            action.attrs["elfarch"] = elf_info["arch"]
            os.unlink(elf_name)

        #
        # This check prevents entering into the depot store
        # a file which is already there in the store.
        # This takes CPU load off the depot on large imports
        # of mostly-the-same stuff.  And in general it saves
        # disk bandwidth, and on ZFS in particular it saves
        # us space in differential snapshots.  We also need
        # to check that the destination is in the same
        # compression format as the source, as we must have
        # properly formed files for chash/csize properties
        # to work right.
        #
        fpath = misc.hash_file_name(fname)
        dst_path = "%s/%s" % (self.cfg.file_root, fpath)
        fileneeded = True
        if os.path.exists(dst_path):
            if PkgGzipFile.test_is_pkggzipfile(dst_path):
                fileneeded = False
                opath = dst_path

        if fileneeded:
            opath = os.path.join(self.dir, fname)
            ofile = PkgGzipFile(opath, "wb")

            # Write the payload in bufsz-sized slices;
            # Python 2 integer division gives the count of
            # whole buffers, the remainder follows.
            nbuf = size / bufsz

            for n in range(0, nbuf):
                l = n * bufsz
                h = (n + 1) * bufsz
                ofile.write(data[l:h])

            m = nbuf * bufsz
            ofile.write(data[m:])
            ofile.close()

        # Allow the (possibly large) payload to be collected.
        data = None

        # Now that the file has been compressed, determine its
        # size and store that as an attribute in the manifest
        # for the file.
        fs = os.stat(opath)
        action.attrs["pkg.csize"] = str(fs.st_size)

        # Compute the SHA hash of the compressed file.
        # Store this as the chash attribute of the file's
        # action.  In order for this to work correctly, we
        # have to use the PkgGzipFile class.  It omits
        # filename and timestamp information from the gzip
        # header, allowing us to generate deterministic
        # hashes for different files with identical content.
        cfile = open(opath, "rb")
        chash = sha.new()
        while True:
            cdata = cfile.read(bufsz)
            if cdata == "":
                break
            chash.update(cdata)
        cfile.close()
        action.attrs["chash"] = chash.hexdigest()
        cdata = None
def __set_chain_certs_data(self, chain_certs, chash_dir):
    """Store the information about the certs needed to validate
    this signature in the signature.

    The 'chain_certs' parameter is a list of paths to certificates;
    'chash_dir' is the directory the compressed copies of the chain
    certs are written into (by misc.compute_compressed_attrs).

    Raises pkg.actions.ActionDataError if a chain path is missing,
    is a directory, or cannot be stat'd.
    """

    self.chain_cert_openers = []

    # chain_hshes and chain_chshes are dictionaries which map a
    # given hash or compressed hash attribute to a list of the hash
    # values for each path in chain_certs.
    chain_hshes = {}
    chain_chshes = {}
    chain_csizes = []
    chain_sizes = []

    for attr in digest.DEFAULT_CHAIN_ATTRS:
        chain_hshes[attr] = []
    for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
        chain_chshes[attr] = []

    for pth in chain_certs:
        if not os.path.exists(pth):
            raise pkg.actions.ActionDataError(
                _("No such file: '{0}'.").format(pth), path=pth)
        elif os.path.isdir(pth):
            raise pkg.actions.ActionDataError(
                _("'{0}' is not a file.").format(pth), path=pth)
        file_opener = self.make_opener(pth)
        self.chain_cert_openers.append(file_opener)
        # Only establish the size attributes once at least one
        # chain cert is actually present (preserves behavior for
        # an empty chain_certs list).
        self.attrs.setdefault("chain.sizes", [])
        self.attrs.setdefault("chain.csizes", [])
        try:
            fs = os.stat(pth)
            chain_sizes.append(str(fs.st_size))
        except EnvironmentError as e:
            raise pkg.actions.ActionDataError(e, path=pth)
        # misc.get_data_digest takes care of closing the file
        # that's opened below.
        with file_opener() as fh:
            hshes, data = misc.get_data_digest(
                fh, length=fs.st_size, return_content=True,
                hash_attrs=digest.DEFAULT_CHAIN_ATTRS,
                hash_algs=digest.CHAIN_ALGS)

        for attr in hshes:
            chain_hshes[attr].append(hshes[attr])

        # We need a filename to use for the uncompressed chain
        # cert, so get the preferred chain hash value from the
        # chain_hshes.
        alg = digest.PREFERRED_HASH
        if alg == "sha1":
            attr = "chain"
        else:
            attr = "pkg.chain.{0}".format(alg)
        chain_val = hshes.get(attr)

        csize, chashes = misc.compute_compressed_attrs(
            chain_val, None, data, fs.st_size, chash_dir,
            chash_attrs=digest.DEFAULT_CHAIN_CHASH_ATTRS,
            chash_algs=digest.CHAIN_CHASH_ALGS)

        chain_csizes.append(csize)
        for attr in chashes:
            chain_chshes[attr].append(chashes[attr])

    # Remove any unused hash attributes.
    for cattrs in (chain_hshes, chain_chshes):
        for attr in list(cattrs.keys()):
            if not cattrs[attr]:
                cattrs.pop(attr, None)

    if chain_hshes:
        # These attributes are stored as a single value with
        # spaces in it rather than multiple values to ensure
        # the ordering remains consistent.
        self.attrs["chain.sizes"] = " ".join(chain_sizes)
        self.attrs["chain.csizes"] = " ".join(chain_csizes)

        # Guard the membership test: attributes with no computed
        # values were pruned above, and indexing the dict
        # unconditionally for every DEFAULT_CHAIN_* attribute
        # would raise KeyError on a partially-populated table.
        for attr in digest.DEFAULT_CHAIN_ATTRS:
            if attr in chain_hshes:
                self.attrs[attr] = " ".join(chain_hshes[attr])
        for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
            if attr in chain_chshes:
                self.attrs[attr] = " ".join(chain_chshes[attr])
def sig_str(self, a, version):
    """Create a stable string representation of an action that
    is deterministic in its creation.

    If creating a string from an action is non-deterministic, then
    manifest signing cannot work.

    The parameter 'a' is the signature action that's going to use
    the string produced.  It's needed for the signature string
    action, and is here to keep the method signature the same.
    """

    # Any changes to this function mean Action.sig_version must be
    # incremented.

    if version != generic.Action.sig_version:
        raise apx.UnsupportedSignatureVersion(version, sig=self)
    # Signature actions don't sign other signature actions.  So if
    # the action that's doing the signing isn't ourself, return
    # nothing.
    if str(a) != str(self):
        return None
    # It's necessary to sign the action as the client will see it,
    # post publication.  To do that, it's necessary to simulate the
    # publication process on a copy of the action, converting
    # paths to hashes and adding size information.
    tmp_a = SignatureAction(None, **self.attrs)
    # The signature action can't sign the value of the value
    # attribute, but it can sign that attribute's name.
    tmp_a.attrs["value"] = ""

    if callable(self.data):
        # Pre-publication action: the payload is still an opener,
        # so hash and compress it the way publication would.
        size = int(self.attrs.get("pkg.size", 0))
        tmp_dir = tempfile.mkdtemp()
        with self.data() as fh:
            tmp_a.hash, data = misc.get_data_digest(fh,
                size, return_content=True)
        csize, chash = misc.compute_compressed_attrs(
            os.path.basename(self.hash), self.hash, data, size,
            tmp_dir)
        shutil.rmtree(tmp_dir)
        tmp_a.attrs["pkg.csize"] = csize
        tmp_a.attrs["chash"] = chash.hexdigest()
    elif self.hash:
        # Already-published action: reuse the stored hash.
        tmp_a.hash = self.hash

    # Simulate publication of each chain certificate as well; only
    # the uncompressed hashes end up in the signed string.
    hashes = []
    csizes = []
    chashes = []
    sizes = self.attrs.get("chain.sizes", "").split()
    for i, c in enumerate(self.chain_cert_openers):
        size = int(sizes[i])
        tmp_dir = tempfile.mkdtemp()
        hsh, data = misc.get_data_digest(c(), size,
            return_content=True)
        hashes.append(hsh)
        csize, chash = misc.compute_compressed_attrs("tmp",
            None, data, size, tmp_dir)
        shutil.rmtree(tmp_dir)
        csizes.append(csize)
        chashes.append(chash.hexdigest())
    if hashes:
        tmp_a.attrs["chain"] = " ".join(hashes)

    # Now that tmp_a looks like the post-published action, transform
    # it into a string using the generic sig_str method.
    return generic.Action.sig_str(tmp_a, tmp_a, version)