def test_valid_elf(self):
    """Test that elf routines work on a small set of objects."""
    # Substitute the placeholder in each test path with the real
    # machine architecture before probing the file.
    arch = pkg.portable.get_isainfo()[0]
    for raw_path in self.elf_paths:
        elf_path = re.sub("__ARCH__", arch, raw_path)
        self.debug("testing elf file {0}".format(elf_path))
        self.assertTrue(os.path.exists(elf_path),
            "{0} does not exist".format(elf_path))
        self.assertEqual(elf.is_elf_object(elf_path), True)
        # These calls are exercised purely for "does not raise";
        # their return values are not inspected.
        elf.get_dynamic(elf_path)
        elf.get_hashes(elf_path)
        self.debug("elf.get_info {0}".format(elf.get_info(elf_path)))
def test_get_hashes_params(self):
    """Test that get_hashes(..) returns checksums according to the
    parameters passed to the method."""

    # Check that the hashes generated have the correct length
    # depending on the algorithm used to generate them.
    sha1_len = 40
    sha256_len = 64

    # The default is to return both the SHA-1 elfhash and
    # the SHA-256 pkg.content-hash.
    d = elf.get_hashes(self.elf_paths[0])
    self.assertEqual(len(d["elfhash"]), sha1_len)
    self.assertIn("pkg.content-hash", d)
    self.assertEqual(len(d["pkg.content-hash"]), 2)
    # Each entry is "<extract-method>:<alg>:<hexdigest>".
    for entry in d["pkg.content-hash"]:
        v = entry.split(":")
        self.assertEqual(len(v), 3)
        self.assertEqual(v[1], "sha256")
        self.assertEqual(len(v[2]), sha256_len)

    # With elfhash disabled and sha512t_256 enabled we expect four
    # pkg.content-hash entries: signed + unsigned for each of the
    # two algorithms.
    d = elf.get_hashes(self.elf_paths[0], elfhash=False,
        sha512t_256=True)
    self.assertNotIn("elfhash", d)
    self.assertIn("pkg.content-hash", d)
    self.assertEqual(len(d["pkg.content-hash"]), 4)
    sha256_count = 0
    sha512t_256_count = 0
    unsigned_count = 0
    for entry in d["pkg.content-hash"]:
        v = entry.split(":")
        self.assertEqual(len(v), 3)
        # Both algorithms emit 256-bit (64 hex chars) digests.
        self.assertEqual(len(v[2]), sha256_len)
        if v[0].endswith(".unsigned"):
            unsigned_count += 1
        if v[1] == "sha256":
            sha256_count += 1
        elif v[1] == "sha512t_256":
            sha512t_256_count += 1
    self.assertEqual(sha256_count, 2)
    self.assertEqual(sha512t_256_count, 2)
    self.assertEqual(unsigned_count, 2)

    # With everything disabled, no hashes should be returned.
    d = elf.get_hashes(self.elf_paths[0], elfhash=False,
        sha256=False)
    self.assertEqual(len(d), 0)
def test_valid_elf_hash(self):
    """Test that the elf routines generate the expected hash for a
    known, checked-in test object (ro_data/elftest.so.1)."""
    o = os.path.join(self.test_root, "ro_data/elftest.so.1")
    d = elf.get_hashes(o, elfhash=True, sha256=True,
        sha512t_256=True)
    # Golden values for the checked-in test binary: signed and
    # unsigned variants for each content-hash algorithm.
    expected = [
        'gelf:sha256:7c4f1f347b6d6e65e7542ffb21a05471728a80a5449fddd715186c6cbfdba4b0',
        'gelf.unsigned:sha256:7c4f1f347b6d6e65e7542ffb21a05471728a80a5449fddd715186c6cbfdba4b0',
        'gelf:sha512t_256:54f4cba7527ab9f78a85f7bb5a4e63315c8cae4a7e38f884e4bfd16bcab00821',
        'gelf.unsigned:sha512t_256:54f4cba7527ab9f78a85f7bb5a4e63315c8cae4a7e38f884e4bfd16bcab00821',
    ]
    # (Removed leftover debug output: "import pprint; pprint.pprint(d)".)
    self.assertEqual(d['elfhash'],
        '083308992c921537fd757548964f89452234dd11')
    # 'chash' instead of 'hash' to avoid shadowing the builtin.
    for chash in expected:
        self.assertIn(chash, d['pkg.content-hash'])
def __get_elf_attrs(self, action, fname, size): """Helper function to get the ELF information.""" # This currently uses the presence of "elfhash" to indicate the # need for *any* content hashes to be added. This will work as # expected until elfhash is no longer generated by default, and # then this logic will need to be updated accordingly. need_elf_info = False need_elfhash = False bufsz = misc.PKG_FILE_BUFSIZ if bufsz > size: bufsz = size f = action.data() magic = f.read(4) if haveelf and magic == b"\x7fELF": need_elf_info = ("elfarch" not in action.attrs or "elfbits" not in action.attrs) need_elfhash = "elfhash" not in action.attrs if not need_elf_info or not need_elfhash: f.close() return misc.EmptyDict elf_name = os.path.join(self._tmpdir, ".temp-{0}".format(fname)) with open(elf_name, "wb") as elf_file: elf_file.write(magic) while True: data = f.read(bufsz) if not data: break elf_file.write(data) f.close() attrs = {} if need_elf_info: try: elf_info = elf.get_info(elf_name) except elf.ElfError as e: raise TransactionError(e) attrs["elfbits"] = str(elf_info["bits"]) attrs["elfarch"] = elf_info["arch"] # Check which content checksums to compute and add to the action get_elfhash = (need_elfhash and "elfhash" in digest.DEFAULT_GELF_HASH_ATTRS) get_sha256 = (need_elfhash and not digest.sha512_supported and "pkg.content-hash" in digest.DEFAULT_GELF_HASH_ATTRS) get_sha512t_256 = (need_elfhash and digest.sha512_supported and "pkg.content-hash" in digest.DEFAULT_GELF_HASH_ATTRS) if get_elfhash or get_sha256 or get_sha512t_256: try: attrs.update( elf.get_hashes(elf_name, elfhash=get_elfhash, sha256=get_sha256, sha512t_256=get_sha512t_256)) except elf.ElfError: pass os.unlink(elf_name) return attrs
def verify(self, img, **args):
    """Returns a tuple of lists of the form (errors, warnings,
    info).  The error list will be empty if the action has been
    correctly installed in the given image.

    In detail, this verifies that the file is present, and if
    the preserve attribute is not present, that the hashes
    and other attributes of the file match.

    args is expected to carry at least the "verbose" and
    "forever" keys (client verify options); missing keys would
    raise KeyError.  May set self.replace_required as a side
    effect when the on-disk file must be reinstalled."""

    # Files marked preserve=abandon are never verified.
    if self.attrs.get("preserve") == "abandon":
        return [], [], []

    path = self.get_installed_path(img.get_root())

    # Common fs-object checks (existence, type, mode, owner...).
    lstat, errors, warnings, info, abort = \
        self.verify_fsobj_common(img, stat.S_IFREG)
    if lstat:
        if not stat.S_ISREG(lstat.st_mode):
            self.replace_required = True

    if abort:
        assert errors
        self.replace_required = True
        return errors, warnings, info

    if path.lower().endswith("/bobcat") and args["verbose"] == True:
        # Returned as a purely informational (untranslated)
        # message so that no client should interpret it as a
        # reason to fail verification.
        info.append("Warning: package may contain bobcat! "
            "(http://xkcd.com/325/)")

    preserve = self.attrs.get("preserve")

    # Timestamp only matters for non-preserved files that declare
    # one in the manifest.
    if (preserve is None and
        "timestamp" in self.attrs and lstat.st_mtime !=
        misc.timestamp_to_time(self.attrs["timestamp"])):
        errors.append(
            _("Timestamp: {found} should be "
            "{expected}").format(found=misc.time_to_timestamp(
                lstat.st_mtime),
                expected=self.attrs["timestamp"]))

    # avoid checking pkg.size if we have any content-hashes present;
    # different size files may have the same content-hash
    pkg_size = int(self.attrs.get("pkg.size", 0))
    if preserve is None and pkg_size > 0 and \
        not set(digest.DEFAULT_GELF_HASH_ATTRS).intersection(
        set(self.attrs.keys())) and \
        lstat.st_size != pkg_size:
        errors.append(
            _("Size: {found:d} bytes should be "
            "{expected:d}").format(found=lstat.st_size,
            expected=pkg_size))

    # Stop early for preserved files in non-verbose mode, or when
    # the file is missing entirely.
    if (preserve is not None and args["verbose"] == False or
        lstat is None):
        return errors, warnings, info

    # Content checks below are expensive; only done with -v -v
    # ("forever").
    if args["forever"] != True:
        return errors, warnings, info

    #
    # Check file contents.
    #
    try:
        # This is a generic mechanism, but only used for libc on
        # x86, where the "best" version of libc is lofs-mounted
        # on the canonical path, foiling the standard verify
        # checks.
        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
        elfhash = None
        elferror = None
        elf_hash_attr, elf_hash_val, \
            elf_hash_func = \
            digest.get_preferred_hash(self,
                hash_type=pkg.digest.HASH_GELF)
        if elf_hash_attr and haveelf and not is_mtpt:
            #
            # It's possible for the elf module to
            # throw while computing the hash,
            # especially if the file is badly
            # corrupted or truncated.
            #
            try:
                # On path, only calculate the
                # content hash that matches
                # the preferred one on the
                # action
                get_elfhash = \
                    elf_hash_attr == "elfhash"
                get_sha256 = (not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha256"])
                get_sha512t_256 = (
                    not get_elfhash and
                    elf_hash_func ==
                    digest.GELF_HASH_ALGS["gelf:sha512t_256"])
                elfhash = elf.get_hashes(
                    path, elfhash=get_elfhash,
                    sha256=get_sha256,
                    sha512t_256=get_sha512t_256)[elf_hash_attr]

                # Normalize to a list so elfhash[0] below works
                # for both the plain elfhash string and the
                # ContentHash mapping.
                if get_elfhash:
                    elfhash = [elfhash]
                else:
                    elfhash = list(
                        digest.ContentHash(elfhash).values())
            except elf.ElfError as e:
                # Any ELF error means there is something bad
                # with the file, mark as needing to be replaced.
                elferror = _("ELF failure: {0}").format(e)

        if (elfhash is not None and
            elf_hash_val != elfhash[0]):
            elferror = _("ELF content hash: "
                "{found} "
                "should be {expected}").format(
                found=elfhash[0],
                expected=elf_hash_val)

        # Always check on the file hash because the ELF hash
        # check only checks on the ELF parts and does not
        # check for some other file integrity issues.
        if not is_mtpt:
            hash_attr, hash_val, hash_func = \
                digest.get_preferred_hash(self)
            sha_hash, data = misc.get_data_digest(path,
                hash_func=hash_func)
            if sha_hash != hash_val:
                # Prefer the ELF content hash error message.
                if preserve is not None:
                    # Editable file: a changed hash is
                    # informational, not an error.
                    info.append(_("editable file has "
                        "been changed"))
                elif elferror:
                    errors.append(elferror)
                    self.replace_required = True
                else:
                    errors.append(_("Hash: "
                        "{found} should be "
                        "{expected}").format(found=sha_hash,
                        expected=hash_val))
                    self.replace_required = True

        # Check system attributes.
        # Since some attributes like 'archive' or 'av_modified'
        # are set automatically by the FS, it makes no sense to
        # check for 1:1 matches. So we only check that the
        # system attributes specified in the action are still
        # set on the file.
        sattr = self.attrs.get("sysattr", None)
        if sattr:
            if isinstance(sattr, list):
                sattr = ",".join(sattr)
            sattrs = sattr.split(",")
            if len(sattrs) == 1 and \
                sattrs[0] not in portable.get_sysattr_dict():
                # not a verbose attr, try as a compact
                set_attrs = portable.fgetattr(path,
                    compact=True)
                # A compact string is iterated per character
                # by the loop below.
                sattrs = sattrs[0]
            else:
                set_attrs = portable.fgetattr(path)

            for a in sattrs:
                if a not in set_attrs:
                    errors.append(
                        _("System attribute '{0}' "
                        "not set").format(a))

    except EnvironmentError as e:
        # Permission problems are reported as a skip, not a
        # verification failure.
        if e.errno == errno.EACCES:
            errors.append(_("Skipping: Permission Denied"))
        else:
            errors.append(_("Unexpected Error: {0}").format(e))
    except Exception as e:
        errors.append(_("Unexpected Exception: {0}").format(e))

    return errors, warnings, info
def add_content(self, action):
    """Adds the content of the provided action (if applicable) to
    the Transaction.

    Computes and attaches payload hashes, ELF attributes, and
    compressed-size attributes to the action, performs sanity
    checks on obsolete/renamed package markers, and appends the
    action to the in-progress manifest.  Raises
    TransactionOperationError on validation/state errors and
    TransactionContentError on bad ELF payloads."""

    # Perform additional publication-time validation of actions
    # before further processing is done.
    try:
        action.validate()
    except actions.ActionError as e:
        raise TransactionOperationError(e)

    # An append transaction may only accept signature actions.
    if self.append_trans and action.name != "signature":
        raise TransactionOperationError(non_sig=True)

    size = int(action.attrs.get("pkg.size", 0))

    if action.has_payload and size <= 0:
        # XXX hack for empty files
        action.data = lambda: open(os.devnull, "rb")

    if action.data is not None:
        # get all hashes for this action
        hashes, data = misc.get_data_digest(action.data(),
            length=size, return_content=True,
            hash_attrs=digest.LEGACY_HASH_ATTRS,
            hash_algs=digest.HASH_ALGS)
        # set the hash member for backwards compatibility and
        # remove it from the dictionary
        action.hash = hashes.pop("hash", None)
        action.attrs.update(hashes)

        # now set the hash value that will be used for storing
        # the file in the repository.
        hash_attr, hash_val, hash_func = \
            digest.get_least_preferred_hash(action)
        fname = hash_val

        # Extract ELF information if not already provided.
        # XXX This needs to be modularized.
        if haveelf and data[:4] == b"\x7fELF" and (
            "elfarch" not in action.attrs or
            "elfbits" not in action.attrs or
            "elfhash" not in action.attrs):
            elf_name = os.path.join(self.dir,
                ".temp-{0}".format(fname))
            # Use a context manager so the temp file handle is
            # closed even if the write fails (the original
            # open/close pair leaked the handle on error).
            with open(elf_name, "wb") as elf_file:
                elf_file.write(data)

            try:
                elf_info = elf.get_info(elf_name)
            except elf.ElfError as e:
                raise TransactionContentError(e)

            try:
                # Check which content checksums to
                # compute and add to the action
                elf1 = "elfhash"
                if elf1 in \
                    digest.LEGACY_CONTENT_HASH_ATTRS:
                    get_sha1 = True
                else:
                    get_sha1 = False
                hashes = elf.get_hashes(elf_name,
                    elfhash=get_sha1)
                if get_sha1:
                    action.attrs[elf1] = hashes[elf1]
            except elf.ElfError:
                # Best-effort: publish without ELF hashes.
                pass
            action.attrs["elfbits"] = str(elf_info["bits"])
            action.attrs["elfarch"] = elf_info["arch"]
            os.unlink(elf_name)

        try:
            dst_path = self.rstore.file(fname)
        except Exception as e:
            # The specific exception can't be named here due
            # to the cyclic dependency between this class
            # and the repository class.
            if getattr(e, "data", "") != fname:
                raise
            dst_path = None

        csize, chashes = misc.compute_compressed_attrs(
            fname, dst_path, data, size, self.dir)
        for attr in chashes:
            action.attrs[attr] = chashes[attr]
        action.attrs["pkg.csize"] = csize

    self.remaining_payload_cnt = \
        len(action.attrs.get("chain.sizes", "").split())

    # Do some sanity checking on packages marked or being marked
    # obsolete or renamed.
    if action.name == "set" and \
        action.attrs["name"] == "pkg.obsolete" and \
        action.attrs["value"] == "true":
        self.obsolete = True
        if self.types_found.difference(
            set(("set", "signature"))):
            raise TransactionOperationError(_("An obsolete "
                "package cannot contain actions other than "
                "'set' and 'signature'."))
    elif action.name == "set" and \
        action.attrs["name"] == "pkg.renamed" and \
        action.attrs["value"] == "true":
        self.renamed = True
        if self.types_found.difference(
            set(("depend", "set", "signature"))):
            raise TransactionOperationError(_("A renamed "
                "package cannot contain actions other than "
                "'set', 'depend', and 'signature'."))

    if not self.has_reqdeps and action.name == "depend" and \
        action.attrs["type"] == "require":
        self.has_reqdeps = True

    if self.obsolete and self.renamed:
        # Reset either obsolete or renamed, depending on which
        # action this was.
        if action.attrs["name"] == "pkg.obsolete":
            self.obsolete = False
        else:
            self.renamed = False
        raise TransactionOperationError(_("A package may not "
            " be marked for both obsoletion and renaming."))
    elif self.obsolete and action.name not in ("set", "signature"):
        raise TransactionOperationError(_("A '{type}' action "
            "cannot be present in an obsolete package: "
            "{action}").format(
            type=action.name, action=action))
    elif self.renamed and action.name not in \
        ("depend", "set", "signature"):
        raise TransactionOperationError(_("A '{type}' action "
            "cannot be present in a renamed package: "
            "{action}").format(
            type=action.name, action=action))

    # Now that the action is known to be sane, we can add it to the
    # manifest.  'with' guarantees the manifest handle is closed
    # (and its buffer flushed) even if the write raises.
    tfpath = os.path.join(self.dir, "manifest")
    with open(tfpath, "a+") as tfile:
        print(action, file=tfile)

    self.types_found.add(action.name)