Code Example #1
File: t_elf.py Project: ripudamank2/pkg5
    def test_get_dynamic_params(self):
        """Test that get_dynamic(..) returns checksums according to the
                parameters passed to the method."""

        # Check that the hashes generated have the correct length
        # depending on the algorithm used to generate them.
        sha1_len = 40
        sha256_len = 64

        # the default is to return an SHA-1 elfhash only
        d = elf.get_dynamic(self.elf_paths[0])
        self.assertTrue(len(d["elfhash"]) == sha1_len)
        self.assertTrue("pkg.content-type.sha256" not in d)

        d = elf.get_dynamic(self.elf_paths[0], sha256=True)
        self.assertTrue(len(d["elfhash"]) == sha1_len)
        self.assertTrue(len(d["pkg.content-type.sha256"]) == sha256_len)

        d = elf.get_dynamic(self.elf_paths[0], sha1=False, sha256=True)
        self.assertTrue("elfhash" not in d)
        self.assertTrue(len(d["pkg.content-type.sha256"]) == sha256_len)

        d = elf.get_dynamic(self.elf_paths[0], sha1=False, sha256=False)
        self.assertTrue("elfhash" not in d)
        self.assertTrue("pkg.content-type.sha256" not in d)
Code Example #2
File: t_elf.py Project: ripudamank2/pkg5
 def test_valid_elf(self):
     """Test that elf routines work on a small set of objects."""
     arch = pkg.portable.get_isainfo()[0]
     for p in self.elf_paths:
         p = re.sub("__ARCH__", arch, p)
         self.debug("testing elf file {0}".format(p))
         self.assertTrue(os.path.exists(p), "{0} does not exist".format(p))
         self.assertEqual(elf.is_elf_object(p), True)
         elf.get_dynamic(p)
         elf.get_info(p)
Code Example #3
def process_elf_dependencies(action, pkg_vars, dyn_tok_conv, kernel_paths,
                             **kwargs):
    """Produce the elf dependencies for the file delivered in the action
        provided.

        'action' is the file action to analyze.

        'pkg_vars' is the list of variants against which the package delivering
        the action was published.

        'dyn_tok_conv' is the dictionary which maps the dynamic tokens, like
        $PLATFORM, to the values they should be expanded to.

        'kernel_paths' contains the run paths which kernel modules should use.
        """

    if not action.name == "file":
        return [], [], {}

    installed_path = action.attrs[action.key_attr]

    proto_file = action.attrs[PD_LOCAL_PATH]

    if not os.path.exists(proto_file):
        raise base.MissingFile(proto_file)

    if not elf.is_elf_object(proto_file):
        return [], [], {}

    try:
        ei = elf.get_info(proto_file)
        ed = elf.get_dynamic(proto_file)
    except elf.ElfError as e:
        raise BadElfFile(proto_file, e)
Code Example #4
File: userland.py Project: tsoome/oi-userland
    def __elf_runpath_check(self, path, engine):
        result = None
        list = []

        ed = elf.get_dynamic(path)
        ei = elf.get_info(path)
        bits = ei.get("bits")
        for dir in ed.get("runpath", "").split(":"):
            if dir == None or dir == '':
                continue

            match = False
            for expr in self.runpath_re:
                if expr.match(dir):
                    match = True
                    break

            # The RUNPATH shouldn't contain any runtime linker
            # default paths (or the /64 equivalent link)
            if dir in [
                    '/lib', '/lib/64', '/lib/amd64', '/lib/sparcv9',
                    '/usr/lib', '/usr/lib/64', '/usr/lib/amd64',
                    '/usr/lib/sparcv9'
            ]:
                list.append(dir)

            if match == False:
                list.append(dir)

            if bits == 32:
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        engine.warning(_(
                            "64-bit runpath in 32-bit binary, '%s' includes '%s'"
                        ) % (path, dir),
                                       msgid="%s%s.3" % (self.name, "001"))
            else:
                match = False
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        match = True
                        break
                if match == False:
                    engine.warning(
                        _("32-bit runpath in 64-bit binary, '%s' includes '%s'"
                          ) % (path, dir),
                        msgid="%s%s.3" % (self.name, "001"))
        if len(list) > 0:
            result = _("bad RUNPATH, '%%s' includes '%s'" % ":".join(list))

        return result
Code Example #5
    def __elf_runpath_check(self, path, engine):
        result = None
        list = []

        ed = elf.get_dynamic(path)
        ei = elf.get_info(path)
        bits = ei.get("bits")
        for dir in ed.get("runpath", "").split(":"):
            if dir == None or dir == '':
                continue

            match = False
            for expr in self.runpath_re:
                if expr.match(dir):
                    match = True
                    break

            if match == False:
                list.append(dir)

            if bits == 32:
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        engine.warning(_(
                            "64-bit runpath in 32-bit binary, '%s' includes '%s'"
                        ) % (path, dir),
                                       msgid="%s%s.3" % (self.name, "001"))
            else:
                match = False
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        match = True
                        break
                if match == False:
                    engine.warning(
                        _("32-bit runpath in 64-bit binary, '%s' includes '%s'"
                          ) % (path, dir),
                        msgid="%s%s.3" % (self.name, "001"))
        if len(list) > 0:
            result = _("bad RUNPATH, '%%s' includes '%s'" % ":".join(list))

        return result
Code Example #6
File: elf.py Project: pombredanne/svnedge-console
def process_elf_dependencies(action, proto_dir, pkg_vars, **kwargs):
    """Given a file action and proto directory, produce the elf dependencies
        for that file."""

    if not action.name == "file":
        return []

    installed_path = action.attrs[action.key_attr]

    proto_file = os.path.join(proto_dir, installed_path)

    if not os.path.exists(proto_file):
        raise base.MissingFile(proto_file)

    if not elf.is_elf_object(proto_file):
        return []

    try:
        ei = elf.get_info(proto_file)
        ed = elf.get_dynamic(proto_file)
    except elf.ElfError as e:
        raise BadElfFile(proto_file, e)
Code Example #7
	def __elf_runpath_check(self, path):
		result = None
		list = []

		ed = elf.get_dynamic(path)
		for dir in ed.get("runpath", "").split(":"):
			if dir == None or dir == '':
				continue

			match = False
			for expr in self.runpath_re:
				if expr.match(dir):
					match = True
					break

			if match == False:
				list.append(dir)

		if len(list) > 0:
			result = _("bad RUNPATH, '%%s' includes '%s'" %
				   ":".join(list))

		return result
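As a rough standalone illustration of the same check (the allow-list of patterns below is invented for the example; the real checker builds self.runpath_re from its own configuration):

import re

# Hypothetical allowed runpath patterns.
runpath_re = [re.compile(p) for p in (r"^/usr/lib(/.*)?$", r"^\$ORIGIN(/.*)?$")]

def bad_runpath_entries(ed):
    """Return the RUNPATH entries that match none of the allowed patterns."""
    bad = []
    for entry in ed.get("runpath", "").split(":"):
        if not entry:
            continue
        if not any(expr.match(entry) for expr in runpath_re):
            bad.append(entry)
    return bad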
Code Example #8
def process_elf_dependencies(action, pkg_vars, dyn_tok_conv, run_paths,
    **kwargs):
        """Produce the elf dependencies for the file delivered in the action
        provided.

        'action' is the file action to analyze.

        'pkg_vars' is the list of variants against which the package delivering
        the action was published.

        'dyn_tok_conv' is the dictionary which maps the dynamic tokens, like
        $PLATFORM, to the values they should be expanded to.

        'run_paths' contains the run paths which elf binaries should use.
        """

        if not action.name == "file":
                return [], [], {}

        installed_path = action.attrs[action.key_attr]

        proto_file = action.attrs[PD_LOCAL_PATH]

        if not os.path.exists(proto_file):
                raise base.MissingFile(proto_file)

        if not elf.is_elf_object(proto_file):
                return [], [], {}

        try:
                ei = elf.get_info(proto_file)
                ed = elf.get_dynamic(proto_file)
        except elf.ElfError as e:
                raise BadElfFile(proto_file, e)
        deps = [
            d[0]
            for d in ed.get("deps", [])
        ]
        rp = ed.get("runpath", "").split(":")
        if len(rp) == 1 and rp[0] == "":
                rp = []

        dyn_tok_conv["$ORIGIN"] = [os.path.join("/",
            os.path.dirname(installed_path))]

        kernel64 = None

        # For kernel modules, default path resolution is /platform/<platform>,
        # /kernel, /usr/kernel.  But how do we know what <platform> would be for
        # a given module?  Does it do fallbacks to, say, sun4u?
        if installed_path.startswith("kernel") or \
            installed_path.startswith("usr/kernel") or \
            (installed_path.startswith("platform") and \
            installed_path.split("/")[2] == "kernel"):
                if rp and (len(rp) > 1 or
                    not re.match(r'^/usr/gcc/\d/lib$', rp[0])):
                        raise RuntimeError("RUNPATH set for kernel module "
                            "({0}): {1}".format(installed_path, rp))
                # Add this platform to the search path.
                if installed_path.startswith("platform"):
                        rp.append("/platform/{0}/kernel".format(
                            installed_path.split("/")[1]))
                else:
                        for p in dyn_tok_conv.get("$PLATFORM", []):
                                rp.append("/platform/{0}/kernel".format(p))
                # Default kernel search path
                rp.extend(["/kernel", "/usr/kernel"])
                # What subdirectory should we look in for 64-bit kernel modules?
                if ei["bits"] == 64:
                        if ei["arch"] == "i386":
                                kernel64 = "amd64"
                        elif ei["arch"] == "sparc":
                                kernel64 = "sparcv9"
                        else:
                                raise RuntimeError("Unknown arch:{0}".format(
                                    ei["arch"]))
        else:
                for p in default_run_paths:
                        if ei["bits"] == 64:
                                p += "/64"
                        if p not in rp:
                                rp.append(p)

        elist = []
        if run_paths:
                # add our detected runpaths into the user-supplied one (if any)
                rp = base.insert_default_runpath(rp, run_paths)

        rp, errs = expand_variables(rp, dyn_tok_conv)

        elist.extend([
            UnsupportedDynamicToken(proto_file, installed_path, p, tok)
            for p, tok in errs
        ])

        res = []

        for d in deps:
                pn, fn = os.path.split(d)
                pathlist = []
                for p in rp:
                        if kernel64:
                                # Find 64-bit modules the way krtld does.
                                # XXX We don't resolve dependencies found in
                                # /platform, since we don't know where under
                                # /platform to look.
                                deppath = \
                                    os.path.join(p, pn, kernel64, fn).lstrip(
                                    os.path.sep)
                        else:
                                deppath = os.path.join(p, d).lstrip(os.path.sep)
                        # deppath includes filename; remove that.
                        head, tail = os.path.split(deppath)
                        if head:
                                pathlist.append(head)
                res.append(ElfDependency(action, fn, pathlist, pkg_vars,
                    action.attrs[PD_PROTO_DIR]))
        del dyn_tok_conv["$ORIGIN"]
        return res, elist, {}
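The expand_variables(..) helper used above is not shown on this page; the following is a simplified sketch of what that kind of dynamic-token expansion presumably looks like, with the function name and details assumed rather than taken from the pkg5 source:

def expand_tokens(run_paths, dyn_tok_conv):
    """Expand tokens such as $ORIGIN and $PLATFORM in each run path.

    Returns (expanded_paths, errors), where errors is a list of
    (path, unexpanded_token) tuples, mirroring how the result is
    consumed in the example above."""
    expanded, errors = [], []
    for p in run_paths:
        if "$" not in p:
            expanded.append(p)
            continue
        for tok, values in dyn_tok_conv.items():
            if tok in p:
                expanded.extend(p.replace(tok, v) for v in values)
                break
        else:
            # No known token matched; report the unexpanded token.
            errors.append((p, p[p.index("$"):].split("/")[0]))
    return expanded, errors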
Code Example #9
File: file.py Project: ripudamank2/pkg5
        def verify(self, img, **args):
                """Returns a tuple of lists of the form (errors, warnings,
                info).  The error list will be empty if the action has been
                correctly installed in the given image.

                In detail, this verifies that the file is present, and if
                the preserve attribute is not present, that the hashes
                and other attributes of the file match."""

                if self.attrs.get("preserve") == "abandon":
                        return [], [], []

                path = self.get_installed_path(img.get_root())

                lstat, errors, warnings, info, abort = \
                    self.verify_fsobj_common(img, stat.S_IFREG)
                if lstat:
                        if not stat.S_ISREG(lstat.st_mode):
                                self.replace_required = True

                if abort:
                        assert errors
                        self.replace_required = True
                        return errors, warnings, info

                if path.lower().endswith("/bobcat") and args["verbose"] == True:
                        # Returned as a purely informational (untranslated)
                        # message so that no client should interpret it as a
                        # reason to fail verification.
                        info.append("Warning: package may contain bobcat!  "
                            "(http://xkcd.com/325/)")

                if "preserve" not in self.attrs and \
                    "timestamp" in self.attrs and lstat.st_mtime != \
                    misc.timestamp_to_time(self.attrs["timestamp"]):
                        errors.append(_("Timestamp: {found} should be "
                            "{expected}").format(
                            found=misc.time_to_timestamp(lstat.st_mtime),
                            expected=self.attrs["timestamp"]))

                # avoid checking pkg.size if we have any content-hashes present;
                # different size files may have the same content-hash
                if "preserve" not in self.attrs and \
                    "pkg.size" in self.attrs and    \
                    not set(digest.RANKED_CONTENT_HASH_ATTRS).intersection(
                    set(self.attrs.keys())) and \
                    lstat.st_size != int(self.attrs["pkg.size"]):
                        errors.append(_("Size: {found:d} bytes should be "
                            "{expected:d}").format(found=lstat.st_size,
                            expected=int(self.attrs["pkg.size"])))

                if "preserve" in self.attrs:
                        if args["verbose"] == False or lstat is None:
                                return errors, warnings, info

                if args["forever"] != True:
                        return errors, warnings, info

                #
                # Check file contents. At the moment, the only content-hash
                # supported in pkg(5) is for ELF files, so this will need work
                # when additional content-hashes are added.
                #
                try:
                        # This is a generic mechanism, but only used for libc on
                        # x86, where the "best" version of libc is lofs-mounted
                        # on the canonical path, foiling the standard verify
                        # checks.
                        is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
                        elfhash = None
                        elferror = None
                        ehash_attr, elfhash_val, hash_func = \
                            digest.get_preferred_hash(self,
                                hash_type=pkg.digest.CONTENT_HASH)
                        if ehash_attr and haveelf and not is_mtpt:
                                #
                                # It's possible for the elf module to
                                # throw while computing the hash,
                                # especially if the file is badly
                                # corrupted or truncated.
                                #
                                try:
                                        # Annoying that we have to hardcode this
                                        if ehash_attr == \
                                            "pkg.content-hash.sha256":
                                                get_sha256 = True
                                                get_sha1 = False
                                        else:
                                                get_sha256 = False
                                                get_sha1 = True
                                        elfhash = elf.get_dynamic(path,
                                            sha1=get_sha1,
                                            sha256=get_sha256)[ehash_attr]
                                except RuntimeError as e:
                                        errors.append(
                                            "ELF content hash: {0}".format(e))

                                if elfhash is not None and \
                                    elfhash != elfhash_val:
                                        elferror = _("ELF content hash: "
                                            "{found} "
                                            "should be {expected}").format(
                                            found=elfhash,
                                            expected=elfhash_val)

                        # If we failed to compute the content hash, or the
                        # content hash failed to verify, try the file hash.
                        # If the content hash fails to match but the file hash
                        # matches, it indicates that the content hash algorithm
                        # changed, since obviously the file hash is a superset
                        # of the content hash.
                        if (elfhash is None or elferror) and not is_mtpt:
                                hash_attr, hash_val, hash_func = \
                                    digest.get_preferred_hash(self)
                                sha_hash, data = misc.get_data_digest(path,
                                    hash_func=hash_func)
                                if sha_hash != hash_val:
                                        # Prefer the content hash error message.
                                        if "preserve" in self.attrs:
                                                info.append(_(
                                                    "editable file has "
                                                    "been changed"))
                                        elif elferror:
                                                errors.append(elferror)
                                                self.replace_required = True
                                        else:
                                                errors.append(_("Hash: "
                                                    "{found} should be "
                                                    "{expected}").format(
                                                    found=sha_hash,
                                                    expected=hash_val))
                                                self.replace_required = True

                        # Check system attributes.
                        # Since some attributes like 'archive' or 'av_modified'
                        # are set automatically by the FS, it makes no sense to
                        # check for 1:1 matches. So we only check that the
                        # system attributes specified in the action are still
                        # set on the file.
                        sattr = self.attrs.get("sysattr", None)
                        if sattr:
                                sattrs = sattr.split(",")
                                if len(sattrs) == 1 and \
                                    sattrs[0] not in portable.get_sysattr_dict():
                                        # not a verbose attr, try as a compact
                                        set_attrs = portable.fgetattr(path,
                                            compact=True)
                                        sattrs = sattrs[0]
                                else:
                                        set_attrs = portable.fgetattr(path)

                                for a in sattrs:
                                        if a not in set_attrs:
                                                errors.append(
                                                    _("System attribute '{0}' "
                                                    "not set").format(a))

                except EnvironmentError as e:
                        if e.errno == errno.EACCES:
                                errors.append(_("Skipping: Permission Denied"))
                        else:
                                errors.append(_("Unexpected Error: {0}").format(
                                    e))
                except Exception as e:
                        errors.append(_("Unexpected Exception: {0}").format(e))

                return errors, warnings, info
Code Example #10
File: file.py Project: vincent2628/pluribus_userland
    def verify(self, img, **args):
        """Returns a tuple of lists of the form (errors, warnings,
                info).  The error list will be empty if the action has been
                correctly installed in the given image.

                In detail, this verifies that the file is present, and if
                the preserve attribute is not present, that the hashes
                and other attributes of the file match."""

        path = os.path.normpath(
            os.path.sep.join((img.get_root(), self.attrs["path"])))

        lstat, errors, warnings, info, abort = \
            self.verify_fsobj_common(img, stat.S_IFREG)
        if lstat:
            if not stat.S_ISREG(lstat.st_mode):
                self.replace_required = True

        if abort:
            assert errors
            return errors, warnings, info

        if path.lower().endswith("/bobcat") and args["verbose"] == True:
            # Returned as a purely informational (untranslated)
            # message so that no client should interpret it as a
            # reason to fail verification.
            info.append("Warning: package may contain bobcat!  "
                        "(http://xkcd.com/325/)")

        if "preserve" not in self.attrs and \
            "timestamp" in self.attrs and lstat.st_mtime != \
            misc.timestamp_to_time(self.attrs["timestamp"]):
            errors.append(
                _("Timestamp: %(found)s should be "
                  "%(expected)s") % {
                      "found": misc.time_to_timestamp(lstat.st_mtime),
                      "expected": self.attrs["timestamp"]
                  })

        # avoid checking pkg.size if elfhash present;
        # different size files may have the same elfhash
        if "preserve" not in self.attrs and \
            "pkg.size" in self.attrs and    \
            "elfhash" not in self.attrs and \
            lstat.st_size != int(self.attrs["pkg.size"]):
            errors.append(
                _("Size: %(found)d bytes should be "
                  "%(expected)d") % {
                      "found": lstat.st_size,
                      "expected": int(self.attrs["pkg.size"])
                  })

        if "preserve" in self.attrs:
            return errors, warnings, info

        if args["forever"] != True:
            return errors, warnings, info

        #
        # Check file contents
        #
        try:
            elfhash = None
            elferror = None
            if "elfhash" in self.attrs and haveelf:
                #
                # It's possible for the elf module to
                # throw while computing the hash,
                # especially if the file is badly
                # corrupted or truncated.
                #
                try:
                    elfhash = elf.get_dynamic(path)["hash"]
                except RuntimeError as e:
                    errors.append("Elfhash: %s" % e)

                if elfhash is not None and \
                    elfhash != self.attrs["elfhash"]:
                    elferror = _("Elfhash: %(found)s "
                                 "should be %(expected)s") % {
                                     "found": elfhash,
                                     "expected": self.attrs["elfhash"]
                                 }

            # If we failed to compute the content hash, or the
            # content hash failed to verify, try the file hash.
            # If the content hash fails to match but the file hash
            # matches, it indicates that the content hash algorithm
            # changed, since obviously the file hash is a superset
            # of the content hash.
            if elfhash is None or elferror:
                hashvalue, data = misc.get_data_digest(path)
                if hashvalue != self.hash:
                    # Prefer the content hash error message.
                    if elferror:
                        errors.append(elferror)
                    else:
                        errors.append(
                            _("Hash: "
                              "%(found)s should be "
                              "%(expected)s") % {
                                  "found": hashvalue,
                                  "expected": self.hash
                              })
                    self.replace_required = True
Code Example #11
    def __elf_runpath_check(self, path, engine):
        result = None
        list = []

        ed = elf.get_dynamic(path)
        ei = elf.get_info(path)
        bits = ei.get("bits")
        for dir in ed.get("runpath", "").split(":"):
            if dir == None or dir == '':
                continue

            match = False
            for expr in self.runpath_re:
                if expr.match(dir):
                    match = True
                    break

            if match == False:
                list.append(dir)
            # Make sure RUNPATH matches against a packaged path.
            # Don't check runpaths starting with $ORIGIN, which
            # is specially handled by the linker.

            elif not dir.startswith('$ORIGIN/'):

                # Strip out leading and trailing '/' in the
                # runpath, since the reference paths don't start
                # with '/' and trailing '/' could cause mismatches.
                # Check first if there is an exact match, then check
                # if any reference path starts with this runpath
                # plus a trailing slash, since it may still be a link
                # to a directory that has no action because it uses
                # the default attributes.

                relative_dir = dir.strip('/')
                if not relative_dir in self.ref_paths and \
                    not any(key.startswith(relative_dir + '/')
                        for key in self.ref_paths):

                    # If still no match, if the runpath contains
                    # an embedded symlink, emit a warning; it may or may
                    # not resolve to a legitimate path.
                    # E.g., for usr/openwin/lib, usr/openwin->X11 and
                    # usr/X11/lib are packaged, but usr/openwin/lib is not.
                    # Otherwise, runpath is bad; add it to list.
                    embedded_link = False
                    pdir = os.path.dirname(relative_dir)
                    while pdir != '':
                        if (pdir in self.ref_paths
                                and self.ref_paths[pdir][0][1].name == "link"):
                            embedded_link = True
                            engine.warning(_(
                                "runpath '%s' in '%s' not found in reference paths but contains symlink at '%s'"
                            ) % (dir, path, pdir),
                                           msgid="%s%s.3" % (self.name, "001"))
                            break
                        pdir = os.path.dirname(pdir)
                    if not embedded_link:
                        list.append(dir)

            if bits == 32:
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        engine.warning(_(
                            "64-bit runpath in 32-bit binary, '%s' includes '%s'"
                        ) % (path, dir),
                                       msgid="%s%s.3" % (self.name, "001"))
            else:
                match = False
                for expr in self.runpath_64_re:
                    if expr.search(dir):
                        match = True
                        break
                if match == False:
                    engine.warning(
                        _("32-bit runpath in 64-bit binary, '%s' includes '%s'"
                          ) % (path, dir),
                        msgid="%s%s.3" % (self.name, "001"))
        if len(list) > 0:
            result = _("bad RUNPATH, '%%s' includes '%s'" % ":".join(list))

        return result
Code Example #12
    def __elf_runpath_check(self, path, engine, _pkglint_id):
        """Verify that RUNPATH of given binary is correct."""
        runpath_list = []

        dyninfo = elf.get_dynamic(path)
        elfinfo = elf.get_info(path)
        for runpath in dyninfo.get("runpath", "").split(":"):
            if not runpath:
                continue

            match = False
            for expr in self.runpath_re:
                if expr.match(runpath):
                    match = True
                    break

            if not match:
                runpath_list.append(runpath)

            # Make sure RUNPATH matches against a packaged path.
            # Don't check runpaths starting with $ORIGIN, which
            # is specially handled by the linker.
            elif not runpath.startswith("$ORIGIN/"):

                # Strip out leading and trailing '/' in the
                # runpath, since the reference paths don't start
                # with '/' and trailing '/' could cause mismatches.
                # Check first if there is an exact match, then check
                # if any reference path starts with this runpath
                # plus a trailing slash, since it may still be a link
                # to a directory that has no action because it uses
                # the default attributes.
                relative_dir = runpath.strip("/")
                if relative_dir not in self.ref_paths and not any(
                        key.startswith(relative_dir + "/")
                        for key in self.ref_paths):

                    # If still no match, if the runpath contains
                    # an embedded symlink, emit a warning; it may or may
                    # not resolve to a legitimate path.
                    # E.g., for usr/openwin/lib, usr/openwin->X11 and
                    # usr/X11/lib are packaged, but usr/openwin/lib is not.
                    # Otherwise, runpath is bad; add it to list.
                    pdir = os.path.dirname(relative_dir)
                    while pdir != "":
                        if pdir in self.ref_paths and self.ref_paths[pdir][0][
                                1].name == "link":
                            engine.warning(
                                f"runpath '{runpath}' in '{path}' not found in reference "
                                f"paths but contains symlink at '{pdir}'",
                                msgid=f"{self.name}{_pkglint_id}.3")
                            break
                        pdir = os.path.dirname(pdir)
                    else:
                        runpath_list.append(runpath)

            if elfinfo["bits"] == 32:
                for expr in self.runpath_64_re:
                    if expr.search(runpath):
                        engine.warning(
                            f"64-bit runpath in 32-bit binary, '{path}' includes '{runpath}'",
                            msgid=f"{self.name}{_pkglint_id}.3")
            else:
                for expr in self.runpath_64_re:
                    if expr.search(runpath):
                        break
                else:
                    engine.warning(
                        f"32-bit runpath in 64-bit binary, '{path}' includes '{runpath}'",
                        msgid=f"{self.name}{_pkglint_id}.3")

        # handle all incorrect RUNPATHs in a single error
        if runpath_list:
            engine.error(
                f"bad RUNPATH, '{path}' includes '{':'.join(runpath_list)}'",
                msgid=f"{self.name}{_pkglint_id}.3")
Code Example #13
File: transaction.py Project: fatman2021/pkg5
    def add_content(self, action):
        """Adds the content of the provided action (if applicable) to
                the Transaction."""

        # Perform additional publication-time validation of actions
        # before further processing is done.
        try:
            action.validate()
        except actions.ActionError as e:
            raise TransactionOperationError(e)

        if self.append_trans and action.name != "signature":
            raise TransactionOperationError(non_sig=True)

        size = int(action.attrs.get("pkg.size", 0))

        if action.has_payload and size <= 0:
            # XXX hack for empty files
            action.data = lambda: open(os.devnull, "rb")

        if action.data is not None:
            # get all hashes for this action
            hashes, data = misc.get_data_digest(
                action.data(),
                length=size,
                return_content=True,
                hash_attrs=digest.DEFAULT_HASH_ATTRS,
                hash_algs=digest.HASH_ALGS)

            # set the hash member for backwards compatibility and
            # remove it from the dictionary
            action.hash = hashes.pop("hash", None)
            action.attrs.update(hashes)

            # now set the hash value that will be used for storing
            # the file in the repository.
            hash_attr, hash_val, hash_func = \
                digest.get_least_preferred_hash(action)
            fname = hash_val

            # Extract ELF information
            # XXX This needs to be modularized.
            if haveelf and data[:4] == "\x7fELF":
                elf_name = os.path.join(self.dir, ".temp-{0}".format(fname))
                elf_file = open(elf_name, "wb")
                elf_file.write(data)
                elf_file.close()

                try:
                    elf_info = elf.get_info(elf_name)
                except elf.ElfError as e:
                    raise TransactionContentError(e)

                try:
                    # Check which content checksums to
                    # compute and add to the action
                    elf256 = "pkg.content-type.sha256"
                    elf1 = "elfhash"

                    if elf256 in \
                        digest.DEFAULT_CONTENT_HASH_ATTRS:
                        get_sha256 = True
                    else:
                        get_sha256 = False

                    if elf1 in \
                        digest.DEFAULT_CONTENT_HASH_ATTRS:
                        get_sha1 = True
                    else:
                        get_sha1 = False

                    dyn = elf.get_dynamic(elf_name,
                                          sha1=get_sha1,
                                          sha256=get_sha256)

                    if get_sha1:
                        action.attrs[elf1] = dyn[elf1]

                    if get_sha256:
                        action.attrs[elf256] = \
                            dyn[elf256]

                except elf.ElfError:
                    pass
                action.attrs["elfbits"] = str(elf_info["bits"])
                action.attrs["elfarch"] = elf_info["arch"]
                os.unlink(elf_name)

            try:
                dst_path = self.rstore.file(fname)
            except Exception as e:
                # The specific exception can't be named here due
                # to the cyclic dependency between this class
                # and the repository class.
                if getattr(e, "data", "") != fname:
                    raise
                dst_path = None

            csize, chashes = misc.compute_compressed_attrs(
                fname, dst_path, data, size, self.dir)
            for attr in chashes:
                action.attrs[attr] = chashes[attr].hexdigest()
            action.attrs["pkg.csize"] = csize
            chash = None
            data = None

        self.remaining_payload_cnt = \
            len(action.attrs.get("chain.sizes", "").split())

        # Do some sanity checking on packages marked or being marked
        # obsolete or renamed.
        if action.name == "set" and \
            action.attrs["name"] == "pkg.obsolete" and \
            action.attrs["value"] == "true":
            self.obsolete = True
            if self.types_found.difference(set(("set", "signature"))):
                raise TransactionOperationError(
                    _("An obsolete "
                      "package cannot contain actions other than "
                      "'set' and 'signature'."))
        elif action.name == "set" and \
            action.attrs["name"] == "pkg.renamed" and \
            action.attrs["value"] == "true":
            self.renamed = True
            if self.types_found.difference(set(
                ("depend", "set", "signature"))):
                raise TransactionOperationError(
                    _("A renamed "
                      "package cannot contain actions other than "
                      "'set', 'depend', and 'signature'."))

        if not self.has_reqdeps and action.name == "depend" and \
            action.attrs["type"] == "require":
            self.has_reqdeps = True

        if self.obsolete and self.renamed:
            # Reset either obsolete or renamed, depending on which
            # action this was.
            if action.attrs["name"] == "pkg.obsolete":
                self.obsolete = False
            else:
                self.renamed = False
            raise TransactionOperationError(
                _("A package may not "
                  " be marked for both obsoletion and renaming."))
        elif self.obsolete and action.name not in ("set", "signature"):
            raise TransactionOperationError(
                _("A '{type}' action "
                  "cannot be present in an obsolete package: "
                  "{action}").format(type=action.name, action=action))
        elif self.renamed and action.name not in \
            ("depend", "set", "signature"):
            raise TransactionOperationError(
                _("A '{type}' action "
                  "cannot be present in a renamed package: "
                  "{action}").format(type=action.name, action=action))

        # Now that the action is known to be sane, we can add it to the
        # manifest.
        tfpath = os.path.join(self.dir, "manifest")
        tfile = file(tfpath, "ab+")
        print(action, file=tfile)
        tfile.close()

        self.types_found.add(action.name)
Code Example #14
File: file.py Project: pombredanne/svnedge-console
    def verify(self, img, **args):
        """ verify that file is present and if preserve attribute
                not present, that hashes match"""
        path = os.path.normpath(
            os.path.sep.join((img.get_root(), self.attrs["path"])))

        lstat, errors, abort = \
            self.verify_fsobj_common(img, stat.S_IFREG)
        if lstat:
            if not stat.S_ISREG(lstat.st_mode):
                self.replace_required = True

        if abort:
            assert errors
            return errors

        if path.lower().endswith("/cat") and args["verbose"] == True:
            errors.append("Warning: package may contain bobcat!  "
                          "(http://xkcd.com/325/)")

        if "timestamp" in self.attrs and lstat.st_mtime != \
            misc.timestamp_to_time(self.attrs["timestamp"]):
            errors.append("Timestamp: %s should be %s" %
                          (misc.time_to_timestamp(
                              lstat.st_mtime), self.attrs["timestamp"]))

        # avoid checking pkg.size if elfhash present;
        # different size files may have the same elfhash
        if "preserve" not in self.attrs and \
            "pkg.size" in self.attrs and    \
            "elfhash" not in self.attrs and \
            lstat.st_size != int(self.attrs["pkg.size"]):
            errors.append("Size: %d bytes should be %d" % \
                (lstat.st_size, int(self.attrs["pkg.size"])))

        if "preserve" in self.attrs:
            return errors

        if args["forever"] != True:
            return errors

        #
        # Check file contents
        #
        try:
            elfhash = None
            elferror = None
            if "elfhash" in self.attrs and haveelf:
                #
                # It's possible for the elf module to
                # throw while computing the hash,
                # especially if the file is badly
                # corrupted or truncated.
                #
                try:
                    elfhash = elf.get_dynamic(path)["hash"]
                except RuntimeError as e:
                    errors.append("Elfhash: %s" % e)

                if elfhash is not None and \
                    elfhash != self.attrs["elfhash"]:
                    elferror = "Elfhash: %s should be %s" % \
                        (elfhash, self.attrs["elfhash"])

            # If we failed to compute the content hash, or the
            # content hash failed to verify, try the file hash.
            # If the content hash fails to match but the file hash
            # matches, it indicates that the content hash algorithm
            # changed, since obviously the file hash is a superset
            # of the content hash.
            if elfhash is None or elferror:
                hashvalue, data = misc.get_data_digest(path)
                if hashvalue != self.hash:
                    # Prefer the content hash error message.
                    if elferror:
                        errors.append(elferror)
                    else:
                        errors.append("Hash: %s should be %s" % \
                            (hashvalue, self.hash))
                    self.replace_required = True
Code Example #15
            # Extract ELF information
            # XXX This needs to be modularized.
            if haveelf and data[:4] == "\x7fELF":
                elf_name = "%s/.temp" % self.dir
                elf_file = open(elf_name, "wb")
                elf_file.write(data)
                elf_file.close()

                try:
                    elf_info = elf.get_info(elf_name)
                except elf.ElfError as e:
                    raise TransactionContentError(e)

                try:
                    elf_hash = elf.get_dynamic(elf_name)["hash"]
                    action.attrs["elfhash"] = elf_hash
                except elf.ElfError:
                    pass
                action.attrs["elfbits"] = str(elf_info["bits"])
                action.attrs["elfarch"] = elf_info["arch"]
                os.unlink(elf_name)

            #
            # This check prevents entering into the depot store
            # a file which is already there in the store.
            # This takes CPU load off the depot on large imports
            # of mostly-the-same stuff.  And in general it saves
            # disk bandwidth, and on ZFS in particular it saves
            # us space in differential snapshots.  We also need
            # to check that the destination is in the same
Code Example #16
        def add_content(self, action):
                """Adds the content of the provided action (if applicable) to
                the Transaction."""

                size = int(action.attrs.get("pkg.size", 0))

                if action.name in ("file", "license") and size <= 0:
                        # XXX hack for empty files
                        action.data = lambda: open(os.devnull, "rb")

                if action.data is not None:
                        bufsz = 64 * 1024

                        fname, data = misc.get_data_digest(action.data(),
                            length=size, return_content=True)

                        action.hash = fname

                        # Extract ELF information
                        # XXX This needs to be modularized.
                        if haveelf and data[:4] == "\x7fELF":
                                elf_name = "%s/.temp" % self.dir
                                elf_file = open(elf_name, "wb")
                                elf_file.write(data)
                                elf_file.close()

                                try:
                                        elf_info = elf.get_info(elf_name)
                                except elf.ElfError as e:
                                        raise TransactionContentError(e)

                                try:
                                        elf_hash = elf.get_dynamic(
                                            elf_name)["hash"]
                                        action.attrs["elfhash"] = elf_hash
                                except elf.ElfError:
                                        pass
                                action.attrs["elfbits"] = str(elf_info["bits"])
                                action.attrs["elfarch"] = elf_info["arch"]
                                os.unlink(elf_name)

                        #
                        # This check prevents entering into the depot store
                        # a file which is already there in the store.
                        # This takes CPU load off the depot on large imports
                        # of mostly-the-same stuff.  And in general it saves
                        # disk bandwidth, and on ZFS in particular it saves
                        # us space in differential snapshots.  We also need
                        # to check that the destination is in the same
                        # compression format as the source, as we must have
                        # properly formed files for chash/csize properties
                        # to work right.
                        #
                        fpath = misc.hash_file_name(fname)
                        dst_path = "%s/%s" % (self.cfg.file_root, fpath)
                        fileneeded = True
                        if os.path.exists(dst_path):
                                if PkgGzipFile.test_is_pkggzipfile(dst_path):
                                        fileneeded = False
                                        opath = dst_path

                        if fileneeded:
                                opath = os.path.join(self.dir, fname)
                                ofile = PkgGzipFile(opath, "wb")

                                nbuf = size // bufsz

                                for n in range(0, nbuf):
                                        l = n * bufsz
                                        h = (n + 1) * bufsz
                                        ofile.write(data[l:h])

                                m = nbuf * bufsz
                                ofile.write(data[m:])
                                ofile.close()

                        data = None

                        # Now that the file has been compressed, determine its
                        # size and store that as an attribute in the manifest
                        # for the file.
                        fs = os.stat(opath)
                        action.attrs["pkg.csize"] = str(fs.st_size)

                        # Compute the SHA hash of the compressed file.
                        # Store this as the chash attribute of the file's
                        # action.  In order for this to work correctly, we
                        # have to use the PkgGzipFile class.  It omits
                        # filename and timestamp information from the gzip
                        # header, allowing us to generate deterministic
                        # hashes for different files with identical content.
                        cfile = open(opath, "rb")
                        chash = sha.new()
                        while True:
                                cdata = cfile.read(bufsz)
                                if cdata == "":
                                        break
                                chash.update(cdata)
                        cfile.close()
                        action.attrs["chash"] = chash.hexdigest()
                        cdata = None
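The long comment in the example above explains why PkgGzipFile omits the filename and timestamp from the gzip header: otherwise identical content would compress to different bytes and yield different chash values. A standalone illustration of that idea using only the standard library (this is not the PkgGzipFile implementation):

import gzip
import hashlib
import io

def deterministic_chash(data):
    """Compress 'data' with a fixed gzip header and return the SHA-1 of
    the compressed bytes.  An empty filename and mtime=0 keep the gzip
    header constant, so identical input always yields the same digest."""
    buf = io.BytesIO()
    with gzip.GzipFile(filename="", mode="wb", fileobj=buf, mtime=0) as gz:
        gz.write(data)
    return hashlib.sha1(buf.getvalue()).hexdigest()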