Example #1
0
    def get_chain_certs(self, least_preferred=False):
        """Return the list of chain-certificate hashes needed to
                validate this signature. Content is retrieved from the
                repository under the "least preferred" hash for backwards
                compatibility, while verification uses the "most
                preferred" hash; 'least_preferred' selects which one this
                call returns."""

        # Select the hash-lookup helper once instead of duplicating the
        # three-way unpacking in both branches.
        if least_preferred:
            lookup = digest.get_least_preferred_hash
        else:
            lookup = digest.get_preferred_hash
        chain_attr, chain_val, hash_func = lookup(self,
            hash_type=digest.CHAIN)
        # An absent/empty chain attribute means no certificates are
        # required.
        return chain_val.split() if chain_val else []
Example #2
0
File: license.py  Project: omniosorg/pkg5
    def get_text(self, img, pfmri, alt_pub=None):
        """Retrieves and returns the payload of the license (which
                should be text).  This may require remote retrieval of
                resources and so this could raise a TransportError or other
                ApiException.
                If there are UTF-8 encoding errors in the text replace them
                so that we still have a license to show rather than failing
                the entire operation.  The copy saved on disk is left as is.

                'alt_pub' is an optional alternate Publisher to use for
                any required transport operations.
                """

        path = self.get_local_path(img, pfmri)
        # Content is stored under the least-preferred hash for backwards
        # compatibility, so that is the digest recomputed here.
        hash_attr, hash_attr_val, hash_func = \
            digest.get_least_preferred_hash(self)
        try:
            with open(path, "rb") as fh:
                length = os.stat(path).st_size
                chash, txt = misc.get_data_digest(fh,
                                                  length=length,
                                                  return_content=True,
                                                  hash_func=hash_func)
                # Only trust the cached copy when its digest matches the
                # hash recorded on this action.
                if chash == hash_attr_val:
                    return misc.force_str(txt, errors='replace')
        except EnvironmentError as e:
            # A missing local file just means we must download the
            # license; any other OS error is unexpected and re-raised.
            if e.errno != errno.ENOENT:
                raise
        # Cache miss (file absent or stale hash): retrieve the content
        # from the publisher instead.
        try:
            if not alt_pub:
                alt_pub = img.get_publisher(pfmri.publisher)
            assert pfmri.publisher == alt_pub.prefix
            return img.transport.get_content(alt_pub,
                                             hash_attr_val,
                                             fmri=pfmri,
                                             hash_func=hash_func,
                                             errors="replace")
        finally:
            # Always discard transport downloads, even on failure.
            img.cleanup_downloads()
Example #3
0
    def get_text(self, img, pfmri, alt_pub=None):
        """Return the license payload as text.

                The locally cached copy is used when its digest matches
                this action's recorded hash; otherwise the content is
                fetched from the publisher, which may raise a
                TransportError or other ApiException.

                'alt_pub' is an optional alternate Publisher to use for
                any required transport operations.
                """

        hash_attr, hash_attr_val, hash_func = \
            digest.get_least_preferred_hash(self)
        local_path = self.get_local_path(img, pfmri)
        try:
            with open(local_path, "rb") as fobj:
                flen = os.stat(local_path).st_size
                chash, txt = misc.get_data_digest(fobj,
                                                  length=flen,
                                                  return_content=True,
                                                  hash_func=hash_func)
            if chash == hash_attr_val:
                return misc.force_str(txt)
        except EnvironmentError as e:
            # A missing cache file simply means we must download; any
            # other OS error is unexpected.
            if e.errno != errno.ENOENT:
                raise
        # If we get here, either the license file wasn't on disk, or the
        # hash didn't match.  In either case, go retrieve it from the
        # publisher.
        try:
            pub = alt_pub or img.get_publisher(pfmri.publisher)
            assert pfmri.publisher == pub.prefix
            return img.transport.get_content(pub,
                                             hash_attr_val,
                                             fmri=pfmri,
                                             hash_func=hash_func)
        finally:
            img.cleanup_downloads()
Example #4
0
    def add_file(self, f, size=None):
        """Adds the file to the Transaction.

        'f' is a filename or file-like object accepted by
        misc.get_data_digest().

        'size' is the optional payload length; when omitted it is taken
        from the data actually read."""

        hashes, data = misc.get_data_digest(
            f,
            length=size,
            return_content=True,
            hash_attrs=digest.DEFAULT_HASH_ATTRS,
            hash_algs=digest.HASH_ALGS)

        if size is None:
            size = len(data)

        # 'fname' must exist before the try block: the except clause
        # below references it, and would otherwise hit a NameError if
        # the hash lookup itself raised.
        fname = None
        try:
            # We don't have an Action yet, so passing None is fine.
            default_hash_attr = digest.get_least_preferred_hash(None)[0]
            fname = hashes[default_hash_attr]
            dst_path = self.rstore.file(fname)
        except Exception as e:
            # The specific exception can't be named here due
            # to the cyclic dependency between this class
            # and the repository class.
            if getattr(e, "data", "") != fname:
                raise
            dst_path = None

        # Called for its side effects; the returned attributes are not
        # used by this method.
        csize, chashes = misc.compute_compressed_attrs(
            fname,
            dst_path,
            data,
            size,
            self.dir,
            chash_attrs=digest.DEFAULT_CHASH_ATTRS,
            chash_algs=digest.CHASH_ALGS)
        # Drop references to potentially large objects promptly.
        chashes = None
        data = None

        self.remaining_payload_cnt -= 1
Example #5
0
    def __set_data(self, pkgplan):
        """Private helper function to set the data field of the
                action."""

        img = pkgplan.image
        hash_attr, hash_attr_val, hash_func = \
            digest.get_least_preferred_hash(self)

        # Record this retrieval and fail the plan once too many
        # unexpected retrievals have occurred (or when the debug knob
        # forces an immediate failure).
        retrieved = img.imageplan._retrieved
        retrieved.add(self.get_installed_path(img.get_root()))
        if len(retrieved) > 50 or \
            DebugValues['max-plan-execute-retrievals'] == 1:
            raise api_errors.PlanExecutionError(retrieved)

        # This is an unexpected file retrieval, so the retrieved file
        # will be streamed directly from the source to the final
        # destination and will not be stored in the image download
        # cache.
        try:
            pub = img.get_publisher(pkgplan.destination_fmri.publisher)
            stream = img.transport.get_datastream(pub, hash_attr_val)
            return lambda: stream
        finally:
            img.cleanup_downloads()
Example #6
0
    def verify_sig(self,
                   acts,
                   pub,
                   trust_anchors,
                   use_crls,
                   required_names=None):
        """Try to verify this signature.  It can return True or
                None.  None means we didn't know how to verify this signature.
                If we do know how to verify the signature but it doesn't verify,
                then an exception is raised.

                The 'acts' parameter is the iterable of actions against which
                to verify the signature.

                The 'pub' parameter is the publisher that published the
                package this action signed.

                The 'trust_anchors' parameter contains the trust anchors to use
                when verifying the signature.

                The 'use_crls' parameter is passed through to the publisher's
                certificate chain verification.

                The 'required_names' parameter is a set of strings that must
                be seen as a CN in the chain of trust for the certificate."""

        ver = int(self.attrs["version"])
        # If this signature is tagged with variants, if the version is
        # higher than one we know about, or it uses an unrecognized
        # hash algorithm, we can't handle it yet.
        if self.get_variant_template() or \
            ver > generic.Action.sig_version or not self.hash_alg:
            return None
        # Turning this into a list makes debugging vastly more
        # tractable.
        acts = list(acts)
        # If self.hash is None, then the signature is storing a hash
        # of the actions, not a signed value.
        if self.hash is None:
            assert self.sig_alg is None
            # Recompute the digest over the canonical string form of
            # the actions and compare it against the stored value.
            h = hashlib.new(self.hash_alg)
            h.update(misc.force_bytes(self.actions_to_str(acts, ver)))
            computed_hash = h.digest()
            # The attrs value is stored in hex so that it's easy
            # to read.
            if misc.hex_to_binary(self.attrs["value"]) != \
                computed_hash:
                raise apx.UnverifiedSignature(
                    self,
                    _("The signature value did not match the "
                      "expected value. action: {0}").format(self))
            return True
        # Verify a signature that's not just a hash.
        if self.sig_alg is None:
            return None
        # Get the certificate paired with the key which signed this
        # action.
        attr, hash_val, hash_func = \
            digest.get_least_preferred_hash(self)
        cert = pub.get_cert_by_hash(hash_val,
                                    verify_hash=True,
                                    hash_func=hash_func)
        # Make sure that the intermediate certificates that are needed
        # to validate this signature are present.
        self.retrieve_chain_certs(pub)
        try:
            # This import is placed here to break a circular
            # import seen when merge.py is used.
            from pkg.client.publisher import CODE_SIGNING_USE
            # Verify the certificate whose key created this
            # signature action.
            pub.verify_chain(cert,
                             trust_anchors,
                             0,
                             use_crls,
                             required_names=required_names,
                             usages=CODE_SIGNING_USE)
        except apx.SigningException as e:
            # Attach the failing action so callers can report which
            # signature was at fault.
            e.act = self
            raise
        # Check that the certificate verifies against this signature.
        pub_key = cert.public_key()
        hhash = self.__get_hash_by_name(self.hash_alg)
        signature = misc.hex_to_binary(self.attrs["value"])

        # RSA PKCS#1 v1.5 verification using the action's hash
        # algorithm; InvalidSignature means the signature does not
        # match the data under this key.
        try:
            pub_key.verify(signature,
                           misc.force_bytes(self.actions_to_str(acts, ver)),
                           padding.PKCS1v15(), hhash())
        except InvalidSignature:
            raise apx.UnverifiedSignature(
                self,
                _("The signature value did not match the expected "
                  "value."))

        return True
Example #7
0
    def _process_action(self, action, exact=False, path=None):
        """Adds all expected attributes to the provided action and
                upload the file for the action if needed.

                If 'exact' is True and 'path' is 'None', the action won't
                be modified and no file will be uploaded.

                If 'exact' is True and a 'path' is provided, the file of that
                path will be uploaded as-is (it is assumed that the file is
                already in repository format).
                """

        # Only signature actions may be added while appending.
        if self._append_mode and action.name != "signature":
            raise TransactionOperationError(non_sig=True)

        size = int(action.attrs.get("pkg.size", 0))

        if action.has_payload and size <= 0:
            # XXX hack for empty files
            action.data = lambda: open(os.devnull, "rb")

        # Actions without a payload need no file processing.
        if action.data is None:
            return

        if exact:
            # Upload the provided file as-is under the action's hash.
            if path:
                self.add_file(path,
                              basename=action.hash,
                              progtrack=self.progtrack)
            return

        # Get all hashes for this action.
        hashes, dummy = misc.get_data_digest(
            action.data(),
            length=size,
            hash_attrs=digest.DEFAULT_HASH_ATTRS,
            hash_algs=digest.HASH_ALGS)
        # Set the hash member for backwards compatibility and
        # remove it from the dictionary.
        action.hash = hashes.pop("hash", None)
        action.attrs.update(hashes)

        # Add file content-hash when preferred_hash is SHA2 or higher.
        if action.name != "signature" and \
            digest.PREFERRED_HASH != "sha1":
            hash_attr = "{0}:{1}".format(digest.EXTRACT_FILE,
                                         digest.PREFERRED_HASH)
            file_content_hash, dummy = misc.get_data_digest(
                action.data(),
                length=size,
                return_content=False,
                hash_attrs=[hash_attr],
                hash_algs=digest.HASH_ALGS)
            action.attrs["pkg.content-hash"] = "{0}:{1}".format(
                hash_attr, file_content_hash[hash_attr])

        # Now set the hash value that will be used for storing the file
        # in the repository.
        hash_attr, hash_val, hash_func = \
            digest.get_least_preferred_hash(action)
        fname = hash_val

        # Reuse the results computed for an earlier action with the
        # same payload, if any.
        hdata = self.__uploads.get(fname)
        if hdata is not None:
            elf_attrs, csize, chashes = hdata
        else:
            # We haven't processed this file before, determine if
            # it needs to be uploaded and what information the
            # repository knows about it.
            elf_attrs = self.__get_elf_attrs(action, fname, size)
            csize, chashes = self.__get_compressed_attrs(fname)

            # A 'csize' of None indicates that the file needs to be
            # uploaded.
            fileneeded = csize is None
            if fileneeded:
                fpath = os.path.join(self._tmpdir, fname)
                csize, chashes = misc.compute_compressed_attrs(
                    fname,
                    data=action.data(),
                    size=size,
                    compress_dir=self._tmpdir)
                # Upload the compressed file for each action.
                self.add_file(fpath, basename=fname, progtrack=self.progtrack)
                os.unlink(fpath)
                self.__uploaded += 1
            elif not chashes:
                # If not fileneeded, and repository can't
                # provide desired hashes, call
                # compute_compressed_attrs() in a way that
                # avoids writing the file to get the attributes
                # we need.
                csize, chashes = misc.compute_compressed_attrs(
                    fname, data=action.data(), size=size)

            self.__uploads[fname] = (elf_attrs, csize, chashes)

        # Merge the computed ELF and compression attributes into the
        # action; list-valued attributes are prepended/appended rather
        # than replaced.
        for k, v in six.iteritems(elf_attrs):
            if isinstance(v, list):
                action.attrs[k] = v + action.attrlist(k)
            else:
                action.attrs[k] = v
        for k, v in six.iteritems(chashes):
            if k == "pkg.content-hash":
                action.attrs[k] = action.attrlist(k) + [v]
            else:
                action.attrs[k] = v
        action.attrs["pkg.csize"] = csize
Example #8
0
File: t_p5p.py  Project: omniosorg/pkg5
    def __verify_extract(self,
                         repo,
                         arc_path,
                         hashes,
                         ext_dir,
                         archive_index=None):
        """Helper method to test extraction and retrieval functionality.

                'repo' is passed through to the manifest signature
                verification helpers.

                'arc_path' is the path of the p5p archive under test.

                'hashes' maps publisher prefixes (plus the key "all") to
                collections of payload hashes expected in the archive.

                'ext_dir' is the directory used as the extraction target.

                'archive_index' is an optional pre-built index passed
                through to pkg.p5p.Archive.
                """

        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)

        #
        # Verify behaviour of extract_package_manifest().
        #

        # Test bad FMRI.
        # NOTE(review): the FMRI literals below (e.g. "[email protected]")
        # appear garbled by an e-mail obfuscator; confirm the original
        # test values before relying on them.
        self.assertRaises(pkg.fmri.IllegalFmri, arc.extract_package_manifest,
                          "pkg:/^[email protected],5.11", ext_dir)

        # Test unqualified (no publisher) FMRI.
        self.assertRaises(AssertionError, arc.extract_package_manifest,
                          "pkg:/[email protected],5.11", ext_dir)

        # Test unknown FMRI.
        self.assertRaisesStringify(pkg.p5p.UnknownPackageManifest,
                                   arc.extract_package_manifest,
                                   "pkg://test/[email protected],5.11", ext_dir)

        # Test extraction when not specifying filename.
        fpath = os.path.join(ext_dir, self.foo.get_dir_path())
        arc.extract_package_manifest(self.foo, ext_dir)
        self.__verify_manifest_file_sig(repo, self.foo, fpath)

        # Test extraction specifying directory that does not exist.
        shutil.rmtree(ext_dir)
        arc.extract_package_manifest(self.foo, ext_dir, filename="foo.p5m")
        self.__verify_manifest_file_sig(repo, self.foo,
                                        os.path.join(ext_dir, "foo.p5m"))

        # Test extraction specifying directory that already exists.
        arc.extract_package_manifest(self.quux, ext_dir, filename="quux.p5m")
        self.__verify_manifest_file_sig(repo, self.quux,
                                        os.path.join(ext_dir, "quux.p5m"))

        # Test extraction in the case that manifest already exists.
        arc.extract_package_manifest(self.quux, ext_dir, filename="quux.p5m")
        self.__verify_manifest_file_sig(repo, self.quux,
                                        os.path.join(ext_dir, "quux.p5m"))

        #
        # Verify behaviour of extract_package_files().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)
        shutil.rmtree(ext_dir)

        # Test unknown hashes.
        self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles,
                                   arc.extract_package_files, ["a", "b", "c"],
                                   ext_dir)

        # Test extraction specifying directory that does not exist.
        arc.extract_package_files(hashes["all"], ext_dir)
        for h in hashes["all"]:
            fpath = os.path.join(ext_dir, h)
            assert os.path.exists(fpath)

            # Now change mode to readonly.
            os.chmod(fpath, pkg.misc.PKG_RO_FILE_MODE)

        # Test extraction in the case that files already exist
        # (and those files are readonly).
        arc.extract_package_files(hashes["all"], ext_dir)
        for h in hashes["all"]:
            assert os.path.exists(os.path.join(ext_dir, h))

        # Test extraction when publisher is specified.
        shutil.rmtree(ext_dir)
        arc.extract_package_files(hashes["test"], ext_dir, pub="test")
        for h in hashes["test"]:
            assert os.path.exists(os.path.join(ext_dir, h))

        #
        # Verify behaviour of extract_to().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)
        shutil.rmtree(ext_dir)

        # Test unknown file.
        self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles, arc.extract_to,
                                   "no/such/file", ext_dir)

        # Test extraction when not specifying filename (archive
        # member should be extracted into target directory using
        # full path in archive; that is, the target dir is pre-
        # pended).
        for pub in hashes:
            if pub == "all":
                continue
            for h in hashes[pub]:
                arcname = os.path.join("publisher", pub, "file", h[:2], h)
                arc.extract_to(arcname, ext_dir)

                fpath = os.path.join(ext_dir, arcname)
                assert os.path.exists(fpath)

        # Test extraction specifying directory that does not exist.
        shutil.rmtree(ext_dir)
        for pub in hashes:
            if pub == "all":
                continue
            for h in hashes[pub]:
                arcname = os.path.join("publisher", pub, "file", h[:2], h)
                arc.extract_to(arcname, ext_dir, filename=h)

                fpath = os.path.join(ext_dir, h)
                assert os.path.exists(fpath)

                # Now change mode to readonly.
                os.chmod(fpath, pkg.misc.PKG_RO_FILE_MODE)

        # Test extraction in the case that files already exist
        # (and those files are readonly).
        for pub in hashes:
            if pub == "all":
                continue
            for h in hashes[pub]:
                arcname = os.path.join("publisher", pub, "file", h[:2], h)
                arc.extract_to(arcname, ext_dir, filename=h)

                fpath = os.path.join(ext_dir, h)
                assert os.path.exists(fpath)

        #
        # Verify behaviour of get_file().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)

        # Test behaviour for non-existent file.
        self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles, arc.get_file,
                                   "no/such/file")

        # Test that archived content retrieved is identical.
        arcname = os.path.join("publisher", self.foo.publisher, "pkg",
                               self.foo.get_dir_path())
        fobj = arc.get_file(arcname)
        self.__verify_manifest_sig(repo, self.foo, fobj.read())
        fobj.close()

        #
        # Verify behaviour of get_package_file().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)

        # We always store content using the least_preferred hash, so
        # determine what that is so that we can verify it using
        # gunzip_from_stream.
        hash_func = digest.get_least_preferred_hash(None)[2]

        # Test behaviour when specifying publisher.  Decompressed
        # output is discarded; only the returned hash is checked.
        nullf = open(os.devnull, "wb")
        for h in hashes["test"]:
            fobj = arc.get_package_file(h, pub="test")
            uchash = pkg.misc.gunzip_from_stream(fobj,
                                                 nullf,
                                                 hash_func=hash_func)
            self.assertEqual(uchash, h)
            fobj.close()

        # Test behaviour when not specifying publisher.
        for h in hashes["test"]:
            fobj = arc.get_package_file(h)
            uchash = pkg.misc.gunzip_from_stream(fobj,
                                                 nullf,
                                                 hash_func=hash_func)
            self.assertEqual(uchash, h)
            fobj.close()
        nullf.close()

        #
        # Verify behaviour of get_package_manifest().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)

        # Test bad FMRI.
        self.assertRaises(pkg.fmri.IllegalFmri, arc.get_package_manifest,
                          "pkg:/^[email protected],5.11")

        # Test unqualified (no publisher) FMRI.
        self.assertRaises(AssertionError, arc.get_package_manifest,
                          "pkg:/[email protected],5.11")

        # Test unknown FMRI.
        self.assertRaisesStringify(pkg.p5p.UnknownPackageManifest,
                                   arc.get_package_manifest,
                                   "pkg://test/[email protected],5.11")

        # Test that archived content retrieved is identical.
        mobj = arc.get_package_manifest(self.foo)
        self.__verify_manifest_sig(repo, self.foo, mobj)

        mobj = arc.get_package_manifest(self.signed)
        self.__verify_manifest_sig(repo, self.signed, mobj)

        #
        # Verify behaviour of extract_catalog1().
        #
        arc.close()
        arc = pkg.p5p.Archive(arc_path, mode="r", archive_index=archive_index)
        ext_tmp_dir = tempfile.mkdtemp(dir=self.test_root)

        def verify_catalog(pub, pfmris):
            # Extract each catalog part and check the reassembled
            # catalog lists exactly the expected FMRIs.
            for pname in ("catalog.attrs", "catalog.base.C",
                          "catalog.dependency.C", "catalog.summary.C"):
                expected = os.path.join(ext_tmp_dir, pname)
                try:
                    arc.extract_catalog1(pname, ext_tmp_dir, pub=pub)
                except pkg.p5p.UnknownArchiveFiles:
                    if pname == "catalog.dependency.C":
                        # No dependencies, so exception
                        # is only expected for this.
                        continue
                    raise

                assert os.path.exists(expected)

            cat = pkg.catalog.Catalog(meta_root=ext_tmp_dir)
            self.assertEqual(sorted([f for f in cat.fmris()]), sorted(pfmris))

        verify_catalog("test", [self.foo, self.signed])
        shutil.rmtree(ext_tmp_dir)
        os.mkdir(ext_tmp_dir)

        verify_catalog("test2", [self.quux])
        shutil.rmtree(ext_tmp_dir)
        return arc
Example #9
0
        def add_file(self, f, basename=None, size=None):
                """Adds the file to the Transaction.

                'f' is a filename or a file-like object providing the
                payload.

                'basename' if provided causes the content to be stored
                as-is under that name with no hashing or compression.

                'size' is the optional payload length; when known it
                bounds the copy buffer and avoids re-measuring the data."""

                # If basename provided, just store the file as-is with the
                # basename.
                if basename:
                        fileneeded = True
                        try:
                                dst_path = self.rstore.file(basename)
                                fileneeded = False
                        except Exception:
                                dst_path = os.path.join(self.dir, basename)

                        if not fileneeded:
                                return

                        if isinstance(f, six.string_types):
                                portable.copyfile(f, dst_path)
                                return

                        # Bound the copy buffer by the payload size when
                        # known; 'size' defaults to None, which must not
                        # be compared against an int.
                        bufsz = 128 * 1024
                        if size is not None and bufsz > size:
                                bufsz = size

                        with open(dst_path, "wb") as wf:
                                while True:
                                        data = f.read(bufsz)
                                        # data is bytes
                                        if data == b"":
                                                break
                                        wf.write(data)
                        return

                hashes, data = misc.get_data_digest(f, length=size,
                    return_content=True, hash_attrs=digest.DEFAULT_HASH_ATTRS,
                    hash_algs=digest.HASH_ALGS)

                if size is None:
                        size = len(data)

                # 'fname' must exist before the try block: the except
                # clause below references it even when the hash lookup
                # itself fails.
                fname = None
                try:
                        # We don't have an Action yet, so passing None is fine.
                        default_hash_attr = digest.get_least_preferred_hash(
                            None)[0]
                        fname = hashes[default_hash_attr]
                        dst_path = self.rstore.file(fname)
                except Exception as e:
                        # The specific exception can't be named here due
                        # to the cyclic dependency between this class
                        # and the repository class.
                        if getattr(e, "data", "") != fname:
                                raise
                        dst_path = None

                # Called for its side effects; the returned attributes
                # are not needed here.
                misc.compute_compressed_attrs(fname, dst_path,
                    data, size, self.dir,
                    chash_attrs=digest.DEFAULT_CHASH_ATTRS,
                    chash_algs=digest.CHASH_ALGS)

                self.remaining_payload_cnt -= 1
Example #10
0
        def add_content(self, action):
                """Adds the content of the provided action (if applicable) to
                the Transaction.

                Validates the action, computes and attaches hash, ELF, and
                compressed-size attributes for payload-bearing actions, and
                appends the action to the in-progress manifest.  Raises
                TransactionOperationError for invalid actions or invalid
                obsolete/renamed combinations and TransactionContentError
                for unparsable ELF payloads."""

                # Perform additional publication-time validation of actions
                # before further processing is done.
                try:
                        action.validate()
                except actions.ActionError as e:
                        raise TransactionOperationError(e)

                # Only signature actions may be added when appending.
                if self.append_trans and action.name != "signature":
                        raise TransactionOperationError(non_sig=True)

                size = int(action.attrs.get("pkg.size", 0))

                if action.has_payload and size <= 0:
                        # XXX hack for empty files
                        action.data = lambda: open(os.devnull, "rb")

                if action.data is not None:
                        # get all hashes for this action
                        hashes, data = misc.get_data_digest(action.data(),
                            length=size, return_content=True,
                            hash_attrs=digest.LEGACY_HASH_ATTRS,
                            hash_algs=digest.HASH_ALGS)

                        # set the hash member for backwards compatibility and
                        # remove it from the dictionary
                        action.hash = hashes.pop("hash", None)
                        action.attrs.update(hashes)

                        # now set the hash value that will be used for storing
                        # the file in the repository.
                        hash_attr, hash_val, hash_func = \
                            digest.get_least_preferred_hash(action)
                        fname = hash_val

                        # Extract ELF information if not already provided.
                        # XXX This needs to be modularized.
                        if haveelf and data[:4] == b"\x7fELF" and (
                            "elfarch" not in action.attrs or
                            "elfbits" not in action.attrs or
                            "elfhash" not in action.attrs):
                                elf_name = os.path.join(self.dir,
                                    ".temp-{0}".format(fname))
                                # Write the payload out so the ELF parser
                                # can inspect a real file.
                                with open(elf_name, "wb") as elf_file:
                                        elf_file.write(data)

                                try:
                                        elf_info = elf.get_info(elf_name)
                                except elf.ElfError as e:
                                        raise TransactionContentError(e)

                                try:
                                        # Check which content checksums to
                                        # compute and add to the action
                                        elf1 = "elfhash"

                                        if elf1 in \
                                            digest.LEGACY_CONTENT_HASH_ATTRS:
                                                get_sha1 = True
                                        else:
                                                get_sha1 = False

                                        hashes = elf.get_hashes(elf_name,
                                            elfhash=get_sha1)

                                        if get_sha1:
                                                action.attrs[elf1] = hashes[elf1]

                                except elf.ElfError:
                                        # Content hashing is best-effort;
                                        # bits/arch info below still applies.
                                        pass
                                action.attrs["elfbits"] = str(elf_info["bits"])
                                action.attrs["elfarch"] = elf_info["arch"]
                                os.unlink(elf_name)

                        try:
                                dst_path = self.rstore.file(fname)
                        except Exception as e:
                                # The specific exception can't be named here due
                                # to the cyclic dependency between this class
                                # and the repository class.
                                if getattr(e, "data", "") != fname:
                                        raise
                                dst_path = None

                        csize, chashes = misc.compute_compressed_attrs(
                            fname, dst_path, data, size, self.dir)
                        for attr in chashes:
                                action.attrs[attr] = chashes[attr]
                        action.attrs["pkg.csize"] = csize

                self.remaining_payload_cnt = \
                    len(action.attrs.get("chain.sizes", "").split())

                # Do some sanity checking on packages marked or being marked
                # obsolete or renamed.
                if action.name == "set" and \
                    action.attrs["name"] == "pkg.obsolete" and \
                    action.attrs["value"] == "true":
                        self.obsolete = True
                        if self.types_found.difference(
                            set(("set", "signature"))):
                                raise TransactionOperationError(_("An obsolete "
                                    "package cannot contain actions other than "
                                    "'set' and 'signature'."))
                elif action.name == "set" and \
                    action.attrs["name"] == "pkg.renamed" and \
                    action.attrs["value"] == "true":
                        self.renamed = True
                        if self.types_found.difference(
                            set(("depend", "set", "signature"))):
                                raise TransactionOperationError(_("A renamed "
                                    "package cannot contain actions other than "
                                    "'set', 'depend', and 'signature'."))

                if not self.has_reqdeps and action.name == "depend" and \
                    action.attrs["type"] == "require":
                        self.has_reqdeps = True

                if self.obsolete and self.renamed:
                        # Reset either obsolete or renamed, depending on which
                        # action this was.
                        if action.attrs["name"] == "pkg.obsolete":
                                self.obsolete = False
                        else:
                                self.renamed = False
                        # The concatenated message previously contained a
                        # doubled space ("not  be").
                        raise TransactionOperationError(_("A package may not "
                            "be marked for both obsoletion and renaming."))
                elif self.obsolete and action.name not in ("set", "signature"):
                        raise TransactionOperationError(_("A '{type}' action "
                            "cannot be present in an obsolete package: "
                            "{action}").format(
                            type=action.name, action=action))
                elif self.renamed and action.name not in \
                    ("depend", "set", "signature"):
                        raise TransactionOperationError(_("A '{type}' action "
                            "cannot be present in a renamed package: "
                            "{action}").format(
                            type=action.name, action=action))

                # Now that the action is known to be sane, we can add it to the
                # manifest.
                tfpath = os.path.join(self.dir, "manifest")
                with open(tfpath, "a+") as tfile:
                        print(action, file=tfile)

                self.types_found.add(action.name)