Example 1
def main_func():
        global dry_run, tmpdir, xport, dest_xport, target_pub

        dest_repo     = None
        source_list   = []
        variant_list  = []
        pub_list      = []
        use_pub_list  = False

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "d:np:s:?",
                    ["help"])
                for opt, arg in opts:
                        if opt == "-d":
                                dest_repo = misc.parse_uri(arg)
                        elif opt == "-n":
                                dry_run = True
                        elif opt == "-s":
                                s = arg.split(",")
                                if len(s) < 2:
                                        usage("-s option must specify "
                                            "variant=value,repo_uri")

                                # All but last part should be variant.
                                src_vars = {}
                                for v in s[:-1]:
                                        try:
                                                vname, vval = v.split("=")
                                        except ValueError:
                                                usage("-s option must specify "
                                                    "variant=value,repo_uri")

                                        if not vname.startswith("variant."):
                                                vname = "variant.%s" % vname
                                        src_vars[vname] = vval

                                variant_list.append(src_vars)
                                source_list.append(publisher.RepositoryURI(
                                    misc.parse_uri(s[-1])))
                        elif opt == "-p":
                                use_pub_list = True
                                pub_list.append(arg)

                        if opt in ("--help", "-?"):
                                usage(exitcode=0)
        except getopt.GetoptError, e:
                usage(_("illegal option -- %s") % e.opt)
Example 2
def trans_publish(repo_uri, fargs):
        """Publish packages in a single step using provided manifest data and
        sources."""

        # --no-index is now silently ignored as the publication process no
        # longer builds search indexes automatically.
        opts, pargs = getopt.getopt(fargs, "b:d:s:T:", ["fmri-in-manifest",
            "no-index", "no-catalog"])

        add_to_catalog = True
        basedirs = []
        bundles = []
        timestamp_files = []
        for opt, arg in opts:
                if opt == "-b":
                        bundles.append(arg)
                elif opt == "-d":
                        basedirs.append(arg)
                elif opt == "-s":
                        repo_uri = arg
                        if repo_uri and not repo_uri.startswith("null:"):
                                repo_uri = misc.parse_uri(repo_uri)
                elif opt == "-T":
                        timestamp_files.append(arg)
                elif opt == "--no-catalog":
                        add_to_catalog = False

        if not repo_uri:
                usage(_("A destination package repository must be provided "
                    "using -s."), cmd="publish")
 
        if not pargs:
                filelist = [("<stdin>", sys.stdin)]
        else:
                try:
                        filelist = [(f, file(f)) for f in pargs]
                except IOError, e:
                        error(e, cmd="publish")
                        return 1
Example 3
File: pull.py Project: aszeszo/test
def transfer_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
    keep_compressed, raw, recursive, dry_run, dest_xport_cfg, src_uri):
        """Retrieve source package data and optionally republish it as each
        package is retrieved.
        """

        global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub

        any_unmatched = []
        any_matched = []
        invalid_manifests = []
        total_processed = 0

        for src_pub in xport_cfg.gen_publishers():
                tracker = get_tracker()
                if list_newest:
                        # Make sure the prog tracker knows we're doing a listing
                        # operation so that it suppresses irrelevant output.
                        tracker.set_purpose(tracker.PURPOSE_LISTING)

                        if pargs or len(pargs) > 0:
                                usage(_("--newest takes no options"))

                        src_cat = fetch_catalog(src_pub, tracker,
                            xport, False)
                        for f in src_cat.fmris(ordered=True, last=True):
                                msg(f.get_fmri())
                        continue

                msg(_("Processing packages for publisher %s ...") %
                    src_pub.prefix)
                if pargs == None or len(pargs) == 0:
                        usage(_("must specify at least one pkgfmri"))

                republish = False

                if not raw:
                        basedir = tempfile.mkdtemp(dir=temp_root,
                            prefix=global_settings.client_name + "-")
                        tmpdirs.append(basedir)
                        republish = True

                        # Turn target into a valid URI.
                        target = misc.parse_uri(target)

                        # Setup target for transport.
                        targ_pub = transport.setup_publisher(target,
                            src_pub.prefix, dest_xport, dest_xport_cfg)

                        # Files have to be decompressed for republishing.
                        keep_compressed = False
                        if target.startswith("file://"):
                                # Check to see if the repository exists first.
                                try:
                                        t = trans.Transaction(target,
                                            xport=dest_xport, pub=targ_pub)
                                except trans.TransactionRepositoryInvalidError, e:
                                        txt = str(e) + "\n\n"
                                        txt += _("To create a repository, use "
                                            "the pkgrepo command.")
                                        abort(err=txt)
                                except trans.TransactionRepositoryConfigError, e:
                                        txt = str(e) + "\n\n"
                                        txt += _("The repository configuration "
                                            "for the repository located at "
                                            "'%s' is not valid or the "
                                            "specified path does not exist.  "
                                            "Please correct the configuration "
                                            "of the repository or create a new "
                                            "one.") % target
                                        abort(err=txt)
                                except trans.TransactionError, e:
                                        abort(err=e)
Example 4
File: pull.py Project: aszeszo/test
                elif opt == "--newest":
                        list_newest = True
                elif opt == "--raw":
                        raw = True
                elif opt == "--key":
                        key = arg
                elif opt == "--cert":
                        cert = arg

        if not list_newest and not target:
                usage(_("a destination must be provided"))

        if not src_uri:
                usage(_("a source repository must be provided"))
        else:
                src_uri = misc.parse_uri(src_uri)

        if not cache_dir:
                cache_dir = tempfile.mkdtemp(dir=temp_root,
                    prefix=global_settings.client_name + "-")
                # Only clean-up cache dir if implicitly created by pkgrecv.
                # User's cache-dirs should be preserved
                tmpdirs.append(cache_dir)

        incoming_dir = tempfile.mkdtemp(dir=temp_root,
            prefix=global_settings.client_name + "-")
        tmpdirs.append(incoming_dir)

        # Create transport and transport config
        xport, xport_cfg = transport.setup_transport()
        xport_cfg.add_cache(cache_dir, readonly=False)
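Both snippets above create temporary directories with tempfile.mkdtemp and append them to a tmpdirs list so they can be removed later. A minimal sketch of that create-and-track pattern, assuming cleanup happens at process exit (the names here are illustrative, not pkgrecv's actual helpers):

import atexit
import shutil
import tempfile

tmpdirs = []

def make_tracked_tmpdir(root=None, prefix="pkgrecv-"):
    """Create a temporary directory and remember it for later removal."""
    d = tempfile.mkdtemp(dir=root, prefix=prefix)
    tmpdirs.append(d)
    return d

@atexit.register
def _cleanup_tmpdirs():
    # Best-effort removal of everything that was tracked above.
    for d in tmpdirs:
        shutil.rmtree(d, ignore_errors=True)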
Example 5
    def test_03_install_update(self):
        """Verify that install and update work as expected when
        compositing publishers.
        """

        #
        # Create an image and verify no packages are known.
        #
        self.image_create(self.empty_rurl, prefix=None)
        self.pkg("set-property signature-policy ignore")
        self.pkg("set-publisher --set-property signature-policy=ignore "
                 "test")
        self.pkg("list -a", exit=1)

        # Verify that packages with dependencies can be installed when
        # using multiple, disparate sources.
        self.pkg("set-publisher -g {0} -g {1} test".format(
            self.foo_arc, self.signed_arc))
        self.pkg("install signed")
        self.pkg("list foo signed")
        self.pkg("uninstall \\*")

        # Verify publisher can be removed.
        self.pkg("unset-publisher test")

        #
        # Create an image using the signed archive.
        #
        self.image_create(misc.parse_uri(self.signed_arc), prefix=None)
        self.__seed_ta_dir("ta1")

        # Verify that signed package can be installed and the archive
        # configured for the publisher allows dependencies to be
        # satisfied.
        self.pkg("set-publisher -g {0} test".format(self.foo_arc))
        self.pkg("set-property signature-policy verify")
        self.pkg("publisher test")
        self.pkg("install signed")
        self.pkg("list foo signed")

        # Verify that removing all packages and the signed archive as
        # a source leaves only foo known.
        self.pkg("uninstall \\*")
        self.pkg("set-publisher -G {0} test".format(self.signed_arc))
        self.pkg("list -aH")
        expected = "foo 1.0 ---\n"
        output = self.reduceSpaces(self.output)
        self.assertEqualDiff(expected, output)

        #
        # Create an image and verify no packages are known.
        #
        self.image_create(self.empty_rurl, prefix=None)
        self.pkg("list -a", exit=1)

        # Install an older version of a known package.
        self.pkg("set-publisher -g {0} test".format(self.all_arc))
        self.pkg("set-publisher -g {0} test2".format(self.all_arc))
        self.pkg("install [email protected]")
        self.pkg("list [email protected] [email protected]")

        # Verify that packages can be updated when using multiple,
        # disparate sources (that have some overlap).
        self.pkg("set-publisher -g {0} test".format(self.incorp_arc))
        self.pkg("update")
        self.pkg("list [email protected] [email protected]")

        #
        # Create an image using the signed archive.
        #
        self.image_create(misc.parse_uri(self.signed_arc), prefix=None)
        self.__seed_ta_dir("ta1")

        # Add the incorp archive as a source.
        self.pkg("set-publisher -g {0} test".format(self.incorp_arc))

        # Now verify that temporary package sources can be used during
        # package operations when multiple, disparate sources are
        # already configured for the same publisher.
        self.pkg("install -g {0} incorp signed".format(self.foo_rurl))
        self.pkg("list incorp foo signed")
Example 6
                                        value = True
                                else:
                                        try:
                                                key, value = arg.split("=", 1)
                                        except (AttributeError, ValueError):
                                                usage(_("%(opt)s takes argument of form "
                                                    "name=value, not %(arg)s") % {
                                                    "opt":  opt, "arg": arg })
                                DebugValues.set_value(key, value)
                        elif opt in ("--help", "-?"):
                                show_usage = True
        except getopt.GetoptError, e:
                usage(_("illegal global option -- %s") % e.opt)

        if repo_uri and not repo_uri.startswith("null:"):
                repo_uri = misc.parse_uri(repo_uri)

        subcommand = None
        if pargs:
                subcommand = pargs.pop(0)
                if subcommand == "help":
                        show_usage = True

        if show_usage:
                usage(retcode=0)
        elif not subcommand:
                usage()

        if not repo_uri and subcommand not in ("create-repository", "generate",
            "publish"):
                usage(_("A destination package repository must be provided "
Example 7
        def test_03_install_update(self):
                """Verify that install and update work as expected for temporary
                origins.
                """

                #
                # Create an image with no configured package sources, and
                # verify that a package can be installed from a temporary
                # source.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("set-property signature-policy ignore")
                self.pkg("list -a", exit=1)
                self.pkg("install -g {0} foo".format(self.foo_arc))

                #
                # Create an image with a network-based source, then make that
                # source unreachable and verify that a package can be installed
                # from a temporary source.
                #
                self.dcs[4].start()
                self.image_create(self.dcs[4].get_depot_url(), prefix=None)
                self.dcs[4].stop()
                self.pkg("set-property signature-policy ignore")
                self.pkg("list -a")
                # --no-refresh is required for now because -g combines temporary
                # sources with configured sources and pkg(5) currently treats
                # refresh failure as fatal.  See bug 18323.
                self.pkg("install --no-refresh -g {0} foo".format(self.foo_arc))

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("set-property signature-policy ignore")
                self.pkg("set-publisher --set-property signature-policy=ignore "
                    "test")
                self.pkg("list -a", exit=1)

                # Verify graceful failure if source doesn't exist.
                self.pkg("install -g {0} foo".format(self.foo_arc + ".nosuchpkg"),
                    exit=1)

                # Verify graceful failure if user doesn't have permission to
                # access temporary source.
                self.pkg("install -g {0} foo".format(self.perm_arc), su_wrap=True,
                    exit=1)

                # Verify attempting to install a package with a missing
                # dependency fails gracefully.
                self.pkg("install -g {0} signed".format(self.signed_arc), exit=1)

                # Verify a package from a publisher not already configured can
                # be installed using temporary origins.  Installing a package
                # in this scenario will result in the publisher being added
                # but without any origin information.
                self.pkg("install -g {0} foo".format(self.foo_arc))
                self.pkg("list foo")

                # Verify that publisher exists now (without origin information)
                # and is enabled and sticky (-n omits disabled publishers).
                self.pkg("publisher -nH")
                expected = """\
empty origin online F {0}/
test 
""".format(self.empty_rurl)
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)

                # Verify that signed package can now be installed since
                # dependency was satisfied.
                self.pkg("install -g {0} signed".format(self.signed_arc))
                self.pkg("list foo signed")

                # Verify that removing all packages leaves no packages known
                # even though publisher remains configured.
                self.pkg("uninstall \*")
                self.pkg("list -af", exit=1)

                # Verify publisher can be removed.
                self.pkg("unset-publisher test")

                #
                # Create an image using the foo archive.
                #
                self.image_create(misc.parse_uri(self.foo_arc), prefix=None)
                self.__seed_ta_dir("ta1")

                # Verify that signed package can be installed and the archive
                # configured for the publisher allows dependencies to be
                # satisfied.
                self.pkg("set-property signature-policy verify")
                self.pkg("install -g {0} signed".format(self.signed_arc))
                self.pkg("list foo signed")

                # Verify that removing all packages leaves only foo known.
                self.pkg("uninstall \*")
                self.pkg("list -aH")
                expected = "foo 1.0 ---\n"
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("list -a", exit=1)

                # Install an older version of a known package.
                self.pkg("install -g {0} [email protected]".format(self.all_arc))
                self.pkg("list [email protected] [email protected]")

                # Verify graceful failure if source doesn't exist.
                self.pkg("update -g {0} foo".format(self.foo_arc + ".nosuchpkg"),
                    exit=1)

                # Verify graceful failure if user doesn't have permission to
                # access temporary source.
                self.pkg("update -g {0} foo".format(self.perm_arc), su_wrap=True,
                    exit=1)

                # Verify that packages can be updated using temporary origins.
                self.pkg("update -g {0} -g {1}".format(self.incorp_arc,
                    self.quux_arc))
                self.pkg("list [email protected] [email protected]")

                # Verify that both test and test2 are configured without
                # origins.
                self.pkg("publisher -H")
                expected = """\
empty origin online F {0}/
test 
test2 
""".format(self.empty_rurl)
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)
Example 8
def opts_table_cb_origins(api_inst, opts, opts_new):
        origins = set()
        for o in opts[ORIGINS]:
                origins.add(misc.parse_uri(o, cwd=_orig_cwd))
        opts_new[ORIGINS] = origins
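This callback normalizes every origin value with misc.parse_uri and deduplicates the results by storing them in a set. A rough standalone sketch of the same normalize-and-deduplicate idea with a stand-in parser (parse_uri's exact behavior is not reproduced here; the helper is illustrative only):

import os
from urllib.parse import urlparse

def normalize_origin(origin, cwd=None):
    """Stand-in for misc.parse_uri: leave real URIs alone and turn bare
    paths into file:// URIs relative to cwd (illustrative only)."""
    if urlparse(origin).scheme:
        return origin.rstrip("/")
    path = os.path.abspath(os.path.join(cwd or os.getcwd(), origin))
    return "file://" + path

# Repeated origin options collapse to a unique set, as in the callback.
origins = {normalize_origin(o) for o in
           ("http://pkg.example.com/", "http://pkg.example.com", "./repo")}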
Example 9
def main_func():

        global temp_root, repo_modified, repo_finished, repo_uri, tracker
        global dry_run

        global_settings.client_name = PKG_CLIENT_NAME

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "?c:i:np:r:s:u",
                    ["help"])
        except getopt.GetoptError as e:
                usage(_("illegal option -- {0}").format(e.opt))

        dry_run = False
        ref_repo_uri = None
        repo_uri = os.getenv("PKG_REPO", None)
        changes = set()
        ignores = set()
        publishers = set()
        cmp_policy = CMP_ALL
        
        processed_pubs = 0

        for opt, arg in opts:
                if opt == "-c":
                        changes.add(arg)
                elif opt == "-i":
                        ignores.add(arg)
                elif opt == "-n":
                        dry_run = True
                elif opt == "-p":
                        publishers.add(arg)
                elif opt == "-r":
                        ref_repo_uri = misc.parse_uri(arg)
                elif opt == "-s":
                        repo_uri = misc.parse_uri(arg)
                elif opt == "-u":
                        cmp_policy = CMP_UNSIGNED
                elif opt == "-?" or opt == "--help":
                        usage(retcode=pkgdefs.EXIT_OK)

        if pargs:
                usage(_("Unexpected argument(s): {0}").format(" ".join(pargs)))

        if not repo_uri:
                usage(_("A target repository must be provided."))

        if not ref_repo_uri:
                usage(_("A reference repository must be provided."))

        target = publisher.RepositoryURI(misc.parse_uri(repo_uri))
        if target.scheme != "file":
                abort(err=_("Target repository must be filesystem-based."))
        try:
                target_repo = sr.Repository(read_only=dry_run,
                    root=target.get_pathname())
        except sr.RepositoryError as e:
                abort(str(e))

        # Use the tmp directory in target repo for efficient file rename since
        # files are in the same file system.
        temp_root = target_repo.temp_root
        if not os.path.exists(temp_root):
                os.makedirs(temp_root)

        ref_incoming_dir = tempfile.mkdtemp(dir=temp_root)
        ref_pkg_root = tempfile.mkdtemp(dir=temp_root)

        ref_xport, ref_xport_cfg = transport.setup_transport()
        ref_xport_cfg.incoming_root = ref_incoming_dir
        ref_xport_cfg.pkg_root = ref_pkg_root
        transport.setup_publisher(ref_repo_uri, "ref", ref_xport,
            ref_xport_cfg, remote_prefix=True)

        ref_repo = None
        ref = publisher.RepositoryURI(misc.parse_uri(ref_repo_uri))
        if ref.scheme == "file":
                try:
                        # It is possible that the client does not
                        # have write access to the reference repo
                        # so open it read-only to prevent the
                        # attempt to create a lock file in it.
                        ref_repo = sr.Repository(read_only=True,
                            root=ref.get_pathname())
                except sr.RepositoryError as e:
                        abort(str(e))

        tracker = get_tracker()

        for pub in target_repo.publishers:
                if publishers and pub not in publishers \
                    and '*' not in publishers:
                        continue

                msg(_("Processing packages for publisher {0} ...").format(pub))
                # Find the matching pub in the ref repo.
                for ref_pub in ref_xport_cfg.gen_publishers():
                        if ref_pub.prefix == pub:
                                found = True
                                break
                else:
                        txt = _("Publisher {0} not found in reference "
                            "repository.").format(pub)
                        if publishers:
                                abort(err=txt)
                        else:
                                txt += _(" Skipping.")
                                msg(txt)
                        continue

                processed_pubs += 1

                rev = do_reversion(pub, ref_pub, target_repo, ref_xport,
                    changes, ignores, cmp_policy, ref_repo, ref, ref_xport_cfg)

                # Only rebuild catalog if anything got actually reversioned.
                if rev and not dry_run:
                        msg(_("Rebuilding repository catalog."))
                        target_repo.rebuild(pub=pub)
                repo_finished = True

        ret = pkgdefs.EXIT_OK
        if processed_pubs == 0:
                msg(_("No matching publishers could be found."))
                ret = pkgdefs.EXIT_OOPS
        cleanup()
        return ret
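The publisher-matching loop above relies on Python's for/else: the else branch runs only when the for loop completes without hitting break, which is how a missing reference publisher is detected. A tiny self-contained illustration of the pattern (the names are made up for this example):

def find_reference_publisher(target_pub, ref_pubs):
    """Return the matching reference publisher prefix, or None."""
    for ref_pub in ref_pubs:
        if ref_pub == target_pub:
            break
    else:
        # Reached only when the loop finished without a break.
        return None
    return ref_pub

assert find_reference_publisher("solaris", ["openindiana", "solaris"]) == "solaris"
assert find_reference_publisher("nightly", ["openindiana", "solaris"]) is None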
Example 10
File: sign.py Project: aszeszo/test
                                    "but isn't a file.") % cert_path)
                elif opt == "-i":
                        p = os.path.abspath(arg)
                        if not os.path.isfile(p):
                                usage(_("%s was expected to be a certificate "
                                    "but isn't a file.") % p)
                        chain_certs.append(p)
                elif opt == "-k":
                        key_path = os.path.abspath(arg)
                        if not os.path.isfile(key_path):
                                usage(_("%s was expected to be a key file "
                                    "but isn't a file.") % key_path)
                elif opt == "-n":
                        dry_run = True
                elif opt == "-s":
                        repo_uri = misc.parse_uri(arg)
                elif opt == "--help":
                        show_usage = True
                elif opt == "--no-catalog":
                        add_to_catalog = False

        if show_usage:
                usage(retcode=EXIT_OK)

        if not repo_uri:
                usage(_("a repository must be provided"))

        if key_path and not cert_path:
                usage(_("If a key is given to sign with, its associated "
                    "certificate must be given."))
Example 11
def main_func():
        global_settings.client_name = "pkgsign"

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:D:",
                    ["help", "no-index", "no-catalog"])
        except getopt.GetoptError as e:
                usage(_("illegal global option -- {0}").format(e.opt))

        show_usage = False
        sig_alg = "rsa-sha256"
        cert_path = None
        key_path = None
        chain_certs = []
        add_to_catalog = True
        set_alg = False
        dry_run = False

        repo_uri = os.getenv("PKG_REPO", None)
        for opt, arg in opts:
                if opt == "-a":
                        sig_alg = arg
                        set_alg = True
                elif opt == "-c":
                        cert_path = os.path.abspath(arg)
                        if not os.path.isfile(cert_path):
                                usage(_("{0} was expected to be a certificate "
                                    "but isn't a file.").format(cert_path))
                elif opt == "-i":
                        p = os.path.abspath(arg)
                        if not os.path.isfile(p):
                                usage(_("{0} was expected to be a certificate "
                                    "but isn't a file.").format(p))
                        chain_certs.append(p)
                elif opt == "-k":
                        key_path = os.path.abspath(arg)
                        if not os.path.isfile(key_path):
                                usage(_("{0} was expected to be a key file "
                                    "but isn't a file.").format(key_path))
                elif opt == "-n":
                        dry_run = True
                elif opt == "-s":
                        repo_uri = misc.parse_uri(arg)
                elif opt == "--help":
                        show_usage = True
                elif opt == "--no-catalog":
                        add_to_catalog = False
                elif opt == "-D":
                        try:
                                key, value = arg.split("=", 1)
                                DebugValues.set_value(key, value)
                        except (AttributeError, ValueError):
                                error(_("{opt} takes argument of form "
                                    "name=value, not {arg}").format(
                                    opt=opt, arg=arg))
        if show_usage:
                usage(retcode=EXIT_OK)

        if not repo_uri:
                usage(_("a repository must be provided"))

        if key_path and not cert_path:
                usage(_("If a key is given to sign with, its associated "
                    "certificate must be given."))

        if cert_path and not key_path:
                usage(_("If a certificate is given, its associated key must be "
                    "given."))

        if chain_certs and not cert_path:
                usage(_("Intermediate certificates are only valid if a key "
                    "and certificate are also provided."))

        if not pargs:
                usage(_("At least one fmri or pattern must be provided to "
                    "sign."))

        if not set_alg and not key_path:
                sig_alg = "sha256"

        s, h = actions.signature.SignatureAction.decompose_sig_alg(sig_alg)
        if h is None:
                usage(_("{0} is not a recognized signature algorithm.").format(
                    sig_alg))
        if s and not key_path:
                usage(_("Using {0} as the signature algorithm requires that a "
                    "key and certificate pair be presented using the -k and -c "
                    "options.").format(sig_alg))
        if not s and key_path:
                usage(_("The {0} hash algorithm does not use a key or "
                    "certificate.  Do not use the -k or -c options with this "
                    "algorithm.").format(sig_alg))

        if DebugValues:
                reload(digest)

        errors = []

        t = misc.config_temp_root()
        temp_root = tempfile.mkdtemp(dir=t)
        del t

        cache_dir = tempfile.mkdtemp(dir=temp_root)
        incoming_dir = tempfile.mkdtemp(dir=temp_root)
        chash_dir = tempfile.mkdtemp(dir=temp_root)
        cert_dir = tempfile.mkdtemp(dir=temp_root)

        try:
                chain_certs = [
                    __make_tmp_cert(cert_dir, c) for c in chain_certs
                ]
                if cert_path is not None:
                        cert_path = __make_tmp_cert(cert_dir, cert_path)

                xport, xport_cfg = transport.setup_transport()
                xport_cfg.add_cache(cache_dir, readonly=False)
                xport_cfg.incoming_root = incoming_dir

                # Configure publisher(s)
                transport.setup_publisher(repo_uri, "source", xport,
                    xport_cfg, remote_prefix=True)
                pats = pargs
                successful_publish = False

                concrete_fmris = []
                unmatched_pats = set(pats)
                all_pats = frozenset(pats)
                get_all_pubs = False
                pub_prefs = set()
                # Gather the publishers whose catalogs will be needed.
                for pat in pats:
                        try:
                                p_obj = fmri.MatchingPkgFmri(pat)
                        except fmri.IllegalMatchingFmri as e:
                                errors.append(e)
                                continue
                        pub_prefix = p_obj.get_publisher()
                        if pub_prefix:
                                pub_prefs.add(pub_prefix)
                        else:
                                get_all_pubs = True
                # Check each publisher for matches to our patterns.
                for p in xport_cfg.gen_publishers():
                        if not get_all_pubs and p.prefix not in pub_prefs:
                                continue
                        cat = fetch_catalog(p, xport, temp_root)
                        ms, tmp1, u = cat.get_matching_fmris(pats)
                        # Find which patterns matched.
                        matched_pats = all_pats - u
                        # Remove those patterns from the unmatched set.
                        unmatched_pats -= matched_pats
                        for v_list in ms.values():
                                concrete_fmris.extend([(v, p) for v in v_list])
                if unmatched_pats:
                        raise api_errors.PackageMatchErrors(
                            unmatched_fmris=unmatched_pats)

                for pfmri, src_pub in sorted(set(concrete_fmris)):
                        try:
                                # Get the existing manifest for the package to
                                # be signed.
                                m_str = xport.get_manifest(pfmri,
                                    content_only=True, pub=src_pub)
                                m = manifest.Manifest()
                                m.set_content(content=m_str)

                                # Construct the base signature action.
                                attrs = { "algorithm": sig_alg }
                                a = actions.signature.SignatureAction(cert_path,
                                    **attrs)
                                a.hash = cert_path

                                # Add the action to the manifest to be signed
                                # since the action signs itself.
                                m.add_action(a, misc.EmptyI)

                                # Set the signature value and certificate
                                # information for the signature action.
                                a.set_signature(m.gen_actions(),
                                    key_path=key_path, chain_paths=chain_certs,
                                    chash_dir=chash_dir)

                                # The hash of 'a' is currently a path, we need
                                # to find the hash of that file to allow
                                # comparison to existing signatures.
                                hsh = None
                                if cert_path:
                                        # Action identity still uses the 'hash'
                                        # member of the action, so we need to
                                        # stay with the sha1 hash.
                                        hsh, _dummy = \
                                            misc.get_data_digest(cert_path,
                                            hash_func=hashlib.sha1)

                                # Check whether the signature about to be added
                                # is identical, or almost identical, to existing
                                # signatures on the package.  Because 'a' has
                                # already been added to the manifest, it is
                                # generated by gen_actions_by_type, so the cnt
                                # must be 2 or higher to be an issue.
                                cnt = 0
                                almost_identical = False
                                for a2 in m.gen_actions_by_type("signature"):
                                        try:
                                                if a.identical(a2, hsh):
                                                        cnt += 1
                                        except api_errors.AlmostIdentical as e:
                                                e.pkg = pfmri
                                                errors.append(e)
                                                almost_identical = True
                                if almost_identical:
                                        continue
                                if cnt == 2:
                                        continue
                                elif cnt > 2:
                                        raise api_errors.DuplicateSignaturesAlreadyExist(pfmri)
                                assert cnt == 1, "Cnt was:{0}".format(cnt)

                                if not dry_run:
                                        # Append the finished signature action
                                        # to the published manifest.
                                        t = trans.Transaction(repo_uri,
                                            pkg_name=str(pfmri), xport=xport,
                                            pub=src_pub)
                                        try:
                                                t.append()
                                                t.add(a)
                                                for c in chain_certs:
                                                        t.add_file(c)
                                                t.close(add_to_catalog=
                                                    add_to_catalog)
                                        except:
                                                if t.trans_id:
                                                        t.close(abandon=True)
                                                raise
                                msg(_("Signed {0}").format(pfmri.get_fmri(
                                    include_build=False)))
                                successful_publish = True
                        except (api_errors.ApiException, fmri.FmriError,
                            trans.TransactionError) as e:
                                errors.append(e)
                if errors:
                        error("\n".join([str(e) for e in errors]))
                        if successful_publish:
                                return EXIT_PARTIAL
                        else:
                                return EXIT_OOPS
                return EXIT_OK
        except api_errors.ApiException as e:
                error(e)
                return EXIT_OOPS
        finally:
                shutil.rmtree(temp_root)
Example 12
def main_func():
    gettext.install("pkg",
                    "/usr/share/locale",
                    codeset=locale.getpreferredencoding())

    repo_uri = os.getenv("PKG_REPO", None)

    show_usage = False
    global_settings.client_name = "pkgsend"
    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "s:D:?", ["help", "debug="])
        for opt, arg in opts:
            if opt == "-s":
                repo_uri = arg
            elif opt == "-D" or opt == "--debug":
                if arg == "allow-timestamp":
                    key = arg
                    value = True
                else:
                    try:
                        key, value = arg.split("=", 1)
                    except (AttributeError, ValueError):
                        usage(
                            _("{opt} takes argument of form "
                              "name=value, not {arg}").format(opt=opt,
                                                              arg=arg))
                DebugValues.set_value(key, value)
            elif opt in ("--help", "-?"):
                show_usage = True
    except getopt.GetoptError as e:
        usage(_("illegal global option -- {0}").format(e.opt))

    if repo_uri and not repo_uri.startswith("null:"):
        repo_uri = misc.parse_uri(repo_uri)

    if DebugValues:
        reload(pkg.digest)
    subcommand = None
    if pargs:
        subcommand = pargs.pop(0)
        if subcommand == "help":
            show_usage = True

    if show_usage:
        usage(retcode=0)
    elif not subcommand:
        usage()

    if not repo_uri and subcommand not in ("create-repository", "generate",
                                           "publish"):
        usage(_("A destination package repository must be provided "
                "using -s."),
              cmd=subcommand)

    visitors = [SolarisBundleVisitor()]
    ret = 0
    try:
        if subcommand == "create-repository":
            ret = trans_create_repository(repo_uri, pargs)
        elif subcommand == "open":
            ret = trans_open(repo_uri, pargs)
        elif subcommand == "append":
            ret = trans_append(repo_uri, pargs)
        elif subcommand == "close":
            ret = trans_close(repo_uri, pargs)
        elif subcommand == "add":
            ret = trans_add(repo_uri, pargs)
        elif subcommand == "import":
            ret = trans_import(repo_uri, pargs, visitors=visitors)
        elif subcommand == "include":
            ret = trans_include(repo_uri, pargs)
        elif subcommand == "publish":
            ret = trans_publish(repo_uri, pargs)
        elif subcommand == "generate":
            ret = trans_generate(pargs, visitors=visitors)
        elif subcommand == "refresh-index":
            ret = trans_refresh_index(repo_uri, pargs)
        else:
            usage(_("unknown subcommand '{0}'").format(subcommand))

        printed_space = False
        for visitor in visitors:
            for warn in visitor.warnings:
                if not printed_space:
                    print("")
                    printed_space = True
                error(warn, cmd=subcommand)

            for err in visitor.errors:
                if not printed_space:
                    print("")
                    printed_space = True
                error(err, cmd=subcommand)
                ret = 1
    except pkg.bundle.InvalidBundleException as e:
        error(e, cmd=subcommand)
        ret = 1
    except getopt.GetoptError as e:
        usage(
            _("illegal {cmd} option -- {opt}").format(cmd=subcommand,
                                                      opt=e.opt))

    return ret
Example 13
def trans_publish(repo_uri, fargs):
    """Publish packages in a single step using provided manifest data and
        sources."""

    # --no-index is now silently ignored as the publication process no
    # longer builds search indexes automatically.
    opts, pargs = getopt.getopt(
        fargs, "b:d:s:T:",
        ["fmri-in-manifest", "no-index", "no-catalog", "key=", "cert="])

    add_to_catalog = True
    basedirs = []
    bundles = []
    timestamp_files = []
    key = None
    cert = None
    for opt, arg in opts:
        if opt == "-b":
            bundles.append(arg)
        elif opt == "-d":
            basedirs.append(arg)
        elif opt == "-s":
            repo_uri = arg
            if repo_uri and not repo_uri.startswith("null:"):
                repo_uri = misc.parse_uri(repo_uri)
        elif opt == "-T":
            timestamp_files.append(arg)
        elif opt == "--no-catalog":
            add_to_catalog = False
        elif opt == "--key":
            key = arg
        elif opt == "--cert":
            cert = arg

    if not repo_uri:
        usage(_("A destination package repository must be provided "
                "using -s."),
              cmd="publish")

    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            filelist = [(f, file(f)) for f in pargs]
        except IOError as e:
            error(e, cmd="publish")
            return 1

    lines = ""  # giant string of all input files concatenated together
    linecnts = []  # tuples of starting line number, ending line number
    linecounter = 0  # running total

    for filename, f in filelist:
        try:
            data = f.read()
        except IOError as e:
            error(e, cmd="publish")
            return 1
        lines += data
        linecnt = len(data.splitlines())
        linecnts.append((linecounter, linecounter + linecnt))
        linecounter += linecnt

    m = pkg.manifest.Manifest()
    try:
        m.set_content(content=lines)
    except apx.InvalidPackageErrors as err:
        e = err.errors[0]
        lineno = e.lineno
        for i, tup in enumerate(linecnts):
            if lineno > tup[0] and lineno <= tup[1]:
                filename = filelist[i][0]
                lineno -= tup[0]
                break
        else:
            filename = "???"
            lineno = "???"

        error(_("File {filename} line {lineno}: {err}").format(
            filename=filename, lineno=lineno, err=e),
              cmd="publish")
        return 1

    try:
        pfmri = pkg.fmri.PkgFmri(m["pkg.fmri"])
        if not pfmri.version:
            # Cannot have a FMRI without version
            error(_("The pkg.fmri attribute '{0}' in the package "
                    "manifest must include a version.").format(pfmri),
                  cmd="publish")
            return 1
        if not DebugValues["allow-timestamp"]:
            # If not debugging, timestamps are ignored.
            pfmri.version.timestr = None
        pkg_name = pfmri.get_fmri()
    except KeyError:
        error(_("Manifest does not set pkg.fmri"))
        return 1

    xport, pub = setup_transport_and_pubs(repo_uri, ssl_key=key, ssl_cert=cert)
    t = trans.Transaction(repo_uri, pkg_name=pkg_name, xport=xport, pub=pub)
    t.open()

    target_files = []
    if bundles:
        # Ensure hardlinks marked as files in the manifest are
        # treated as files.  This is necessary when sourcing files
        # from some bundle types.
        target_files.extend(a.attrs["path"] for a in m.gen_actions()
                            if a.name == "file")

    bundles = [
        pkg.bundle.make_bundle(bundle, targetpaths=target_files)
        for bundle in bundles
    ]

    for a in m.gen_actions():
        # don't publish these actions
        if a.name == "signature":
            msg(_("WARNING: Omitting signature action '{0}'".format(a)))
            continue
        if a.name == "set" and a.attrs["name"] in ["pkg.fmri", "fmri"]:
            continue
        elif a.has_payload:
            # Don't trust values provided; forcibly discard these.
            a.attrs.pop("pkg.size", None)
            a.attrs.pop("pkg.csize", None)
            path = pkg.actions.set_action_data(a.hash,
                                               a,
                                               basedirs=basedirs,
                                               bundles=bundles)[0]
        elif a.name in nopub_actions:
            error(_("invalid action for publication: {0}").format(action),
                  cmd="publish")
            t.close(abandon=True)
            return 1
        if a.name == "file":
            basename = os.path.basename(a.attrs["path"])
            for pattern in timestamp_files:
                if fnmatch.fnmatch(basename, pattern):
                    if not isinstance(path, basestring):
                        # Target is from bundle; can't
                        # apply timestamp now.
                        continue
                    ts = misc.time_to_timestamp(os.stat(path).st_mtime)
                    a.attrs["timestamp"] = ts
                    break
        try:
            t.add(a)
        except:
            t.close(abandon=True)
            raise

    pkg_state, pkg_fmri = t.close(abandon=False, add_to_catalog=add_to_catalog)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return 0
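The (start, end) tuples collected in linecnts let the error handler map a line number in the concatenated manifest text back to the file it came from. A small standalone sketch of that lookup, mirroring the logic above (the helper is illustrative, not part of pkgsend):

def locate_line(lineno, filelist, linecnts):
    """Map a line number in the concatenated input back to
    (filename, line number within that file)."""
    for i, (start, end) in enumerate(linecnts):
        if start < lineno <= end:
            return filelist[i][0], lineno - start
    return "???", "???"

# Two manifests of 3 and 2 lines: global line 4 is line 1 of the second.
files = [("a.p5m", None), ("b.p5m", None)]
counts = [(0, 3), (3, 5)]
assert locate_line(4, files, counts) == ("b.p5m", 1)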
Example 14
        def test_03_install_update(self):
                """Verify that install and update work as expected for temporary
                origins.
                """

                #
                # Create an image with no configured package sources, and
                # verify that a package can be installed from a temporary
                # source.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("set-property signature-policy ignore")
                self.pkg("list -a", exit=1)
                self.pkg("install -g %s foo" % self.foo_arc)

                #
                # Create an image with a network-based source, then make that
                # source unreachable and verify that a package can be installed
                # from a temporary source.
                #
                self.dcs[4].start()
                self.image_create(self.dcs[4].get_depot_url(), prefix=None)
                self.dcs[4].stop()
                self.pkg("set-property signature-policy ignore")
                self.pkg("list -a")
                # --no-refresh is required for now because -g combines temporary
                # sources with configured sources and pkg(5) currently treats
                # refresh failure as fatal.  See bug 18323.
                self.pkg("install --no-refresh -g %s foo" % self.foo_arc)

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("set-property signature-policy ignore")
                self.pkg("set-publisher --set-property signature-policy=ignore "
                    "test")
                self.pkg("list -a", exit=1)

                # Verify graceful failure if source doesn't exist.
                self.pkg("install -g %s foo" % (self.foo_arc + ".nosuchpkg"),
                    exit=1)

                # Verify graceful failure if user doesn't have permission to
                # access temporary source.
                self.pkg("install -g %s foo" % self.perm_arc, su_wrap=True,
                    exit=1)

                # Verify attempting to install a package with a missing
                # dependency fails gracefully.
                self.pkg("install -g %s signed" % self.signed_arc, exit=1)

                # Verify a package from a publisher not already configured can
                # be installed using temporary origins.  Installing a package
                # in this scenario will result in the publisher being added
                # but without any origin information.
                self.pkg("install -g %s foo" % self.foo_arc)
                self.pkg("list foo")

                # Verify that publisher exists now (without origin information)
                # and is enabled and sticky (-n omits disabled publishers).
                self.pkg("publisher -nH")
                expected = """\
empty origin online F %s/
test 
""" % self.empty_rurl
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)

                # Verify that signed package can now be installed since
                # dependency was satisfied.
                self.pkg("install -g %s signed" % self.signed_arc)
                self.pkg("list foo signed")

                # Verify that removing all packages leaves no packages known
                # even though publisher remains configured.
                self.pkg("uninstall \*")
                self.pkg("list -af", exit=1)

                # Verify publisher can be removed.
                self.pkg("unset-publisher test")

                #
                # Create an image using the foo archive.
                #
                self.image_create(misc.parse_uri(self.foo_arc), prefix=None)
                self.__seed_ta_dir("ta1")

                # Verify that signed package can be installed and the archive
                # configured for the publisher allows dependencies to be
                # satisfied.
                self.pkg("set-property signature-policy verify")
                self.pkg("install -g %s signed" % self.signed_arc)
                self.pkg("list foo signed")

                # Verify that removing all packages leaves only foo known.
                self.pkg("uninstall \*")
                self.pkg("list -aH")
                expected = "foo 1.0 ---\n"
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("list -a", exit=1)

                # Install an older version of a known package.
                self.pkg("install -g %s [email protected]" % self.all_arc)
                self.pkg("list [email protected] [email protected]")

                # Verify graceful failure if source doesn't exist.
                self.pkg("update -g %s foo" % (self.foo_arc + ".nosuchpkg"),
                    exit=1)

                # Verify graceful failure if user doesn't have permission to
                # access temporary source.
                self.pkg("update -g %s foo" % self.perm_arc, su_wrap=True,
                    exit=1)

                # Verify that packages can be updated using temporary origins.
                self.pkg("update -g %s -g %s" % (self.incorp_arc,
                    self.quux_arc))
                self.pkg("list [email protected] [email protected]")

                # Verify that both test and test2 are configured without
                # origins.
                self.pkg("publisher -H")
                expected = """\
empty origin online F %s/
test 
test2 
""" % self.empty_rurl
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)
Example 15
def main_func():
    global dry_run, tmpdir, xport, dest_xport, target_pub

    dest_repo = None
    source_list = []
    variant_list = []
    pub_list = []
    use_pub_list = False

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "d:np:s:?", ["help"])
        for opt, arg in opts:
            if opt == "-d":
                dest_repo = misc.parse_uri(arg)
            elif opt == "-n":
                dry_run = True
            elif opt == "-s":
                s = arg.split(",")
                if len(s) < 2:
                    usage("-s option must specify " "variant=value,repo_uri")

                # All but last part should be variant.
                src_vars = {}
                for v in s[:-1]:
                    try:
                        vname, vval = v.split("=")
                    except ValueError:
                        usage("-s option must specify "
                              "variant=value,repo_uri")

                    if not vname.startswith("variant."):
                        vname = "variant.{0}".format(vname)
                    src_vars[vname] = vval

                variant_list.append(src_vars)
                source_list.append(
                    publisher.RepositoryURI(misc.parse_uri(s[-1])))
            elif opt == "-p":
                use_pub_list = True
                pub_list.append(arg)

            if opt in ("--help", "-?"):
                usage(exitcode=0)
    except getopt.GetoptError as e:
        usage(_("illegal option -- {0}").format(e.opt))

    if not source_list:
        usage(
            _("At least one variant name, value, and package source "
              "must be provided using -s."))

    if not dest_repo:
        usage(
            _("A destination package repository must be provided "
              "using -d."))

    # Determine the unique set of variants across all sources.
    variants = set()
    vcombos = collections.defaultdict(set)
    for src_vars in variant_list:
        for v, vval in six.iteritems(src_vars):
            variants.add(v)
            vcombos[v].add((v, vval))

    # merge_fmris() expects this to be a list. Sort it to make sure
    # combo is deterministic in the later construction.
    variants = sorted(variants, reverse=True)

    # Require that the user specified the same variants for all sources.
    for i, src_vars in enumerate(variant_list):
        missing = set(v for v in variants if v not in variant_list[i])
        if missing:
            missing = ", ".join(missing)
            source = source_list[i]
            usage(
                _("Source {source} missing values for "
                  "variants: {missing}").format(**locals()))

    # Require that each unique variant combination has a source.
    for combo in itertools.product(*vcombos.values()):
        found = False
        for i, src in enumerate(source_list):
            for vname, vval in combo:
                if variant_list[i].get(vname, None) != vval:
                    found = False
                    break
            else:
                found = True
                break

        if not found:
            combo = " ".join("{0}={1}".format(vname, vval)
                             for vname, vval in combo)
            usage(
                _("No source was specified for variant "
                  "combination {combo}.").format(**locals()))

    # initialize transport
    # we use a single endpoint for now, since the transport code
    # uses publisher as a unique key... so we just flop the repo
    # list as needed to access the different catalogs/manifests/files.
    temp_root = misc.config_temp_root()

    tmpdir = tempfile.mkdtemp(dir=temp_root, prefix="pkgmerge")
    xport, xport_cfg = transport.setup_transport()
    xport_cfg.incoming_root = tmpdir

    # we don't use the publisher returned by setup_publisher, as that only
    # returns one of the publishers in source_list.  Instead we use
    # xport_cfg to access all publishers.
    transport.setup_publisher(source_list,
                              "pkgmerge",
                              xport,
                              xport_cfg,
                              remote_prefix=True)
    cat_dir = tempfile.mkdtemp(dir=tmpdir)

    # we must have at least one matching publisher if -p was used.
    known_pubs = set([pub.prefix for pub in xport_cfg.gen_publishers()])
    if pub_list and len(set(pub_list).intersection(known_pubs)) == 0:
        error(
            _("no publishers from source repositories match "
              "the given -p options."))

    errors = set()
    tracker = get_tracker()

    # iterate over all publishers in our source repositories.  If errors
    # are encountered for a given publisher, we accumulate those, and
    # skip to the next publisher.
    for pub in xport_cfg.gen_publishers():

        if use_pub_list:
            if pub.prefix not in pub_list:
                continue
            else:
                # remove publishers from pub_list as we go, so
                # that when we're finished, any remaining
                # publishers in pub_list suggest superfluous
                # -p options, which will cause us to exit with
                # an error.
                pub_list.remove(pub.prefix)

        pub.meta_root = cat_dir
        pub.transport = xport

        # Use separate transport for destination repository in case
        # source and destination have identical publisher configuration.
        dest_xport, dest_xport_cfg = transport.setup_transport()
        dest_xport_cfg.incoming_root = tmpdir

        # retrieve catalogs for all specified repositories
        for s in source_list:
            load_catalog(s, pub)

        # determine the list of packages we'll be processing
        if not pargs:
            # use the latest versions and merge everything
            fmri_arguments = list(
                set(name for s in source_list
                    for name in get_all_pkg_names(s)))
            exclude_args = []
        else:
            fmri_arguments = [f for f in pargs if not f.startswith("!")]

            exclude_args = [f[1:] for f in pargs if f.startswith("!")]

        # build fmris to be merged
        masterlist = [
            build_merge_list(fmri_arguments, exclude_args, catalog_dict[s.uri])
            for s in source_list
        ]

        # check for unmatched patterns
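        # A pattern is only reported as unmatched if it matched nothing
        # in every source, hence the intersection of the per-source
        # unmatched sets.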
        in_none = reduce(lambda x, y: x & y, (set(u) for d, u in masterlist))
        if in_none:
            errors.add(
                _("The following pattern(s) did not match any "
                  "packages in any of the specified repositories for "
                  "publisher {pub_name}:"
                  "\n{patterns}").format(patterns="\n".join(in_none),
                                         pub_name=pub.prefix))
            continue

        # generate set of all package names to be processed, and dict
        # of lists indexed by order in source_list; if that repo has no
        # fmri for this pkg then use None.
        allpkgs = set(name for d, u in masterlist for name in d)

        processdict = {}
        for p in allpkgs:
            for d, u in masterlist:
                processdict.setdefault(p, []).append(d.setdefault(p, None))

        # check to make sure all fmris are at same version modulo
        # timestamp
        for entry in processdict:
            if len(
                    set([
                        str(a).rsplit(":")[0]
                        for a in processdict[entry] if a is not None
                    ])) > 1:
                errors.add(
                    _("fmris matching the following patterns do"
                      " not have matching versions across all "
                      "repositories for publisher {pubs}: "
                      "{patterns}").format(pub=pub.prefix,
                                           patterns=processdict[entry]))
                continue

        target_pub = transport.setup_publisher(dest_repo,
                                               pub.prefix,
                                               dest_xport,
                                               dest_xport_cfg,
                                               remote_prefix=True)

        tracker.republish_set_goal(len(processdict), 0, 0)
        # republish packages for this publisher. If we encounter any
        # publication errors, we move on to the next publisher.
        try:
            pkg_tmpdir = tempfile.mkdtemp(dir=tmpdir)
            republish_packages(pub,
                               target_pub,
                               processdict,
                               source_list,
                               variant_list,
                               variants,
                               tracker,
                               xport,
                               dest_repo,
                               dest_xport,
                               pkg_tmpdir,
                               dry_run=dry_run)
        except (trans.TransactionError, PkgmergeException) as e:
            errors.add(str(e))
            tracker.reset()
            continue
        finally:
            # if we're handling an exception, this still gets called
            # in spite of the 'continue' that the handler ends with.
            if os.path.exists(pkg_tmpdir):
                shutil.rmtree(pkg_tmpdir)

        tracker.republish_done(dryrun=dry_run)
        tracker.reset()

    # If -p options were supplied, we should have processed all of them
    # by now. Remaining entries suggest -p options that were not merged.
    if use_pub_list and pub_list:
        errors.add(
            _("the following publishers were not found in "
              "source repositories: {0}").format(" ".join(pub_list)))

    # If we have encountered errors for some publishers, print them now
    # and exit.
    tracker.flush()
    for message in errors:
        error(message, exitcode=None)
    if errors:
        exit(EXIT_OOPS)

    return EXIT_OK
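To illustrate the variant-coverage check above (the itertools.product loop), here is a minimal, self-contained sketch. It is not part of pkgmerge; the variant_list data is hypothetical, and the loop simply prints which combinations are claimed by a source.

import itertools

# Hypothetical -s inputs: two sources, one per architecture.
variant_list = [
    {"variant.arch": "i386", "variant.debug": "false"},
    {"variant.arch": "sparc", "variant.debug": "false"},
]

# Collect the set of (name, value) pairs seen for each variant name.
vcombos = {}
for src_vars in variant_list:
    for vname, vval in src_vars.items():
        vcombos.setdefault(vname, set()).add((vname, vval))

# Every combination drawn from the per-variant value sets must be
# claimed by at least one source.
for combo in itertools.product(*vcombos.values()):
    covered = any(
        all(src_vars.get(vname) == vval for vname, vval in combo)
        for src_vars in variant_list
    )
    print(dict(combo), "covered" if covered else "no source")

With this data both combinations are covered; changing the second source's variant.debug to "true" yields four combinations of which only two have a source, which is exactly the situation main_func rejects with a usage error.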
Example n. 16
        def test_03_install_update(self):
                """Verify that install and update work as expected when
                compositing publishers.
                """

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("set-property signature-policy ignore")
                self.pkg("set-publisher --set-property signature-policy=ignore "
                    "test")
                self.pkg("list -a", exit=1)

                # Verify that packages with dependencies can be installed when
                # using multiple, disparate sources.
                self.pkg("set-publisher -g %s -g %s test" % (self.foo_arc,
                    self.signed_arc))
                self.pkg("install signed")
                self.pkg("list foo signed")
                self.pkg("uninstall \*")

                # Verify publisher can be removed.
                self.pkg("unset-publisher test")

                #
                # Create an image using the signed archive.
                #
                self.image_create(misc.parse_uri(self.signed_arc), prefix=None)
                self.__seed_ta_dir("ta1")

                # Verify that signed package can be installed and the archive
                # configured for the publisher allows dependencies to be
                # satisfied.
                self.pkg("set-publisher -g %s test" % self.foo_arc)
                self.pkg("set-property signature-policy verify")
                self.pkg("publisher test")
                self.pkg("install signed")
                self.pkg("list foo signed")

                # Verify that removing all packages and the signed archive as
                # a source leaves only foo known.
                self.pkg("uninstall \*")
                self.pkg("set-publisher -G %s test" % self.signed_arc)
                self.pkg("list -aH")
                expected = "foo 1.0 ---\n"
                output = self.reduceSpaces(self.output)
                self.assertEqualDiff(expected, output)

                #
                # Create an image and verify no packages are known.
                #
                self.image_create(self.empty_rurl, prefix=None)
                self.pkg("list -a", exit=1)

                # Install an older version of a known package.
                self.pkg("set-publisher -g %s test" % self.all_arc)
                self.pkg("set-publisher -g %s test2" % self.all_arc)
                self.pkg("install [email protected]")
                self.pkg("list [email protected] [email protected]")

                # Verify that packages can be updated when using multiple,
                # disparate sources (that have some overlap).
                self.pkg("set-publisher -g %s test" % self.incorp_arc)
                self.pkg("update")
                self.pkg("list [email protected] [email protected]")

                #
                # Create an image using the signed archive.
                #
                self.image_create(misc.parse_uri(self.signed_arc), prefix=None)
                self.__seed_ta_dir("ta1")

                # Add the incorp archive as a source.
                self.pkg("set-publisher -g %s test" % self.incorp_arc)

                # Now verify that temporary package sources can be used during
                # package operations when multiple, disparate sources are
                # already configured for the same publisher.
                self.pkg("install -g %s incorp signed" % self.foo_rurl)
                self.pkg("list incorp foo signed")
Example n. 17
def opts_table_cb_pub_opts(api_inst, opts, opts_new):
        del opts_new[PUB_DISABLE]
        del opts_new[PUB_ENABLE]
        del opts_new[PUB_STICKY]
        del opts_new[PUB_NON_STICKY]

        if opts[PUB_DISABLE] and opts[PUB_ENABLE]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [PUB_DISABLE, PUB_ENABLE])

        if opts[PUB_STICKY] and opts[PUB_NON_STICKY]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [PUB_STICKY, PUB_NON_STICKY])

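        # Collapse each pair of mutually exclusive flags into a single
        # tri-state value: True or False when the caller asked for a
        # change, None when neither flag was given.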
        opts_new[PUB_DISABLE] = None
        if opts[PUB_DISABLE]:
                opts_new[PUB_DISABLE] = True

        if opts[PUB_ENABLE]:
                opts_new[PUB_DISABLE] = False

        opts_new[PUB_STICKY] = None
        if opts[PUB_STICKY]:
                opts_new[PUB_STICKY] = True

        if opts[PUB_NON_STICKY]:
                opts_new[PUB_STICKY] = False

        if opts[ORIGIN_URI] and opts[ADD_ORIGINS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [ORIGIN_URI, ADD_ORIGINS])

        if opts[ORIGIN_URI] and opts[REMOVE_ORIGINS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [ORIGIN_URI, REMOVE_ORIGINS])

        if opts[REPO_URI] and opts[ADD_ORIGINS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, ADD_ORIGINS])
        if opts[REPO_URI] and opts[ADD_MIRRORS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, ADD_MIRRORS])
        if opts[REPO_URI] and opts[REMOVE_ORIGINS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, REMOVE_ORIGINS])
        if opts[REPO_URI] and opts[REMOVE_MIRRORS]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, REMOVE_MIRRORS])
        if opts[REPO_URI] and opts[PUB_DISABLE]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, PUB_DISABLE])
        if opts[REPO_URI] and opts[PUB_ENABLE]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, PUB_ENABLE])
        if opts[REPO_URI] and not opts[REFRESH_ALLOWED]:
                raise InvalidOptionError(InvalidOptionError.REQUIRED,
                    [REPO_URI, REFRESH_ALLOWED])
        if opts[REPO_URI] and opts[RESET_UUID]:
                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
                    [REPO_URI, RESET_UUID])

        if opts[PROXY_URI] and not (opts[ADD_ORIGINS] or opts[ADD_MIRRORS]
            or opts[REPO_URI] or opts[REMOVE_ORIGINS] or opts[REMOVE_MIRRORS]):
                raise InvalidOptionError(InvalidOptionError.REQUIRED_ANY,
                    [PROXY_URI, ADD_ORIGINS, ADD_MIRRORS, REMOVE_ORIGINS,
                    REMOVE_MIRRORS, REPO_URI])

        opts_new[ADD_ORIGINS] = set()
        opts_new[REMOVE_ORIGINS] = set()
        opts_new[ADD_MIRRORS] = set()
        opts_new[REMOVE_MIRRORS] = set()
        for e in opts[ADD_ORIGINS]:
                opts_new[ADD_ORIGINS].add(misc.parse_uri(e, cwd=_orig_cwd))
        for e in opts[REMOVE_ORIGINS]:
                if e == "*":
                        # Allow wildcard to support an easy, scriptable
                        # way of removing all existing entries.
                        opts_new[REMOVE_ORIGINS].add("*")
                else:
                        opts_new[REMOVE_ORIGINS].add(misc.parse_uri(e,
                            cwd=_orig_cwd))

        for e in opts[ADD_MIRRORS]:
                opts_new[ADD_MIRRORS].add(misc.parse_uri(e, cwd=_orig_cwd))
        for e in opts[REMOVE_MIRRORS]:
                if e == "*":
                        # Allow wildcard to support an easy, scriptable
                        # way of removing all existing entries.
                        opts_new[REMOVE_MIRRORS].add("*")
                else:
                        opts_new[REMOVE_MIRRORS].add(misc.parse_uri(e,
                            cwd=_orig_cwd))

        if opts[REPO_URI]:
                opts_new[REPO_URI] = misc.parse_uri(opts[REPO_URI],
                    cwd=_orig_cwd)