Code example #1
    def test_no_origins_means_no_image_catalog_updates(self):
        """Make sure we don't update the image catalog
                unnecessarily."""

        # create an image with no publishers
        api_obj = self.image_create()
        repo = publisher.Repository()
        pub = publisher.Publisher(self.pubs[0], repository=repo)
        api_obj.add_publisher(pub)

        # make sure we've created a local catalog for this publisher
        api_obj.refresh(immediate=True)

        # get the image catalog timestamp
        ts1 = api_obj._img.get_last_modified(string=True)

        # force a delay so if the catalog is updated we'll notice.
        time.sleep(1)

        # refresh the image catalog (should be a noop)
        api_obj.refresh(immediate=True)

        # make sure the image catalog wasn't updated
        ts2 = api_obj._img.get_last_modified(string=True)
        self.assertEqual(ts1, ts2)
Code example #2
File: t_publish_api.py  Project: ripudamank2/pkg5
    def test_stress_file_publish(self):
        """Publish lots of packages rapidly ensuring that file
                publication can handle it."""

        location = self.dc.get_repodir()
        location = os.path.abspath(location)
        location = urlunparse(("file", "", pathname2url(location), "", "", ""))

        repouriobj = publisher.RepositoryURI(location)
        repo = publisher.Repository(origins=[repouriobj])
        pub = publisher.Publisher(prefix="repo1", repository=repo)
        xport_cfg = transport.GenericTransportCfg()
        xport_cfg.add_publisher(pub)
        xport = transport.Transport(xport_cfg)

        # Each version number must be unique since multiple packages
        # will be published within the same second.
        for i in range(100):
            pf = fmri.PkgFmri("foo@{0:d}.0".format(i))
            t = trans.Transaction(location,
                                  pkg_name=str(pf),
                                  xport=xport,
                                  pub=pub)
            t.open()
            pkg_fmri, pkg_state = t.close()
            self.debug("{0}: {1}".format(pkg_fmri, pkg_state))
Code example #3
    def __update_repo_config(self):
        """Helper function to generate test repository configuration."""
        # Find and load the repository configuration.
        rpath = self.dc.get_repodir()
        assert os.path.isdir(rpath)
        rcpath = os.path.join(rpath, "cfg_cache")

        rc = sr.RepositoryConfig(target=rcpath)

        # Update the configuration with our sample data.
        cfgdata = self.repo_cfg
        for section in cfgdata:
            for prop in cfgdata[section]:
                rc.set_property(section, prop, cfgdata[section][prop])

        # Save it.
        rc.write()

        # Apply publisher properties and update.
        repo = self.dc.get_repo()
        try:
            pub = repo.get_publisher("org.opensolaris.pending")
        except sr.RepositoryUnknownPublisher:
            pub = publisher.Publisher("org.opensolaris.pending")
            repo.add_publisher(pub)

        pub_repo = pub.repository
        if not pub_repo:
            pub_repo = publisher.Repository()
            pub.repository = pub_repo

        for attr, val in self.pub_repo_cfg.iteritems():
            setattr(pub_repo, attr, val)
        repo.update_publisher(pub)
Code example #4
    def __get_bobcat_pub(self, omit_repo=False):
        # First build a publisher object matching our expected data.
        repo = None
        if not omit_repo:
            repo = publisher.Repository(
                description="xkcd.net/325",
                legal_uris=["http://xkcd.com/license.html"],
                name="source",
                origins=["http://localhost:12001/"],
                refresh_seconds=43200)
        pub = publisher.Publisher("bobcat", alias="cat", repository=repo)

        return pub
Code example #5
def parse(data=None, fileobj=None, location=None):
    """Reads the pkg(5) publisher JSON formatted data at 'location'
        or from the provided file-like object 'fileobj' and returns a
        list of tuples of the format (publisher object, pkg_names).
        pkg_names is a list of strings representing package names or
        FMRIs.  If any pkg_names not specific to a publisher were
        provided, the last tuple returned will be of the format (None,
        pkg_names).

        'data' is an optional string containing the p5i data.

        'fileobj' is an optional file-like object that must support a
        'read' method for retrieving data.

        'location' is an optional string value that should either start
        with a leading slash and be the pathname of a file, or be a URI string.
        If it is a URI string, supported protocol schemes are 'file',
        'ftp', 'http', and 'https'.

        'data' or 'fileobj' or 'location' must be provided."""

    if data is None and location is None and fileobj is None:
        raise api_errors.InvalidResourceLocation(location)

    if location is not None:
        if location.find("://") == -1 and \
            not location.startswith("file:/"):
            # Convert the file path to a URI.
            location = os.path.abspath(location)
            location = urlparse.urlunparse(
                ("file", "", urllib.pathname2url(location), "", "", ""))

        try:
            fileobj = urllib2.urlopen(location)
        except (EnvironmentError, ValueError, urllib2.HTTPError) as e:
            raise api_errors.RetrievalError(e, location=location)

    try:
        if data is not None:
            dump_struct = json.loads(data)
        else:
            dump_struct = json.load(fileobj)
    except (EnvironmentError, urllib2.HTTPError) as e:
        raise api_errors.RetrievalError(e)
    except ValueError as e:
        # Not a valid JSON file.
        raise api_errors.InvalidP5IFile(e)

    try:
        ver = int(dump_struct["version"])
    except KeyError:
        raise api_errors.InvalidP5IFile(_("missing version"))
    except ValueError:
        raise api_errors.InvalidP5IFile(_("invalid version"))

    if ver > CURRENT_VERSION:
        raise api_errors.UnsupportedP5IFile()

    result = []
    try:
        plist = dump_struct.get("publishers", [])

        for p in plist:
            alias = p.get("alias", None)
            prefix = p.get("name", None)

            if not prefix:
                prefix = "Unknown"

            pub = publisher.Publisher(prefix, alias=alias)
            pkglist = p.get("packages", [])
            result.append((pub, pkglist))

            for r in p.get("repositories", []):
                rargs = {}
                for prop in ("collection_type", "description", "name",
                             "refresh_seconds", "registration_uri"):
                    val = r.get(prop, None)
                    if val is None or val == "None":
                        continue
                    rargs[prop] = val

                for prop in ("legal_uris", "mirrors", "origins",
                             "related_uris"):
                    val = r.get(prop, [])
                    if not isinstance(val, list):
                        continue
                    rargs[prop] = val

                repo = publisher.Repository(**rargs)
                pub.repository = repo

        pkglist = dump_struct.get("packages", [])
        if pkglist:
            result.append((None, pkglist))
    except (api_errors.PublisherError, TypeError, ValueError) as e:
        raise api_errors.InvalidP5IFile(str(e))
    return result
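
The docstring above describes the accepted inputs; a minimal usage sketch follows, assuming this parse() is importable as pkg.client.p5i, that a "version" of 1 is accepted by CURRENT_VERSION, and that the publisher name, origin URI, and package names below are illustrative placeholders rather than real data:

from __future__ import print_function
import pkg.client.p5i as p5i   # assumed module path

# Illustrative p5i payload; its structure mirrors what parse() reads above.
sample = """{
    "version": 1,
    "publishers": [
        {"name": "example.org", "alias": "example",
         "packages": ["pkg:/web/browser"],
         "repositories": [
             {"name": "example repo",
              "origins": ["http://pkg.example.org/"]}
         ]}
    ],
    "packages": ["pkg:/editor/vim"]
}"""

for pub, pkg_names in p5i.parse(data=sample):
    if pub is None:
        # The trailing tuple carries publisher-independent package names.
        print("any publisher:", pkg_names)
    else:
        print(pub.prefix, [o.uri for o in pub.repository.origins], pkg_names)
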
Code example #6
File: t_publisher.py  Project: jimklimov/pkg5
        def test_03_publisher(self):
                """Verify that a Repository object can be created, copied,
                modified, and used as expected."""

                robj = publisher.Repository(
                    collection_type=publisher.REPO_CTYPE_SUPPLEMENTAL,
                    description="Provides only the best BobCat packages!",
                    legal_uris=[
                        "http://legal1.example.com",
                        "http://legal2.example.com"
                    ],
                    mirrors=[
                        "http://mirror1.example.com/",
                        "http://mirror2.example.com/"
                    ],
                    name="First Repository",
                    origins=[
                        "http://origin1.example.com/",
                        "http://origin2.example.com/"
                    ],
                    refresh_seconds=70000,
                    registered=True,
                    registration_uri="http://register.example.com/",
                    related_uris=[
                        "http://related1.example.com",
                        "http://related2.example.com"
                    ],
                    sort_policy=publisher.URI_SORT_PRIORITY,
                )

                r2obj = copy.copy(robj)
                r2obj.origins = ["http://origin3.example.com"]
                r2obj.name = "Second Repository"
                r2obj.reset_mirrors()

                pprops = {
                    "alias": "cat",
                    "client_uuid": "2c6a8ff8-20e5-11de-a818-001fd0979039",
                    "disabled": True,
                    "meta_root": os.path.join(self.test_root, "bobcat"),
                    "repository": r2obj,
                }

                # Check that all properties can be set at construction time.
                pobj = publisher.Publisher("bobcat", **pprops)

                # Verify that all properties provided at construction time were
                # set as expected.
                for p in pprops:
                        self.assertEqual(pprops[p], getattr(pobj, p))

                # Verify that a copy matches its original.
                cpobj = copy.copy(pobj)
                for p in pprops:
                        if p == "repository":
                                # These attributes can't be directly compared.
                                continue
                        self.assertEqual(getattr(pobj, p), getattr(cpobj, p))

                # Assume that if the origins match, we have the right selected
                # repository.
                self.assertEqual(cpobj.repository.origins,
                    r2obj.origins)

                # Compare the source_object_id of the copied repository object
                # with the id of the source repository object.
                self.assertEqual(id(pobj), cpobj._source_object_id)

                cpobj = None

                # Verify that individual properties can be set.
                pobj = publisher.Publisher("tomcat")
                pobj.prefix = "bobcat"
                self.assertEqual(pobj.prefix, "bobcat")

                for p in pprops:
                        if p == "repositories":
                                for r in pprops[p]:
                                        pobj.add_repository(r)
                        else:
                                setattr(pobj, p, pprops[p])
                        self.assertEqual(getattr(pobj, p), pprops[p])

                pobj.repository = robj
                self.assertEqual(pobj.repository, robj)

                # An invalid value shouldn't be allowed.
                self.assertRaises(api_errors.UnknownRepository, setattr,
                    pobj, "repository", -1)

                pobj.reset_client_uuid()
                self.assertNotEqual(pobj.client_uuid, None)
                self.assertNotEqual(pobj.client_uuid, pprops["client_uuid"])

                pobj.create_meta_root()
                self.assertTrue(os.path.exists(pobj.meta_root))

                pobj.remove_meta_root()
                self.assertFalse(os.path.exists(pobj.meta_root))
Code example #7
File: t_publisher.py  Project: jimklimov/pkg5
        def test_02_repository(self):
                """Verify that a Repository object can be created, copied,
                modified, and used as expected."""

                tcert = os.path.join(self.test_root, "test.cert")
                tkey = os.path.join(self.test_root, "test.key")

                t2cert = os.path.join(self.test_root, "test2.cert")
                t2key = os.path.join(self.test_root, "test2.key")

                rprops = {
                    "collection_type": publisher.REPO_CTYPE_SUPPLEMENTAL,
                    "description": "Provides only the best BobCat packages!",
                    "legal_uris": [
                        "http://legal1.example.com",
                        "http://legal2.example.com"
                    ],
                    "mirrors": [
                        "http://mirror1.example.com/",
                        "http://mirror2.example.com/"
                    ],
                    "name": "BobCat Repository",
                    "origins": [
                        "http://origin1.example.com/",
                        "http://origin2.example.com/"
                    ],
                    "refresh_seconds": 70000,
                    "registered": True,
                    "registration_uri": "http://register.example.com/",
                    "related_uris": [
                        "http://related1.example.com",
                        "http://related2.example.com"
                    ],
                    "sort_policy": publisher.URI_SORT_PRIORITY,
                }

                # Check that all properties can be set at construction time.
                robj = publisher.Repository(**rprops)

                # Verify that all properties provided at construction time were
                # set as expected.
                for p in rprops:
                        self.assertEqual(rprops[p], getattr(robj, p))

                # Verify that a copy matches its original.
                crobj = copy.copy(robj)
                for p in rprops:
                        self.assertEqual(getattr(robj, p), getattr(crobj, p))
                crobj = None

                # New set of rprops for testing (all the URIs use https so that
                # setting ssl_key and ssl_cert can be tested).
                rprops = {
                    "collection_type": publisher.REPO_CTYPE_SUPPLEMENTAL,
                    "description": "Provides only the best BobCat packages!",
                    "legal_uris": [
                        "https://legal1.example.com",
                        "https://legal2.example.com"
                    ],
                    "mirrors": [
                        "https://mirror1.example.com/",
                        "https://mirror2.example.com/"
                    ],
                    "name": "BobCat Repository",
                    "origins": [
                        "https://origin1.example.com/",
                        "https://origin2.example.com/"
                    ],
                    "refresh_seconds": 70000,
                    "registered": True,
                    "registration_uri": "https://register.example.com/",
                    "related_uris": [
                        "https://related1.example.com",
                        "https://related2.example.com"
                    ],
                    "sort_policy": publisher.URI_SORT_PRIORITY,
                }

                # Verify that individual properties can be set.
                robj = publisher.Repository()
                for p in rprops:
                        setattr(robj, p, rprops[p])
                        self.assertEqual(getattr(robj, p), rprops[p])

                # Verify that setting invalid property values raises the
                # expected exception.
                self.assertRaises(api_errors.BadRepositoryCollectionType,
                    setattr, robj, "collection_type", -1)
                self.assertRaises(api_errors.BadRepositoryAttributeValue,
                    setattr, robj, "refresh_seconds", -1)
                self.assertRaises(api_errors.BadRepositoryURISortPolicy,
                    setattr, robj, "sort_policy", -1)

                # Verify that add functions work as expected.
                robj = publisher.Repository()
                for utype in ("legal_uri", "mirror", "origin", "related_uri"):
                        prop = utype + "s"
                        for u in rprops[prop]:
                                method = getattr(robj, "add_{0}".format(utype))
                                method(u, priority=1, ssl_cert=tcert,
                                    ssl_key=tkey)

                # Verify that has and get functions work as expected.
                for utype in ("mirror", "origin"):
                        prop = utype + "s"
                        for u in rprops[prop]:
                                method = getattr(robj, "has_{0}".format(utype))
                                self.assertTrue(method(u))

                                method = getattr(robj, "get_{0}".format(utype))
                                cu = publisher.RepositoryURI(u, priority=1,
                                    ssl_cert=tcert, ssl_key=tkey,
                                    trailing_slash=True)
                                ou = method(u)

                                # This verifies that the expected URI object is
                                # returned and that all of the properties match
                                # exactly as they were added.
                                for uprop in ("uri", "priority", "ssl_cert",
                                    "ssl_key", "trailing_slash"):
                                        self.assertEqual(getattr(cu, uprop),
                                            getattr(ou, uprop))

                # Verify that remove functions work as expected.
                for utype in ("legal_uri", "mirror", "origin", "related_uri"):
                        prop = utype + "s"

                        # Remove only the first URI for each property.
                        u = rprops[prop][0]
                        method = getattr(robj, "remove_{0}".format(utype))
                        method(u)
                        self.assertTrue(u not in getattr(robj, prop))
                        self.assertEqual(len(getattr(robj, prop)), 1)

                # Verify that update functions work as expected.
                for utype in ("mirror", "origin"):
                        prop = utype + "s"

                        # Update only the last entry for each property.
                        u = rprops[prop][-1]

                        method = getattr(robj, "update_{0}".format(utype))
                        method(u, priority=2, ssl_cert=t2cert, ssl_key=t2key)

                        method = getattr(robj, "get_{0}".format(utype))
                        ou = method(u)

                        # This verifies that the expected URI object is
                        # returned and that all of the properties match
                        # exactly as specified to the update method.
                        cu = publisher.RepositoryURI(u, priority=2,
                            ssl_cert=t2cert, ssl_key=t2key)
                        for uprop in ("uri", "priority", "ssl_cert",
                            "ssl_key", "trailing_slash"):
                                self.assertEqual(getattr(cu, uprop),
                                    getattr(ou, uprop))

                # Verify that reset functions work as expected.
                for prop in ("mirrors", "origins"):
                        method = getattr(robj, "reset_{0}".format(prop))
                        method()
                        self.assertEqual(getattr(robj, prop), [])
Code example #8
File: p5i.py  Project: vincent2628/pluribus_userland
                rargs = {}
                for prop in ("collection_type", "description", "name",
                             "refresh_seconds", "registration_uri"):
                    val = r.get(prop, None)
                    if val is None or val == "None":
                        continue
                    rargs[prop] = val

                for prop in ("legal_uris", "mirrors", "origins",
                             "related_uris"):
                    val = r.get(prop, [])
                    if not isinstance(val, list):
                        continue
                    rargs[prop] = val

                repo = publisher.Repository(**rargs)
                pub.add_repository(repo)

        pkglist = dump_struct.get("packages", [])
        if pkglist:
            result.append((None, pkglist))
    except (api_errors.PublisherError, TypeError, ValueError) as e:
        raise api_errors.InvalidP5IFile(str(e))
    return result


def write(fileobj, pubs, pkg_names=None):
    """Writes the publisher, repository, and provided package names to the
        provided file-like object 'fileobj' in JSON p5i format.

        'fileobj' is an object that has a 'write' method that accepts data to be
Code example #9
File: p5s.py  Project: thenovum/pkg5
def parse(proxy_host, data):
    """Reads the pkg(5) publisher JSON formatted data at 'location'
        or from the provided file-like object 'fileobj' and returns a
        tuple.  The first element of the tuple is a list of publisher objects.
        The second element is a dictionary of image properties.

        'proxy_host' is the string to replace the special string
        'http://<sysrepo>' with when it starts any uri.

        'data' is a string containing the p5s data.
        """
    def transform_urls(urls):
        res = []
        for val in urls:
            # If the URI contains <sysrepo> then it's served
            # directly by the system-repository.
            if val.startswith("http://{0}".format(publisher.SYSREPO_PROXY)):
                scheme, netloc, path, params, query, fragment =\
                    urlparse(val)
                r = publisher.RepositoryURI(
                    urlunparse(
                        (scheme, proxy_host, path, params, query, fragment)))
            else:
                # This URI needs to be proxied through the
                # system-repository, so we assign it a special
                # ProxyURI, which gets replaced by the actual
                # URI of the system-repository in
                # imageconfig.BlendedConfig.__merge_publishers
                r = publisher.RepositoryURI(val)
                r.proxies = [publisher.ProxyURI(None, system=True)]
            res.append(r)
        return res

    try:
        dump_struct = json.loads(data)
    except ValueError as e:
        # Not a valid JSON file.
        raise api_errors.InvalidP5SFile(e)

    try:
        ver = int(dump_struct["version"])
    except KeyError:
        raise api_errors.InvalidP5SFile(_("missing version"))
    except ValueError:
        raise api_errors.InvalidP5SFile(_("invalid version"))

    if ver > CURRENT_VERSION:
        raise api_errors.UnsupportedP5SFile()

    pubs = []
    props = {}
    try:
        plist = dump_struct.get("publishers", [])

        # For each set of publisher information in the parsed p5s file,
        # build a Publisher object.
        for p in plist:
            alias = p.get("alias", None)
            prefix = p.get("name", None)
            sticky = p.get("sticky", True)

            if not prefix:
                prefix = "Unknown"

            pub = publisher.Publisher(prefix, alias=alias, sticky=sticky)
            v = p.get("signature-policy")
            if v is not None:
                pub.properties["signature-policy"] = v
            v = p.get("signature-required-names")
            if v is not None:
                pub.properties["signature-required-names"] = v

            r = p.get("repository", None)
            if r:
                rargs = {}
                for prop in ("collection_type", "description", "name",
                             "refresh_seconds", "sticky"):
                    val = r.get(prop, None)
                    if val is None or val == "None":
                        continue
                    rargs[prop] = val

                for prop in ("legal_uris", "related_uris"):
                    val = r.get(prop, [])
                    if not isinstance(val, list):
                        continue
                    rargs[prop] = val

                for prop in ("mirrors", "origins"):
                    urls = r.get(prop, [])
                    if not isinstance(urls, list):
                        continue
                    rargs[prop] = transform_urls(urls)
                repo = publisher.Repository(**rargs)
                pub.repository = repo
            pubs.append(pub)

        props["publisher-search-order"] = \
            dump_struct["image_properties"]["publisher-search-order"]

        sig_pol = dump_struct["image_properties"].get("signature-policy")
        if sig_pol is not None:
            props["signature-policy"] = sig_pol

        req_names = dump_struct["image_properties"].get(
            "signature-required-names")
        if req_names is not None:
            props["signature-required-names"] = req_names
    except (api_errors.PublisherError, TypeError, ValueError) as e:
        raise api_errors.InvalidP5SFile(str(e))
    return pubs, props
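
A minimal usage sketch for this parse() variant, assuming the module is importable as pkg.client.p5s, that a "version" of 1 is accepted, and that publisher.SYSREPO_PROXY is the literal '<sysrepo>' placeholder mentioned in the docstring; the proxy host, publisher name, and URIs are illustrative only:

from __future__ import print_function
import json
import pkg.client.p5s as p5s   # assumed module path

sample = json.dumps({
    "version": 1,
    "publishers": [
        {"name": "example.org",
         "repository": {
             # The <sysrepo> placeholder should be rewritten to the
             # proxy host by transform_urls() above.
             "origins": ["http://<sysrepo>/example.org/"]
         }}
    ],
    "image_properties": {
        "publisher-search-order": ["example.org"]
    }
})

pubs, props = p5s.parse("localhost:1008", sample)
for pub in pubs:
    print(pub.prefix, [o.uri for o in pub.repository.origins])
print(props["publisher-search-order"])
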
Code example #10
    def test_writable_root(self):
        """Tests whether the index and feed cache file are written to
                the writable root parameter."""

        self.make_misc_files(TestPkgDepot.misc_files)
        writable_root = os.path.join(self.test_root, "writ_root")
        o_index_dir = os.path.join(self._get_repo_index_dir(), "index")

        timeout = 10

        def check_state(check_feed):
            index_dir = os.path.join(self._get_repo_writ_dir(), "index")
            feed = os.path.join(writable_root, "publisher", "test", "feed.xml")
            found = not os.path.exists(o_index_dir) and \
                os.path.isdir(index_dir) and \
                (not check_feed or os.path.isfile(feed))
            start_time = time.time()
            while not found and time.time() - start_time < timeout:
                time.sleep(1)
                found = not os.path.exists(o_index_dir) and \
                    os.path.isdir(index_dir) and \
                    (not check_feed or os.path.isfile(feed))

            self.assert_(not os.path.exists(o_index_dir))
            self.assert_(os.path.isdir(index_dir))
            if check_feed:
                try:
                    self.assert_(os.path.isfile(feed))
                except:
                    raise RuntimeError("Feed cache file "
                                       "not found at '{0}'.".format(feed))

        def get_feed(durl, pub=""):
            start_time = time.time()
            got = False
            while not got and (time.time() - start_time) < timeout:
                if pub:
                    pub = "{0}/".format(pub)
                try:
                    urllib2.urlopen("{0}{1}/feed".format(durl, pub))
                    got = True
                except urllib2.HTTPError as e:
                    self.debug(str(e))
                    time.sleep(1)
            self.assert_(got)

        self.__dc.set_port(self.next_free_port)
        durl = self.__dc.get_depot_url()

        repo = self.__dc.get_repo()
        pub = repo.get_publisher("test")
        pub_repo = pub.repository
        if not pub_repo:
            pub_repo = publisher.Repository()
            pub.repository = pub_repo
        pub_repo.origins = [durl]
        repo.update_publisher(pub)

        self.__dc.set_writable_root(writable_root)
        self.__dc.set_property("publisher", "prefix", "test")
        self.__dc.start()
        check_state(False)
        self.pkgsend_bulk(durl, TestPkgDepot.quux10, refresh_index=True)
        get_feed(durl)
        check_state(True)

        self.image_create(durl)
        self.pkg("search -r cat")
        self.__dc.stop()
        self.__dc.set_readonly()
        shutil.rmtree(writable_root)
        self.__dc.start()
        get_feed(durl)
        check_state(True)
        self.pkg("search -r cat")
        self.__dc.stop()
        self.__dc.set_refresh_index()
        shutil.rmtree(writable_root)
        self.__dc.start()
        check_state(False)
        self.__dc.stop()
        self.__dc.set_norefresh_index()
        self.__dc.start()
        get_feed(durl)
        check_state(True)
        self.pkg("search -r cat")
Code example #11
        def read_publisher(self, meta_root, cp, s):
                # publisher block has alias, prefix, origin, and mirrors
                changed = False
                try:
                        alias = cp.get(s, "alias")
                        if alias == "None":
                                alias = None
                except ConfigParser.NoOptionError:
                        alias = None

                prefix = cp.get(s, "prefix")

                if prefix.startswith(fmri.PREF_PUB_PFX):
                        raise RuntimeError(
                            "Invalid Publisher name: %s" % prefix)

                try:
                        sticky = cp.getboolean(s, "sticky")
                except (ConfigParser.NoOptionError, ValueError):
                        sticky = True

                try:
                        d = cp.get(s, "disabled")
                except ConfigParser.NoOptionError:
                        d = 'False'
                disabled = d.lower() in ("true", "yes")

                origin = cp.get(s, "origin")
                try:
                        sysrepo_uristr = cp.get(s, "sysrepo.uri")
                except ConfigParser.NoOptionError:
                        sysrepo_uristr = "None"
                try:
                        sysrepo_sock_path = cp.get(s, "sysrepo.sock_path")
                except ConfigParser.NoOptionError:
                        sysrepo_sock_path = "None"

                try:
                        org_str = cp.get(s, "origins")
                except ConfigParser.NoOptionError:
                        org_str = "None"

                if org_str == "None":
                        origins = []
                else:
                        origins = self.read_list(org_str)

                # Ensure that the list of origins is unique and complete.
                origins = set(origins)
                if origin != "None":
                        origins.add(origin)

                if sysrepo_uristr in origins:
                        origins.remove(sysrepo_uristr)

                mir_str = cp.get(s, "mirrors")
                if mir_str == "None":
                        mirrors = []
                else:
                        mirrors = self.read_list(mir_str)

                try:
                        ssl_key = cp.get(s, "ssl_key")
                        if ssl_key == "None":
                                ssl_key = None
                except ConfigParser.NoOptionError:
                        ssl_key = None

                try:
                        ssl_cert = cp.get(s, "ssl_cert")
                        if ssl_cert == "None":
                                ssl_cert = None
                except ConfigParser.NoOptionError:
                        ssl_cert = None

                try:
                        # XXX this should really be client_uuid, but is being
                        # left with this name for compatibility with older
                        # clients.
                        client_uuid = cp.get(s, "uuid")
                        if client_uuid == "None":
                                client_uuid = None
                except ConfigParser.NoOptionError:
                        client_uuid = None

                # Load selected repository data.
                # XXX this is temporary until a switch to a more expressive
                # configuration format is made.
                repo_data = {
                    "collection_type": None,
                    "description": None,
                    "legal_uris": None,
                    "name": None,
                    "refresh_seconds": None,
                    "registered": None,
                    "registration_uri": None,
                    "related_uris": None,
                    "sort_policy": None,
                }

                for key in repo_data:
                        try:
                                val = cp.get(s, "repo.%s" % key)
                                if key.endswith("_uris"):
                                        val = self.read_list(val)
                                        if val == "None":
                                                val = []
                                else:
                                        if val == "None":
                                                val = None
                                repo_data[key] = val
                        except ConfigParser.NoOptionError:
                                if key.endswith("_uris"):
                                        repo_data[key] = []
                                else:
                                        repo_data[key] = None

                # Normalize/sanitize repository data.
                val = repo_data["registered"]
                if val is not None and val.lower() in ("true", "yes", "1"):
                        repo_data["registered"] = True
                else:
                        repo_data["registered"] = False

                for attr in ("collection_type", "sort_policy"):
                        if not repo_data[attr]:
                                # Assume default value for attr.
                                del repo_data[attr]

                if repo_data["refresh_seconds"] is None:
                        repo_data["refresh_seconds"] = \
                            REPO_REFRESH_SECONDS_DEFAULT

                # Guard against invalid configuration for ssl information. If
                # this isn't done, the user won't be able to load the client
                # to fix the problem.
                for origin in origins:
                        if not origin.startswith("https"):
                                ssl_key = None
                                ssl_cert = None
                                break

                #
                # For zones, the reachability of an absolute path changes
                # depending on whether you're inside the zone or not, so we
                # use a different policy: ssl_key and ssl_cert are treated
                # as relative to the zone root.
                #
                ngz = self.variants.get("variant.opensolaris.zone",
                    "global") == "nonglobal"

                if ssl_key:
                        if ngz:
                                ssl_key = os.path.normpath(self.__imgroot +
                                    os.sep + ssl_key)
                        else:
                                ssl_key = os.path.abspath(ssl_key)
                        if not os.path.exists(ssl_key):
                                logger.error(api_errors.NoSuchKey(ssl_key,
                                    uri=list(origins)[0], publisher=prefix))
                                ssl_key = None

                if ssl_cert:
                        if ngz:
                                ssl_cert = os.path.normpath(self.__imgroot +
                                    os.sep + ssl_cert)
                        else:
                                ssl_cert = os.path.abspath(ssl_cert)
                        if not os.path.exists(ssl_cert):
                                logger.error(api_errors.NoSuchCertificate(
                                    ssl_cert, uri=list(origins)[0],
                                    publisher=prefix))
                                ssl_cert = None

                r = publisher.Repository(**repo_data)
                if sysrepo_uristr != "None" and sysrepo_sock_path != "None":
                        r.set_system_repo(sysrepo_uristr,
                            socket_path=sysrepo_sock_path)
                for o in origins:
                        r.add_origin(o, ssl_cert=ssl_cert, ssl_key=ssl_key)
                for m in mirrors:
                        r.add_mirror(m, ssl_cert=ssl_cert, ssl_key=ssl_key)

                # Root directory for this publisher's metadata.
                pmroot = os.path.join(meta_root, prefix)

                pub = publisher.Publisher(prefix, alias=alias,
                    client_uuid=client_uuid, disabled=disabled,
                    meta_root=pmroot, repositories=[r], sticky=sticky)

                # write out the UUID if it was set
                if pub.client_uuid != client_uuid:
                        changed = True

                return prefix, pub, changed
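
The cp.get() calls above imply the shape of the legacy publisher section this method reads; a hedged sketch of such a section follows. The section name, prefix, and URIs are made up, and only "prefix", "origin", and "mirrors" are read without a NoOptionError fallback in the code above, so at minimum those options must be present:

import ConfigParser

cp = ConfigParser.SafeConfigParser()
s = "authority_example.org"                        # hypothetical section name
cp.add_section(s)
cp.set(s, "prefix", "example.org")                 # required: no fallback above
cp.set(s, "origin", "http://pkg.example.org/")     # required
cp.set(s, "mirrors", "None")                       # required
cp.set(s, "alias", "None")
cp.set(s, "origins", "None")
cp.set(s, "sticky", "True")
cp.set(s, "disabled", "False")
cp.set(s, "uuid", "None")
cp.set(s, "repo.refresh_seconds", "14400")

# read_publisher() is a method of the image-configuration object, so a
# call would look roughly like:
#     prefix, pub, changed = imgcfg.read_publisher(meta_root, cp, s)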