def test_stress_file_publish(self):
    """Publish lots of packages rapidly ensuring that file
    publication can handle it."""

    # Build a file:// URL pointing at the test depot's repository
    # directory.
    repo_path = os.path.abspath(self.dc.get_repodir())
    repo_url = urlunparse(
        ("file", "", pathname2url(repo_path), "", "", ""))

    # Configure a transport with a single publisher rooted at the
    # file-based repository.
    origin = publisher.RepositoryURI(repo_url)
    repo = publisher.Repository(origins=[origin])
    pub = publisher.Publisher(prefix="repo1", repository=repo)
    xport_cfg = transport.GenericTransportCfg()
    xport_cfg.add_publisher(pub)
    xport = transport.Transport(xport_cfg)

    # Each version number must be unique since multiple packages
    # will be published within the same second.
    for version in range(100):
        pf = fmri.PkgFmri("foo@{0:d}.0".format(version))
        txn = trans.Transaction(repo_url, pkg_name=str(pf),
            xport=xport, pub=pub)
        txn.open()
        pkg_fmri, pkg_state = txn.close()
        self.debug("{0}: {1}".format(pkg_fmri, pkg_state))
def resolve_cb(sd_hdl, flags, interface_idx, error_code, full_name,
    host_target, port, txt_record):
    """mDNS/DNS-SD resolve callback: extract the "url" entry from the
    service's TXT record and, if it is a valid publication URL, record
    it as a mirror."""
    # Ignore failed resolutions outright.
    if error_code != pybonjour.kDNSServiceErr_NoError:
        return

    record = pybonjour.TXTRecord.parse(txt_record)
    if "url" not in record:
        return

    candidate = record["url"]
    # Only keep URLs that pass validation.
    if misc.valid_pub_url(candidate):
        self._mirrors.append(pub.RepositoryURI(candidate))
def transform_urls(urls):
    """Return a list of RepositoryURI objects for the given URL
    strings, rewriting system-repository URLs to point at the proxy
    host and marking all other URLs as system-proxied."""
    # Loop-invariant prefix identifying URLs served directly by the
    # system-repository.
    sysrepo_prefix = "http://{0}".format(publisher.SYSREPO_PROXY)

    transformed = []
    for uri in urls:
        if uri.startswith(sysrepo_prefix):
            # Served directly by the system-repository: swap the
            # netloc for the proxy host, keeping the rest intact.
            scheme, netloc, path, params, query, fragment =\
                urlparse(uri)
            repo_uri = publisher.RepositoryURI(
                urlunparse((scheme, proxy_host, path, params,
                query, fragment)))
        else:
            # This URI needs to be proxied through the
            # system-repository, so we assign it a special
            # ProxyURI, which gets replaced by the actual
            # URI of the system-repository in
            # imageconfig.BlendedConfig.__merge_publishers
            repo_uri = publisher.RepositoryURI(uri)
            repo_uri.proxies = [publisher.ProxyURI(None, system=True)]
        transformed.append(repo_uri)
    return transformed
def main_func():
    """Entry point for pkgmerge: for each publisher found in the
    source repositories, merge per-variant packages and republish the
    merged results to the destination repository.

    Returns EXIT_OK on success; calls usage() or exit(EXIT_OOPS) on
    error paths.
    """
    global dry_run, tmpdir, xport, dest_xport, target_pub

    dest_repo = None
    source_list = []
    variant_list = []
    pub_list = []
    use_pub_list = False

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "d:np:s:?", ["help"])
        for opt, arg in opts:
            if opt == "-d":
                dest_repo = misc.parse_uri(arg)
            elif opt == "-n":
                dry_run = True
            elif opt == "-s":
                s = arg.split(",")
                if len(s) < 2:
                    usage("-s option must specify "
                        "variant=value,repo_uri")

                # All but last part should be variant.
                src_vars = {}
                for v in s[:-1]:
                    try:
                        vname, vval = v.split("=")
                    except ValueError:
                        usage("-s option must specify "
                            "variant=value,repo_uri")

                    if not vname.startswith("variant."):
                        vname = "variant.{0}".format(vname)
                    src_vars[vname] = vval

                variant_list.append(src_vars)
                source_list.append(publisher.RepositoryURI(
                    misc.parse_uri(s[-1])))
            elif opt == "-p":
                use_pub_list = True
                pub_list.append(arg)

            if opt in ("--help", "-?"):
                usage(exitcode=0)
    except getopt.GetoptError as e:
        usage(_("illegal option -- {0}").format(e.opt))

    if not source_list:
        usage(_("At least one variant name, value, and package source "
            "must be provided using -s."))

    if not dest_repo:
        usage(_("A destination package repository must be provided "
            "using -d."))

    # Determine the unique set of variants across all sources.
    variants = set()
    vcombos = collections.defaultdict(set)
    for src_vars in variant_list:
        # FIX: dict.iteritems() is Python 2 only; use items().
        for v, vval in src_vars.items():
            variants.add(v)
            vcombos[v].add((v, vval))

    # merge_fmris() expects this to be a list.
    variants = list(variants)

    # Require that the user specified the same variants for all sources.
    for i, src_vars in enumerate(variant_list):
        missing = set(v for v in variants if v not in variant_list[i])
        if missing:
            missing = ", ".join(missing)
            source = source_list[i]
            usage(_("Source {source} missing values for "
                "variants: {missing}").format(**locals()))

    # Require that each unique variant combination has a source.
    for combo in itertools.product(*vcombos.values()):
        found = False
        for i, src in enumerate(source_list):
            for vname, vval in combo:
                if variant_list[i].get(vname, None) != vval:
                    found = False
                    break
            else:
                found = True
                break

        if not found:
            combo = " ".join(
                "{0}={1}".format(vname, vval)
                for vname, vval in combo
            )
            usage(_("No source was specified for variant "
                "combination {combo}.").format(**locals()))

    # initialize transport
    # we use a single endpoint for now, since the transport code
    # uses publisher as a unique key... so we just flop the repo
    # list as needed to access the different catalogs/manifests/files.
    temp_root = misc.config_temp_root()

    tmpdir = tempfile.mkdtemp(dir=temp_root, prefix="pkgmerge")
    xport, xport_cfg = transport.setup_transport()
    xport_cfg.incoming_root = tmpdir

    # we don't use the publisher returned by setup_publisher, as that
    # only returns one of the publishers in source_list.  Instead we
    # use xport_cfg to access all publishers.
    transport.setup_publisher(source_list, "pkgmerge", xport,
        xport_cfg, remote_prefix=True)
    cat_dir = tempfile.mkdtemp(dir=tmpdir)

    # we must have at least one matching publisher if -p was used.
    known_pubs = set([pub.prefix for pub in xport_cfg.gen_publishers()])
    if pub_list and len(set(pub_list).intersection(known_pubs)) == 0:
        error(_("no publishers from source repositories match "
            "the given -p options."))

    errors = set()
    tracker = get_tracker()

    # iterate over all publishers in our source repositories. If errors
    # are encountered for a given publisher, we accumulate those, and
    # skip to the next publisher.
    for pub in xport_cfg.gen_publishers():

        if use_pub_list:
            if pub.prefix not in pub_list:
                continue
            else:
                # remove publishers from pub_list as we go, so
                # that when we're finished, any remaining
                # publishers in pub_list suggest superfluous
                # -p options, which will cause us to exit with
                # an error.
                pub_list.remove(pub.prefix)

        pub.meta_root = cat_dir
        pub.transport = xport

        # Use separate transport for destination repository in case
        # source and destination have identical publisher
        # configuration.
        dest_xport, dest_xport_cfg = transport.setup_transport()
        dest_xport_cfg.incoming_root = tmpdir

        # retrieve catalogs for all specified repositories
        for s in source_list:
            load_catalog(s, pub)

        # determine the list of packages we'll be processing
        if not pargs:
            # use the latest versions and merge everything
            fmri_arguments = list(set(
                name
                for s in source_list
                for name in get_all_pkg_names(s)
            ))
            exclude_args = []
        else:
            fmri_arguments = [
                f
                for f in pargs
                if not f.startswith("!")
            ]
            exclude_args = [
                f[1:]
                for f in pargs
                if f.startswith("!")
            ]

        # build fmris to be merged
        masterlist = [
            build_merge_list(fmri_arguments, exclude_args,
                catalog_dict[s.uri])
            for s in source_list
        ]

        # check for unmatched patterns
        in_none = reduce(lambda x, y: x & y,
            (set(u) for d, u in masterlist))
        if in_none:
            # FIX: was "pub.prefi", a misspelling of pub.prefix
            # that raised AttributeError on this error path.
            errors.add(
                _("The following pattern(s) did not match any "
                "packages in any of the specified repositories for "
                "publisher {pub_name}:"
                "\n{patterns}").format(patterns="\n".join(in_none),
                pub_name=pub.prefix))
            continue

        # generate set of all package names to be processed, and dict
        # of lists indexed by order in source_list; if that repo has no
        # fmri for this pkg then use None.
        allpkgs = set(name for d, u in masterlist for name in d)

        processdict = {}
        for p in allpkgs:
            for d, u in masterlist:
                processdict.setdefault(p, []).append(
                    d.setdefault(p, None))

        # check to make sure all fmris are at same version modulo
        # timestamp
        for entry in processdict:
            if len(set([
                    str(a).rsplit(":")[0]
                    for a in processdict[entry]
                    if a is not None
                    ])) > 1:
                # FIX: the keyword argument was "pub=", but the
                # template names "{pubs}", which raised KeyError
                # instead of reporting the mismatch.
                # NOTE(review): this 'continue' only advances the
                # entry loop (it is the last statement in it), so a
                # publisher with mismatched versions is still
                # republished below -- confirm intent.
                errors.add(
                    _("fmris matching the following patterns do"
                    " not have matching versions across all "
                    "repositories for publisher {pubs}: "
                    "{patterns}").format(pubs=pub.prefix,
                    patterns=processdict[entry]))
                continue

        # we're ready to merge
        if not dry_run:
            target_pub = transport.setup_publisher(dest_repo,
                pub.prefix, dest_xport, dest_xport_cfg,
                remote_prefix=True)
        else:
            target_pub = None

        tracker.republish_set_goal(len(processdict), 0, 0)
        # republish packages for this publisher. If we encounter any
        # publication errors, we move on to the next publisher.
        # FIX: create the temp directory before entering the try block
        # so the finally clause cannot hit a NameError if mkdtemp
        # itself fails.
        pkg_tmpdir = tempfile.mkdtemp(dir=tmpdir)
        try:
            republish_packages(pub, target_pub,
                processdict, source_list, variant_list, variants,
                tracker, xport, dest_repo, dest_xport, pkg_tmpdir,
                dry_run=dry_run)
        except (trans.TransactionError, PkgmergeException) as e:
            errors.add(str(e))
            tracker.reset()
            continue
        finally:
            # if we're handling an exception, this still gets called
            # in spite of the 'continue' that the handler ends with.
            if os.path.exists(pkg_tmpdir):
                shutil.rmtree(pkg_tmpdir)

        tracker.republish_done(dryrun=dry_run)
        tracker.reset()

    # If -p options were supplied, we should have processed all of them
    # by now.  Remaining entries suggest -p options that were not
    # merged.
    if use_pub_list and pub_list:
        errors.add(_("the following publishers were not found in "
            "source repositories: {0}").format(" ".join(pub_list)))

    # If we have encountered errors for some publishers, print them now
    # and exit.
    tracker.flush()
    for message in errors:
        error(message, exitcode=None)
    if errors:
        exit(EXIT_OOPS)

    return EXIT_OK
def main_func():
    """Entry point for pkgsurf-style reversioning: compare each
    publisher in the target repository against a reference repository,
    reversion packages via do_reversion(), and rebuild the target's
    catalog when anything was actually reversioned.

    Returns pkgdefs.EXIT_OK on success, or pkgdefs.EXIT_OOPS when no
    matching publishers could be found.  Calls usage()/abort() on
    argument and repository errors.
    """
    global temp_root, repo_modified, repo_finished, repo_uri, tracker
    global dry_run

    global_settings.client_name = PKG_CLIENT_NAME

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "?c:i:np:r:s:u",
            ["help"])
    except getopt.GetoptError as e:
        usage(_("illegal option -- {0}").format(e.opt))

    dry_run = False
    ref_repo_uri = None
    # Target repo defaults to $PKG_REPO; -s overrides it below.
    repo_uri = os.getenv("PKG_REPO", None)
    changes = set()
    ignores = set()
    publishers = set()
    cmp_policy = CMP_ALL

    processed_pubs = 0

    for opt, arg in opts:
        if opt == "-c":
            changes.add(arg)
        elif opt == "-i":
            ignores.add(arg)
        elif opt == "-n":
            dry_run = True
        elif opt == "-p":
            publishers.add(arg)
        elif opt == "-r":
            ref_repo_uri = misc.parse_uri(arg)
        elif opt == "-s":
            repo_uri = misc.parse_uri(arg)
        elif opt == "-u":
            cmp_policy = CMP_UNSIGNED
        elif opt == "-?" or opt == "--help":
            usage(retcode=pkgdefs.EXIT_OK)

    if pargs:
        usage(_("Unexpected argument(s): {0}").format(" ".join(pargs)))

    if not repo_uri:
        usage(_("A target repository must be provided."))

    if not ref_repo_uri:
        usage(_("A reference repository must be provided."))

    target = publisher.RepositoryURI(misc.parse_uri(repo_uri))
    if target.scheme != "file":
        abort(err=_("Target repository must be filesystem-based."))
    try:
        target_repo = sr.Repository(read_only=dry_run,
            root=target.get_pathname())
    except sr.RepositoryError as e:
        abort(str(e))

    # Use the tmp directory in target repo for efficient file rename
    # since files are in the same file system.
    temp_root = target_repo.temp_root
    if not os.path.exists(temp_root):
        os.makedirs(temp_root)

    ref_incoming_dir = tempfile.mkdtemp(dir=temp_root)
    ref_pkg_root = tempfile.mkdtemp(dir=temp_root)

    ref_xport, ref_xport_cfg = transport.setup_transport()
    ref_xport_cfg.incoming_root = ref_incoming_dir
    ref_xport_cfg.pkg_root = ref_pkg_root
    transport.setup_publisher(ref_repo_uri, "ref", ref_xport,
        ref_xport_cfg, remote_prefix=True)

    # Only file-based reference repos can be opened directly; remote
    # ones are reached through ref_xport instead.
    ref_repo = None
    ref = publisher.RepositoryURI(misc.parse_uri(ref_repo_uri))
    if ref.scheme == "file":
        try:
            ref_repo = sr.Repository(read_only=dry_run,
                root=ref.get_pathname())
        except sr.RepositoryError as e:
            abort(str(e))

    tracker = get_tracker()

    for pub in target_repo.publishers:
        # -p filters publishers unless '*' was given.
        if publishers and pub not in publishers \
            and '*' not in publishers:
            continue

        msg(_("Processing packages for publisher {0} ...").format(pub))
        # Find the matching pub in the ref repo.
        # FIX: removed the unused "found = True" flag; the for/else
        # construct already handles the not-found case.
        for ref_pub in ref_xport_cfg.gen_publishers():
            if ref_pub.prefix == pub:
                break
        else:
            txt = _("Publisher {0} not found in reference "
                "repository.").format(pub)
            if publishers:
                abort(err=txt)
            else:
                txt += _(" Skipping.")
                msg(txt)
            continue

        processed_pubs += 1

        rev = do_reversion(pub, ref_pub, target_repo, ref_xport,
            changes, ignores, cmp_policy, ref_repo, ref, ref_xport_cfg)

        # Only rebuild catalog if anything got actually reversioned.
        if rev and not dry_run:
            msg(_("Rebuilding repository catalog."))
            target_repo.rebuild(pub=pub)
            # NOTE(review): repo_finished tells cleanup() the rebuild
            # already happened; confirm it should be set only on the
            # rebuild path.
            repo_finished = True

    ret = pkgdefs.EXIT_OK
    if processed_pubs == 0:
        msg(_("No matching publishers could be found."))
        ret = pkgdefs.EXIT_OOPS
    cleanup()
    return ret
def test_01_repository_uri(self):
    """Verify that a RepositoryURI object can be created, copied,
    modified, and used as expected."""

    # Paths used for property values; the files need not exist for
    # these checks.  nsfile is not referenced again below.
    nsfile = os.path.join(self.test_root, "nosuchfile")
    tcert = os.path.join(self.test_root, "test.cert")
    tkey = os.path.join(self.test_root, "test.key")

    # Constructor keyword arguments exercised throughout this test.
    uprops = {
        "priority": 1,
        "ssl_cert": tcert,
        "ssl_key": tkey,
        "trailing_slash": False,
    }

    # Check that all properties can be set at construction time.
    uobj = publisher.RepositoryURI("https://example.com", **uprops)

    # Verify that all properties provided at construction time were
    # set as expected.
    self.assertEqual(uobj.uri, "https://example.com")
    for p in uprops:
        self.assertEqual(uprops[p], getattr(uobj, p))

    # Verify that scheme matches provided URI.
    self.assertEqual(uobj.scheme, "https")

    # Verify that a copy matches its original.
    cuobj = copy.copy(uobj)
    self.assertEqual(uobj.uri, cuobj.uri)
    for p in uprops:
        self.assertEqual(getattr(uobj, p), getattr(cuobj, p))
    cuobj = None

    # Verify that setting invalid property values raises the
    # expected exception.
    self.assertRaises(api_errors.BadRepositoryURI, setattr, uobj,
        "uri", None)
    self.assertRaises(api_errors.UnsupportedRepositoryURI, setattr,
        uobj, "uri", ":/notvalid")
    # this value is valid only for ProxyURI objects, not
    # RepositoryURI objects
    self.assertRaises(api_errors.BadRepositoryURI, setattr, uobj,
        "uri", "http://*****:*****@server")
    self.assertRaises(api_errors.BadRepositoryURIPriority, setattr,
        uobj, "priority", "foo")
    self.assertRaises(api_errors.BadRepositoryAttributeValue, setattr,
        uobj, "ssl_cert", -1)
    self.assertRaises(api_errors.BadRepositoryAttributeValue, setattr,
        uobj, "ssl_key", -1)

    # Verify that changing the URI scheme will null properties that
    # no longer apply (ssl_* only make sense for https).
    uobj.uri = "http://example.com"
    self.assertEqual(uobj.ssl_cert, None)
    self.assertEqual(uobj.ssl_key, None)

    # Verify that scheme matches provided URI.
    self.assertEqual(uobj.scheme, "http")

    # Verify that attempting to set properties not valid for the
    # current URI scheme raises the expected exception.
    self.assertRaises(api_errors.UnsupportedRepositoryURIAttribute,
        setattr, uobj, "ssl_cert", tcert)
    self.assertRaises(api_errors.UnsupportedRepositoryURIAttribute,
        setattr, uobj, "ssl_key", tkey)

    # Verify that individual properties can be set.
    uobj = publisher.RepositoryURI("https://example.com/")
    for p in uprops:
        setattr(uobj, p, uprops[p])
        self.assertEqual(getattr(uobj, p), uprops[p])

    # Finally, verify all properties (except URI and trailing_slash)
    # can be set to None.
    for p in ("priority", "ssl_cert", "ssl_key"):
        setattr(uobj, p, None)
        self.assertEqual(getattr(uobj, p), None)

    # Verify that proxies are set properly, both empty and non-empty.
    uobj = publisher.RepositoryURI("https://example.com",
        proxies=[])
    uobj = publisher.RepositoryURI("https://example.com",
        proxies=[publisher.ProxyURI("http://foo.com")])
    self.assertTrue(uobj.proxies == [publisher.ProxyURI(
        "http://foo.com")])
    uobj.proxies = []
    self.assertTrue(uobj.proxies == [])

    # Verify that proxies and proxy are linked: setting one is
    # reflected by the other.
    uobj.proxies = [publisher.ProxyURI("http://foo.com")]
    self.assertTrue(uobj.proxy == "http://foo.com")
    uobj.proxy = "http://bar"
    self.assertTrue(uobj.proxies == [publisher.ProxyURI("http://bar")])

    # Supplying both proxies and proxy at construction time must fail.
    try:
        raised = False
        publisher.RepositoryURI("http://foo",
            proxies=[publisher.ProxyURI("http://bar")],
            proxy="http://foo")
    except api_errors.PublisherError:
        raised = True
    finally:
        self.assertTrue(raised, "No exception raised when "
            "creating a RepositoryURI obj with proxies & proxy")

    # Check that we detect bad values for proxies
    self.assertRaises(api_errors.BadRepositoryAttributeValue,
        setattr, uobj, "proxies", "foo")
    self.assertRaises(api_errors.BadRepositoryAttributeValue,
        setattr, uobj, "proxies", [None])

    # we only support a single proxy per RepositoryURI
    self.assertRaises(api_errors.BadRepositoryAttributeValue,
        setattr, uobj, "proxies", [
        publisher.ProxyURI("http://foo.com"),
        publisher.ProxyURI("http://bar.com")])
def test_02_repository(self):
    """Verify that a Repository object can be created, copied,
    modified, and used as expected."""

    # Certificate/key paths used as property values; they need not
    # exist on disk for these checks.
    tcert = os.path.join(self.test_root, "test.cert")
    tkey = os.path.join(self.test_root, "test.key")

    t2cert = os.path.join(self.test_root, "test2.cert")
    t2key = os.path.join(self.test_root, "test2.key")

    # Constructor keyword arguments exercised throughout this test.
    rprops = {
        "collection_type": publisher.REPO_CTYPE_SUPPLEMENTAL,
        "description": "Provides only the best BobCat packages!",
        "legal_uris": [
            "http://legal1.example.com",
            "http://legal2.example.com"
        ],
        "mirrors": [
            "http://mirror1.example.com/",
            "http://mirror2.example.com/"
        ],
        "name": "BobCat Repository",
        "origins": [
            "http://origin1.example.com/",
            "http://origin2.example.com/"
        ],
        "refresh_seconds": 70000,
        "registered": True,
        "registration_uri": "http://register.example.com/",
        "related_uris": [
            "http://related1.example.com",
            "http://related2.example.com"
        ],
        "sort_policy": publisher.URI_SORT_PRIORITY,
    }

    # Check that all properties can be set at construction time.
    robj = publisher.Repository(**rprops)

    # Verify that all properties provided at construction time were
    # set as expected.
    for p in rprops:
        self.assertEqual(rprops[p], getattr(robj, p))

    # Verify that a copy matches its original.
    crobj = copy.copy(robj)
    for p in rprops:
        self.assertEqual(getattr(robj, p), getattr(crobj, p))
    crobj = None

    # New set of rprops for testing (all the URI use https so that
    # setting ssl_key and ssl_cert can be tested).
    rprops = {
        "collection_type": publisher.REPO_CTYPE_SUPPLEMENTAL,
        "description": "Provides only the best BobCat packages!",
        "legal_uris": [
            "https://legal1.example.com",
            "https://legal2.example.com"
        ],
        "mirrors": [
            "https://mirror1.example.com/",
            "https://mirror2.example.com/"
        ],
        "name": "BobCat Repository",
        "origins": [
            "https://origin1.example.com/",
            "https://origin2.example.com/"
        ],
        "refresh_seconds": 70000,
        "registered": True,
        "registration_uri": "https://register.example.com/",
        "related_uris": [
            "https://related1.example.com",
            "https://related2.example.com"
        ],
        "sort_policy": publisher.URI_SORT_PRIORITY,
    }

    # Verify that individual properties can be set.
    robj = publisher.Repository()
    for p in rprops:
        setattr(robj, p, rprops[p])
        self.assertEqual(getattr(robj, p), rprops[p])

    # Verify that setting invalid property values raises the
    # expected exception.
    self.assertRaises(api_errors.BadRepositoryCollectionType,
        setattr, robj, "collection_type", -1)
    self.assertRaises(api_errors.BadRepositoryAttributeValue,
        setattr, robj, "refresh_seconds", -1)
    self.assertRaises(api_errors.BadRepositoryURISortPolicy,
        setattr, robj, "sort_policy", -1)

    # Verify that add functions work as expected.  Each URI type has
    # a corresponding add_<utype> method looked up dynamically.
    robj = publisher.Repository()
    for utype in ("legal_uri", "mirror", "origin", "related_uri"):
        prop = utype + "s"
        for u in rprops[prop]:
            method = getattr(robj, "add_{0}".format(utype))
            method(u, priority=1, ssl_cert=tcert, ssl_key=tkey)

    # Verify that has and get functions work as expected.
    for utype in ("mirror", "origin"):
        prop = utype + "s"
        for u in rprops[prop]:
            method = getattr(robj, "has_{0}".format(utype))
            self.assertTrue(method(u))

            method = getattr(robj, "get_{0}".format(utype))
            cu = publisher.RepositoryURI(u, priority=1,
                ssl_cert=tcert, ssl_key=tkey, trailing_slash=True)
            ou = method(u)

            # This verifies that the expected URI object is
            # returned and that all of the properties match
            # exactly as they were added.
            for uprop in ("uri", "priority", "ssl_cert",
                "ssl_key", "trailing_slash"):
                self.assertEqual(getattr(cu, uprop),
                    getattr(ou, uprop))

    # Verify that remove functions work as expected.
    for utype in ("legal_uri", "mirror", "origin", "related_uri"):
        prop = utype + "s"

        # Remove only the first URI for each property; one entry of
        # the two added must remain.
        u = rprops[prop][0]
        method = getattr(robj, "remove_{0}".format(utype))
        method(u)
        self.assertTrue(u not in getattr(robj, prop))
        self.assertEqual(len(getattr(robj, prop)), 1)

    # Verify that update functions work as expected.
    for utype in ("mirror", "origin"):
        prop = utype + "s"

        # Update only the last entry for each property.
        u = rprops[prop][-1]

        method = getattr(robj, "update_{0}".format(utype))
        method(u, priority=2, ssl_cert=t2cert, ssl_key=t2key)

        method = getattr(robj, "get_{0}".format(utype))
        ou = method(u)

        # This verifies that the expected URI object is
        # returned and that all of the properties match
        # exactly as specified to the update method.
        cu = publisher.RepositoryURI(u, priority=2,
            ssl_cert=t2cert, ssl_key=t2key)
        for uprop in ("uri", "priority", "ssl_cert",
            "ssl_key", "trailing_slash"):
            self.assertEqual(getattr(cu, uprop),
                getattr(ou, uprop))

    # Verify that reset functions work as expected: mirrors and
    # origins are cleared back to empty lists.
    for prop in ("mirrors", "origins"):
        method = getattr(robj, "reset_{0}".format(prop))
        method()
        self.assertEqual(getattr(robj, prop), [])
if opt == "-n": get_files = False if len(varlist) < 2: usage(_("at least two -v arguments needed to merge")) if not basedir: basedir = os.getcwd() incomingdir = os.path.normpath( os.path.join(basedir, "incoming-%d" % os.getpid())) os.makedirs(incomingdir) tmpdirs.append(incomingdir) server_list = [ publisher.RepositoryURI(v.split(",", 1)[1]) for v in varlist ] xport, xport_cfg = transport.setup_transport() xport_cfg.incoming_root = incomingdir pub = transport.setup_publisher(server_list, "merge", xport, xport_cfg, remote_prefix=True) if len(pargs) == 1: recursive = False overall_set = set() for s in server_list: for name in get_all_pkg_names(s):