def setup_transport_and_pubs(repo_uri, remote=True):
        if repo_uri.startswith("null:"):
                return None, None

        xport, xport_cfg = transport.setup_transport()
        targ_pub = transport.setup_publisher(repo_uri, "default", xport,
            xport_cfg, remote_prefix=remote)

        return xport, targ_pub
def setup_transport_and_pubs(repo_uri, remote=True, ssl_key=None,
    ssl_cert=None):
        if repo_uri.startswith("null:"):
                return None, None

        xport, xport_cfg = transport.setup_transport()
        targ_pub = transport.setup_publisher(repo_uri, "default", xport,
            xport_cfg, remote_prefix=remote, ssl_key=ssl_key,
            ssl_cert=ssl_cert)

        return xport, targ_pub
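# Usage sketch (not part of the original source): how a caller might obtain a
# transport/publisher pair for a destination repository.  The URI below is
# hypothetical; a "null:" URI yields (None, None), meaning output is
# discarded.
def _example_setup(repo_uri="http://pkg.example.com/"):
        xport, targ_pub = setup_transport_and_pubs(repo_uri)
        if xport is None:
                # Destination was "null:"; nothing to publish to.
                return None
        # targ_pub can now be handed to publication/transaction code.
        return xport, targ_pub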
def main_func():
        global dry_run, tmpdir, xport, dest_xport, target_pub

        dest_repo = None
        source_list = []
        variant_list = []
        pub_list = []
        use_pub_list = False

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "d:np:s:?",
                    ["help"])
                for opt, arg in opts:
                        if opt == "-d":
                                dest_repo = misc.parse_uri(arg)
                        elif opt == "-n":
                                dry_run = True
                        elif opt == "-s":
                                s = arg.split(",")
                                if len(s) < 2:
                                        usage("-s option must specify "
                                            "variant=value,repo_uri")

                                # All but last part should be variant.
                                src_vars = {}
                                for v in s[:-1]:
                                        try:
                                                vname, vval = v.split("=")
                                        except ValueError:
                                                usage("-s option must specify "
                                                    "variant=value,repo_uri")

                                        if not vname.startswith("variant."):
                                                vname = "variant.{0}".format(
                                                    vname)
                                        src_vars[vname] = vval

                                variant_list.append(src_vars)
                                source_list.append(publisher.RepositoryURI(
                                    misc.parse_uri(s[-1])))
                        elif opt == "-p":
                                use_pub_list = True
                                pub_list.append(arg)

                        if opt in ("--help", "-?"):
                                usage(exitcode=0)
        except getopt.GetoptError as e:
                usage(_("illegal option -- {0}").format(e.opt))

        if not source_list:
                usage(_("At least one variant name, value, and package source "
                    "must be provided using -s."))

        if not dest_repo:
                usage(_("A destination package repository must be provided "
                    "using -d."))

        # Determine the unique set of variants across all sources.
        variants = set()
        vcombos = collections.defaultdict(set)
        for src_vars in variant_list:
                for v, vval in src_vars.iteritems():
                        variants.add(v)
                        vcombos[v].add((v, vval))

        # merge_fmris() expects this to be a list.
        variants = list(variants)

        # Require that the user specified the same variants for all sources.
        for i, src_vars in enumerate(variant_list):
                missing = set(v for v in variants if v not in variant_list[i])
                if missing:
                        missing = ", ".join(missing)
                        source = source_list[i]
                        usage(_("Source {source} missing values for "
                            "variants: {missing}").format(**locals()))

        # Require that each unique variant combination has a source.
        for combo in itertools.product(*vcombos.values()):
                found = False
                for i, src in enumerate(source_list):
                        for vname, vval in combo:
                                if variant_list[i].get(vname, None) != vval:
                                        found = False
                                        break
                        else:
                                found = True
                                break

                if not found:
                        combo = " ".join(
                            "{0}={1}".format(vname, vval)
                            for vname, vval in combo
                        )
                        usage(_("No source was specified for variant "
                            "combination {combo}.").format(**locals()))

        # initialize transport
        # we use a single endpoint for now, since the transport code
        # uses publisher as a unique key... so we just flop the repo
        # list as needed to access the different catalogs/manifests/files.
        temp_root = misc.config_temp_root()

        tmpdir = tempfile.mkdtemp(dir=temp_root, prefix="pkgmerge")
        xport, xport_cfg = transport.setup_transport()
        xport_cfg.incoming_root = tmpdir

        # we don't use the publisher returned by setup_publisher, as that only
        # returns one of the publishers in source_list.  Instead we use
        # xport_cfg to access all publishers.
        transport.setup_publisher(source_list, "pkgmerge", xport, xport_cfg,
            remote_prefix=True)
        cat_dir = tempfile.mkdtemp(dir=tmpdir)

        # we must have at least one matching publisher if -p was used.
        known_pubs = set([pub.prefix for pub in xport_cfg.gen_publishers()])
        if pub_list and len(set(pub_list).intersection(known_pubs)) == 0:
                error(_("no publishers from source repositories match "
                    "the given -p options."))

        errors = set()
        tracker = get_tracker()

        # iterate over all publishers in our source repositories.  If errors
        # are encountered for a given publisher, we accumulate those, and
        # skip to the next publisher.
        for pub in xport_cfg.gen_publishers():

                if use_pub_list:
                        if pub.prefix not in pub_list:
                                continue
                        else:
                                # remove publishers from pub_list as we go, so
                                # that when we're finished, any remaining
                                # publishers in pub_list suggest superfluous
                                # -p options, which will cause us to exit with
                                # an error.
                                pub_list.remove(pub.prefix)

                pub.meta_root = cat_dir
                pub.transport = xport

                # Use separate transport for destination repository in case
                # source and destination have identical publisher
                # configuration.
                dest_xport, dest_xport_cfg = transport.setup_transport()
                dest_xport_cfg.incoming_root = tmpdir

                # retrieve catalogs for all specified repositories
                for s in source_list:
                        load_catalog(s, pub)

                # determine the list of packages we'll be processing
                if not pargs:
                        # use the latest versions and merge everything
                        fmri_arguments = list(set(
                            name
                            for s in source_list
                            for name in get_all_pkg_names(s)
                        ))
                        exclude_args = []
                else:
                        fmri_arguments = [
                            f
                            for f in pargs
                            if not f.startswith("!")
                        ]
                        exclude_args = [
                            f[1:]
                            for f in pargs
                            if f.startswith("!")
                        ]

                # build fmris to be merged
                masterlist = [
                    build_merge_list(fmri_arguments, exclude_args,
                    catalog_dict[s.uri])
                    for s in source_list
                ]

                # check for unmatched patterns
                in_none = reduce(lambda x, y: x & y,
                    (set(u) for d, u in masterlist))
                if in_none:
                        errors.add(
                            _("The following pattern(s) did not match any "
                            "packages in any of the specified repositories "
                            "for publisher {pub_name}:"
                            "\n{patterns}").format(patterns="\n".join(in_none),
                            pub_name=pub.prefix))
                        continue

                # generate set of all package names to be processed, and dict
                # of lists indexed by order in source_list; if that repo has
                # no fmri for this pkg then use None.
                allpkgs = set(name for d, u in masterlist for name in d)

                processdict = {}
                for p in allpkgs:
                        for d, u in masterlist:
                                processdict.setdefault(p, []).append(
                                    d.setdefault(p, None))

                # check to make sure all fmris are at same version modulo
                # timestamp
                for entry in processdict:
                        if len(set([
                                str(a).rsplit(":")[0]
                                for a in processdict[entry]
                                if a is not None
                            ])) > 1:
                                errors.add(
                                    _("fmris matching the following patterns "
                                    "do not have matching versions across all "
                                    "repositories for publisher {pub}: "
                                    "{patterns}").format(pub=pub.prefix,
                                    patterns=processdict[entry]))
                                continue

                # we're ready to merge
                if not dry_run:
                        target_pub = transport.setup_publisher(dest_repo,
                            pub.prefix, dest_xport, dest_xport_cfg,
                            remote_prefix=True)
                else:
                        target_pub = None

                tracker.republish_set_goal(len(processdict), 0, 0)
                # republish packages for this publisher.  If we encounter any
                # publication errors, we move on to the next publisher.
                try:
                        pkg_tmpdir = tempfile.mkdtemp(dir=tmpdir)
                        republish_packages(pub, target_pub,
                            processdict, source_list, variant_list, variants,
                            tracker, xport, dest_repo, dest_xport, pkg_tmpdir,
                            dry_run=dry_run)
                except (trans.TransactionError, PkgmergeException) as e:
                        errors.add(str(e))
                        tracker.reset()
                        continue
                finally:
                        # if we're handling an exception, this still gets
                        # called in spite of the 'continue' that the handler
                        # ends with.
                        if os.path.exists(pkg_tmpdir):
                                shutil.rmtree(pkg_tmpdir)

                tracker.republish_done(dryrun=dry_run)
                tracker.reset()

        # If -p options were supplied, we should have processed all of them
        # by now.  Remaining entries suggest -p options that were not merged.
        if use_pub_list and pub_list:
                errors.add(_("the following publishers were not found in "
                    "source repositories: {0}").format(" ".join(pub_list)))

        # If we have encountered errors for some publishers, print them now
        # and exit.
        tracker.flush()
        for message in errors:
                error(message, exitcode=None)
        if errors:
                exit(EXIT_OOPS)

        return EXIT_OK
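# Standalone sketch (stdlib only, not part of pkgmerge) of the coverage check
# performed above: every combination of observed variant values must be
# claimed by at least one -s source, or the merged package would have a hole.
# The variant names and values below are illustrative only.
import itertools

def _first_unclaimed_combo(variant_list, vcombos):
        """Return the first variant combination no source covers, else None.

        'variant_list' is a list of {variant_name: value} dicts, one per
        source; 'vcombos' maps each variant name to the set of (name, value)
        pairs seen for it, as in main_func() above."""
        for combo in itertools.product(*vcombos.values()):
                for src_vars in variant_list:
                        if all(src_vars.get(vname) == vval
                            for vname, vval in combo):
                                # This source covers the combination.
                                break
                else:
                        # No source matched every (name, value) pair.
                        return combo
        return None

# Example: one source covers sparc only, so the i386 combination is reported:
#   _first_unclaimed_combo(
#       [{"variant.arch": "sparc"}],
#       {"variant.arch": set([("variant.arch", "sparc"),
#           ("variant.arch", "i386")])})
#   -> (("variant.arch", "i386"),)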
def main_func():
        global temp_root, repo_modified, repo_finished, repo_uri, tracker
        global dry_run

        global_settings.client_name = PKG_CLIENT_NAME

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "?c:i:np:r:s:u",
                    ["help"])
        except getopt.GetoptError as e:
                usage(_("illegal option -- {0}").format(e.opt))

        dry_run = False
        ref_repo_uri = None
        repo_uri = os.getenv("PKG_REPO", None)
        changes = set()
        ignores = set()
        publishers = set()
        cmp_policy = CMP_ALL

        processed_pubs = 0

        for opt, arg in opts:
                if opt == "-c":
                        changes.add(arg)
                elif opt == "-i":
                        ignores.add(arg)
                elif opt == "-n":
                        dry_run = True
                elif opt == "-p":
                        publishers.add(arg)
                elif opt == "-r":
                        ref_repo_uri = misc.parse_uri(arg)
                elif opt == "-s":
                        repo_uri = misc.parse_uri(arg)
                elif opt == "-u":
                        cmp_policy = CMP_UNSIGNED
                elif opt == "-?" or opt == "--help":
                        usage(retcode=pkgdefs.EXIT_OK)

        if pargs:
                usage(_("Unexpected argument(s): {0}").format(" ".join(pargs)))

        if not repo_uri:
                usage(_("A target repository must be provided."))

        if not ref_repo_uri:
                usage(_("A reference repository must be provided."))

        target = publisher.RepositoryURI(misc.parse_uri(repo_uri))
        if target.scheme != "file":
                abort(err=_("Target repository must be filesystem-based."))
        try:
                target_repo = sr.Repository(read_only=dry_run,
                    root=target.get_pathname())
        except sr.RepositoryError as e:
                abort(str(e))

        # Use the tmp directory in target repo for efficient file rename since
        # files are in the same file system.
        temp_root = target_repo.temp_root
        if not os.path.exists(temp_root):
                os.makedirs(temp_root)

        ref_incoming_dir = tempfile.mkdtemp(dir=temp_root)
        ref_pkg_root = tempfile.mkdtemp(dir=temp_root)

        ref_xport, ref_xport_cfg = transport.setup_transport()
        ref_xport_cfg.incoming_root = ref_incoming_dir
        ref_xport_cfg.pkg_root = ref_pkg_root
        transport.setup_publisher(ref_repo_uri, "ref", ref_xport,
            ref_xport_cfg, remote_prefix=True)

        ref_repo = None
        ref = publisher.RepositoryURI(misc.parse_uri(ref_repo_uri))
        if ref.scheme == "file":
                try:
                        ref_repo = sr.Repository(read_only=dry_run,
                            root=ref.get_pathname())
                except sr.RepositoryError as e:
                        abort(str(e))

        tracker = get_tracker()

        for pub in target_repo.publishers:
                if publishers and pub not in publishers \
                    and '*' not in publishers:
                        continue

                msg(_("Processing packages for publisher {0} ...").format(pub))
                # Find the matching pub in the ref repo.
                for ref_pub in ref_xport_cfg.gen_publishers():
                        if ref_pub.prefix == pub:
                                found = True
                                break
                else:
                        txt = _("Publisher {0} not found in reference "
                            "repository.").format(pub)
                        if publishers:
                                abort(err=txt)
                        else:
                                txt += _(" Skipping.")
                                msg(txt)
                        continue

                processed_pubs += 1

                rev = do_reversion(pub, ref_pub, target_repo, ref_xport,
                    changes, ignores, cmp_policy, ref_repo, ref,
                    ref_xport_cfg)

                # Only rebuild catalog if anything got actually reversioned.
                if rev and not dry_run:
                        msg(_("Rebuilding repository catalog."))
                        target_repo.rebuild(pub=pub)
                repo_finished = True

        ret = pkgdefs.EXIT_OK
        if processed_pubs == 0:
                msg(_("No matching publishers could be found."))
                ret = pkgdefs.EXIT_OOPS
        cleanup()
        return ret
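# Side note (standalone sketch, not part of pkgsurf): the reference-publisher
# lookup above uses Python's for/else - the else branch runs only when the
# loop completes without hitting break, i.e. when no ref_pub prefix matched.
# Equivalent logic with plain strings instead of publisher objects:
def _find_ref_prefix(pub, ref_prefixes):
        for prefix in ref_prefixes:
                if prefix == pub:
                        found = prefix
                        break
        else:
                # Loop finished without break: no matching publisher.
                found = None
        return found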
def main_func(): global_settings.client_name = "pkgsign" try: opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:D:", ["help", "no-index", "no-catalog"]) except getopt.GetoptError as e: usage(_("illegal global option -- {0}").format(e.opt)) show_usage = False sig_alg = "rsa-sha256" cert_path = None key_path = None chain_certs = [] add_to_catalog = True set_alg = False dry_run = False repo_uri = os.getenv("PKG_REPO", None) for opt, arg in opts: if opt == "-a": sig_alg = arg set_alg = True elif opt == "-c": cert_path = os.path.abspath(arg) if not os.path.isfile(cert_path): usage(_("{0} was expected to be a certificate " "but isn't a file.").format(cert_path)) elif opt == "-i": p = os.path.abspath(arg) if not os.path.isfile(p): usage(_("{0} was expected to be a certificate " "but isn't a file.").format(p)) chain_certs.append(p) elif opt == "-k": key_path = os.path.abspath(arg) if not os.path.isfile(key_path): usage(_("{0} was expected to be a key file " "but isn't a file.").format(key_path)) elif opt == "-n": dry_run = True elif opt == "-s": repo_uri = misc.parse_uri(arg) elif opt == "--help": show_usage = True elif opt == "--no-catalog": add_to_catalog = False elif opt == "-D": try: key, value = arg.split("=", 1) DebugValues.set_value(key, value) except (AttributeError, ValueError): error(_("{opt} takes argument of form " "name=value, not {arg}").format( opt=opt, arg=arg)) if show_usage: usage(retcode=EXIT_OK) if not repo_uri: usage(_("a repository must be provided")) if key_path and not cert_path: usage(_("If a key is given to sign with, its associated " "certificate must be given.")) if cert_path and not key_path: usage(_("If a certificate is given, its associated key must be " "given.")) if chain_certs and not cert_path: usage(_("Intermediate certificates are only valid if a key " "and certificate are also provided.")) if not pargs: usage(_("At least one fmri or pattern must be provided to " "sign.")) if not set_alg and not key_path: sig_alg = "sha256" s, h = actions.signature.SignatureAction.decompose_sig_alg(sig_alg) if h is None: usage(_("{0} is not a recognized signature algorithm.").format( sig_alg)) if s and not key_path: usage(_("Using {0} as the signature algorithm requires that a " "key and certificate pair be presented using the -k and -c " "options.").format(sig_alg)) if not s and key_path: usage(_("The {0} hash algorithm does not use a key or " "certificate. Do not use the -k or -c options with this " "algorithm.").format(sig_alg)) if DebugValues: reload(digest) errors = [] t = misc.config_temp_root() temp_root = tempfile.mkdtemp(dir=t) del t cache_dir = tempfile.mkdtemp(dir=temp_root) incoming_dir = tempfile.mkdtemp(dir=temp_root) chash_dir = tempfile.mkdtemp(dir=temp_root) cert_dir = tempfile.mkdtemp(dir=temp_root) try: chain_certs = [ __make_tmp_cert(cert_dir, c) for c in chain_certs ] if cert_path is not None: cert_path = __make_tmp_cert(cert_dir, cert_path) xport, xport_cfg = transport.setup_transport() xport_cfg.add_cache(cache_dir, readonly=False) xport_cfg.incoming_root = incoming_dir # Configure publisher(s) transport.setup_publisher(repo_uri, "source", xport, xport_cfg, remote_prefix=True) pats = pargs successful_publish = False concrete_fmris = [] unmatched_pats = set(pats) all_pats = frozenset(pats) get_all_pubs = False pub_prefs = set() # Gather the publishers whose catalogs will be needed. 
for pat in pats: try: p_obj = fmri.MatchingPkgFmri(pat) except fmri.IllegalMatchingFmri as e: errors.append(e) continue pub_prefix = p_obj.get_publisher() if pub_prefix: pub_prefs.add(pub_prefix) else: get_all_pubs = True # Check each publisher for matches to our patterns. for p in xport_cfg.gen_publishers(): if not get_all_pubs and p.prefix not in pub_prefs: continue cat = fetch_catalog(p, xport, temp_root) ms, tmp1, u = cat.get_matching_fmris(pats) # Find which patterns matched. matched_pats = all_pats - u # Remove those patterns from the unmatched set. unmatched_pats -= matched_pats for v_list in ms.values(): concrete_fmris.extend([(v, p) for v in v_list]) if unmatched_pats: raise api_errors.PackageMatchErrors( unmatched_fmris=unmatched_pats) for pfmri, src_pub in sorted(set(concrete_fmris)): try: # Get the existing manifest for the package to # be signed. m_str = xport.get_manifest(pfmri, content_only=True, pub=src_pub) m = manifest.Manifest() m.set_content(content=m_str) # Construct the base signature action. attrs = { "algorithm": sig_alg } a = actions.signature.SignatureAction(cert_path, **attrs) a.hash = cert_path # Add the action to the manifest to be signed # since the action signs itself. m.add_action(a, misc.EmptyI) # Set the signature value and certificate # information for the signature action. a.set_signature(m.gen_actions(), key_path=key_path, chain_paths=chain_certs, chash_dir=chash_dir) # The hash of 'a' is currently a path, we need # to find the hash of that file to allow # comparison to existing signatures. hsh = None if cert_path: # Action identity still uses the 'hash' # member of the action, so we need to # stay with the sha1 hash. hsh, _dummy = \ misc.get_data_digest(cert_path, hash_func=hashlib.sha1) # Check whether the signature about to be added # is identical, or almost identical, to existing # signatures on the package. Because 'a' has # already been added to the manifest, it is # generated by gen_actions_by_type, so the cnt # must be 2 or higher to be an issue. cnt = 0 almost_identical = False for a2 in m.gen_actions_by_type("signature"): try: if a.identical(a2, hsh): cnt += 1 except api_errors.AlmostIdentical as e: e.pkg = pfmri errors.append(e) almost_identical = True if almost_identical: continue if cnt == 2: continue elif cnt > 2: raise api_errors.DuplicateSignaturesAlreadyExist(pfmri) assert cnt == 1, "Cnt was:{0}".format(cnt) if not dry_run: # Append the finished signature action # to the published manifest. t = trans.Transaction(repo_uri, pkg_name=str(pfmri), xport=xport, pub=src_pub) try: t.append() t.add(a) for c in chain_certs: t.add_file(c) t.close(add_to_catalog= add_to_catalog) except: if t.trans_id: t.close(abandon=True) raise msg(_("Signed {0}").format(pfmri.get_fmri( include_build=False))) successful_publish = True except (api_errors.ApiException, fmri.FmriError, trans.TransactionError) as e: errors.append(e) if errors: error("\n".join([str(e) for e in errors])) if successful_publish: return EXIT_PARTIAL else: return EXIT_OOPS return EXIT_OK except api_errors.ApiException as e: error(e) return EXIT_OOPS finally: shutil.rmtree(temp_root)
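# Standalone sketch (not part of pkgsign) of the duplicate-signature check in
# the loop above: the new action was already added to the manifest before the
# scan, so it always matches itself once.  Hence a count of 1 means "sign",
# 2 means an identical signature already exists (skip), and anything higher
# means duplicates were already present (error).
def _classify_signature_count(cnt):
        if cnt == 1:
                return "sign"    # only the new action matched itself
        if cnt == 2:
                return "skip"    # one identical signature already present
        return "error"           # pre-existing duplicate signatures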
if len(varlist) < 2:
        usage(_("at least two -v arguments needed to merge"))

if not basedir:
        basedir = os.getcwd()

incomingdir = os.path.normpath(os.path.join(basedir,
    "incoming-%d" % os.getpid()))
os.makedirs(incomingdir)
tmpdirs.append(incomingdir)

server_list = [
    publisher.RepositoryURI(v.split(",", 1)[1])
    for v in varlist
]

xport, xport_cfg = transport.setup_transport()
xport_cfg.incoming_root = incomingdir
pub = transport.setup_publisher(server_list, "merge", xport,
    xport_cfg, remote_prefix=True)

if len(pargs) == 1:
        recursive = False
        overall_set = set()
        for s in server_list:
                for name in get_all_pkg_names(s):
                        overall_set.add(name)
        fmri_arguments = list(overall_set)
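# Sketch (stdlib only; values hypothetical) of how each -v argument above is
# split: the text before the first comma selects the variant value, the
# remainder is the repository URI handed to publisher.RepositoryURI().
def _split_varlist(varlist):
        return [tuple(v.split(",", 1)) for v in varlist]

# _split_varlist(["arch=sparc,http://src1.example.com/"])
# -> [("arch=sparc", "http://src1.example.com/")]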
else:
        src_uri = misc.parse_uri(src_uri)

if not cache_dir:
        cache_dir = tempfile.mkdtemp(dir=temp_root,
            prefix=global_settings.client_name + "-")
        # Only clean-up cache dir if implicitly created by pkgrecv.
        # User's cache-dirs should be preserved
        tmpdirs.append(cache_dir)

incoming_dir = tempfile.mkdtemp(dir=temp_root,
    prefix=global_settings.client_name + "-")
tmpdirs.append(incoming_dir)

# Create transport and transport config
xport, xport_cfg = transport.setup_transport()
xport_cfg.add_cache(cache_dir, readonly=False)
xport_cfg.incoming_root = incoming_dir

# Since publication destinations may only have one repository configured
# per publisher, create destination as separate transport in case source
# and destination have identical publisher configuration but different
# repository endpoints.
dest_xport, dest_xport_cfg = transport.setup_transport()
dest_xport_cfg.add_cache(cache_dir, readonly=False)
dest_xport_cfg.incoming_root = incoming_dir

# Configure src publisher(s).
transport.setup_publisher(src_uri, "source", xport, xport_cfg,
    remote_prefix=True, ssl_key=key, ssl_cert=cert)
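# Illustration (assumes the pkg.client.transport API used above; the URIs are
# hypothetical): source and destination get separate transports because a
# publisher prefix is unique per transport, yet the same prefix may point at
# different repository endpoints on each side.
#
#   xport, xport_cfg = transport.setup_transport()
#   dest_xport, dest_xport_cfg = transport.setup_transport()
#   transport.setup_publisher("http://src.example.com/", "example",
#       xport, xport_cfg, remote_prefix=True)
#   transport.setup_publisher("http://dst.example.com/", "example",
#       dest_xport, dest_xport_cfg, remote_prefix=True)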