def _get_image(image_dir):
    """Return a pkg.client.api.ImageInterface for the provided image
    directory.

    An empty/None 'image_dir' defaults to "/".  Raises SysrepoException
    if the image cannot be opened.
    """

    # ImageInterface construction changes the process cwd; remember it
    # so it can be restored however we leave this function.
    cdir = os.getcwd()
    if not image_dir:
        image_dir = "/"
    api_inst = None
    tracker = progress.QuietProgressTracker()
    try:
        api_inst = pkg.client.api.ImageInterface(
            image_dir, CLIENT_API_VERSION, tracker, None, PKG_CLIENT_NAME)
        if api_inst.root != image_dir:
            msg(_("Problem getting image at {0}").format(
                image_dir))
    except Exception as err:
        raise SysrepoException(
            _("Unable to get image at {dir}: {reason}").format(
            dir=image_dir, reason=str(err)))
    finally:
        # Restore the current directory, which ImageInterface had
        # changed.  Previously this was skipped on the exception
        # path, leaving the caller in an unexpected cwd.
        os.chdir(cdir)
    return api_inst
def trans_append(repo_uri, args):
    """DEPRECATED"""
    opts, pargs = getopt.getopt(args, "en")

    # -e/-n select shell-eval output form; giving both is an error.
    seen = [opt for opt, arg in opts]
    eval_form = True
    for opt in seen:
        if opt == "-e":
            eval_form = True
        elif opt == "-n":
            eval_form = False

    if "-e" in seen and "-n" in seen:
        usage(_("only -e or -n may be specified"), cmd="open")
    if len(pargs) != 1:
        usage(_("append requires one package name"), cmd="open")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, pkg_name=pargs[0], xport=xport,
        pub=pub)
    if eval_form:
        msg("export PKG_TRANS_ID={0}".format(t.append()))
    else:
        msg(t.append())
    return EXIT_OK
def refresh_conf(repo_info, log_dir, host, port, runtime_dir,
        template_dir, cache_dir, cache_size, sroot, fragment=False,
        allow_refresh=False):
    """Creates a new configuration for the depot.

    'repo_info' is a list of (repository root dir, repository prefix)
    tuples; raises DepotException when it is empty or when publisher
    information cannot be obtained.
    """
    try:
        ret = EXIT_OK
        cleanup_conf(runtime_dir=runtime_dir)
        if not repo_info:
            raise DepotException(_("no repositories found"))

        htdocs_path = os.path.join(runtime_dir, DEPOT_HTDOCS_DIRNAME,
            sroot)
        misc.makedirs(htdocs_path)

        # pubs and default_pubs are lists of tuples of the form:
        # (publisher prefix, repository root dir, repository prefix)
        pubs = []
        default_pubs = []
        repo_prefixes = [prefix for root, prefix in repo_info]
        errors = []

        # Query each repository for its publisher information.
        for (repo_root, repo_prefix) in repo_info:
            try:
                publishers, default_pub = \
                    _get_publishers(repo_root)
                for pub in publishers:
                    pubs.append(
                        (pub, repo_root, repo_prefix))
                default_pubs.append((default_pub, repo_root,
                    repo_prefix))
            # Modernized from the Python-2-only "except X, err" form;
            # the rest of this file already uses "as".
            except DepotException as err:
                errors.append(str(err))
        if errors:
            raise DepotException(_("Unable to get publisher "
                "information: %s") % "\n".join(errors))

        # Write the publisher/0 response for each repository
        pubs_by_repo = {}
        for pub_prefix, repo_root, repo_prefix in pubs:
            pubs_by_repo.setdefault(repo_prefix, []).append(
                pub_prefix)
        for repo_prefix in pubs_by_repo:
            _write_publisher_response(
                pubs_by_repo[repo_prefix], htdocs_path, repo_prefix)

        _write_httpd_conf(pubs, default_pubs, runtime_dir, log_dir,
            template_dir, cache_dir, cache_size, host, port, sroot,
            fragment=fragment, allow_refresh=allow_refresh)
        _write_versions_response(htdocs_path, fragment=fragment)
        # If we're writing a configuration fragment, then the web server
        # is probably not running as DEPOT_USER:DEPOT_GROUP
        if not fragment:
            _chown_dir(runtime_dir)
            _chown_dir(cache_dir)
        else:
            msg(_("Created %s/depot.conf") % runtime_dir)
    except DepotException:
        # NOTE(review): the handler for this try block was missing in
        # the source as received (a bare "try:" is a syntax error).
        # Re-raising preserves exception propagation while keeping the
        # function syntactically valid; restore the original handler
        # from version control.
        raise
    return ret
def dump(self):
    """Write the repo statistics to stdout."""
    # printf-style header/row formats; CSpeed is rendered as a float.
    hdr = "%-21s %-6s %-4s %-4s %-8s %-10s %-5s %-7s %-4s"
    row = "%-21s %-6s %-4s %-4s %-8s %-10s %-5s %-6f %-4s"
    misc.msg(hdr % ("URL", "Good", "Err", "Conn", "Speed", "Size",
        "Used", "CSpeed", "Qual"))
    for ds in self.__rsobj.values():
        speedstr = misc.bytes_to_str(ds.transfer_speed,
            "%(num).0f %(unit)s/s")
        sizestr = misc.bytes_to_str(ds.bytes_xfr)
        parts = urlparse.urlsplit(ds.url)
        # Prefer the netloc; some schemes don't have one, in which
        # case fall back to the path component.
        label = parts[1] or parts[2]
        misc.msg(row % (label, ds.success, ds.failures,
            ds.num_connect, speedstr, sizestr, ds.used,
            ds.connect_time, ds.quality))
def restore_image(self):
    """Restore a failed image-update attempt."""
    self.__reset_image_state(failure=True)

    # Leave the clone around for debugging purposes if we're
    # operating on the live BE.
    if self.is_live_BE:
        emsg(_(" The running system has not been modified. "
            "Modifications were only made to a clone of the "
            "running system. This clone is mounted at %s "
            "should you wish to inspect it.") % self.clone_dir)
    else:
        # Rollback and destroy the snapshot.
        try:
            if be.beRollback(self.be_name,
                self.snapshot_name) != 0:
                emsg(_("pkg: unable to rollback BE %s "
                    "and restore image") % self.be_name)
            self.destroy_snapshot()
            os.rmdir(self.clone_dir)
        # Modernized from the Python-2-only "except X, e" form; the
        # file already uses "as" elsewhere.
        except Exception as e:
            self.img.history.log_operation_error(error=e)
            raise e
        msg(_("%s failed to be updated. No changes have been "
            "made to %s.") % (self.be_name, self.be_name))
def trans_close(repo_uri, args):
    """DEPRECATED"""
    abandon = False
    trans_id = None
    add_to_catalog = True

    # --no-index is accepted but silently ignored: publication no
    # longer builds search indexes automatically.
    opts, pargs = getopt.getopt(args, "At:", ["no-index", "no-catalog"])
    for opt, arg in opts:
        if opt == "-A":
            abandon = True
        elif opt == "-t":
            trans_id = arg
        elif opt == "--no-catalog":
            add_to_catalog = False

    # Fall back on the environment when -t wasn't given.
    if trans_id is None:
        trans_id = os.environ.get("PKG_TRANS_ID")
        if trans_id is None:
            usage(_("No transaction ID specified using -t or in "
                "$PKG_TRANS_ID."), cmd="close")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
        pub=pub)
    pkg_state, pkg_fmri = t.close(abandon=abandon,
        add_to_catalog=add_to_catalog)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return EXIT_OK
def trans_close(repo_uri, args):
    """DEPRECATED"""
    abandon = False
    trans_id = None
    add_to_catalog = True

    # --no-index is accepted but ignored: publication no longer builds
    # search indexes automatically.
    opts, pargs = getopt.getopt(args, "At:", ["no-index", "no-catalog"])
    for opt, arg in opts:
        if opt == "-A":
            abandon = True
        elif opt == "-t":
            trans_id = arg
        elif opt == "--no-catalog":
            add_to_catalog = False

    # If -t wasn't supplied, consult the environment.
    if trans_id is None:
        trans_id = os.environ.get("PKG_TRANS_ID")
        if trans_id is None:
            usage(_("No transaction ID specified using -t or in "
                "$PKG_TRANS_ID."), cmd="close")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
        pub=pub)
    pkg_state, pkg_fmri = t.close(abandon=abandon,
        add_to_catalog=add_to_catalog)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return 0
def activate_live_be(cmd): cmd += [self.clone_dir] # Activate the clone. exec_cmd(cmd) if be.beActivate(self.be_name_clone) != 0: emsg(_("pkg: unable to activate %s") \ % self.be_name_clone) return # Consider the last operation a success, and log it as # ending here so that it will be recorded in the new # image's history. self.img.history.log_operation_end() if be.beUnmount(self.be_name_clone) != 0: emsg(_("pkg: unable to unmount %s") \ % self.clone_dir) return os.rmdir(self.clone_dir) msg(_(""" A clone of %s exists and has been updated and activated. On the next boot the Boot Environment %s will be mounted on '/'. Reboot when ready to switch to this updated BE. """) % \ (self.be_name, self.be_name_clone))
def trans_publish(repo_uri, fargs):
    """Open a transaction for the named fmri, include the given
    content, and close it; abandon the transaction if inclusion
    fails."""
    error_occurred = False
    opts, pargs = getopt.getopt(fargs, "-d:")

    # Collect -d options verbatim so they can be replayed to
    # trans_include below.
    include_opts = []
    for opt, arg in opts:
        if opt == "-d":
            include_opts.extend([opt, arg])

    if not pargs:
        usage(_("No fmri argument specified for subcommand"),
            cmd="publish")

    t = trans.Transaction(repo_uri, pkg_name=pargs[0])
    t.open()
    del pargs[0]

    # A non-zero result from trans_include means the content could not
    # be added, so the transaction must be abandoned.
    abandon = bool(trans_include(repo_uri, include_opts + pargs,
        transaction=t))

    pkg_state, pkg_fmri = t.close(abandon=abandon)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)

    return 1 if abandon else 0
def trans_append(repo_uri, args):
    """DEPRECATED"""
    opts, pargs = getopt.getopt(args, "en")

    # Track which of -e/-n appeared; specifying both is rejected.
    flags = [opt for opt, arg in opts]
    eval_form = True
    for opt in flags:
        if opt == "-e":
            eval_form = True
        elif opt == "-n":
            eval_form = False

    if "-e" in flags and "-n" in flags:
        usage(_("only -e or -n may be specified"), cmd="open")
    if len(pargs) != 1:
        usage(_("append requires one package name"), cmd="open")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, pkg_name=pargs[0], xport=xport,
        pub=pub)
    if eval_form:
        msg("export PKG_TRANS_ID=%s" % t.append())
    else:
        msg(t.append())
    return 0
def trans_open(repo_uri, args):
    """Open a new transaction for the named package, printing the
    transaction ID (optionally in shell-eval form)."""
    opts, pargs = getopt.getopt(args, "en")

    flags = [opt for opt, arg in opts]
    eval_form = True
    for opt in flags:
        if opt == "-e":
            eval_form = True
        elif opt == "-n":
            eval_form = False

    if "-e" in flags and "-n" in flags:
        usage(_("only -e or -n may be specified"), cmd="open")
    if len(pargs) != 1:
        usage(_("open requires one package name"), cmd="open")

    t = trans.Transaction(repo_uri, pkg_name=pargs[0])
    if eval_form:
        msg("export PKG_TRANS_ID=%s" % t.open())
    else:
        msg(t.open())
    return 0
def trans_close(repo_uri, args):
    """Close (or abandon, with -A) the transaction named by -t or
    $PKG_TRANS_ID."""
    abandon = False
    trans_id = None

    opts, pargs = getopt.getopt(args, "At:")
    for opt, arg in opts:
        if opt == "-A":
            abandon = True
        elif opt == "-t":
            trans_id = arg

    if trans_id is None:
        trans_id = os.environ.get("PKG_TRANS_ID")
        if trans_id is None:
            usage(_("No transaction ID specified using -t or in "
                "$PKG_TRANS_ID."), cmd="close")

    t = trans.Transaction(repo_uri, trans_id=trans_id)
    pkg_state, pkg_fmri = t.close(abandon)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return 0
def add_pkg_plan(self, pfmri):
    """add a pkg plan to imageplan for fully evaluated frmi"""
    m = self.image.get_manifest(pfmri)
    pp = pkgplan.PkgPlan(self.image, self.progtrack, \
        self.check_cancelation)

    if self.old_excludes != self.new_excludes:
        # The exclude set changed: reinstall if the package's files
        # are already present, otherwise plan a fresh install.
        if self.image.install_file_present(pfmri):
            pp.propose_reinstall(pfmri, m)
        else:
            pp.propose_destination(pfmri, m)
    else:
        try:
            pp.propose_destination(pfmri, m)
        except RuntimeError:
            # Already installed; nothing to plan for this fmri.
            msg("pkg: %s already installed" % pfmri)
            return

    pp.evaluate(self.old_excludes, self.new_excludes)

    if pp.origin_fmri:
        self.target_update_count += 1
    else:
        # NOTE(review): "insall" looks like a typo for "install",
        # but renaming the attribute would break any other code that
        # reads self.target_insall_count — confirm before fixing.
        self.target_insall_count += 1

    self.pkg_plans.append(pp)
def restore_image(self):
    """Restore a failed image-update attempt."""
    self.__reset_image_state(failure=True)

    # Leave the clone around for debugging purposes if we're
    # operating on the live BE.
    if self.is_live_BE:
        emsg(_(" The running system has not been modified. "
            "Modifications were only made to a clone of the "
            "running system. This clone is mounted at %s "
            "should you wish to inspect it.") % self.clone_dir)
    else:
        # Rollback and destroy the snapshot.
        try:
            if be.beRollback(self.be_name,
                self.snapshot_name) != 0:
                emsg(_("pkg: unable to rollback BE %s "
                    "and restore image") % self.be_name)
            self.destroy_snapshot()
            os.rmdir(self.clone_dir)
        # Fixed Python-2-only "except X, e" syntax to the "as" form
        # used elsewhere in the file.
        except Exception as e:
            self.img.history.log_operation_error(error=e)
            raise e
        msg(_("%s failed to be updated. No changes have been "
            "made to %s.") % (self.be_name, self.be_name))
def execute_removal(self, src, dest):
    """ handle action removals

    'src' is the installed action being removed; 'dest' is unused for
    removals but keeps the signature parallel with execute_install
    and execute_update."""
    try:
        src.remove(self)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Identify the failing action and package before re-raising.
        msg("Action removal failed for '%s' (%s):\n %s: %s" %
            (src.attrs.get(src.key_attr, id(src)),
            self.origin_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def abort(err=None, retcode=pkgdefs.EXIT_OOPS):
    """To be called when a fatal error is encountered.

    Prints 'err' (if given), cleans up, and exits with 'retcode'."""
    if err:
        # Emit a blank line first so the error isn't glued to any
        # partial output already written.
        msg("")
        error(err)

    cleanup()
    sys.exit(retcode)
def execute_removal(self, src, dest):
    """ handle action removals

    Removes the installed action 'src'; 'dest' is unused here but kept
    for signature symmetry with the install/update executors."""
    try:
        src.remove(self)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Report the failing action and its package, then re-raise.
        msg("Action removal failed for '%s' (%s):\n %s: %s" % \
            (src.attrs.get(src.key_attr, id(src)),
            self.origin_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def execute_install(self, src, dest):
    """ perform action for installation of package

    Installs the new action 'dest'; 'src' is the prior action (if
    any) passed through to the action's install method."""
    try:
        dest.install(self, src)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Report the failing action and its package, then re-raise.
        msg("Action install failed for '%s' (%s):\n %s: %s" % \
            (dest.attrs.get(dest.key_attr, id(dest)),
            self.destination_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def abort(err=None, retcode=1):
    """To be called when a fatal error is encountered.

    Prints 'err' (if given), performs cleanup, and exits with
    'retcode'."""
    if err:
        # Blank line first so the error isn't appended to partial
        # output already on the terminal.
        msg("")
        error(err)

    cleanup(caller_error=True)
    sys.exit(retcode)
def execute_update(self, src, dest):
    """ handle action updates

    Installs the replacement action 'dest' using 'src' as the action
    being upgraded."""
    try:
        dest.install(self, src)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Report the failing action and its package, then re-raise.
        msg("Action upgrade failed for '%s' (%s):\n %s: %s" % \
            (dest.attrs.get(dest.key_attr, id(dest)),
            self.destination_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def activate_be(cmd):
    # Finish activating a (non-live) BE: run the prepared command
    # against the image root, then clean up the pre-update snapshot
    # and the clone's mount point.
    cmd += [self.root]
    exec_cmd(cmd)

    msg(_("%s has been updated successfully") % \
        (self.be_name))

    os.rmdir(self.clone_dir)
    self.destroy_snapshot()
def usage(usage_error=None, retcode=2):
    """Emit a usage message and optionally prefix it with a more
    specific error message.  Causes program to exit."""

    if usage_error:
        error(usage_error)

    msg(_("""\
Usage:
        pkgrecv [-s src_repo_uri] [-d (path|dest_uri)] [-k] [-m] [-n] [-r]
            (fmri|pattern) ...
        pkgrecv [-s src_repo_uri] -n

Options:
        -d path_or_uri  The path of a directory to save the retrieved package
                        to, or the URI of a repository to republish it to.  If
                        not provided, the default value is the current working
                        directory.  If a directory path is provided, then
                        package content will only be retrieved if it does not
                        already exist in the target directory.  If a repository
                        URI is provided, a temporary directory will be created
                        and all of the package data retrieved before attempting
                        to republish it.

        -h              Display this usage message.
        -k              Keep the retrieved package content compressed, ignored
                        when republishing.  Should not be used with pkgsend.
        -m match        Controls matching behaviour using the following values:
                            all-timestamps
                                includes all matching timestamps, not just
                                latest (implies all-versions)
                            all-versions
                                includes all matching versions, not just latest
        -n              List the most recent versions of the packages available
                        from the specified repository and exit (all other
                        options except -s will be ignored).
        -r              Recursively evaluates all dependencies for the provided
                        list of packages and adds them to the list.
        -s src_repo_uri A URI representing the location of a pkg(5)
                        repository to retrieve package data from.

Environment:
        PKG_DEST        Destination directory or repository URI
        PKG_SRC         Source repository URI"""))
    sys.exit(retcode)
def usage(usage_error=None, cmd=None, retcode=EXIT_BADOPT):
    """Emit a usage message and optionally prefix it with a more
    specific error message.  Causes program to exit.
    """

    if usage_error:
        error(usage_error, cmd=cmd)

    msg(_("""\
Usage:
        pkg.sysrepo -p <port> [-R image_root] [ -c cache_dir] [-h hostname]
                [-l logs_dir] [-r runtime_dir] [-s cache_size] [-t template_dir]
                [-T http_timeout] [-w http_proxy] [-W https_proxy]
     """))
    sys.exit(retcode)
def execute_update(self, src, dest):
    """ handle action updates

    Upgrades by installing 'dest' with 'src' as the action being
    replaced."""
    try:
        dest.install(self, src)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Identify the failing action and package before re-raising.
        msg("Action upgrade failed for '%s' (%s):\n %s: %s" %
            (dest.attrs.get(dest.key_attr, id(dest)),
            self.destination_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def execute_install(self, src, dest):
    """ perform action for installation of package

    Installs 'dest', passing the prior action 'src' through to the
    action's install method."""
    try:
        dest.install(self, src)
    # Fixed Python-2-only "except X, e" syntax to the "as" form used
    # elsewhere in the file.
    except Exception as e:
        # Identify the failing action and package before re-raising.
        msg("Action install failed for '%s' (%s):\n %s: %s" %
            (dest.attrs.get(dest.key_attr, id(dest)),
            self.destination_fmri.get_pkg_stem(),
            e.__class__.__name__, e))
        raise
def usage(usage_error=None, cmd=None, retcode=EXIT_BADOPT):
    """Emit a usage message and optionally prefix it with a more
    specific error message.  Causes program to exit.
    """

    if usage_error:
        error(usage_error, cmd=cmd)

    msg(_("""\
Usage:
        pkg.depot-config ( -d repository_dir | -S ) -r runtime_dir
                [-c cache_dir] [-s cache_size] [-p port] [-h hostname]
                [-l logs_dir] [-T template_dir] [-A]
                [-t server_type] ( [-F] [-P server_prefix] )
"""))
    sys.exit(retcode)
def _follow_redirects(uri_list, http_timeout):
    """ Follow HTTP redirects from servers.  Needed so that we can create
    RewriteRules for all repository URLs that pkg clients may encounter.

    We return a sorted list of URIs that were found having followed all
    redirects in 'uri_list'. We also return a boolean, True if we timed out
    when following any of the URIs.
    """
    ret_uris = set(uri_list)
    timed_out = False

    class SysrepoRedirectHandler(HTTPRedirectHandler):
        """ A HTTPRedirectHandler that saves URIs we've been redirected to
        along the path to our eventual destination."""
        def __init__(self):
            self.redirects = set()

        def redirect_request(self, req, fp, code, msg, hdrs, newurl):
            # Record each intermediate URL before delegating to the
            # default redirect handling.
            self.redirects.add(newurl)
            return HTTPRedirectHandler.redirect_request(
                self, req, fp, code, msg, hdrs, newurl)

    for uri in uri_list:
        handler = SysrepoRedirectHandler()
        opener = build_opener(handler)
        # Only http: URIs are probed; everything else is kept as-is.
        if not uri.startswith("http:"):
            ret_uris.update([uri])
            continue

        # otherwise, open a known url to check for redirects
        try:
            opener.open("{0}/versions/0".format(uri), None,
                http_timeout)
            # Strip the probe suffix off each redirect target so we
            # record repository roots, not the versions/0 endpoint.
            ret_uris.update(
                set([
                    item.replace("/versions/0", "").rstrip("/")
                    for item in handler.redirects
                ]))
        except URLError as err:
            # We need to log this, and carry on - the url
            # could become available at a later date.
            # NOTE(review): timed_out is set for *any* URLError,
            # not only timeouts — confirm that is intended.
            msg(
                _("WARNING: unable to access {uri} when checking "
                "for redirects: {err}").format(**locals()))
            timed_out = True

    return sorted(list(ret_uris)), timed_out
def usage(errmsg="", exitcode=2):
    """Emit a usage message and optionally prefix it with a more specific
    error message.  Causes program to exit."""

    if errmsg:
        emsg("pkgmerge: {0}".format(errmsg))

    msg(_("""\
Usage:
        pkgmerge [-n] -d dest_repo -s variant=value[,...],src_repo ...
            [-p publisher_prefix ... ] [pkg_fmri_pattern ...]

Options:
        -d dest_repo
                The filesystem path or URI of the target repository to
                publish the merged packages to.  The target repository
                must already exist; new repositories can be created using
                pkgrepo(1).

        -n
                Perform a trial run with no changes made to the target
                repository.

        -s variant=value,src_repo
                The variant name and value to use for packages from this
                source, followed by the filesystem path or URI of the
                source repository or package archive to retrieve packages
                from.  Multiple variants may be specified separated by
                commas.  The same variants must be named for all sources.
                This option may be specified multiple times.

        -p publisher_prefix
                The name of the publisher we should merge packages from.
                This option may be specified multiple times.  If no -p
                option is used, the default is to merge packages from all
                publishers in all source repositories.

        --help or -?
                Displays a usage message.

Environment:
        TMPDIR, TEMP, TMP
                The absolute path of the directory where temporary data
                should be stored during program execution.
"""))
    sys.exit(exitcode)
def evaluate_fmri_removal(self, pfmri):
    """Plan the removal of 'pfmri', recursively planning removal of
    its installed dependents when recursive_removal is set; raises
    NonLeafPackageException otherwise."""
    # prob. needs breaking up as well
    assert self.image.has_manifest(pfmri)

    self.progtrack.evaluate_progress(pfmri)

    dependents = set(self.image.get_dependents(pfmri,
        self.progtrack))

    # Don't consider those dependencies already being removed in
    # this imageplan transaction.
    dependents = dependents.difference(self.target_rem_fmris)

    if dependents and not self.recursive_removal:
        raise api_errors.NonLeafPackageException(pfmri, dependents)

    pp = pkgplan.PkgPlan(self.image, self.progtrack, \
        self.check_cancelation)

    self.image.state.set_target(pfmri, self.__intent)
    m = self.image.get_manifest(pfmri)

    try:
        pp.propose_removal(pfmri, m)
    except RuntimeError:
        # Not installed; clear the recorded intent and bail out.
        self.image.state.set_target()
        msg("pkg %s not installed" % pfmri)
        return

    pp.evaluate([], self.old_excludes)

    for d in dependents:
        # Skip dependents already proposed for removal or not
        # actually installed.
        if self.is_proposed_rem_fmri(d):
            continue
        if not self.image.has_version_installed(d):
            continue
        self.target_rem_fmris.append(d)
        self.progtrack.evaluate_progress(d)
        self.evaluate_fmri_removal(d)

    # Post-order append will ensure topological sorting for acyclic
    # dependency graphs. Cycles need to be arbitrarily broken, and
    # are done so in the loop above.
    self.pkg_plans.append(pp)
    self.image.state.set_target()
def usage(errmsg="", exitcode=2):
    """Emit a usage message and optionally prefix it with a more specific
    error message.  Causes program to exit."""

    if errmsg:
        emsg("pkgmerge: %s" % errmsg)

    msg(_("""\
Usage:
        pkgmerge [-n] -d dest_repo -s variant=value[,...],src_repo ...
            [-p publisher_prefix ... ] [pkg_fmri_pattern ...]

Options:
        -d dest_repo
                The filesystem path or URI of the target repository to
                publish the merged packages to.  The target repository
                must already exist; new repositories can be created using
                pkgrepo(1).

        -n
                Perform a trial run with no changes made to the target
                repository.

        -s variant=value,src_repo
                The variant name and value to use for packages from this
                source, followed by the filesystem path or URI of the
                source repository or package archive to retrieve packages
                from.  Multiple variants may be specified separated by
                commas.  The same variants must be named for all sources.
                This option may be specified multiple times.

        -p publisher_prefix
                The name of the publisher we should merge packages from.
                This option may be specified multiple times.  If no -p
                option is used, the default is to merge packages from all
                publishers in all source repositories.

        --help or -?
                Displays a usage message.

Environment:
        TMPDIR, TEMP, TMP
                The absolute path of the directory where temporary data
                should be stored during program execution.
"""))
    sys.exit(exitcode)
def usage(usage_error=None, retcode=2):
    """Emit a usage message and optionally prefix it with a more specific
    error message.  Causes program to exit."""

    if usage_error:
        error(usage_error)

    msg(_("""\
Usage:
        pkgrecv [-s src_repo_uri] [-d (path|dest_uri)] [-k] [-m] [-n] [-r]
            (fmri|pattern) ...
        pkgrecv [-s src_repo_uri] -n

Options:
        -d path_or_uri  The path of a directory to save the retrieved package
                        to, or the URI of a repository to republish it to.  If
                        not provided, the default value is the current working
                        directory.  If a directory path is provided, then
                        package content will only be retrieved if it does not
                        already exist in the target directory.  If a repository
                        URI is provided, a temporary directory will be created
                        and all of the package data retrieved before attempting
                        to republish it.

        -h              Display this usage message.
        -k              Keep the retrieved package content compressed, ignored
                        when republishing.  Should not be used with pkgsend.
        -m match        Controls matching behaviour using the following values:
                            all-timestamps
                                includes all matching timestamps, not just
                                latest (implies all-versions)
                            all-versions
                                includes all matching versions, not just latest
        -n              List the most recent versions of the packages available
                        from the specified repository and exit (all other
                        options except -s will be ignored).
        -r              Recursively evaluates all dependencies for the provided
                        list of packages and adds them to the list.
        -s src_repo_uri A URI representing the location of a pkg(5)
                        repository to retrieve package data from.

Environment:
        PKG_DEST        Destination directory or repository URI
        PKG_SRC         Source repository URI"""))
    sys.exit(retcode)
def dump(self):
    """Write the repo statistics to stdout."""
    # Header and data-row printf-style formats; the URL column is
    # truncated at 41 characters and CSpeed renders as a float.
    hdr = "%-41.41s %-30s %-6s %-4s %-4s %-8s %-10s %-5s %-7s %-4s"
    row = "%-41.41s %-30s %-6s %-4s %-4s %-8s %-10s %-5s %-6f %-4s"
    misc.msg(hdr % ("URL", "Proxy", "Good", "Err", "Conn", "Speed",
        "Size", "Used", "CSpeed", "Qual"))
    for ds in self.__rsobj.values():
        speedstr = misc.bytes_to_str(ds.transfer_speed,
            "%(num).0f %(unit)s/s")
        sizestr = misc.bytes_to_str(ds.bytes_xfr)
        proxy = self.__get_proxy(ds)
        misc.msg(row % (ds.url, proxy, ds.success, ds.failures,
            ds.num_connect, speedstr, sizestr, ds.used,
            ds.connect_time, ds.quality))
def dump(self):
    """Write the repo statistics to stdout."""
    # str.format specs: URL truncated at 41 chars, CSpeed as a float.
    header = ("{0:41.41} {1:30} {2:6} {3:4} {4:4} {5:8} {6:10} "
        "{7:5} {8:7} {9:4}")
    datarow = ("{0:41.41} {1:30} {2:6} {3:4} {4:4} {5:8} {6:10} "
        "{7:5} {8:6f} {9:4}")
    misc.msg(header.format("URL", "Proxy", "Good", "Err", "Conn",
        "Speed", "Size", "Used", "CSpeed", "Qual"))
    for ds in self.__rsobj.values():
        speedstr = misc.bytes_to_str(ds.transfer_speed,
            "{num:>.0f} {unit}/s")
        sizestr = misc.bytes_to_str(ds.bytes_xfr)
        proxy = self.__get_proxy(ds)
        misc.msg(datarow.format(ds.url, proxy, ds.success,
            ds.failures, ds.num_connect, speedstr, sizestr,
            ds.used, ds.connect_time, ds.quality))
def _follow_redirects(uri_list, http_timeout):
    """ Follow HTTP redirects from servers.  Needed so that we can create
    RewriteRules for all repository URLs that pkg clients may encounter.

    We return a sorted list of URIs that were found having followed all
    redirects in 'uri_list'. We also return a boolean, True if we timed out
    when following any of the URIs.
    """
    ret_uris = set(uri_list)
    timed_out = False

    class SysrepoRedirectHandler(urllib2.HTTPRedirectHandler):
        """ A HTTPRedirectHandler that saves URIs we've been redirected
        to along the path to our eventual destination."""
        def __init__(self):
            self.redirects = set()

        def redirect_request(self, req, fp, code, msg, hdrs, newurl):
            # Record each intermediate URL, then delegate to the
            # default redirect handling.
            self.redirects.add(newurl)
            return urllib2.HTTPRedirectHandler.redirect_request(
                self, req, fp, code, msg, hdrs, newurl)

    for uri in uri_list:
        handler = SysrepoRedirectHandler()
        opener = urllib2.build_opener(handler)
        # Only http: URIs are probed; others pass through untouched.
        if not uri.startswith("http:"):
            ret_uris.update([uri])
            continue

        # otherwise, open a known url to check for redirects
        try:
            opener.open("%s/versions/0" % uri, None, http_timeout)
            ret_uris.update(set(
                [item.replace("/versions/0", "").rstrip("/")
                for item in handler.redirects]))
        # Fixed Python-2-only "except X, err" syntax.
        except urllib2.URLError as err:
            # We need to log this, and carry on - the url
            # could become available at a later date.
            msg(_("WARNING: unable to access %(uri)s when checking "
                "for redirects: %(err)s") % locals())
            timed_out = True

    # Previously this function fell off the end and returned None,
    # contradicting its docstring; return the collected URIs and the
    # timeout flag as documented.
    return sorted(list(ret_uris)), timed_out
def usage(usage_error=None, cmd=None, retcode=EXIT_BADOPT):
    """Emit a usage message and optionally prefix it with a more
    specific error message.  Causes program to exit.
    """

    if usage_error:
        error(usage_error, cmd=cmd)

    msg(_("""\
Usage:
        pkg.depot-config ( -d repository_dir | -S ) -r runtime_dir
                [-c cache_dir] [-s cache_size] [-p port] [-h hostname]
                [-l logs_dir] [-T template_dir] [-A]
                [-t server_type] ( ( [-F] [-P server_prefix] ) |
                [--https ( ( --cert server_cert_file --key server_key_file
                [--cert-chain ssl_cert_chain_file] ) |
                --cert-key-dir cert_key_directory ) [ (--ca-cert ca_cert_file
                --ca-key ca_key_file ) ]
                [--smf-fmri smf_pkg_depot_fmri] ] )
"""))
    sys.exit(retcode)
def list_newest_fmris(fmri_list):
    """List the provided fmris."""
    # Group fmris by package name; iterating the sorted input keeps
    # the groups keyed in first-seen (i.e. sorted) order.
    by_name = {}
    for f in sorted(fmri_list):
        by_name.setdefault(f.pkg_name, []).append(f)

    # Keep only the newest (highest-sorting) fmri from each group.
    newest = []
    for versions in by_name.values():
        versions.sort(reverse=True)
        newest.append(versions[0])

    for e in newest:
        msg(e)
def list_newest_fmris(fmri_list):
    """List the provided fmris."""
    # Bucket fmris by package stem, preserving first-seen order of
    # names from the sorted input.
    buckets = {}
    for fmri in sorted(fmri_list):
        if fmri.pkg_name not in buckets:
            buckets[fmri.pkg_name] = []
        buckets[fmri.pkg_name].append(fmri)

    # Emit the newest entry of each bucket.
    latest = []
    for name in buckets.keys():
        entries = buckets[name]
        entries.sort(reverse=True)
        latest.append(entries[0])

    for e in latest:
        msg(e)
def _get_image(image_dir):
    """Return a pkg.client.api.ImageInterface for the provided image
    directory; raises SysrepoException when the image can't be opened."""
    cdir = os.getcwd()
    if not image_dir:
        image_dir = "/"
    api_inst = None
    tracker = progress.QuietProgressTracker()
    try:
        api_inst = pkg.client.api.ImageInterface(
            image_dir, CLIENT_API_VERSION, tracker, None,
            PKG_CLIENT_NAME)
        if api_inst.root != image_dir:
            msg(_("Problem getting image at %s") % image_dir)
    # Fixed Python-2-only "except X, err" syntax.
    except Exception as err:
        raise SysrepoException(
            _("Unable to get image at %(dir)s: %(reason)s") %
            {"dir": image_dir, "reason": str(err)})
    finally:
        # ImageInterface construction changes the process cwd;
        # restore it on every exit path.
        os.chdir(cdir)
    # Previously this function built the interface but never returned
    # it; return it as the docstring promises.
    return api_inst
def pkgdeps_to_screen(pkg_deps, manifest_paths, echo_manifest):
    """Write the resolved package dependencies to stdout.

    'pkg_deps' is a dictionary that maps a path to a manifest to the
    dependencies that were resolved for that manifest.

    'manifest_paths' is a list of the paths to the manifests for
    which file dependencies were resolved.

    'echo_manifest' is a boolean which determines whether the
    original manifest will be written out or not."""
    ret_code = 0
    for idx, path in enumerate(manifest_paths):
        # Separate successive manifests with a blank gap.
        if idx:
            msg("\n\n")
        msg("# {0}".format(path))
        if echo_manifest:
            __echo_manifest(path, msg, strip_newline=True)
        for dep in pkg_deps[path]:
            msg(dep)
    return ret_code
def pkgdeps_to_screen(pkg_deps, manifest_paths, echo_manifest):
    """Write the resolved package dependencies to stdout.

    'pkg_deps' is a dictionary that maps a path to a manifest to the
    dependencies that were resolved for that manifest.

    'manifest_paths' is a list of the paths to the manifests for
    which file dependencies were resolved.

    'echo_manifest' is a boolean which determines whether the
    original manifest will be written out or not."""
    ret_code = 0
    for idx, path in enumerate(manifest_paths):
        # Blank separator between successive manifests.
        if idx:
            msg("\n\n")
        msg("# %s" % path)
        if echo_manifest:
            __echo_manifest(path, msg, strip_newline=True)
        for dep in pkg_deps[path]:
            msg(dep)
    return ret_code
def resolve(args, img_dir):
    """Take a list of manifests and resolve any file dependencies, first
    against the other published manifests and then against what is
    installed on the machine."""
    out_dir = None
    echo_manifest = False
    output_to_screen = False
    suffix = None
    verbose = False
    use_system_to_resolve = True
    constraint_files = []
    extra_external_info = False
    try:
        opts, pargs = getopt.getopt(args, "d:e:Emos:Sv")
    except getopt.GetoptError as e:
        usage(_("illegal global option -- {0}").format(e.opt))
    for opt, arg in opts:
        if opt == "-d":
            out_dir = arg
        elif opt == "-e":
            constraint_files.append(arg)
        elif opt == "-E":
            extra_external_info = True
        elif opt == "-m":
            echo_manifest = True
        elif opt == "-o":
            output_to_screen = True
        elif opt == "-s":
            suffix = arg
        elif opt == "-S":
            use_system_to_resolve = False
        elif opt == "-v":
            verbose = True

    # -o (screen output) is mutually exclusive with the file-output
    # options -d and -s.
    if (out_dir or suffix) and output_to_screen:
        usage(_("-o cannot be used with -d or -s"))

    # Validate every manifest path before doing any work.
    manifest_paths = [os.path.abspath(fp) for fp in pargs]
    for manifest in manifest_paths:
        if not os.path.isfile(manifest):
            usage(_("The manifest file {0} could not be "
                "found.").format(manifest), retcode=2)

    if out_dir:
        out_dir = os.path.abspath(out_dir)
        if not os.path.isdir(out_dir):
            usage(_("The output directory {0} is not a "
                "directory.").format(out_dir), retcode=2)

    provided_image_dir = True
    pkg_image_used = False
    # No -R given: discover the image root from cwd / $PKG_IMAGE.
    if img_dir == None:
        orig_cwd = None
        try:
            orig_cwd = os.getcwd()
        except OSError:
            # May be unreadable by user or have other problem.
            pass

        img_dir, provided_image_dir = api.get_default_image_root(
            orig_cwd=orig_cwd)
        if os.environ.get("PKG_IMAGE"):
            # It's assumed that this has been checked by the above
            # function call and hasn't been removed from the
            # environment.
            pkg_image_used = True

    if not img_dir:
        error(_("Could not find image. Use the -R option or set "
            "$PKG_IMAGE to the\nlocation of an image."))
        return 1

    # Build the set of patterns used to constrain system-installed
    # packages during resolution.
    system_patterns = misc.EmptyI
    if constraint_files:
        system_patterns = []
        for f in constraint_files:
            try:
                with open(f, "rb") as fh:
                    for l in fh:
                        l = l.strip()
                        # Skip blanks and comment lines.
                        if l and not l.startswith("#"):
                            system_patterns.append(l)
            except EnvironmentError as e:
                if e.errno == errno.ENOENT:
                    error("{0}: '{1}'".format(
                        e.args[1], e.filename),
                        cmd="resolve")
                    return 1
                raise api_errors._convert_error(e)
        if not system_patterns:
            error(_("External package list files were provided but "
                "did not contain any fmri patterns."))
            return 1
    elif use_system_to_resolve:
        # No constraint files: match everything installed.
        system_patterns = ["*"]

    # Becuase building an ImageInterface permanently changes the cwd for
    # python, it's necessary to do this step after resolving the paths to
    # the manifests.
    try:
        api_inst = api.ImageInterface(img_dir, CLIENT_API_VERSION,
            progress.QuietProgressTracker(), None, PKG_CLIENT_NAME,
            exact_match=provided_image_dir)
    except api_errors.ImageNotFoundException as e:
        if e.user_specified:
            if pkg_image_used:
                error(_("No image rooted at '{0}' "
                    "(set by $PKG_IMAGE)").format(e.user_dir))
            else:
                error(_("No image rooted at "
                    "'{0}'").format(e.user_dir))
        else:
            error(_("No image found."))
        return 1
    except api_errors.PermissionsException as e:
        error(e)
        return 1
    except api_errors.ImageFormatUpdateNeeded as e:
        # This should be a very rare error case.
        format_update_error(e)
        return 1

    try:
        pkg_deps, errs, unused_fmris, external_deps = \
            dependencies.resolve_deps(manifest_paths, api_inst,
            system_patterns, prune_attrs=not verbose)
    except (actions.MalformedActionError,
        actions.UnknownActionError) as e:
        error(_("Could not parse one or more manifests because of "
            "the following line:\n{0}").format(e.actionstr))
        return 1
    except dependencies.DependencyError as e:
        error(e)
        return 1
    except api_errors.ApiException as e:
        error(e)
        return 1

    # Route the resolved dependencies to the requested destination.
    ret_code = 0
    if output_to_screen:
        ret_code = pkgdeps_to_screen(pkg_deps, manifest_paths,
            echo_manifest)
    elif out_dir:
        ret_code = pkgdeps_to_dir(pkg_deps, manifest_paths, out_dir,
            suffix, echo_manifest)
    else:
        ret_code = pkgdeps_in_place(pkg_deps, manifest_paths, suffix,
            echo_manifest)

    if extra_external_info:
        if constraint_files and unused_fmris:
            msg(_("\nThe following fmris matched a pattern in a "
                "constraint file but were not used in\ndependency "
                "resolution:"))
            for pfmri in sorted(unused_fmris):
                msg("\t{0}".format(pfmri))
        if not constraint_files and external_deps:
            msg(_("\nThe following fmris had dependencies resolve "
                "to them:"))
            for pfmri in sorted(external_deps):
                msg("\t{0}".format(pfmri))

    # Any reported error forces a non-zero exit status.
    for e in errs:
        if ret_code == 0:
            ret_code = 1
        emsg(e)
    return ret_code
def generate(args):
        """Produce a list of file dependencies from a manifest and a proto
        area.

        'args' is the argv-style argument list for the 'generate' subcommand.

        Returns 0 on success, 1 if dependency generation reported errors;
        calls usage() (which exits the program) on bad arguments."""

        try:
                opts, pargs = getopt.getopt(args, "d:D:Ik:Mm?", ["help"])
        except getopt.GetoptError as e:
                usage(_("illegal global option -- {0}").format(e.opt))

        remove_internal_deps = True
        echo_manf = False
        show_missing = False
        show_usage = False
        run_paths = []
        dyn_tok_conv = {}
        proto_dirs = []

        for opt, arg in opts:
                if opt == "-d":
                        if not os.path.isdir(arg):
                                # BUG FIX: .format() was previously applied to
                                # the untranslated string *inside* _(), which
                                # defeats message catalog lookup.  Translate
                                # first, then format.
                                usage(_("The proto directory {0} could not be "
                                    "found.").format(arg), retcode=2)
                        proto_dirs.append(os.path.abspath(arg))
                elif opt == "-D":
                        try:
                                dyn_tok_name, dyn_tok_val = arg.split("=", 1)
                        # BUG FIX: narrowed a bare "except:"; only a missing
                        # "=" separator raises here, and that is ValueError.
                        except ValueError:
                                usage(_("-D arguments must be of the form "
                                    "'name=value'."))
                        if not dyn_tok_name[0] == "$":
                                dyn_tok_name = "$" + dyn_tok_name
                        dyn_tok_conv.setdefault(dyn_tok_name, []).append(
                            dyn_tok_val)
                elif opt == "-I":
                        remove_internal_deps = False
                elif opt == "-k":
                        run_paths.append(arg)
                elif opt == "-m":
                        echo_manf = True
                elif opt == "-M":
                        show_missing = True
                elif opt in ("--help", "-?"):
                        show_usage = True

        if show_usage:
                usage(retcode=0)
        if len(pargs) > 2 or len(pargs) < 1:
                usage(_("Generate only accepts one or two arguments."))

        # $ORIGIN is always derived from each file's install path, so a
        # user-supplied mapping would be ignored (or conflict).
        if "$ORIGIN" in dyn_tok_conv:
                usage(_("ORIGIN may not be specified using -D. It will be "
                    "inferred from the\ninstall paths of the files."))

        retcode = 0

        manf = pargs[0]
        if not os.path.isfile(manf):
                usage(_("The manifest file {0} could not be found.").format(
                    manf), retcode=2)

        if len(pargs) > 1:
                if not os.path.isdir(pargs[1]):
                        usage(_("The proto directory {0} could not be "
                            "found.").format(pargs[1]), retcode=2)
                # The positional proto dir takes precedence over -d dirs.
                proto_dirs.insert(0, os.path.abspath(pargs[1]))
        if not proto_dirs:
                usage(_("At least one proto directory must be provided."),
                    retcode=2)

        try:
                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(
                    manf, proto_dirs, dyn_tok_conv, run_paths,
                    remove_internal_deps)
        except (actions.MalformedActionError, actions.UnknownActionError) as e:
                error(_("Could not parse manifest {manifest} because of the "
                    "following line:\n{line}").format(manifest=manf,
                    line=e.actionstr))
                return 1
        except api_errors.ApiException as e:
                error(e)
                return 1

        if echo_manf:
                # BUG FIX: use a context manager so the file is closed even if
                # msg() raises, and open in text mode so the echoed lines are
                # str, not bytes, on Python 3.
                with open(manf, "r") as fh:
                        for l in fh:
                                msg(l.rstrip())

        for d in sorted(ds):
                msg(d)

        # BUG FIX: dict.iteritems() does not exist on Python 3; this module
        # already uses Python 3 "except ... as" syntax, so use items().
        for key, value in pkg_attrs.items():
                msg(actions.attribute.AttributeAction(**{key: value}))

        if show_missing:
                for m in ms:
                        emsg(m)

        # Any dependency-generation error makes the overall run fail.
        for e in es:
                emsg(e)
                retcode = 1
        return retcode
def main_func():
        """Parse command line options for pkg.depot-config, validate the
        resulting configuration (including optional https material), then
        write the depot configuration via refresh_conf().  Returns the exit
        status from refresh_conf(), or EXIT_OOPS on certificate errors."""

        # some sensible defaults
        host = "0.0.0.0"
        # the port we listen on
        port = None
        # a list of (repo_dir, repo_prefix) tuples
        repo_info = []
        # the path where we store disk caches
        cache_dir = None
        # our maximum cache size, in megabytes
        # NOTE(review): -s stores its argument here without int() conversion,
        # so this stays a string after parsing — presumably consumers convert;
        # confirm.
        cache_size = 0
        # whether we're writing a full httpd.conf, or just a fragment
        fragment = False
        # Whether we support https service.
        https = False
        # The location of server certificate file.
        ssl_cert_file = ""
        # The location of server key file.
        ssl_key_file = ""
        # The location of the server ca certificate file.
        ssl_ca_cert_file = ""
        # The location of the server ca key file.
        ssl_ca_key_file = ""
        # Directory for storing generated certificates and keys
        cert_key_dir = ""
        # SSL certificate chain file path if the server certificate is not
        # signed by the top level CA.
        ssl_cert_chain_file = ""
        # The pkg/depot smf instance fmri.
        smf_fmri = ""
        # an optional url-prefix, used to separate pkg5 services from the rest
        # of the webserver url namespace, only used when running in fragment
        # mode, otherwise we assume we're the only service running on this
        # web server instance, and control the entire server URL namespace.
        sroot = ""
        # the path where our Mako templates and wsgi scripts are stored
        template_dir = "/etc/pkg/depot"
        # a volatile directory used at runtime for storing state
        runtime_dir = None
        # where logs are written
        log_dir = "/var/log/pkg/depot"
        # whether we should pull configuration from
        # svc:/application/pkg/server instances
        use_smf_instances = False
        # whether we allow admin/0 operations to rebuild the index
        allow_refresh = False
        # the current server_type
        server_type = "apache2"

        # set when any -d argument carries a third "=writable root" component
        writable_root_set = False
        try:
                opts, pargs = getopt.getopt(sys.argv[1:],
                    "Ac:d:Fh:l:P:p:r:Ss:t:T:?", [
                    "help", "debug=", "https", "cert=", "key=", "ca-cert=",
                    "ca-key=", "cert-chain=", "cert-key-dir=", "smf-fmri="
                ])
                for opt, arg in opts:
                        if opt == "--help":
                                usage()
                        elif opt == "-h":
                                host = arg
                        elif opt == "-c":
                                cache_dir = arg
                        elif opt == "-s":
                                cache_size = arg
                        elif opt == "-l":
                                log_dir = arg
                        elif opt == "-p":
                                port = arg
                        elif opt == "-r":
                                runtime_dir = arg
                        elif opt == "-T":
                                template_dir = arg
                        elif opt == "-t":
                                server_type = arg
                        elif opt == "-d":
                                if "=" not in arg:
                                        usage(
                                            _("-d arguments must be in the "
                                            "form <prefix>=<repo path>"
                                            "[=writable root]"))
                                # at most 3 components:
                                # prefix=root[=writable_root]
                                components = arg.split("=", 2)
                                if len(components) == 3:
                                        prefix, root, writable_root = \
                                            components
                                        writable_root_set = True
                                elif len(components) == 2:
                                        prefix, root = components
                                        writable_root = None
                                repo_info.append((root,
                                    _affix_slash(prefix), writable_root))
                        elif opt == "-P":
                                sroot = _affix_slash(arg)
                        elif opt == "-F":
                                fragment = True
                        elif opt == "-S":
                                use_smf_instances = True
                        elif opt == "-A":
                                allow_refresh = True
                        elif opt == "--https":
                                https = True
                        elif opt == "--cert":
                                ssl_cert_file = arg
                        elif opt == "--key":
                                ssl_key_file = arg
                        elif opt == "--ca-cert":
                                ssl_ca_cert_file = arg
                        elif opt == "--ca-key":
                                ssl_ca_key_file = arg
                        elif opt == "--cert-chain":
                                ssl_cert_chain_file = arg
                        elif opt == "--cert-key-dir":
                                cert_key_dir = arg
                        elif opt == "--smf-fmri":
                                smf_fmri = arg
                        elif opt == "--debug":
                                try:
                                        key, value = arg.split("=", 1)
                                except (AttributeError, ValueError):
                                        usage(
                                            _("{opt} takes argument of form "
                                            "name=value, not {arg}").format(
                                            opt=opt, arg=arg))
                                DebugValues.set_value(key, value)
                        else:
                                usage("unknown option {0}".format(opt))

        except getopt.GetoptError as e:
                usage(_("illegal global option -- {0}").format(e.opt))

        if not runtime_dir:
                usage(_("required runtime dir option -r missing."))

        # we need a cache_dir to store the SSLSessionCache
        if not cache_dir and not fragment:
                usage(_("cache_dir option -c is required if -F is not used."))

        if not fragment and not port:
                usage(_("required port option -p missing."))

        if not use_smf_instances and not repo_info:
                usage(_("at least one -d option is required if -S is "
                    "not used."))

        if repo_info and use_smf_instances:
                usage(_("cannot use -d and -S together."))

        if https:
                if fragment:
                        usage(
                            _("https configuration is not supported in "
                            "fragment mode."))
                # --cert and --key are all-or-nothing; if neither is given,
                # certificates are generated instead.
                if bool(ssl_cert_file) != bool(ssl_key_file):
                        usage(
                            _("certificate and key files must be presented "
                            "at the same time."))
                elif not ssl_cert_file and not ssl_key_file:
                        if not cert_key_dir:
                                usage(
                                    _("cert-key-dir option is require to "
                                    "store the generated certificates and "
                                    "keys"))
                        if ssl_cert_chain_file:
                                usage(_("Cannot use --cert-chain without "
                                    "--cert and --key"))
                        if bool(ssl_ca_cert_file) != bool(ssl_ca_key_file):
                                usage(
                                    _("server CA certificate and key files "
                                    "must be presented at the same time."))
                        # If fmri is specified for pkg/depot instance, we need
                        # record the property values for updating.
                        if smf_fmri:
                                orig = (ssl_ca_cert_file, ssl_ca_key_file,
                                    ssl_cert_file, ssl_key_file)
                        try:
                                ssl_ca_cert_file, ssl_ca_key_file, \
                                    ssl_cert_file, ssl_key_file = \
                                    _generate_server_cert_key(host, port,
                                    ca_cert_file=ssl_ca_cert_file,
                                    ca_key_file=ssl_ca_key_file,
                                    output_dir=cert_key_dir)
                                if ssl_ca_cert_file:
                                        msg(
                                            _("Server CA certificate is "
                                            "located at {0}. Please deploy "
                                            "it into /etc/certs/CA directory "
                                            "of each client.").format(
                                            ssl_ca_cert_file))
                        except (DepotException, EnvironmentError) as e:
                                error(e)
                                return EXIT_OOPS

                        # Update the pkg/depot instance smf properties if
                        # anything changes.
                        if smf_fmri:
                                dest = (ssl_ca_cert_file, ssl_ca_key_file,
                                    ssl_cert_file, ssl_key_file)
                                if orig != dest:
                                        prop_list = [
                                            "config/ssl_ca_cert_file",
                                            "config/ssl_ca_key_file",
                                            "config/ssl_cert_file",
                                            "config/ssl_key_file"
                                        ]
                                        try:
                                                _update_smf_props(smf_fmri,
                                                    prop_list, orig, dest)
                                        except (smf.NonzeroExitException,
                                            RuntimeError) as e:
                                                error(e)
                                                return EXIT_OOPS
                else:
                        # User supplied both cert and key; they must exist.
                        if not os.path.exists(ssl_cert_file):
                                error(
                                    _("User provided server certificate "
                                    "file {0} does not exist.").format(
                                    ssl_cert_file))
                                return EXIT_OOPS
                        if not os.path.exists(ssl_key_file):
                                error(
                                    _("User provided server key file {0} "
                                    "does not exist.").format(ssl_key_file))
                                return EXIT_OOPS
                        if ssl_cert_chain_file and not os.path.exists(
                            ssl_cert_chain_file):
                                error(
                                    _("User provided certificate chain file "
                                    "{0} does not exist.").format(
                                    ssl_cert_chain_file))
                                return EXIT_OOPS
        else:
                # https is off: reject any stray SSL-related options.
                if ssl_cert_file or ssl_key_file or ssl_ca_cert_file \
                    or ssl_ca_key_file or ssl_cert_chain_file:
                        usage(
                            _("certificate or key files are given before "
                            "https service is turned on. Use --https to "
                            "turn on the service."))
                if smf_fmri:
                        usage(_("cannot use --smf-fmri without --https."))

        # We can't support httpd.conf fragments with writable root, because
        # we don't have the mod_wsgi app that can build the index or serve
        # search requests everywhere the fragments might be used. (eg. on
        # non-Solaris systems)
        if writable_root_set and fragment:
                usage(_("cannot use -d with writable roots and -F together."))

        if fragment and port:
                usage(_("cannot use -F and -p together."))

        if fragment and allow_refresh:
                usage(_("cannot use -F and -A together."))

        if sroot and not fragment:
                usage(_("cannot use -P without -F."))

        if use_smf_instances:
                try:
                        repo_info = get_smf_repo_info()
                except DepotException as e:
                        # NOTE(review): execution continues after error() here
                        # with repo_info possibly empty — confirm error() is
                        # expected not to exit.
                        error(e)

        # In the future we may produce configuration for different
        # HTTP servers. For now, we only support "apache2"
        if server_type not in KNOWN_SERVER_TYPES:
                usage(
                    _("unknown server type {type}. "
                    "Known types are: {known}").format(
                    type=server_type,
                    known=", ".join(KNOWN_SERVER_TYPES)))

        try:
                _check_unique_repo_properties(repo_info)
        except DepotException as e:
                # NOTE(review): as above, no early return after error() —
                # confirm this is intentional.
                error(e)

        ret = refresh_conf(repo_info, log_dir, host, port,
            runtime_dir, template_dir, cache_dir, cache_size, sroot,
            fragment=fragment, allow_refresh=allow_refresh,
            ssl_cert_file=ssl_cert_file, ssl_key_file=ssl_key_file,
            ssl_cert_chain_file=ssl_cert_chain_file)
        return ret
def refresh_conf(repo_info, log_dir, host, port, runtime_dir,
    template_dir, cache_dir, cache_size, sroot, fragment=False,
    allow_refresh=False, ssl_cert_file="", ssl_key_file="",
    ssl_cert_chain_file=""):
        """Creates a new configuration for the depot."""
        ret = EXIT_OK
        try:
                if not repo_info:
                        raise DepotException(_("no repositories found"))

                htdocs_path = os.path.join(runtime_dir, DEPOT_HTDOCS_DIRNAME,
                    sroot)
                cleanup_htdocs(htdocs_path)
                misc.makedirs(htdocs_path)

                # pub_entries holds tuples of the form:
                # (publisher prefix, repository root dir, repository prefix,
                # writable_root); default_entries holds
                # (default publisher, repository root dir, repository prefix).
                pub_entries = []
                default_entries = []
                failures = []

                # Ask each repository for its publisher information, saving
                # any failures so they can all be reported at once below.
                for repo_root, repo_prefix, writable_root in repo_info:
                        try:
                                publishers, default_pub, status = \
                                    _get_publishers(repo_root)
                                pub_entries.extend(
                                    (pub, repo_root, repo_prefix,
                                    writable_root) for pub in publishers)
                                default_entries.append(
                                    (default_pub, repo_root, repo_prefix))
                                _write_status_response(status, htdocs_path,
                                    repo_prefix)
                                # The writable root must exist and must be
                                # owned by pkg5srv:pkg5srv.
                                if writable_root:
                                        misc.makedirs(writable_root)
                                        _chown_dir(writable_root)
                        except DepotException as exc:
                                failures.append(str(exc))
                if failures:
                        raise DepotException(_("Unable to write configuration: "
                            "{0}").format("\n".join(failures)))

                # Write the publisher/0 response for each repository.
                by_prefix = {}
                for pub_prefix, repo_root, repo_prefix, writable_root in \
                    pub_entries:
                        by_prefix.setdefault(repo_prefix, []).append(
                            pub_prefix)
                for repo_prefix, prefixes in by_prefix.items():
                        _write_publisher_response(prefixes, htdocs_path,
                            repo_prefix)

                _write_httpd_conf(pub_entries, default_entries, runtime_dir,
                    log_dir, template_dir, cache_dir, cache_size, host, port,
                    sroot, fragment=fragment, allow_refresh=allow_refresh,
                    ssl_cert_file=ssl_cert_file, ssl_key_file=ssl_key_file,
                    ssl_cert_chain_file=ssl_cert_chain_file)
                _write_versions_response(htdocs_path, fragment=fragment)

                # A configuration fragment is probably consumed by a web
                # server that is not running as DEPOT_USER:DEPOT_GROUP, so
                # only chown when we wrote the full configuration.
                if fragment:
                        msg(_("Created {0}/depot.conf").format(runtime_dir))
                else:
                        _chown_dir(runtime_dir)
                        _chown_dir(cache_dir)
        except (DepotException, OSError, apx.ApiException) as exc:
                error(exc)
                ret = EXIT_OOPS
        return ret
def main_func():
        """Entry point for pkgsurf: reversion packages in a target repository
        that are identical (per cmp_policy) to their counterparts in a
        reference repository.  Returns pkgdefs.EXIT_OK on success,
        pkgdefs.EXIT_OOPS when no publishers matched."""

        # Shared with cleanup()/abort() handlers elsewhere in this module.
        global temp_root, repo_modified, repo_finished, repo_uri, tracker
        global dry_run

        global_settings.client_name = PKG_CLIENT_NAME

        try:
                opts, pargs = getopt.getopt(sys.argv[1:], "?c:i:np:r:s:u",
                    ["help"])
        except getopt.GetoptError as e:
                usage(_("illegal option -- {0}").format(e.opt))

        dry_run = False
        ref_repo_uri = None
        # -s overrides; $PKG_REPO is the fallback target.
        repo_uri = os.getenv("PKG_REPO", None)
        changes = set()
        ignores = set()
        publishers = set()
        cmp_policy = CMP_ALL

        processed_pubs = 0

        for opt, arg in opts:
                if opt == "-c":
                        changes.add(arg)
                elif opt == "-i":
                        ignores.add(arg)
                elif opt == "-n":
                        dry_run = True
                elif opt == "-p":
                        publishers.add(arg)
                elif opt == "-r":
                        ref_repo_uri = misc.parse_uri(arg)
                elif opt == "-s":
                        repo_uri = misc.parse_uri(arg)
                elif opt == "-u":
                        # Ignore signatures when comparing package content.
                        cmp_policy = CMP_UNSIGNED
                elif opt == "-?" or opt == "--help":
                        usage(retcode=pkgdefs.EXIT_OK)

        if pargs:
                usage(_("Unexpected argument(s): {0}").format(" ".join(pargs)))

        if not repo_uri:
                usage(_("A target repository must be provided."))

        if not ref_repo_uri:
                usage(_("A reference repository must be provided."))

        target = publisher.RepositoryURI(misc.parse_uri(repo_uri))
        if target.scheme != "file":
                abort(err=_("Target repository must be filesystem-based."))
        try:
                # Opened read-only for a dry run so nothing can be modified.
                target_repo = sr.Repository(read_only=dry_run,
                    root=target.get_pathname())
        except sr.RepositoryError as e:
                abort(str(e))

        # Use the tmp directory in target repo for efficient file rename since
        # files are in the same file system.
        temp_root = target_repo.temp_root
        if not os.path.exists(temp_root):
                os.makedirs(temp_root)

        ref_incoming_dir = tempfile.mkdtemp(dir=temp_root)
        ref_pkg_root = tempfile.mkdtemp(dir=temp_root)

        ref_xport, ref_xport_cfg = transport.setup_transport()
        ref_xport_cfg.incoming_root = ref_incoming_dir
        ref_xport_cfg.pkg_root = ref_pkg_root
        transport.setup_publisher(ref_repo_uri, "ref", ref_xport,
            ref_xport_cfg, remote_prefix=True)

        ref_repo = None
        ref = publisher.RepositoryURI(misc.parse_uri(ref_repo_uri))
        if ref.scheme == "file":
                # Filesystem-based reference repos are accessed directly for
                # speed; remote ones go through the transport only.
                try:
                        ref_repo = sr.Repository(read_only=dry_run,
                            root=ref.get_pathname())
                except sr.RepositoryError as e:
                        abort(str(e))

        tracker = get_tracker()

        for pub in target_repo.publishers:
                # Only process publishers selected via -p ("*" selects all).
                if publishers and pub not in publishers \
                    and '*' not in publishers:
                        continue

                msg(_("Processing packages for publisher {0} ...").format(pub))
                # Find the matching pub in the ref repo.
                for ref_pub in ref_xport_cfg.gen_publishers():
                        if ref_pub.prefix == pub:
                                # NOTE(review): 'found' is assigned but never
                                # read; the for/else below carries the
                                # "not found" logic.
                                found = True
                                break
                else:
                        # for/else: no reference publisher matched.
                        txt = _("Publisher {0} not found in reference "
                            "repository.").format(pub)
                        if publishers:
                                # Explicitly requested publisher is missing:
                                # hard failure.
                                abort(err=txt)
                        else:
                                txt += _(" Skipping.")
                                msg(txt)
                        continue

                processed_pubs += 1

                rev = do_reversion(pub, ref_pub, target_repo, ref_xport,
                    changes, ignores, cmp_policy, ref_repo, ref,
                    ref_xport_cfg)

                # Only rebuild catalog if anything got actually reversioned.
                if rev and not dry_run:
                        msg(_("Rebuilding repository catalog."))
                        target_repo.rebuild(pub=pub)
                repo_finished = True

        ret = pkgdefs.EXIT_OK
        if processed_pubs == 0:
                msg(_("No matching publishers could be found."))
                ret = pkgdefs.EXIT_OOPS
        cleanup()
        return ret
abort(str(e)) # Track anything that failed to match. any_unmatched.extend(unmatched) any_matched.extend(set(p for p in refs.values())) matches = list(set(f for m in matches.values() for f in m)) else: matches = [f for f in src_cat.fmris()] if not matches: # No matches at all; nothing to do for this publisher. return matches matches = prune(matches, all_versions, all_timestamps) if recursive: msg(_("Retrieving manifests for dependency " "evaluation ...")) matches = prune(get_dependencies(matches, xport_cfg, tracker), all_versions, all_timestamps) return matches def archive_pkgs(pargs, target, list_newest, all_versions, all_timestamps, keep_compresed, raw, recursive, dry_run, dest_xport_cfg, src_uri): """Retrieve source package data completely and then archive it.""" global cache_dir, download_start, xport, xport_cfg target = os.path.abspath(target) if os.path.exists(target): error(_("Target archive '%s' already " "exists.") % target)
def archive_pkgs(pargs, target, list_newest, all_versions, all_timestamps, keep_compresed, raw, recursive, dry_run, dest_xport_cfg, src_uri): """Retrieve source package data completely and then archive it.""" global cache_dir, download_start, xport, xport_cfg target = os.path.abspath(target) if os.path.exists(target): error(_("Target archive '%s' already " "exists.") % target) abort() # Open the archive early so that permissions failures, etc. can be # detected before actual work is started. if not dry_run: pkg_arc = pkg.p5p.Archive(target, mode="w") basedir = tempfile.mkdtemp(dir=temp_root, prefix=global_settings.client_name + "-") tmpdirs.append(basedir) # Retrieve package data for all publishers. any_unmatched = [] any_matched = [] invalid_manifests = [] total_processed = 0 arc_bytes = 0 archive_list = [] for src_pub in xport_cfg.gen_publishers(): # Root must be per publisher on the off chance that multiple # publishers have the same package. xport_cfg.pkg_root = os.path.join(basedir, src_pub.prefix) tracker = get_tracker() msg(_("Retrieving packages for publisher %s ...") % src_pub.prefix) if pargs == None or len(pargs) == 0: usage(_("must specify at least one pkgfmri")) matches = get_matches(src_pub, tracker, xport, pargs, any_unmatched, any_matched, all_versions, all_timestamps, recursive) if not matches: # No matches at all; nothing to do for this publisher. continue # First, retrieve the manifests and calculate package transfer # sizes. 
npkgs = len(matches) get_bytes = 0 get_files = 0 if not recursive: msg(_("Retrieving and evaluating %d package(s)...") % npkgs) tracker.manifest_fetch_start(npkgs) good_matches = [] for f in matches: try: m = get_manifest(f, xport_cfg) except apx.InvalidPackageErrors, e: invalid_manifests.extend(e.errors) continue good_matches.append(f) getb, getf, arcb, arccb = get_sizes(m) get_bytes += getb get_files += getf # Since files are going into the archive, progress # can be tracked in terms of compressed bytes for # the package files themselves. arc_bytes += arccb # Also include the the manifest file itself in the # amount of bytes to archive. try: fs = os.stat(m.pathname) arc_bytes += fs.st_size except EnvironmentError, e: raise apx._convert_error(e) tracker.manifest_fetch_progress(completion=True)
def transfer_pkgs(pargs, target, list_newest, all_versions, all_timestamps, keep_compressed, raw, recursive, dry_run, dest_xport_cfg, src_uri): """Retrieve source package data and optionally republish it as each package is retrieved. """ global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub any_unmatched = [] any_matched = [] invalid_manifests = [] total_processed = 0 for src_pub in xport_cfg.gen_publishers(): tracker = get_tracker() if list_newest: # Make sure the prog tracker knows we're doing a listing # operation so that it suppresses irrelevant output. tracker.set_purpose(tracker.PURPOSE_LISTING) if pargs or len(pargs) > 0: usage(_("--newest takes no options")) src_cat = fetch_catalog(src_pub, tracker, xport, False) for f in src_cat.fmris(ordered=True, last=True): msg(f.get_fmri()) continue msg(_("Processing packages for publisher %s ...") % src_pub.prefix) if pargs == None or len(pargs) == 0: usage(_("must specify at least one pkgfmri")) republish = False if not raw: basedir = tempfile.mkdtemp(dir=temp_root, prefix=global_settings.client_name + "-") tmpdirs.append(basedir) republish = True # Turn target into a valid URI. target = misc.parse_uri(target) # Setup target for transport. targ_pub = transport.setup_publisher(target, src_pub.prefix, dest_xport, dest_xport_cfg) # Files have to be decompressed for republishing. keep_compressed = False if target.startswith("file://"): # Check to see if the repository exists first. try: t = trans.Transaction(target, xport=dest_xport, pub=targ_pub) except trans.TransactionRepositoryInvalidError, e: txt = str(e) + "\n\n" txt += _("To create a repository, use " "the pkgrepo command.") abort(err=txt) except trans.TransactionRepositoryConfigError, e: txt = str(e) + "\n\n" txt += _("The repository configuration " "for the repository located at " "'%s' is not valid or the " "specified path does not exist. 
" "Please correct the configuration " "of the repository or create a new " "one.") % target abort(err=txt) except trans.TransactionError, e: abort(err=e)
def usage(usage_error=None, retcode=2):
        """Emit a usage message and optionally prefix it with a more specific
        error message.  Causes program to exit.

        'usage_error' is an optional error string printed (via error())
        before the usage text.

        'retcode' is the process exit status (default 2, the conventional
        usage-error code)."""

        if usage_error:
                error(usage_error)

        msg(_("""\
Usage: pkgrecv [-s src_uri] [-a] [-d (path|dest_uri)] [-c cache_dir]
           [-kr] [-m match] [-n] [--raw] [--key keyfile --cert certfile]
           (fmri|pattern) ...
       pkgrecv [-s src_repo_uri] --newest

Options:
        -a              Store the retrieved package data in a pkg(5) archive
                        at the location specified by -d.  The file may not
                        already exist, and this option may only be used with
                        filesystem-based destinations.

        -c cache_dir    The path to a directory that will be used to cache
                        downloaded content.  If one is not supplied, the
                        client will automatically pick a cache directory.
                        In the case where a download is interrupted, and a
                        cache directory was automatically chosen, use this
                        option to resume the download.

        -d path_or_uri  The filesystem path or URI of the target repository to
                        republish packages to.  The target must already exist.
                        New repositories can be created using pkgrepo(1).

        -h              Display this usage message.

        -k              Keep the retrieved package content compressed, ignored
                        when republishing.  Should not be used with pkgsend.

        -m match        Controls matching behaviour using the following values:
                            all-timestamps
                                includes all matching timestamps, not just
                                latest (implies all-versions)
                            all-versions
                                includes all matching versions, not just latest

        -n              Perform a trial run with no changes made.

        -r              Recursively evaluates all dependencies for the provided
                        list of packages and adds them to the list.

        -s src_repo_uri A URI representing the location of a pkg(5)
                        repository to retrieve package data from.

        --newest        List the most recent versions of the packages available
                        from the specified repository and exit.  (All other
                        options except -s will be ignored.)

        --raw           Retrieve and store the raw package data in a set of
                        directory structures by stem and version at the location
                        specified by -d.  May only be used with filesystem-
                        based destinations.  This can be used with pkgsend(1)
                        include to conveniently modify and republish packages,
                        perhaps by correcting file contents or providing
                        additional package metadata.

        --key keyfile   Specify a client SSL key file to use for pkg retrieval.

        --cert certfile Specify a client SSL certificate file to use for pkg
                        retrieval.

Environment:
        PKG_DEST        Destination directory or URI
        PKG_SRC         Source URI or path"""))
        sys.exit(retcode)
def trans_publish(repo_uri, fargs):
        """Publish packages in a single step using provided manifest data and
        sources.

        'repo_uri' is the destination repository URI (may be overridden by
        the -s option in 'fargs').

        'fargs' is the argv-style argument list for the 'publish' subcommand.

        Returns EXIT_OK on success, EXIT_OOPS on failure."""

        # --no-index is now silently ignored as the publication process no
        # longer builds search indexes automatically.
        opts, pargs = getopt.getopt(fargs, "b:d:s:T:", ["fmri-in-manifest",
            "no-index", "no-catalog", "key=", "cert="])

        add_to_catalog = True
        basedirs = []
        bundles = []
        timestamp_files = []
        key = None
        cert = None
        for opt, arg in opts:
                if opt == "-b":
                        bundles.append(arg)
                elif opt == "-d":
                        basedirs.append(arg)
                elif opt == "-s":
                        repo_uri = arg
                        if repo_uri and not repo_uri.startswith("null:"):
                                repo_uri = misc.parse_uri(repo_uri)
                elif opt == "-T":
                        timestamp_files.append(arg)
                elif opt == "--no-catalog":
                        add_to_catalog = False
                elif opt == "--key":
                        key = arg
                elif opt == "--cert":
                        cert = arg

        if not repo_uri:
                usage(_("A destination package repository must be provided "
                    "using -s."), cmd="publish")

        if not pargs:
                filelist = [("<stdin>", sys.stdin)]
        else:
                try:
                        filelist = [(f, open(f)) for f in pargs]
                except IOError as e:
                        error(e, cmd="publish")
                        return EXIT_OOPS

        lines = ""      # giant string of all input files concatenated together
        linecnts = []   # tuples of starting line number, ending line number
        linecounter = 0 # running total
        for filename, f in filelist:
                try:
                        data = f.read()
                except IOError as e:
                        error(e, cmd="publish")
                        return EXIT_OOPS
                lines += data
                linecnt = len(data.splitlines())
                linecnts.append((linecounter, linecounter + linecnt))
                linecounter += linecnt
                f.close()

        m = pkg.manifest.Manifest()
        try:
                m.set_content(content=lines)
        except apx.InvalidPackageErrors as err:
                e = err.errors[0]
                lineno = e.lineno
                # Map the global line number back to the originating input
                # file and its local line number.
                for i, tup in enumerate(linecnts):
                        if lineno > tup[0] and lineno <= tup[1]:
                                filename = filelist[i][0]
                                lineno -= tup[0]
                                break
                else:
                        filename = "???"
                        lineno = "???"
                # BUG FIX: the format string previously read
                # "File (unknown) line ..." while a 'filename' keyword was
                # computed and passed but never consumed; use the {filename}
                # placeholder so the resolved file name is reported.
                error(_("File {filename} line {lineno}: {err}").format(
                    filename=filename, lineno=lineno, err=e),
                    cmd="publish")
                return EXIT_OOPS

        try:
                pfmri = pkg.fmri.PkgFmri(m["pkg.fmri"])
                if not pfmri.version:
                        # Cannot have a FMRI without version
                        error(_("The pkg.fmri attribute '{0}' in the package "
                            "manifest must include a version.").format(pfmri),
                            cmd="publish")
                        return EXIT_OOPS
                if not DebugValues["allow-timestamp"]:
                        # If not debugging, timestamps are ignored.
                        pfmri.version.timestr = None
                pkg_name = pfmri.get_fmri()
        except KeyError:
                error(_("Manifest does not set pkg.fmri"))
                return EXIT_OOPS

        xport, pub = setup_transport_and_pubs(repo_uri, ssl_key=key,
            ssl_cert=cert)
        t = trans.Transaction(repo_uri, pkg_name=pkg_name, xport=xport,
            pub=pub)
        t.open()

        target_files = []
        if bundles:
                # Ensure hardlinks marked as files in the manifest are
                # treated as files.  This necessary when sourcing files
                # from some bundle types.
                target_files.extend(a.attrs["path"] for a in m.gen_actions()
                    if a.name == "file")

        bundles = [
            pkg.bundle.make_bundle(bundle, targetpaths=target_files)
            for bundle in bundles
        ]

        for a in m.gen_actions():
                # don't publish these actions
                if a.name == "signature":
                        # BUG FIX: .format() was previously applied inside
                        # _(), defeating message catalog lookup.
                        msg(_("WARNING: Omitting signature action "
                            "'{0}'").format(a))
                        continue
                if a.name == "set" and a.attrs["name"] in ["pkg.fmri",
                    "fmri"]:
                        continue
                elif a.has_payload:
                        # Forcibly discard content-related attributes to
                        # prevent errors when reusing manifests with
                        # different content.
                        for attr in strip_attrs:
                                a.attrs.pop(attr, None)
                        path = pkg.actions.set_action_data(a.hash, a,
                            basedirs=basedirs, bundles=bundles)[0]
                elif a.name in nopub_actions:
                        # BUG FIX: this previously referenced an undefined
                        # name 'action' (NameError); the loop variable is 'a'.
                        error(_("invalid action for publication: "
                            "{0}").format(a), cmd="publish")
                        t.close(abandon=True)
                        return EXIT_OOPS
                if a.name == "file":
                        basename = os.path.basename(a.attrs["path"])
                        for pattern in timestamp_files:
                                if fnmatch.fnmatch(basename, pattern):
                                        if not isinstance(path,
                                            six.string_types):
                                                # Target is from bundle; can't
                                                # apply timestamp now.
                                                continue
                                        ts = misc.time_to_timestamp(
                                            os.stat(path).st_mtime)
                                        a.attrs["timestamp"] = ts
                                        break
                try:
                        t.add(a)
                except:
                        # Deliberately bare: abandon the transaction on any
                        # interruption (including KeyboardInterrupt), then
                        # re-raise.
                        t.close(abandon=True)
                        raise

        pkg_state, pkg_fmri = t.close(abandon=False,
            add_to_catalog=add_to_catalog)
        for val in (pkg_state, pkg_fmri):
                if val is not None:
                        msg(val)
        return EXIT_OK
try: ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(manf, proto_dirs, dyn_tok_conv, run_paths, remove_internal_deps) except (actions.MalformedActionError, actions.UnknownActionError), e: error(_("Could not parse manifest %(manifest)s because of the " "following line:\n%(line)s") % { 'manifest': manf , 'line': e.actionstr}) return 1 except api_errors.ApiException, e: error(e) return 1 if echo_manf: fh = open(manf, "rb") for l in fh: msg(l.rstrip()) fh.close() for d in sorted(ds): msg(d) for key, value in pkg_attrs.iteritems(): msg(actions.attribute.AttributeAction(**{key: value})) if show_missing: for m in ms: emsg(m) for e in es: emsg(e) retcode = 1