def trans_publish(repo_uri, fargs):
    """Open a transaction for the named package, feed the remaining
    arguments through trans_include(), and close the transaction.

    Returns 0 on success, 1 when the include step failed (the
    transaction is then abandoned)."""
    # Fix: the option string previously read "-d:"; the stray leading
    # dash made "-" itself a recognized option letter.  Only -d
    # (basedir), which takes an argument, is intended.  The unused
    # 'error_occurred' local was also dropped.
    opts, pargs = getopt.getopt(fargs, "d:")
    include_opts = []
    for opt, arg in opts:
        if opt == "-d":
            # Forward each -d <dir> pair verbatim to trans_include().
            include_opts += [opt, arg]
    if not pargs:
        usage(_("No fmri argument specified for subcommand"),
            cmd="publish")

    t = trans.Transaction(repo_uri, pkg_name=pargs[0])
    t.open()
    del pargs[0]

    # A truthy return from trans_include() indicates failure; abandon
    # the transaction in that case.
    abandon = bool(trans_include(repo_uri, include_opts + pargs,
        transaction=t))

    pkg_state, pkg_fmri = t.close(abandon=abandon)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    if abandon:
        return 1
    return 0
def trans_close(repo_uri, args):
    """Close (or abandon, with -A) the transaction named by -t or by
    $PKG_TRANS_ID, printing the resulting package state and FMRI."""
    opts, pargs = getopt.getopt(args, "At:")

    abandon = False
    trans_id = None
    for opt, arg in opts:
        if opt == "-A":
            abandon = True
        if opt == "-t":
            trans_id = arg

    if trans_id is None:
        # Fall back to the environment when no -t was supplied.
        trans_id = os.environ.get("PKG_TRANS_ID")
        if trans_id is None:
            usage(_("No transaction ID specified using -t or in "
                "$PKG_TRANS_ID."), cmd="close")

    t = trans.Transaction(repo_uri, trans_id=trans_id)
    for val in t.close(abandon):
        if val is not None:
            msg(val)
    return 0
def trans_include(repo_uri, fargs, transaction=None):
    # Add the actions described by the named manifest fragments (or
    # stdin) to an open transaction.  If 'transaction' is None, the
    # transaction is located via $PKG_TRANS_ID.
    # Returns 1 on I/O failure opening an input file.
    basedirs = []           # -d directories used to resolve payloads
    error_occurred = False  # NOTE(review): never updated in this view
    opts, pargs = getopt.getopt(fargs, "d:")
    for opt, arg in opts:
        if opt == "-d":
            basedirs.append(arg)

    if transaction == None:
        try:
            trans_id = os.environ["PKG_TRANS_ID"]
        except KeyError:
            usage(_("No transaction ID specified in $PKG_TRANS_ID"),
                cmd="include")
        t = trans.Transaction(repo_uri, trans_id=trans_id)
    else:
        # Caller supplied an already-open transaction (see
        # trans_publish); use it directly.
        t = transaction

    # With no file operands, read manifest data from stdin.
    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            # Python 2 idiom: file() opens each named manifest.
            filelist = [(f, file(f)) for f in pargs]
        except IOError, e:
            error(e, cmd="include")
            return 1
def trans_open(repo_uri, args):
    """Open a new transaction for one package and emit its ID.

    With -e (the default) the ID is printed as a shell 'export'
    statement suitable for eval; with -n only the raw ID is printed."""
    opts, pargs = getopt.getopt(args, "en")

    seen = []
    eval_form = True
    for opt, arg in opts:
        seen.append(opt)
        if opt == "-n":
            eval_form = False
        elif opt == "-e":
            eval_form = True

    # -e and -n are mutually exclusive; reject a command line that
    # supplies both.
    if "-e" in seen and "-n" in seen:
        usage(_("only -e or -n may be specified"), cmd="open")
    if len(pargs) != 1:
        usage(_("open requires one package name"), cmd="open")

    t = trans.Transaction(repo_uri, pkg_name=pargs[0])
    if eval_form:
        msg("export PKG_TRANS_ID=%s" % t.open())
    else:
        msg(t.open())
    return 0
def trans_create_repository(repo_uri, args):
    """DEPRECATED"""
    repo_props = {}
    opts, pargs = getopt.getopt(args, "", ["set-property="])
    for opt, arg in opts:
        if opt != "--set-property":
            continue
        try:
            prop, p_value = arg.split("=", 1)
            p_sec, p_name = prop.split(".", 1)
        except ValueError:
            usage(_("property arguments must be of "
                "the form '<section.property>="
                "<value>'."), cmd="create-repository")
        # Group property values by section name.
        repo_props.setdefault(p_sec, {})[p_name] = p_value

    xport, pub = setup_transport_and_pubs(repo_uri, remote=False)
    try:
        trans.Transaction(repo_uri, create_repo=True,
            repo_props=repo_props, xport=xport, pub=pub)
    except trans.TransactionRepositoryConfigError as e:
        # NOTE(review): this branch falls through to 'return 0';
        # confirm whether a config error should also return 1.
        error(e, cmd="create-repository")
        emsg(_("Invalid repository configuration values were "
            "specified using --set-property or required values are "
            "missing. Please provide the correct and/or required "
            "values using the --set-property option."))
    except trans.TransactionError as e:
        error(e, cmd="create-repository")
        return 1
    return 0
def trans_import(repo_uri, args, visitors=None):
    """DEPRECATED"""
    # Fix: the default for 'visitors' was a mutable list literal, which
    # Python shares across calls; substitute a fresh list per call.
    if visitors is None:
        visitors = []
    try:
        trans_id = os.environ["PKG_TRANS_ID"]
    except KeyError:
        print(_("No transaction ID specified in $PKG_TRANS_ID"),
            file=sys.stderr)
        sys.exit(1)

    opts, pargs = getopt.getopt(args, "T:", ["target="])

    timestamp_files = []  # -T patterns whose file mtimes are preserved
    target_files = []     # --target paths forced to be files
    for opt, arg in opts:
        if opt == "-T":
            timestamp_files.append(arg)
        elif opt == "--target":
            target_files.append(arg)

    if not args:
        # NOTE(review): this tests 'args' (options included), not
        # 'pargs', so a command line of only options slips through —
        # confirm whether 'pargs' was intended.
        usage(_("No arguments specified for subcommand."),
            cmd="import")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
        pub=pub)

    ret = EXIT_OK
    abandon = False
    try:
        for action, err in gen_actions(pargs, timestamp_files,
            target_files, visitors=visitors, use_default_owner=True):
            if err:
                error(_("invalid action for publication: {0}").format(
                    action), cmd="import")
                abandon = True
            else:
                # Once any action failed, stop adding but keep
                # draining the generator to surface all errors.
                if not abandon:
                    t.add(action)
    except TypeError as e:
        error(e, cmd="import")
        return EXIT_OOPS
    except EnvironmentError as e:
        if e.errno == errno.ENOENT:
            error("{0}: '{1}'".format(e.args[1], e.filename),
                cmd="import")
            return EXIT_OOPS
        else:
            raise

    for visitor in visitors:
        if visitor.errors:
            abandon = True
            ret = EXIT_OOPS

    if abandon:
        error("Abandoning transaction due to errors.")
        t.close(abandon=True)
    # On success the transaction is deliberately left open for the
    # caller to close.
    return ret
def trans_close(repo_uri, args):
    """DEPRECATED"""
    # --no-index is now silently ignored as the publication process no
    # longer builds search indexes automatically.
    opts, pargs = getopt.getopt(args, "At:", ["no-index", "no-catalog"])

    abandon = False
    trans_id = None
    add_to_catalog = True
    for opt, arg in opts:
        if opt == "-A":
            abandon = True
        elif opt == "-t":
            trans_id = arg
        elif opt == "--no-catalog":
            add_to_catalog = False

    if trans_id is None:
        # Fall back to the environment when no -t was supplied.
        trans_id = os.environ.get("PKG_TRANS_ID")
        if trans_id is None:
            usage(_("No transaction ID specified using -t or in "
                "$PKG_TRANS_ID."), cmd="close")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
        pub=pub)
    for val in t.close(abandon=abandon,
        add_to_catalog=add_to_catalog):
        if val is not None:
            msg(val)
    return EXIT_OK
def test_stress_file_publish(self):
    """Publish lots of packages rapidly ensuring that file
    publication can handle it."""
    repo_path = os.path.abspath(self.dc.get_repodir())
    location = urlunparse(("file", "", pathname2url(repo_path),
        "", "", ""))

    repouriobj = publisher.RepositoryURI(location)
    repo = publisher.Repository(origins=[repouriobj])
    pub = publisher.Publisher(prefix="repo1", repository=repo)
    xport_cfg = transport.GenericTransportCfg()
    xport_cfg.add_publisher(pub)
    xport = transport.Transport(xport_cfg)

    # Each version number must be unique since multiple packages
    # will be published within the same second.
    for ver in range(100):
        pf = fmri.PkgFmri("foo@{0:d}.0".format(ver))
        t = trans.Transaction(location, pkg_name=str(pf),
            xport=xport, pub=pub)
        t.open()
        # NOTE(review): other call sites unpack close() as
        # (pkg_state, pkg_fmri); the order here only affects the
        # debug message — confirm which is correct.
        pkg_fmri, pkg_state = t.close()
        self.debug("{0}: {1}".format(pkg_fmri, pkg_state))
def trans_append(repo_uri, args):
    """DEPRECATED"""
    opts, pargs = getopt.getopt(args, "en")

    parsed = []
    eval_form = True
    for opt, arg in opts:
        parsed.append(opt)
        if opt == "-e":
            eval_form = True
        if opt == "-n":
            eval_form = False

    # Fix: these usage messages previously reported cmd="open", a
    # copy-paste from trans_open(); this is the 'append' subcommand.
    if "-e" in parsed and "-n" in parsed:
        usage(_("only -e or -n may be specified"), cmd="append")
    if len(pargs) != 1:
        usage(_("append requires one package name"), cmd="append")

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, pkg_name=pargs[0], xport=xport,
        pub=pub)
    if eval_form:
        msg("export PKG_TRANS_ID={0}".format(t.append()))
    else:
        msg(t.append())
    return EXIT_OK
def trans_create_repository(repo_uri, args):
    """Creates a new repository at the location indicated by
    repo_uri."""
    if args:
        usage(_("command does not take operands"),
            cmd="create-repository")

    try:
        trans.Transaction(repo_uri, create_repo=True)
    except trans.TransactionError as e:
        error(e, cmd="create-repository")
        return 1
    # Fix: the function previously fell off the end returning None on
    # success; return 0 explicitly for a consistent exit status.
    return 0
def trans_refresh_index(repo_uri, args):
    """DEPRECATED"""
    if args:
        usage(_("command does not take operands"),
            cmd="refresh-index")
    xport, pub = setup_transport_and_pubs(repo_uri)
    try:
        # Fix: the transaction was previously assigned to an unused
        # local 't'; only the refresh_index() side effect matters.
        trans.Transaction(repo_uri, xport=xport,
            pub=pub).refresh_index()
    except trans.TransactionError as e:
        error(e, cmd="refresh-index")
        return EXIT_OOPS
    # Fix: return an explicit success status instead of falling off
    # the end (None).
    return EXIT_OK
def trans_add(repo_uri, args):
    """DEPRECATED"""
    # The transaction to add to is identified via the environment.
    trans_id = os.environ.get("PKG_TRANS_ID")
    if trans_id is None:
        usage(_("No transaction ID specified in $PKG_TRANS_ID"),
            cmd="add")
    if not args:
        usage(_("No arguments specified for subcommand."), cmd="add")

    action, lp = pkg.actions.internalizelist(args[0], args[1:])

    # Refuse action types that may not be published directly.
    if action.name in nopub_actions:
        error(_("invalid action for publication: {0}").format(action),
            cmd="add")
        return EXIT_OOPS

    xport, pub = setup_transport_and_pubs(repo_uri)
    t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
        pub=pub)
    t.add(action)
    return EXIT_OK
def trans_import(repo_uri, args):
    # Generate actions from the given sources and add them to the
    # transaction identified by $PKG_TRANS_ID.
    try:
        trans_id = os.environ["PKG_TRANS_ID"]
    except KeyError:
        print >> sys.stderr, \
            _("No transaction ID specified in $PKG_TRANS_ID")
        sys.exit(1)

    opts, pargs = getopt.getopt(args, "T:")

    # Collect every -T pattern; matching files keep their mtimes.
    timestamp_files = [t_arg for t_opt, t_arg in opts
        if t_opt == "-T"]

    if not args:
        usage(_("No arguments specified for subcommand."),
            cmd="import")
    t = trans.Transaction(repo_uri, trans_id=trans_id)

    for action in gen_actions(pargs, timestamp_files):
        t.add(action)
    return 0
if len(args) < 2: raise RuntimeError, _("A filename must be provided " "for this action.") aargs = args[2:] data = args[1] else: aargs = args[1:] try: action = pkg.actions.fromlist(atype, aargs, data=data) except ValueError, e: error(e[0], cmd="add") return 1 t = trans.Transaction(repo_uri, trans_id=trans_id) t.add(action) return 0 def trans_publish(repo_uri, fargs): error_occurred = False opts, pargs = getopt.getopt(fargs, "-d:") include_opts = [] for opt, arg in opts: if opt == "-d": include_opts += [opt, arg] if not pargs: usage(_("No fmri argument specified for subcommand"), cmd="publish")
def trans_include(repo_uri, fargs, transaction=None):
    """DEPRECATED"""
    basedirs = []         # -d directories used to resolve payloads
    timestamp_files = []  # -T patterns whose file mtimes are preserved

    opts, pargs = getopt.getopt(fargs, "d:T:")
    for opt, arg in opts:
        if opt == "-d":
            basedirs.append(arg)
        elif opt == "-T":
            timestamp_files.append(arg)

    if transaction is None:
        # No transaction supplied; locate one via the environment.
        try:
            trans_id = os.environ["PKG_TRANS_ID"]
        except KeyError:
            usage(_("No transaction ID specified in $PKG_TRANS_ID"),
                cmd="include")
        xport, pub = setup_transport_and_pubs(repo_uri)
        t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport,
            pub=pub)
    else:
        t = transaction

    # With no file operands, read manifest data from stdin.
    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            filelist = [(f, open(f)) for f in pargs]
        except IOError as e:
            error(e, cmd="include")
            return EXIT_OOPS

    lines = []       # all input file contents, joined below
    linecnts = []    # tuples of starting line number, ending line number
    linecounter = 0  # running total
    for filename, f in filelist:
        try:
            data = f.read()
        except IOError as e:
            error(e, cmd="include")
            return EXIT_OOPS
        lines.append(data)
        linecnt = len(data.splitlines())
        linecnts.append((linecounter, linecounter + linecnt))
        linecounter += linecnt

    m = pkg.manifest.Manifest()
    try:
        m.set_content(content="\n".join(lines))
    except apx.InvalidPackageErrors as err:
        e = err.errors[0]
        lineno = e.lineno
        # Map the global line number back to the input file it
        # came from.
        for i, tup in enumerate(linecnts):
            if lineno > tup[0] and lineno <= tup[1]:
                filename = filelist[i][0]
                lineno -= tup[0]
                break
        else:
            filename = "???"
            lineno = "???"
        # Fix: the message previously hardcoded "(unknown)" and
        # ignored the 'filename' keyword it was passed; report the
        # actual file name.
        error(_("File {filename} line {lineno}: {err}").format(
            filename=filename, lineno=lineno, err=e),
            cmd="include")
        return EXIT_OOPS

    invalid_action = False

    for a in m.gen_actions():
        # don't publish this action
        if a.name == "set" and a.attrs["name"] in ["pkg.fmri", "fmri"]:
            continue
        elif a.has_payload:
            path, bd = pkg.actions.set_action_data(a.hash, a, basedirs)
        if a.name == "file":
            basename = os.path.basename(a.attrs["path"])
            for pattern in timestamp_files:
                if fnmatch.fnmatch(basename, pattern):
                    ts = misc.time_to_timestamp(
                        os.stat(path).st_mtime)
                    a.attrs["timestamp"] = ts
                    break
        if a.name in nopub_actions:
            error(_("invalid action for publication: {0}").format(
                str(a)), cmd="include")
            invalid_action = True
        else:
            t.add(a)

    if invalid_action:
        return EXIT_PARTIAL
    else:
        return EXIT_OK
def trans_publish(repo_uri, fargs):
    """Publish packages in a single step using provided manifest data
    and sources.  Returns an EXIT_* status code."""
    # --no-index is now silently ignored as the publication process no
    # longer builds search indexes automatically.
    opts, pargs = getopt.getopt(fargs, "b:d:s:T:",
        ["fmri-in-manifest", "no-index", "no-catalog", "key=",
        "cert="])

    add_to_catalog = True
    basedirs = []
    bundles = []
    timestamp_files = []
    key = None
    cert = None
    for opt, arg in opts:
        if opt == "-b":
            bundles.append(arg)
        elif opt == "-d":
            basedirs.append(arg)
        elif opt == "-s":
            repo_uri = arg
            if repo_uri and not repo_uri.startswith("null:"):
                repo_uri = misc.parse_uri(repo_uri)
        elif opt == "-T":
            timestamp_files.append(arg)
        elif opt == "--no-catalog":
            add_to_catalog = False
        elif opt == "--key":
            key = arg
        elif opt == "--cert":
            cert = arg

    if not repo_uri:
        usage(_("A destination package repository must be provided "
            "using -s."), cmd="publish")

    # With no file operands, read manifest data from stdin.
    if not pargs:
        filelist = [("<stdin>", sys.stdin)]
    else:
        try:
            filelist = [(f, open(f)) for f in pargs]
        except IOError as e:
            error(e, cmd="publish")
            return EXIT_OOPS

    lines = ""       # giant string of all input files concatenated together
    linecnts = []    # tuples of starting line number, ending line number
    linecounter = 0  # running total
    for filename, f in filelist:
        try:
            data = f.read()
        except IOError as e:
            error(e, cmd="publish")
            return EXIT_OOPS
        lines += data
        linecnt = len(data.splitlines())
        linecnts.append((linecounter, linecounter + linecnt))
        linecounter += linecnt
        f.close()

    m = pkg.manifest.Manifest()
    try:
        m.set_content(content=lines)
    except apx.InvalidPackageErrors as err:
        e = err.errors[0]
        lineno = e.lineno
        # Map the global line number back to the input file it
        # came from.
        for i, tup in enumerate(linecnts):
            if lineno > tup[0] and lineno <= tup[1]:
                filename = filelist[i][0]
                lineno -= tup[0]
                break
        else:
            filename = "???"
            lineno = "???"
        # Fix: the message previously hardcoded "(unknown)" while
        # ignoring the 'filename' keyword; report the actual file.
        error(_("File {filename} line {lineno}: {err}").format(
            filename=filename, lineno=lineno, err=e), cmd="publish")
        return EXIT_OOPS

    try:
        pfmri = pkg.fmri.PkgFmri(m["pkg.fmri"])
        if not pfmri.version:
            # Cannot have a FMRI without version
            error(_("The pkg.fmri attribute '{0}' in the package "
                "manifest must include a version.").format(pfmri),
                cmd="publish")
            return EXIT_OOPS
        if not DebugValues["allow-timestamp"]:
            # If not debugging, timestamps are ignored.
            pfmri.version.timestr = None
        pkg_name = pfmri.get_fmri()
    except KeyError:
        error(_("Manifest does not set pkg.fmri"))
        return EXIT_OOPS

    xport, pub = setup_transport_and_pubs(repo_uri, ssl_key=key,
        ssl_cert=cert)
    t = trans.Transaction(repo_uri, pkg_name=pkg_name, xport=xport,
        pub=pub)
    t.open()

    target_files = []
    if bundles:
        # Ensure hardlinks marked as files in the manifest are
        # treated as files.  This necessary when sourcing files
        # from some bundle types.
        target_files.extend(a.attrs["path"] for a in m.gen_actions()
            if a.name == "file")

    bundles = [
        pkg.bundle.make_bundle(bundle, targetpaths=target_files)
        for bundle in bundles
    ]

    for a in m.gen_actions():
        # don't publish these actions
        if a.name == "signature":
            # Fix: .format() was previously applied inside _(), so
            # the already-formatted string missed the translation
            # catalog; translate first, then format.
            msg(_("WARNING: Omitting signature action "
                "'{0}'").format(a))
            continue
        if a.name == "set" and a.attrs["name"] in ["pkg.fmri", "fmri"]:
            continue
        elif a.has_payload:
            # Forcibly discard content-related attributes to prevent
            # errors when reusing manifests with different content.
            for attr in strip_attrs:
                a.attrs.pop(attr, None)
            path = pkg.actions.set_action_data(a.hash, a,
                basedirs=basedirs, bundles=bundles)[0]
        elif a.name in nopub_actions:
            # Fix: this message previously referenced the undefined
            # name 'action'; the loop variable is 'a'.
            error(_("invalid action for publication: {0}").format(a),
                cmd="publish")
            t.close(abandon=True)
            return EXIT_OOPS
        if a.name == "file":
            basename = os.path.basename(a.attrs["path"])
            for pattern in timestamp_files:
                if fnmatch.fnmatch(basename, pattern):
                    if not isinstance(path, six.string_types):
                        # Target is from bundle; can't
                        # apply timestamp now.
                        continue
                    ts = misc.time_to_timestamp(
                        os.stat(path).st_mtime)
                    a.attrs["timestamp"] = ts
                    break
        try:
            t.add(a)
        except:
            # Abandon the transaction on any failure so a partial
            # package is not left behind, then re-raise.
            t.close(abandon=True)
            raise

    pkg_state, pkg_fmri = t.close(abandon=False,
        add_to_catalog=add_to_catalog)
    for val in (pkg_state, pkg_fmri):
        if val is not None:
            msg(val)
    return EXIT_OK
def republish_packages(pub, target_pub, processdict, source_list,
    variant_list, variants, tracker, xport, dest_repo, dest_xport,
    pkg_tmpdir, dry_run=False):
    """Republish packages for publisher pub to dest_repo.

    If we try to republish a package that we have already published,
    an exception is raise.

    pub             the publisher from source_list that we are
                    republishing

    target_pub      the destination publisher

    processdict     a dict indexed by package name of the pkgs to merge

    source_list     a list of source respositories

    variant_list    a list of dicts containing variant names/values

    variants        the unique set of variants across all sources.

    tracker         a progress tracker

    xport           the transport handling our source repositories

    dest_repo       our destination repository

    dest_xport      the transport handling our destination repository

    pkg_tmpdir      a temporary dir used when downloading pkg content
                    which may be deleted and recreated by this method.

    dry_run         True if we should not actually publish
    """

    def get_basename(pfmri):
        # Derive the transaction ID: "<open-timestamp>_<quoted-fmri>".
        open_time = pfmri.get_timestamp()
        # Fix: the quoted FMRI argument was previously dropped because
        # the format string used field {0} twice; {1} restores the
        # intended "<timestamp>_<quoted-fmri>" basename.
        return "{0:d}_{1}".format(
            calendar.timegm(open_time.utctimetuple()),
            urllib.quote(str(pfmri), ""))

    for entry in processdict:
        man, retrievals = merge_fmris(source_list,
            processdict[entry], variant_list, variants)

        # Determine total bytes to retrieve for this package; this must
        # be done using the retrievals dict since they are coalesced by
        # hash.
        getbytes = sum(
            misc.get_pkg_otw_size(a)
            for i, uri in enumerate(source_list)
            for a in retrievals[i]
        )

        # Determine total bytes to send for this package; this must be
        # done using the manifest since retrievals are coalesced based
        # on hash, but sends are not.
        sendbytes = sum(
            int(a.attrs.get("pkg.size", 0))
            for a in man.gen_actions()
        )

        f = man.fmri

        tracker.republish_start_pkg(f, getbytes=getbytes,
            sendbytes=sendbytes)

        if dry_run:
            # Dry-run; attempt a merge of everything but don't
            # write any data or publish packages.
            continue

        target_pub.prefix = f.publisher

        # Retrieve package data from each package source.
        for i, uri in enumerate(source_list):
            pub.repository.origins = [uri]
            mfile = xport.multi_file_ni(pub, pkg_tmpdir,
                decompress=True, progtrack=tracker)
            for a in retrievals[i]:
                mfile.add_action(a)
            mfile.wait_files()

        trans_id = get_basename(f)
        pkg_name = f.get_fmri()
        pubs.add(target_pub.prefix)

        # Publish merged package.
        t = trans.Transaction(dest_repo, pkg_name=pkg_name,
            trans_id=trans_id, xport=dest_xport, pub=target_pub,
            progtrack=tracker)

        # Remove any previous failed attempt to
        # to republish this package.
        try:
            t.close(abandon=True)
        except:
            # It might not exist already.
            pass

        t.open()
        for a in man.gen_actions():
            if (a.name == "set" and
                a.attrs["name"] == "pkg.fmri"):
                # To be consistent with the
                # server, the fmri can't be
                # added to the manifest.
                continue
            if hasattr(a, "hash"):
                fname = os.path.join(pkg_tmpdir, a.hash)
                # Fix: bind 'fname' as a default argument so the
                # payload callable captures this action's path, not
                # whatever 'fname' holds when it is finally invoked
                # (late-binding closure hazard).
                a.data = lambda fname=fname: open(fname, "rb")
            t.add(a)

        # Always defer catalog update.
        t.close(add_to_catalog=False)

        # Done with this package.
        tracker.republish_end_pkg(f)

        # Dump retrieved package data after each republication and
        # recreate the directory for the next package.
        shutil.rmtree(pkg_tmpdir)
        os.mkdir(pkg_tmpdir)
def main_func():
    """Entry point for pkgsign: sign one or more published packages.

    Parses global options, resolves the package patterns against the
    source repository's catalogs, constructs and appends a signature
    action to each matching manifest, and republishes the result via
    an append transaction.  Returns an EXIT_* status code."""
    global_settings.client_name = "pkgsign"

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:D:",
            ["help", "no-index", "no-catalog"])
    except getopt.GetoptError as e:
        usage(_("illegal global option -- {0}").format(e.opt))

    show_usage = False
    sig_alg = "rsa-sha256"  # default combined sign+hash algorithm
    cert_path = None        # -c: signing certificate
    key_path = None         # -k: private key paired with -c
    chain_certs = []        # -i: intermediate certificates
    add_to_catalog = True
    set_alg = False
    dry_run = False

    repo_uri = os.getenv("PKG_REPO", None)
    for opt, arg in opts:
        if opt == "-a":
            sig_alg = arg
            set_alg = True
        elif opt == "-c":
            cert_path = os.path.abspath(arg)
            if not os.path.isfile(cert_path):
                usage(_("{0} was expected to be a certificate "
                    "but isn't a file.").format(cert_path))
        elif opt == "-i":
            p = os.path.abspath(arg)
            if not os.path.isfile(p):
                usage(_("{0} was expected to be a certificate "
                    "but isn't a file.").format(p))
            chain_certs.append(p)
        elif opt == "-k":
            key_path = os.path.abspath(arg)
            if not os.path.isfile(key_path):
                usage(_("{0} was expected to be a key file "
                    "but isn't a file.").format(key_path))
        elif opt == "-n":
            dry_run = True
        elif opt == "-s":
            repo_uri = misc.parse_uri(arg)
        elif opt == "--help":
            show_usage = True
        elif opt == "--no-catalog":
            add_to_catalog = False
        elif opt == "-D":
            try:
                key, value = arg.split("=", 1)
                DebugValues.set_value(key, value)
            except (AttributeError, ValueError):
                error(_("{opt} takes argument of form "
                    "name=value, not {arg}").format(
                    opt=opt, arg=arg))

    if show_usage:
        usage(retcode=EXIT_OK)

    # Option sanity checks: a repository is mandatory, and the key and
    # certificate must be supplied as a pair.
    if not repo_uri:
        usage(_("a repository must be provided"))

    if key_path and not cert_path:
        usage(_("If a key is given to sign with, its associated "
            "certificate must be given."))

    if cert_path and not key_path:
        usage(_("If a certificate is given, its associated key must be "
            "given."))

    if chain_certs and not cert_path:
        usage(_("Intermediate certificates are only valid if a key "
            "and certificate are also provided."))

    if not pargs:
        usage(_("At least one fmri or pattern must be provided to "
            "sign."))

    if not set_alg and not key_path:
        # No key and no explicit -a: fall back to a hash-only
        # "signature".
        sig_alg = "sha256"

    # Split the algorithm into its signing ('s') and hash ('h') parts.
    s, h = actions.signature.SignatureAction.decompose_sig_alg(sig_alg)
    if h is None:
        usage(_("{0} is not a recognized signature algorithm.").format(
            sig_alg))
    if s and not key_path:
        usage(_("Using {0} as the signature algorithm requires that a "
            "key and certificate pair be presented using the -k and -c "
            "options.").format(sig_alg))
    if not s and key_path:
        usage(_("The {0} hash algorithm does not use a key or "
            "certificate. Do not use the -k or -c options with this "
            "algorithm.").format(sig_alg))

    if DebugValues:
        # Reload so debug settings (e.g. hash overrides) take effect
        # in the digest module.
        reload(digest)

    errors = []

    # Scratch directories for certificates, downloaded content, and
    # chain hashes; removed in the finally clause below.
    t = misc.config_temp_root()
    temp_root = tempfile.mkdtemp(dir=t)
    del t

    cache_dir = tempfile.mkdtemp(dir=temp_root)
    incoming_dir = tempfile.mkdtemp(dir=temp_root)
    chash_dir = tempfile.mkdtemp(dir=temp_root)
    cert_dir = tempfile.mkdtemp(dir=temp_root)

    try:
        chain_certs = [
            __make_tmp_cert(cert_dir, c) for c in chain_certs
        ]
        if cert_path is not None:
            cert_path = __make_tmp_cert(cert_dir, cert_path)

        xport, xport_cfg = transport.setup_transport()
        xport_cfg.add_cache(cache_dir, readonly=False)
        xport_cfg.incoming_root = incoming_dir

        # Configure publisher(s)
        transport.setup_publisher(repo_uri, "source", xport,
            xport_cfg, remote_prefix=True)
        pats = pargs

        successful_publish = False
        concrete_fmris = []
        unmatched_pats = set(pats)
        all_pats = frozenset(pats)
        get_all_pubs = False
        pub_prefs = set()

        # Gather the publishers whose catalogs will be needed.
        for pat in pats:
            try:
                p_obj = fmri.MatchingPkgFmri(pat)
            except fmri.IllegalMatchingFmri as e:
                errors.append(e)
                continue
            pub_prefix = p_obj.get_publisher()
            if pub_prefix:
                pub_prefs.add(pub_prefix)
            else:
                # Pattern without a publisher: every catalog
                # must be consulted.
                get_all_pubs = True

        # Check each publisher for matches to our patterns.
        for p in xport_cfg.gen_publishers():
            if not get_all_pubs and p.prefix not in pub_prefs:
                continue

            cat = fetch_catalog(p, xport, temp_root)

            ms, tmp1, u = cat.get_matching_fmris(pats)

            # Find which patterns matched.
            matched_pats = all_pats - u

            # Remove those patterns from the unmatched set.
            unmatched_pats -= matched_pats
            for v_list in ms.values():
                concrete_fmris.extend([(v, p) for v in v_list])

        if unmatched_pats:
            raise api_errors.PackageMatchErrors(
                unmatched_fmris=unmatched_pats)

        for pfmri, src_pub in sorted(set(concrete_fmris)):
            try:
                # Get the existing manifest for the package to
                # be signed.
                m_str = xport.get_manifest(pfmri,
                    content_only=True, pub=src_pub)
                m = manifest.Manifest()
                m.set_content(content=m_str)

                # Construct the base signature action.
                attrs = { "algorithm": sig_alg }
                a = actions.signature.SignatureAction(cert_path,
                    **attrs)
                a.hash = cert_path

                # Add the action to the manifest to be signed
                # since the action signs itself.
                m.add_action(a, misc.EmptyI)

                # Set the signature value and certificate
                # information for the signature action.
                a.set_signature(m.gen_actions(),
                    key_path=key_path, chain_paths=chain_certs,
                    chash_dir=chash_dir)

                # The hash of 'a' is currently a path, we need
                # to find the hash of that file to allow
                # comparison to existing signatures.
                hsh = None
                if cert_path:
                    # Action identity still uses the 'hash'
                    # member of the action, so we need to
                    # stay with the sha1 hash.
                    hsh, _dummy = \
                        misc.get_data_digest(cert_path,
                        hash_func=hashlib.sha1)

                # Check whether the signature about to be added
                # is identical, or almost identical, to existing
                # signatures on the package. Because 'a' has
                # already been added to the manifest, it is
                # generated by gen_actions_by_type, so the cnt
                # must be 2 or higher to be an issue.
                cnt = 0
                almost_identical = False
                for a2 in m.gen_actions_by_type("signature"):
                    try:
                        if a.identical(a2, hsh):
                            cnt += 1
                    except api_errors.AlmostIdentical as e:
                        e.pkg = pfmri
                        errors.append(e)
                        almost_identical = True
                if almost_identical:
                    continue
                if cnt == 2:
                    # The new signature duplicates an existing
                    # one exactly; skip republishing.
                    continue
                elif cnt > 2:
                    raise api_errors.DuplicateSignaturesAlreadyExist(
                        pfmri)
                assert cnt == 1, "Cnt was:{0}".format(cnt)

                if not dry_run:
                    # Append the finished signature action
                    # to the published manifest.
                    t = trans.Transaction(repo_uri,
                        pkg_name=str(pfmri), xport=xport,
                        pub=src_pub)
                    try:
                        t.append()
                        t.add(a)
                        for c in chain_certs:
                            t.add_file(c)
                        t.close(add_to_catalog=
                            add_to_catalog)
                    except:
                        # Abandon the partially appended
                        # transaction before re-raising.
                        if t.trans_id:
                            t.close(abandon=True)
                        raise
                msg(_("Signed {0}").format(pfmri.get_fmri(
                    include_build=False)))
                successful_publish = True
            except (api_errors.ApiException, fmri.FmriError,
                trans.TransactionError) as e:
                errors.append(e)
        if errors:
            error("\n".join([str(e) for e in errors]))
            if successful_publish:
                return EXIT_PARTIAL
            else:
                return EXIT_OOPS
        return EXIT_OK
    except api_errors.ApiException as e:
        error(e)
        return EXIT_OOPS
    finally:
        shutil.rmtree(temp_root)
if attributes.has_key("os"): if is_valid_for_os(attributes, opsys): # We match the os. Use it pversion = ver break else: continue else: # Version entry not tagged with OS. Be ready to use it pversion = ver else: pversion = p["version"] pname = ("%s@%s" % (p["name"], pversion)).replace( "@{version}", version) t = trans.Transaction(depoturl, pkg_name=pname) id = t.open() print "open of %s: id=%s" % (pname, id) resetDefaults() if p.has_key("defaults"): setDefaults(p["defaults"]) if p.has_key("excludefiles"): excludefiles = p["excludefiles"] else: excludefiles = [] print "excludefiles = %s" % (excludefiles) #Normalize the files in excludefiles
# Get first line of original manifest so that inclusion of the # scheme can be determined. use_scheme = True contents = get_manifest(src_uri, f, basedir, contents=True) if contents.splitlines()[0].find("pkg:/") == -1: use_scheme = False pkg_name = f.get_fmri(include_scheme=use_scheme) pkgdir = os.path.join(basedir, f.get_dir_path()) # This is needed so any previous failures for a package # can be aborted. trans_id = get_basename(f) try: t = trans.Transaction(target, create_repo=create_repo, pkg_name=pkg_name, trans_id=trans_id) # Remove any previous failed attempt to # to republish this package. try: t.close(abandon=True) except: # It might not exist already. pass t.open() for a in m.gen_actions(): if a.name == "set" and \ a.attrs.get("name", "") == "fmri": # To be consistent with the server, # the fmri can't be added to the